hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
5d5c3d6e909776f7bf47105eff588cf5906316c3 | 350 | class RegistrationsController < Devise::RegistrationsController
private
# Whitelist of attributes a visitor may submit when creating an account.
def sign_up_params
  permitted = [:name, :username, :email, :password, :password_confirmation]
  params.require(:user).permit(*permitted)
end
# Whitelist of attributes a user may change on their own account,
# including :current_password which Devise requires to authorize the edit.
#
# FIX: renamed from `acount_update_params` — Devise::RegistrationsController
# looks up `account_update_params`, so the misspelled override was never
# invoked and the extra fields (:name, :username) were silently dropped
# on account updates.
def account_update_params
  params.require(:user).permit(:name, :username, :email, :password, :password_confirmation, :current_password)
end
end | 26.923077 | 112 | 0.762857 |
79beb197d3f70ac1b4298f63c467491bf555e178 | 800 | describe 'Quickbooks::Service::VendorChange' do
let(:service) { construct_service :vendor_change }

# Queries the CDC endpoint against a canned XML fixture and checks the
# single deleted-vendor entry is parsed (status, id and metadata).
#
# FIX: removed the unused local `model = Quickbooks::Model::VendorChange`
# — it was assigned but never referenced.
it 'can query for vendors' do
  xml = fixture('vendor_changes.xml')
  stub_http_request(:get, service.url_for_query, %w[200 OK], xml)
  vendors = service.query
  expect(vendors.entries.count).to eq 1
  first_vendor = vendors.entries.first
  expect(first_vendor.status).to eq 'Deleted'
  expect(first_vendor.id).to eq '39'
  expect(first_vendor.meta_data).to_not be_nil
  expect(first_vendor.meta_data.last_updated_time).to eq DateTime.parse('2014-12-08T19:36:24-08:00')
end
# Vendor changes are fetched via the change-data-capture (cdc) endpoint.
describe '#url_for_query' do
  subject { service.url_for_query }
  it { is_expected.to eq "#{service.url_for_base}/cdc?entities=Vendor" }
end
end
| 32 | 102 | 0.72625 |
bbd1779fc876c3efbfcdea3d1ec9a3f2abfd33d6 | 1,786 | # Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
require "json"

# The pod version always mirrors the JS package: read it from the repo's
# package.json two directories up.
package = JSON.parse(File.read(File.join(__dir__, "..", "..", "package.json")))
version = package['version']

source = { :git => 'https://github.com/facebook/react-native.git' }
if version == '1000.0.0'
  # This is an unpublished version, use the latest commit hash of the react-native repo, which we're presumably in.
  source[:commit] = `git rev-parse HEAD`.strip if system("git rev-parse --git-dir > /dev/null 2>&1")
else
  source[:tag] = "v#{version}"
end

# Compiler flags shared with the other React Native pods that build
# against Folly and boost.
folly_compiler_flags = '-DFOLLY_NO_CONFIG -DFOLLY_MOBILE=1 -DFOLLY_USE_LIBCPP=1 -Wno-comma -Wno-shorten-64-to-32'
folly_version = '2021.06.28.00-v2'
boost_compiler_flags = '-Wno-documentation'
# Podspec for React-jsiexecutor, the JSI-based JS executor (jsireact).
Pod::Spec.new do |s|
  s.name = "React-jsiexecutor"
  s.version = version
  s.summary = "-" # TODO
  s.homepage = "https://reactnative.dev/"
  s.license = package["license"]
  s.author = "Facebook, Inc. and its affiliates"
  s.platforms = { ios: "12.4" }
  s.source = source
  s.source_files = "jsireact/*.{cpp,h}"

  # Folly and boost each need their own warning suppressions.
  s.compiler_flags = [folly_compiler_flags, boost_compiler_flags].join(' ')
  s.pod_target_xcconfig = { "HEADER_SEARCH_PATHS" => "\"$(PODS_ROOT)/boost\" \"$(PODS_ROOT)/RCT-Folly\" \"$(PODS_ROOT)/DoubleConversion\"" }
  s.header_dir = "jsireact"

  s.dependency "React-cxxreact", version
  s.dependency "React-jsi", version
  s.dependency "React-perflogger", version
  s.dependency "RCT-Folly", folly_version
  s.dependency "DoubleConversion"
  s.dependency "glog"
end
| 40.590909 | 143 | 0.641097 |
e97c415ffa71362efaa0390640dec1ce629512fc | 2,558 | # *******************************************************************************
# OpenStudio(R), Copyright (c) 2008-2021, Alliance for Sustainable Energy, LLC.
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# (1) Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# (2) Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# (3) Neither the name of the copyright holder nor the names of any contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission from the respective party.
#
# (4) Other than as required in clauses (1) and (2), distributions in any form
# of modifications or other derivative works may not use the "OpenStudio"
# trademark, "OS", "os", or any other confusingly similar designation without
# specific prior written permission from Alliance for Sustainable Energy, LLC.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDER(S) AND ANY CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER(S), ANY CONTRIBUTORS, THE
# UNITED STATES GOVERNMENT, OR THE UNITED STATES DEPARTMENT OF ENERGY, NOR ANY OF
# THEIR EMPLOYEES, BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
# OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# *******************************************************************************
require 'openstudio/effibem_measures/version'
require 'openstudio/extension'
module OpenStudio
  module EffibemMeasures
    # Registers this gem with the OpenStudio Extension machinery so its
    # measures directory can be discovered.
    class EffibemMeasures < OpenStudio::Extension::Extension
      # Override parent class: point @root_dir at the gem root, which is
      # two directories above this file.
      def initialize
        super
        gem_root = File.join(File.dirname(__FILE__), '..', '..')
        @root_dir = File.absolute_path(gem_root)
      end
    end
  end
end
| 50.156863 | 85 | 0.722048 |
6ae451f2d89afd492a17eb783e00de3b10b8636a | 1,172 | class Libdvdcss < Formula
desc "Access DVDs as block devices without the decryption"
homepage "https://www.videolan.org/developers/libdvdcss.html"
url "https://download.videolan.org/pub/videolan/libdvdcss/1.4.2/libdvdcss-1.4.2.tar.bz2"
sha256 "78c2ed77ec9c0d8fbed7bf7d3abc82068b8864be494cfad165821377ff3f2575"

# Pre-built binary packages, one checksum per macOS release
# (old-style bottle DSL from pre-ARM Homebrew).
bottle do
  cellar :any
  sha256 "352a2c343c04e65ee38fe154c797a29cc9cca509212e2296e9cd54e3e824ce29" => :catalina
  sha256 "645422cdd6facba8137146fd12df0538b27432a72bc79c5ca8c2667ab9fc70bc" => :mojave
  sha256 "4029db91ed7536435bd29db6b67f55509be13e70b6170337edec72daad8992c4" => :high_sierra
  sha256 "907d51957c4674ddeb27b458dcf5f1f4b382219bda893fc8908147acc1c2b1ea" => :sierra
  sha256 "0aaed21ecd3c8d3b4a9997300a599de5a541689ab200a6ffce52167b2ce5b664" => :el_capitan
end
# HEAD builds come from git and must bootstrap ./configure with the
# autotools (see the autoreconf call in #install).
head do
  url "https://code.videolan.org/videolan/libdvdcss.git"
  depends_on "autoconf" => :build
  depends_on "automake" => :build
  depends_on "libtool" => :build
end
# Standard autotools build; only HEAD checkouts need the configure
# script regenerated first.
def install
  if build.head?
    system "autoreconf", "-if"
  end
  system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
  system "make", "install"
end
end
| 40.413793 | 93 | 0.769625 |
1866848bad97eea4b8b823c21747b1be97279a15 | 169 | # Read about factories at https://github.com/thoughtbot/factory_girl
# Factory for Goal: an associated category, a default amount of 1000
# and a monthly period.
#
# FIX: converted the static attribute syntax (`amount 1000`) to the
# dynamic block form. Static attributes are deprecated and were removed
# in factory_bot >= 5; the block form evaluates to the same values and
# works on every factory_girl/factory_bot version.
FactoryGirl.define do
  factory :goal do
    category
    amount { 1000 }
    period { "Month" }
  end
end
| 16.9 | 68 | 0.727811 |
e232b094a8037799aef02f9e415349e0ac0f27de | 2,515 | class OrdersController < ApplicationController
# Guests are allowed to place orders, so Devise authentication is skipped.
skip_before_action :authenticate_user!
# Shared cart-construction helpers (see the CartBuilder concern).
include CartBuilder
# Order confirmation page: loads the order and its addresses, then either
# renders the themed confirmation template or bounces back to checkout
# (see #validate_confirm_render).
#
# FIX: use the eager-loading finder — #set_address_variables reads both
# the delivery and billing address, so loading them up front avoids two
# extra queries. (set_eager_loading_order was previously defined but
# never called; it memoizes the same @order record.)
def confirm
  set_eager_loading_order
  set_address_variables
  validate_confirm_render
end
# Hands the order to its configured payment provider and follows
# whatever URL the provider returns (external gateway or a local
# success/failure route).
def complete
  set_order
  redirect_url = Store::PayProvider.new(order: @order, provider: @order.payment_type, session: session, ip_address: request.remote_ip).complete
  redirect_to redirect_url
end
# Payment success landing page. Only rendered when the order's latest
# transaction is pending or completed; anything else (including an
# unknown session order) redirects to the storefront root.
def success
  set_session_order
  transaction = @order.latest_transaction
  if transaction.pending? || transaction.completed?
    render theme_presenter.page_template_path('orders/success'), layout: theme_presenter.layout_template_path
  else
    redirect_to root_url
  end
rescue ActiveRecord::RecordNotFound
  redirect_to root_url
end
# Payment failure landing page. Only rendered when the latest
# transaction actually failed; otherwise (or when the session order is
# missing) the visitor is sent back to the root.
def failed
  set_session_order
  unless @order.latest_transaction.failed?
    redirect_to root_url
    return
  end
  render theme_presenter.page_template_path('orders/failed'), layout: theme_presenter.layout_template_path
rescue ActiveRecord::RecordNotFound
  redirect_to root_url
end
# Sends the customer back to their cart to retry payment. Orders whose
# last PayPal error code is fatal are decommissioned first so they
# cannot be resumed.
def retry
  set_order
  if Modulatron4000.paypal? && TradoPaypalModule::Paypaler.fatal_error_code?(@order.last_error_code)
    Payatron4000.decommission_order(@order)
  end
  redirect_to mycart_carts_url
end
# Cancels an order outright: decommission it via Payatron4000 and
# confirm the cancellation to the customer.
def destroy
  set_order
  Payatron4000.decommission_order(@order)
  flash_message :success, "Your order has been cancelled."
  redirect_to root_url
end
private

# Finds the active order stored in the visitor's session, eager loading
# the delivery address shown on the success/failed pages. Raises
# ActiveRecord::RecordNotFound when absent (callers rescue it).
def set_session_order
  @order = Order.active.includes(:delivery_address).find(session[:order_id])
end

# Memoized lookup of the active order addressed by params[:id].
def set_order
  @order ||= Order.active.find(params[:id])
end

# Variant of #set_order that eager loads both addresses.
# NOTE(review): confirm which actions should prefer this over #set_order
# when they read both addresses — it avoids two extra queries.
def set_eager_loading_order
  @order ||= Order.active.includes(:delivery_address, :billing_address).find(params[:id])
end

# Exposes the order's addresses to the confirmation view.
def set_address_variables
  @delivery_address = @order.delivery_address
  @billing_address = @order.billing_address
end
# Renders the themed confirmation page when the order's pay-provider
# params are valid, first assigning the PayPal token/payer for PayPal
# orders. Invalid params flash an error and return to checkout.
#
# FIX: corrected the misspelled flash message ("ocurred" -> "occurred").
def validate_confirm_render
  if Payatron4000.order_pay_provider_valid?(@order, params)
    TradoPaypalModule::Paypaler.assign_paypal_token(params[:token], params[:PayerID], @order) if @order.paypal?
    render theme_presenter.page_template_path('orders/confirm'), layout: theme_presenter.layout_template_path
  else
    flash_message :error, 'An error occurred when trying to complete your order. Please try again.'
    redirect_to checkout_carts_url
  end
end
end | 30.670732 | 147 | 0.716103 |
f717a6557cf8557414b46fb6effab53edbb688d5 | 134 | require "peatio/bsc/version"
require "pry-byebug"
require "peatio"
# Namespace for the Peatio BSC plugin.
module Peatio
  # FIX: was `module bsc` — Ruby module names must be constants
  # (start with an uppercase letter), so the lowercase form was a
  # SyntaxError that prevented the library from loading at all.
  module Bsc
    # Your code goes here...
  end
end
| 13.4 | 28 | 0.708955 |
0871c034da5727a8307a9491086d6922c5189397 | 1,835 | require 'spec_helper'
module RailsDuplicateKeyChecker
  describe DuplicateKeysAnalyzer do
    let(:duplicate_keys_analyzer) { described_class.new }

    describe '#alter_table_statements' do
      subject { duplicate_keys_analyzer.alter_table_statements }

      let(:statements) { double(:statements) }
      let(:drop_index_statements) do
        instance_double(DropIndexStatements, parse: statements)
      end

      # The analyzer parses the raw pt-duplicate-key-checker output
      # through DropIndexStatements.
      before do
        allow(DropIndexStatements)
          .to receive(:new).with('command output')
          .and_return(drop_index_statements)
      end

      before do
        allow(Kernel)
          .to receive(:system)
          .with('pt-duplicate-key-checker D=teambox_development -u root')
          .and_return('command output')
      end

      context 'when the pt-duplicate-key-checker command is successful' do
        before { allow($?).to receive(:exitstatus).and_return(0) }

        # FIX: `not_to raise_error(SpecificClass)` is rejected by RSpec 3+
        # (it risks false positives), so the negative expectation must not
        # name a class; the description now matches what is asserted.
        it 'does not raise an InvalidScanError' do
          expect { duplicate_keys_analyzer.scan }
            .not_to raise_error
        end

        it 'calls #parse on the drop index statements' do
          expect(drop_index_statements).to receive(:parse)
          duplicate_keys_analyzer.alter_table_statements
        end

        it 'returns parsed drop index statements' do
          expect(duplicate_keys_analyzer.alter_table_statements)
            .to eq(statements)
        end
      end

      context 'when the pt-duplicate-key-checker command is unsuccessful' do
        before do
          allow($?).to receive(:exitstatus).and_return(-1)
        end

        # FIX: grammar — "an InvalidScanError".
        it 'raises an InvalidScanError' do
          expect { duplicate_keys_analyzer.alter_table_statements }
            .to raise_error(DuplicateKeysAnalyzer::InvalidScanError)
        end
      end
    end
  end
end
| 30.583333 | 76 | 0.66158 |
ed4123b17b6c91136d208e554d6974c3e0a837fe | 441 | require 'spec_helper'
# Model spec for Player: association and validation contracts expressed
# as shoulda-matchers one-liners.
describe Player do
  context 'associations' do
    it { should belong_to(:team) }
  end

  context 'validations' do
    it { should validate_presence_of(:team) }
    it { should validate_presence_of(:name) }
    # Names only need to be unique within a single team.
    it { should validate_uniqueness_of(:name).scoped_to(:team_id) }
    it { should validate_presence_of(:position) }
    # NOTE(review): ensure_inclusion_of is the pre-3.x shoulda-matchers
    # API (renamed validate_inclusion_of) — confirm the pinned gem version.
    it { should ensure_inclusion_of(:position).in_array(Player::POSITIONS) }
  end
end
| 22.05 | 76 | 0.709751 |
5ddb57cb6c684f3e2509b17c8233b2d03da1a114 | 14,910 | # -*- encoding: utf-8 -*-
class NdlStatistic < ActiveRecord::Base
# Per-category result tables; recalculating a term destroys and
# rebuilds them (:dependent => :destroy).
has_many :ndl_stat_manifestations, :dependent => :destroy
has_many :ndl_stat_accepts, :dependent => :destroy
has_many :ndl_stat_checkouts, :dependent => :destroy
has_many :ndl_stat_accesses, :dependent => :destroy
attr_accessible :term_id

# NOTE(review): evaluated once at class-load time — this hits the DB on
# boot and the allowed list goes stale when terms are added until the
# app restarts; consider a proc for :in if the Rails version supports it.
term_ids = Term.select(:id).map(&:id)
validates_presence_of :term_id
validates_uniqueness_of :term_id
validates_inclusion_of :term_id, :in => term_ids

# Statistic categories: current holdings, items removed during the
# term, and the cumulative removed total.
TYPE_LIST = [ "all_items", "removed", "removed_sum" ]
# Language split used throughout: Japanese material vs everything else.
REGION_LIST = [ "domestic", "foreign" ]
# Entry points: rebuild the NDL annual-report statistics for a term.

# Recalculate the statistics for the current term.
def self.calc_sum
  recalc_for_term Term.current_term
end

# Recalculate the statistics for the previous term.
def self.calc_sum_prev_year
  recalc_for_term Term.previous_term
end

# Shared worker: drop any existing statistics for +term+ and compute a
# fresh set. (FIX: the two public entry points previously duplicated
# this destroy/create/calc sequence verbatim.)
def self.recalc_for_term(term)
  NdlStatistic.where(:term_id => term.id).destroy_all
  NdlStatistic.create!(:term_id => term.id).calc_all
end
# Aggregation driver for the NDL annual report.
#
# Caches the term boundaries and the lookup rows shared by the
# individual calculators (@prev_term_end/@curr_term_end,
# @language_japanese_id, @circulation_removed_id, @checkout_types,
# @carrier_types, @accept_types). If that setup fails the method logs
# and returns false without computing anything; otherwise it runs the
# four section calculators in order.
#
# FIX: rescues StandardError instead of Exception so signals such as
# Interrupt/SystemExit and fatal VM errors are no longer swallowed.
def calc_all
  begin
    @prev_term_end = Term.where(:id => term_id).first.start_at.yesterday
    @curr_term_end = Term.where(:id => term_id).first.end_at
    @language_japanese_id = Language.find_by_name('Japanese').id
    @circulation_removed_id = CirculationStatus.find_by_name('Removed').id
    @checkout_types = CheckoutType.all
    @carrier_types = CarrierType.all
    @accept_types = AcceptType.all
  rescue StandardError => e
    p "Failed: #{e}"
    logger.error "Failed: #{e}"
    return false
  end

  # calculate ndl statistics, one section at a time
  self.calc_manifestation_counts
  self.calc_accept_counts
  self.calc_checkout_counts
  self.calc_access_counts
rescue StandardError => e
  p "Failed to calculate ndl statistics: #{e}"
  logger.error "Failed to calculate ndl statistics: #{e}"
end
# 1. Holdings: one NdlStatManifestation row per combination of
# (public flag, stat type, region, checkout type, carrier type),
# counting non-bound items as of the term end.
def calc_manifestation_counts
  NdlStatistic.transaction do
    # public / private split
    [ TRUE, FALSE].each do |pub_flg|
      TYPE_LIST.each do |type|
        # exclude already-bound volumes; note: installs other than the
        # Ministry of the Environment use bookbinder_id, not bookbinding_id
        # NOTE(review): SQL precedence parses this as
        # `bookbinding_id IS NULL OR (bookbinder AND created_at <= ?)`
        # — confirm the OR was not meant to be parenthesized.
        query = "bookbinding_id IS NULL OR items.bookbinder IS TRUE AND items.created_at <= ?"
        case type
        when "all_items", "public_items"
          query += " AND circulation_status_id != #{@circulation_removed_id}"
          query += " AND public_flg IS TRUE" unless pub_flg
        when "removed", "removed_sum"
          query += " AND circulation_status_id = #{@circulation_removed_id}"
          query += " AND removed_at between ? and ?" if type == "removed"
        end
        # "removed" has two extra placeholders (the removed_at range)
        if type == "removed"
          items_all = Item.joins(:manifestation).where(query, @curr_term_end, @prev_term_end, @curr_term_end)
        else
          items_all = Item.joins(:manifestation).where(query, @curr_term_end)
        end
        # Japan / foreign
        REGION_LIST.each do |region|
          if region == "domestic"
            items = items_all.where("language_id = ?", @language_japanese_id)
          else
            items = items_all.where("language_id != ?", @language_japanese_id)
          end
          # checkout type / material type (Ministry of the Environment)
          @checkout_types.each do |checkout_type|
            # carrier format
            @carrier_types.each do |carrier_type|
              count = items.where("checkout_type_id = ?", checkout_type.id).
                where("carrier_type_id = ?", carrier_type.id).count
              # create the per-combination child record
              n= ndl_stat_manifestations.new(
                :stat_type => type,
                :region => region,
                :checkout_type_id => checkout_type.id,
                :carrier_type_id => carrier_type.id,
                :count => count)
              n.pub_flg = pub_flg
              n.save!
            end
          end
        end
      end
    end
  end
rescue Exception => e
  # NOTE(review): rescuing Exception is overly broad — StandardError
  # would avoid swallowing signals.
  p "Failed to manifestation counts: #{e}"
  logger.error "Failed to calculate manifestation counts: #{e}"
end
# 2. Acquisitions: one NdlStatAccept row per (public flag, region,
# checkout type, carrier type, accept type) combination, counting items
# created within the term.
def calc_accept_counts
  NdlStatistic.transaction do
    # public / private split
    [ TRUE, FALSE].each do |pub_flg|
      items_all = Item.joins(:manifestation).
        where("bookbinding_id IS NULL OR items.bookbinder IS TRUE"). # other installs use bookbinder_id
        where("items.created_at BETWEEN ? AND ?" ,@prev_term_end ,@curr_term_end)
      items_all = items_all.where("public_flg IS TRUE") unless pub_flg
      # Japan / foreign
      [ "domestic", "foreign" ].each do |region|
        if region == "domestic"
          items = items_all.where("language_id = ?", @language_japanese_id)
        else
          items = items_all.where("language_id != ?", @language_japanese_id)
        end
        # checkout type / material type (Ministry of the Environment)
        @checkout_types.each do |checkout_type|
          # carrier format
          @carrier_types.each do |carrier_type|
            # accept (acquisition) type
            @accept_types.each do |accept_type|
              count = items.where("checkout_type_id = ?", checkout_type.id).
                where("carrier_type_id = ?", carrier_type.id).
                where("accept_type_id = ?", accept_type.id).count
              # create the per-combination child record
              ndl_stat_accepts.create(
                :region => region,
                :checkout_type_id => checkout_type.id,
                :carrier_type_id => carrier_type.id,
                :accept_type_id => accept_type.id,
                :pub_flg => pub_flg,
                :count => count)
            end
          end
        end
      end
    end
  end
rescue Exception => e
  p "Failed to accept counts: #{e}"
  logger.error "Failed to accept manifestation counts: #{e}"
end
# 3. Usage: per (checkout type, carrier type), record how many
# borrowers and how many items were checked out during the term.
def calc_checkout_counts
  NdlStatistic.transaction do
    # p "ndl_statistics of checkout_counts"
    # checkout type
    @checkout_types.each do |checkout_type|
      # carrier format
      @carrier_types.each do |carrier_type|
        checkouts = Checkout.joins(:item => :manifestation).
          where("checkout_type_id = ?", checkout_type.id).
          where("carrier_type_id = ?", carrier_type.id)
        # number of borrowers
        # NOTE(review): computed identically to the items count below —
        # a distinct borrower count was probably intended; confirm.
        user = checkouts.where("checkouts.created_at between ? and ?",
          @prev_term_end, @curr_term_end).count
        # number of items checked out
        item = checkouts.where("checkouts.created_at between ? and ?",
          @prev_term_end, @curr_term_end).count
        ndl_stat_checkouts.create(
          :checkout_type_id => checkout_type.id,
          :carrier_type_id => carrier_type.id,
          :users_count => user,
          :items_count => item)
      end
    end
  end
rescue Exception => e
  p "Failed to checkout counts: #{e}"
  logger.error "Failed to calculate checkout counts: #{e}"
end
# 4. Access counts: sum AccessLog values for the term, split by
# internal/external origin and by log (screen) type.
def calc_access_counts
  NdlStatistic.transaction do
    # internal / external
    [ TRUE, FALSE ].each do |internal|
      # one row per distinct access-log screen type
      AccessLog.group(:log_type).select(:log_type).map(&:log_type).each do |log_type|
        datas = AccessLog.where(:log_type => log_type, :internal => internal).
          where("date between ? and ?", @prev_term_end, @curr_term_end)
        ndl_stat_accesses.create(
          :log_type => log_type,
          :internal => internal,
          :count => datas.sum(:value))
      end
    end
  end
rescue Exception => e
  p "Failed to access counts: #{e}"
  logger.error "Failed to access counts: #{e}"
end
private

# NOTE(review): `private` has no effect on methods defined with
# `def self.` — the class methods below remain publicly callable;
# confirm whether private_class_method was intended.

# Excel output: render +ndl_statistic+ into an .xlsx workbook — one
# holdings/acquisitions sheet per public flag, a usage sheet and an
# access-count sheet — and return the generated file path.
def self.get_ndl_report_excelx(ndl_statistic)
  # initialize output location and layout constants
  out_dir = "#{Rails.root}/private/system/ndl_report_excelx"
  excel_filepath = "#{out_dir}/ndlreport#{Time.now.strftime('%s')}#{rand(10)}.xlsx"
  FileUtils.mkdir_p(out_dir) unless FileTest.exist?(out_dir)
  logger.info "get_ndl_report_excelx filepath=#{excel_filepath}"
  @font_size = 10
  @height = @font_size * 1.5
  # prepare the lookup lists shared with the header helper
  @checkout_types = CheckoutType.all
  @carrier_types = CarrierType.all
  @accept_types = AcceptType.all
  require 'axlsx'
  Axlsx::Package.new do |p|
    wb = p.workbook
    wb.styles do |s|
      title_style = s.add_style :font_name => Setting.manifestation_list_print_excelx.fontname,
        :alignment => { :vertical => :center },
        :sz => @font_size+2, :b => true
      @header_style = s.add_style :font_name => Setting.manifestation_list_print_excelx.fontname,
        :alignment => { :vertical => :center },
        :border => Axlsx::STYLE_THIN_BORDER,
        :sz => @font_size, :b => true
      default_style = s.add_style :font_name => Setting.manifestation_list_print_excelx.fontname,
        :alignment => { :vertical => :center },
        :border => Axlsx::STYLE_THIN_BORDER,
        :sz => @font_size
      # Holdings statistics
      # one sheet per public flag
      last_row = 3+@carrier_types.size # last row of the holdings block (used by the totals formulas)
      [ TRUE, FALSE ].each do |pub_flg|
        sheet_name = I18n.t("ndl_statistics.pub_flg.#{pub_flg.to_s}")
        wb.add_worksheet(:name => sheet_name) do |sheet|
          # (1) Holdings
          sheet.add_row ['(1) 図書'], :style => title_style, :height => @height*2
          checkout_region_header(sheet)
          sheet.column_info[0].width = 15
          sheet.column_info[1].width = 15
          TYPE_LIST.each do |type|
            next if pub_flg && type != 'all_items'
            @carrier_types.each do |carrier_type|
              row = [I18n.t("ndl_statistics.type.#{type}"), carrier_type.display_name.localize]
              @checkout_types.each do |checkout_type|
                REGION_LIST.each do |region|
                  data = ndl_statistic.ndl_stat_manifestations.where(:stat_type => type, :carrier_type_id => carrier_type.id,
                    :checkout_type_id => checkout_type.id, :region => region).first
                  row << data.count if data
                end
              end
              sheet.add_row row, :style => default_style, :height => @height
            end
            # totals row (only beneath the all_items section)
            if type == 'all_items'
              columns = ('C'..'Z').to_a #TODO for columns after AA
              row = ['','合計']
              num = @checkout_types.size*2
              num.times do |i|
                column = columns[i]
                row << "=SUM(#{column}4:#{column}#{last_row})"
              end
              sheet.add_row row, :style => default_style, :height => @height
            end
          end
          sheet.add_row
          # (2) Acquisitions
          sheet.add_row ['(2) 受入'], :style => title_style, :height => @height*2
          checkout_region_header(sheet)
          # one row per accept type x carrier format
          @accept_types.each do |accept_type|
            @carrier_types.each do |carrier_type|
              row = [accept_type.name, carrier_type.display_name.localize]
              sum = 0
              @checkout_types.each do |checkout_type|
                REGION_LIST.each do |region|
                  data = ndl_statistic.ndl_stat_accepts.where(:accept_type_id => accept_type.id, :checkout_type_id => checkout_type.id,
                    :region => region, :carrier_type_id => carrier_type.id).first
                  row << data.count if data
                  # NOTE(review): unlike the line above, this access is
                  # not guarded by `if data` — it raises if the record
                  # is missing; confirm that cannot happen here.
                  sum += data.count
                end
              end
              row << sum
              sheet.add_row row, :style => default_style, :height => @height
            end
          end
          # totals rows, one per carrier format
          columns = ('C'..'Z').to_a
          @carrier_types.each_with_index do |carrier_type, index|
            row = ['合計',carrier_type.display_name.localize]
            num = @checkout_types.size*2+1
            num.times do |i|
              row_num = last_row+6+index
              column = columns[i]
              sum_columns = []
              @accept_types.size.times {sum_columns << "#{column}#{row_num}";row_num+=@carrier_types.size}
              row << "=SUM(#{sum_columns.join(',')})"
            end
            sheet.add_row row, :style => default_style, :height => @height
          end
        end
      end
      # Usage statistics
      checkout_stat_types = ['items_count', 'users_count']
      wb.add_worksheet(:name => '利用統計') do |sheet|
        # (3) Usage
        sheet.add_row ['(3)利用'], :style => title_style, :height => @height*2
        # header: items checked out / number of borrowers
        header = checkout_stat_types.inject(['']){|array,t| array += [I18n.t("ndl_statistics.checkout.#{t}"),'']}
        sheet.add_row header, :style => @header_style, :height => @height
        # header: printed material / everything else
        header = checkout_stat_types.inject(['']){|array,t| array += [I18n.t("ndl_statistics.carrier_type.print"), I18n.t("ndl_statistics.carrier_type.other")]}
        sheet.add_row header, :style => @header_style, :height => @height
        @checkout_types.each do |checkout_type|
          row = [checkout_type.display_name.localize]
          checkout_stat_types.each do |s_type| # items checked out / borrowers
            ['print', 'other'].each do |c_type| # printed material / other
              datas = ndl_statistic.ndl_stat_checkouts.where(:checkout_type_id => checkout_type.id,
                :carrier_type_id => CarrierType.try(c_type).collect(&:id))
              row << datas.sum(s_type)
            end
          end
          sheet.add_row row, :style => default_style, :height => @height
        end
        # totals row
        columns = ('B'..'Z').to_a
        last_row = @checkout_types.size+3
        row = ['合計']
        4.times do |i|
          column = columns[i]
          row << "=SUM(#{column}4:#{column}#{last_row})"
        end
        sheet.add_row row, :style => default_style, :height => @height
      end
      # Access counts
      wb.add_worksheet(:name => 'アクセス件数') do |sheet|
        sheet.add_row ['(4)アクセス件数'], :style => title_style, :height => @height*2
        # internal / external
        # NOTE(review): this loop body is empty, so the access-count
        # sheet is never populated — the section looks unfinished.
        [TRUE, FALSE].each do |internal|
        end
      end
      p.serialize(excel_filepath)
    end
    return excel_filepath
  end
# NOTE(review): rescuing Exception hides Interrupt/SystemExit; consider
# StandardError.
rescue Exception => e
  p "Failed to create ndl report excelxt: #{e}"
  logger.error "Failed to create ndl report excelx: #{e}"
end
# Emits the two shared header rows used by the holdings/acquisition
# sheets: a row of checkout-type names (each spanning two columns) and
# a row of Japan/foreign ("日本"/"外国") sub-column labels.
def self.checkout_region_header(sheet)
  # checkout-type header row: two leading blanks, then each type name
  # followed by an empty spacer cell
  type_row = ['', '']
  @checkout_types.each { |checkout_type| type_row.concat([checkout_type.display_name.localize, '']) }
  sheet.add_row type_row, :style => @header_style, :height => @height

  # Japan / foreign header row, one pair per checkout type
  region_row = ['', ''] + @checkout_types.flat_map { ['日本', '外国'] }
  sheet.add_row region_row, :style => @header_style, :height => @height
end
end
| 39.97319 | 162 | 0.560899 |
d53c377e2f4ab7f5220c36197ad5227e877075f0 | 1,308 | class Gif2png < Formula
desc "Convert GIFs to PNGs"
homepage "http://www.catb.org/~esr/gif2png/"
url "http://www.catb.org/~esr/gif2png/gif2png-2.5.13.tar.gz"
sha256 "997275b20338e6cfe3bd4adb084f82627c34c856bc1d67c915c397cf55146924"

# Detect new upstream releases by scraping tarball links off the homepage.
livecheck do
  url :homepage
  regex(/href=.*?gif2png[._-]v?(\d+(?:\.\d+)+)\.t/i)
end

# Pre-built binary packages, one checksum per OS/arch.
bottle do
  sha256 cellar: :any, arm64_big_sur: "a8b1dd6b1f3b029b7ca53f99f18caea098810634aea1a745630028e66ecc4203"
  sha256 cellar: :any, big_sur: "2c3b07aba9f301e689fbc6268894e3ab3a56044741b8b4adabd6afb1d4962af1"
  sha256 cellar: :any, catalina: "cfbf0572aec85f33c51bc58064e20a44de374a319bb369e46c0aab8581756253"
  sha256 cellar: :any, mojave: "95c85cb74a70b1f217c3db5f4f6f6bab2b9871755435a25301bc4215015f1341"
  sha256 cellar: :any, high_sierra: "fd15459a5000f08952b7609ef743d80c84749710e30b7bfbe02d68e7ccc27ed7"
  sha256 cellar: :any, sierra: "25aa7ef95b5ca8e7a79bf884fa8e9c8eafb21f2887caabc3ffb40de5fda2ab26"
  sha256 cellar: :any, x86_64_linux: "6e56f9b8b442870d44877d51f23a824821c39926ce909faed2375cfb3f84e830" # linuxbrew-core
end

depends_on "libpng"
# Upstream's Makefile takes the destination via the `prefix` variable,
# so install straight into the Homebrew prefix.
def install
  destination = "prefix=#{prefix}"
  system "make", "install", destination
end
# Smoke test: pipe the bundled fixture GIF through gif2png's optimizing
# mode and require a clean exit.
test do
  pipe_output "#{bin}/gif2png -O", File.read(test_fixtures("test.gif"))
end
end
| 40.875 | 123 | 0.75841 |
18ea4cd9b03bb055cf7c26648d9d22fa7b9796e2 | 1,372 | #! /usr/bin/env ruby -S rspec
require 'spec_helper_acceptance'
# Acceptance tests for the any2array stdlib function: it must wrap
# scalars and hashes in an array and leave arrays unchanged. The Puppet
# manifests are applied on a live agent, so heredoc bodies below are
# runtime strings and must stay exactly as written.
describe 'any2array function', :unless => UNSUPPORTED_PLATFORMS.include?(fact('operatingsystem')) do
describe 'success' do
# An empty string becomes an (empty) array.
it 'should create an empty array' do
pp = <<-EOS
$input = ''
$output = any2array($input)
validate_array($output)
notify { "Output: ${output}": }
EOS
apply_manifest(pp, :catch_failures => true) do |r|
expect(r.stdout).to match(/Notice: Output: /)
end
end
# Arrays pass through untouched.
it 'should leave arrays modified' do
pp = <<-EOS
$input = ['test', 'array']
$output = any2array($input)
validate_array($output)
notify { "Output: ${output}": }
EOS
apply_manifest(pp, :catch_failures => true) do |r|
expect(r.stdout).to match(/Notice: Output: (\[|)test(,\s|)array(\]|)/)
end
end
# Hashes are flattened into alternating key/value strings.
it 'should turn a hash into an array' do
pp = <<-EOS
$input = {'test' => 'array'}
$output = any2array($input)
validate_array($output)
# Check each element of the array is a plain string.
validate_string($output[0])
validate_string($output[1])
notify { "Output: ${output}": }
EOS
apply_manifest(pp, :catch_failures => true) do |r|
expect(r.stdout).to match(/Notice: Output: (\[|)test(,\s|)array(\]|)/)
end
end
end
end
| 27.44 | 100 | 0.581633 |
083b99d123260f8b330c802a0c44dea746985617 | 1,487 | # frozen_string_literal: true
require 'active_support/core_ext/module/anonymous'
module Rails
  module GraphQL
    module Helpers
      # Helper module responsible for name stuff
      module WithName
        # Strips the framework namespace (and an optional trailing
        # CamelCase suffix) off a qualified class name.
        NAME_EXP = /GraphQL::(?:Type::\w+::|Directive::)?([:\w]+?)([A-Z][a-z]+)?\z/.freeze

        # Here we define a couple of attributes used by registration
        def self.extended(other)
          # An abstract type won't appear in the introspection and will not be
          # instantiated by requests
          other.class_attribute :abstract, instance_accessor: false, default: false
          # The given description of the element
          other.class_attribute :description, instance_writer: false
        end

        # Return the name of the object as a GraphQL name
        # (nil for anonymous classes/modules).
        def gql_name
          return if anonymous?

          @gql_name ||= name.match(NAME_EXP).try(:[], 1)&.tr(':', '')
        end
        alias graphql_name gql_name

        # Return the name of the object as a symbol
        def to_sym
          @gql_key ||= gql_name&.underscore&.to_sym
        end

        protected

        # An alias for +description = value.strip_heredoc.chomp+ that can be
        # used as method
        def desc(value)
          self.description = value.strip_heredoc.chomp
        end

        # Change the gql name of the object
        def rename!(name)
          @gql_name = name.to_s
        end
      end
    end
  end
end
| 28.596154 | 90 | 0.600538 |
389de77aa322b1737ef2ba95fe8c3c41547aab3e | 571 | # frozen_string_literal: true
require "test_helper"
# Verifies ElementalStyleguide.page_names returns the ordered
# slug/title pairs (with nested sub-pages) used to build the
# styleguide navigation.
class ElementalStyleguideTest < ActiveSupport::TestCase
  # rubocop:disable Style/WordArray
  test "#page_names returns a hash of page names" do
    # NOTE(review): despite the test name, the return value is a nested
    # array of [slug, title(, children)] tuples, not a Hash.
    assert_equal [
      ["01_home", "Home"],
      ["02_pages", "Pages", [
        ["01_page_one", "Page One"],
        ["02_page_two", "Page Two"]
      ]],
      ["03_examples", "Examples", [
        ["01_example_one", "Example One"],
        ["02_example_two", "Example Two"]
      ]]
    ], ElementalStyleguide.page_names
  end
  # rubocop:enable Style/WordArray
end
| 25.954545 | 55 | 0.630473 |
87a61ffabe9352ba86e52aa2fdbcab2c95246614 | 97 | json.extract! program, :id, :created_at, :updated_at
json.url program_url(program, format: :json) | 48.5 | 52 | 0.773196 |
acafff148e26e930c0189c3f5202ab4eaa0f509e | 6,873 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
include Msf::Auxiliary::Report
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::Scanner

# Quick service fingerprint: only engage servers whose HTTP banner
# matches "DManager" (SurgeNews' embedded web server).
HttpFingerprint = { :pattern => [ /DManager/ ] }
# Module metadata only: registers the scanner's name, description,
# references, author and disclosure date with the framework, plus the
# default WebNews port option.
def initialize(info = {})
super(update_info(
info,
'Name' => 'SurgeNews User Credentials',
'Description' => %q{
This module exploits a vulnerability in the WebNews web interface
of SurgeNews on TCP ports 9080 and 8119 which allows unauthenticated
users to download arbitrary files from the software root directory;
including the user database, configuration files and log files.
This module extracts the administrator username and password, and
the usernames and passwords or password hashes for all users.
This module has been tested successfully on SurgeNews version
2.0a-13 on Windows 7 SP 1 and 2.0a-12 on Ubuntu Linux.
},
'License' => MSF_LICENSE,
'References' =>
[
['URL', 'http://news.netwinsite.com:8119/webnews?cmd=body&item=34896&group=netwin.surgemail'],
],
'Author' => 'bcoles',
'DisclosureDate' => '2017-06-16'))
register_options [ Opt::RPORT(9080) ]
end
# Cap on the automatic re-requests performed by #read_file when an
# unregistered server intermittently answers "550 Key: No key activated".
def max_retries
3
end
# Probe the host by downloading install.log through the WebNews file
# disclosure; a body mentioning SurgeNews marks the target vulnerable.
def check_host(ip)
  @tries = 0
  body = read_file 'install.log'
  return Exploit::CheckCode::Vulnerable if body =~ /SurgeNews/

  Exploit::CheckCode::Safe
end
# Fetch +file+ from the SurgeNews install directory via the WebNews
# 'part' command (the unauthenticated arbitrary-file download).
#
# Unregistered servers intermittently answer 200 with a
# "550 Key: No key activated" body; such requests are retried up to
# #max_retries times, tracked in @tries (reset after a final outcome).
#
# @param file [String] file name relative to the software root
# @return [String, nil] the file contents, or nil on any failure
def read_file(file)
  data = nil
  @tries += 1
  vprint_status "Retrieving file: #{file}"
  res = send_request_cgi 'uri' => normalize_uri(target_uri.path, 'webnews'),
    'vars_get' => { 'cmd' => 'part', 'fname' => file }
  if !res
    vprint_error 'Connection failed'
  elsif res.code == 550
    vprint_error "Could not find file '#{file}'"
  elsif res.code == 200 && res.body =~ /550 Key: No key activated/
    # unregistered software throws an error once in every ~20 requests
    # try again...
    if @tries >= max_retries
      vprint_error "Failed to retrieve file '#{file}' after max retries (#{max_retries})"
    else
      vprint_status 'Retrying...'
      return read_file file
    end
  elsif res.code == 200 && !res.body.empty?
    vprint_good "Found #{file} (#{res.body.length} bytes)"
    data = res.body
  else
    vprint_error 'Unexpected reply'
  end
  @tries = 0
  data
end
# Extract the manager credentials recorded in a SurgeNews log file.
#
# The install/password logs contain lines such as
# "value_set(manager)(admin)" and "value_set(password)(secret)"; the
# last non-empty capture for each key wins, so later entries supersede
# earlier ones.
#
# @param log_data [String, nil] raw log file contents
# @return [Hash, nil] { 'username' => ..., 'password' => ... }, or nil
#   when no log data was supplied
def parse_log(log_data)
  return if log_data.nil?

  last_value = lambda do |key|
    captures = log_data.scan(/value_set\(#{key}\)\((.*)\)/).flatten
    captures.reject { |capture| capture.to_s.empty? }.last
  end

  { 'username' => last_value.call('manager'), 'password' => last_value.call('password') }
end
# Parse a SurgeNews nwauth user database (nwauth.add) into credentials.
#
# Each user line has the form "<user>:<secret>:Groups=...". A secret
# prefixed with '*' is a clear-text password; anything else is treated
# as a password hash.
#
# FIX: replaced ActiveSupport's String#starts_with? with Ruby core's
# String#start_with?, removing a silent dependency on ActiveSupport
# being loaded; same behavior otherwise.
#
# @param user_data [String, nil] raw nwauth.add contents
# @return [Array<Hash>, nil] one hash per user with 'username' and
#   either 'password' (clear text) or 'hash'; nil if no data supplied
def parse_user_db(user_data)
  return if user_data.nil?

  creds = []
  user_data.lines.each do |line|
    next if line.eql? ''
    next unless line =~ /^(.+?):(.*):Groups=/

    user = Regexp.last_match(1)
    pass = Regexp.last_match(2)
    # clear text credentials are prefaced with '*'; otherwise it's a hash
    if pass.start_with? '*'
      creds << { 'username' => user, 'password' => pass[1..-1] }
    else
      creds << { 'username' => user, 'hash' => pass }
    end
  end
  creds
end
def run_host(ip)
@tries = 0
service_data = { address: rhost,
port: rport,
service_name: (ssl ? 'https' : 'http'),
protocol: 'tcp',
workspace_id: myworkspace_id }
cred_table = Rex::Text::Table.new 'Header' => 'SurgeNews User Credentials',
'Indent' => 1,
'Columns' => ['Username', 'Password', 'Password Hash', 'Admin']
# Read administrator password from password.log
admin = parse_log read_file 'password.log'
# If password.log doesn't contain credentials
# then the password hasn't been updated since install.
# Retrieve the credentials from install.log instead.
admin = parse_log read_file 'install.log' if admin.nil?
if admin.nil?
vprint_error 'Found no administrator credentials'
else
print_good "Found administrator credentials (#{admin['username']}:#{admin['password']})"
cred_table << [admin['username'], admin['password'], nil, true]
credential_data = { origin_type: :service,
module_fullname: fullname,
private_type: :password,
private_data: admin['password'],
username: admin['username'] }
credential_data.merge! service_data
credential_core = create_credential credential_data
login_data = { core: credential_core,
access_level: 'Administrator',
status: Metasploit::Model::Login::Status::UNTRIED }
login_data.merge! service_data
create_credential_login login_data
end
# Read user credentials from nwauth.add
users = parse_user_db read_file 'nwauth.add'
if users.nil?
vprint_error 'Found no user credentials in nwauth.add'
else
vprint_status "Found #{users.length} users in nwauth.add"
end
users.each do |user|
next if user.empty?
cred_table << [user['username'], user['password'], user['hash'], false]
if user['password']
print_good "Found user credentials (#{user['username']}:#{user['password']})"
credential_data = { origin_type: :service,
module_fullname: fullname,
private_type: :password,
private_data: user['password'],
username: user['username'] }
else
credential_data = { origin_type: :service,
module_fullname: fullname,
private_type: :nonreplayable_hash,
private_data: user['hash'],
username: user['username'] }
end
credential_data.merge! service_data
credential_core = create_credential credential_data
login_data = { core: credential_core,
access_level: 'User',
status: Metasploit::Model::Login::Status::UNTRIED }
login_data.merge! service_data
create_credential_login login_data
end unless users.nil?
print_line
print_line cred_table.to_s
p = store_loot 'surgenews.user.creds', 'text/csv', rhost, cred_table.to_csv, 'SurgeNews User Credentials'
print_good "Credentials saved in: #{p}"
end
end
| 35.427835 | 109 | 0.585188 |
# Gem packaging metadata for the consul authorization library.
# (First line was corrupted by concatenated dataset metadata; restored.)
$LOAD_PATH.push File.expand_path("../lib", __FILE__)
require "consul/version"

Gem::Specification.new do |spec|
  spec.name        = 'consul'
  spec.version     = Consul::VERSION
  spec.authors     = ["Henning Koch"]
  spec.email       = '[email protected]'
  spec.homepage    = 'https://github.com/makandra/consul'
  spec.summary     = 'A scope-based authorization solution for Ruby on Rails.'
  spec.description = spec.summary
  spec.license     = 'MIT'

  # Package every git-tracked file except the test suites.
  spec.files = `git ls-files`.split("\n").reject { |path| path.match(%r{^(test|spec|features)/}) }
  spec.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
  spec.executables = `git ls-files -- bin/*`.split("\n").map { |path| File.basename(path) }
  spec.require_paths = ["lib"]

  spec.add_dependency('memoizer')
  spec.add_dependency('rails')
end
| 33.681818 | 97 | 0.645074 |
# Omnibus software definition that performs post-install cleanup of the
# package payload. (First line was corrupted by concatenated dataset
# metadata; restored.)
name 'cleanup'
default_version '1.0.0'
skip_transitive_dependency_licensing true
license :project_license
build do
  # strip shared object files related to gecode installs
  command "strip #{install_dir}/embedded/lib/libgecode*.so.32.0"
  # remove any test fixture pivotal keys to avoid user confusion
  command "find #{install_dir} -name pivotal.pem -delete"
end
| 28.153846 | 64 | 0.784153 |
# Climate index calculations (GDD, EGDD, CHU, P-PE, CanHM) from monthly
# normals or daily station data. (The class header line was corrupted by
# concatenated dataset metadata; restored — the method bodies below are
# unchanged.)
class Climate_calc
  # generic climate data calculations
  # :tmax is maximum temperature in degrees centigrade
  # :tmin is minimum temperature in degrees centigrade
  # :tprecip is total precipitation in millimeters
  # :radiation is solar radiation at the top of the atmosphere
  def Climate_calc.monthly2dailyinterps(monthlyData, daysPerMonth)
    #Calculates 365 daily values from 12 monthly values using a trig interpolation
    #monthlyData has 12 values which normally represent min or max monthly mean temperature values for Jan-Dec
    # December/January are wrapped around so every month has both neighbours.
    extendedArray = [monthlyData[11]] + monthlyData + [monthlyData[0]]
    dailyArray = Array.new
    for x in 0..11 do
      a = 7.29*(extendedArray[x+1] - extendedArray[x]) - 3.91*(extendedArray[x+2] - extendedArray[x]);
      b = 1.95*(extendedArray[x+1] - extendedArray[x]);
      c = extendedArray[x] - 6.47*(extendedArray[x+1] - extendedArray[x]) + 3.74*(extendedArray[x+2] - extendedArray[x]);
      for y in 1..daysPerMonth[x] do
        # 0.0174533 == pi/180 (degrees to radians)
        dailyArray.push( (a*Math.cos(0.0174533*y) + b*Math.sin(0.0174533*y) + c) )
      end
    end
    return dailyArray;
  end
  def Climate_calc.monthly2dailyprecip(monthlyData, daysPerMonth)
    #Calculates 365 daily values from 12 monthly values
    #monthlyData has 12 values which normally represent total monthly precipitation values for Jan-Dec
    # Each month's total is spread evenly across that month's days.
    dailyPrecipArray = Array.new
    for month in 0..11 do
      precip = monthlyData[month] / daysPerMonth[month].to_f;
      daysPerMonth[month].times{dailyPrecipArray.push(precip)}
    end
    return dailyPrecipArray
  end
  def Climate_calc.tenyearnormals(climateArray)
    # Fills :t10ave / :t10min on each day hash from the ten samples
    # collected in :tmaxArray / :tminArray; returns the mutated array
    # (`for` evaluates to the collection it iterated).
    for day in climateArray do
      day[:t10ave] = (day[:tmaxArray].sum + day[:tminArray].sum)/ 20
      day[:t10min] = day[:tminArray].sum / 10
    end
  end
  def Climate_calc.monthly(normalsKey, polygon, redis)
    # calculates and stores climate indices for a site based on monthly climate normals data
    # get data (based in redis)
    site = JSON.parse(redis.hget(normalsKey,polygon),:symbolize_names => true) # TODO - add or delete [:monthly]
    @year = "2017" # just any non-leap year - required for yday
    # create climate array with daily values of TMAX, TMIN, TMEAN, PRECIP
    site[:climate] = Array.new(365){|e| {}} # create empty hashes
    daysPerMonth = [31,28,31,30,31,30,31,31,30,31,30,31]
    Climate_calc.monthly2dailyinterps(site[:tmax],daysPerMonth).map.with_index{|v,i| site[:climate][i][:tmax]=v}
    Climate_calc.monthly2dailyinterps(site[:tmin],daysPerMonth).map.with_index{|v,i| site[:climate][i][:tmin]=v}
    Climate_calc.monthly2dailyprecip(site[:precip],daysPerMonth).map.with_index{|v,i| site[:climate][i][:precip]=v}
    # populate climate array with DAYNUMBER, TMEAN
    site[:climate].each_with_index{|v,i| v[:daynumber] = i+1}
    site[:climate].map{|v| v[:tmean] = ( v[:tmax] + v[:tmin] ) / 2}
    # GDD
    site.merge!(Climate_gdd.calc(site,5.0,1,365,1))
    # EGDD
    site.merge!(Climate_egdd.dailyrange(site[:climate],yday("April 1"),site[:GDD_First],yday("October 31")))
    # should probably use Climate_egdd.monthly calc, but monthly is SNAFU
    site[:climate] = Climate_egdd.daily(site[:climate],site[:EGDD_First],site[:EGDD_Last],site[:lat])
    site[:EGDD] = site[:climate].map{|day| day[:egdd]}.compact.sum.round(0)
    # populate climate array with RADIATION, PE
    Climate_evap.radiation(site[:lat]).map.with_index{|v,i| site[:climate][i].merge!(v)}
    site[:climate] = Climate_evap.baier_robertson(site[:climate])
    # P-PE
    site[:PPE] = Climate_evap.ppe(site[:climate],yday("May 1"),yday("August 31")).round(1)
    # chu
    site[:chu_thresholds] = Climate_chu.thresholds(site[:lat], site[:long])
    site[:chu_start] = Climate_chu.startmonthly(site[:climate],yday("April 1"),site[:chu_thresholds][:start_temp])
    site[:chu_stop] = Climate_chu.stopmonthly(site[:climate],site[:chu_thresholds][:stop_temp])
    site[:climate] = Climate_chu.calculate(site[:climate],site[:chu_start],site[:chu_stop],"chu1")
    site[:CHU1] = site[:climate].map{|day| day[:chu1]}.compact.sum.round(0)
    site[:CHU2] = Climate_chu.ave(site[:CHU1],site[:chu_thresholds])
    # canhm
    site[:EGDD600] = Climate_canolaheat.egdd_sum(site[:climate],site[:EGDD_First],600)
    site[:EGDD1100] = Climate_canolaheat.egdd_sum(site[:climate],site[:EGDD_First],1100)
    if site[:EGDD1100] == nil then site[:TmaxEGDD] = 0 else
      site[:TmaxEGDD] = Climate_canolaheat.tmax_egdd(site[:climate],site[:EGDD600],site[:EGDD1100])
    end
    site[:CanHM] = Climate_canolaheat.canhm(site[:TmaxEGDD])
    return site
  end
  def Climate_calc.monthlies(normalsKey, indicesKey, indicesDumpPathname, redis)
    # calculates indices for all polygons in a polygonset, based on monthly normals
    # assumes raw climate data is available in Redis
    for polygon in redis.hkeys(normalsKey) do
      redis.hset(indicesKey, polygon, Climate_calc.monthly(normalsKey, polygon, redis).except(:monthly, :climate, :lat, :long, :elev, :chu_thresholds).to_json)
    end
    # dump the hash to a file
    File.open(indicesDumpPathname,"w"){ |f| f << redis.dump(indicesKey) }
  end
  def Climate_calc.daily(params)
    # calculates and stores climate indices for a site based on daily climate data
    # site[:climate] is an array of 365 hashes
    dir = "/production/data/climate/stations/#{params[:station]}"
    site = JSON.parse(File.read("#{dir}/coordinates.json"),:symbolize_names => true)
    site[:climate] = CSV.read("#{dir}/daily/#{params[:daily]}.csv", headers:true, header_converters: :symbol, converters: :all, col_sep: "\t").map{|row| row.to_hash}
    site[:year] = params[:daily].to_i
    @year = site[:year]
    # populate climate array with DAYNUMBER, TMEAN
    site[:climate].each_with_index{|v,i| v[:daynumber] = i+1}
    site[:climate].map{|v| v[:tmean] = ( v[:tmax] + v[:tmin] ) / 2}
    # GDD
    site.merge!(Climate_gdd.calc(site,5.0,1,365,params[:chu2springdays].to_i))
    # EGDD
    site.merge!(Climate_egdd.dailyrange(site[:climate],yday("April 1"),site[:GDD_First],yday("October 31")))
    site[:climate] = Climate_egdd.daily(site[:climate],site[:EGDD_First],site[:EGDD_Last],site[:lat])
    site[:EGDD] = site[:climate].map{|day| day[:egdd]}.compact.sum.round(0)
    # populate climate array with RADIATION, PE
    Climate_evap.radiation(site[:lat]).map.with_index{|v,i| site[:climate][i].merge!(v)}
    site[:climate] = Climate_evap.baier_robertson(site[:climate])
    # P-PE
    site[:PPE] = Climate_evap.ppe(site[:climate],yday("May 1"),yday("August 31")).round(1)
    # PPE cumulative
    site[:climate] = Climate_evap.ppe_cum(site[:climate],yday("May 1"),yday("August 31"))
    # PPE monthlies
    site[:PPE_April] = Climate_evap.ppe(site[:climate],yday("April 1"),yday("April 30")).round(1)
    site[:PPE_May] = Climate_evap.ppe(site[:climate],yday("May 1"),yday("May 31")).round(1)
    site[:PPE_June] = Climate_evap.ppe(site[:climate],yday("June 1"),yday("June 30")).round(1)
    site[:PPE_July] = Climate_evap.ppe(site[:climate],yday("July 1"),yday("July 31")).round(1)
    site[:PPE_August] = Climate_evap.ppe(site[:climate],yday("August 1"),yday("August 31")).round(1)
    site[:PPE_September] = Climate_evap.ppe(site[:climate],yday("September 1"),yday("September 30")).round(1)
    # ESM
    site[:ESM] = site[:PPE_May].round(0)
    # EFM
    site[:EFM] = site[:PPE_September].round(0)
    # CanolaHeatModel and
    site[:CanHM] = Climate_canolaheat.daily(site[:climate],site[:EGDD_First],site[:EGDD_Last])
    # CHU
    site[:CHU_First] = Climate_chu.startstreak(site[:climate],yday("April 15"),12.8,3)
    site[:CHU_Last] = Climate_chu.stopmin(site[:climate],yday("October 15"),-2.0)
    site[:climate] = Climate_chu.calculate(site[:climate],site[:CHU_First],site[:CHU_Last],"chu1")
    site[:CHU1] = site[:climate].map{|day| day[:chu1]}.compact.sum.round(0)
    # CHU test2
    #site[:normals] = []
    # Collect the prior ten years of tmax/tmin samples per calendar day.
    site[:climate].map{|day| day[:tmaxArray] = [] }
    site[:climate].map{|day| day[:tminArray] = [] }
    for i in 0..9 do
      year = site[:year] - i
      if File.exist?("#{dir}/daily/#{year}.csv") then
        yearData = CSV.read("#{dir}/daily/#{year}.csv", headers:true, header_converters: :symbol, converters: :all, col_sep: "\t").map{|row| row.to_hash}
        yearData.map.with_index{|day,i| site[:climate][i][:tmaxArray].push(day[:tmax]); site[:climate][i][:tminArray].push(day[:tmin])}
      else
        #render "error_missingyear" and return and exit 1
        missingyear = true
      end
    end
    # populate climate with T10AVE, T10MIN
    if missingyear == true then
      site[:CHU2] = "ERROR: missing data required to calculate prior 10 year normals."
    else
      site[:climate] = Climate_calc.tenyearnormals(site[:climate])
      site[:CHU2_First] = Climate_chu.startmean(site[:climate],yday(params[:chu2springfirstday]),params[:chu2springtemp].to_f,params[:chu2springtemp10ave].to_f)
      site[:CHU2_Last] = Climate_chu.stopmean(site[:climate],yday(params[:chu2falllastday]),params[:chu2falltemp].to_f,params[:chu2falltemp10min].to_f,params[:chu2falltempmin].to_f)
      site[:climate] = Climate_chu.calculate(site[:climate],site[:CHU2_First],site[:CHU2_Last],"chu2")
      site[:CHU2] = site[:climate].map{|day| day[:chu2]}.compact.sum.round(0)
    end
    #File.open("#{dir}/daily/#{params[:daily]}_indices.json","w"){ |f| f << site.except(:monthly, :climate, :lat, :long, :elev, :chu_thresholds).to_json }
    File.open("#{dir}/daily/#{params[:daily]}_indices.json","w"){ |f| f << site.except(:climate, :lat, :long, :elev, :year, :normals).to_json }
    return site
  end
  def Climate_calc.dailies(params)
    # calculates and stores climate indices for a site for all years with daily climate data
    #params = {:station=>"2101300", :daily=>"1951", :chu2springfirstday=>"April 15", :chu2springtemp=>"14.2", :chu2springdays=>"5", :chu2springtemp10ave=>"10", :chu2falltemp=>"10.1", :chu2falltempmin=>"-2", "chu2falltemp10min"=>"10", :chu2falllastday=>"October 15"}
    Dir.chdir("/production/data/climate/stations/2101300/daily/")
    # calculate the indices for each year
    for year in Dir.glob("*.csv").sort.map{|filename| filename.split(".").first} do
      puts year
      params[:daily]=year
      Climate_calc.daily(params)
    end
    # calculate the indices for each year
    xx = Hash.new
    for year in Dir.glob("*.csv").sort.map{|filename| filename.split(".").first} do
      xx[year] = JSON.parse( File.read(year+"_indices.json") ) # TODO: turn these into symbols
      # TODO: get rid of ErosivityRegion for each year.
    end
    # have to do all years because params can change, so this unless statement can't be used
    #unless File.exists?("#{year}_indices.json") and File.mtime("#{year}.csv") < File.mtime("#{year}_indices.json") then
    #puts "Do calcs for #{year}"
    #end
  end
  private
  # Day-of-year for a "Month Day" string within @year (relies on a
  # String#to_date extension being loaded by the host application).
  def self.yday(monthday)
    "#{monthday}, #{@year}".to_date.yday
  end
end
| 52.485 | 261 | 0.710584 |
require 'tmpdir'

# Builds a throwaway directory populated with empty config-manifest
# fixture files (vars, operator and config YAMLs for cloud/runtime/cpi).
# (The require line was corrupted by concatenated dataset metadata;
# restored.)
class TestConfigGenerator
  attr_reader :config_manifest_path

  def initialize
    @config_manifest_path = Dir.mktmpdir
    %w[cloud runtime cpi].each { |kind| create_fixtures_for(kind) }
  end

  # Removes the fixture directory; safe to call more than once.
  def cleanup
    FileUtils.rm_rf @config_manifest_path if File.exist?(@config_manifest_path)
  end

  private

  # Touches the four empty fixture files for one config kind.
  def create_fixtures_for(kind)
    ["my-custom-#{kind}-vars.yml",
     "01-my-custom-#{kind}-operators.yml",
     "02-my-custom-#{kind}-operators.yml",
     "#{kind}-config.yml"].each do |basename|
      FileUtils.touch(File.join(@config_manifest_path, basename))
    end
  end
end
61ce43ae2842ef585f64faecd84428c686080798 | 5,252 | # encoding: utf-8
require "securerandom"
require "csv"
# Installation service order created from a Taobao trade; its lifecycle
# (pending -> assigned -> informed -> installation -> payed) is driven by
# the state_machine below, which also triggers SMS notifications.
class ServiceOrder < ActiveRecord::Base
  attr_accessible :alipay_id, :alipay_pix, :cmobile, :cname, :price, :site_id,
    :site_pix, :site_worker_id, :status, :tb_trade_id, :uid, :user_id,
    :time_service,:memo,:time_pay
  belongs_to :tb_trade
  belongs_to :site
  belongs_to :user
  scope :status, lambda {|status| where(:status => status)}
  state_machine :status, :initial => :pending do
    # pending # address incorrect, waiting to be corrected
    event :assign do
      transition [:pending,:assigned]=>:assigned # first-time assignment
    end
    # event :reassign do
    #   transition []=>:assigned # reassigned by customer service
    # end
    event :inform do
      transition [:informed,:assigned]=>:informed # customer informed of installation
    end
    event :install do
      transition :informed=>:installation # installation
    end
    event :pay do
      transition :installation=>:payed # payment
    end
    event :cancle do
      transition [:assigned,:installation]=>:cancled # cancellation
    end
    event :revert do
      transition :payed=>:installation,:installation=>:informed,:informed=>:assigned,:assigned=>:pending
    end
    # Timestamp the service/payment moments as the order advances.
    after_transition :informed => :installation do |service_order,transition|
      service_order.time_service = DateTime.now
      service_order.save!
    end
    after_transition :installation => :payed do |service_order,transition|
      service_order.time_pay = DateTime.now
      service_order.save!
    end
    after_transition :pending => :assigned do |service_order,transition|
      # service_order.send_assign_sms
    end
    after_transition [:informed,:assigned] => :informed do |service_order,transition|
      # service_order.send_cancle transition.args[0]
      service_order.send_assign_sms
    end
    after_transition :assigned => :cancled do |service_order,transition|
      # service_order.send_cancle
    end
  end
  # Builds a ServiceOrder for a trade, auto-assigning a service site when
  # one can be found; otherwise both records are left awaiting assignment.
  def self.create_from_trade tb_trade
    #tb_trade->site
    logger.info "create_from_trade--#{tb_trade.id}--start"
    user = tb_trade.user
    # uid is a zero-padded random 12-digit order number.
    service_order = user.service_orders.build(:uid=>"%012d" % SecureRandom.random_number(1000000000000),
      :cname=>tb_trade.cname,:cmobile=>tb_trade.cmobile,
      :status=>:pending,:tb_trade_id=>tb_trade.id)
    tb_trade.service_order = service_order
    # site = user.sites.city(tb_trade.city).order(:cert).last || Site.find_near(tb_trade.city)
    site = user.find_site(tb_trade)
    logger.info "create_from_trade--#{tb_trade.id}--find site"
    if site then
      service_order.site = site
      service_order.status = "assigned"
      service_order.save!
      tb_trade.status = "assigned"
    else
      tb_trade.status = "imported"
    end
    logger.info "create_from_trade--#{tb_trade.id}--save again"
    tb_trade.save!
    logger.info "create_from_trade--#{tb_trade.id}--out"
    return service_order
  end
  # Assigns (or clears) the service site and persists the change.
  def set_site site
    if site then
      self.site_id = site.id
    else
      self.status = "pending"
    end
    self.save!
  end
  # Sends the cancellation SMS to the given site.
  def send_cancle site
    message_s = "服务预约取消:#{time_service.strftime("%F %H")},客户:#{cname},联系电话:#{cmobile},订单信息:"
    SmsWorker.new.perform site.phone,message_s
  end
  # Sends assignment notifications to both customer and site when SMS
  # sending is enabled; otherwise only logs the customer message.
  def send_assign_sms
    # message_s = "服务预约时间:#{time_service.strftime("%F %H")},客户:#{cname},联系电话:#{cmobile},订单信息:"
    # message_c = "#{cname},服务预约时间:#{time_service.strftime("%F %H点")},#{site.summary}"
    # SmsWorker.new.perform site.phone,message_s
    # # SmsWorker.perform_async cmobile,message_c
    # SmsWorker.new.perform cmobile,message_c
    if Setting[:sms_send] then
      msg_c = assign_sms_c
      SmsWorker.perform_async user.id,cmobile,msg_c if msg_c.length>0 && cmobile
      msg_s = assign_sms_s
      SmsWorker.perform_async user.id,site.phone,msg_s if msg_s.length>0 && site.phone
    else
      logger.info "sms #{cmobile}--->>>#{assign_sms}"
    end
  end
  def site_sms
    assign_sms_s
  end
  def assign_sms
    assign_sms_c
  end
  # Renders the customer-facing SMS from the user's "inform" template via ERB.
  def assign_sms_c
    template = user.sms_templates.where(:sms_type=>"inform").first
    return "" if !template
    template = '<%="'+template.content+'"%>'
    vars = {phone:site.phone,contactor:site.contactor,tid:tb_trade.tid,address:site.address,name:site.name}
    ERB.new(template).result(OpenStruct.new(vars).instance_eval{binding})
  end
  # Renders the site-facing SMS from the user's "site" template via ERB.
  def assign_sms_s
    template = user.sms_templates.where(:sms_type=>"site").first
    return "" if !template
    template = '<%="'+template.content+'"%>'
    vars = {phone:tb_trade.cmobile,contactor:tb_trade.cname,product:tb_trade.title}
    ERB.new(template).result(OpenStruct.new(vars).instance_eval{binding})
  end
  # Timestamp accessor keyed by milestone (:pay, :service, default update time).
  def time flag=nil
    case flag
    when :pay
      return time_pay
    when :service
      return time_service
    else
      return updated_at
    end
  end
  # Human-readable (Chinese) label for the current status.
  def txt_status
    case status
    when "pending"
      "待分配"
    when "assigned"
      "待通知"
    when "informed"
      "待安装"
    when "installation"
      "待付款"
    when "payed"
      "已付款"
    end
  end
  # CSV export of all orders with the key customer/timing columns.
  def self.to_csv
    column_names = ["uid","cname","cmobile","time_service","time_pay","memo"]
    CSV.generate do |csv|
      csv << column_names
      all.each do |product|
        csv << product.attributes.values_at(*column_names)
      end
    end
  end
end
| 28.543478 | 107 | 0.664318 |
# Basic account model; every sign-up field is mandatory, with minimum
# lengths on username and email. (The class header line was corrupted by
# concatenated dataset metadata; restored.)
class User < ApplicationRecord
  validates :username, presence: true, length: { minimum: 5 }
  validates :email, presence: true, length: { minimum: 10 }
  validates :password, presence: true
end
| 32.5 | 61 | 0.728205 |
# (The `module IIIF` header line was corrupted by concatenated dataset
# metadata; restored. Code below is unchanged.)
module IIIF
  ##
  # A response object for Image requests
  class ImageResponse < Response
    require 'iiif/image_response/parameter'
    autoload :Region, 'iiif/image_response/region'
    autoload :Size, 'iiif/image_response/size'
    autoload :Rotation, 'iiif/image_response/rotation'
    # @!attribute [rw] id
    #   @return [String] the id for the resource
    # @!attribute [r] region
    #   @return [String] the region segment of the request
    # @!attribute [r] size
    #   @return [String] the size segment of the request
    # @!attribute [r] rotation
    #   @return [String] the rotation segment of the request
    # @!attribute [r] quality
    #   @return [String] the quality segment of the request
    # @!attribute [r] format
    #   @return [String] the format segment of the request
    attr_reader :region, :size, :rotation, :quality, :format
    attr_accessor :id
    ##
    # @param [String] id
    # @param [String] region
    # @param [String] size
    # @param [String] rotation
    # @param [String] quality
    # @param [String] format
    # NOTE(review): region and rotation are wrapped by the writers below,
    # but size is stored raw even though a Size class is autoloaded above
    # -- confirm this asymmetry is intended.
    def initialize(id:, region:, size:, rotation:, quality:, format:)
      @id = id
      self.region = region
      @size = size
      self.rotation = rotation
      @quality = quality
      @format = format
    end
    ##
    # @return [Symbol] :image
    def type
      :image
    end
    # Wraps the raw region segment in a Region parameter object.
    def region=(value)
      @region = Region.new(value)
    end
    # Wraps the raw rotation segment in a Rotation parameter object.
    def rotation=(value)
      @rotation = Rotation.new(value)
    end
  end
end
| 27.350877 | 69 | 0.603592 |
# Pluralization data for the :'id_ID' locale (Indonesian does not inflect plurals).
# NOTE(review): the original line was truncated (`:rule =>` with no value, a
# syntax error) and fused with dataset metadata; restored with nil -- confirm
# the intended rule against the upstream rails-i18n pluralization tables.
{ :'id_ID' => { :i18n => { :plural => { :keys => nil, :rule => nil } } } }
acf91573f5293b1f797a009f97d76aeef82561d8 | 1,583 | # frozen_string_literal: true
require "application_system_test_case"
# System tests covering the scaffolded CRUD flows for issues.
class IssuesTest < ApplicationSystemTestCase
  setup do
    # Fixture issue shared by every scenario below.
    @issue = issues(:one)
  end
  test "visiting the index" do
    visit issues_url
    assert_selector "h1", text: "Issues"
  end
  test "creating a Issue" do
    visit issues_url
    click_on "New Issue"
    # Fill the form from the fixture's attributes, then submit.
    fill_in "Author", with: @issue.author_id
    fill_in "Description", with: @issue.description
    fill_in "Iid", with: @issue.iid
    fill_in "Last edited at", with: @issue.last_edited_at
    fill_in "Last editor", with: @issue.last_editor_id
    fill_in "Repository", with: @issue.repository_id
    fill_in "Status", with: @issue.status
    fill_in "Title", with: @issue.title
    click_on "Create Issue"
    assert_text "Issue was successfully created"
    click_on "Back"
  end
  test "updating a Issue" do
    visit issues_url
    click_on "Edit", match: :first
    fill_in "Author", with: @issue.author_id
    fill_in "Description", with: @issue.description
    fill_in "Iid", with: @issue.iid
    fill_in "Last edited at", with: @issue.last_edited_at
    fill_in "Last editor", with: @issue.last_editor_id
    fill_in "Repository", with: @issue.repository_id
    fill_in "Status", with: @issue.status
    fill_in "Title", with: @issue.title
    click_on "Update Issue"
    assert_text "Issue was successfully updated"
    click_on "Back"
  end
  test "destroying a Issue" do
    visit issues_url
    # Deletion pops a JS confirm dialog; accept it before asserting.
    page.accept_confirm do
      click_on "Destroy", match: :first
    end
    assert_text "Issue was successfully destroyed"
  end
end
| 26.383333 | 57 | 0.698673 |
189421451de05e3665560c930880e03129846bb2 | 1,058 | #!/usr/bin/env ruby
##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
#
# This tool asks Google for the location of a given set of BSSIDs
#
# Resolve symlinks so the framework's lib directory can be located
# relative to the real script file.
msfbase = __FILE__
while File.symlink?(msfbase)
  msfbase = File.expand_path(File.readlink(msfbase), File.dirname(msfbase))
end
$LOAD_PATH.unshift(File.expand_path(File.join(File.dirname(msfbase), '..', '..','lib')))
require 'rex/google/geolocation'
require 'optparse'
# Without BSSID arguments there is nothing to look up: print usage and exit.
if ARGV.empty?
  $stderr.puts("Usage: #{$PROGRAM_NAME} <mac> [mac] ...")
  $stderr.puts("Ask Google for the location of the given set of BSSIDs")
  $stderr.puts
  $stderr.puts("Example: iwlist sc 2>/dev/null|awk '/Address/{print $5}'|xargs #{$PROGRAM_NAME}")
  $stderr.puts("Example: /System/Library/PrivateFrameworks/Apple80211.framework/Versions/A/Resources/airport -I|awk '/BSSID/{print $2}'|xargs #{$PROGRAM_NAME}")
  exit(1)
end
g = Rex::Google::Geolocation.new
# -83 dBm is used as a fixed placeholder signal strength for every BSSID.
ARGV.each do |mac|
  g.add_wlan(mac, nil, -83)
end
g.fetch!
puts g, g.google_maps_url
| 27.842105 | 160 | 0.719282 |
18854c5b81f6418b6244cccad16cfb231836b9e7 | 1,185 | # frozen_string_literal: true
# Search builder that restricts Solr results to the member file sets of a
# single monograph.
class MonographSearchBuilder < ::SearchBuilder
  self.default_processor_chain += [:filter_by_members]
  # Adds an fq terms filter limiting results to the monograph's asset ids.
  def filter_by_members(solr_parameters)
    ids = if blacklight_params[:monograph_id]
            # used for the facets "more" link and facet modal
            asset_ids(blacklight_params[:monograph_id])
          else
            asset_ids(blacklight_params['id'])
          end
    solr_parameters[:fq] ||= []
    solr_parameters[:fq] << "{!terms f=id}#{ids}"
  end
  private
    # Get the asset/fileset ids of the monograph
    # Excludes featured representatives, the cover/representative file set
    # and tombstoned assets. Returns a comma-separated id string (or nil
    # when the monograph or its members cannot be resolved).
    def asset_ids(id)
      monograph = Hyrax::PresenterFactory.build_for(ids: [id], presenter_class: Hyrax::MonographPresenter, presenter_args: nil).first
      return if monograph.blank?
      docs = monograph.ordered_member_docs
      return if docs.blank?
      ids = []
      docs.each do |doc|
        fp = Hyrax::FileSetPresenter.new(doc, nil)
        next if fp.featured_representative?
        next if fp.id == monograph.representative_id
        next if Sighrax.tombstone?(Sighrax.from_presenter(fp))
        ids << fp.id
      end
      ids.join(",")
    end
    # Work types this builder searches over.
    def work_types
      [FileSet]
    end
end
| 27.55814 | 133 | 0.652321 |
01bf2ad973989818f6b73ef7a2734383430a497d | 1,287 | # frozen_string_literal: true
# Form object aggregating the redmine_git_hosting plugin settings; each
# included PluginSettingsValidation module declares its accessors through
# add_accessor so params can serialize them all.
class PluginSettingsForm
  class << self
    # Declares attr_accessors and records their names so all_accessors
    # can enumerate every declared setting.
    def add_accessor(*args)
      @accessors ||= []
      args.each do |accessor|
        @accessors << accessor
        attr_accessor accessor
      end
    end
    # All accessor names declared via add_accessor.
    def all_accessors
      @accessors
    end
  end
  include BaseForm
  include PluginSettingsValidation::CacheConfig
  include PluginSettingsValidation::GitoliteAccessConfig
  include PluginSettingsValidation::GitoliteConfig
  include PluginSettingsValidation::HooksConfig
  include PluginSettingsValidation::MailingListConfig
  include PluginSettingsValidation::RedmineConfig
  include PluginSettingsValidation::SshConfig
  include PluginSettingsValidation::StorageConfig
  attr_reader :plugin
  def initialize(plugin)
    @plugin = plugin
  end
  # Hash of every declared setting name to its current form value.
  def params
    self.class.all_accessors.map { |v| [v, send(v)] }.to_h
  end
  private
  # Currently stored value for +setting+ in the plugin's Setting hash.
  def current_setting(setting)
    Setting.plugin_redmine_git_hosting[setting]
  end
  # nil-safe strip; nil becomes the empty string.
  def strip_value(value)
    return '' if value.nil?
    value.strip
  end
  # Keeps only present, syntactically valid email addresses.
  def filter_email_list(list)
    list.select { |m| m.present? && valid_email?(m) }
  end
  def valid_email?(email)
    RedmineGitHosting::Validators.valid_email? email
  end
  # Truncates a numeric value to one decimal place.
  def convert_time(time)
    (time.to_f * 10).to_i / 10.0
  end
end
| 20.758065 | 58 | 0.727273 |
# Gem version constant. (The module header line was corrupted by
# concatenated dataset metadata; restored.)
module MdolceP1Api
  VERSION = "0.1.0"
end
| 10.75 | 19 | 0.697674 |
# Homebrew cask installing the Iosevka SS16 TTC font family.
# (The `cask ... do` header line was corrupted by concatenated dataset
# metadata; restored.)
cask "font-iosevka-ss16" do
  version "10.3.2"
  sha256 "a417033597f0a378dc0d809a06e51eeba8179ddf62c2954028cc2c6a3a4f3cec"

  url "https://github.com/be5invis/Iosevka/releases/download/v#{version}/ttc-iosevka-ss16-#{version}.zip"
  name "Iosevka SS16"
  desc "Sans-serif, slab-serif, monospace and quasi‑proportional typeface family"
  homepage "https://github.com/be5invis/Iosevka/"

  livecheck do
    url :url
    strategy :github_latest
  end

  font "iosevka-ss16-bold.ttc"
  font "iosevka-ss16-extrabold.ttc"
  font "iosevka-ss16-extralight.ttc"
  font "iosevka-ss16-heavy.ttc"
  font "iosevka-ss16-light.ttc"
  font "iosevka-ss16-medium.ttc"
  font "iosevka-ss16-regular.ttc"
  font "iosevka-ss16-semibold.ttc"
  font "iosevka-ss16-thin.ttc"
end
| 30.12 | 105 | 0.747676 |
7aa46a2298d8b16d86a78286ef8adf75f6369731 | 1,230 | require_relative '../../spec_helper'
# Unit specs for the database DSL definition: name, connection value and
# the retry settings (which default to 0 until explicitly configured).
describe Cranium::DSL::DatabaseDefinition do
  let(:database) { Cranium::DSL::DatabaseDefinition.new "name" }
  describe "#name" do
    it "should return the name of the database definition" do
      expect(database.name).to eq("name")
    end
  end
  describe "#connect_to" do
    it "should set the attribute to the specified value" do
      database.connect_to "value"
      expect(database.connect_to).to eq("value")
    end
  end
  describe "#retry_count" do
    context 'when not set' do
      it "should return 0 by default" do
        expect(database.retry_count).to eq(0)
      end
    end
    context 'when set' do
      it "should return the number of retries specified for the database" do
        database.retry_count 3
        expect(database.retry_count).to eq(3)
      end
    end
  end
  describe "#retry_delay" do
    context 'when not set' do
      it "should return 0 by default" do
        expect(database.retry_delay).to eq(0)
      end
    end
    context 'when set' do
      it "should return the number of retries specified for the database" do
        database.retry_delay 15
        expect(database.retry_delay).to eq(15)
      end
    end
  end
end
| 21.206897 | 76 | 0.652033 |
61497e1695f916bbec02b833cfae5f4d754845a2 | 8,224 | module CommandLine
class Option
attr_reader :name, :alias
attr_reader :parameter_count
attr_reader :default_value
# Rewrites a command line keyword by replacing the underscores with dashes
# <tt>sym</tt> The symbol to rewrite
def self.rewrite(sym)
sym.to_s.gsub(/_/, '-').to_sym
end
# Initialize new CommandLine::Option
# <tt>name</tt> The name of the flag
# <tt>definition</tt> The definition of the flag.
def initialize(name, definition = {})
@name = CommandLine::Option.rewrite(name)
@alias = definition[:alias] ? definition[:alias].to_sym : nil
@required = definition.has_key?(:required) && definition[:required] == true
@parameter_count = definition[:parameters] || 1
@multiple = definition[:multiple] || false
@default_value = definition[:default] || false
end
def parse(arguments_parser)
if @parameter_count == 0
return true
elsif @parameter_count == 1
parameter = arguments_parser.next_parameter
raise CommandLine::ParameterExpected, self if parameter.nil?
return parameter
elsif @parameter_count == :any
parameters = []
while parameter = arguments_parser.next_parameter && parameter != '--'
parameters << parameter
end
return parameters
else
parameters = []
@parameter_count.times do |n|
parameter = arguments_parser.next_parameter
raise CommandLine::ParameterExpected, self if parameter.nil?
parameters << parameter
end
return parameters
end
end
def =~(test)
[@name, @alias].include?(CommandLine::Option.rewrite(test))
end
# Argument representation of the flag (--fast)
def to_option
"--#{@name}"
end
# Argument alias representation of the flag (-f)
def to_alias
"-#{@alias}"
end
# Check if flag has an alias
def has_alias?
[email protected]?
end
# Check if flag is required
def required?
@required
end
# Check if flag is optional
def optional?
!@required
end
def multiple?
@multiple
end
def has_default?
!@default_value.nil?
end
end
class Arguments
class Definition
ENDLESS_PARAMETERS = 99999
attr_reader :commands, :options, :parameters
def initialize(parent)
@parent = parent
@options = {}
@commands = {}
@parameters = nil
end
def [](option_name)
option_symbol = CommandLine::Option.rewrite(option_name)
if the_option = @options.detect { |(_, odef)| odef =~ option_symbol }
the_option[1]
else
raise CommandLine::UnknownOption, option_name
end
end
def minimum_parameters=(count_specifier)
@parameters = count_specifier..ENDLESS_PARAMETERS
end
def parameters=(count_specifier)
@parameters = count_specifier
end
alias :files= :parameters=
def option(name, options = {})
clo = CommandLine::Option.new(name, options)
@options[clo.name] = clo
end
def switch(name, switch_alias = nil)
option(name, :alias => switch_alias, :parameters => 0)
end
def command(name, &block)
command_definition = Definition.new(self)
yield(command_definition) if block_given?
@commands[CommandLine::Option.rewrite(name)] = command_definition
end
def has_command?(command)
@commands[CommandLine::Option.rewrite(command)]
end
end
OPTION_REGEXP = /^\-\-([A-Za-z0-9-]+)$/;
ALIASES_REGEXP = /^\-([A-Aa-z0-9]+)$/
attr_reader :definition
attr_reader :tokens
attr_reader :command, :options, :parameters
def self.parse(tokens = $*, &block)
cla = Arguments.new
cla.define(&block)
return cla.parse!(tokens)
end
def initialize
@tokens = []
@definition = Definition.new(self)
@current_definition = @definition
end
def define(&block)
yield(@definition)
end
def [](option)
if the_option = @options.detect { |(key, _)| key =~ option }
the_option[1]
else
@current_definition[option].default_value
end
end
def next_token
@current_token = @tokens.shift
return @current_token
end
def next_parameter
parameter_candidate = @tokens.first
parameter = (parameter_candidate.nil? || OPTION_REGEXP =~ parameter_candidate || ALIASES_REGEXP =~ parameter_candidate) ? nil : @tokens.shift
return parameter
end
# Parse +tokens+ (consumed from a copy, so the caller's array is left
# intact) into @command, @options and @parameters, then validate the
# result against the active definition. Returns self.
#
# Only the very first token may name a subcommand; when it does, that
# subcommand's Definition becomes current and subsequent options are
# resolved against it.
def parse!(tokens)
  @current_definition = @definition
  @first_token = true
  @tokens = tokens.clone
  @options = {}
  @parameters = []
  @command = nil
  prepare_result!
  while next_token
    # has_command? returns the subcommand's Definition (not a boolean),
    # which is exactly what we need to switch resolution context.
    if @first_token && command_definition = @definition.has_command?(@current_token)
      @current_definition = command_definition
      @command = CommandLine::Option.rewrite(@current_token)
    else
      # Dispatch on token shape: alias bundle, long option, or plain
      # positional parameter.
      case @current_token
      when ALIASES_REGEXP; handle_alias_expansion($1)
      when OPTION_REGEXP; handle_option($1)
      else; handle_other_parameter(@current_token)
      end
      # Any non-command token closes the window in which a subcommand may
      # still appear.
      @first_token = false
    end
  end
  validate_arguments!
  return self
end
protected
# Seed @options with an empty accumulator array for every option that may
# occur multiple times, so handle_option can append unconditionally.
#
# The old Hash[*select.flatten] round-trip (a Ruby 1.8 era idiom, and
# fragile should any value ever respond to #flatten) is replaced by a
# direct iteration over the definitions.
def prepare_result!
  @current_definition.options.each_value do |definition|
    @options[definition] = [] if definition.multiple?
  end
end
# Check the parse result against the active definition: the number of
# positional parameters must satisfy the declared count or range (checked
# via ===), and every required option must resolve to a value (self[name]
# falls back to the declared default, so a required option with a default
# also passes).
#
# The old Hash[*select.flatten] round-trip is replaced by a plain
# iteration.
def validate_arguments!
  if @current_definition.parameters && !(@current_definition.parameters === @parameters.length)
    raise CommandLine::ParametersOutOfRange.new(@current_definition.parameters, @parameters.length)
  end
  @current_definition.options.each do |name, definition|
    next unless definition.required?
    raise CommandLine::RequiredOptionMissing, definition unless self[name]
  end
end
# Expand a bundle of single-character aliases ("-xvf") back into their
# long-option tokens, pushed onto the front of the token stream. The
# bundle is walked in reverse so the expanded options come back out in
# their original left-to-right order.
def handle_alias_expansion(aliases)
  aliases.reverse.scan(/./) do |alias_char|
    option_definition = @current_definition[alias_char]
    raise CommandLine::UnknownOption, alias_char if option_definition.nil?
    @tokens.unshift(option_definition.to_option)
  end
end
# A token that is neither an option nor an alias bundle: record it as a
# positional parameter.
def handle_other_parameter(parameter)
  @parameters.push(parameter)
end
# Resolve +option_name+ against the current (sub)command definition and
# let the definition parse its own value(s) from this Arguments instance.
# Multi-occurrence options accumulate into the array seeded by
# prepare_result!; single options simply take the parsed value.
def handle_option(option_name)
  definition = @current_definition[option_name]
  raise CommandLine::UnknownOption, option_name if definition.nil?
  value = definition.parse(self)
  if definition.multiple?
    @options[definition] << value
  else
    @options[definition] = value
  end
end
end
# Base class for all command line parsing errors.
#
# Changed the superclass from Exception to StandardError: subclassing
# Exception means a bare `rescue` (or `rescue => e`) never catches these
# errors, letting them escape generic error handling alongside signals and
# SystemExit. StandardError is the conventional base for library errors;
# `rescue CommandLine::Error` continues to work unchanged.
class Error < StandardError
end
# Raised when a required option was not supplied (and has no default).
class RequiredOptionMissing < CommandLine::Error
  def initialize(option)
    super("You have to provide the #{option.name} option!")
  end
end
# Raised when the number of positional parameters does not satisfy the
# declared count or range.
class ParametersOutOfRange < CommandLine::Error
  # +expected+ is the declared constraint (Integer or Range); +actual+ is
  # the number of parameters actually found. A range capped by the
  # ENDLESS_PARAMETERS sentinel reads as "at least N".
  def initialize(expected, actual)
    if expected.kind_of?(Range)
      if expected.end == CommandLine::Arguments::Definition::ENDLESS_PARAMETERS
        super("The command expected at least #{expected.begin} parameters, but found #{actual}!")
      else
        super("The command expected between #{expected.begin} and #{expected.end} parameters, but found #{actual}!")
      end
    else
      super("The command expected #{expected} parameters, but found #{actual}!")
    end
  end
end
# Raised when an option that expects a parameter is not followed by one.
class ParameterExpected < CommandLine::Error
  def initialize(option)
    super("The option #{option.inspect} expects a parameter!")
  end
end
# Raised when a token looks like an option or alias but matches no
# declared option. (Comment typo fixed: "unkown" -> "unknown".)
class UnknownOption < CommandLine::Error
  def initialize(option_identifier)
    super("#{option_identifier.inspect} not recognized as a valid option!")
  end
end
end | 27.322259 | 147 | 0.637038 |
4a90053e40b6aab20d2cc2ac532831951942330f | 3,312 | # encoding: UTF-8
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20150405200823) do
# These are extensions that must be enabled in order to support this database
enable_extension "plpgsql"
create_table "item_comments", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "item_id", null: false
t.text "content", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "items", force: :cascade do |t|
t.string "title", null: false
t.string "url"
t.text "content"
t.integer "user_id", null: false
t.boolean "disabled", default: false, null: false
t.integer "comments_count", default: 0, null: false
t.integer "upvotes_count", default: 0, null: false
t.integer "downvotes_count", default: 0, null: false
t.integer "score", default: 0, null: false
t.integer "rank", default: 0, null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "items", ["disabled"], name: "index_items_on_disabled", using: :btree
add_index "items", ["user_id"], name: "index_items_on_user_id", using: :btree
create_table "users", force: :cascade do |t|
t.string "username", null: false
t.string "crypted_password"
t.string "salt"
t.boolean "admin", default: false, null: false
t.boolean "disabled", default: false, null: false
t.integer "karma", default: 0, null: false
t.text "about"
t.string "auth"
t.string "token"
t.datetime "karma_increment_time"
t.datetime "pwd_reset"
t.integer "replies_count", default: 0, null: false
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "users", ["auth", "token"], name: "index_users_on_auth_and_token", using: :btree
add_index "users", ["username"], name: "index_users_on_username", unique: true, using: :btree
create_table "votes", force: :cascade do |t|
t.integer "user_id", null: false
t.integer "votable_id", null: false
t.string "votable_type", null: false
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
add_index "votes", ["user_id", "votable_id", "votable_type"], name: "index_votes_on_user_id_and_votable_id_and_votable_type", unique: true, using: :btree
end
| 43.578947 | 155 | 0.649758 |
ac45b4852346fe7fe1b07f2b4940873342dc1af2 | 517 | require 'forwardable'
require 'singleton'
module HTTP
  module MimeType
    # Base encode/decode MIME type adapter.
    #
    # Concrete adapters subclass this and override #encode / #decode. The
    # base class raises for both operations so an adapter that forgets to
    # implement one fails loudly instead of silently.
    class Adapter
      include Singleton

      class << self
        extend Forwardable

        # Expose encode/decode at the class level by delegating to the
        # singleton instance.
        def_delegators :instance, :encode, :decode
      end

      %w[encode decode].each do |operation|
        # Stub implementation: raises until a subclass overrides it.
        # define_method replaces the old class_eval'd heredoc (no string
        # escaping needed), and the message grammar is fixed
        # ("does not supports" -> "does not support").
        define_method(operation) do |*|
          raise Error, "#{self.class} does not support ##{operation}"
        end
      end
    end
  end
end
| 20.68 | 72 | 0.601547 |
2151ee1f488a603bc7b4c6e079c1f2a726a089e5 | 2,319 | require 'spec_helper'
require 'find'
require 'zlib'
=begin
- Before any tests are run, read in the contents of a Gzipped compressed file containing a list of recordings
- and store the array to an instance variable
- To grab a source file:
ls /var/spool/asterisk/monitor | gzip -c | ssh [email protected] 'cat > /home/deployer/dev/engines/free_pbx/spec/fixtures/monitors.txt.gz'
- The before code:
=end
module FreePbx
describe AsteriskMonitor do
before(:all) do
file_name = 'spec/fixtures/monitors.txt.gz'
@file_count = `zcat -f #{file_name} | wc -l`.split[0].to_i
raw_monitors_list = Zlib::GzipReader.new( open(file_name) ).map { |line| line.chomp }
@monitors_list = raw_monitors_list.to_enum
end
subject { create(:free_pbx_asterisk_monitor) }
it "has a valid factory" do
expect(subject).to be_valid
end
describe "Validations" do
%w(path uniqueid).each do |attr|
it "requires #{attr}" do
subject.send("#{attr}=", nil)
expect(subject).to_not be_valid
expect(subject.errors[attr.to_sym].any?).to be_true
end
end
end # Validations
describe "#reindex" do
before(:each) do
# Stub and return an Enumerator of recordings from the file list fixture we have
# rather than reading from the actual file system
AsteriskMonitor.stub!(:file_list).and_return(@monitors_list)
AsteriskMonitor.reindex
end
context "is successful" do
it "has the correct number of records created" do
expect(AsteriskMonitor.count).to eq(@file_count)
end
it "has the correct unique ids" do
expect(AsteriskMonitor.first.uniqueid).to eq('1353316149.33')
expect(AsteriskMonitor.find_by_uniqueid('1354866349.14901')).to_not eq(nil)
expect(AsteriskMonitor.find_by_uniqueid('1354866349.xxxxx')).to eq(nil)
end
end
# TODO Implement this: email will be to the users in User report
context "is not successful" do
it "should not have the correct number of records created" do
#expect(AsteriskMonitor.count).not_to eq(@file_count)
end
it "should send a notification" do
end
end
end # reindex
end
end
| 28.280488 | 143 | 0.655886 |
62c3d17782f577da882683786bf4e48622d026bc | 20,891 | # frozen_string_literal: true
require "rails_helper"
require "controller_spec_helper"
require "order_detail_batch_update_shared_examples"
RSpec.describe FacilityOrdersController do
let(:account) { @account }
let(:facility) { @authable }
let(:facility_account) { @facility_account }
let(:product) { @product }
render_views
before(:all) { create_users }
before(:each) do
@authable = FactoryBot.create(:facility)
@facility_account = FactoryBot.create(:facility_account, facility: @authable)
@product = FactoryBot.create(:item,
facility_account: @facility_account,
facility: @authable,
)
@account = create_nufs_account_with_owner :director
@order_detail = place_product_order(@director, @authable, @product, @account)
@order_detail.order.update!(state: "purchased")
@params = { facility_id: @authable.url_name }
end
context "#assign_price_policies_to_problem_orders" do
let(:order_details) do
Array.new(3) do
order_detail = place_and_complete_item_order(@director, facility)
order_detail.update_attribute(:price_policy_id, nil)
order_detail
end
end
let(:order_detail_ids) { order_details.map(&:id) }
before :each do
@method = :post
@action = :assign_price_policies_to_problem_orders
end
context "when compatible price policies exist" do
let(:price_group) { create(:price_group, facility: facility) }
before :each do
create(:account_price_group_member, account: account, price_group: price_group)
order_details.each do |order_detail|
order_detail.update_attribute(:account_id, account.id)
order_detail.product.item_price_policies.create(attributes_for(
:item_price_policy, price_group_id: price_group.id))
end
do_request
end
it_should_allow_operators_only :redirect do
expect(OrderDetail.where(id: order_detail_ids, problem: false).count)
.to eq order_details.count
end
end
context "when no compatible price policies exist" do
before :each do
ItemPricePolicy.destroy_all
do_request
end
it_should_allow_operators_only :redirect do
expect(OrderDetail.where(id: order_detail_ids, problem: true).count)
.to eq order_details.count
end
end
end
it_behaves_like "it supports order_detail POST #batch_update"
context "#index" do
before :each do
@method = :get
@action = :index
end
it_should_allow_operators_only {}
context "signed in" do
before :each do
maybe_grant_always_sign_in :director
end
%w(order_number date product assigned_to status).each do |sort|
it "should not blow up for sort by #{sort}" do
@params[:sort] = sort
do_request
expect(response).to be_successful
expect(assigns[:order_details]).not_to be_nil
expect(assigns[:order_details].first).not_to be_nil
end
end
it "should not return reservations" do
# setup_reservation overwrites @order_detail
@order_detail_item = @order_detail
@order_detail_reservation = setup_reservation(@authable, @account, @director)
@reservation = place_reservation(@authable, @order_detail_reservation, Time.zone.now + 1.hour)
expect(@authable.reload.order_details).to contain_all [@order_detail_item, @order_detail_reservation]
do_request
expect(assigns[:order_details]).to eq([@order_detail_item])
end
context "export" do
before :each do
@params.merge!(format: :csv)
end
it "renders a csv download" do
do_request
expect(response.headers["Content-Type"]).to match %r{\Atext/csv\b}
end
end
end
end
describe "#show" do
before do
maybe_grant_always_sign_in :admin
@method = :get
@action = :show
@params.merge!(id: @order_detail.order.id)
end
describe "with an order detail with no cost assigned" do
it "renders" do
expect(@order_detail.actual_cost).to be_nil
expect(@order_detail.estimated_cost).to be_nil
expect { do_request }.not_to raise_error
end
end
end
context "#show_problems" do
before :each do
@method = :get
@action = :show_problems
end
it_should_allow_managers_and_senior_staff_only
end
context "#send_receipt" do
before :each do
@method = :post
@action = :send_receipt
@params[:id] = @order.id
request.env["HTTP_REFERRER"] = facility_order_path @authable, @order
ActionMailer::Base.deliveries.clear
end
it_should_allow_operators_only :redirect, "to send a receipt" do
expect(flash[:notice]).to include("sent successfully")
expect(ActionMailer::Base.deliveries.size).to eq(1)
mail = ActionMailer::Base.deliveries.first
expect(mail.subject).to include("Order Receipt")
expect(mail.from.first).to eq(Settings.email.from)
assert_redirected_to facility_order_path(@authable, @order)
end
end
describe "PUT #update" do
let(:order) { @order }
before do
@method = :put
@action = :update
@params.merge!(id: order.id, add_to_order_form: { product_id: product.id, order_status_id: OrderStatus.new_status.id })
end
context "with a product_add_quantity of 0" do
before { @params[:add_to_order_form][:quantity] = 0 }
it_should_allow_operators_only do
expect(flash[:error]).to include("Quantity must be greater than 0")
expect(response).to render_template(:show)
end
end
context "with a product_add_quantity of 1" do
before do
@params[:add_to_order_form][:quantity] = 1
order.order_details.destroy_all
end
it_should_allow :director, "to add an item to existing order directly" do
assert_no_merge_order(order, product)
expect(order.order_details.last.created_by_user).to eq(@director)
end
context "when adding an instrument" do
let(:instrument) { FactoryBot.create(:instrument, facility_account: facility_account) }
let(:merge_order) { Order.find_by(merge_with_order_id: order.id) }
let(:order_detail) { merge_order.order_details.last }
before { @params[:add_to_order_form][:product_id] = instrument.id }
it_should_allow :director, "to add an instrument to existing order via merge" do
assert_merge_order(order, instrument)
end
context "when setting a note" do
before { @params[:add_to_order_form][:note] = "This is a note" }
it_should_allow :director, "to add an instrument to existing order via merge" do
expect(order_detail.note).to eq("This is a note")
end
end
context "when setting an order status" do
before { @params[:add_to_order_form][:order_status_id] = order_status.id.to_s }
context "of 'In Process'" do
let(:order_status) { OrderStatus.in_process }
it_should_allow :director, "to add an instrument to existing order via merge" do
expect(order_detail.order_status).to eq(OrderStatus.in_process)
end
end
context "of 'Complete'" do
let(:order_status) { OrderStatus.complete }
let(:director) do
FactoryBot.create(:user, :facility_director, facility: facility)
end
before do
sign_in director
put @action, params: @params
end
it "errors due to an invalid transition", :aggregate_failures do
expect(merge_order).to be_nil
expect(flash[:error])
.to include("may not be set initially to an order status of Complete")
end
end
end
end
context "when adding an item" do
let(:item) { FactoryBot.create(:item, facility_account: facility_account) }
let(:order_detail) { order.order_details.last }
before { @params[:add_to_order_form][:product_id] = item.id }
it_should_allow :director, "to add an item to existing order directly" do
assert_no_merge_order(order, item, 1)
end
context "when setting a note" do
before { @params[:add_to_order_form][:note] = "This is a note" }
it_should_allow :director, "to add an item to existing order with a note" do
expect(order_detail.note).to eq("This is a note")
end
end
context "when setting a reference id" do
context "of an appropriate length" do
before { @params[:add_to_order_form][:reference_id] = "Ref123" }
it_should_allow :director, "to set the reference id" do
expect(order_detail.reference_id).to eq("Ref123")
end
end
context "that is too long" do
before { @params[:add_to_order_form][:reference_id] = "a" * 31 }
it_should_allow :director, "to set the reference id" do
expect(order_detail).to be_blank
expect(flash[:error]).to include("Reference ID is too long")
end
end
end
context "when specifying an account" do
let(:other_account) { create(:nufs_account, :with_account_owner, owner: order.user) }
before { @params[:add_to_order_form][:account_id] = other_account.id }
it_should_allow :director, "to add the item to that account" do
expect(order.order_details.last.account).to eq(other_account)
end
context "and that account is suspended" do
before { other_account.suspend }
it_should_allow :director, "to error on that account" do
expect(order.order_details).to be_empty
expect(flash[:error]).to be_present
end
end
end
context "when setting the order status to 'Complete'" do
let(:complete_status) { OrderStatus.complete }
before { @params[:add_to_order_form][:order_status_id] = complete_status.id.to_s }
context "and setting fulfilled_at" do
before { @params[:add_to_order_form][:fulfilled_at] = fulfilled_at.strftime("%m/%d/%Y") }
context "to today" do
let(:fulfilled_at) { Date.today }
it_should_allow :director, "to add an item to existing order with fulfilled_at set" do
expect(order.order_details).to be_one
expect(order_detail.order_status).to eq(complete_status)
expect(order_detail.fulfilled_at)
.to eq(fulfilled_at.beginning_of_day + 12.hours)
end
end
context "to a date in the future" do
let(:fulfilled_at) { 1.day.from_now }
it_should_allow :director, "it should not save" do
expect(order_detail).to be_blank
expect(flash[:error]).to include "cannot be in the future"
end
end
context "to a date before the start of the previous fiscal year" do
let(:fulfilled_at) { SettingsHelper.fiscal_year_beginning - 1.year - 1.day }
it_should_allow :director, "it should not save" do
expect(order_detail).to be_blank
expect(flash[:error]).to include("fiscal year")
end
end
context "to a date during this fiscal year" do
let(:fulfilled_at) { SettingsHelper.fiscal_year_beginning + 1.day }
it_should_allow :director, "to add an item to existing order with fulfilled_at set" do
expect(order_detail.order_status).to eq(complete_status)
expect(order_detail.fulfilled_at).to eq(fulfilled_at.beginning_of_day + 12.hours)
end
end
end
end
context "when not setting an order status" do
context "and setting fulfilled_at" do
before { @params[:add_to_order_form][:fulfilled_at] = fulfilled_at.strftime("%m/%d/%Y") }
let(:fulfilled_at) { Date.today }
it_should_allow :director, "to add an item to existing order with status and fulfilled_at set to defaults" do
expect(order_detail.order_status).to eq(item.initial_order_status)
expect(order_detail.fulfilled_at).to be_blank
end
end
end
context "when setting an order status" do
before { @params[:add_to_order_form][:order_status_id] = OrderStatus.in_process.id }
it_should_allow :director, "to add an item to existing order via merge" do
expect(order_detail.order_status).to eq(OrderStatus.in_process)
end
end
end
context "when adding a service" do
let(:service) { FactoryBot.create(:service, facility_account: facility_account) }
before do
allow_any_instance_of(OrderDetail).to receive(:valid_service_meta?).and_return(false)
allow_any_instance_of(Service).to receive(:active_survey?).and_return(active_survey?)
allow_any_instance_of(Service).to receive(:active_template?).and_return(active_template?)
@params[:add_to_order_form][:product_id] = service.id
end
shared_examples_for "directors may add via merge" do
it_should_allow :director, "to add a service to existing order via merge" do
assert_merge_order(order, service)
end
end
context "with an active survey" do
let(:active_survey?) { true }
context "with an active template" do
let(:active_template?) { true }
it_behaves_like "directors may add via merge"
end
context "without an active template" do
let(:active_template?) { false }
it_behaves_like "directors may add via merge"
end
end
context "without an active survey" do
let(:active_survey?) { false }
context "with an active template" do
let(:active_template?) { true }
it_behaves_like "directors may add via merge"
end
context "without an active template" do
let(:active_template?) { false }
it_should_allow :director, "to add a service to existing order directly" do
assert_no_merge_order(order, service)
end
end
end
end
context "when adding a bundle" do
let(:bundle) do
FactoryBot.create(:bundle, bundle_products: bundle_products, facility_account: facility_account)
end
let(:bundle_products) { [product, additional_product] }
let(:additional_product) do
FactoryBot.create(bundled_product_type, facility_account: facility_account)
end
before { @params[:add_to_order_form][:product_id] = bundle.id }
context "containing an item" do
let(:bundled_product_type) { :item }
it_should_allow :director, "to add an item to existing order directly" do
assert_no_merge_order(order, bundle, 2)
end
end
context "containing an instrument" do
let(:bundled_product_type) { :instrument }
it_should_allow :director, "to add an instrument to existing order via merge" do
assert_merge_order(order, bundle, 1, 1)
end
end
context "containing a service" do
let(:bundled_product_type) { :service }
before do
allow_any_instance_of(OrderDetail).to receive(:valid_service_meta?).and_return(false)
allow_any_instance_of(Service).to receive(:active_survey?).and_return(active_survey?)
allow_any_instance_of(Service).to receive(:active_template?).and_return(active_template?)
end
shared_examples_for "directors may add via merge" do
it_should_allow :director, "to add a service to existing order via merge" do
assert_merge_order(order, bundle, 1, 1)
end
end
context "with an active survey" do
let(:active_survey?) { true }
context "with an active template" do
let(:active_template?) { true }
it_behaves_like "directors may add via merge"
end
context "without an active template" do
let(:active_template?) { false }
it_behaves_like "directors may add via merge"
end
end
context "without an active survey" do
let(:active_survey?) { false }
context "with an active template" do
let(:active_template?) { true }
it_behaves_like "directors may add via merge"
end
context "without an active template" do
let(:active_template?) { false }
it_should_allow :director, "to add a service to existing order directly" do
assert_no_merge_order(order, bundle, 2)
end
end
end
end
end
end
# Shared assertion for the add-to-order flow: every added detail carries
# the default order status and the right product (for a Bundle, each
# detail's product must be one of the bundle's products). Also checks the
# flash (merge orders warn that they need attention) and that the response
# redirected to the merge order when one was created, otherwise to the
# order itself.
def assert_update_success(order, product)
  if product.is_a? Bundle
    order.order_details.each do |od|
      expect(od.order_status).to eq(OrderStatus.default_order_status)
      expect(product.products).to be_include(od.product)
    end
  else
    order_detail = order.order_details[0]
    expect(order_detail.product).to eq(product)
    expect(order_detail.order_status).to eq(OrderStatus.default_order_status)
  end
  if order.to_be_merged?
    expect(flash[:error]).to include("needs your attention")
  else
    expect(flash[:notice]).to include("successfully added to this order")
  end
  assert_redirected_to facility_order_path(@authable, order.to_be_merged? ? order.merge_order : order)
end
# Asserts the product was added directly to +original_order+ (no merge
# order involved): after a reload the order holds +detail_count+ details,
# and the common add-to-order assertions hold.
def assert_no_merge_order(original_order, product, detail_count = 1)
  expect(original_order.reload.order_details.size).to eq(detail_count)
  assert_update_success original_order, product
end
# Asserts that adding +product+ created exactly one merge order pointing
# back at +original_order+: the original keeps +original_detail_count+
# details, while the merge order mirrors the original's facility, account
# and user, was created by the director, and carries +detail_count+
# not-yet-ordered details (one merge notification each).
def assert_merge_order(original_order, product, detail_count = 1, original_detail_count = 0)
  expect(original_order.reload.order_details.size).to eq(original_detail_count)
  merges = Order.where(merge_with_order_id: original_order.id)
  expect(merges.size).to eq(1)
  merge_order = merges.first
  expect(merge_order.merge_order).to eq(original_order)
  expect(merge_order.facility_id).to eq(original_order.facility_id)
  expect(merge_order.account_id).to eq(original_order.account_id)
  expect(merge_order.user_id).to eq(original_order.user_id)
  expect(merge_order.created_by).to eq(@director.id)
  # Fixed a no-op expectation: `expect(x).to be { ... }` ignores the block
  # and only checks truthiness, so the ordered_at condition was never
  # actually verified.
  expect(merge_order.order_details).to all(satisfy { |od| od.ordered_at.blank? })
  expect(merge_order.order_details.size).to eq(detail_count)
  expect(MergeNotification.count).to eq(detail_count)
  assert_update_success merge_order, product
end
end
context "#tab_counts" do
before :each do
@method = :get
@action = :tab_counts
@order_detail2 = FactoryBot.create(:order_detail, order: @order, product: @product)
expect(@authable.order_details.item_and_service_orders.new_or_inprocess.size).to eq(2)
@problem_order_details = (1..3).map do |_i|
order_detail = place_and_complete_item_order(@staff, @authable)
order_detail.update(price_policy_id: nil)
order_detail
end
@params.merge!(tabs: %w(new_or_in_process_orders problem_order_details))
end
it_should_allow_operators_only {}
context "signed in" do
before :each do
maybe_grant_always_sign_in :director
end
it "should get only new if thats all you ask for" do
@authable.order_details.item_and_service_orders.new_or_inprocess.to_sql
@params[:tabs] = ["new_or_in_process_orders"]
do_request
expect(response).to be_successful
body = JSON.parse(response.body)
expect(body.keys).to contain_all ["new_or_in_process_orders"]
expect(body["new_or_in_process_orders"]).to eq(2)
end
it "should get everything if you ask for it" do
do_request
expect(response).to be_successful
body = JSON.parse(response.body)
expect(body.keys).to contain_all %w(new_or_in_process_orders problem_order_details)
expect(body["new_or_in_process_orders"]).to eq(2)
expect(body["problem_order_details"]).to eq(3)
end
end
end
end
| 35.348562 | 125 | 0.637643 |
ffbbed4e28d701884d31021b474ae43ec5ad99bb | 371 | # frozen_string_literal: true
module ShipmentCalculator
  # Runs a set of shipment-pricing rules over the valid subset of the
  # supplied transactions.
  class Calculator
    attr_reader :valid_transactions, :rules

    # Keeps only transactions reporting themselves #valid?; invalid ones
    # are dropped up front and never see any rule.
    def initialize(transactions, rules)
      @valid_transactions = transactions.select(&:valid?)
      @rules = rules
    end

    # Instantiates each rule with the valid transactions and applies it,
    # in the order the rules were given. Returns the rules collection
    # (the result of #each).
    def basic_calculate
      rules.each { |rule| rule.new(valid_transactions).apply }
    end
  end
end
| 19.526316 | 57 | 0.692722 |
1d13904114734dc26a94384ecb5498a123358bde | 2,683 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/vision/v1/product_search.proto
require 'google/protobuf'
require 'google/cloud/vision/v1/geometry_pb'
require 'google/cloud/vision/v1/product_search_service_pb'
require 'google/protobuf/timestamp_pb'
require 'google/api/annotations_pb'
require 'google/api/resource_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_message "google.cloud.vision.v1.ProductSearchParams" do
optional :bounding_poly, :message, 9, "google.cloud.vision.v1.BoundingPoly"
optional :product_set, :string, 6
repeated :product_categories, :string, 7
optional :filter, :string, 8
end
add_message "google.cloud.vision.v1.ProductSearchResults" do
optional :index_time, :message, 2, "google.protobuf.Timestamp"
repeated :results, :message, 5, "google.cloud.vision.v1.ProductSearchResults.Result"
repeated :product_grouped_results, :message, 6, "google.cloud.vision.v1.ProductSearchResults.GroupedResult"
end
add_message "google.cloud.vision.v1.ProductSearchResults.Result" do
optional :product, :message, 1, "google.cloud.vision.v1.Product"
optional :score, :float, 2
optional :image, :string, 3
end
add_message "google.cloud.vision.v1.ProductSearchResults.ObjectAnnotation" do
optional :mid, :string, 1
optional :language_code, :string, 2
optional :name, :string, 3
optional :score, :float, 4
end
add_message "google.cloud.vision.v1.ProductSearchResults.GroupedResult" do
optional :bounding_poly, :message, 1, "google.cloud.vision.v1.BoundingPoly"
repeated :results, :message, 2, "google.cloud.vision.v1.ProductSearchResults.Result"
repeated :object_annotations, :message, 3, "google.cloud.vision.v1.ProductSearchResults.ObjectAnnotation"
end
end
module Google
module Cloud
module Vision
module V1
ProductSearchParams = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ProductSearchParams").msgclass
ProductSearchResults = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ProductSearchResults").msgclass
ProductSearchResults::Result = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ProductSearchResults.Result").msgclass
ProductSearchResults::ObjectAnnotation = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ProductSearchResults.ObjectAnnotation").msgclass
ProductSearchResults::GroupedResult = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.vision.v1.ProductSearchResults.GroupedResult").msgclass
end
end
end
end
| 48.781818 | 176 | 0.776742 |
d543dcd334d29d5ad68ada2d16348f94941a54d9 | 2,595 | require "test_helper"
class TopicTest < ActionDispatch::IntegrationTest
setup do
@topic_example = JSON.parse(
GovukContentSchemaTestHelpers::Examples.new.get(
"service_manual_topic",
"service_manual_topic",
),
)
end
test "it uses topic description as meta description" do
stub_content_store_has_item("/service-manual/test-topic", @topic_example.to_json)
visit "/service-manual/test-topic"
assert page.has_css?('meta[name="description"]', visible: false)
tag = page.find 'meta[name="description"]', visible: false
assert_equal @topic_example["description"], tag["content"]
end
test "it doesn't write a meta description if there is none" do
@topic_example.delete("description")
stub_content_store_has_item("/service-manual/test-topic", @topic_example.to_json)
visit "/service-manual/test-topic"
assert_not page.has_css?('meta[name="description"]', visible: false)
end
test "it lists communities in the sidebar" do
setup_and_visit_example("service_manual_topic", "service_manual_topic")
within(".related-communities") do
assert page.has_link?(
"Agile delivery community",
href: "/service-manual/communities/agile-delivery-community",
)
assert page.has_link?(
"User research community",
href: "/service-manual/communities/user-research-community",
)
end
end
test "it doesn't display content in accordian if not eligible" do
setup_and_visit_example("service_manual_topic", "service_manual_topic")
assert_not page.has_css?(".gem-c-accordion")
end
test "it displays content using an accordian if eligible" do
content_item = govuk_content_schema_example("service_manual_topic", "service_manual_topic")
third_linked_item = { content_id: SecureRandom.uuid, title: "linky", base_path: "/basey" }
third_group = { name: "Group 3", description: "The third group", content_ids: [third_linked_item[:content_id]] }
content_item["links"]["linked_items"] << third_linked_item
content_item["details"]["groups"] << third_group
content_item["details"]["visually_collapsed"] = true
stub_content_store_has_item(content_item["base_path"], content_item)
visit content_item["base_path"]
assert page.has_css?(".gem-c-accordion")
end
test "it includes a link to subscribe for email alerts" do
setup_and_visit_example("service_manual_topic", "service_manual_topic")
assert page.has_link?(
"email",
href: "/email-signup?link=/service-manual/test-expanded-topic",
)
end
end
| 33.701299 | 116 | 0.714451 |
6a760ea5364bf76b7dc3c8c9b1255258e241402e | 1,611 | #
# Be sure to run `pod lib lint LHTools.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'LHTools'
s.version = '0.0.1'
s.summary = 'A short description of LHTools.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/shookHead/LHTools'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'shookHead' => '[email protected]' }
s.source = { :git => 'https://github.com/shookHead/LHTools.git', :tag => s.version.to_s }
# s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'
s.ios.deployment_target = '9.0'
s.swift_version = '5.0'
s.source_files = 'LHTools/Classes/**/*'
#s.resource_bundles = {
# 'LHTools' => ['LHTools/Assets/**']
#}
# s.public_header_files = 'Pod/Classes/**/*.h'
s.frameworks = 'UIKit'#, 'MapKit'
# s.dependency 'AFNetworking', '~> 2.3'
end
| 36.613636 | 101 | 0.627561 |
26b5a495c1b2ca44b836b6e91fde41c3a84d6d6e | 1,524 | require 'test_helper'
# Integration tests covering login failure, login/logout, and the
# "remember me" cookie behaviour. (Comments translated from Japanese.)
class UsersLoginTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:michael)
  end
  # Submitting the login form with blank credentials re-renders the form
  # with a flash message, and that flash must not persist onto the next
  # page view.
  test "login with invalid information" do
    get login_path
    assert_template 'sessions/new'
    post login_path, params: { session: { email: "", password: "" } }
    assert_template 'sessions/new'
    assert_not flash.empty?
    get root_path
    assert flash.empty?
  end
  # A successful login redirects to the user's profile page and swaps the
  # login link for the logout and profile links; logging out reverses
  # that. A second DELETE to the logout path (e.g. from another window)
  # must not raise.
  test "login with valid information followed by logout" do
    get login_path
    post login_path, params: { session: { email: @user.email,
                                          password: 'password' } }
    assert is_logged_in?
    assert_redirected_to @user
    follow_redirect!
    assert_template 'users/show'
    assert_select "a[href=?]", login_path, count: 0
    assert_select "a[href=?]", logout_path
    assert_select "a[href=?]", user_path(@user)
    delete logout_path
    assert_not is_logged_in?
    assert_redirected_to root_url
    # Logging out a second time simulates the user clicking logout in a
    # second browser window.
    delete logout_path
    follow_redirect!
    assert_select "a[href=?]", login_path
    assert_select "a[href=?]", logout_path, count: 0
    assert_select "a[href=?]", user_path(@user), count: 0
  end
  test "login with remembering" do
    log_in_as(@user, remember_me: '1')
    assert_not_empty cookies['remember_token']
  end
  test "login without remembering" do
    # Log in once with the remember-me cookie set...
    log_in_as(@user, remember_me: '1')
    delete logout_path
    # ...then log in again without it: the stale cookie must be cleared.
    log_in_as(@user, remember_me: '0')
    assert_empty cookies['remember_token']
  end
end
| 28.222222 | 69 | 0.674541 |
1a4b8940ec3a7fddf8e09011e74dc5da51c45fb2 | 2,632 | require 'cases/helper_sqlserver'
require 'models/person'
require 'models/reader'
# Verifies that the SQL Server adapter translates ActiveRecord pessimistic
# locking (Model.lock) into the T-SQL WITH(UPDLOCK) table hint, and that a
# custom hint string is passed through verbatim.
class PessimisticLockingTestSQLServer < ActiveRecord::TestCase

  fixtures :people, :readers

  before do
    # Warm the schema cache so column-introspection queries do not pollute
    # the SQL captured by assert_sql below.
    Person.columns
    Reader.columns
  end

  it 'uses with updlock by default' do
    assert_sql %r|SELECT \[people\]\.\* FROM \[people\] WITH\(UPDLOCK\)| do
      Person.lock(true).to_a.must_equal Person.all.to_a
    end
  end

  describe 'For simple finds with default lock option' do

    it 'lock with simple find' do
      assert_nothing_raised do
        Person.transaction do
          Person.lock(true).find(1).must_equal Person.find(1)
        end
      end
    end

    it 'lock with scoped find' do
      assert_nothing_raised do
        Person.transaction do
          Person.lock(true).scoping do
            Person.find(1).must_equal Person.find(1)
          end
        end
      end
    end

    it 'lock with eager find' do
      assert_nothing_raised do
        Person.transaction do
          person = Person.lock(true).includes(:readers).find(1)
          person.must_equal Person.find(1)
        end
      end
    end

    it 'reload with lock when #lock! called' do
      assert_nothing_raised do
        Person.transaction do
          person = Person.find 1
          old, person.first_name = person.first_name, 'fooman'
          # lock! reloads the row under UPDLOCK, discarding the dirty change.
          person.lock!
          assert_equal old, person.first_name
        end
      end
    end

    it 'can add a custom lock directive' do
      assert_sql %r|SELECT \[people\]\.\* FROM \[people\] WITH\(HOLDLOCK, ROWLOCK\)| do
        Person.lock('WITH(HOLDLOCK, ROWLOCK)').load
      end
    end
  end

  describe 'For paginated finds' do

    before do
      Person.delete_all
      20.times { |n| Person.create!(first_name: "Thing_#{n}") }
    end

    it 'copes with eager loading un-locked paginated' do
      # Two-step eager load: first the DISTINCT ids page (OFFSET/FETCH),
      # then the row loader — both must carry the UPDLOCK hint.
      eager_ids_sql = /SELECT\s+DISTINCT \[people\].\[id\] FROM \[people\] WITH\(UPDLOCK\) LEFT OUTER JOIN \[readers\] WITH\(UPDLOCK\)\s+ON \[readers\].\[person_id\] = \[people\].\[id\]\s+ORDER BY \[people\].\[id\] ASC OFFSET @0 ROWS FETCH NEXT @1 ROWS ONLY/
      loader_sql = /SELECT.*FROM \[people\] WITH\(UPDLOCK\).*WHERE \[people\]\.\[id\] IN/
      assert_sql(eager_ids_sql, loader_sql) do
        people = Person.lock(true).limit(5).offset(10).includes(:readers).references(:readers).to_a
        people[0].first_name.must_equal 'Thing_10'
        people[1].first_name.must_equal 'Thing_11'
        people[2].first_name.must_equal 'Thing_12'
        people[3].first_name.must_equal 'Thing_13'
        people[4].first_name.must_equal 'Thing_14'
      end
    end
  end
end
| 28.923077 | 258 | 0.637158 |
2194e32949d49f2195691cc5f3f2cb5137bd859f | 40 | class Autoreload
VERSION = '1.0.0'
end | 13.333333 | 19 | 0.7 |
b9f3df8ddc97c6993d546a5482f8182f02b13428 | 16,138 | require 'test_helper'
require 'pry'
describe BrBoleto::Retorno::Cnab240::Unicred do
subject { BrBoleto::Retorno::Cnab240::Unicred.new(file) }
let(:file) { open_fixture('retorno/cnab240/padrao240.ret') }
it "Deve ler o código do banco" do
subject.codigo_banco.must_equal '756'
end
it "Deve carregar 5 pagamentos" do
subject.pagamentos.size.must_equal 5
end
describe "deve setar as informações corretas para os pagamentos" do
it "valores para o pagamento 1" do
pagamento = subject.pagamentos[0]
pagamento.modalidade.must_equal nil
pagamento.agencia_com_dv.must_equal "030690"
pagamento.agencia_sem_dv.must_equal "03069"
pagamento.numero_conta.must_equal "0000000777778"
pagamento.numero_conta_dv.must_equal "8"
pagamento.numero_conta_sem_dv.must_equal "000000077777"
pagamento.dv_conta_e_agencia.must_equal "0"
pagamento.nosso_numero_sem_dv.must_equal "000000033001014"
pagamento.carteira.must_equal "1"
pagamento.numero_documento.must_equal "000000000000330"
pagamento.data_vencimento.must_equal Date.parse('03/05/2016')
pagamento.valor_titulo.must_equal 129.39
pagamento.banco_recebedor.must_equal "756"
pagamento.agencia_recebedora_com_dv.must_equal "030690"
pagamento.identificacao_titulo_empresa.must_equal ""
pagamento.codigo_moeda.must_equal "09"
pagamento.sacado_tipo_documento.must_equal "2"
pagamento.sacado_documento.must_equal "111111111111111"
pagamento.sacado_nome.must_equal "NOME DO CLIENTE 1 ??O"
pagamento.numero_contrato.must_equal "0000000000"
pagamento.valor_tarifa.must_equal 2.69
pagamento.motivo_ocorrencia_original_1.must_equal "00"
pagamento.motivo_ocorrencia_original_2.must_equal "00"
pagamento.motivo_ocorrencia_original_3.must_equal "00"
pagamento.motivo_ocorrencia_original_4.must_equal "00"
pagamento.motivo_ocorrencia_original_5.must_equal "04"
pagamento.valor_juros_multa.must_equal 0.0
pagamento.valor_desconto.must_equal 0.0
pagamento.valor_abatimento.must_equal 0.0
pagamento.valor_iof.must_equal 0.0
pagamento.valor_pago.must_equal 129.39
pagamento.valor_liquido.must_equal 129.39
pagamento.valor_outras_despesas.must_equal 0.0
pagamento.valor_outros_creditos.must_equal 0.0
pagamento.data_ocorrencia.must_equal Date.parse('02/05/2016')
pagamento.data_credito.must_equal Date.parse('03/05/2016')
pagamento.codigo_ocorrencia_sacado.must_equal ""
pagamento.data_ocorrencia_sacado.must_equal nil
pagamento.valor_ocorrencia_sacado.must_equal 0.0
pagamento.complemento_ocorrencia_sacado.must_equal ""
pagamento.codigo_ocorrencia_banco_correspondente.must_equal "756"
pagamento.nosso_numero_banco_correspondente.must_equal "00000000000000000000"
pagamento.codigo_movimento_retorno.must_equal "06"
end
it "valores para o pagamento 2" do
pagamento = subject.pagamentos[1]
pagamento.modalidade.must_equal nil
pagamento.agencia_com_dv.must_equal "030690"
pagamento.agencia_sem_dv.must_equal "03069"
pagamento.numero_conta.must_equal "0000000777778"
pagamento.numero_conta_dv.must_equal "8"
pagamento.numero_conta_sem_dv.must_equal "000000077777"
pagamento.dv_conta_e_agencia.must_equal "0"
pagamento.nosso_numero_sem_dv.must_equal "000000034801024"
pagamento.carteira.must_equal "1"
pagamento.numero_documento.must_equal "000000000000348"
pagamento.data_vencimento.must_equal Date.parse('06/05/2016')
pagamento.valor_titulo.must_equal 29.0
pagamento.banco_recebedor.must_equal "756"
pagamento.agencia_recebedora_com_dv.must_equal "030690"
pagamento.identificacao_titulo_empresa.must_equal ""
pagamento.codigo_moeda.must_equal "09"
pagamento.sacado_tipo_documento.must_equal "2"
pagamento.sacado_documento.must_equal "222222222222222"
pagamento.sacado_nome.must_equal "NOME DO CLIENTE 2"
pagamento.numero_contrato.must_equal "0000000000"
pagamento.valor_tarifa.must_equal 2.69
pagamento.motivo_ocorrencia_original_1.must_equal "00"
pagamento.motivo_ocorrencia_original_2.must_equal "00"
pagamento.motivo_ocorrencia_original_3.must_equal "00"
pagamento.motivo_ocorrencia_original_4.must_equal "00"
pagamento.motivo_ocorrencia_original_5.must_equal "04"
pagamento.valor_juros_multa.must_equal 0.0
pagamento.valor_desconto.must_equal 0.0
pagamento.valor_abatimento.must_equal 0.0
pagamento.valor_iof.must_equal 0.0
pagamento.valor_pago.must_equal 29.0
pagamento.valor_liquido.must_equal 29.0
pagamento.valor_outras_despesas.must_equal 0.0
pagamento.valor_outros_creditos.must_equal 0.0
pagamento.data_ocorrencia.must_equal Date.parse('05/05/2016')
pagamento.data_credito.must_equal Date.parse('06/05/2016')
pagamento.codigo_ocorrencia_sacado.must_equal ""
pagamento.data_ocorrencia_sacado.must_equal nil
pagamento.valor_ocorrencia_sacado.must_equal 0.0
pagamento.complemento_ocorrencia_sacado.must_equal ""
pagamento.codigo_ocorrencia_banco_correspondente.must_equal "304"
pagamento.nosso_numero_banco_correspondente.must_equal "00000000000000000000"
pagamento.codigo_movimento_retorno.must_equal "06"
end
it "valores para o pagamento 3" do
pagamento = subject.pagamentos[2]
pagamento.modalidade.must_equal nil
pagamento.agencia_com_dv.must_equal "030690"
pagamento.agencia_sem_dv.must_equal "03069"
pagamento.numero_conta.must_equal "0000000777778"
pagamento.numero_conta_dv.must_equal "8"
pagamento.numero_conta_sem_dv.must_equal "000000077777"
pagamento.dv_conta_e_agencia.must_equal "0"
pagamento.nosso_numero_sem_dv.must_equal "000000035501014"
pagamento.carteira.must_equal "1"
pagamento.numero_documento.must_equal "000000000000355"
pagamento.data_vencimento.must_equal Date.parse('06/05/2016')
pagamento.valor_titulo.must_equal 89.1
pagamento.banco_recebedor.must_equal "756"
pagamento.agencia_recebedora_com_dv.must_equal "030690"
pagamento.identificacao_titulo_empresa.must_equal ""
pagamento.codigo_moeda.must_equal "09"
pagamento.sacado_tipo_documento.must_equal "2"
pagamento.sacado_documento.must_equal "333333333333333"
pagamento.sacado_nome.must_equal "NOME DO CLIENTE 3"
pagamento.numero_contrato.must_equal "0000000000"
pagamento.valor_tarifa.must_equal 2.69
pagamento.motivo_ocorrencia_original_1.must_equal "00"
pagamento.motivo_ocorrencia_original_2.must_equal "00"
pagamento.motivo_ocorrencia_original_3.must_equal "00"
pagamento.motivo_ocorrencia_original_4.must_equal "00"
pagamento.motivo_ocorrencia_original_5.must_equal "04"
pagamento.valor_juros_multa.must_equal 0.0
pagamento.valor_desconto.must_equal 0.0
pagamento.valor_abatimento.must_equal 0.0
pagamento.valor_iof.must_equal 0.0
pagamento.valor_pago.must_equal 89.1
pagamento.valor_liquido.must_equal 89.1
pagamento.valor_outras_despesas.must_equal 0.0
pagamento.valor_outros_creditos.must_equal 0.0
pagamento.data_ocorrencia.must_equal Date.parse('05/05/2016')
pagamento.data_credito.must_equal Date.parse('06/05/2016')
pagamento.codigo_ocorrencia_sacado.must_equal ""
pagamento.data_ocorrencia_sacado.must_equal nil
pagamento.valor_ocorrencia_sacado.must_equal 0.0
pagamento.complemento_ocorrencia_sacado.must_equal ""
pagamento.codigo_ocorrencia_banco_correspondente.must_equal "100"
pagamento.nosso_numero_banco_correspondente.must_equal "00000000000000000000"
pagamento.codigo_movimento_retorno.must_equal "06"
end
it "valores para o pagamento 4" do
pagamento = subject.pagamentos[3]
pagamento.modalidade.must_equal nil
pagamento.agencia_com_dv.must_equal "030690"
pagamento.agencia_sem_dv.must_equal "03069"
pagamento.numero_conta.must_equal "0000000777778"
pagamento.numero_conta_dv.must_equal "8"
pagamento.numero_conta_sem_dv.must_equal "000000077777"
pagamento.dv_conta_e_agencia.must_equal "0"
pagamento.nosso_numero_sem_dv.must_equal "000000036201014"
pagamento.carteira.must_equal "1"
pagamento.numero_documento.must_equal "000000000000362"
pagamento.data_vencimento.must_equal Date.parse('06/05/2016')
pagamento.valor_titulo.must_equal 29.0
pagamento.banco_recebedor.must_equal "104"
pagamento.agencia_recebedora_com_dv.must_equal "004140"
pagamento.identificacao_titulo_empresa.must_equal ""
pagamento.codigo_moeda.must_equal "09"
pagamento.sacado_tipo_documento.must_equal "1"
pagamento.sacado_documento.must_equal "444444444444444"
pagamento.sacado_nome.must_equal "NOME DO CLIENTE 4"
pagamento.numero_contrato.must_equal "0000000000"
pagamento.valor_tarifa.must_equal 2.69
pagamento.motivo_ocorrencia_original_1.must_equal "00"
pagamento.motivo_ocorrencia_original_2.must_equal "00"
pagamento.motivo_ocorrencia_original_3.must_equal "00"
pagamento.motivo_ocorrencia_original_4.must_equal "00"
pagamento.motivo_ocorrencia_original_5.must_equal "04"
pagamento.valor_juros_multa.must_equal 0.0
pagamento.valor_desconto.must_equal 0.0
pagamento.valor_abatimento.must_equal 0.0
pagamento.valor_iof.must_equal 0.0
pagamento.valor_pago.must_equal 29.0
pagamento.valor_liquido.must_equal 29.0
pagamento.valor_outras_despesas.must_equal 0.0
pagamento.valor_outros_creditos.must_equal 0.0
pagamento.data_ocorrencia.must_equal Date.parse('06/05/2016')
pagamento.data_credito.must_equal nil
pagamento.codigo_ocorrencia_sacado.must_equal ""
pagamento.data_ocorrencia_sacado.must_equal nil
pagamento.valor_ocorrencia_sacado.must_equal 0.0
pagamento.complemento_ocorrencia_sacado.must_equal ""
pagamento.codigo_ocorrencia_banco_correspondente.must_equal "756"
pagamento.nosso_numero_banco_correspondente.must_equal "00000000000000000000"
pagamento.codigo_movimento_retorno.must_equal "06"
end
it "valores para o pagamento 4" do
pagamento = subject.pagamentos[4]
pagamento.agencia_com_dv.must_equal "030690"
pagamento.agencia_sem_dv.must_equal "03069"
pagamento.numero_conta.must_equal "0000000777778"
pagamento.numero_conta_dv.must_equal "8"
pagamento.numero_conta_sem_dv.must_equal "000000077777"
pagamento.dv_conta_e_agencia.must_equal "0"
pagamento.nosso_numero_sem_dv.must_equal "000000036201014"
pagamento.carteira.must_equal "1"
pagamento.numero_documento.must_equal "000000000000362"
pagamento.data_vencimento.must_equal Date.parse('06/05/2016')
pagamento.valor_titulo.must_equal 47.37
pagamento.banco_recebedor.must_equal "104"
pagamento.agencia_recebedora_com_dv.must_equal "004140"
pagamento.identificacao_titulo_empresa.must_equal ""
pagamento.codigo_moeda.must_equal "09"
pagamento.sacado_tipo_documento.must_equal "1"
pagamento.sacado_documento.must_equal "555555555555555"
pagamento.sacado_nome.must_equal "NOME DO CLIENTE 5"
pagamento.numero_contrato.must_equal "0000000000"
pagamento.valor_tarifa.must_equal 2.69
pagamento.motivo_ocorrencia_original_1.must_equal "00"
pagamento.motivo_ocorrencia_original_2.must_equal "00"
pagamento.motivo_ocorrencia_original_3.must_equal "00"
pagamento.motivo_ocorrencia_original_4.must_equal "00"
pagamento.motivo_ocorrencia_original_5.must_equal "04"
pagamento.valor_juros_multa.must_equal 0.0
pagamento.valor_desconto.must_equal 0.0
pagamento.valor_abatimento.must_equal 0.0
pagamento.valor_iof.must_equal 0.0
pagamento.valor_pago.must_equal 47.37
pagamento.valor_liquido.must_equal 47.37
pagamento.valor_outras_despesas.must_equal 0.0
pagamento.valor_outros_creditos.must_equal 0.0
pagamento.data_ocorrencia.must_equal Date.parse('06/05/2016')
pagamento.data_credito.must_equal nil
pagamento.codigo_ocorrencia_sacado.must_equal ""
pagamento.data_ocorrencia_sacado.must_equal nil
pagamento.valor_ocorrencia_sacado.must_equal 0.0
pagamento.complemento_ocorrencia_sacado.must_equal ""
pagamento.codigo_ocorrencia_banco_correspondente.must_equal "756"
pagamento.nosso_numero_banco_correspondente.must_equal "00000000000000000000"
pagamento.codigo_movimento_retorno.must_equal "06"
end
end
end | 65.072581 | 87 | 0.612467 |
7ad8e556770c8fbf8f8726aeed25df3733aaec87 | 90 | VALID_EMAIL_REGEX = /\A[\w+\-.]+@[a-z\d\-.]+\.[a-z]+\z/i
# Phone number: an optional leading "+" followed by one or more ASCII digits
# only (no spaces, dashes, or parentheses). Anchored with \A/\z so the whole
# string must match, not just a single line.
VALID_PHONE_REGEX = /\A\+?\d+\z/
| 30 | 56 | 0.511111 |
bfa250711c5c1378a0e7926618da322cd161d407 | 181 | require 'active_merchant'
require 'active_merchant/billing/integrations/action_view_helper'

# Mix the ActiveMerchant payment-integration view helpers (e.g.
# payment_service_for) into every ActionView template so checkout forms
# can be generated directly in views.
# NOTE(review): send(:include, ...) bypasses the private `include` on old
# Rubies; plain `ActionView::Base.include` would work on Ruby >= 2.1.
ActionView::Base.send(:include, ActiveMerchant::Billing::Integrations::ActionViewHelper)
| 45.25 | 88 | 0.856354 |
bb897dc859e2d693983c8404bc42a456f96ba27f | 2,723 | require 'test_helper'
require 'wsdl_mapper/dom/namespaces'
module DomTests
  # Unit tests for WsdlMapper::Dom::Namespaces: a prefix <-> namespace-URL
  # registry supporting explicit registration, automatic prefix generation
  # ("ns0", "ns1", ...), a prefix-less default namespace, hash conversion,
  # and enumeration (default namespace first).
  class NamespacesTest < WsdlMapperTesting::Test
    include WsdlMapper::Dom

    def test_storing_and_retrieving_namespaces
      namespaces = Namespaces.new
      namespaces.set(:ns1, 'http://example.org/foobar')
      url = namespaces.get(:ns1)
      assert_equal 'http://example.org/foobar', url
    end

    def test_getting_prefix_for_a_stored_namespace
      namespaces = Namespaces.new
      namespaces.set(:ns1, 'http://example.org/foobar')
      prefix = namespaces.prefix_for('http://example.org/foobar')
      # Note: set accepts a Symbol but prefix_for returns a String.
      assert_equal 'ns1', prefix
    end

    def test_generating_prefixes_automatically
      namespaces = Namespaces.new
      # Unknown namespaces get sequentially numbered prefixes on first use.
      prefix1 = namespaces.prefix_for('http://example.org/foobar1')
      prefix2 = namespaces.prefix_for('http://example.org/foobar2')
      assert_equal 'ns0', prefix1
      assert_equal 'ns1', prefix2
    end

    def test_prefix_for_default_namespace
      namespaces = Namespaces.new
      namespaces.default = 'http://example.org/foobar'
      # The default namespace is unprefixed, so prefix_for yields nil.
      prefix = namespaces.prefix_for('http://example.org/foobar')
      assert_nil prefix
    end

    def test_prefix_for_nil
      namespaces = Namespaces.new
      prefix = namespaces.prefix_for(nil)
      assert_nil(prefix)
    end

    def test_convert_hash_to_namespaces
      hash = {
        foo: 'http://example.org/foobar1',
        bar: 'http://example.org/foobar2'
      }
      namespaces = Namespaces.for(hash)
      assert_equal 'http://example.org/foobar1', namespaces[:foo]
      assert_equal 'http://example.org/foobar2', namespaces[:bar]
    end

    def test_enumeration
      namespaces = Namespaces.for({
        foo: 'http://example.org/foo',
        bar: 'http://example.org/bar'
      })
      namespaces.default = 'http://example.org/default'
      array = namespaces.to_a
      # Enumeration always contains the default namespace as first element, if set
      assert_equal [nil, 'http://example.org/default'], array[0]
      assert_equal ['foo', 'http://example.org/foo'], array[1]
      assert_equal ['bar', 'http://example.org/bar'], array[2]
    end

    def test_enumeration_with_block
      namespaces = Namespaces.for({
        foo: 'http://example.org/foo',
        bar: 'http://example.org/bar'
      })
      namespaces.default = 'http://example.org/default'
      # More of a smoke test. Assertion of sequence and correct pairs is done in #test_enumeration
      # NOTE(review): .with_index is unused here — the block never receives
      # the index; plain namespaces.each would suffice.
      namespaces.each.with_index do |(prefix, url)|
        assert_includes [nil, 'foo', 'bar'], prefix
        assert_includes ['http://example.org/default', 'http://example.org/foo', 'http://example.org/bar'], url
      end
    end
  end
end
| 28.072165 | 111 | 0.661403 |
013dcc9d4902faae3c0eb2310e5af32498e2012e | 5,319 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Subscriptions::Mgmt::V2019_06_01
  #
  # A service client - single point of access to the REST API.
  #
  # NOTE: this class is AutoRest-generated (see the file header); keep
  # hand edits to comments only, as regeneration will discard them.
  class SubscriptionClient < MsRestAzure::AzureServiceClient
    include MsRestAzure
    include MsRestAzure::Serialization

    # @return [String] the base URI of the service.
    attr_accessor :base_url

    # @return Credentials needed for the client to connect to Azure.
    attr_reader :credentials

    # @return [String] The API version to use for the operation.
    attr_reader :api_version

    # @return [String] The preferred language for the response.
    attr_accessor :accept_language

    # @return [Integer] The retry timeout in seconds for Long Running
    # Operations. Default value is 30.
    attr_accessor :long_running_operation_retry_timeout

    # @return [Boolean] Whether a unique x-ms-client-request-id should be
    # generated. When set to true a unique x-ms-client-request-id value is
    # generated and included in each request. Default is true.
    attr_accessor :generate_client_request_id

    # @return [Operations] operations
    attr_reader :operations

    # @return [Subscriptions] subscriptions
    attr_reader :subscriptions

    # @return [Tenants] tenants
    attr_reader :tenants

    #
    # Creates initializes a new instance of the SubscriptionClient class.
    # @param credentials [MsRest::ServiceClientCredentials] credentials to authorize HTTP requests made by the service client.
    # @param base_url [String] the base URI of the service.
    # @param options [Array] filters to be applied to the HTTP requests.
    #
    def initialize(credentials = nil, base_url = nil, options = nil)
      super(credentials, options)
      # Fall back to the public Azure Resource Manager endpoint.
      @base_url = base_url || 'https://management.azure.com'

      # Chained modifiers: only type-check credentials when they were given.
      fail ArgumentError, 'invalid type of credentials input parameter' unless credentials.is_a?(MsRest::ServiceClientCredentials) unless credentials.nil?
      @credentials = credentials

      @operations = Operations.new(self)
      @subscriptions = Subscriptions.new(self)
      @tenants = Tenants.new(self)
      @api_version = '2019-06-01'
      @accept_language = 'en-US'
      @long_running_operation_retry_timeout = 30
      @generate_client_request_id = true
      add_telemetry
    end

    #
    # Makes a request and returns the body of the response.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [Hash{String=>String}] containing the body of the response.
    # Example:
    #
    #  request_content = "{'location':'westus','tags':{'tag1':'val1','tag2':'val2'}}"
    #  path = "/path"
    #  options = {
    #    body: request_content,
    #    query_params: {'api-version' => '2016-02-01'}
    #  }
    #  result = @client.make_request(:put, path, options)
    #
    def make_request(method, path, options = {})
      result = make_request_with_http_info(method, path, options)
      result.body unless result.nil?
    end

    #
    # Makes a request and returns the operation response.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [MsRestAzure::AzureOperationResponse] Operation response containing the request, response and status.
    #
    def make_request_with_http_info(method, path, options = {})
      result = make_request_async(method, path, options).value!
      result.body = result.response.body.to_s.empty? ? nil : JSON.load(result.response.body)
      result
    end

    #
    # Makes a request asynchronously.
    # @param method [Symbol] with any of the following values :get, :put, :post, :patch, :delete.
    # @param path [String] the path, relative to {base_url}.
    # @param options [Hash{String=>String}] specifying any request options like :body.
    # @return [Concurrent::Promise] Promise object which holds the HTTP response.
    #
    def make_request_async(method, path, options = {})
      fail ArgumentError, 'method is nil' if method.nil?
      fail ArgumentError, 'path is nil' if path.nil?

      request_url = options[:base_url] || @base_url
      # Allow a per-request Content-Type override via options[:headers].
      if(!options[:headers].nil? && !options[:headers]['Content-Type'].nil?)
        @request_headers['Content-Type'] = options[:headers]['Content-Type']
      end

      request_headers = @request_headers
      request_headers.merge!({'accept-language' => @accept_language}) unless @accept_language.nil?
      options.merge!({headers: request_headers.merge(options[:headers] || {})})
      options.merge!({credentials: @credentials}) unless @credentials.nil?

      super(request_url, method, path, options)
    end


    private
    #
    # Adds telemetry information.
    #
    def add_telemetry
      sdk_information = 'azure_mgmt_subscriptions'
      sdk_information = "#{sdk_information}/0.18.5"
      add_user_agent_information(sdk_information)
    end
  end
end
| 38.824818 | 154 | 0.690731 |
622688448eb411debad8ffbe94e6d634d2f319ef | 4,804 | module BioInterchange::TextMining
require 'rubygems'
require 'json'
class PubAnnosJSONReader < BioInterchange::TextMining::TMReader
# Register reader:
BioInterchange::Registry.register_reader(
'dbcls.catanns.json',
PubAnnosJSONReader,
[
'name',
'name_id',
'date',
[ Proc.new { |*args| BioInterchange::TextMining::TMReader::determine_process(*args) }, 'name_id' ],
'version'
],
false,
'PDFx XML reader',
[
[ 'date <date>', 'date when the GFF3 file was created (optional)' ],
[ 'version <version>', 'version number of resource (optional)' ],
[ 'name <name>', 'name of the GFF3 file creator (required)' ],
[ 'name_id <id>', 'email address of the GFF3 file creator (required)' ]
]
)
# Deserialize a PubAnnotations JSON object.
#
# +inputstream+:: Input IO stream to deserialize
def deserialize(inputstream)
if inputstream.kind_of?(IO) then
pubannos(inputstream.read)
elsif inputstream.kind_of?(String) then
pubannos(inputstream)
else
super(inputstream)
end
end
private
# Specific method for parsing of *Pubannotations* json format
def pubannos(data)
result = JSON.parse(data)
if result.has_key? 'Error'
raise BioInterchange::Exceptions::InputFormatError, 'Error parsing the JSON input file: #{result["Error"]}'
end
text = result['text']
#doc_uri = "http://pubannotation.dbcls.jp/pmdocs/" + result['pmid'].to_s
doc_uri = result['docurl']
doc = Document.new(doc_uri)
docContent = Content.new(0, text.length, Content::DOCUMENT, @process)
docContent.setContext(doc)
doc.add(docContent)
#so our document requires content of type document or abstract
#should it hold the content string?
#hash to remember annotation in case they are needed for building upon based on ids later
contents = {}
if result['catanns']
result['catanns'].each do |annot|
start_offset = 0
end_offset = 0
if annot['span']
start_offset = annot['span']['begin']
end_offset = annot['span']['end']
elsif annot['begin'] and annot['end']
start_offset = annot['begin']
end_offset = annot['end']
end
length = end_offset - start_offset
category = annot['category']
id = annot['id']
entity = text.slice(start_offset..end_offset)
#phrase = type for NE
con = Content.new(start_offset, length, Content::PHRASE, @process)
con.setContext(doc)
doc.add(con)
contents[id] = con
#set process.date = updated_time?
end
end
if result['insanns']
result['insanns'].each do |annot|
#unsure what to do about this (con1), 'E1' is the ID of something not created yet.
#it is perhaps a case of making a new content, but with what params...?
#need to conform what this is refering to with JDK
con1 = nil
con2 = contents[annot['object']]
#get annotation type
type = ContentConnection::UNSPECIFIED
case annot['type']
when 'subClassOf'
type = ContentConnection::SUBCLASS
end
connection = ContentConnection.new(con1, con2, type, @process)
connection.setContext(doc)
doc.add(connection)
contents[annot['id']] = connection
end
end
if result['relanns']
result['relanns'].each do |annot|
con1 = contents[annot['subject']]
con2 = contents[annot['object']]
#get annotation type
type = ContentConnection::UNSPECIFIED
case annot['type']
when 'equivalentTo'
type = ContentConnection::EQUIVALENCE
when 'themeOf'
type = ContentConnection::THEME
end
connection = ContentConnection.new(con1, con2, type, @process)
connection.setContext(doc)
doc.add(connection)
contents[annot['id']] = connection
end
end
if result['modanns']
result['modanns'].each do |annot|
#in this case, it is a modification of an already existing content object (speculation/negation).
con = contents[annot['object']]
#get annotation type
type = ContentConnection::UNSPECIFIED
case annot['type']
when 'Speculation'
type = ContentConnection::SPECULATION
when 'Negation'
type = ContentConnection::NEGATION
end
connection = ContentConnection.new(con, nil, type, @process)
connection.setContext(doc)
doc.add(connection)
contents[annot['id']] = connection
end
end
doc
end
end
end
| 27.930233 | 113 | 0.60637 |
b90be598503360f59af482a9a53849fb54692d71 | 1,593 | require_relative './handler.rb'
require_relative './place_handler.rb'
class PersonHandler < Handler
def initialize(args)
super
@property_mapping.merge!({
"name" => "preferred_name" ,
"givenName" => "forename" ,
"familyName" => "surname" ,
"gender" => "gender" ,
"birthDate" => "date_of_birth" ,
"deathDate" => "date_of_death" ,
"description" => "biographical_or_historical_information"
})
@place_property_mapping = {
"birthPlace" => "place_of_birth" ,
"deathPlace" => "place_of_death"
}
@conf = args[:conf]
end
def serialize
@json.merge!({
"@type" => "Person"
})
@property_mapping.keys.each do |property|
add_data(property)
end
add_same_as('gnd')
add_same_as('viaf')
add_same_as('lc_naf')
add_alternate_names
@place_property_mapping.keys.each do |property|
add_place(property)
end
@json["@id"] = "#{@conf[:namespaces][:person]}p_#{@resource['id']}"
return @json
end
def add_alternate_names
if (names = @resource['variant_names'])
unless (is_null?(names))
@json['alternateName'] = []
names.each_line { |name| @json['alternateName'] << name.strip }
end
end
end
def add_place(property)
if (column_name = @place_property_mapping[property])
place_handler = PlaceHandler.new({
:resource => @resource ,
:place_column => column_name ,
:conf => @conf
})
if (place = place_handler.serialize)
@json[property] = place
end
end
end
end | 24.890625 | 71 | 0.602637 |
ed37c64d11dfc54b1621016087023157904568e4 | 1,008 | require 'spec_helper'
module VhdlDoctest
describe Types do
describe ".parse" do
subject { Types.parse(string) }
describe 'std_logic' do
let(:string) { 'std_logic' }
it { should be_a Types::StdLogic }
end
describe 'std_logic_vector' do
let(:string) { 'std_logic_vector(8 downto 0)' }
it { should be_a Types::StdLogicVector }
end
describe 'std_logic_vector, but not in format' do
let(:string) { 'std_logic_vector(0 upto 8)' }
specify { expect { subject }.to raise_error }
end
describe 'upcase STD_LOGIC' do
let(:string) { 'STD_LOGIC' }
it { should be_a Types::StdLogic }
end
describe 'upcase STD_LOGIC_VECTOR' do
let(:string) { 'STD_LOGIC_VECTOR(2 DOWNTO 0)' }
it { should be_a Types::StdLogicVector }
end
describe 'unknown' do
let(:string) { 'unkonwn' }
specify { expect { subject }.to raise_error }
end
end
end
end
| 25.2 | 55 | 0.59623 |
3969968229f3f37e4155efe845e6373ad259b238 | 3,312 | # frozen_string_literal: true
module PuppetEditorServices
class SimpleSTDIOServerConnection < SimpleServerConnectionBase
attr_accessor :simple_stdio_server
def initialize(simple_stdio_server)
@simple_stdio_server = simple_stdio_server
end
def send_data(data)
$editor_services_stdout.write(data) # rubocop:disable Style/GlobalVars We need this global var
true
end
def close_connection_after_writing
$editor_services_stdout.flush # rubocop:disable Style/GlobalVars We need this global var
@simple_stdio_server.close_connection
true
end
def close_connection
@simple_stdio_server.close_connection
true
end
end
# Minimal single-client server that speaks the editor-services protocol over
# STDIN/STDOUT instead of a TCP socket.
#
# On start it hijacks STDOUT: the real stream is stashed in
# $editor_services_stdout and $stdout is redirected to the null device so that
# stray writes from third-party libraries cannot corrupt the protocol stream.
class SimpleSTDIOServer
  # Set to true (via #stop) to make the read loop in #start terminate.
  attr_accessor :exiting

  # Log a debug message through the central PuppetEditorServices logger.
  def log(message)
    PuppetEditorServices.log_message(:debug, "STDIOSRV: #{message}")
  end

  def initialize
    @exiting = false
  end

  # Service the connected client until #stop is called or STDIN hits EOF.
  #
  # handler_klass      - class instantiated to process inbound protocol data
  # connection_options - Hash of options passed to the handler
  def start(handler_klass = PuppetEditorServices::SimpleTCPServerConnection, connection_options = {})
    connection_options[:servicename] = 'LANGUAGE SERVER' if connection_options[:servicename].nil?
    # This is a little heavy handed but we need to suppress writes to STDOUT and STDERR
    $VERBOSE = nil
    # Some libraries use $stdout to write to the console. Suppress all of that too!
    # Copy the existing $stdout variable and then reassign to NUL to suppress it
    $editor_services_stdout = $stdout # rubocop:disable Style/GlobalVars We need this global var
    $stdout = File.open(File::NULL, 'w')
    $editor_services_stdout.sync = true # rubocop:disable Style/GlobalVars We need this global var
    # Stop the stupid CRLF injection when on Windows.
    # Fix: query with the binmode? predicate. The previous guard called
    # `binmode` (the setter, which returns the IO and is always truthy), so the
    # statement body could never run and the mode was only set as a side effect
    # of evaluating the condition.
    $editor_services_stdout.binmode unless $editor_services_stdout.binmode? # rubocop:disable Style/GlobalVars We need this global var
    handler = handler_klass.new(connection_options)
    client_connection = PuppetEditorServices::SimpleSTDIOServerConnection.new(self)
    handler.client_connection = client_connection
    handler.post_init
    log('Starting STDIO server...')
    loop do
      inbound_data = nil
      read_from_pipe($stdin, 2) { |data| inbound_data = data }
      break if @exiting
      handler.receive_data(inbound_data) unless inbound_data.nil?
      break if @exiting
    end
    log('STDIO server stopped')
  end

  # Request the read loop to terminate.
  def stop
    log('Stopping STDIO server...')
    @exiting = true
  end

  # Closing the (pseudo) connection is equivalent to stopping the server.
  def close_connection
    stop
  end

  # True when +stream+ has data available within +timeout+ seconds.
  def pipe_is_readable?(stream, timeout = 0.5)
    read_ready = IO.select([stream], [], [], timeout)
    read_ready && stream == read_ready[0][0]
  end

  # Read up to 4096 bytes from +pipe+ and yield them to the block.
  # EOF stops the server; other read errors are deliberately swallowed
  # because the pipe could be in any state. Always returns nil.
  def read_from_pipe(pipe, timeout = 0.1, &_block)
    if pipe_is_readable?(pipe, timeout)
      l = nil
      begin
        l = pipe.readpartial(4096)
      rescue EOFError
        log('Reading from pipe has reached End of File. Exiting STDIO server')
        stop
      rescue # rubocop:disable Style/RescueStandardError, Lint/HandleExceptions
        # Any errors here should be swallowed because the pipe could be in any state
      end
      # since readpartial may return a nil at EOF, skip returning that value
      # client_connected = true unless l.nil?
      yield l unless l.nil?
    end
    nil
  end
end
end
| 32.792079 | 136 | 0.691425 |
624fb68b780b1cc4b0816a2363911f1fcf0724bb | 1,695 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
# NOTE: auto-generated by Rails from migrations; comments here are lost on the
# next `db:schema:dump`.
ActiveRecord::Schema.define(version: 2021_04_15_131843) do
  # Join table recording which users attend which events.
  create_table "attendances", force: :cascade do |t|
    t.integer "event_id", null: false
    t.integer "user_id", null: false
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
    t.index ["event_id"], name: "index_attendances_on_event_id"
    t.index ["user_id"], name: "index_attendances_on_user_id"
  end

  # Events created by users; creator_id references users (no FK enforced).
  create_table "events", force: :cascade do |t|
    t.string "name"
    t.string "description"
    t.string "date"
    t.string "location"
    t.integer "creator_id"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end

  create_table "users", force: :cascade do |t|
    t.string "name"
    t.datetime "created_at", precision: 6, null: false
    t.datetime "updated_at", precision: 6, null: false
  end

  add_foreign_key "attendances", "events"
  add_foreign_key "attendances", "users"
end
| 39.418605 | 86 | 0.733333 |
7911fc2b0817301a91f80d87d0ebaea0e602af57 | 416 | # frozen_string_literal: true
#
# spidy interface binding
#
# Loads a spidy definition from a Ruby source file and exposes the
# evaluated spider object via #spidy.
class Spidy::DefinitionFile
  attr_reader :path, :spidy

  # Builds an instance for +filepath+ and immediately evaluates the
  # definition, returning the ready-to-use object.
  def self.open(filepath)
    definition = new(filepath)
    definition.eval_definition
    definition
  end

  # Evaluates the definition file's contents; a no-op when no path was given.
  # rubocop:disable Security/Eval
  def eval_definition
    return unless path
    @spidy = eval(File.read(path))
  end
  # rubocop:enable Security/Eval

  private

  def initialize(path)
    @path = path
  end
end
| 15.407407 | 42 | 0.704327 |
1c88f5dcf4bec0f03ad9b1c21429df652311b0af | 1,167 | require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Steward
  # Rails application configuration for the Steward app.
  class Application < Rails::Application
    # Autoload custom code living under lib/ in addition to app/.
    config.autoload_paths << Rails.root.join('lib')
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'
    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de
    # Do not swallow errors in after_commit/after_rollback callbacks.
    config.active_record.raise_in_transactional_callbacks = true
  end
end
| 40.241379 | 99 | 0.730934 |
1c8d2ca12d59c354f93ca8bd65235d089dcb14d5 | 175 | require_relative 'base'
# User activity backed by the authenticated user's submitted posts.
class UserSubmission < BaseUserActivity
  # NOTE(review): the original `protected` keyword before this method had no
  # effect — visibility modifiers written bare in a class body do not apply to
  # methods defined with `def self.`; the method has always been public, so the
  # misleading modifier is removed rather than changing actual visibility.

  # Fetches the user's submissions through the reddit API wrapper object.
  def self.reddit_accessor(reddit_object, args)
    reddit_object.get_submitted(args)
  end
end
| 15.909091 | 47 | 0.794286 |
26bf8516ecb305b905cfc5d6be78f4a29a07a552 | 4,343 | require 'spec_helper'
# Controller specs for Devise/OmniAuth Facebook callbacks.
# NOTE(review): uses legacy RSpec `should` syntax and FactoryGirl; kept as-is.
describe OmniauthCallbacksController do
  before do
    facebook_provider
    OmniauthCallbacksController.add_providers
    @request.env["devise.mapping"] = Devise.mappings[:user]
  end

  let(:return_to){ nil }
  # A user who already has an authorization matching the OmniAuth uid below.
  let(:user){ FactoryGirl.create(:user, authorizations: [ FactoryGirl.create(:authorization, uid: oauth_data[:uid], oauth_provider: facebook_provider ) ]) }
  let(:facebook_provider){ FactoryGirl.create :oauth_provider, name: 'facebook' }
  # Canned OmniAuth payload mimicking a real Facebook response.
  let(:oauth_data){
    Hashie::Mash.new({
      credentials: {
        expires: true,
        expires_at: 1366644101,
        token: "AAAHuZCwF61OkBAOmLTwrhv52pZCriPnTGIasdasdasdascNhZCZApsZCSg6POZCQqolxYjnqLSVH67TaRDONx72fXXXB7N7ZBByLZCV7ldvagm"
      },
      extra: {
        raw_info: {
          bio: "I, simply am not there",
          email: "[email protected]",
          first_name: "Diogo",
          gender: "male",
          id: "547955110",
          last_name: "Biazus",
          link: "http://www.facebook.com/diogo.biazus",
          locale: "pt_BR",
          name: "Diogo, Biazus",
          timezone: -3,
          updated_time: "2012-08-01T18:22:50+0000",
          username: "diogo.biazus",
          verified: true
        },
      },
      info: {
        description: "I, simply am not there",
        email: "[email protected]",
        first_name: "Diogo",
        image: "http://graph.facebook.com/547955110/picture?type:, square",
        last_name: "Biazus",
        name: "Diogo, Biazus",
        nickname: "diogo.biazus",
        urls: {
          Facebook: "http://www.facebook.com/diogo.biazus"
        },
        verified: true
      },
      provider: "facebook",
      uid: "547955110"
    })
  }
  subject{ response }

  describe ".add_providers" do
    subject{ controller }
    # add_providers should define one callback action per configured provider.
    it{ should respond_to(:facebook) }
  end

  describe "GET facebook" do
    describe "when user already loged in" do
      let(:set_expectations) { }
      let(:user) { FactoryGirl.create(:user, name: 'Foo') }

      before do
        set_expectations
        controller.stub(:current_user).and_return(user)
        session[:return_to] = return_to
        request.env['omniauth.auth'] = oauth_data
        get :facebook
      end

      describe "assigned user" do
        # The callback should attach the authorization to the signed-in user,
        # not create a new account.
        subject{ assigns(:auth).user }
        its(:name){ should == "Foo" }
        it { subject.authorizations.reload.should have(1).item }
      end

      describe 'update social info' do
        let(:set_expectations) { User.any_instance.should_receive(:update_social_info).once }
        it "should satisfy expectations" do; end
      end

      describe 'update access token' do
        let(:set_expectations) { Authorization.any_instance.should_receive(:update_access_token_from_hash).once }
        it "should satisfy expectations" do; end
      end

      it{ should redirect_to root_path }
    end

    describe 'when user not loged in' do
      let(:set_expectations) { }

      before do
        set_expectations
        user
        session[:return_to] = return_to
        request.env['omniauth.auth'] = oauth_data
        get :facebook
      end

      context "when there is no such user but we retrieve the email from omniauth" do
        let(:user){ nil }
        describe "assigned user" do
          # A fresh account should be built from the OmniAuth payload.
          subject{ assigns(:auth).user }
          its(:email){ should == "[email protected]" }
          its(:name){ should == "Diogo, Biazus" }
        end
        it{ should redirect_to root_path }
      end

      context "when there is a valid user with this provider and uid and session return_to is /foo" do
        let(:return_to){ '/foo' }
        it{ assigns(:auth).user.should == user }
        it{ should redirect_to '/foo' }
      end

      context "when there is a valid user with this provider and uid and session return_to is nil" do
        it{ assigns(:auth).user.should == user }
        it{ should redirect_to root_path }
      end

      describe 'update social info' do
        let(:set_expectations) { User.any_instance.should_receive(:update_social_info).once }
        it "should satisfy expectations" do; end
      end

      describe 'update access token' do
        let(:set_expectations) { Authorization.any_instance.should_receive(:update_access_token_from_hash).once }
        it "should satisfy expectations" do; end
      end
    end
  end
end
| 31.471014 | 156 | 0.620309 |
ac0513a161cf90b9bb23b88543b4f9f0bec20a58 | 1,479 | require 'spec_helper'
module Sexpr::Grammar
# Specs for Tagging#tagging_module_for: resolving which module tags a rule,
# falling back from a named module to the default, or nil.
describe Tagging, "tagging_module_for" do
  include Tagging

  # Fixture namespace standing in for a real tagging reference.
  module TaggingReference
    module Node; end
    module Not; end
    module Lit; end
  end

  subject{ tagging_module_for(rulename) }

  context 'when there is a tagging reference and a default tagging module' do
    def tagging_reference
      TaggingReference
    end
    def default_tagging_module
      TaggingReference::Node
    end
    context 'when the module exists' do
      let(:rulename){ :not }
      # :not maps to the capitalized constant under the reference.
      it{ should be(TaggingReference::Not) }
    end
    context 'when the module does not exists' do
      let(:rulename){ :blah }
      # Unknown rules fall back to the default module.
      it{ should be(TaggingReference::Node) }
    end
  end

  context 'when there is a no tagging reference but default tagging module' do
    def default_tagging_module
      TaggingReference::Node
    end
    let(:rulename){ :not }
    it{ should be(TaggingReference::Node) }
  end

  context 'when there is a a tagging reference and no default tagging module' do
    def tagging_reference
      TaggingReference
    end
    def default_tagging_module
      nil
    end
    context 'when the module exists' do
      let(:rulename){ :not }
      it{ should be(TaggingReference::Not) }
    end
    context 'when the module does not exists' do
      let(:rulename){ :blah }
      # No default means no match resolves to nil.
      it{ should be_nil }
    end
  end
end
end | 20.541667 | 82 | 0.627451 |
# Adds the districts table; each district belongs to a city.
class CreateDistricts < ActiveRecord::Migration
  def change
    create_table :districts do |table|
      table.string :name
      table.boolean :active
      table.references :city, index: true, foreign_key: true

      table.timestamps null: false
    end
  end
end
| 20.666667 | 56 | 0.673387 |
f8453dc595e6985e61f9e1347cd8639db8a1b5db | 1,542 | require '_aws'
# InSpec resource for auditing a single AWS VPC. With no vpc_id given it
# targets the account's default VPC.
class AwsVpc < Inspec.resource(1)
  name 'aws_vpc'
  desc 'Verifies settings for AWS VPC'
  example "
    describe aws_vpc do
      it { should be_default }
      its('cidr_block') { should cmp '10.0.0.0/16' }
    end
  "
  include AwsResourceMixin

  def to_s
    "VPC #{vpc_id}"
  end

  # Expose each fetched VPC attribute as a reader backed by the @vpc hash.
  [:cidr_block, :dhcp_options_id, :state, :vpc_id, :instance_tenancy, :is_default].each do |property|
    define_method(property) do
      @vpc[property]
    end
  end

  alias default? is_default

  private

  # Validates the resource parameters; accepts either a scalar vpc_id string
  # or a hash with a :vpc_id key. Raises ArgumentError on malformed IDs.
  def validate_params(raw_params)
    validated_params = check_resource_param_names(
      raw_params: raw_params,
      allowed_params: [:vpc_id],
      allowed_scalar_name: :vpc_id,
      allowed_scalar_type: String,
    )
    if validated_params.key?(:vpc_id) && validated_params[:vpc_id] !~ /^vpc\-[0-9a-f]{8}/
      raise ArgumentError, 'aws_vpc VPC ID must be in the format "vpc-" followed by 8 hexadecimal characters.'
    end
    validated_params
  end

  # Looks up the VPC via the backend. When no vpc_id was supplied the
  # account's default VPC is fetched instead.
  def fetch_from_aws
    backend = AwsVpc::BackendFactory.create
    if @vpc_id.nil?
      filter = { name: 'isDefault', values: ['true'] }
    else
      filter = { name: 'vpc-id', values: [@vpc_id] }
    end
    resp = backend.describe_vpcs({ filters: [filter] })
    @vpc = resp.vpcs[0].to_h
    @vpc_id = @vpc[:vpc_id]
    @exists = [email protected]?
  end

  # Backend abstraction so tests can substitute a mock AWS client.
  class Backend
    class AwsClientApi
      BackendFactory.set_default_backend(self)

      def describe_vpcs(query)
        AWSConnection.new.ec2_client.describe_vpcs(query)
      end
    end
  end
end
| 22.028571 | 110 | 0.655642 |
e85de9896d4077ee3e99b0fc3c6a64d81da0f0fd | 297 | module Destatis
# A German federal state; child regions are selected by matching state_id.
class State < Region
  def inspect
    format('<%s %s>', self.class.name, gemeinde_name)
  end

  # All Regierungsbezirke (administrative districts) within this state.
  def regierungsbezirke
    Regierungsbezirk.all.select { |bezirk| bezirk.state_id == state_id }
  end

  # All Kreise (counties) within this state.
  def kreise
    Kreis.all.select { |kreis| kreis.state_id == state_id }
  end
end
end
| 18.5625 | 64 | 0.622896 |
1a8ae2d99471e65d89bfb50d41ec9c921be21c9f | 758 | require 'telegram_bot'
require 'telegram_bot'
require 'pp'
require 'logger'
require 'yaml'
require 'daemons'
require 'mysql'
require 'active_record'

# Load environment variables from config/local_env.yml when present.
# Fixes: File.exist? (File.exists? was deprecated and removed in Ruby 3.2)
# and YAML.load_file, which closes its file handle, unlike
# YAML.load(File.open(...)) which leaked it.
env_file = File.join('config', 'local_env.yml')
if File.exist?(env_file)
  YAML.load_file(env_file).each do |key, value|
    ENV[key.to_s] = value
  end
end

logger = Logger.new(STDOUT)
# Logger.new's second positional argument is the log-rotation shift age, not
# the severity, so the level must be assigned explicitly.
logger.level = Logger::DEBUG

bot = TelegramBot.new(token: ENV['TELEGRAM_BOT_API_KEY'], logger: logger)
logger.debug "starting telegram bot"

ActiveRecord::Base.establish_connection(
  :adapter => "mysql",
  :host => ENV['MYSQL_HOST'],
  :database => ENV['MYSQL_DB'],
  :username => ENV['MYSQL_USER'],
  :password => ENV['MYSQL_PASS']
)

# Maps the oc_auths table; no behaviour beyond ActiveRecord defaults.
class OcAuth < ActiveRecord::Base
end

allowed = OcAuth.where(is_allowed: '1', username: 'midincihuy')
logger.info allowed.count
03b57dea51e4d4a9c8f83fa3d0ab8306ed19c3df | 2,711 | # Projects::TransferService class
#
# Used for transfer project to another namespace
#
# Ex.
# # Move projects to namespace with ID 17 by user
# Projects::TransferService.new(project, user, namespace_id: 17).execute
#
module Projects
# Moves a project into another namespace: validates permissions, renames the
# repositories on disk, and fires the relevant hooks — all in one transaction.
class TransferService < BaseService
  include Gitlab::ShellAdapter

  # Raised (and rescued in #execute) when any transfer step fails.
  class TransferError < StandardError; end

  # Attempts the transfer; on failure reloads the project and surfaces the
  # error on :new_namespace. Returns false on any failure.
  def execute(new_namespace)
    if allowed_transfer?(current_user, project, new_namespace)
      transfer(project, new_namespace)
    else
      project.errors.add(:new_namespace, 'is invalid')
      false
    end
  rescue Projects::TransferService::TransferError => ex
    project.reload
    project.errors.add(:new_namespace, ex.message)
    false
  end

  # Performs the transfer inside a DB transaction so a failed disk move rolls
  # back the namespace change. Raises TransferError on any blocking condition.
  def transfer(project, new_namespace)
    Project.transaction do
      old_path = project.path_with_namespace
      old_namespace = project.namespace
      new_path = File.join(new_namespace.try(:path) || '', project.path)
      if Project.where(path: project.path, namespace_id: new_namespace.try(:id)).present?
        raise TransferError.new("Project with same path in target namespace already exists")
      end
      if project.has_container_registry_tags?
        # we currently don't support renaming a repository if it contains tags in the container registry
        raise TransferError.new('Project cannot be transferred, because tags are present in its container registry')
      end
      project.expire_caches_before_rename(old_path)
      # Apply new namespace id and visibility level
      project.namespace = new_namespace
      project.visibility_level = new_namespace.visibility_level unless project.visibility_level_allowed_by_group?
      project.save!
      # Notifications
      project.send_move_instructions(old_path)
      # Move main repository
      unless gitlab_shell.mv_repository(old_path, new_path)
        raise TransferError.new('Cannot move project')
      end
      # Move wiki repo also if present
      gitlab_shell.mv_repository("#{old_path}.wiki", "#{new_path}.wiki")
      # clear project cached events
      project.reset_events_cache
      # Move uploads
      Gitlab::UploadsTransfer.new.move_project(project.path, old_namespace.path, new_namespace.path)
      project.old_path_with_namespace = old_path
      SystemHooksService.new.execute_hooks_for(project, :transfer)
      true
    end
  end

  # True when the user may move this project into +namespace+: it must be a
  # different namespace, and the user needs both change_namespace on the
  # project and create_projects in the target.
  def allowed_transfer?(current_user, project, namespace)
    namespace &&
      can?(current_user, :change_namespace, project) &&
      namespace.id != project.namespace_id &&
      current_user.can?(:create_projects, namespace)
  end
end
end
| 33.469136 | 118 | 0.69716 |
62234b7801339af74aa6d8a0babf0832ea9c41fc | 4,709 | module Ec2ex
# Helpers for managing EC2 AMIs and their EBS snapshots through the shared
# core object (@core provides the EC2 client and logger).
class Ami
  def initialize(core)
    @core = core
  end

  # Deregisters AMIs and deletes their backing snapshots. When +ami_name+ is
  # given it selects by AMI name pattern; otherwise it deletes images tagged
  # +name+ that are older than the newest +older_than+ ones.
  def deregister_image(ami_name:, name:, older_than:)
    images = if ami_name
      search_images(ami_name)
    else
      get_old_images(name, older_than)
    end
    images.each do |image|
      image_id = image[:image_id]
      @core.logger.info "delete AMI #{image_id} [#{image[:name]}]"
      @core.client.deregister_image({image_id: image_id})
      snapshot_ids = image[:block_device_mappings]
        .select{ |block_device_mapping| block_device_mapping[:ebs] != nil }
        .map{ |block_device_mapping| block_device_mapping[:ebs][:snapshot_id] }
      snapshot_ids.each do |snapshot_id|
        @core.logger.info "delete snapshot #{snapshot_id}"
        @core.client.delete_snapshot({snapshot_id: snapshot_id})
      end
    end
  end

  # Creates a no-reboot AMI from +instance+, waits until it is available,
  # tags it, and optionally copies it to another +region+.
  # Returns the new image id (in the source region).
  def create_image_with_instance(instance, region = nil)
    tags = Tag.get_hash(instance.tags)
    @core.logger.info "#{tags['Name']} image creating..."
    image_name = tags['Name'] + ".#{Time.now.strftime('%Y%m%d%H%M%S')}"
    image_response = @core.client.create_image(
      instance_id: instance.instance_id,
      name: image_name,
      no_reboot: true
    )
    sleep 10
    # Poll every 15s for up to 6 hours (1440 attempts).
    @core.client.wait_until(:image_available, image_ids: [image_response.image_id]) do |w|
      w.interval = 15
      w.max_attempts = 1440
    end
    @core.logger.info "image create complete #{tags['Name']}! image_id => [#{image_response.image_id}]"
    ami_tag = Tag.format(Tag.get_ami_tag_hash(instance, tags))
    @core.client.create_tags(resources: [image_response.image_id], tags: ami_tag)
    if region
      @core.logger.info "copying another region... [#{ENV['AWS_REGION']}] => [#{region}]"
      dest_ec2 = Aws::EC2::Client.new(region: region)
      copy_image_response = dest_ec2.copy_image(
        source_region: ENV['AWS_REGION'],
        source_image_id: image_response.image_id,
        name: image_name
      )
      dest_ec2.create_tags(resources: [copy_image_response.image_id], tags: ami_tag)
    end
    image_response.image_id
  end

  # Newest image tagged Name=+name+ by its 'created' tag; {} when none match.
  def latest_image_with_name(name)
    result = search_image_with_name(name)
    result = result.sort_by{ |image|
      tag_hash = Tag.get_hash(image[:tags])
      tag_hash['created'].nil? ? '' : tag_hash['created']
    }
    result.empty? ? {} : result.last
  end

  # Returns images tagged Name=+name+ that fall outside the newest +num+
  # per Name tag (ordered by the 'created' tag).
  def get_old_images(name, num = 10)
    result = search_image_with_name(name)
    return [] if result.empty?
    map = Hash.new{|h,k| h[k] = []}
    result = result.each{ |image|
      tag_hash = Tag.get_hash(image[:tags])
      next if tag_hash['Name'].nil? || tag_hash['created'].nil?
      map[tag_hash['Name']] << image
    }
    old_images = []
    map.each do |name, images|
      sorted_images = images.sort_by{ |image|
        tag_hash = Tag.get_hash(image[:tags])
        Time.parse(tag_hash['created'])
      }
      newly_images = sorted_images.last(num)
      old_images = old_images + (sorted_images - newly_images)
    end
    old_images
  end

  # Private (non-public) images whose AMI name matches +name+ (glob allowed).
  def search_images(name)
    filter = [{ name: 'is-public', values: ['false'] }]
    filter << { name: 'name', values: [name] }
    @core.client.describe_images(
      filters: filter
    ).data.to_h[:images]
  end

  # Private images filtered by the Name tag instead of the AMI name.
  def search_image_with_name(name)
    filter = [{ name: 'is-public', values: ['false'] }]
    filter << { name: 'tag:Name', values: [name] }
    @core.client.describe_images(
      filters: filter
    ).data.to_h[:images]
  end

  def search_image_with_id(image_id)
    @core.client.describe_images(
      image_ids: [image_id]
    ).data.to_h[:images].first
  end

  # Deletes every snapshot owned by +owner_id+ that is not referenced by any
  # remaining private AMI.
  def deregister_snapshot_no_related(owner_id)
    enable_snapshot_ids = []
    search_images('*').each do |image|
      # NOTE(review): image_id is assigned but unused in this loop.
      image_id = image[:image_id]
      snapshot_ids = image[:block_device_mappings]
        .select{ |block_device_mapping| block_device_mapping[:ebs] != nil }
        .map{ |block_device_mapping| block_device_mapping[:ebs][:snapshot_id] }
      enable_snapshot_ids.concat(snapshot_ids)
    end
    filter = [{ name: 'owner-id', values: [owner_id] }]
    all_snapshot_ids = @core.client.describe_snapshots(
      filters: filter
    ).data.to_h[:snapshots].map{ |snapshot| snapshot[:snapshot_id] }
    disable_snapshot_ids = (all_snapshot_ids - enable_snapshot_ids)
    disable_snapshot_ids.each do |disable_snapshot_id|
      @core.client.delete_snapshot({snapshot_id: disable_snapshot_id})
      @core.logger.info "delete snapshot #{disable_snapshot_id}"
    end
  end
end
end
| 34.625 | 105 | 0.628796 |
28de23464394d05d9dbd64d602c6604d88b310d9 | 1,918 | require 'securerandom'
module Moonshot
# Specs for the Shell mixin: shelling out (#sh_out) and step-wrapped
# shell commands (#sh_step) against a stubbed interactive logger.
describe Shell do
  include ResourcesHelper
  include described_class

  # Null-object resources so shell helpers can log without real AWS state.
  let(:resources) do
    Resources.new(
      ilog: InteractiveLoggerProxy.new(log),
      stack: double(Stack).as_null_object,
      controller: instance_double(Moonshot::Controller).as_null_object
    )
  end
  before { self.resources = resources }

  describe '#shell' do
    it 'should return a shell compatible with Thor::Shell::Basic.' do
      expect(shell).to be_a(Thor::Shell::Basic)
    end
  end

  describe '#sh_out' do
    it 'should raise on non-zero exit.' do
      expect { sh_out('false') }.to raise_error(/`false` exited 1/)
    end

    it 'should not raise if fail is disabled.' do
      sh_out('false', fail: false)
    end
  end

  describe '#sh_step' do
    before do
      expect(InteractiveLoggerProxy::Step).to receive(:new).and_return(step)
    end
    let(:step) { instance_double(InteractiveLoggerProxy::Step) }

    it 'should raise an error if the step fails.' do
      expect { sh_step('false') }.to raise_error(/`false` exited 1/)
    end

    it 'should provide the step and sh output to a block.' do
      output = nil
      expect(step).to receive(:continue).with('reticulating splines')
      expect(step).to receive(:success)
      sh_step('echo yo') do |step, out|
        step.continue('reticulating splines')
        output = out
      end
      expect(output).to match('yo')
    end

    it 'should truncate a long messages.' do
      # Build a command longer than the terminal and assert it is shortened
      # with an ellipsis before being handed to the logger.
      long_s = SecureRandom.urlsafe_base64(terminal_width)
      cmd = "echo #{long_s}"
      truncated_s = "#{cmd[0..(terminal_width - 22)]}..."
      expect(resources.ilog).to receive(:start_threaded).with(truncated_s)
        .and_call_original
      allow(step).to receive(:success)
      sh_step(cmd)
    end
  end
end
end
| 28.205882 | 78 | 0.622523 |
6281f49029deb75ed4f9cc5887dab267e1b64e1f | 1,020 | # -*- coding: utf-8 -*-
require "spec_helper"
# Specs for Spira resource serialization: a resource serializes to its RDF
# subject, blanks to nil, and unsupported types raise.
describe "serialization" do
  before :all do
    class SpiraResource < Spira::Base
      property :name, :predicate => RDF::Vocab::FOAF.givenName, :type => XSD.string
    end
    Spira.repository = RDF::Repository.new
  end

  it "should serialize a spira resource into its subject" do
    res = SpiraResource.for RDF::URI.new("http://example.com/resources/res1")
    serialized = SpiraResource.serialize(res)
    expect(serialized).not_to be_nil
    expect(serialized).to eql res.subject
  end

  it "should serialize a blank ruby object into nil" do
    expect(SpiraResource.serialize("")).to be_nil
  end

  it "should raise TypeError exception when trying to serialize an object it cannot serialize" do
    expect { SpiraResource.serialize(1) }.to raise_error TypeError
  end

  context "of UTF-8 literals" do
    it "should produce proper UTF-8 output" do
      # Round-trip a multibyte name through the repository.
      res = SpiraResource.create(:name => "日本語")
      expect(res.reload.name).to eql "日本語"
    end
  end
end
| 28.333333 | 97 | 0.70098 |
030f986f7d8bd706b9d513558c8bcf4001bd2387 | 235 | # frozen_string_literal: true
module TruemailServer
  module Controllers
    module Healthcheck
      # Healthcheck endpoint: always responds 200 to signal liveness.
      class Show < TruemailServer::Controllers::Base
        def call
          respond_with(200)
        end
      end
    end
  end
end
| 16.785714 | 52 | 0.655319 |
ac39f5ea4d01096e776d0a7841ec271e37575079 | 1,103 | module KOSapiClient
module Resource
# Fluent request builder for the KOSapi /courses resource. All chainable
# methods mutate the underlying url_builder and return self.
class CoursesBuilder < RequestBuilder
  # Sets the detail level of the returned course representation.
  def detail(level = 1)
    url_builder.set_query_param(:detail, level)
    self
  end

  # Selects courses scheduled in the specified semester(s).
  def sem(*semesters)
    url_builder.set_query_param(:sem, semesters.join(','))
    self
  end
  alias semester sem

  # XXX: This is quite insane, we need some support to DRY subresources.
  # Each entry defines a chainable subresource accessor (e.g. #events) that
  # requires #find to have been called first.
  %w[events exams parallels students instances branches].each do |subresource|
    define_method(subresource) do |semester: 'current'|
      raise "Call #find({course_code}) before asking for #{subresource}" unless id_set?

      url_builder.set_path(id, subresource)
      url_builder.set_query_param(:sem, semester)
      self
    end
  end

  # Selects a single parallel of the previously found course by its code.
  def parallel(code, semester: 'current')
    raise 'Call #find({course_code}) before asking for parallel' unless id_set?

    url_builder.set_path(id, 'parallels', code)
    url_builder.set_query_param(:sem, semester)
    self
  end
end
end
end
| 29.810811 | 88 | 0.646419 |
01bdca1928d33e925dde3df06e41223b1a2dbf62 | 1,992 | class Libsass < Formula
homepage "https://github.com/sass/libsass"
url "https://github.com/sass/libsass.git", :tag => "3.2.1", :revision => "e716caa918d86b3b8598e8ad639943fe6ec8e0ec"
head "https://github.com/sass/libsass.git"
bottle do
cellar :any
sha256 "1abc69ee4fff9ff1b355d3e5709c9a2d369d09ce559e901c5a9e574f24f33874" => :yosemite
sha256 "0f5fd20da6968ef21689d4b6cfbbf329cb6f773d455b9b2b4b66870fa50a2c19" => :mavericks
sha256 "134f46ea84c3d3efd2b6018944a8a4be3766413a3614286d8a35eb819f833153" => :mountain_lion
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
needs :cxx11
# Builds libsass from source (autotools) and installs into the Homebrew
# prefix. Requires a C++11-capable compiler.
def install
  ENV.cxx11
  # Regenerate the build system since we build from a git checkout.
  system "autoreconf", "-fvi"
  system "./configure", "--prefix=#{prefix}", "--disable-silent-rules",
                        "--disable-dependency-tracking"
  system "make", "install"
end
# Compiles and runs a tiny C program against libsass that compiles a nested
# SCSS snippet and checks the emitted CSS.
# This will need to be updated when devel = stable due to API changes.
test do
  (testpath/"test.c").write <<-EOS.undent
    #include <sass_context.h>
    #include <string.h>
    int main()
    {
      const char* source_string = "a { color:blue; &:hover { color:red; } }";
      struct Sass_Data_Context* data_ctx = sass_make_data_context(strdup(source_string));
      struct Sass_Options* options = sass_data_context_get_options(data_ctx);
      sass_option_set_precision(options, 1);
      sass_option_set_source_comments(options, false);
      sass_data_context_set_options(data_ctx, options);
      sass_compile_data_context(data_ctx);
      struct Sass_Context* ctx = sass_data_context_get_context(data_ctx);
      int err = sass_context_get_error_status(ctx);
      if(err != 0) {
        return 1;
      } else {
        return strcmp(sass_context_get_output_string(ctx), "a {\\n  color: blue; }\\n  a:hover {\\n    color: red; }\\n") != 0;
      }
    }
  EOS
  system ENV.cc, "-o", "test", "test.c", "-lsass"
  system "./test"
end
end
| 36.888889 | 129 | 0.665161 |
f7c93c4f113a6092b0a2296b5413385b0bd4349f | 1,606 | #==============================================================================
# Watts
#
# @description: Module for providing functions to work with MassWeightScreen
# objects
# @author: Elisha Lai
# @version: 0.0.1 15/06/2015
#==============================================================================
# Mass weight screen module (mass_weight_screen.rb)
require_relative '../elements/screen_header.rb'
require_relative '../elements/screen_box.rb'
require_relative 'mass-weight-screens/weight_screen.rb'
require_relative 'mass-weight-screens/mass_screen.rb'
# Object definition
# Shoes screen listing the mass & weight formulas; registered at the
# /title_screen/mass_weight_screen URL.
class MassWeightScreen < Shoes
  url('/title_screen/mass_weight_screen',
      :mass_weight_screen)

  # Draws the mass weight screen on the Shoes app window.
  def mass_weight_screen
    @heading = 'Mass & weight'
    # Derive the background image basename from the heading,
    # e.g. 'Mass & weight' -> 'mass_weight'.
    @heading_edited = @heading.downcase.gsub(/[,&]/,'').squeeze(' ').gsub(' ','_')
    background("images/#{@heading_edited}_large.png")

    # Mass weight screen header
    ScreenHeader.new(self, '/title_screen', @@font, @heading)

    # Mass weight screen content
    flow(:height => 640, :width => 1080, :scroll => true) do
      # Left margin offset
      stack(:height => 640, :width => 80) do
      end

      # Content column: one ScreenBox per formula.
      stack(:height => 640, :width => 1000) do
        @formula_set =
          {'Weight' => 'Weight = mass × acceleration of free fall',
           'Mass' => 'Mass = weight / acceleration of free fall'}
        @formula_set.each do |name, formula|
          ScreenBox.new(self, @@theme_colour, @@font, @heading, formula, name)
        end
      end
    end
  end
end
| 31.490196 | 82 | 0.59589 |
62db8b75e1aa7b75e3f931b87ca7bbf5ad42bbcc | 1,477 | require 'optparse'
require 'optparse'

# Parse CLI options for the Rails runner: choose the target environment and
# capture either an inline snippet of Ruby or a path to a script file.
options = { :environment => (ENV['RAILS_ENV'] || "development").dup }
code_or_file = nil

ARGV.clone.options do |opts|
  opts.banner = "Usage: #{$0} [options] ('Some.ruby(code)' or a filename)"

  opts.separator ""

  opts.on("-e", "--environment=name", String,
          "Specifies the environment for the runner to operate under (test/development/production).",
          "Default: development") { |v| options[:environment] = v }

  opts.separator ""

  opts.on("-h", "--help",
          "Show this help message.") { $stderr.puts opts; exit }

  # On non-Windows platforms the runner can also serve as a shebang line.
  if RUBY_PLATFORM !~ /mswin/
    opts.separator ""
    opts.separator "You can also use runner as a shebang line for your scripts like this:"
    opts.separator "-------------------------------------------------------------"
    opts.separator "#!/usr/bin/env #{File.expand_path($0)}"
    opts.separator ""
    opts.separator "Product.find(:all).each { |p| p.price *= 2 ; p.save! }"
    opts.separator "-------------------------------------------------------------"
  end

  # The first non-option argument is the code snippet or file name.
  opts.order! { |o| code_or_file ||= o } rescue retry
end

ARGV.delete(code_or_file)

ENV["RAILS_ENV"] = options[:environment]
RAILS_ENV.replace(options[:environment]) if defined?(RAILS_ENV)

require RAILS_ROOT + '/config/environment'

if code_or_file.nil?
  $stderr.puts "Run '#{$0} -h' for help."
  exit 1
elsif File.exist?(code_or_file)
  # File.exist? — File.exists? was deprecated and removed in Ruby 3.2.
  eval(File.read(code_or_file))
else
  eval(code_or_file)
end
| 30.142857 | 101 | 0.607989 |
18972a3f134f5165cdacc8eb00f5fa298f96cca5 | 553 | # frozen_string_literal: true
module Backup
  module Syncer
    module RSync
      # Syncs the configured paths to a directory on the local machine.
      class Local < Base
        # Runs the sync: ensures the destination exists, then pushes all
        # configured paths with a single rsync invocation.
        def perform!
          log!(:started)
          create_dest_path!

          command = "#{rsync_command} #{paths_to_push} '#{dest_path}'"
          run(command)

          log!(:finished)
        end

        private

        # Destination directory, expanded since this is local and shell-quoted.
        def dest_path
          @dest_path ||= File.expand_path(path)
        end

        # Creates the destination directory (and any parents) if missing.
        def create_dest_path!
          FileUtils.mkdir_p(dest_path)
        end
      end
    end
  end
end
| 18.433333 | 65 | 0.562387 |
01e0b8dfece67681a935f2ee25b76741a316acce | 1,181 | $:.push File.expand_path("../lib", __FILE__)
require "capybara/webkit/version"
# Gem specification for capybara-webkit; the version constant comes from
# lib/capybara/webkit/version (required above).
Gem::Specification.new do |s|
  s.name = "capybara-webkit"
  s.version = Capybara::Driver::Webkit::VERSION.dup
  s.authors = ["thoughtbot", "Joe Ferris", "Matt Horan", "Matt Mongeau",
               "Mike Burns", "Jason Morrison"]
  s.email = "[email protected]"
  s.homepage = "http://github.com/thoughtbot/capybara-webkit"
  s.summary = "Headless Webkit driver for Capybara"
  s.license = 'MIT'

  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- {spec,features}/*`.split("\n")
  s.require_path = "lib"
  # Native extension: builds the webkit_server binary at install time.
  s.extensions = "extconf.rb"
  s.required_ruby_version = ">= 1.9.0"

  s.add_runtime_dependency("capybara", ">= 2.3.0", "< 2.7.0")
  s.add_runtime_dependency("json")

  s.add_development_dependency("rspec", "~> 2.14.0")
  # Sinatra is used by Capybara's TestApp
  s.add_development_dependency("sinatra")
  s.add_development_dependency("mini_magick")
  s.add_development_dependency("rake")
  s.add_development_dependency("appraisal", "~> 0.4.0")
  s.add_development_dependency("selenium-webdriver")
  s.add_development_dependency("launchy")
end
| 33.742857 | 73 | 0.680779 |
e2f54898fbe0ab9c9479dc1cc1ef889651c311b7 | 132 | require 'test_helper'
# Placeholder test case for the DiscriminationType model; no assertions yet.
class DiscriminationTypeTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 16.5 | 54 | 0.727273 |
08544e024417841f904adb0da3f156e1c047bcda | 2,521 | # Copyright (c) 2010-2011, Diaspora Inc. This file is
# licensed under the Affero General Public License version 3 or later. See
# the COPYRIGHT file.
require 'spec_helper'
# Specs for User post helpers: stream placement, aspect resolution, and
# building/updating posts. Uses the shared `alice`/`eve` user fixtures.
describe User do
  before do
    @aspect = alice.aspects.first
    @aspect1 = alice.aspects.create(:name => 'other')
  end

  describe '#add_to_streams' do
    before do
      @params = {:text => "hey", :to => [@aspect.id, @aspect1.id]}
      @post = alice.build_post(:status_message, @params)
      @post.save
      @aspect_ids = @params[:to]
      @aspects = alice.aspects_from_ids(@aspect_ids)
    end

    it 'saves post into visible post ids' do
      lambda {
        alice.add_to_streams(@post, @aspects)
      }.should change{alice.visible_shareables(Post, :by_members_of => @aspects).length}.by(1)
      alice.visible_shareables(Post, :by_members_of => @aspects).should include @post
    end

    it 'saves post into each aspect in aspect_ids' do
      alice.add_to_streams(@post, @aspects)
      @aspect.reload.post_ids.should include @post.id
      @aspect1.reload.post_ids.should include @post.id
    end
  end

  describe '#aspects_from_ids' do
    it 'returns a list of all valid aspects a alice can post to' do
      aspect_ids = Aspect.all.map(&:id)
      alice.aspects_from_ids(aspect_ids).map{|a| a}.should ==
        alice.aspects.map{|a| a} #RSpec matchers ftw
    end

    it "lets you post to your own aspects" do
      alice.aspects_from_ids([@aspect.id]).should == [@aspect]
      alice.aspects_from_ids([@aspect1.id]).should == [@aspect1]
    end

    it 'removes aspects that are not yours' do
      # Eve's aspects should never be resolvable by Alice.
      alice.aspects_from_ids(eve.aspects.first.id).should == []
    end
  end

  describe '#build_post' do
    it 'sets status_message#text' do
      post = alice.build_post(:status_message, :text => "hey", :to => @aspect.id)
      post.text.should == "hey"
    end

    it 'does not save a status_message' do
      post = alice.build_post(:status_message, :text => "hey", :to => @aspect.id)
      post.should_not be_persisted
    end

    it 'does not save a photo' do
      post = alice.build_post(:photo, :user_file => uploaded_photo, :to => @aspect.id)
      post.should_not be_persisted
    end
  end

  describe '#update_post' do
    it 'should update fields' do
      photo = alice.post(:photo, :user_file => uploaded_photo, :text => "Old caption", :to => @aspect.id)
      update_hash = {:text => "New caption"}
      alice.update_post(photo, update_hash)
      photo.text.should match(/New/)
    end
  end
end
| 32.320513 | 105 | 0.654899 |
# Adds the products table with pricing, sizing, dietary flags and image paths.
class CreateProducts < ActiveRecord::Migration
  def change
    create_table :products do |table|
      table.string :title
      table.text :description
      table.decimal :price
      table.integer :size
      table.boolean :spicy
      table.boolean :veg
      table.boolean :best_offer
      table.string :image_path
      table.string :small_image_path

      table.timestamps null: false
    end
  end
end
| 20.222222 | 46 | 0.648352 |
# Adds the ellis_acts table linking buildings and property owners to a filing.
class CreateEllisActs < ActiveRecord::Migration
  def change
    create_table :ellis_acts do |table|
      table.belongs_to :building
      table.belongs_to :property_owner
      table.string :filing_number

      table.timestamps null: false
    end
  end
end
| 20.083333 | 47 | 0.697095 |
18ef7e90e6800f30a6befc841c48d944b2115e16 | 9,647 | # encoding: UTF-8
require 'spec_helper'
require 'yt/models/playlist'
describe Yt::Playlist, :device_app do
subject(:playlist) { Yt::Playlist.new id: id, auth: $account }
context 'given an existing playlist' do
let(:id) { 'PLSWYkYzOrPMT9pJG5St5G0WDalhRzGkU4' }
it 'returns valid metadata' do
expect(playlist.title).to be_a String
expect(playlist.description).to be_a String
expect(playlist.thumbnail_url).to be_a String
expect(playlist.published_at).to be_a Time
expect(playlist.tags).to be_an Array
expect(playlist.channel_id).to be_a String
expect(playlist.channel_title).to be_a String
expect(playlist.privacy_status).to be_a String
end
describe '.playlist_items' do
let(:item) { playlist.playlist_items.first }
specify 'returns the playlist item with the complete snippet' do
expect(item).to be_a Yt::PlaylistItem
expect(item.snippet).to be_complete
expect(item.position).not_to be_nil
end
specify 'does not eager-load the attributes of the item’s video' do
expect(item.video.instance_variable_defined? :@snippet).to be false
expect(item.video.instance_variable_defined? :@status).to be false
expect(item.video.instance_variable_defined? :@statistics_set).to be false
end
end
describe '.playlist_items.includes(:video)' do
let(:item) { playlist.playlist_items.includes(:video).first }
specify 'eager-loads the snippet, status and statistics of each video' do
expect(item.video.instance_variable_defined? :@snippet).to be true
expect(item.video.instance_variable_defined? :@status).to be true
expect(item.video.instance_variable_defined? :@statistics_set).to be true
end
end
end
context 'given a playlist that only includes other people’s private or deleted videos' do
let(:id) { 'PLsnYEvcCzABOsJdehqkIDhwz8CPGWzX59' }
describe '.playlist_items.includes(:video)' do
let(:items) { playlist.playlist_items.includes(:video).map{|i| i} }
specify 'returns nil (without running an infinite loop)' do
expect(items.size).to be 2
end
end
end
context 'given an unknown playlist' do
let(:id) { 'not-a-playlist-id' }
it { expect{playlist.snippet}.to raise_error Yt::Errors::NoItems }
it { expect{playlist.status}.to raise_error Yt::Errors::NoItems }
end
context 'given someone else’s playlist' do
let(:id) { 'PLSWYkYzOrPMT9pJG5St5G0WDalhRzGkU4' }
let(:video_id) { 'MESycYJytkU' }
it { expect{playlist.delete}.to fail.with 'forbidden' }
it { expect{playlist.update}.to fail.with 'forbidden' }
it { expect{playlist.add_video! video_id}.to raise_error Yt::Errors::RequestError }
it { expect{playlist.delete_playlist_items}.to raise_error Yt::Errors::RequestError }
end
context 'given one of my own playlists that I want to delete' do
before(:all) { @my_playlist = $account.create_playlist title: "Yt Test Delete Playlist #{rand}" }
let(:id) { @my_playlist.id }
it { expect(playlist.delete).to be true }
end
context 'given one of my own playlists that I want to update' do
before(:all) { @my_playlist = $account.create_playlist title: "Yt Test Update Playlist #{rand}" }
after(:all) { @my_playlist.delete }
let(:id) { @my_playlist.id }
let!(:old_title) { @my_playlist.title }
let!(:old_privacy_status) { @my_playlist.privacy_status }
let(:update) { @my_playlist.update attrs }
context 'given I update the title' do
# NOTE: The use of UTF-8 characters is to test that we can pass up to
# 50 characters, independently of their representation
let(:attrs) { {title: "Yt Example Update Playlist #{rand} - ®•♡❥❦❧☙"} }
specify 'only updates the title' do
expect(update).to be true
expect(@my_playlist.title).not_to eq old_title
expect(@my_playlist.privacy_status).to eq old_privacy_status
end
end
context 'given I update the description' do
let!(:old_description) { @my_playlist.description }
let(:attrs) { {description: "Yt Example Description #{rand} - ®•♡❥❦❧☙"} }
specify 'only updates the description' do
expect(update).to be true
expect(@my_playlist.description).not_to eq old_description
expect(@my_playlist.title).to eq old_title
expect(@my_playlist.privacy_status).to eq old_privacy_status
end
end
context 'given I update the tags' do
let!(:old_tags) { @my_playlist.tags }
let(:attrs) { {tags: ["Yt Test Tag #{rand}"]} }
specify 'only updates the tag' do
expect(update).to be true
expect(@my_playlist.tags).not_to eq old_tags
expect(@my_playlist.title).to eq old_title
expect(@my_playlist.privacy_status).to eq old_privacy_status
end
end
context 'given I update title, description and/or tags using angle brackets' do
let(:attrs) { {title: "Yt Test < >", description: '< >', tags: ['<tag>']} }
specify 'updates them replacing angle brackets with similar unicode characters accepted by YouTube' do
expect(update).to be true
expect(playlist.title).to eq 'Yt Test ‹ ›'
expect(playlist.description).to eq '‹ ›'
expect(playlist.tags).to eq ['‹tag›']
end
end
context 'given I update the privacy status' do
let!(:new_privacy_status) { old_privacy_status == 'private' ? 'unlisted' : 'private' }
context 'passing the parameter in underscore syntax' do
let(:attrs) { {privacy_status: new_privacy_status} }
specify 'only updates the privacy status' do
expect(update).to be true
expect(@my_playlist.privacy_status).not_to eq old_privacy_status
expect(@my_playlist.title).to eq old_title
end
end
context 'passing the parameter in camel-case syntax' do
let(:attrs) { {privacyStatus: new_privacy_status} }
specify 'only updates the privacy status' do
expect(update).to be true
expect(@my_playlist.privacy_status).not_to eq old_privacy_status
expect(@my_playlist.title).to eq old_title
end
end
end
context 'given an existing video' do
let(:video_id) { 'MESycYJytkU' }
describe 'can be added' do
it { expect(playlist.add_video video_id).to be_a Yt::PlaylistItem }
it { expect{playlist.add_video video_id}.to change{playlist.playlist_items.count}.by(1) }
it { expect(playlist.add_video! video_id).to be_a Yt::PlaylistItem }
it { expect{playlist.add_video! video_id}.to change{playlist.playlist_items.count}.by(1) }
it { expect(playlist.add_video(video_id, position: 0).position).to be 0 }
end
# NOTE: This test sounds redundant, but it’s actually a reflection of
# another irrational behavior of YouTube API. In short, if you add a new
# video to a playlist, the returned item does not have the "position"
# information. You need an extra call to get it. When YouTube fixes this
# behavior, this test (and related code) will go away.
describe 'adding the video' do
let(:item) { playlist.add_video video_id }
specify 'returns an item without its position' do
expect(item.snippet).not_to be_complete
expect(item.position).not_to be_nil # after reloading
end
end
describe 'can be removed' do
before { playlist.add_video video_id }
it { expect(playlist.delete_playlist_items.uniq).to eq [true] }
it { expect{playlist.delete_playlist_items}.to change{playlist.playlist_items.count} }
end
end
context 'given an unknown video' do
let(:video_id) { 'not-a-video' }
describe 'cannot be added' do
it { expect(playlist.add_video video_id).to be_nil }
it { expect{playlist.add_video video_id}.not_to change{playlist.playlist_items.count} }
it { expect{playlist.add_video! video_id}.to fail.with 'videoNotFound' }
end
end
context 'given a video of a terminated account' do
let(:video_id) { 'kDCpdKeTe5g' }
describe 'cannot be added' do
it { expect(playlist.add_video video_id).to be_nil }
it { expect{playlist.add_video video_id}.not_to change{playlist.playlist_items.count} }
it { expect{playlist.add_video! video_id}.to fail.with 'forbidden' }
end
end
context 'given one existing and one unknown video' do
let(:video_ids) { ['MESycYJytkU', 'not-a-video'] }
describe 'only one can be added' do
it { expect(playlist.add_videos(video_ids).length).to eq 2 }
it { expect{playlist.add_videos video_ids}.to change{playlist.playlist_items.count}.by(1) }
it { expect{playlist.add_videos! video_ids}.to fail.with 'videoNotFound' }
end
end
end
context 'given one of my own playlists that I want to get reports for' do
let(:id) { $account.channel.playlists.first.id }
it 'returns valid reports for playlist-related metrics' do
expect{playlist.views}.not_to raise_error
expect{playlist.playlist_starts}.not_to raise_error
expect{playlist.average_time_in_playlist}.not_to raise_error
expect{playlist.views_per_playlist_start}.not_to raise_error
expect{playlist.views_on 3.days.ago}.not_to raise_error
expect{playlist.playlist_starts_on 3.days.ago}.not_to raise_error
expect{playlist.average_time_in_playlist_on 3.days.ago}.not_to raise_error
expect{playlist.views_per_playlist_start_on 3.days.ago}.not_to raise_error
end
end
end | 39.536885 | 108 | 0.682077 |
61e7ece90d2462f24cf2b77d86a18d0bb24508f7 | 3,312 | module ActionController
module ConditionalGet
extend ActiveSupport::Concern
include RackDelegation
include Head
# Sets the etag, last_modified, or both on the response and renders a
# "304 Not Modified" response if the request is already fresh.
#
# Parameters:
# * <tt>:etag</tt>
# * <tt>:last_modified</tt>
# * <tt>:public</tt> By default the Cache-Control header is private, set this to true if you want your application to be cachable by other devices (proxy caches).
#
# Example:
#
# def show
# @article = Article.find(params[:id])
# fresh_when(:etag => @article, :last_modified => @article.created_at.utc, :public => true)
# end
#
# This will render the show template if the request isn't sending a matching etag or
# If-Modified-Since header and just a "304 Not Modified" response if there's a match.
#
def fresh_when(options)
options.assert_valid_keys(:etag, :last_modified, :public)
response.etag = options[:etag] if options[:etag]
response.last_modified = options[:last_modified] if options[:last_modified]
response.cache_control[:public] = true if options[:public]
head :not_modified if request.fresh?(response)
end
# Sets the etag and/or last_modified on the response and checks it against
# the client request. If the request doesn't match the options provided, the
# request is considered stale and should be generated from scratch. Otherwise,
# it's fresh and we don't need to generate anything and a reply of "304 Not Modified" is sent.
#
# Parameters:
# * <tt>:etag</tt>
# * <tt>:last_modified</tt>
# * <tt>:public</tt> By default the Cache-Control header is private, set this to true if you want your application to be cachable by other devices (proxy caches).
#
# Example:
#
# def show
# @article = Article.find(params[:id])
#
# if stale?(:etag => @article, :last_modified => @article.created_at.utc)
# @statistics = @article.really_expensive_call
# respond_to do |format|
# # all the supported formats
# end
# end
# end
def stale?(options)
fresh_when(options)
!request.fresh?(response)
end
# Sets a HTTP 1.1 Cache-Control header. Defaults to issuing a "private" instruction, so that
# intermediate caches shouldn't cache the response.
#
# Examples:
# expires_in 20.minutes
# expires_in 3.hours, :public => true
# expires in 3.hours, 'max-stale' => 5.hours, :public => true
#
# This method will overwrite an existing Cache-Control header.
# See http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html for more possibilities.
def expires_in(seconds, options = {}) #:doc:
response.cache_control.merge!(:max_age => seconds, :public => options.delete(:public))
options.delete(:private)
response.cache_control[:extras] = options.map {|k,v| "#{k}=#{v}"}
end
# Sets a HTTP 1.1 Cache-Control header of "no-cache" so no caching should occur by the browser or
# intermediate caches (like caching proxy servers).
def expires_now #:doc:
response.cache_control.replace(:no_cache => true)
end
end
end
| 38.068966 | 166 | 0.652174 |
6a4d1c8895180c1a0d9906fa14daef47b6cd860a | 3,857 | require 'json'
require 'randexp'
require 'hashie'
require 'active_support/concern'
require 'digest/sha1'
module Attributor
require_relative 'attributor/dumpable'
require_relative 'attributor/exceptions'
require_relative 'attributor/attribute'
require_relative 'attributor/type'
require_relative 'attributor/dsl_compiler'
require_relative 'attributor/hash_dsl_compiler'
require_relative 'attributor/attribute_resolver'
require_relative 'attributor/smart_attribute_selector'
require_relative 'attributor/example_mixin'
require_relative 'attributor/extensions/randexp'
# hierarchical separator string for composing human readable attributes
SEPARATOR = '.'.freeze
DEFAULT_ROOT_CONTEXT = ['$'].freeze
# @param type [Class] The class of the type to resolve
#
def self.resolve_type(attr_type, options = {}, constructor_block = nil)
klass = self.find_type(attr_type)
return klass.construct(constructor_block, **options) if klass.constructable?
raise AttributorException, "Type: #{attr_type} does not support anonymous generation" if constructor_block
klass
end
def self.find_type(attr_type)
return attr_type if attr_type < Attributor::Type
name = attr_type.name.split('::').last # TOO EXPENSIVE?
klass = const_get(name) if const_defined?(name)
raise AttributorException, "Could not find class with name #{name}" unless klass
raise AttributorException, "Could not find attribute type for: #{name} [klass: #{klass.name}]" unless klass < Attributor::Type
klass
end
def self.type_name(type)
return type_name(type.class) unless type.is_a?(::Class)
type.ancestors.find { |k| k.name && !k.name.empty? }.name
end
def self.humanize_context(context)
return '' unless context
context = Array(context) if context.is_a? ::String
unless context.is_a? Enumerable
raise "INVALID CONTEXT!!! (got: #{context.inspect})"
end
begin
return context.join('.')
rescue e
raise "Error creating context string: #{e.message}"
end
end
def self.errorize_value(value)
inspection = value.inspect
inspection = inspection[0..500] + '...[truncated]' if inspection.size > 500
inspection
end
def self.recursive_to_h(val)
if val.is_a? Array
val.map { |v| recursive_to_h(v) }
elsif val.nil?
nil
elsif val.respond_to?(:to_h)
val.to_h.each_with_object({}) do |(name, inner_val), hash|
hash[name] = recursive_to_h(inner_val)
end
else
val
end
end
MODULE_PREFIX = 'Attributor::'.freeze
MODULE_PREFIX_REGEX = ::Regexp.new(MODULE_PREFIX)
require_relative 'attributor/families/numeric'
require_relative 'attributor/families/temporal'
require_relative 'attributor/types/container'
require_relative 'attributor/types/object'
require_relative 'attributor/types/bigdecimal'
require_relative 'attributor/types/integer'
require_relative 'attributor/types/string'
require_relative 'attributor/types/symbol'
require_relative 'attributor/types/boolean'
require_relative 'attributor/types/time'
require_relative 'attributor/types/date'
require_relative 'attributor/types/date_time'
require_relative 'attributor/types/regexp'
require_relative 'attributor/types/float'
require_relative 'attributor/types/collection'
require_relative 'attributor/types/hash'
require_relative 'attributor/types/model'
require_relative 'attributor/types/struct'
require_relative 'attributor/types/class'
require_relative 'attributor/types/polymorphic'
require_relative 'attributor/types/csv'
require_relative 'attributor/types/ids'
# TODO: move these to 'optional types' or 'extra types'... location
require_relative 'attributor/types/tempfile'
require_relative 'attributor/types/file_upload'
require_relative 'attributor/types/uri'
end
| 31.104839 | 130 | 0.745917 |
1d70eeca40c9ca4b1c156e974b5a7c72513171ea | 18,854 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
class PortMirroringSession
# Link to this resource
attr_accessor :_self
# The server will populate this field when returing the resource. Ignored on PUT and POST.
attr_accessor :_links
# Schema for this resource
attr_accessor :_schema
# The _revision property describes the current revision of the resource. To prevent clients from overwriting each other's changes, PUT operations must include the current _revision of the resource, which clients should obtain by issuing a GET operation. If the _revision provided in a PUT request is missing or stale, the operation will be rejected.
attr_accessor :_revision
# Indicates system owned resource
attr_accessor :_system_owned
# Defaults to ID if not set
attr_accessor :display_name
# Description of this resource
attr_accessor :description
# Opaque identifiers meaningful to the API user
attr_accessor :tags
# ID of the user who created this resource
attr_accessor :_create_user
# Protection status is one of the following: PROTECTED - the client who retrieved the entity is not allowed to modify it. NOT_PROTECTED - the client who retrieved the entity is allowed to modify it REQUIRE_OVERRIDE - the client who retrieved the entity is a super user and can modify it, but only when providing the request header X-Allow-Overwrite=true. UNKNOWN - the _protection field could not be determined for this entity.
attr_accessor :_protection
# Timestamp of resource creation
attr_accessor :_create_time
# Timestamp of last modification
attr_accessor :_last_modified_time
# ID of the user who last modified this resource
attr_accessor :_last_modified_user
# Unique identifier of this resource
attr_accessor :id
# The type of this resource.
attr_accessor :resource_type
# Port mirroring session direction
attr_accessor :direction
# Mirror sources
attr_accessor :mirror_sources
# Only for Remote SPAN Port Mirror.
attr_accessor :encapsulation_vlan_id
# If this property is unset, this session will be treated as LocalPortMirrorSession.
attr_accessor :session_type
# If this property is set, the packet will be truncated to the provided length. If this property is unset, entire packet will be mirrored.
attr_accessor :snap_length
# An array of 5-tuples used to filter packets for the mirror session, if not provided, all the packets will be mirrored.
attr_accessor :port_mirroring_filters
# Only for Remote SPAN Port Mirror. Whether to preserve original VLAN.
attr_accessor :preserve_original_vlan
# Mirror destination
attr_accessor :mirror_destination
class EnumAttributeValidator
attr_reader :datatype
attr_reader :allowable_values
def initialize(datatype, allowable_values)
@allowable_values = allowable_values.map do |value|
case datatype.to_s
when /Integer/i
value.to_i
when /Float/i
value.to_f
else
value
end
end
end
def valid?(value)
!value || allowable_values.include?(value)
end
end
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'_self' => :'_self',
:'_links' => :'_links',
:'_schema' => :'_schema',
:'_revision' => :'_revision',
:'_system_owned' => :'_system_owned',
:'display_name' => :'display_name',
:'description' => :'description',
:'tags' => :'tags',
:'_create_user' => :'_create_user',
:'_protection' => :'_protection',
:'_create_time' => :'_create_time',
:'_last_modified_time' => :'_last_modified_time',
:'_last_modified_user' => :'_last_modified_user',
:'id' => :'id',
:'resource_type' => :'resource_type',
:'direction' => :'direction',
:'mirror_sources' => :'mirror_sources',
:'encapsulation_vlan_id' => :'encapsulation_vlan_id',
:'session_type' => :'session_type',
:'snap_length' => :'snap_length',
:'port_mirroring_filters' => :'port_mirroring_filters',
:'preserve_original_vlan' => :'preserve_original_vlan',
:'mirror_destination' => :'mirror_destination'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'_self' => :'SelfResourceLink',
:'_links' => :'Array<ResourceLink>',
:'_schema' => :'String',
:'_revision' => :'Integer',
:'_system_owned' => :'BOOLEAN',
:'display_name' => :'String',
:'description' => :'String',
:'tags' => :'Array<Tag>',
:'_create_user' => :'String',
:'_protection' => :'String',
:'_create_time' => :'Integer',
:'_last_modified_time' => :'Integer',
:'_last_modified_user' => :'String',
:'id' => :'String',
:'resource_type' => :'String',
:'direction' => :'String',
:'mirror_sources' => :'Array<MirrorSource>',
:'encapsulation_vlan_id' => :'Integer',
:'session_type' => :'String',
:'snap_length' => :'Integer',
:'port_mirroring_filters' => :'Array<PortMirroringFilter>',
:'preserve_original_vlan' => :'BOOLEAN',
:'mirror_destination' => :'MirrorDestination'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
if attributes.has_key?(:'_self')
self._self = attributes[:'_self']
end
if attributes.has_key?(:'_links')
if (value = attributes[:'_links']).is_a?(Array)
self._links = value
end
end
if attributes.has_key?(:'_schema')
self._schema = attributes[:'_schema']
end
if attributes.has_key?(:'_revision')
self._revision = attributes[:'_revision']
end
if attributes.has_key?(:'_system_owned')
self._system_owned = attributes[:'_system_owned']
end
if attributes.has_key?(:'display_name')
self.display_name = attributes[:'display_name']
end
if attributes.has_key?(:'description')
self.description = attributes[:'description']
end
if attributes.has_key?(:'tags')
if (value = attributes[:'tags']).is_a?(Array)
self.tags = value
end
end
if attributes.has_key?(:'_create_user')
self._create_user = attributes[:'_create_user']
end
if attributes.has_key?(:'_protection')
self._protection = attributes[:'_protection']
end
if attributes.has_key?(:'_create_time')
self._create_time = attributes[:'_create_time']
end
if attributes.has_key?(:'_last_modified_time')
self._last_modified_time = attributes[:'_last_modified_time']
end
if attributes.has_key?(:'_last_modified_user')
self._last_modified_user = attributes[:'_last_modified_user']
end
if attributes.has_key?(:'id')
self.id = attributes[:'id']
end
if attributes.has_key?(:'resource_type')
self.resource_type = attributes[:'resource_type']
end
if attributes.has_key?(:'direction')
self.direction = attributes[:'direction']
end
if attributes.has_key?(:'mirror_sources')
if (value = attributes[:'mirror_sources']).is_a?(Array)
self.mirror_sources = value
end
end
if attributes.has_key?(:'encapsulation_vlan_id')
self.encapsulation_vlan_id = attributes[:'encapsulation_vlan_id']
end
if attributes.has_key?(:'session_type')
self.session_type = attributes[:'session_type']
else
self.session_type = 'LocalPortMirrorSession'
end
if attributes.has_key?(:'snap_length')
self.snap_length = attributes[:'snap_length']
end
if attributes.has_key?(:'port_mirroring_filters')
if (value = attributes[:'port_mirroring_filters']).is_a?(Array)
self.port_mirroring_filters = value
end
end
if attributes.has_key?(:'preserve_original_vlan')
self.preserve_original_vlan = attributes[:'preserve_original_vlan']
else
self.preserve_original_vlan = false
end
if attributes.has_key?(:'mirror_destination')
self.mirror_destination = attributes[:'mirror_destination']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
if !@display_name.nil? && @display_name.to_s.length > 255
invalid_properties.push('invalid value for "display_name", the character length must be smaller than or equal to 255.')
end
if [email protected]? && @description.to_s.length > 1024
invalid_properties.push('invalid value for "description", the character length must be smaller than or equal to 1024.')
end
if @direction.nil?
invalid_properties.push('invalid value for "direction", direction cannot be nil.')
end
if @mirror_sources.nil?
invalid_properties.push('invalid value for "mirror_sources", mirror_sources cannot be nil.')
end
if !@snap_length.nil? && @snap_length > 65535
invalid_properties.push('invalid value for "snap_length", must be smaller than or equal to 65535.')
end
if !@snap_length.nil? && @snap_length < 60
invalid_properties.push('invalid value for "snap_length", must be greater than or equal to 60.')
end
if @mirror_destination.nil?
invalid_properties.push('invalid value for "mirror_destination", mirror_destination cannot be nil.')
end
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
return false if !@display_name.nil? && @display_name.to_s.length > 255
return false if [email protected]? && @description.to_s.length > 1024
return false if @direction.nil?
direction_validator = EnumAttributeValidator.new('String', ['INGRESS', 'EGRESS', 'BIDIRECTIONAL'])
return false unless direction_validator.valid?(@direction)
return false if @mirror_sources.nil?
session_type_validator = EnumAttributeValidator.new('String', ['LogicalPortMirrorSession', 'UplinkPortMirrorSession', 'RspanSrcMirrorSession', 'RspanDstMirrorSession', 'LocalPortMirrorSession', 'LogicalLocalPortMirrorSession', 'L3PortMirrorSession'])
return false unless session_type_validator.valid?(@session_type)
return false if !@snap_length.nil? && @snap_length > 65535
return false if !@snap_length.nil? && @snap_length < 60
return false if @mirror_destination.nil?
true
end
# Custom attribute writer method with validation
# @param [Object] display_name Value to be assigned
def display_name=(display_name)
if !display_name.nil? && display_name.to_s.length > 255
fail ArgumentError, 'invalid value for "display_name", the character length must be smaller than or equal to 255.'
end
@display_name = display_name
end
# Custom attribute writer method with validation
# @param [Object] description Value to be assigned
def description=(description)
if !description.nil? && description.to_s.length > 1024
fail ArgumentError, 'invalid value for "description", the character length must be smaller than or equal to 1024.'
end
@description = description
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] direction Object to be assigned
def direction=(direction)
validator = EnumAttributeValidator.new('String', ['INGRESS', 'EGRESS', 'BIDIRECTIONAL'])
unless validator.valid?(direction)
fail ArgumentError, 'invalid value for "direction", must be one of #{validator.allowable_values}.'
end
@direction = direction
end
# Custom attribute writer method checking allowed values (enum).
# @param [Object] session_type Object to be assigned
def session_type=(session_type)
validator = EnumAttributeValidator.new('String', ['LogicalPortMirrorSession', 'UplinkPortMirrorSession', 'RspanSrcMirrorSession', 'RspanDstMirrorSession', 'LocalPortMirrorSession', 'LogicalLocalPortMirrorSession', 'L3PortMirrorSession'])
unless validator.valid?(session_type)
fail ArgumentError, 'invalid value for "session_type", must be one of #{validator.allowable_values}.'
end
@session_type = session_type
end
# Custom attribute writer method with validation
# @param [Object] snap_length Value to be assigned
def snap_length=(snap_length)
if !snap_length.nil? && snap_length > 65535
fail ArgumentError, 'invalid value for "snap_length", must be smaller than or equal to 65535.'
end
if !snap_length.nil? && snap_length < 60
fail ArgumentError, 'invalid value for "snap_length", must be greater than or equal to 60.'
end
@snap_length = snap_length
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
_self == o._self &&
_links == o._links &&
_schema == o._schema &&
_revision == o._revision &&
_system_owned == o._system_owned &&
display_name == o.display_name &&
description == o.description &&
tags == o.tags &&
_create_user == o._create_user &&
_protection == o._protection &&
_create_time == o._create_time &&
_last_modified_time == o._last_modified_time &&
_last_modified_user == o._last_modified_user &&
id == o.id &&
resource_type == o.resource_type &&
direction == o.direction &&
mirror_sources == o.mirror_sources &&
encapsulation_vlan_id == o.encapsulation_vlan_id &&
session_type == o.session_type &&
snap_length == o.snap_length &&
port_mirroring_filters == o.port_mirroring_filters &&
preserve_original_vlan == o.preserve_original_vlan &&
mirror_destination == o.mirror_destination
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[_self, _links, _schema, _revision, _system_owned, display_name, description, tags, _create_user, _protection, _create_time, _last_modified_time, _last_modified_user, id, resource_type, direction, mirror_sources, encapsulation_vlan_id, session_type, snap_length, port_mirroring_filters, preserve_original_vlan, mirror_destination].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
temp_model = NSXT.const_get(type).new
temp_model.build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 35.373358 | 508 | 0.649093 |
ab08e208963f1f7108073fe58d2d4c9bbe33e447 | 1,234 | require "spec_helper"
describe Lionactor::LocationAmenity do
  # Build a LocationAmenity from the first amenity embedded in the MML JSON
  # fixture before every example (MML is presumably defined in spec_helper —
  # confirm).
  before :each do
    data = JSON.parse(MML)["location"]["_embedded"]["amenities"].first
    @local = Lionactor::LocationAmenity.new(data, nil)
  end
  # Attributes exposed by attribute auto-generation from the payload.
  describe "via automatic methods" do
    describe "#accessible" do
      it "is true" do
        expect(@local.accessible).to eq true
      end
    end
    describe "#accessibility_note" do
      it "does not have an accessibility note" do
        expect(@local.accessibility_note).to be_nil
      end
    end
    describe "#staff_assistance" do
      it "does not require staff assistance" do
        pending "staff_assistance should be boolean"
        expect(@local.staff_assistance).to be false
      end
    end
    # NOTE(review): the describe says #local_rank but the assertions call
    # #location_rank — confirm the intended method name.
    describe "#local_rank" do
      it "should be an integer" do
        # NOTE(review): Fixnum is removed in modern Ruby (unified into
        # Integer); this matcher ties the suite to Ruby < 3.2.
        expect(@local.location_rank).to be_an_instance_of Fixnum
      end
      it "should be 1" do
        expect(@local.location_rank).to eq 1
      end
    end
  end
  describe "#amenity" do
    it "returns an Amenity object" do
      expect(@local.amenity).to be_an_instance_of Lionactor::Amenity
    end
    it "should be have a name" do
      expect(@local.amenity.name).to eq "Computers for public use"
    end
  end
end
| 24.196078 | 70 | 0.657212 |
91424f843c0c8e33792e7d1df49188c253d15895 | 786 | # frozen_string_literal: true
module CallCenter
  module Clients
    # Schema and validation rules for the {CallCenter::Operations::Clients::Create} operation
    class ClientContract < CallCenter::Validation::ApplicationContract
      # @!method call(opts)
      #   @param [Hash] opts the parameters to validate using this contract
      #   @option opts [String] :access_key_id (required)
      #   @option opts [String] :secret_access_key (required)
      #   @return [Dry::Monads::Result::Success] if params pass validation
      #   @return [Dry::Monads::Result::Failure] if params fail validation
      #
      # NOTE(review): earlier docs also listed an Integer :max_results option,
      # but the schema below does not validate it — confirm whether the schema
      # or the documentation is out of date.
      params do
        required(:access_key_id).value(:string)
        required(:secret_access_key).value(:string)
      end
    end
  end
end
| 32.75 | 93 | 0.693384 |
7997363e8b51824c70ed853d575a27d79c8e4bf4 | 373 | # frozen_string_literal: true
module ClickUp
# Base error for the ClickUp client.
#
# NOTE(review): +super+ is never called in #initialize, so the reader below
# intentionally shadows StandardError#message — confirm this is deliberate.
class ClickUpError < StandardError
  attr_reader :message

  # @param msg [String, nil] human-readable error description
  def initialize(msg = nil)
    @message = msg
  end

  # @return [String] the message, or "" when none was given
  def to_s
    # Redundant interpolation ("#{message}") replaced with an explicit
    # to_s call, which produces the same value.
    message.to_s
  end
end
# Raised when required parameters are missing; remembers which parameters
# were involved via the +params+ reader.
class ParamRequiredError < ClickUpError
  attr_reader :params

  def initialize(msg, *params)
    @params = params
    super(msg)
  end
end
end | 15.541667 | 41 | 0.646113 |
39b2953995b3af3e7eb57c614d8791d6d8791857 | 3,974 | # frozen_string_literal: true
require "spec_helper"
require "dependabot/terraform/file_fetcher"
require_common_spec "file_fetchers/shared_examples_for_file_fetchers"
RSpec.describe Dependabot::Terraform::FileFetcher do
it_behaves_like "a dependency file fetcher"
let(:source) do
Dependabot::Source.new(
provider: "github",
repo: "gocardless/bump",
directory: directory
)
end
let(:file_fetcher_instance) do
described_class.new(source: source, credentials: credentials)
end
let(:directory) { "/" }
let(:github_url) { "https://api.github.com/" }
let(:url) { github_url + "repos/gocardless/bump/contents/" }
let(:credentials) do
[{
"type" => "git_source",
"host" => "github.com",
"username" => "x-access-token",
"password" => "token"
}]
end
before { allow(file_fetcher_instance).to receive(:commit).and_return("sha") }
context "with a Terraform file" do
before do
stub_request(:get, url + "?ref=sha").
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_terraform_repo.json"),
headers: { "content-type" => "application/json" }
)
%w(main.tf outputs.tf variables.tf).each do |nm|
stub_request(:get, File.join(url, "#{nm}?ref=sha")).
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_terraform_file.json"),
headers: { "content-type" => "application/json" }
)
end
end
it "fetches the Terraform files" do
expect(file_fetcher_instance.files.map(&:name)).
to match_array(%w(main.tf outputs.tf variables.tf))
end
end
context "with a HCL based terragrunt file" do
before do
stub_request(:get, url + "?ref=sha").
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_terragrunt_hcl_repo.json"),
headers: { "content-type" => "application/json" }
)
stub_request(:get, File.join(url, "terragrunt.hcl?ref=sha")).
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_terraform_file.json"),
headers: { "content-type" => "application/json" }
)
end
it "fetches the Terragrunt file" do
expect(file_fetcher_instance.files.map(&:name)).
to match_array(%w(terragrunt.hcl))
end
end
context "with a lockfile" do
before do
stub_request(:get, url + "?ref=sha").
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_lockfile_repo.json"),
headers: { "content-type" => "application/json" }
)
stub_request(:get, File.join(url, ".terraform.lock.hcl?ref=sha")).
with(headers: { "Authorization" => "token token" }).
to_return(
status: 200,
body: fixture("github", "contents_terraform_file.json"),
headers: { "content-type" => "application/json" }
)
end
it "fetches the lockfile" do
expect(file_fetcher_instance.files.map(&:name)).
to match_array(%w(.terraform.lock.hcl))
end
end
context "with a directory that doesn't exist" do
let(:directory) { "/nonexistent" }
before do
stub_request(:get, url + "nonexistent?ref=sha").
with(headers: { "Authorization" => "token token" }).
to_return(
status: 404,
body: fixture("github", "not_found.json"),
headers: { "content-type" => "application/json" }
)
end
it "raises a helpful error" do
expect { file_fetcher_instance.files }.
to raise_error(Dependabot::DependencyFileNotFound)
end
end
end
| 30.569231 | 79 | 0.601409 |
e84748209e875cfd78062e16b6a24787ddb8b3ce | 1,171 | module Xpose
# Normalizes and validates the options given to an exposure declaration,
# backing them with an OpenStruct that unknown messages are delegated to.
class Configuration
  # Known options and their defaults; any other key is rejected unless the
  # caller passes +permissive: true+.
  DEFAULT_VALUES = {
    name: nil,
    value: nil,
    decorate: true,
    decorator: :infer,
    scope: :all
    # source: :infer (:infer, :method, : .call : ...)
  }.freeze

  # @param options [Hash] configuration options; see DEFAULT_VALUES
  # @raise [UnknownOptionsError] when non-permissive and unknown keys are given
  # @raise [MissingOptionsError] when :name is blank
  def initialize(**options)
    @options = options
    # NOTE(review): :permissive itself is not in DEFAULT_VALUES, so an
    # explicit `permissive: false` raises UnknownOptionsError — confirm
    # whether that is intended.
    permit_options! unless options.fetch(:permissive, false)
    build_config
    # NOTE(review): build_internal_defaults is not defined in this class; the
    # call is swallowed by method_missing and forwarded to the OpenStruct
    # (returning nil). Confirm it is defined elsewhere.
    build_internal_defaults
  end

  # Delegates unknown messages to the underlying OpenStruct config.
  def method_missing(method, *args, &block)
    config.send(method, *args, &block)
  end

  # Keep respond_to? consistent with the method_missing delegation above.
  def respond_to_missing?(method, include_private = false)
    config.respond_to?(method) || super
  end

  def model
    # NOTE(review): capitalize only upcases the first letter; multi-word
    # names would need camelize — confirm names are single words.
    config.singularized_name.capitalize.constantize
  end

  private

  attr_accessor :config

  # Raises unless every supplied option key is declared in DEFAULT_VALUES.
  def permit_options!
    (@options.keys - DEFAULT_VALUES.keys).tap do |unknown_keys|
      raise UnknownOptionsError.new(unknown_keys) unless unknown_keys.empty?
    end
  end

  # Merges defaults with the supplied options and derives the name variants
  # (:name as symbol, ivar name, singular/plural forms).
  def build_config
    @config = OpenStruct.new(DEFAULT_VALUES.merge(@options)).tap do |c|
      raise MissingOptionsError.new(:name) if c.name.blank?
      c.name = c.name.to_sym
      c.ivar_name = :"@#{c.name}"
      c.singularized_name = c.name.to_s.singularize
      c.pluralized_name = c.singularized_name.pluralize
    end
  end
end
end
| 23.42 | 78 | 0.63877 |
626757f13db9b1d8dc61eb8fde9d3716777abb38 | 5,250 | require "rails_helper"
# Service specs for UpdateDocument. A document is first attached via
# AddDocument, then .call is exercised with missing parameters, with
# unchanged metadata (no-op path), and with changed metadata (update path),
# across Investigation/Product/Business parents.
RSpec.describe UpdateDocument, :with_stubbed_opensearch, :with_test_queue_adapter do
  let(:investigation) { create(:allegation) }
  let(:product) { create(:product_washing_machine) }
  let(:business) { create(:business) }
  let(:user) { create(:user) }
  let(:parent) { product }
  let(:old_title) { "old title" }
  let(:old_description) { "old description" }
  let(:new_title) { Faker::Hipster.word }
  let(:new_description) { Faker::Lorem.paragraph }
  let(:old_document_metadata) { { "title" => old_title, "description" => old_description, "updated" => "test" } }
  let(:new_document_metadata) { { "title" => new_title, "description" => new_description } }
  # Blob uploaded with the "old" metadata pre-applied.
  let(:uploaded_document) do
    uploaded_document = fixture_file_upload(file_fixture("testImage.png"))
    document = ActiveStorage::Blob.create_and_upload!(
      io: uploaded_document,
      filename: uploaded_document.original_filename,
      content_type: uploaded_document.content_type
    )
    document.update!(metadata: old_document_metadata)
    document
  end
  let(:file) { parent.documents.first.blob }

  before do
    AddDocument.call!(parent:, document: uploaded_document, user:)
  end

  describe ".call" do
    context "with no parameters" do
      let(:result) { described_class.call }

      it "returns a failure" do
        expect(result).to be_failure
      end
    end

    context "with no user parameter" do
      let(:result) { described_class.call(parent:, file:) }

      it "returns a failure" do
        expect(result).to be_failure
      end
    end

    context "with no document parameter" do
      let(:result) { described_class.call(parent:, user:) }

      it "returns a failure" do
        expect(result).to be_failure
      end
    end

    context "with no parent parameter" do
      let(:result) { described_class.call(file:, user:) }

      it "returns a failure" do
        expect(result).to be_failure
      end
    end

    context "with required parameters" do
      # Same title/description as already stored: the service should succeed
      # without touching the blob, auditing, or notifying anyone.
      context "with no changes" do
        let(:result) do
          described_class.call(
            user:,
            parent:,
            file:,
            title: old_title,
            description: old_description
          )
        end

        %i[investigation product business].each do |parent_name|
          context "when the parent object is a #{parent_name.to_s.upcase_first}" do
            let(:parent) { send(parent_name) }

            it "succeeds" do
              expect(result).to be_a_success
            end

            it "does not update the file" do
              expect { result }.not_to change(file, :metadata)
            end
          end
        end

        context "when the parent is an Investigation" do
          let(:parent) { investigation }

          it "does not add an audit activity record" do
            expect { result }.not_to change { investigation.activities.count }
          end

          it "does not send a notification email" do
            expect { result }.not_to have_enqueued_mail(NotifyMailer, :investigation_updated)
          end
        end
      end

      # New title/description: the blob metadata must be rewritten and, for
      # investigations, an audit record added and the owner notified.
      context "with changes" do
        let(:result) do
          described_class.call(
            user:,
            parent:,
            file:,
            title: new_title,
            description: new_description
          )
        end

        %i[investigation product business].each do |parent_name|
          context "when the parent object is a #{parent_name.to_s.upcase_first}" do
            let(:parent) { send(parent_name) }

            it "succeeds" do
              expect(result).to be_a_success
            end

            it "updates the file title metadata" do
              result
              expect(file.metadata["title"]).to eq(new_title)
            end

            it "updates the file description metadata" do
              result
              expect(file.metadata["description"]).to eq(new_description)
            end

            it "updates the file updated metadata" do
              # freeze_time pins Time.zone.now so the timestamp comparison
              # is exact.
              freeze_time do
                result
                expect(file.metadata["updated"].to_json).to eq(Time.zone.now.to_json)
              end
            end
          end
        end

        context "when the parent is an Investigation" do
          let(:parent) { investigation }
          let(:last_added_activity) { investigation.activities.order(:id).first }

          # Used by the shared "notifies the case owner" examples below.
          def expected_email_subject
            "Allegation updated"
          end

          def expected_email_body(name)
            "Document attached to the #{investigation.case_type.upcase_first} was updated by #{name}."
          end

          it "adds an audit activity record", :aggregate_failures do
            result
            expect(last_added_activity).to be_a(AuditActivity::Document::Update)
            expect(last_added_activity.attachment.blob).to eq(uploaded_document)
            expect(last_added_activity.metadata).to match({
              "blob_id" => file.id,
              "updates" => { "metadata" => [hash_including(old_document_metadata), hash_including(new_document_metadata)] }
            })
          end

          it_behaves_like "a service which notifies the case owner"
        end
      end
    end
  end
end
| 30.701754 | 123 | 0.601524 |
6221fb05779aae2b1537dafff34fceace8c085c5 | 1,164 | #
# Original knife-windows author:: Chirag Jog (<[email protected]>)
# Copyright:: Copyright (c) 2013-2016 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'chef/knife'
require 'chef/knife/winops_winrm'
require 'chef/knife/winops_bootstrap_windows_ssh'
require 'chef/knife/winops_bootstrap_windows_winrm'
require 'chef/knife/winops_wsman_test'
class Chef
  class Knife
    # Umbrella "help" command whose banner lists every knife-windows
    # subcommand, one per line.
    class WindowsHelper < Knife
      banner [
        BootstrapWindowsWinRM.banner,
        BootstrapWindowsSSH.banner,
        Winrm.banner,
        WsmanTest.banner
      ].join("\n")
    end
  end
end
| 31.459459 | 74 | 0.724227 |
ff485b087f81bfc4432b94441688df0ea1881d54 | 6,781 | # frozen_string_literal: true
module GraphQL
class Schema
# You can use the result of {GraphQL::Introspection::INTROSPECTION_QUERY}
# to make a schema. This schema is missing some important details like
# `resolve` functions, but it does include the full type system,
# so you can use it to validate queries.
module Loader
extend self
# Create schema with the result of an introspection query.
# @param introspection_result [Hash] A response from {GraphQL::Introspection::INTROSPECTION_QUERY}
# @return [GraphQL::Schema] the schema described by `input`
# @deprecated Use {GraphQL::Schema.from_introspection} instead
def load(introspection_result)
  schema_data = introspection_result.fetch("data").fetch("__schema")
  types = {}
  type_resolver = ->(type) { -> { resolve_type(types, type) } }

  # Build every non-introspection type (names beginning "__" are built in).
  schema_data.fetch("types").each do |type_data|
    next if type_data.fetch("name").start_with?("__")

    built_type = define_type(type_data, type_resolver)
    types[built_type.name] = built_type
  end

  define_kwargs = { orphan_types: types.values, resolve_type: NullResolveType }
  # Wire up the root operation types the schema declares.
  %i[query mutation subscription].each do |operation|
    root_type = schema_data["#{operation}Type"]
    define_kwargs[operation] = types.fetch(root_type.fetch("name")) if root_type
  end
  Schema.define(**define_kwargs, raise_definition_error: true)
end
# Placeholder resolve_type for loaded schemas: resolving a concrete object
# type requires application code, so it always raises.
NullResolveType = ->(type, obj, ctx) {
  raise(NotImplementedError, "This schema was loaded from string, so it can't resolve types for objects")
}

# Identity coercion used for custom scalars whose real behavior cannot be
# recovered from introspection.
NullScalarCoerce = ->(val, _ctx) { val }
class << self
private
# Turn an introspection type reference into a type object, unwrapping
# LIST/NON_NULL wrappers recursively; named kinds are looked up in +types+.
def resolve_type(types, type)
  kind = type.fetch("kind")
  if %w[ENUM INTERFACE INPUT_OBJECT OBJECT SCALAR UNION].include?(kind)
    types.fetch(type.fetch("name"))
  elsif kind == "LIST"
    ListType.new(of_type: resolve_type(types, type.fetch("ofType")))
  elsif kind == "NON_NULL"
    NonNullType.new(of_type: resolve_type(types, type.fetch("ofType")))
  else
    fail NotImplementedError, "#{kind} not implemented"
  end
end
# Convert a parsed default-value AST node back into a plain Ruby value.
# +default_value_str+ is only used for error reporting.
def extract_default_value(default_value_str, input_value_ast)
  case input_value_ast
  when String, Integer, Float, TrueClass, FalseClass
    # Plain literals pass straight through.
    input_value_ast
  when GraphQL::Language::Nodes::Enum
    input_value_ast.name
  when GraphQL::Language::Nodes::NullValue
    nil
  when GraphQL::Language::Nodes::InputObject
    input_value_ast.to_h
  when Array
    input_value_ast.map { |member| extract_default_value(default_value_str, member) }
  else
    message = "Encountered unexpected type when loading default value. "\
      "input_value_ast.class is #{input_value_ast.class} "\
      "default_value is #{default_value_str}"
    raise(message)
  end
end
# Build a type (or field/argument) object from one introspection entry,
# dispatching on its "kind". "FIELD" and "ARGUMENT" are synthesized kinds
# injected by the recursive calls below — they are not real GraphQL kinds.
#
# @param type [Hash] one introspection-result entry
# @param type_resolver [Proc] wraps a type reference in a lazy lookup
# @return [Object] a type, field, or argument definition
def define_type(type, type_resolver)
  case type.fetch("kind")
  when "ENUM"
    EnumType.define(
      name: type["name"],
      description: type["description"],
      values: type["enumValues"].map { |enum|
        EnumType::EnumValue.define(
          name: enum["name"],
          description: enum["description"],
          deprecation_reason: enum["deprecationReason"],
          value: enum["name"]
        )
      })
  when "INTERFACE"
    InterfaceType.define(
      name: type["name"],
      description: type["description"],
      fields: Hash[(type["fields"] || []).map { |field|
        [field["name"], define_type(field.merge("kind" => "FIELD"), type_resolver)]
      }]
    )
  when "INPUT_OBJECT"
    InputObjectType.define(
      name: type["name"],
      description: type["description"],
      arguments: Hash[type["inputFields"].map { |arg|
        [arg["name"], define_type(arg.merge("kind" => "ARGUMENT"), type_resolver)]
      }]
    )
  when "OBJECT"
    ObjectType.define(
      name: type["name"],
      description: type["description"],
      interfaces: (type["interfaces"] || []).map { |interface|
        type_resolver.call(interface)
      },
      fields: Hash[type["fields"].map { |field|
        [field["name"], define_type(field.merge("kind" => "FIELD"), type_resolver)]
      }]
    )
  when "FIELD"
    GraphQL::Field.define(
      name: type["name"],
      type: type_resolver.call(type["type"]),
      description: type["description"],
      arguments: Hash[type["args"].map { |arg|
        [arg["name"], define_type(arg.merge("kind" => "ARGUMENT"), type_resolver)]
      }]
    )
  when "ARGUMENT"
    kwargs = {}
    if type["defaultValue"]
      # The default value arrives as a GraphQL-syntax string; parse it by
      # embedding it in a throwaway query and pulling it back out of the AST.
      kwargs[:default_value] = begin
        default_value_str = type["defaultValue"]

        dummy_query_str = "query getStuff($var: InputObj = #{default_value_str}) { __typename }"

        # Returns a `GraphQL::Language::Nodes::Document`:
        dummy_query_ast = GraphQL.parse(dummy_query_str)

        # Reach into the AST for the default value:
        input_value_ast = dummy_query_ast.definitions.first.variables.first.default_value

        extract_default_value(default_value_str, input_value_ast)
      end
    end

    GraphQL::Argument.define(
      name: type["name"],
      type: type_resolver.call(type["type"]),
      description: type["description"],
      method_access: false,
      **kwargs
    )
  when "SCALAR"
    type_name = type.fetch("name")
    # Built-in scalars (Int, String, ...) are reused; custom scalars get a
    # pass-through coerce because their behavior is not introspectable.
    if GraphQL::Schema::BUILT_IN_TYPES[type_name]
      GraphQL::Schema::BUILT_IN_TYPES[type_name]
    else
      ScalarType.define(
        name: type["name"],
        description: type["description"],
        coerce: NullScalarCoerce,
      )
    end
  when "UNION"
    UnionType.define(
      name: type["name"],
      description: type["description"],
      possible_types: type["possibleTypes"].map { |possible_type|
        type_resolver.call(possible_type)
      }
    )
  else
    fail NotImplementedError, "#{type["kind"]} not implemented"
  end
end
end
end
end
end
| 37.464088 | 111 | 0.545347 |
38acd0a4da3c71a49a734d8ee67927f0096aebc9 | 3,365 | # frozen_string_literal: true
# Puma configuration.

# Bind to a TCP address rather than a unix socket: nginx runs in a separate
# Docker container, so a socket file would not be reachable.
bind "tcp://#{ENV.fetch('BIND_ON', '0.0.0.0:3000')}"

# Where Puma writes its pidfile.
pidfile ENV.fetch('PIDFILE', 'tmp/pids/puma.pid')

# Requests are served through an internal thread pool. Even on MRI, threads
# help because I/O does not hold the GIL; more threads raise throughput at
# the cost of CPU. 5 is a safe starting point — tune per app.
# RAILS_MAX_THREADS matches Active Record's default pool size.
max_threads_count = ENV.fetch('RAILS_MAX_THREADS', 5)
min_threads_count = ENV.fetch('RAILS_MIN_THREADS', max_threads_count)
threads min_threads_count, max_threads_count

# Environment Puma runs in.
rails_env = ENV.fetch('RAILS_ENV', 'development')
environment rails_env

# Worker processes are OS-level forks serving requests in parallel (more
# RAM per worker). Total concurrency is THREADS * WORKERS. There is no good
# way to auto-calculate this, so it defaults to a conservative 2 — tune for
# your instance size.
workers ENV.fetch('WEB_CONCURRENCY', 2)

silence_single_worker_warning if rails_env == 'development'

# Health check: workers must check in with the master within this window or
# be rebooted (Puma's default is 60s). Hard-coded on purpose — it rarely
# needs tuning. The long development value avoids reboots while debugging.
worker_timeout rails_env == 'development' ? 3600 : 30

# The path to the puma binary without any arguments.
# restart_command 'puma'

# `preload_app!` boots the app before forking so workers share memory via
# copy-on-write; if enabled, reconnect any threads in `on_worker_boot`.
# preload_app!

# Allow puma to be restarted by `rails restart` command.
# plugin :tmp_restart

# The control rack application (started below) lets the main server be
# managed remotely; requests can be gated behind an auth token passed as a
# query parameter for simple authentication.
activate_control_app "tcp://#{ENV.fetch('PUMA_CONTROL_URL') { '0.0.0.0:9293' }}", { auth_token: ENV.fetch('PUMA_CONTROL_URL_TOKEN') { 'zealot' } } | 46.09589 | 146 | 0.768499 |
28d358fe3a6df051ba1e612d6e55edb46f95a3af | 1,071 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2019_08_01
module Models
#
# Reference to another subresource.
#
# Reference to another subresource.
class SubResource
  include MsRestAzure

  # @return [String] Resource ID.
  attr_accessor :id

  # Mapper for SubResource class as Ruby Hash, used for
  # serialization/deserialization. A fresh hash is returned per call.
  def self.mapper()
    id_property = {
      client_side_validation: true,
      required: false,
      serialized_name: 'id',
      type: {
        name: 'String'
      }
    }

    {
      client_side_validation: true,
      required: false,
      serialized_name: 'SubResource',
      type: {
        name: 'Composite',
        class_name: 'SubResource',
        model_properties: {
          id: id_property
        }
      }
    }
  end
end
end
end
| 22.787234 | 70 | 0.528478 |
ab54ba430eaba46686f5e8905f411c603f2d6147 | 1,687 | #--
# Copyright (C) 2006 Andrea Censi <andrea (at) rubyforge.org>
#
# This file is part of Maruku.
#
# Maruku is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Maruku is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Maruku; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#++
class String
# These are TeX's special characters; they escape with a leading backslash.
LATEX_ADD_SLASH = ['{', '}', '$', '&', '#', '_', '%'].freeze
# These have no simple escape; we transform to {\tt \char<ascii code>}.
LATEX_TO_CHARCODE = ['^', '~', '>', '<'].freeze

# Escapes TeX special characters in +s+ and returns the escaped copy:
# charcode characters become {\tt \symbol{<ord>}}, slash-escapable ones get
# a leading backslash, and a literal backslash becomes $\backslash$.
def escape_to_latex(s)
  escaped = ""
  s.each_char do |ch|
    if LATEX_TO_CHARCODE.include? ch
      # Use << throughout: the original mixed += here, which allocated a
      # brand-new string on every escaped character.
      escaped << "{\\tt \\symbol{#{ch.ord}}}"
    elsif LATEX_ADD_SLASH.include? ch
      escaped << ?\\ << ch
    elsif ch == '\\'
      # there is no backslash in cmr10 fonts
      escaped << "$\\backslash$"
    else
      escaped << ch
    end
  end
  escaped
end
# Escapes TeX special characters, then applies the OtherGoodies cosmetic
# substitutions, returning the processed copy.
def to_latex
  s = escape_to_latex(self)
  OtherGoodies.each do |k, v|
    s.gsub!(k, v)
  end
  s
end

# Other things that are good on the eyes. Frozen: constants holding mutable
# hashes should not be modifiable at runtime.
OtherGoodies = {
  /(\s)LaTeX/ => '\1\\LaTeX\\xspace ', # XXX not if already \LaTeX
  # 'HTML' => '\\textsc{html}\\xspace ',
  # 'PDF' => '\\textsc{pdf}\\xspace '
}.freeze
end | 26.359375 | 78 | 0.639597 |
33b896d6f5dd12e56a824cf800d4f46ed0d53237 | 786 | Pod::Spec.new do |s|
  # Localytics AMP closed-source SDK: distributed as a zip containing a
  # prebuilt static library plus public headers.
  s.name         = "Localytics-AMP"
  s.version      = "2.21.0"
  s.summary      = "Localytics AMP iOS SDK"
  s.description  = "Localytics analytics and marketing platform"
  s.homepage     = "http://www.localytics.com"
  s.license      = {
    :type => 'Copyright',
    :file => 'LICENSE'
  }
  s.author       = 'Char Software, Inc. d/b/a Localytics'
  s.source       = { :http => "http://downloads.localytics.com/SDKs/iOS/archive/AMP-SDK-2.21.0.bin.zip" }
  s.platform     = :ios, '5.1.1'
  s.source_files = '*.h'
  # Keep the prebuilt binary in the pod sandbox and point the linker at it.
  s.preserve_paths = 'libLocalyticsAMP.a'
  s.xcconfig     = { 'LIBRARY_SEARCH_PATHS' => '"$(PODS_ROOT)/Localytics-AMP"' }
  # AdSupport is weak-linked so the SDK still loads on older iOS versions.
  s.weak_frameworks = 'AdSupport'
  s.frameworks   = 'SystemConfiguration'
  s.libraries    = 'LocalyticsAMP', 'z', 'sqlite3'
end
| 32.75 | 105 | 0.617048 |
ff1fe273d6d7442ab81e69d924fd62e527452b8e | 1,755 | # -*- encoding: utf-8 -*-
# stub: rails-html-sanitizer 1.0.2 ruby lib
Gem::Specification.new do |s|
s.name = "rails-html-sanitizer"
s.version = "1.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Rafael Mendon\u{e7}a Fran\u{e7}a", "Kasper Timm Hansen"]
s.date = "2015-03-10"
s.description = "HTML sanitization for Rails applications"
s.email = ["[email protected]", "[email protected]"]
s.homepage = "https://github.com/rafaelfranca/rails-html-sanitizer"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.5"
s.summary = "This gem is responsible to sanitize HTML fragments in Rails applications."
s.installed_by_version = "2.4.5" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<loofah>, ["~> 2.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.3"])
s.add_development_dependency(%q<rake>, [">= 0"])
s.add_development_dependency(%q<minitest>, [">= 0"])
s.add_development_dependency(%q<rails-dom-testing>, [">= 0"])
else
s.add_dependency(%q<loofah>, ["~> 2.0"])
s.add_dependency(%q<bundler>, ["~> 1.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<rails-dom-testing>, [">= 0"])
end
else
s.add_dependency(%q<loofah>, ["~> 2.0"])
s.add_dependency(%q<bundler>, ["~> 1.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<rails-dom-testing>, [">= 0"])
end
end
| 39 | 105 | 0.638746 |
1d153b00632aa31e75a3b5bb218d2e47a43d1950 | 2,558 | # -*- coding: utf-8 -*-
#
# Copyright 2014 Roy Liu
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy of
# the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations under
# the License.
require "active_support/inflector"
module Percolate
  module Adapter
    # A base class to build off of.
    class BaseAdapter
      # The constructor.
      #
      # @param data_source [Object] the data source.
      def initialize(data_source = nil)
        @data_source = data_source
      end

      # Loads entities in an adapter-specific way.
      #
      # @return [Hash] the loaded entities.
      def load_entities
        {}
      end

      # Loads a facet.
      #
      # @param context [String] the lookup context.
      # @param facet_name [Symbol] the facet name.
      #
      # @return [Object] the loaded facet.
      def load_facet(context, facet_name)
        create_facet(facet_name)
      end

      # Creates a facet from the given name.
      #
      # @param facet_name [Symbol] the facet name.
      #
      # @return [Object] the facet.
      def create_facet(facet_name)
        const_str = ActiveSupport::Inflector.camelize(facet_name) + "Facet"

        begin
          require "percolate/facet/#{facet_name}_facet"
        rescue LoadError
          # Do nothing. Give the benefit of the doubt if the file doesn't exist.
        end if !Facet.const_defined?(const_str)

        Facet.const_get(const_str).new
      end

      # Configures a facet according to the given attribute hash.
      #
      # @param facet [Object] the facet.
      # @param attr_hash [Hash] the attribute hash. NOTE(review): keys must
      #   be strings — `attr + "="` raises TypeError on Symbol keys; confirm
      #   callers always pass string keys.
      #
      # @return [Object] the facet.
      def configure_facet(facet, attr_hash)
        attr_hash.each_pair do |attr, value|
          facet.send((attr + "=").to_sym, value)
        end

        facet
      end

      # If the given method isn't found, check for a setter of the same name.
      def method_missing(sym, *args, &block)
        if sym[-1] != "="
          sym_set = (sym.to_s + "=").to_sym
          return send(sym_set, *args, &block) if respond_to?(sym_set)
        end

        super
      end

      # Keep respond_to? consistent with the method_missing fallback above:
      # a non-setter name is answerable when the matching setter exists.
      def respond_to_missing?(sym, include_private = false)
        (sym[-1] != "=" && respond_to?((sym.to_s + "=").to_sym)) || super
      end
    end
  end
end
| 28.422222 | 80 | 0.629007 |
08444f671567f16f0294fb9cf6e287e8060bac72 | 96 | class VisitorsController < ApplicationController
def about
render 'pages/about'
end
end
| 16 | 48 | 0.770833 |
181c9ac5c5ce6c929de036551b19424e29f510bc | 1,064 | require "./lib/active_record/session_store/version"
# Gem specification for activerecord-session_store.
Gem::Specification.new do |spec|
  spec.name     = 'activerecord-session_store'
  spec.version  = ActiveRecord::SessionStore::VERSION
  spec.platform = Gem::Platform::RUBY
  spec.summary  = 'An Action Dispatch session store backed by an Active Record class.'

  spec.author   = 'David Heinemeier Hansson'
  spec.email    = '[email protected]'
  spec.homepage = 'https://github.com/rails/activerecord-session_store'
  spec.license  = 'MIT'

  spec.required_ruby_version = '>= 1.9.3'

  spec.files            = Dir['CHANGELOG.md', 'MIT-LICENSE', 'README.md', 'lib/**/*']
  spec.require_path     = 'lib'
  spec.extra_rdoc_files = %w( README.md )
  spec.rdoc_options.concat ['--main', 'README.md']

  # Runtime dependencies.
  spec.add_dependency('activerecord', '>= 4.0')
  spec.add_dependency('actionpack', '>= 4.0')
  spec.add_dependency('railties', '>= 4.0')
  spec.add_dependency('rack', '>= 1.5.2', '< 3')
  spec.add_dependency('multi_json', '~> 1.11', '>= 1.11.2')

  # Development-only dependencies.
  spec.add_development_dependency('sqlite3')
  spec.add_development_dependency('appraisal', '~> 2.1.0')
end
| 34.322581 | 86 | 0.649436 |
79a568b3d5c660bd768979dee8ff2d4da1542aac | 993 | # frozen_string_literal: true
# Make lib/ requirable so the version constant can be loaded below.
lib_dir = File.expand_path('lib', __dir__)
$LOAD_PATH.unshift(lib_dir) unless $LOAD_PATH.include?(lib_dir)

require "date_as_string/version"

# Gem specification for date_as_string.
Gem::Specification.new do |gem|
  gem.name        = "date_as_string"
  gem.version     = DateAsString::VERSION::STRING
  gem.authors     = ["Eric Sullivan"]
  gem.email       = ["[email protected]"]
  gem.homepage    = "https://github.com/annkissam/date_as_string"
  gem.summary     = %q{Convert from Date to String and vice versa by attaching _string suffix to an ActiveRecord field}
  gem.description = %q{Treat an ActiveRecord Date column as a String}
  gem.license     = 'MIT'

  # Package everything tracked by git; expose bin/ entries as executables.
  gem.files         = `git ls-files`.split("\n")
  gem.test_files    = `git ls-files -- {test,spec,features}/*`.split("\n")
  gem.executables   = `git ls-files -- bin/*`.split("\n").map { |f| File.basename(f) }
  gem.require_paths = ["lib"]

  gem.add_development_dependency "rspec"
  gem.add_development_dependency "rake"
end
| 39.72 | 120 | 0.691843 |
1839d0bd69b57b69ae7bb2ef3f4aecf23bf1eeaf | 1,043 | require 'rails_helper'
require 'authorization/assertion/facebook'
# Specs for the Facebook auth assertion. The Graph API calls for the current
# user and their friend list are stubbed with canned JSON fixtures, so the
# access token value is irrelevant.
RSpec.describe Authorization::Assertion::Facebook do
  before do
    stub_request(:get, %r{https://graph.facebook.com/v2.11/me\?.*})
      .to_return(body: fixture('auth/facebook/self.json'))

    stub_request(:get, %r{https://graph.facebook.com/v2.11/me/friends\?.*})
      .to_return(body: fixture('auth/facebook/friends.json'))
  end

  let(:facebook_auth) { Authorization::Assertion::Facebook.new('any token') }

  describe '#user!' do
    # facebook_id matches the id in the self.json fixture.
    let!(:user) { create :user, facebook_id: '1659565134412042' }
    subject { facebook_auth.user! }

    it 'should return user' do
      expect(subject).to eq(user)
    end
  end

  describe '#auto_follows' do
    # friend's facebook_id matches an entry in the friends.json fixture.
    let!(:user) { create :user, facebook_id: '1659565134412042' }
    let!(:friend) { create :user, facebook_id: '10204220238175291' }

    before { facebook_auth.auto_follows }

    subject { Follow.where(follower: user, followed: friend) }

    it 'follow should exist' do
      expect(subject).to exist
    end
  end
end
| 28.972222 | 77 | 0.682646 |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.