hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
33b750991a91faa053207789299eb886fae992a1 | 458 | ENV['RACK_ENV'] = 'test'
# Test helper: loads the app under test and wires Rack::Test into MiniTest specs.
require_relative '../my_app'
require 'minitest/autorun'
require "turn/autorun"
require 'rack/test'

# Include rack helpers and expose full application stack
class MiniTest::Spec
  include Rack::Test::Methods

  # Rack::Test entry point: builds the full middleware stack from config.ru.
  # `.first` unwraps the app from Rack::Builder.parse_file's return value.
  def app
    Rack::Builder.parse_file(File.dirname(__FILE__) + '/../config.ru').first
  end
end

# Configure test output
# possible values are :pretty, :dot, :cue, :marshal, :outline, :progress
Turn.config.format = :dot
| 22.9 | 76 | 0.724891 |
03b0aa730efd08ace2bf671060529b63a0d5b2c6 | 558 | # coding: ascii-8bit
##
# This file is part of WhatWeb and may be subject to
# redistribution and commercial restrictions. Please see the WhatWeb
# web site for more information on licensing and terms of use.
# http://www.morningstarsecurity.com/research/whatweb
##
# WhatWeb plugin: fingerprints AnyGate home routers by their index page.
Plugin.define "AnyGate" do
  author "Andrew Horton"
  version "0.1"
  description "Korean home wifi/router device"
  website "http://www.anygate.co.kr/"

  # Both matchers probe /index.asp. The second matches a banner string
  # (appears mojibake here — presumably Korean "no password set" text,
  # encoding mangled in transit; the file declares ascii-8bit) and labels
  # the hit "No Password".
  matches [
    {:url=>'/index.asp', :text=>'<title>AnyGate' },
    {:url=>'/index.asp', :text=>"์ฌ์ฉ์ ์ํธ๊ฐ ์ค์ ๋์ด ์์ง ์์ต๋๋ค.", :string=>"No Password"}
  ]
end
| 24.26087 | 77 | 0.716846 |
3893422293b2e303f10a0442f940853730ebf677 | 3,233 | namespace :quality_metrics do
desc 'List the names of quality metrics'
task list: :environment do
puts "Quality Metrics:"
QualityMetric.all.each do |qm|
puts "+ #{qm.name}"
end
end
namespace :run do
desc 'Run quality metrics on latest version of all cookbooks'
task all_the_latest: :environment do
result, message = RunQualityMetrics.all_the_latest
puts "#{result.to_s.upcase}: #{message}"
end
desc 'Run quality metrics on latest version of a named cookbook'
task :on_latest, [:cookbook_name] => :environment do |t, args|
args.with_defaults(cookbook_name: nil)
unless args[:cookbook_name]
puts "ERROR: Nothing to do without a cookbook name. e.g. #{t}[cookbook_name]"
exit 1
end
result, message = RunQualityMetrics.on_latest args[:cookbook_name]
puts "#{result.to_s.upcase}: #{message}"
end
desc 'Run quality metrics on given version of a named cookbook'
task :on_version, [:cookbook_name, :version] => :environment do |t, args|
args.with_defaults(cookbook_name: nil, version: nil)
unless args[:cookbook_name] && args[:version]
puts "ERROR: Nothing to do without a cookbook name and version. e.g. #{t}[cookbook_name, version]"
exit 1
end
result, message = RunQualityMetrics.on_version args[:cookbook_name], args[:version]
puts "#{result.to_s.upcase}: #{message}"
end
end
namespace :flip do
desc 'Flip all quality metrics visible to all users'
task all_public: :environment do
QualityMetric.flip_public
puts "OK: All quality metrics are now visible to all users."
end
desc 'Flip all quality metrics visible to only admin users'
task all_admin_only: :environment do
QualityMetric.flip_admin_only
puts "OK: All quality metrics are now visible to only admin users."
end
desc 'Flip a given quality metric visible to all users'
task :public, [:metric_name] => :environment do |t, args|
args.with_defaults(metric_name: nil)
metric_name = args[:metric_name]
unless metric_name
puts "ERROR: Nothing to do without a metric name. e.g. #{t}[metric_name]"
exit 1
end
quality_metric = QualityMetric.find_by_name(metric_name)
unless quality_metric
puts "ERROR: No quality metric found with the name '#{metric_name}'."
exit 1
end
quality_metric.flip_public
puts "OK: The #{metric_name} quality metric is now visible to all users."
end
desc 'Flip a given quality metric visible to only admin users'
task :admin_only, [:metric_name] => :environment do |t, args|
args.with_defaults(metric_name: nil)
metric_name = args[:metric_name]
unless metric_name
puts "ERROR: Nothing to do without a metric name. e.g. #{t}[metric_name]"
exit 1
end
quality_metric = QualityMetric.find_by_name(metric_name)
unless quality_metric
puts "ERROR: No quality metric found with the name '#{metric_name}'."
exit 1
end
quality_metric.flip_admin_only
puts "OK: The #{metric_name} quality metric is now visible to only admin users."
end
end
end
| 33.329897 | 106 | 0.672131 |
f8075556145098293aa2a648bd1007033756f70a | 3,272 | module Slinky
# Raw TCP reverse proxy in front of the Slinky dev server. Requests whose
# path matches a configured prefix are rewritten and forwarded to a
# backend; everything else goes to the local Slinky server.
module ProxyServer
  # Captures the verb (group 1) and path (group 2) from the request line,
  # stopping before " HTTP".
  HTTP_MATCHER = /(GET|POST|PUT|DELETE|HEAD) (.+?)(?= HTTP)/
  # Matches the Host header so it can be rewritten for the backend.
  HOST_MATCHER = /Host: (\S+)/

  # Normalizes the config hash {from => to-or-{"to" => ..., opts}} into
  # [from_prefix, URI, opts] triples. Entries whose target URI cannot be
  # parsed are reported on stderr and dropped (hence the .compact).
  def self.process_proxies proxy_hash
    proxy_hash.map{|from, h|
      begin
        to, opt = h.is_a?(Hash) ? [h.delete("to"), h] : [h, {}]
        a = [from, URI::parse(to), opt]
      rescue
        $stderr.puts "Invalid proxy setting: #{from} => #{to}".foreground(:red)
      end
    }.compact
  end

  # [host, port] pair for each configured backend.
  def self.process_proxy_servers proxies
    proxies.map{|p| [p[1].host, p[1].port]}
  end

  # First proxy whose path prefix matches the request path (nil if none).
  def self.find_matcher proxies, path
    proxies.find{|p| path.start_with?(p[0])}
  end

  # Strips the matched proxy prefix from the request path.
  def self.rewrite_path path, proxy
    if proxy[0] == "/"
      # If we're proxying everything, we just want to pass the path
      # through unmodified. Otherwise we end up stripping the
      # initial slash, which is the wrong behavior.
      path
    else
      path.gsub(/^#{proxy[0]}/, "")
    end
  end

  # Replaces old_path in the raw request with the backend's base path
  # (addition, trailing slash trimmed) followed by the rewritten path.
  def self.replace_path http, old_path, new_path, addition
    # TODO: This may fail in certain, rare cases
    addition = addition[0..-2] if addition[-1] == "/"
    http.gsub(old_path, addition + new_path)
  end

  # Rewrites the Host header to point at the backend host.
  def self.replace_host http, host
    http.gsub(HOST_MATCHER, "Host: #{host}")
  end

  # Starts the proxy on 0.0.0.0:port. Each connection is pre-wired to the
  # local Slinky server (:slinky); per-request routing happens in on_data.
  def self.run proxy_hash, port, slinky_port
    proxies = process_proxies proxy_hash
    proxy_servers = process_proxy_servers proxies
    Proxy.start(:host => "0.0.0.0", :port => port){|conn|
      proxy = nil
      start_time = nil
      conn.server :slinky, :host => "127.0.0.1", :port => slinky_port
      server = nil
      conn.on_data do |data|
        begin
          matches = data.match(ProxyServer::HTTP_MATCHER)
          if matches
            path = matches[2]
            proxy = ProxyServer.find_matcher(proxies, path)
            start_time = Time.now
            # Route to the matched backend (rewriting path + Host) or
            # fall back to the local Slinky server.
            server = if proxy
                       new_path = ProxyServer.rewrite_path path, proxy
                       data = ProxyServer.replace_path(data, path, new_path, proxy[1].path)
                       new_host = proxy[1].select(:host, :port).join(":")
                       data = ProxyServer.replace_host(data, new_host)
                       conn.server [proxy[1].host, proxy[1].port],
                                   :host => proxy[1].host, :port => proxy[1].port
                       [proxy[1].host, proxy[1].port]
                     else :slinky
                     end
          end
          # [data to forward, list of server names to forward it to]
          [data, [server]]
        rescue
          conn.send_data "HTTP/1.1 500 Ooops...something went wrong\r\n"
        end
      end
      conn.on_response do |server, resp|
        opt = proxy && proxy[2]
        if opt && opt["lag"]
          # we want to get as close as possible to opt["lag"], so we
          # take into account the lag from the backend server
          so_far = Time.now - start_time
          time = opt["lag"]/1000.0-so_far
          EM.add_timer(time > 0 ? time : 0) do
            conn.send_data resp
          end
        else
          resp
        end
      end
      conn.on_finish do |name|
        unbind
      end
    }
  end
end
end
| 32.078431 | 93 | 0.525672 |
39717f82def5cf744bd868f2edb1561c10bc030e | 1,554 | #
# Be sure to run `pod lib lint Swift-test-Lib.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
# CocoaPods spec for Swift-test-Lib (generated from the pod lib template).
Pod::Spec.new do |s|
  s.name             = "Swift-test-Lib"
  s.version          = "0.1.0"
  s.summary          = "A short description of Swift-test-Lib."

  # This description is used to generate tags and improve search results.
  #   * Think: What does it do? Why did you write it? What is the focus?
  #   * Try to keep it short, snappy and to the point.
  #   * Write the description between the DESC delimiters below.
  #   * Finally, don't worry about the indent, CocoaPods strips it!
  # NOTE(review): the description below is empty — fill it in before
  # publishing, or `pod lib lint` will warn.
  s.description      = <<-DESC
                       DESC

  # NOTE(review): the <GITHUB_USERNAME> placeholders must be replaced with
  # the real account before this spec is usable.
  s.homepage         = "https://github.com/<GITHUB_USERNAME>/Swift-test-Lib"
  # s.screenshots     = "www.example.com/screenshots_1", "www.example.com/screenshots_2"
  s.license          = 'MIT'
  s.author           = { "youguoyi" => "[email protected]" }
  s.source           = { :git => "https://github.com/<GITHUB_USERNAME>/Swift-test-Lib.git", :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/<TWITTER_USERNAME>'

  s.platform     = :ios, '7.0'
  s.requires_arc = true

  s.source_files = 'Pod/Classes/**/*'
  s.resource_bundles = {
    'Swift-test-Lib' => ['Pod/Assets/*.png']
  }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  # s.frameworks = 'UIKit', 'MapKit'
  # s.dependency 'AFNetworking', '~> 2.3'
end
| 37.902439 | 116 | 0.632561 |
4a11d0c5177931f846edbe856f463ddb3a970164 | 17 | require 'library' | 17 | 17 | 0.823529 |
4a66ed314306a8a66fb9a033668375c136224b24 | 766 | module Mutest
class Mutator
  class Node
    module Regexp
      # Mutator for the wrapper node around a regexp's root expression
      class RootExpression < Node
        handle(:regexp_root_expression)

        # Emit mutations for every child of the root node
        #
        # @return [undefined]
        def dispatch
          children.each_index { |index| mutate_child(index) }
        end
      end

      # Mutator for the beginning-of-line anchor `^`
      class BeginningOfLineAnchor < Node
        handle(:regexp_bol_anchor)

        # Emit mutations, replacing `^` with `\A`
        #
        # @return [undefined]
        def dispatch
          emit(s(:regexp_bos_anchor))
        end
      end
    end
  end
end
end
| 22.529412 | 55 | 0.535248 |
f8209105b213305b66b47ad35d2a4f22fb1cf887 | 1,019 | # encoding: utf-8
# Minimal unbuffered line parser: reads a stream one character at a time
# via #getc and returns an array of lines with CR/LF/CRLF terminators
# stripped.
class ParserNobuf

  LF = "\n" ## \n == ASCII 0x0A (hex) 10 (dec) = LF (Newline/line feed)
  CR = "\r" ## \r == ASCII 0x0D (hex) 13 (dec) = CR (Carriage return)

  # Parses +input+ (an IO-like object responding to #getc and #eof?) into
  # an array of line strings.
  #
  # Fixes over the previous version:
  # * the final character of an unterminated last line is no longer
  #   dropped ("ab\ncd" now yields ["ab", "cd"], not ["ab", "c"])
  # * a trailing CRLF no longer produces a phantom empty record
  def self.parse( input )
    recs = []
    return recs if input.eof?

    c = input.getc
    loop do
      if c==LF || c==CR
        ## blank line (terminator with no preceding content)
        recs << ""
        break if input.eof?
        c = skip_newline( c, input )
        break if c.nil?   # trailing CRLF consumed the rest of the stream
      else
        ## non-blank line: accumulate until a terminator or end of stream
        line = ""
        loop do
          line << c
          c = input.getc
          break if c==LF || c==CR
          if input.eof?
            line << c if c   # keep the last char of an unterminated line
            c = nil
            break
          end
        end
        recs << line
        break if input.eof?
        c = skip_newline( c, input )
        break if c.nil?   # trailing CRLF consumed the rest of the stream
      end
    end
    recs
  end

  # Consumes the newline sequence started by +c+ (CR LF, LF, or CR) and
  # returns the first character after it (nil at end of stream).
  def self.skip_newline( c, input )
    return c if input.eof?
    ## only skip CR LF or LF or CR
    if c == CR
      c = input.getc
      c = input.getc if c == LF
      c
    elsif c == LF
      c = input.getc  ## eat-up
      c
    else
      # not a newline: return unchanged
      c
    end
  end
end # class ParserNobuf
| 18.196429 | 85 | 0.490677 |
d5a55bf8cc0f31d17b19bc97eb31e3b6e907503c | 208 | class AddRetiredToContainerTypes < ActiveRecord::Migration
def up
  # Soft-retire flag for container types; existing rows default to active.
  add_column :container_types, :retired, :boolean, default: false
end
def down
  # Reverses the migration by dropping the retired flag.
  remove_column :container_types, :retired
end
end
| 23.111111 | 67 | 0.764423 |
f7b88d0f78ba2dfa65dacc93d65c3a3e5b0ac14b | 2,389 | module Archive
##
# Client for archiving questionnaire responses after they're completed.
# Wraps a Faraday JSON connection to the FHIR hub at ENV['HUB_FHIR_URL'].
class Client
  # auth_token: bearer token sent on every request to the hub.
  def initialize(auth_token:)
    headers = {
      'Authorization' => "Bearer #{auth_token}"
    }
    STDOUT.puts "Base URL: #{base_url}"
    # JSON request encoding; responses decoded only for json content types.
    @connection = Faraday.new(url: base_url, headers: headers) do |conn|
      conn.request :json
      conn.response :json, content_type: /\bjson$/
      conn.adapter Faraday.default_adapter
    end
  end

  # POSTs the questionnaire response to the hub as a FHIR
  # QuestionnaireResponse, logging timings and validation errors along the
  # way. Non-2xx responses are logged (including the request body) but the
  # Faraday response is always returned — no exception is raised.
  def archive(questionnaire_response)
    _start = Time.now
    resp = @connection.post do |req|
      _start1 = Time.now
      req.url 'QuestionnaireResponse'
      req.headers['Content-Type'] = 'application/json;charset=utf-8'
      fhir_obj = serializer.to_fhir(questionnaire_response)
      errors = fhir_obj.validate
      # Need to convert into instant for google
      # TODO Save in dynamo correctly
      if errors.dig('contained', 0, 'meta', 0, 'lastUpdated')
        str = fhir_obj.contained[0].meta.lastUpdated
        # FIX: format previously used '%M' (minutes) in the date part where
        # the month directive '%m' is required for a YYYY-MM-DD instant.
        fhir_obj.contained[0].meta.lastUpdated = Time.parse(str).strftime('%Y-%m-%dT%H:%M:%S.0+00:00')
      end
      _end1 = Time.now
      STDOUT.puts("TIME_next-q#faraday-setup_p1 #{(_end1 - _start1) * 1000}ms")
      _start1 = Time.now
      # Check for anymore errors
      errors = fhir_obj.validate
      if !errors.empty?
        STDOUT.puts("QR Archive Validation Errors #{errors.to_json}")
      end
      _end1 = Time.now
      STDOUT.puts("TIME_next-q#faraday-setup_p2 #{(_end1 - _start1) * 1000}ms")
      _start1 = Time.now
      req.body = fhir_obj.to_json
      _end1 = Time.now
      STDOUT.puts("TIME_next-q#faraday-setup_p3 #{(_end1 - _start1) * 1000}ms")
    end
    _end = Time.now
    STDOUT.puts("TIME_next-q#archive-post #{(_end - _start) * 1000}ms")
    if !resp.status.to_s.match(/2[0-9]{2}/)
      STDOUT.puts("Error archiving QR #{questionnaire_response.id} to Hub")
      STDOUT.puts("Status: #{resp.status}, Body: #{resp.body}")
      STDOUT.puts("Request Body, #{serializer.to_fhir(questionnaire_response).to_json}")
    else
      STDOUT.puts("Successful archiving. Status: #{resp.status}")
    end
    resp
  end

  private

  # Serializer used to turn a questionnaire response into a FHIR object.
  def serializer
    Archive::QuestionnaireResponseSerializer
  end

  # Hub endpoint, configured via environment.
  def base_url
    ENV['HUB_FHIR_URL']
  end
end
end
| 29.8625 | 104 | 0.620762 |
1d8c30e9a113a7cc741ef6efaf753a4270c2653a | 1,188 | module Api
module Endpoints
  # JSON API for users: fetch a single user by id, or list a team's users
  # with optional captain filtering, cursor pagination and sorting.
  class UsersEndpoint < Grape::API
    format :json
    helpers Api::Helpers::CursorHelpers
    helpers Api::Helpers::SortHelpers
    helpers Api::Helpers::PaginationParameters

    namespace :users do
      desc 'Get a user.'
      params do
        requires :id, type: String, desc: 'User ID.'
      end
      get ':id' do
        # 404 both for unknown users and for teams without API access.
        user = User.find(params[:id]) || error!('Not Found', 404)
        error!('Not Found', 404) unless user.team.api?
        present user, with: Api::Presenters::UserPresenter
      end

      desc 'Get all the users.'
      params do
        requires :team_id, type: String
        optional :captain, type: Boolean
        use :pagination
      end
      sort User::SORT_ORDERS
      get do
        team = Team.find(params[:team_id]) || error!('Not Found', 404)
        error!('Not Found', 404) unless team.api?
        query = team.users
        # Optionally restrict to team captains.
        query = query.captains if params[:captain]
        users = paginate_and_sort_by_cursor(query, default_sort_order: '-elo')
        present users, with: Api::Presenters::UsersPresenter
      end
    end
  end
end
end
| 30.461538 | 80 | 0.584175 |
185ab26d96b1594634efb5ac01442f10fab0223d | 1,402 | require_relative "base_service"
module Files
  # Commits a new file to a repository branch on behalf of the current
  # user, validating permissions, branch existence, file-name characters
  # and that no file already exists at the path. Returns success/error
  # results from BaseService.
  class CreateService < BaseService
    def execute
      # Protected branches require the stronger push permission.
      allowed = if project.protected_branch?(ref)
                  can?(current_user, :push_code_to_protected_branches, project)
                else
                  can?(current_user, :push_code, project)
                end

      unless allowed
        return error("You are not allowed to create file in this branch")
      end

      unless repository.branch_names.include?(ref)
        return error("You can only create files if you are on top of a branch")
      end

      file_name = File.basename(path)
      file_path = path

      unless file_name =~ Gitlab::Regex.path_regex
        return error("Your changes could not be committed, because file name contains not allowed characters")
      end

      # Refuse to overwrite an existing file at this path.
      blob = repository.blob_at(ref, file_path)

      if blob
        return error("Your changes could not be committed, because file with such name exists")
      end

      # Commit via the satellite; commit! reports whether the write landed.
      new_file_action = Gitlab::Satellite::NewFileAction.new(current_user, project, ref, file_path)
      created_successfully = new_file_action.commit!(
        params[:content],
        params[:commit_message],
        params[:encoding]
      )

      if created_successfully
        success
      else
        error("Your changes could not be committed, because the file has been changed")
      end
    end
  end
end
| 29.208333 | 110 | 0.650499 |
03d1483f425236472c4a3972f0e8615d18216cce | 359 | #!/usr/bin/env ruby
# encoding: utf-8

require "rubygems"
require "amqp"

# Declaring a durable shared queue
# One-shot script: connects, declares the durable "images.resize" queue,
# then immediately closes the connection and stops the reactor.
AMQP.start("amqp://guest:[email protected]") do |connection, open_ok|
  channel = AMQP::Channel.new(connection)
  queue = AMQP::Queue.new(channel, "images.resize", :durable => true)

  connection.close {
    EventMachine.stop { exit }
  }
end
| 22.4375 | 74 | 0.699164 |
e98876a82a5d3165f57523becaf10fc173438661 | 4,609 | # RSpec for BioRuby-GFF3-Plugin. Run with something like:
#
# rspec -I ../bioruby/lib/ spec/gff3_assemble3_spec.rb
#
# Copyright (C) 2010,2011 Pjotr Prins <[email protected]>
#
$: << "../lib"
require 'bio-gff3'
include Bio::GFFbrowser
GFF3FILE3="test/data/gff/test-cds.gff3"
# Each example indexes every CDS (record list + parent component) and then
# checks nucleotide assembly (:raw genomic order vs :codonize coding order)
# and the amino-acid translation against known fixture sequences.
describe GFF3, "Assemble CDS (extra checks)" do

  before :all do
    gff3 = Bio::GFFbrowser::GFF3.new(GFF3FILE3)
    @gff = gff3.assembler
    @gff.parse
  end

  it "should translate gene MhA1_Contig1040.frz3.gene29" do
    @contigsequence = @gff.sequencelist["MhA1_Contig1040"]
    @componentlist = {}
    @cdslist = {}
    # Index every CDS by id.
    @gff.each_CDS do | id, reclist, component |
      @componentlist[id] = component
      @cdslist[id] = reclist
    end
    name = "cds:MhA1_Contig1040.frz3.gene29"
    recs = @cdslist[name]
    component = @componentlist[name]
    ntseq = @gff.assemble(@contigsequence,component.start,recs,:raw=>true)
    ntseq.should == "TTAATTAATTTGCCTAGAAAAACAAAGGCATAACATGCTTGCAGTCATCATACGGTAAGAGAGAAACCAACGATATGTTAATAATGTTGATGGGGGAATATCCTCATTAGAATTCTTTTTTGGGTGAATTGAAATTGCCATATTATTAGTATTATTAGAAAATATTAAATTTGTTGATAA"
    ntseq = @gff.assemble(@contigsequence,component.start,recs,:codonize=>true)
    ntseq.should == "TTATCAACAAATTTAATATTTTCTAATAATACTAATAATATGGCAATTTCAATTCACCCAAAAAAGAATTCTAATGAGGATATTCCCCCATCAACATTATTAACATATCGTTGGTTTCTCTCTTACCGTATGATGACTGCAAGCATGTTATGCCTTTGTTTTTCTAGGCAAATTAATTAA"
    aaseq = @gff.assembleAA(@contigsequence,component.start,recs)
    aaseq.should == "LSTNLIFSNNTNNMAISIHPKKNSNEDIPPSTLLTYRWFLSYRMMTASMLCLCFSRQIN*"
  end

  it "should translate gene MhA1_Contig2992.frz3.gene1" do
    @contigsequence = @gff.sequencelist["MhA1_Contig2992"]
    @componentlist = {}
    @cdslist = {}
    @gff.each_CDS do | id, reclist, component |
      @componentlist[id] = component
      @cdslist[id] = reclist
    end
    name = "cds:MhA1_Contig2992.frz3.gene1"
    recs = @cdslist[name]
    component = @componentlist[name]
    # ntseq = @gff.assemble(@contigsequence,component.start,recs,:raw=>true)
    # ntseq.should == ""
    ntseq = @gff.assemble(@contigsequence,component.start,recs,:codonize=>true)
    ntseq.should == "AAAATTAATAAAAAAATAAATGATAATTCTTTTAATATTCAATCTGATTCGAATGAAAATTTGTTTAATGATGGAATTAATTCTGAACAAAATGAAGACAATATAGCAACAAAAAAAGGCAACAAAAAATTCGGTAAAAATCAAAAAGAAGGAAATAAAGAGTTGGATATTCAAAGTGAAGGTTTTGATAATAATGAAATACCTTCAAAAGAAAGCAAAAAACAAATAAGTAATTTTGGGGATAATGAAAGTGAATATGAAAAAGAAGAGGATAATAGAAAAAAGAAAGGGAAAAAAGGAATGATAGAAAAGTATGAATTAGGAAGGAATAAAGGAAGGGATAAAAATGAAAGAAATAAGGCTTCTGAAAGGTTTGATGAGCAGAATCAAGACAGAAATAATCAACGTGATAGTTTTGATTCTGGCAATAATGATAAATCACAAAGAGGCTTAGATAGCGGCACATTAGATGGAACAAATAATTTAAAAAGATCGAATGATGATCAATTACCAGAATTTTTGAAAACGGCCAGTCTCTCAGAGCGTCAGAAATTTCTTCAACTTGAAGCAGAAAATGACAGGTCCAAGTCTTCTATACGAAGAGATAAACAGAATTGGGCTGATCAACAAGGGCAGAGAATTTCTGATCTTTATAAACAATTTCAACAATCTTTACAACAAAAAGAAAAACAATTTAAAAGTGAACGTCAACGAAATGTTCAAATTAAATTAAGCAGAAATGCACAGAATGTTGATAAAAGAATTCAGGATCTTCTGAATAATCCTGATATTGCTGAAAGAGCTTTAATTCTTCAAATTGAACAAATCCTCGGCGGTACAGACGATAGTATTCGTCAGGAATTACAAAGACAAATATCTGTTATTGGACCATTAGATGGAAATATACCGCCAAATCTTACATAG"
    aaseq = @gff.assembleAA(@contigsequence,component.start,recs)
    aaseq.should == "KINKKINDNSFNIQSDSNENLFNDGINSEQNEDNIATKKGNKKFGKNQKEGNKELDIQSEGFDNNEIPSKESKKQISNFGDNESEYEKEEDNRKKKGKKGMIEKYELGRNKGRDKNERNKASERFDEQNQDRNNQRDSFDSGNNDKSQRGLDSGTLDGTNNLKRSNDDQLPEFLKTASLSERQKFLQLEAENDRSKSSIRRDKQNWADQQGQRISDLYKQFQQSLQQKEKQFKSERQRNVQIKLSRNAQNVDKRIQDLLNNPDIAERALILQIEQILGGTDDSIRQELQRQISVIGPLDGNIPPNLT*"
  end

  it "should fix Wormbase error MhA1_Contig3426.frz3.gene1" do
    @contigsequence = @gff.sequencelist["MhA1_Contig3426"]
    @componentlist = {}
    @cdslist = {}
    @gff.each_CDS do | id, reclist, component |
      @componentlist[id] = component
      @cdslist[id] = reclist
    end
    name = "cds:MhA1_Contig3426.frz3.gene1"
    recs = @cdslist[name]
    component = @componentlist[name]
    # :raw should not fix
    ntseq = @gff.assemble(@contigsequence,component.start,recs,:raw=>true)
    ntseq.should == "GCATCCAACAACAACAATTAGAAGTCTTTCCCAGCTCCTCCTCTGCCCCTCAGCAACAACAATACCCAGCGCAGCAGCTTCAATTAGTTACTCCTTTTATTGCATGCATAGCAGATGAATTGAGGGAGTTGATAGATGAAATGCGTATGTTTTAG"
    # :fix trims to a proper reading frame before codonizing.
    ntseq = @gff.assemble(@contigsequence,component.start,recs,:codonize=>true,:fix=>true)
    ntseq.should == "ATCCAACAACAACAATTAGAAGTCTTTCCCAGCTCCTCCTCTGCCCCTCAGCAACAACAATACCCAGCGCAGCAGCTTCAATTAGTTACTCCTTTTATTGCATGCATAGCAGATGAATTGAGGGAGTTGATAGATGAAATGCGTATGTTTTAG"
    ntseq.size.should == 153
    aaseq = @gff.assembleAA(@contigsequence,component.start,recs,:fix=>true)
    aaseq.should == "IQQQQLEVFPSSSSAPQQQQYPAQQLQLVTPFIACIADELRELIDEMRMF*"
  end
end
| 56.207317 | 946 | 0.812541 |
e2d71ddf5f6ca1f1caabd444247caf0c4be32757 | 522 | class AddCachingRows < ActiveRecord::Migration[5.2]
def change
  # we can flatten the tables (to save free rows)
  {
    authors: [:orgs_cached, :sources_cached],
    orgs: [:authors_cached],
    imports: [:sources_cached]
  }.each do |table, columns|
    columns.each do |column|
      add_column table, column, :jsonb unless column_exists?(table, column)
    end
  end
end
end
| 40.153846 | 99 | 0.735632 |
bfd2510be242073f985af286a3d7c6ee990ce44e | 1,231 | class Yadm < Formula
desc "Yet Another Dotfiles Manager"
homepage "https://yadm.io/"
url "https://github.com/TheLocehiliosan/yadm/archive/2.4.0.tar.gz"
sha256 "37efea63dc9c77951433c841bde34e0a042dc561e8c026e690d146e768701b0f"
license "GPL-3.0"
bottle :unneeded
# Installs via the Makefile and wires up shell completions.
def install
  system "make", "install", "PREFIX=#{prefix}"
  bash_completion.install "completion/yadm.bash_completion"
  zsh_completion.install "completion/yadm.zsh_completion" => "_yadm"
end
# Smoke test: init a yadm repo, toggle config, and make a commit.
test do
  system bin/"yadm", "init"
  assert_predicate testpath/".config/yadm/repo.git/config", :exist?, "Failed to init repository."
  assert_match testpath.to_s, shell_output("#{bin}/yadm gitconfig core.worktree")

  # disable auto-alt
  system bin/"yadm", "config", "yadm.auto-alt", "false"
  assert_match "false", shell_output("#{bin}/yadm config yadm.auto-alt")

  # Stage a file and verify a commit can be created and listed.
  (testpath/"testfile").write "test"
  system bin/"yadm", "add", "#{testpath}/testfile"
  system bin/"yadm", "gitconfig", "user.email", "[email protected]"
  system bin/"yadm", "gitconfig", "user.name", "Test User"
  system bin/"yadm", "commit", "-m", "test commit"
  assert_match "test commit", shell_output("#{bin}/yadm log --pretty=oneline 2>&1")
end
end
| 35.171429 | 99 | 0.692933 |
7a5baec283b5bc74883ce78b8bc36ce1c0db8edc | 2,145 | class Lmod < Formula
desc "Lua-based environment modules system to modify PATH variable"
homepage "https://lmod.readthedocs.io"
url "https://github.com/TACC/Lmod/archive/8.4.7.tar.gz"
sha256 "51788fd935c2278627fe4c0df42e4f2d19fa5de6bbf5ec7c7ea51ab9112be40c"
license "MIT"
bottle do
cellar :any_skip_relocation
sha256 "32f748dc0704604f39c924c4ab16793a77f6b8e01a51765fd2407161afaba5b1" => :catalina
sha256 "15c2ef944f3314fe0f0dc3683d34859123ab3516ef2f1e0e7882cda75822f65b" => :mojave
sha256 "403ff3e37a88096680a81acfdf483a7f63c704102df391d9527cd0bfb4e84dd1" => :high_sierra
end
depends_on "luarocks" => :build
depends_on "pkg-config" => :build
depends_on "lua"
resource "luafilesystem" do
url "https://github.com/keplerproject/luafilesystem/archive/v1_8_0.tar.gz"
sha256 "16d17c788b8093f2047325343f5e9b74cccb1ea96001e45914a58bbae8932495"
end
resource "luaposix" do
url "https://github.com/luaposix/luaposix/archive/v35.0.tar.gz"
sha256 "a4edf2f715feff65acb009e8d1689e57ec665eb79bc36a6649fae55eafd56809"
end
# Vendors the Lua dependencies (luafilesystem, luaposix) into libexec via
# luarocks, pointing LUA_PATH/LUA_CPATH at them before configure/install.
def install
  luapath = libexec/"vendor"
  ENV["LUA_PATH"] = "?.lua;" \
                    "#{luapath}/share/lua/5.3/?.lua;" \
                    "#{luapath}/share/lua/5.3/?/init.lua"
  ENV["LUA_CPATH"] = "#{luapath}/lib/lua/5.3/?.so"

  resources.each do |r|
    r.stage do
      system "luarocks", "make", "--tree=#{luapath}"
    end
  end

  system "./configure", "--with-siteControlPrefix=yes", "--prefix=#{prefix}"
  system "make", "install"
end
# Post-install note: Lmod only works once its init script is sourced by
# the user's shell.
def caveats
  <<~EOS
    To use Lmod, you should add the init script to the shell you are using.
    For example, the bash setup script is here: #{opt_prefix}/init/profile
    and you can source it in your bash setup or link to it.
    If you use fish, use #{opt_prefix}/init/fish, such as:
    ln -s #{opt_prefix}/init/fish ~/.config/fish/conf.d/00_lmod.fish
  EOS
end
# Smoke test: source the sh init script, then spider the core modulefiles.
test do
  system "#{prefix}/init/sh"
  output = shell_output("#{prefix}/libexec/spider #{prefix}/modulefiles/Core/")
  assert_match "lmod", output
  assert_match "settarg", output
end
end
| 33 | 93 | 0.694172 |
e88ed9f7160f3d02c76bfb81826b55f14a8ff793 | 26 | require "omniauth/wechat"
| 13 | 25 | 0.807692 |
6a9f6c456376c24ec089f9ab06b37beb92b2b5c7 | 3,076 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
# Applies request parameters (grouping, sorting, filters, columns, sums,
# timeline and hierarchy settings) to a query object and validates it.
class UpdateQueryFromParamsService
  def initialize(query, user)
    self.query = query
    self.current_user = user
  end

  # Runs every apply step against +params+, then validates the query.
  # Returns a ServiceResult wrapping the query on success or the query's
  # errors on failure.
  def call(params)
    %i[apply_group_by
       apply_sort_by
       apply_filters
       apply_columns
       apply_sums
       apply_timeline
       apply_hierarchy
       disable_hierarchy_when_only_grouped_by].each { |step| send(step, params) }

    if query.valid?
      ServiceResult.new(success: true,
                        result: query)
    else
      ServiceResult.new(errors: query.errors)
    end
  end

  private

  def apply_group_by(params)
    return unless params.key?(:group_by)

    query.group_by = params[:group_by]
  end

  def apply_sort_by(params)
    return unless params[:sort_by]

    query.sort_criteria = params[:sort_by]
  end

  # Replaces the query's filters wholesale when filters are supplied.
  def apply_filters(params)
    return unless params[:filters]

    query.filters = []
    params[:filters].each do |filter|
      query.add_filter(filter[:field], filter[:operator], filter[:values])
    end
  end

  def apply_columns(params)
    return unless params[:columns]

    query.column_names = params[:columns]
  end

  def apply_sums(params)
    return unless params.key?(:display_sums)

    query.display_sums = params[:display_sums]
  end

  def apply_timeline(params)
    query.timeline_visible = params[:timeline_visible] if params.key?(:timeline_visible)
    query.timeline_zoom_level = params[:timeline_zoom_level] if params.key?(:timeline_zoom_level)
    query.timeline_labels = params[:timeline_labels] if params.key?(:timeline_labels)
  end

  def apply_hierarchy(params)
    return unless params.key?(:show_hierarchies)

    query.show_hierarchies = params[:show_hierarchies]
  end

  # Grouping without an explicit hierarchy setting switches hierarchy off.
  def disable_hierarchy_when_only_grouped_by(params)
    query.show_hierarchies = false if params.key?(:group_by) && !params.key?(:show_hierarchies)
  end

  attr_accessor :query,
                :current_user,
                :params
end
| 28.747664 | 97 | 0.727243 |
6aa306abe6495aaf635b28203cbf2ea981bdb684 | 1,064 | # frozen_string_literal: true
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'spok/version'
# Gem specification for spok — utilities for working with periods of dates.
Gem::Specification.new do |spec|
  spec.name          = 'spok'
  spec.version       = Spok::VERSION
  spec.authors       = ['Magnetis Staff']
  spec.email         = ['[email protected]']
  spec.summary       = 'A gem to work with periods of dates'
  spec.description   = %q{
    Work with period of dates in a easy way.
    This gem provides functionalities like workdays, date as string and period to calendars.
  }
  spec.homepage      = 'https://github.com/magnetis/spok'
  spec.license       = 'Apache 2.0'

  # Package everything; executables and test files derived from the file list.
  spec.files         = Dir['**/*']
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ['lib']

  spec.add_dependency 'activesupport', '~> 5.1'

  spec.add_development_dependency 'bundler'
  spec.add_development_dependency 'rake'
  spec.add_development_dependency 'rspec', '~> 3'
end
| 35.466667 | 92 | 0.663534 |
1824fa7f16602c43a8fc9bab3799918bf48078fc | 120 | require 'test_helper'
# Placeholder test case for IpComm — replace the commented stub with real tests.
class IpCommTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
| 15 | 42 | 0.7 |
61488dadb9fe9447c070a3c3bef39fb3ceb8e9b8 | 832 | require "codeclimate-test-reporter"
require "vcr"
require "webmock/rspec"
require "sidekiq/testing"
require "capybara/rspec"
CodeClimate::TestReporter.start
RSpec.configure do |config|
  config.expect_with :rspec do |expectations|
    expectations.include_chain_clauses_in_custom_matcher_descriptions = true
  end

  config.mock_with :rspec do |mocks|
    # Stubs on partial doubles must correspond to real methods.
    mocks.verify_partial_doubles = true
  end

  # Database cleaning: one truncation before the whole suite, then a
  # transaction started before and rolled back after every example.
  config.before(:suite) do
    DatabaseCleaner.clean_with(:truncation)
  end

  config.before(:each) do
    DatabaseCleaner.strategy = :transaction
  end

  config.before(:each) do
    DatabaseCleaner.start
  end

  config.after(:each) do
    DatabaseCleaner.clean
  end
end
# VCR records/replays HTTP through WebMock; real connections are allowed
# when no cassette is inserted.
VCR.configure do |c|
  c.cassette_library_dir = "spec/fixtures/vcr_cassettes"
  c.allow_http_connections_when_no_cassette = true
  c.hook_into :webmock
end
| 21.333333 | 76 | 0.768029 |
bb00b5d3a27207bd6345fb0cddf91bd7736766ee | 96 | # desc "Explaining what the task does"
# task :frendid_api_client do
# # Task goes here
# end
| 19.2 | 38 | 0.708333 |
e9532f1598a38bb8e9548bc2fa75bad98fe96237 | 477 | module WordWalker
class Sequence
  attr_reader :letter_count
  attr_reader :passage
  attr_reader :words

  # Builds a word sequence from +passage+ (upcased). +passes+ is accepted
  # for API compatibility but is not used here.
  def initialize(passage, passes: 10)
    @passage = passage
    @words = parse_passage(@passage.to_str.upcase)
  end

  protected

  # Splits the passage into Word objects with 1-based ids, stripping
  # digits and non-word characters; records the remaining letter total
  # in @letter_count as a side effect.
  def parse_passage(passage)
    @letter_count = passage.gsub(/\d|\W/, "").length
    passage.split.each_with_index.map do |raw, index|
      Word.new(raw.gsub(/\d|\W/, ""), index + 1)
    end
  end
end
end
| 19.08 | 54 | 0.60587 |
f7c608cfc3082f338127456158f7a537beadc4ea | 454 | describe Travis::Yml, 'perl' do
subject { described_class.apply(parse(yaml)) }

# Normalization of the `perl:` key: both a bare string and a sequence of
# strings should serialize to an array, with no warnings emitted.
describe 'perl' do
  describe 'given a seq of strs' do
    yaml %(
perl:
- str
)
    it { should serialize_to perl: ['str'] }
    it { should_not have_msg }
  end

  describe 'given a str' do
    yaml %(
perl: str
)
    it { should serialize_to perl: ['str'] }
    it { should_not have_msg }
  end
end
end
| 19.73913 | 48 | 0.539648 |
91306a323fc5eb4fe573865e5228cb224e8b89e9 | 153 | require 'test/unit'
class SexyActionsTest < Test::Unit::TestCase
# Replace this with your real tests.
test "the truth" do
assert true
end
end
| 17 | 44 | 0.712418 |
ff273dae05e02ff6d13d3214402a4545d7ae8119 | 478 | class Date
# Full weekday name, e.g. "Monday".
def day_name
  strftime("%A")
end

# NOTE(review): tuesday..friday below build on #monday, which is not
# defined in this file — presumably ActiveSupport's Date#monday
# (beginning of week). Confirm that dependency is loaded.
def tuesday
  monday + 1
end

def wednesday
  monday + 2
end

def thursday
  monday + 3
end

def friday
  monday + 4
end
# True when this date has the same ISO commercial week number as today.
# NOTE(review): only #cweek is compared, so the same week number in a
# different year also matches — confirm that is intended.
def current_week?
  cweek == Date.today.cweek
end

# True when the date is at most 7 days after today. Past dates also
# satisfy this, since their difference is negative.
def less_than_a_week_away?
  (self - Date.today).to_i <= 7
end

# Combines this date with an "HH:MM"-style time-of-day string into a
# cutoff timestamp. NOTE(review): DateTime#utc is not stdlib
# (ActiveSupport?) — confirm it is available here.
def registration_cutoff_datetime(hour_of_day)
  t = Time.parse(hour_of_day)
  DateTime.new(year, month, day, t.hour, t.min, t.sec).utc
end
end
| 12.918919 | 60 | 0.635983 |
e9a6facb5b8a9092d22aa06a54435d0d5e0c7d39 | 20,520 | # encoding: UTF-8
# frozen_string_literal: true
describe WalletService do
let!(:blockchain) { create(:blockchain, 'fake-testnet') }
let!(:currency) { create(:currency, :fake) }
let(:wallet) { create(:wallet, :fake_hot) }
let(:fake_wallet_adapter) { FakeWallet.new }
let(:fake_blockchain_adapter) { FakeBlockchain.new }
let(:service) { WalletService.new(wallet) }
before do
Peatio::Blockchain.registry.expects(:[])
.with(:fake)
.returns(fake_blockchain_adapter.class)
.at_least_once
Peatio::Wallet.registry.expects(:[])
.with(:fake)
.returns(fake_wallet_adapter.class)
.at_least_once
Blockchain.any_instance.stubs(:blockchain_api).returns(BlockchainService.new(blockchain))
end
context :create_address! do
let(:account) { create(:member, :level_3, :barong).get_account(currency) }
let(:blockchain_address) do
{ address: :fake_address,
secret: :changeme,
details: { uid: account.member.uid } }
end
before do
service.adapter.expects(:create_address!).returns(blockchain_address)
end
it 'creates address' do
expect(service.create_address!(account, nil)).to eq blockchain_address
end
end
context :build_withdrawal! do
let(:withdrawal) { OpenStruct.new(rid: 'fake-address', amount: 100) }
let(:transaction) do
Peatio::Transaction.new(hash: '0xfake',
to_address: withdrawal.rid,
amount: withdrawal.amount,
currency_id: currency.id)
end
before do
service.adapter.expects(:create_transaction!).returns(transaction)
end
it 'sends withdrawal' do
expect(service.build_withdrawal!(withdrawal)).to eq transaction
end
end
context :spread_between_wallets do
# Single wallet:
# * Deposit fits exactly.
# * Deposit doesn't fit.
# Two wallets:
# * Deposit fits to first wallet.
# * Deposit fits to second wallet.
# * Partial spread between first and second.
# * Deposit doesn't fit to both wallets.
# * Negative min_collection_amount.
# Three wallets:
# * Partial spread between first and second.
# * Partial spread between first and third.
# * Partial spread between first, second and third.
# * Deposit doesn't fit to all wallets.
let(:amount) { 1.2 }
context 'Single wallet' do
context 'single wallet available' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 8.8,
max_balance: 10,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: amount,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to single wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Single wallet is full' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10,
max_balance: 10,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: amount,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
end
context 'Two wallets' do
context 'Deposit fits to first wallet' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 5,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 100.0,
max_balance: 100,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: amount,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Deposit fits to second wallet' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 95,
max_balance: 100,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-2',
status: 'pending',
amount: amount,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Partial spread between first and second' do
let(:amount) { 10 }
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 5,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 90,
max_balance: 100,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: 5,
currency_id: currency.id },
{ to_address: 'destination-wallet-2',
status: 'pending',
amount: 5,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Two wallets are full' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 100,
max_balance: 100,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-2',
status: 'pending',
amount: 1.2,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'different min_collection_amount' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 100,
max_balance: 100,
min_collection_amount: 2 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-2',
status: 'pending',
amount: 1.2,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to single wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'tiny min_collection_amount' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10,
max_balance: 10,
min_collection_amount: 2 },
{ address: 'destination-wallet-2',
balance: 100,
max_balance: 100,
min_collection_amount: 3 }]
end
let(:expected_spread) { [] }
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to single wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
end
context 'Three wallets' do
context 'Partial spread between first and second' do
let(:amount) { 10 }
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 5,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 95,
max_balance: 100,
min_collection_amount: 1 },
{ address: 'destination-wallet-3',
balance: 1001.0,
max_balance: 1000,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: 5,
currency_id: currency.id },
{ to_address: 'destination-wallet-2',
status: 'pending',
amount: 5,
currency_id: currency.id}]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Partial spread between first and third' do
let(:amount) { 10 }
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 5,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 100,
max_balance: 100,
min_collection_amount: 1 },
{ address: 'destination-wallet-3',
balance: 995.0,
max_balance: 1000,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: 5,
currency_id: currency.id },
{ to_address: 'destination-wallet-3',
status: 'pending',
amount: 5,
currency_id: currency.id}]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Three wallets are full' do
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 10.1,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 100.0,
max_balance: 100,
min_collection_amount: 1 },
{ address: 'destination-wallet-3',
balance: 1001.0,
max_balance: 1000,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-3',
status: 'pending',
amount: amount,
currency_id: currency.id }]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Partial spread between first, second and third' do
let(:amount) { 10 }
let(:destination_wallets) do
[{ address: 'destination-wallet-1',
balance: 7,
max_balance: 10,
min_collection_amount: 1 },
{ address: 'destination-wallet-2',
balance: 97,
max_balance: 100,
min_collection_amount: 1 },
{ address: 'destination-wallet-3',
balance: 995.0,
max_balance: 1000,
min_collection_amount: 1 }]
end
let(:expected_spread) do
[{ to_address: 'destination-wallet-1',
status: 'pending',
amount: 3,
currency_id: currency.id },
{ to_address: 'destination-wallet-2',
status: 'pending',
amount: 3,
currency_id: currency.id },
{ to_address: 'destination-wallet-3',
status: 'pending',
amount: 4,
currency_id: currency.id}]
end
subject { service.send(:spread_between_wallets, amount, destination_wallets) }
it 'spreads everything to last wallet' do
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
end
end
context :spread_deposit do
let!(:deposit_wallet) { create(:wallet, :fake_deposit) }
let!(:hot_wallet) { create(:wallet, :fake_hot) }
let!(:cold_wallet) { create(:wallet, :fake_cold) }
let(:service) { WalletService.new(deposit_wallet) }
let(:amount) { 2 }
let(:deposit) { create(:deposit_btc, amount: amount, currency: currency) }
let(:expected_spread) do
[{ to_address: 'fake-cold',
status: 'pending',
amount: '2.0',
currency_id: currency.id }]
end
subject { service.spread_deposit(deposit) }
context 'hot wallet is full and cold wallet balance is not available' do
before do
# Hot wallet balance is full and cold wallet balance is not available.
Wallet.any_instance.stubs(:current_balance).returns({ deposit.currency_id => hot_wallet.max_balance }, { deposit.currency_id => 'N/A' })
end
it 'spreads everything to cold wallet' do
expect(Wallet.active.withdraw.where(currency_id: deposit.currency_id).count).to eq 2
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'hot wallet is full, warm and cold wallet balances are not available' do
let!(:warm_wallet) { create(:wallet, :fake_warm) }
before do
# Hot wallet is full, warm and cold wallet balances are not available.
Wallet.any_instance.stubs(:current_balance).returns({ deposit.currency_id => hot_wallet.max_balance }, { deposit.currency_id => 'N/A' }, { deposit.currency_id => 'N/A' })
end
it 'skips warm wallet and spreads everything to cold wallet' do
expect(Wallet.active.withdraw.where(currency_id: deposit.currency_id).count).to eq 3
expect(subject.map(&:as_json).map(&:symbolize_keys)).to contain_exactly(*expected_spread)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'there is no active wallets' do
before { Wallet.stubs(:active).returns(Wallet.none) }
it 'raises an error' do
expect{ subject }.to raise_error(StandardError)
end
end
end
context :collect_deposit do
let!(:deposit_wallet) { create(:wallet, :fake_deposit) }
let!(:hot_wallet) { create(:wallet, :fake_hot) }
let!(:cold_wallet) { create(:wallet, :fake_cold) }
let(:amount) { 2 }
let(:deposit) { create(:deposit_btc, amount: amount, currency: currency) }
let(:fake_wallet_adapter) { FakeWallet.new }
let(:service) { WalletService.new(deposit_wallet) }
context 'Spread deposit with single entry' do
let(:spread_deposit) do [{ to_address: 'fake-cold',
amount: '2.0',
currency_id: currency.id }]
end
let(:transaction) do
[Peatio::Transaction.new(hash: '0xfake',
to_address: cold_wallet.address,
amount: deposit.amount,
currency_id: currency.id)]
end
subject { service.collect_deposit!(deposit, spread_deposit) }
before do
service.adapter.expects(:create_transaction!).returns(transaction.first)
end
it 'creates single transaction' do
expect(subject).to contain_exactly(*transaction)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context 'Spread deposit with two entry' do
let(:spread_deposit) do [{ to_address: 'fake-hot',
amount: '2.0',
currency_id: currency.id },
{ to_address: 'fake-hot',
amount: '2.0',
currency_id: currency.id }]
end
let(:transaction) do
[{ hash: '0xfake',
to_address: hot_wallet.address,
amount: deposit.amount,
currency_id: currency.id },
{ hash: '0xfake',
to_address: cold_wallet.address,
amount: deposit.amount,
currency_id: currency.id }].map { |t| Peatio::Transaction.new(t)}
end
subject { service.collect_deposit!(deposit, spread_deposit) }
before do
service.adapter.expects(:create_transaction!).with(spread_deposit.first, subtract_fee: true).returns(transaction.first)
service.adapter.expects(:create_transaction!).with(spread_deposit.second, subtract_fee: true).returns(transaction.second)
end
it 'creates two transactions' do
expect(subject).to contain_exactly(*transaction)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
end
context :deposit_collection_fees do
let!(:fee_wallet) { create(:wallet, :fake_fee) }
let!(:deposit_wallet) { create(:wallet, :fake_deposit) }
let(:amount) { 2 }
let(:deposit) { create(:deposit_btc, amount: amount, currency: currency) }
let(:fake_wallet_adapter) { FakeWallet.new }
let(:service) { WalletService.new(fee_wallet) }
let(:spread_deposit) do [{ to_address: 'fake-cold',
amount: '2.0',
currency_id: currency.id }]
end
let(:transactions) do
[Peatio::Transaction.new( hash: '0xfake',
to_address: deposit.address,
amount: '0.01',
currency_id: currency.id)]
end
subject { service.deposit_collection_fees!(deposit, spread_deposit) }
context 'Adapter collect fees for transaction' do
before do
service.adapter.expects(:prepare_deposit_collection!).returns(transactions)
end
it 'returns transaction' do
expect(subject).to contain_exactly(*transactions)
expect(subject).to all(be_a(Peatio::Transaction))
end
end
context "Adapter doesn't perform any actions before collect deposit" do
it 'retunrs empty array' do
expect(subject.blank?).to be true
end
end
end
end
| 32.51981 | 178 | 0.584162 |
module Wukong

  # A dataflow: a Hanuman tree of processors wired together.
  class Dataflow < Hanuman::Tree
    # Surface the builder's description (when set) on the given settings
    # object so it shows up in command-line help.
    def self.configure(settings)
      settings.description = builder.description if builder.description
    end
  end

  # Builder DSL used to construct Wukong::Dataflow graphs.
  class DataflowBuilder < Hanuman::TreeBuilder

    # Gets, or sets when +desc+ is given, the dataflow's description.
    def description(desc = nil)
      @description = desc if desc
      @description
    end

    # Stages built by this builder live under Wukong::Dataflow.
    def namespace() Wukong::Dataflow ; end

    # Merges DSL options and the given block (as the :action) into +stage+,
    # first making its label unique within this graph by appending or
    # incrementing a numeric suffix (:map -> :map_1 -> :map_2 ...).
    # Returns the stage.
    def handle_dsl_arguments_for(stage, *args, &action)
      options = args.extract_options!
      while stages.include?(stage.label)
        parts = stage.label.to_s.split('_')
        if parts.last.to_i > 0
          parts[-1] = parts.last.to_i + 1
        else
          parts.push(1)
        end
        stage.label = parts.map(&:to_s).join('_').to_sym
      end
      stage.merge!(options.merge(action: action).compact)
      stage.graph = self
      stage
    end

    # DSL sugar: `builder.some_stage(...)` reconfigures an already-declared
    # stage by name.
    def method_missing(name, *args, &blk)
      if stages[name]
        handle_dsl_arguments_for(stages[name], *args, &blk)
      else
        super
      end
    end

    # BUGFIX: keep respond_to? consistent with the method_missing DSL above
    # (method_missing without respond_to_missing? breaks respond_to?/method).
    def respond_to_missing?(name, include_private = false)
      !!stages[name] || super
    end
  end
end
| 23.022727 | 71 | 0.613031 |
f75576fd75ac6bc2b751ad66aacb749381ce8b13 | 441 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Subscriptions::Mgmt::V2016_06_01
module Models
#
# Defines values for SubscriptionState
#
module SubscriptionState
Enabled = "Enabled"
Warned = "Warned"
PastDue = "PastDue"
Disabled = "Disabled"
Deleted = "Deleted"
end
end
end
| 23.210526 | 70 | 0.678005 |
module RR
  module WildcardMatchers
    # Wildcard matcher that delegates the match decision to a caller-supplied
    # block: any value for which the block returns truthy is a match.
    class Satisfy
      # The callable used to decide whether a value matches.
      attr_reader :expectation_proc

      # expectation_proc - a callable invoked with the candidate value.
      def initialize(expectation_proc)
        @expectation_proc = expectation_proc
      end

      # True when +other+ is this matcher itself, or when the expectation
      # block returns a truthy value for +other+ (coerced to a boolean).
      def wildcard_match?(other)
        self == other || !!expectation_proc.call(other)
      end

      def inspect
        "satisfy {block}"
      end

      # Two Satisfy matchers are equal when they wrap equal expectation procs.
      def ==(other)
        other.is_a?(self.class) && expectation_proc == other.expectation_proc
      end
      alias_method :eql?, :==
    end
  end
end
616ab330c39c49e09e2836dbe70d8d45a772f2c1 | 2,916 | # frozen_string_literal: true
module Ferrum
  class Page
    # Network-layer helpers for a CDP page: request interception,
    # HTTP/proxy basic-auth handling and request abortion.
    module Net
      # Authentication challenge sources we know how to answer.
      AUTHORIZE_TYPE = %i[server proxy]
      # Resource types accepted by Network.setRequestInterception patterns.
      RESOURCE_TYPES = %w[Document Stylesheet Image Media Font Script TextTrack
                          XHR Fetch EventSource WebSocket Manifest
                          SignedExchange Ping CSPViolationReport Other]

      # Registers credentials used to answer basic-auth challenges of the
      # given +type+ (:server or :proxy). Installs a request interceptor
      # that answers the first challenge per request and cancels repeats
      # (see #authorized_response) to avoid an infinite challenge loop.
      def authorize(user:, password:, type: :server)
        unless AUTHORIZE_TYPE.include?(type)
          raise ArgumentError, ":type should be in #{AUTHORIZE_TYPE}"
        end

        # Interception ids already answered, per challenge type.
        @authorized_ids ||= {}
        @authorized_ids[type] ||= []

        intercept_request do |request, index, total|
          if request.auth_challenge?(type)
            response = authorized_response(@authorized_ids[type],
                                           request.interception_id,
                                           user, password)

            @authorized_ids[type] << request.interception_id
            request.continue(authChallengeResponse: response)
          elsif index + 1 < total
            next # There are other callbacks that can handle this, skip
          else
            request.continue
          end
        end
      end

      # Enables CDP request interception for URLs matching +pattern+
      # (optionally narrowed to one of RESOURCE_TYPES) and, when a block is
      # given, registers it as the interception callback.
      def intercept_request(pattern: "*", resource_type: nil, &block)
        pattern = { urlPattern: pattern }
        if resource_type && RESOURCE_TYPES.include?(resource_type.to_s)
          pattern[:resourceType] = resource_type
        end

        command("Network.setRequestInterception", patterns: [pattern])
        on_request_intercepted(&block) if block_given?
      end

      # Wraps each Network.requestIntercepted event in an InterceptedRequest
      # and yields it (plus the callback's index/total) to the block.
      def on_request_intercepted(&block)
        @client.on("Network.requestIntercepted") do |params, index, total|
          request = Network::InterceptedRequest.new(self, params)
          block.call(request, index, total)
        end
      end

      # Resumes an intercepted request, merging any extra CDP options.
      def continue_request(interception_id, options = nil)
        options ||= {}
        options = options.merge(interceptionId: interception_id)
        command("Network.continueInterceptedRequest", **options)
      end

      # Cancels an intercepted request with reason "Aborted".
      def abort_request(interception_id)
        continue_request(interception_id, errorReason: "Aborted")
      end

      private

      # Extends the page's CDP subscriptions: when the main request we are
      # waiting on is cancelled, release the waiters and refresh the
      # document id.
      def subscribe
        super if defined?(super)

        @client.on("Network.loadingFailed") do |params|
          # Free mutex as we aborted main request we are waiting for
          if params["requestId"] == @request_id && params["canceled"] == true
            @event.set
            @document_id = get_document_id
          end
        end
      end

      # Builds the CDP authChallengeResponse: cancel when this interception
      # was already answered (wrong credentials would loop forever), provide
      # the credentials when we have both, otherwise cancel.
      def authorized_response(ids, interception_id, username, password)
        if ids.include?(interception_id)
          { response: "CancelAuth" }
        elsif username && password
          { response: "ProvideCredentials",
            username: username,
            password: password }
        else
          { response: "CancelAuth" }
        end
      end
    end
  end
end
| 32.043956 | 79 | 0.600823 |
bba7205bf6acf830ea1e8b02a309ad1332fdf653 | 2,178 | require 'puppet/resource_api'
# rubocop:disable Style/StringLiterals
# Manage storage containers. A storage container is a logical grouping of related storage objects in a cluster. A storage container corresponds to a vVol datastore in vCenter and is used to group related vVols and track the amount of space that is used/free.
# Registers the powerstore_storage_container resource type with the Puppet
# Resource API; managed remotely (no agent on the storage device).
Puppet::ResourceApi.register_type(
  name: 'powerstore_storage_container',
  features: ['remote_resource'],
# rubocop:disable Lint/UnneededDisable
# rubocop:disable Layout/TrailingWhitespace
  desc: <<-EOS,
Manage storage containers. A storage container is a logical grouping of related storage objects in a cluster. A storage container corresponds to a vVol datastore in vCenter and is used to group related vVols and track the amount of space that is used/free.
EOS
  attributes: {
    ensure: {
      type: "Enum['present', 'absent']",
      desc: 'Whether this resource should be present or absent on the target system.',
      default: 'present',
    },
    # Parameter (not a property): does not map to remote state.
    force: {
      type: "Optional[Boolean]",
      desc: "Normally, deletion of a storage container that is mounted or still contains virtual volumes will be rejected. This option overrides that error and allows the delete to continue. Use with great caution.",
      behaviour: :parameter,
    },
    # Server-assigned identifier; never written by Puppet.
    id: {
      type: "Optional[String]",
      desc: "The unique id of the storage container.",
      behaviour: :read_only,
    },
    name: {
      type: "String[1,64]",
      desc: "Name for the storage container that is unique across all storage containers in the cluster. The name must be between 1 and 64 UTF-8 characters (inclusive), and not more than 127 bytes.",
      behaviour: :namevar,
    },
    quota: {
      type: "Optional[Integer[0,4611686018427387904]]",
      desc: "The number of bytes that can be provisioned against this storage container. This must be a value greater than 10Gb and the default is 0 which means no limit.",
    },
  },
  # NOTE(review): these autorequires look like generator/template boilerplate
  # ($source and apt are not attributes of this type) — confirm intent.
  autorequires: {
    file: '$source', # will evaluate to the value of the `source` attribute
    package: 'apt',
  },
)
| 48.4 | 260 | 0.678145 |
6a6f9922dc7927e5a80d7c5d5b14bb8fa6fe8033 | 168 | require 'rails_helper'
# Model contract for User: activities are destroyed with the user, and a
# name is required.
RSpec.describe User, type: :model do
  it { should have_many(:activities).dependent(:destroy) }
  it { should validate_presence_of(:name) }
end
| 24 | 58 | 0.744048 |
# CRUD controller for Place records.
class PlacesController < ApplicationController
  # GET /places — all places, newest first.
  def index
    @places = Place.order('created_at DESC')
  end

  # Same listing as #index, rendered with inline edit affordances.
  def edit_index
    @places = Place.order('created_at DESC')
  end

  def show
    @place = Place.find(params[:id])
  end

  def new
    @place = Place.new
  end

  # BUGFIX: removed leftover `puts @place.inspect` debug output.
  def edit
    @place = Place.find(params[:id])
  end

  # PATCH/PUT /places/:id
  def update
    @place = Place.find(params[:id])
    respond_to do |format|
      if @place.update(place_params)
        format.html { redirect_to @place, notice: 'Place was successfully updated.' }
      else
        # NOTE(review): the edit_index template presumably reads @places,
        # which is not set on this failure path — confirm it renders
        # correctly (or render :edit instead).
        format.html { render :edit_index }
      end
    end
  end

  # POST /places
  def create
    @place = Place.new(place_params)
    if @place.save
      flash[:success] = "Place added!"
      redirect_to root_path
    else
      render 'new'
    end
  end

  private

  # Strong parameters shared by create/update.
  def place_params
    params.require(:place).permit(:title, :raw_address, :latitude, :longitude, :visited_by)
  end
end
end | 18.88 | 91 | 0.639831 |
bfd18be982609be89e06ae9ce95c880dd20dd68b | 325 | require 'test/unit'
require 'io/nonblock'
$-w = true
require 'kgio'
# Exercises Kgio::Pipe.popen: a non-blocking read signals :wait_readable
# while the child is still sleeping, then data and EOF arrive after it echoes.
class TestPipePopen < Test::Unit::TestCase
  def test_popen
    io = Kgio::Pipe.popen("sleep 1 && echo HI")
    # Child hasn't written yet, so the try-read cannot succeed.
    assert_equal :wait_readable, io.kgio_tryread(2)
    sleep 1.5
    assert_equal "HI\n", io.kgio_read(3)
    # nil signals EOF once the child has exited.
    assert_nil io.kgio_read(5)
  end
end
| 21.666667 | 51 | 0.689231 |
d5012e0bc41c0e07d6f2d2ea5d747e880f90de40 | 1,053 | # coding: utf-8
# Gem packaging for the Type-on-Strap Jekyll theme.
Gem::Specification.new do |spec|
  spec.name          = "type-on-strap"
  spec.version       = "0.6.0"
  spec.authors       = ["Sylhare","Rohan Chandra"]
  spec.email         = ["[email protected]", "[email protected]"]

  spec.summary       = "A simple and responsive jekyll theme template"
  spec.description   = %q{A custom Type Theme template (a free and open-source Jekyll theme). Great for blogs, easy to customize and responsive.}
  spec.homepage      = "https://github.com/sylhare/Type-on-Strap"
  spec.license       = "MIT"

  # Ship only theme assets (js/css/fonts/data), layouts/includes/sass and
  # the top-level docs; everything else in the repo is excluded.
  spec.files         = Dir.glob("**/{*,.*}").select do |f|
    f.match(%r{^(assets/(js|css|fonts|data)|_(includes|layouts|sass)/|(LICENSE|README.md|data.json))}i)
  end

  spec.required_ruby_version = '~> 2.1'

  spec.add_runtime_dependency "jekyll", "~> 3.8.4"
  spec.add_runtime_dependency "jekyll-paginate", "~> 1.1"
  spec.add_runtime_dependency "jekyll-seo-tag", "~> 2.3"

  spec.add_development_dependency "bundler", "~> 1.12"
  spec.add_development_dependency "rake", "~> 12.3"
end
| 37.607143 | 145 | 0.650522 |
e8aa37325b0eaf30439ade456af6ed77fc202480 | 68 | class ClassWithOneIncludedMixin
include ModuleWithNoRelations
end
| 17 | 31 | 0.897059 |
8779a5bb5a958759c85d978d9b739a6a79fef0b6 | 2,326 | require 'ostruct'
module VersionInfo
  # Maps VersionInfo.file_format to the persistence backend class.
  STORAGE_CLASS = {text: TextStorage, yaml: YamlStorage, module: ModuleStorage}

  # Holds version segments (e.g. :major, :minor, :patch, :build, ...) as an
  # OpenStruct and knows how to bump, format, parse and persist them.
  class Data < OpenStruct
    # segments - ordered Array of Symbols naming the version segments.
    def initialize(segments)
      super()
      @segments = segments
      reset
    end

    # Lazily builds the storage backend for the configured file format.
    def storage
      @storage ||= STORAGE_CLASS[VersionInfo.file_format.to_sym].new(self)
    end

    # Path the version data is persisted to (defaults to CWD + backend name).
    def file_name
      @file_name ||= Dir.pwd + '/' + storage.default_file_name
    end

    # Setting a new file name invalidates the memoized storage backend.
    def file_name=(value)
      @file_name = value
      @storage = nil #recreate storage
    end

    # Loads segment values from storage. Returns self.
    def load
      storage.load
      self
    end

    # Persists segment values to storage. Returns self.
    def save
      storage.save
      self
    end

    # Restores every segment to its default (zero) value.
    def reset
      clear
      assign(get_defaults)
    end

    # Bulk-assigns segment values from a Hash.
    def assign(hash)
      marshal_load(hash)
    end

    # Increments +key+ and zeroes any smaller segments among the first three
    # (semver-style). Unknown keys are ignored.
    # BUGFIX: previously `segments.index(key) + 1` raised NoMethodError on
    # nil for unknown keys before the guard could run.
    def bump(key)
      idx = segments.index(key.to_sym)
      return unless idx
      segments[(idx + 1)..2].each do |sgm|
        send("#{sgm}=", 0) if send(sgm)
      end
      send("#{key}=", 1 + send(key).to_i)
    end

    def to_s
      tag
    end

    def to_hash
      marshal_dump
    end

    # Formatted version string, e.g. "1.2.3+4".
    def tag
      tag_format % to_hash
    end

    # Default format joins segments with '.', switching to '+' before the
    # fourth (build) segment per semver.org.
    def tag_format
      unless @tag_format
        fmts = segments.map { |k| "%<#{k}>s"}
        fmt_join = fmts.map { |k| "." }
        fmt_join[2] = '+' if fmts.size > 2 #build uses '+'. See semver.org
        fmt_join[-1] = '' if fmt_join.size > 0 #remove last char
        @tag_format = fmts.zip(fmt_join).flatten.join
      end
      @tag_format
    end

    def tag_format=(value)
      @tag_format = value
    end

    # Parses a tag string (split on '.', '+' or '-') into segment values,
    # growing the segment list as needed; numeric parts become Integers.
    # Returns self.
    def set_version_info(tag_str)
      clear
      values = tag_str.to_s.split(/\.|\+|\-/)
      values.each_with_index do |val, idx|
        val = val.to_s.chomp
        val = val.match(/(^\d+)$/) ? val.to_i : val
        self.send("#{segment_at(idx)}=", val )
      end
      self
    end

    # Returns the segment name at position +idx+, appending :build and then
    # synthetic :vinfoN names for positions beyond those declared.
    def segment_at(idx)
      @segments << :build if (@segments.size == 3) && (idx>=3)
      (@segments.size..idx).each{|n| @segments << "vinfo#{n}".to_sym}
      @segments[idx]
    end

    def segments
      @segments
    end

    # Removes all segment fields and the memoized tag format.
    def clear
      segments.each{|key| delete_field(key) if @table.has_key?(key)}
      @tag_format = nil
    end

    # All declared segments defaulting to 0.
    def get_defaults
      segments.inject({}){|h, k| h[k] = 0; h}
    end
  end
end
| 20.226087 | 79 | 0.551161 |
91fa569ce908d7fbe01233697bbf82c018a2e6a6 | 270 | # frozen_string_literal: true
RSpec.describe Karafka::Extensions::SidekiqAttributesMap do
subject(:attributes_map) { Karafka::AttributesMap }
it { expect(attributes_map.topic).to include :interchanger }
it { expect(attributes_map.topic).to include :worker }
end
| 30 | 62 | 0.781481 |
ff0d132134a0de0d5bdbdf019624fad7cb3628bd | 93 | class BooksController < ApplicationController
def index
@products = Book.all
end
end
| 15.5 | 45 | 0.752688 |
#!/usr/bin/env ruby
# -*- coding: utf-8 -*-

#######################################################################
#
# A demo of a Line chart with a secondary axis in WriteXLSX.
#
# reverse(c), March 2011, John McNamara, [email protected]
# convert to ruby by Hideo NAKAMURA, [email protected]
#
require 'write_xlsx'

workbook  = WriteXLSX.new('chart_secondary_axis.xlsx')
worksheet = workbook.add_worksheet
bold      = workbook.add_format(:bold => 1)

# Add the worksheet data that the charts will refer to.
headings = [ 'Aliens', 'Humans']
data = [
  [ 2,  3,  4,  5,  6,  7 ],
  [ 10, 40, 50, 20, 10, 50 ]
]

worksheet.write('A1', headings, bold)
worksheet.write('A2', data)

# Create a new chart object. In this case an embedded chart.
chart = workbook.add_chart(:type => 'line', :embedded => 1)

# Configure the first series; :y2_axis plots it on the secondary axis.
chart.add_series(
  :name    => '=Sheet1!$A$1',
  :values  => '=Sheet1!$A$2:$A$7',
  :y2_axis => 1
)

chart.add_series(
  :name   => '=Sheet1!$B$1',
  :values => '=Sheet1!$B$2:$B$7'
)

chart.set_legend(:position => 'right')

# Add a chart title and some axis labels.
chart.set_title(:name => 'Survey results')
chart.set_x_axis(:name => 'Days')
chart.set_y_axis(:name => 'Population', :major_gridlines => {:visible => 0})
chart.set_y2_axis(:name => 'Laser wounds')

# Insert the chart into the worksheet (with an offset).
worksheet.insert_chart(
  'D2', chart,
  :x_offset => 25, :y_offset => 10
)

workbook.close
| 25.465517 | 76 | 0.619499 |
791b9ded909ceb5e1ed6acd198c71dc0ef471604 | 2,891 | require 'formula'
# Build-time requirement: dsniff's configure picks up Berkeley-DB 5.x when
# it is linked and then fails to compile, so ask the user to unlink it first.
class NoBdb5 < Requirement
  satisfy(:build_env => false) { !Formula["berkeley-db"].installed? }

  def message; <<-EOS.undent
    This software can fail to compile when Berkeley-DB 5.x is installed.
    You may need to try:
      brew unlink berkeley-db
      brew install dsniff
      brew link berkeley-db
    EOS
  end
end
# Homebrew formula for dsniff 2.4b1 (network auditing / password sniffing
# tool suite).
class Dsniff < Formula
  homepage 'http://monkey.org/~dugsong/dsniff/'
  url 'http://monkey.org/~dugsong/dsniff/beta/dsniff-2.4b1.tar.gz'
  sha1 '25cfea26e9bbe016451180758a49d4d5bc9317d3'

  depends_on NoBdb5
  depends_on 'libnet'
  depends_on 'libnids'

  # MacPorts patch set for OS X compatibility, applied with -p0.
  # NOTE(review): `def patches` is the legacy Homebrew patch API — confirm
  # the targeted brew version still supports it.
  def patches
    {:p0 => [
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-arpspoof.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-dnsspoof.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-filesnarf.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-macof.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-pcaputil.c",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-record.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-sshcrypto.c",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-sshmitm.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-sshow.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-tcp_raw.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-tcp_raw.h.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-tcpkill.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-tcpnice.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-trigger.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-trigger.h.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-urlsnarf.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-webmitm.c.diff",
      "https://trac.macports.org/export/90933/trunk/dports/net/dsniff-devel/files/patch-webspy.c.diff"
    ]}
  end

  def install
    # BIND_8_COMPAT is needed for the legacy resolver API on OS X.
    ENV.append 'CFLAGS', "-DBIND_8_COMPAT"
    system "./configure", "--disable-debug", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--mandir=#{man}",
                          "--with-libnet=#{HOMEBREW_PREFIX}",
                          "--with-libnids=#{HOMEBREW_PREFIX}"
    system "make"
    system "make install"
  end
end
| 49 | 104 | 0.704946 |
# Maintenance tasks under the `m:` namespace for seeding and rebuilding the
# database.
namespace :m do
  desc 'Load the seed data from db/seeds.rb'
  task :seed => 'db:abort_if_pending_migrations' do
    seed_file = File.join(File.dirname(__FILE__), '..', '..', 'db', 'seeds.rb')
    load(seed_file) if File.exist?(seed_file)
  end

  # Load db/schema.rb explicitly: SCHEMA points db:schema:load at this
  # engine's schema file.
  task :schema => :environment do
    ENV['SCHEMA'] = File.join(File.dirname(__FILE__), '..', '..', 'db', 'schema.rb')
    Rake::Task['db:schema:load'].invoke
  end

  # Full rebuild: drop, create, load schema, then seed.
  task :setup => ['db:drop', 'db:create', :schema, :seed] do
  end
end
# OAuth callback endpoints. One action per provider is generated from the
# same template via string class_eval.
class OmniauthCallbacksController < Devise::OmniauthCallbacksController
  # Defines a callback action named after +provider+: signs an existing user
  # in, or stashes the OmniAuth payload in the session and redirects to
  # registration for new users.
  # NOTE(review): `session[:access_toke]` looks like a typo for :access_token;
  # fixing it requires updating every reader of that session key — confirm.
  def self.provides_callback_for(provider)
    class_eval %Q{
      def #{provider}
        auth = env["omniauth.auth"]
        @user = User.find_for_oauth(env["omniauth.auth"], current_user)

        if @user.persisted?
          session[:access_toke] = auth.credentials.token
          sign_in_and_redirect @user, event: :authentication
          set_flash_message(:notice, :success, kind: "#{provider}".capitalize) if is_navigational_format?
        else
          session["devise.#{provider}_data"] = env["omniauth.auth"]
          redirect_to new_user_registration_url
        end
      end
    }
  end

  # Generate an action for each supported provider.
  [:twitter, :facebook, :google_oauth2, :linkedin].each do |provider|
    provides_callback_for provider
  end
end
334d02697b740b1d354635ff7fd728b6d1e946b6 | 168 | class CreateIngredients < ActiveRecord::Migration[6.0]
def change
create_table :ingredients do |t|
t.string :ing_name
t.timestamps
end
end
end
| 16.8 | 54 | 0.684524 |
e8dc881f8be56b66b72b3085f8eb3be5b955bdc9 | 1,382 | # frozen_string_literal: true
require 'spec_helper'
describe API::GroupMilestones do
  # Fixtures: a private group with one member, one closed and one open
  # milestone, and two weighted issues attached to the open milestone.
  let(:user) { create(:user) }
  let(:group) { create(:group, :private) }
  let(:project) { create(:project, namespace: group) }
  let!(:group_member) { create(:group_member, group: group, user: user) }
  let!(:closed_milestone) { create(:closed_milestone, group: group, title: 'version1', description: 'closed milestone') }
  let!(:milestone) { create(:milestone, group: group, title: 'version2', description: 'open milestone', start_date: Date.today, due_date: Date.today + 3.days) }
  let!(:issue1) { create(:issue, created_at: Date.today.beginning_of_day, weight: 2, project: project, milestone: milestone) }
  let!(:issue2) { create(:issue, created_at: Date.today.middle_of_day, weight: 5, project: project, milestone: milestone) }
  let(:issues_route) { "/groups/#{group.id}/milestones/#{milestone.id}/issues" }
  before do
    project.add_developer(user)
  end
  it 'matches V4 EE-specific response schema for a list of issues' do
    get api(issues_route, user)
    expect(response).to have_gitlab_http_status(:ok)
    expect(response).to match_response_schema('public_api/v4/issues', dir: 'ee')
  end
  # Shared burndown examples, pointed at the group-level milestones route.
  it_behaves_like 'group and project milestone burndowns', '/groups/:id/milestones/:milestone_id/burndown_events' do
    let(:route) { "/groups/#{group.id}/milestones" }
  end
end
| 44.580645 | 160 | 0.718524 |
01f654be55a54eab04c51f3a1354f2e36511f35d | 81 | class MangaGenre < ApplicationRecord
belongs_to :manga
belongs_to :genre
end
| 16.2 | 36 | 0.802469 |
ac84ef94b1cebf25d255cd96327d356ca81edb51 | 4,131 | # frozen_string_literal: true
# Mixin for models that carry an uploaded avatar (via CarrierWave's
# mount_uploader). Adds validation, request-scoped URL caching, and batched
# lookup of the tracking Upload records.
module Avatarable
  extend ActiveSupport::Concern
  included do
    prepend ShadowMethods
    include ObjectStorage::BackgroundMove
    include Gitlab::Utils::StrongMemoize
    validate :avatar_type, if: ->(user) { user.avatar.present? && user.avatar_changed? }
    validates :avatar, file_size: { maximum: 200.kilobytes.to_i }, if: :avatar_changed?
    mount_uploader :avatar, AvatarUploader
    after_initialize :add_avatar_to_batch
  end
  # Prepended so these take precedence over the uploader-generated methods.
  module ShadowMethods
    def avatar_url(**args)
      # We use avatar_path instead of overriding avatar_url because of carrierwave.
      # See https://gitlab.com/gitlab-org/gitlab-foss/merge_requests/11001/diffs#note_28659864
      avatar_path(only_path: args.fetch(:only_path, true), size: args[:size]) || super
    end
    def retrieve_upload(identifier, paths)
      upload = retrieve_upload_from_batch(identifier)
      # This fallback is needed when deleting an upload, because we may have
      # already been removed from the DB. We have to check an explicit `#nil?`
      # because it's a BatchLoader instance.
      upload = super if upload.nil?
      upload
    end
  end
  class_methods do
    # Returns an open File handle for a bundled bot avatar image.
    def bot_avatar(image:)
      Rails.root.join('app', 'assets', 'images', 'bot_avatars', image).open
    end
  end
  # Validation: reject uploads whose extension is not a safe image format.
  def avatar_type
    unless self.avatar.image?
      errors.add :avatar, "file format is not supported. Please try one of the following supported formats: #{AvatarUploader::SAFE_IMAGE_EXT.join(', ')}"
    end
  end
  def avatar_path(only_path: true, size: nil)
    # Unpersisted records can't be cached by id; compute directly.
    unless self.try(:id)
      return uncached_avatar_path(only_path: only_path, size: size)
    end
    # Cache this avatar path only within the request because avatars in
    # object storage may be generated with time-limited, signed URLs.
    key = "#{self.class.name}:#{self.id}:#{only_path}:#{size}"
    Gitlab::SafeRequestStore[key] ||= uncached_avatar_path(only_path: only_path, size: size)
  end
  def uncached_avatar_path(only_path: true, size: nil)
    return unless self.try(:avatar).present?
    asset_host = ActionController::Base.asset_host
    use_asset_host = asset_host.present?
    use_authentication = respond_to?(:public?) && !public?
    query_params = size&.nonzero? ? "?width=#{size}" : ""
    # Avatars for private and internal groups and projects require authentication to be viewed,
    # which means they can only be served by Rails, on the regular GitLab host.
    # If an asset host is configured, we need to return the fully qualified URL
    # instead of only the avatar path, so that Rails doesn't prefix it with the asset host.
    if use_asset_host && use_authentication
      use_asset_host = false
      only_path = false
    end
    url_base = []
    if use_asset_host
      url_base << asset_host unless only_path
    else
      url_base << gitlab_config.base_url unless only_path
      url_base << gitlab_config.relative_url_root
    end
    url_base.join + avatar.local_url + query_params
  end
  # Path that is persisted in the tracking Upload model. Used to fetch the
  # upload from the model.
  def upload_paths(identifier)
    avatar_mounter.blank_uploader.store_dirs.map { |store, path| File.join(path, identifier) }
  end
  private
  # Collects (identifier, model) pairs across a request and resolves the
  # matching Upload rows with a single query per batch.
  def retrieve_upload_from_batch(identifier)
    BatchLoader.for(identifier: identifier, model: self)
      .batch(key: self.class, cache: true, replace_methods: false) do |upload_params, loader, args|
      model_class = args[:key]
      paths = upload_params.flat_map do |params|
        params[:model].upload_paths(params[:identifier])
      end
      Upload.where(uploader: AvatarUploader.name, path: paths).find_each do |upload|
        model = model_class.instantiate('id' => upload.model_id)
        loader.call({ model: model, identifier: File.basename(upload.path) }, upload)
      end
    end
  end
  # after_initialize hook: pre-register this record's avatar identifiers so
  # later lookups are served from the batch.
  def add_avatar_to_batch
    return unless avatar_mounter
    avatar_mounter.read_identifiers.each { |identifier| retrieve_upload_from_batch(identifier) }
  end
  def avatar_mounter
    strong_memoize(:avatar_mounter) { _mounter(:avatar) }
  end
end
| 33.048 | 153 | 0.710724 |
21c172f1a51936716ffe5003ac37bc62ac516292 | 6,296 | Hadean::Application.routes.draw do
resources :image_groups
# mount Resque::Server.new, at: "/resque"
namespace(:admin){ namespace(:customer_service){ resources :comments } }
resources :user_sessions, only: [:new, :create, :destroy]
get 'admin' => 'admin/overviews#index'
get 'login' => 'user_sessions#new'
get 'logout' => 'user_sessions#destroy'
delete 'logout' => 'user_sessions#destroy'
get 'signup' => 'customer/registrations#new'
get 'admin/merchandise' => 'admin/merchandise/summary#index'
resource :about, only: [:show]
resources :notifications, only: [:update]
resources :products, only: [:index, :show, :create]
resources :states, only: [:index]
resources :terms, only: [:index]
resource :unsubscribe, only: :show
resources :wish_items, only: [:index, :destroy]
root :to => "welcome#index"
namespace :customer do
resources :registrations, only: [:index, :new, :create]
resource :password_reset, only: [:new, :create, :edit, :update]
resource :activation, only: [:show]
end
namespace :myaccount do
resources :orders, only: [:index, :show]
resources :addresses
resources :credit_cards
resources :referrals, only: [:index, :create, :update]
resource :store_credit, only: [:show]
resource :overview, only: [:show, :edit, :update]
end
namespace :shopping do
resources :addresses do
member do
put :select_address
end
end
resources :billing_addresses do
member do
put :select_address
end
end
resources :cart_items do
member do
put :move_to
end
end
resource :coupon, only: [:show, :create]
resources :orders do
member do
get :checkout
get :confirmation
end
end
resources :shipping_methods
end
namespace :admin do
namespace :customer_service do
resources :users do
resources :comments
end
end
resources :users
namespace :user_datas do
resources :referrals do
member do
post :apply
end
end
resources :users do
resource :store_credits, only: [:show, :edit, :update]
resources :addresses
end
end
resources :overviews, only: [:index]
get "help" => "help#index"
namespace :reports do
resource :overview, only: [:show]
resources :graphs
resources :weekly_charts, only: [:index]
end
namespace :rma do
resources :orders do
resources :return_authorizations do
member do
put :complete
end
end
end
#resources :shipments
end
namespace :history do
resources :orders, only: [:index, :show] do
resources :addresses, only: [:index, :show, :edit, :update, :new, :create]
end
end
namespace :fulfillment do
resources :orders do
member do
put :create_shipment
end
resources :comments
end
namespace :partial do
resources :orders do
resources :shipments, only: [ :create, :new, :update ]
end
end
resources :shipments do
member do
put :ship
end
resources :addresses , only: [:edit, :update]# This is for editing the shipment address
end
end
namespace :shopping do
resources :carts
resources :products
resources :users
namespace :checkout do
resources :billing_addresses, only: [:index, :update, :new, :create, :select_address] do
member do
put :select_address
end
end
resources :credit_cards
resource :order, only: [:show, :update, :start_checkout_process] do
member do
post :start_checkout_process
end
end
resources :shipping_addresses, only: [:index, :update, :new, :create, :select_address] do
member do
put :select_address
end
end
resources :shipping_methods, only: [:index, :update]
end
end
namespace :config do
resources :accounts
resources :countries, only: [:index, :edit, :update, :destroy] do
member do
put :activate
end
end
resources :overviews
resources :shipping_categories
resources :shipping_rates
resources :shipping_methods
resources :shipping_zones
resources :tax_rates
resources :tax_categories
end
namespace :generic do
resources :coupons
resources :deals
resources :sales
end
namespace :inventory do
resources :suppliers
resources :overviews
resources :purchase_orders
resources :receivings
resources :adjustments
end
namespace :merchandise do
namespace :images do
resources :products
end
resources :image_groups
resources :properties
resources :prototypes
resources :brands
resources :product_types
resources :prototype_properties
namespace :changes do
resources :products do
resource :property, only: [:edit, :update]
end
end
namespace :wizards do
resources :brands, only: [:index, :create, :update]
resources :products, only: [:new, :create]
resources :properties, only: [:index, :create, :update]
resources :prototypes, only: [:update]
resources :tax_categories, only: [:index, :create, :update]
resources :shipping_categories, only: [:index, :create, :update]
resources :product_types, only: [:index, :create, :update]
end
namespace :multi do
resources :products do
resource :variant, only: [:edit, :update]
end
end
resources :products do
member do
get :add_properties
put :activate
end
resources :variants
end
namespace :products do
resources :descriptions, only: [:edit, :update]
end
end
namespace :document do
resources :invoices
end
end
end
| 26.233333 | 97 | 0.595775 |
91dd92b7c679aa06af80e1c27c365a27ac950039 | 11,919 | # == Schema Information
#
# Table name: services
#
# id :integer not null, primary key
# type :string(255)
# title :string(255)
# project_id :integer
# created_at :datetime
# updated_at :datetime
# active :boolean default(FALSE), not null
# properties :text
# template :boolean default(FALSE)
# push_events :boolean default(TRUE)
# issues_events :boolean default(TRUE)
# merge_requests_events :boolean default(TRUE)
# tag_push_events :boolean default(TRUE)
# note_events :boolean default(TRUE), not null
#
require 'spec_helper'
# Specs for the HipChat project service: message construction for each event
# type and the HTTP calls made through the HipChat client (stubbed via WebMock).
describe HipchatService, models: true do
  describe "Associations" do
    it { is_expected.to belong_to :project }
    it { is_expected.to have_one :service_hook }
  end
  describe "Execute" do
    let(:hipchat) { HipchatService.new }
    let(:user) { create(:user, username: 'username') }
    let(:project) { create(:project, name: 'project') }
    let(:api_url) { 'https://hipchat.example.com/v2/room/123456/notification?auth_token=verySecret' }
    let(:project_name) { project.name_with_namespace.gsub(/\s/, '') }
    let(:token) { 'verySecret' }
    let(:server_url) { 'https://hipchat.example.com'}
    let(:push_sample_data) { Gitlab::PushDataBuilder.build_sample(project, user) }
    before(:each) do
      # Configure the service under test and intercept all outgoing POSTs.
      allow(hipchat).to receive_messages(
        project_id: project.id,
        project: project,
        room: 123456,
        server: server_url,
        token: token
      )
      WebMock.stub_request(:post, api_url)
    end
    it 'should test and return errors' do
      allow(hipchat).to receive(:execute).and_raise(StandardError, 'no such room')
      result = hipchat.test(push_sample_data)
      expect(result[:success]).to be_falsey
      expect(result[:result].to_s).to eq('no such room')
    end
    it 'should use v1 if version is provided' do
      allow(hipchat).to receive(:api_version).and_return('v1')
      expect(HipChat::Client).to receive(:new).with(
        token,
        api_version: 'v1',
        server_url: server_url
      ).and_return(double(:hipchat_service).as_null_object)
      hipchat.execute(push_sample_data)
    end
    it 'should use v2 as the version when nothing is provided' do
      allow(hipchat).to receive(:api_version).and_return('')
      expect(HipChat::Client).to receive(:new).with(
        token,
        api_version: 'v2',
        server_url: server_url
      ).and_return(double(:hipchat_service).as_null_object)
      hipchat.execute(push_sample_data)
    end
    context 'push events' do
      it "should call Hipchat API for push events" do
        hipchat.execute(push_sample_data)
        expect(WebMock).to have_requested(:post, api_url).once
      end
      it "should create a push message" do
        message = hipchat.send(:create_push_message, push_sample_data)
        push_sample_data[:object_attributes]
        branch = push_sample_data[:ref].gsub('refs/heads/', '')
        expect(message).to include("#{user.name} pushed to branch " \
            "<a href=\"#{project.web_url}/commits/#{branch}\">#{branch}</a> of " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>")
      end
    end
    context 'tag_push events' do
      let(:push_sample_data) { Gitlab::PushDataBuilder.build(project, user, Gitlab::Git::BLANK_SHA, '1' * 40, 'refs/tags/test', []) }
      it "should call Hipchat API for tag push events" do
        hipchat.execute(push_sample_data)
        expect(WebMock).to have_requested(:post, api_url).once
      end
      it "should create a tag push message" do
        message = hipchat.send(:create_push_message, push_sample_data)
        push_sample_data[:object_attributes]
        expect(message).to eq("#{user.name} pushed new tag " \
            "<a href=\"#{project.web_url}/commits/test\">test</a> to " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>\n")
      end
    end
    context 'issue events' do
      let(:issue) { create(:issue, title: 'Awesome issue', description: 'please fix') }
      let(:issue_service) { Issues::CreateService.new(project, user) }
      let(:issues_sample_data) { issue_service.hook_data(issue, 'open') }
      it "should call Hipchat API for issue events" do
        hipchat.execute(issues_sample_data)
        expect(WebMock).to have_requested(:post, api_url).once
      end
      it "should create an issue message" do
        message = hipchat.send(:create_issue_message, issues_sample_data)
        obj_attr = issues_sample_data[:object_attributes]
        expect(message).to eq("#{user.name} opened " \
            "<a href=\"#{obj_attr[:url]}\">issue ##{obj_attr["iid"]}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "<b>Awesome issue</b>" \
            "<pre>please fix</pre>")
      end
    end
    context 'merge request events' do
      let(:merge_request) { create(:merge_request, description: 'please fix', title: 'Awesome merge request', target_project: project, source_project: project) }
      let(:merge_service) { MergeRequests::CreateService.new(project, user) }
      let(:merge_sample_data) { merge_service.hook_data(merge_request, 'open') }
      it "should call Hipchat API for merge requests events" do
        hipchat.execute(merge_sample_data)
        expect(WebMock).to have_requested(:post, api_url).once
      end
      it "should create a merge request message" do
        message = hipchat.send(:create_merge_request_message,
                               merge_sample_data)
        obj_attr = merge_sample_data[:object_attributes]
        expect(message).to eq("#{user.name} opened " \
            "<a href=\"#{obj_attr[:url]}\">merge request ##{obj_attr["iid"]}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "<b>Awesome merge request</b>" \
            "<pre>please fix</pre>")
      end
    end
    context "Note events" do
      let(:user) { create(:user) }
      let(:project) { create(:project, creator_id: user.id) }
      let(:issue) { create(:issue, project: project) }
      let(:merge_request) { create(:merge_request, source_project: project, target_project: project) }
      let(:snippet) { create(:project_snippet, project: project) }
      let(:commit_note) { create(:note_on_commit, author: user, project: project, commit_id: project.repository.commit.id, note: 'a comment on a commit') }
      let(:merge_request_note) { create(:note_on_merge_request, noteable_id: merge_request.id, note: "merge request note") }
      let(:issue_note) { create(:note_on_issue, noteable_id: issue.id, note: "issue note")}
      let(:snippet_note) { create(:note_on_project_snippet, noteable_id: snippet.id, note: "snippet note") }
      it "should call Hipchat API for commit comment events" do
        data = Gitlab::NoteDataBuilder.build(commit_note, user)
        hipchat.execute(data)
        expect(WebMock).to have_requested(:post, api_url).once
        message = hipchat.send(:create_message, data)
        obj_attr = data[:object_attributes]
        commit_id = Commit.truncate_sha(data[:commit][:id])
        title = hipchat.send(:format_title, data[:commit][:message])
        expect(message).to eq("#{user.name} commented on " \
            "<a href=\"#{obj_attr[:url]}\">commit #{commit_id}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "#{title}" \
            "<pre>a comment on a commit</pre>")
      end
      it "should call Hipchat API for merge request comment events" do
        data = Gitlab::NoteDataBuilder.build(merge_request_note, user)
        hipchat.execute(data)
        expect(WebMock).to have_requested(:post, api_url).once
        message = hipchat.send(:create_message, data)
        obj_attr = data[:object_attributes]
        merge_id = data[:merge_request]['iid']
        title = data[:merge_request]['title']
        expect(message).to eq("#{user.name} commented on " \
            "<a href=\"#{obj_attr[:url]}\">merge request ##{merge_id}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "<b>#{title}</b>" \
            "<pre>merge request note</pre>")
      end
      # NOTE(review): unlike the sibling examples, this one never asserts
      # `have_requested(:post, api_url)` — probably an omission; confirm.
      it "should call Hipchat API for issue comment events" do
        data = Gitlab::NoteDataBuilder.build(issue_note, user)
        hipchat.execute(data)
        message = hipchat.send(:create_message, data)
        obj_attr = data[:object_attributes]
        issue_id = data[:issue]['iid']
        title = data[:issue]['title']
        expect(message).to eq("#{user.name} commented on " \
            "<a href=\"#{obj_attr[:url]}\">issue ##{issue_id}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "<b>#{title}</b>" \
            "<pre>issue note</pre>")
      end
      it "should call Hipchat API for snippet comment events" do
        data = Gitlab::NoteDataBuilder.build(snippet_note, user)
        hipchat.execute(data)
        expect(WebMock).to have_requested(:post, api_url).once
        message = hipchat.send(:create_message, data)
        obj_attr = data[:object_attributes]
        snippet_id = data[:snippet]['id']
        title = data[:snippet]['title']
        expect(message).to eq("#{user.name} commented on " \
            "<a href=\"#{obj_attr[:url]}\">snippet ##{snippet_id}</a> in " \
            "<a href=\"#{project.web_url}\">#{project_name}</a>: " \
            "<b>#{title}</b>" \
            "<pre>snippet note</pre>")
      end
    end
    context 'build events' do
      let(:build) { create(:ci_build) }
      let(:data) { Gitlab::BuildDataBuilder.build(build) }
      context 'for failed' do
        before { build.drop }
        it "should call Hipchat API" do
          hipchat.execute(data)
          expect(WebMock).to have_requested(:post, api_url).once
        end
        it "should create a build message" do
          message = hipchat.send(:create_build_message, data)
          project_url = project.web_url
          project_name = project.name_with_namespace.gsub(/\s/, '')
          sha = data[:sha]
          ref = data[:ref]
          ref_type = data[:tag] ? 'tag' : 'branch'
          duration = data[:commit][:duration]
          expect(message).to eq("<a href=\"#{project_url}\">#{project_name}</a>: " \
            "Commit <a href=\"#{project_url}/commit/#{sha}/builds\">#{Commit.truncate_sha(sha)}</a> " \
            "of <a href=\"#{project_url}/commits/#{ref}\">#{ref}</a> #{ref_type} " \
            "by #{data[:commit][:author_name]} failed in #{duration} second(s)")
        end
      end
      context 'for succeeded' do
        before do
          build.success
        end
        it "should call Hipchat API" do
          hipchat.notify_only_broken_builds = false
          hipchat.execute(data)
          expect(WebMock).to have_requested(:post, api_url).once
        end
        it "should notify only broken" do
          hipchat.notify_only_broken_builds = true
          hipchat.execute(data)
          expect(WebMock).to_not have_requested(:post, api_url).once
        end
      end
    end
    context "#message_options" do
      it "should be set to the defaults" do
        expect(hipchat.send(:message_options)).to eq({ notify: false, color: 'yellow' })
      end
      # NOTE(review): "notfiy" is a typo in the example name (cosmetic only).
      it "should set notfiy to true" do
        allow(hipchat).to receive(:notify).and_return('1')
        expect(hipchat.send(:message_options)).to eq({ notify: true, color: 'yellow' })
      end
      it "should set the color" do
        allow(hipchat).to receive(:color).and_return('red')
        expect(hipchat.send(:message_options)).to eq({ notify: false, color: 'red' })
      end
    end
  end
end
| 37.958599 | 161 | 0.606427 |
f855d39101f02ec4c6a2a5d8cf35b43f7280bf44 | 189 | class Alexa < ApplicationRecord
belongs_to :chat, optional: true
validates_presence_of :device_user
validates_uniqueness_of :device_user
def display_name
device_user
end
end
| 18.9 | 38 | 0.798942 |
ab36051f8b8f7e164a324d49d956b40163c7ade2 | 586 | # Time Complexity: O(n)
# Space Complexity: O(1)
def max_sub_array(nums)
result = nil
if nums == nil
result = 0
elsif nums.length > 0
max_so_far = 0
max_ending_here = 0
largest_element = nums[0]
nums.each do |element|
max_ending_here = max_ending_here + element
max_ending_here = 0 if max_ending_here < 0
max_so_far = max_ending_here if max_so_far < max_ending_here
largest_element = element if element > largest_element
end
result = (largest_element < 0) ? largest_element : max_so_far
end
return result
end
| 26.636364 | 74 | 0.674061 |
18aff995bdf6ee1bd28f4575b87aea3c964fc192 | 3,578 | require 'test_helper'
# Integration tests for the practical supports endpoints: create, update
# (including blank/negative rejection), and destroy.
class PracticalSupportsControllerTest < ActionDispatch::IntegrationTest
  before do
    @patient = create :patient
    @user = create :user
    sign_in @user
  end
  describe 'create' do
    before do
      with_versioning(@user) do
        @support = attributes_for :practical_support
        post patient_practical_supports_path(@patient),
             params: { practical_support: @support },
             xhr: true
      end
    end
    it 'should create and save a new support record' do
      @support[:support_type] = 'different'
      assert_difference 'Patient.find(@patient.id).practical_supports.count', 1 do
        post patient_practical_supports_path(@patient), params: { practical_support: @support }, xhr: true
      end
    end
    it 'should respond bad_request if the support record does not save' do
      # submitting a duplicate support
      post patient_practical_supports_path(@patient), params: { practical_support: @support }, xhr: true
      assert_response :bad_request
    end
    it 'should respond success if the support record saves' do
      assert_response :success
    end
    it 'should log the creating user' do
      assert_equal Patient.find(@patient.id).practical_supports.last.created_by,
                   @user
    end
  end
  describe 'update' do
    before do
      @patient.practical_supports.create support_type: 'Transit',
                                         confirmed: false,
                                         source: 'Transit',
                                         amount: 10
      @support = @patient.practical_supports.first
      @support_edits = { support_type: 'Lodging' }
      patch patient_practical_support_path(@patient, @support),
            params: { practical_support: @support_edits },
            xhr: true
      @support.reload
    end
    it 'should respond success' do
      assert_response :success
    end
    it 'should update the support_type field' do
      assert_equal @support.support_type, 'Lodging'
    end
    [:source, :support_type].each do |field|
      it "should refuse to save #{field} to blank" do
        [nil, ''].each do |bad_text|
          assert_no_difference '@support.versions.count' do
            # FIX: previously this always blanked :source, so the
            # :support_type iteration never exercised its own field.
            @support_edits[field] = bad_text
            patch patient_practical_support_path(@patient, @support),
                  params: { practical_support: @support_edits },
                  xhr: true
            assert_response :bad_request
          end
        end
      end
    end
    it 'should allow blank amount' do
      @support_edits[:amount] = nil
      patch patient_practical_support_path(@patient, @support),
            params: { practical_support: @support_edits },
            xhr: true
      assert_response :success
    end
    it 'should not allow negative amounts' do
      @support_edits[:amount] = -3
      patch patient_practical_support_path(@patient, @support),
            params: { practical_support: @support_edits },
            xhr: true
      assert_response :bad_request
    end
  end
  describe 'destroy' do
    before do
      @patient.practical_supports.create support_type: 'Transit',
                                         confirmed: false,
                                         source: 'Transit'
      @support = @patient.practical_supports.first
    end
    it 'should destroy a support record' do
      assert_difference 'Patient.find(@patient.id).practical_supports.count', -1 do
        delete patient_practical_support_path(@patient, @support), xhr: true
      end
    end
  end
end
| 32.234234 | 106 | 0.622415 |
6adc70d8553cb8cc96d48a8eeb91b175d2760c91 | 1,561 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
# NOTE: generated by gapic-generator-ruby (see the header of this file); do
# not hand-edit the enum values — they mirror the google-ads proto definitions.
module Google
  module Ads
    module GoogleAds
      module V7
        module Enums
          # The status of combined audience.
          class CombinedAudienceStatusEnum
            include ::Google::Protobuf::MessageExts
            extend ::Google::Protobuf::MessageExts::ClassMethods
            # Enum containing possible combined audience status types.
            module CombinedAudienceStatus
              # Not specified.
              UNSPECIFIED = 0
              # Used for return value only. Represents value unknown in this version.
              UNKNOWN = 1
              # Enabled status - combined audience is enabled and can be targeted.
              ENABLED = 2
              # Removed status - combined audience is removed and cannot be used for
              # targeting.
              REMOVED = 3
            end
          end
        end
      end
    end
  end
end
| 30.607843 | 85 | 0.650865 |
1a25fdb453594fd81e7e86b8aedd3594f94eb459 | 162 | class Api::V1::ApplicationsController < ApiController
before_filter :require_ssl
before_filter :require_api_key
respond_to :json
def index
end
end
| 14.727273 | 53 | 0.771605 |
f767b07e7593f3109f9ba71183c1c00b42352360 | 1,212 | require 'abstract_unit'
# Verifies ActionDispatch::Callbacks: before/after hooks fire around every
# dispatch, and to_prepare/to_cleanup are delegated to ActionDispatch::Reloader.
class DispatcherTest < ActiveSupport::TestCase
  # Shared counters for observing callback invocations.
  class Foo
    cattr_accessor :a, :b
  end

  # Minimal Rack endpoint for the middleware under test.
  class DummyApp
    def call(env)
      [200, {}, 'response']
    end
  end

  def setup
    Foo.a, Foo.b = 0, 0
    ActionDispatch::Callbacks.reset_callbacks(:call)
  end

  # Callbacks must run on every dispatch — including one whose app raises.
  def test_before_and_after_callbacks
    ActionDispatch::Callbacks.before { |*args| Foo.a += 1; Foo.b += 1 }
    ActionDispatch::Callbacks.after { |*args| Foo.a += 1; Foo.b += 1 }

    dispatch
    assert_equal 2, Foo.a
    assert_equal 2, Foo.b

    dispatch
    assert_equal 4, Foo.a
    assert_equal 4, Foo.b

    dispatch do |env|
      raise "error"
    end rescue nil
    assert_equal 6, Foo.a
    assert_equal 6, Foo.b
  end

  def test_to_prepare_and_cleanup_delegation
    prepared = cleaned = false
    ActionDispatch::Callbacks.to_prepare { prepared = true }
    # FIX: this was registered via to_prepare as well, so `cleaned` was set by
    # prepare! and the final assertion could never fail — cleanup delegation
    # went untested. Register it as a cleanup callback, as the name implies.
    ActionDispatch::Callbacks.to_cleanup { cleaned = true }

    ActionDispatch::Reloader.prepare!
    assert prepared

    ActionDispatch::Reloader.cleanup!
    assert cleaned
  end

  private

  # Dispatches one request through the Callbacks middleware, wrapping either
  # the given block or the DummyApp.
  def dispatch(&block)
    ActionDispatch::Callbacks.new(block || DummyApp.new).call(
      {'rack.input' => StringIO.new('')}
    )
  end
end
| 20.542373 | 71 | 0.658416 |
b9deaa2af3a6b2907ab5b78b674f520fd125d966 | 1,084 | # Taken from bunto/bunto-mentions (Copyright (c) 2016-present GitHub, Inc. Licensened under the MIT).
require 'rubygems'
require 'minitest/autorun'
require 'shoulda'
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
$LOAD_PATH.unshift(File.dirname(__FILE__))
require 'bunto-archives'
# Fixture locations: sites are built from test/source into test/destination.
TEST_DIR = File.expand_path("../", __FILE__)
SOURCE_DIR = File.expand_path("source", TEST_DIR)
DEST_DIR = File.expand_path("destination", TEST_DIR)
class Minitest::Test
  # Builds a Bunto::Site rooted at the test fixtures, layering any per-test
  # config overrides on top of the defaults.
  def fixture_site(config = {})
    base = Bunto::Utils.deep_merge_hashes(
      Bunto::Configuration::DEFAULTS,
      "source" => SOURCE_DIR, "destination" => DEST_DIR
    )
    Bunto::Site.new(Bunto::Utils.deep_merge_hashes(base, config))
  end

  # True when the site generated an archive page at +path+.
  def archive_exists?(site, path)
    site.config["archives"].any? { |entry| entry.path == path }
  end

  # Returns the stripped contents of a generated file, or false when the
  # file was not written.
  def read_file(path)
    target = File.join(DEST_DIR, path)
    return false unless File.exist?(target)
    File.read(target).strip
  end
end
| 24.088889 | 101 | 0.643911 |
bf094cd50df45158fd4b36722d77ec1c92ed8673 | 500 | class AddSettings < ActiveRecord::Migration
def self.up
Radiant::Config.create :key => 'google_search.api_key', :description => "Required. Your Google search API key"
Radiant::Config.create :key => 'google_search.custom_search_id', :description => "Optional. The Google Custom Search Engine ID if you want to use one"
end
def self.down
Radiant::Config.find_by_key('google_search.api_key').destroy
Radiant::Config.find_by_key('google_search.custom_search_id').destroy
end
end
| 41.666667 | 154 | 0.752 |
ed552e80965d4d9de75c1b5e6afc261b8cfa50bf | 7,437 | # Task file for tests
# InSpec controls checking sysctl hardening values (kernel.*, fs.*)
# applied by the Ansible task file referenced below.
ref_file = 'tasks/kernel.yml'
control 'kernel-01' do
  title 'Prevent unprivileged users from seeing dmesg output'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.dmesg_restrict') do
    its('value') { should eq 1 }
  end
end
control 'kernel-02' do
  title 'Prevent users from reading kernel pointers'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.kptr_restrict') do
    its('value') { should eq 2 }
  end
end
control 'kernel-03' do
  title 'Disable kernel magic key'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.sysrq') do
    its('value') { should eq 0 }
  end
end
control 'kernel-04' do
  title 'Prevent unprivileged users from using ptrace'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.yama.ptrace_scope') do
    its('value') { should eq 2 }
  end
end
control 'kernel-05' do
  title 'Append PID to core name in kernel dumps'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.core_uses_pid') do
    its('value') { should eq 1 }
  end
end
control 'kernel-06' do
  title 'Randomise process address space'
  impact 'low'
  ref ref_file
  describe kernel_parameter('kernel.randomize_va_space') do
    its('value') { should eq 2 }
  end
end
control 'kernel-07' do
  title 'Protect cross-volume links'
  impact 'low'
  ref ref_file
  links = %w[fs.protected_hardlinks fs.protected_symlinks]
  links.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
control 'kernel-08' do
  title 'Prevent coredumps from setuid'
  impact 'low'
  ref ref_file
  describe kernel_parameter('fs.suid_dumpable') do
    its('value') { should eq 0 }
  end
end
control 'kernel-09' do
  title 'Disable TCP timestamps'
  impact 'low'
  ref ref_file
  describe kernel_parameter('net.ipv4.tcp_timestamps') do
    its('value') { should eq 0 }
  end
end
# NOTE(review): duplicate control ID — 'kernel-09' is reused below for the
# ICMP rate-limit checks. In InSpec the later definition overrides the
# earlier one, so the TCP-timestamps control above is effectively shadowed;
# one of the two should be renamed.
control 'kernel-09' do
  title 'Rate limit ICMP packets'
  impact 'low'
  ref ref_file
  describe kernel_parameter('net.ipv4.icmp_ratelimit') do
    its('value') { should eq 100 }
  end
  describe kernel_parameter('net.ipv4.icmp_ratemask') do
    its('value') { should eq 88_089 }
  end
end
control 'kernel-10' do
  title 'Ignore suspicious ICMP packets'
  impact 'medium'
  ref ref_file
  suspicious_icmp_packets = %w[icmp_ignore_bogus_error_responses icmp_echo_ignore_broadcasts]
  suspicious_icmp_packets.each do |param|
    describe kernel_parameter("net.ipv4.#{param}") do
      its('value') { should eq 1 }
    end
  end
end
# Checks protections against classic TCP attacks (SYN floods,
# TIME-WAIT assassination per RFC 1337).
control 'kernel-11' do
  title 'Protect from TCP attacks'
  impact 'medium'
  ref ref_file
  tcp_attacks = %w[tcp_syncookies tcp_rfc1337]
  tcp_attacks.each do |param|
    # BUG FIX: the describe had no expectation block, so this control
    # asserted nothing at all. Assert the parameter is enabled (1),
    # matching the pattern used by every other control in this file.
    describe kernel_parameter("net.ipv4.#{param}") do
      its('value') { should eq 1 }
    end
  end
end
# IPv6 is disabled entirely for both the 'all' and 'default' interface
# configurations.
control 'kernel-12' do
  title 'Disable IPv6'
  impact 'high'
  ref ref_file
  params = %w[
    net.ipv6.conf.all.disable_ipv6
    net.ipv6.conf.default.disable_ipv6
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
# Reverse-path filtering plus logging of "martian" packets.
control 'kernel-13' do
  title 'Enable source address verification'
  impact 'high'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.rp_filter
    net.ipv4.conf.all.log_martians
    net.ipv4.conf.default.rp_filter
    net.ipv4.conf.default.log_martians
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
control 'kernel-14' do
  title 'Ignore ARP messages from IP and interface mismatch'
  impact 'medium'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.arp_ignore
    net.ipv4.conf.default.arp_ignore
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
# NOTE(review): the control below is a verbatim duplicate of 'kernel-14'
# above (same ID, same checks). It is redundant and — because InSpec
# control IDs must be unique — the second definition simply overrides the
# first; one copy should be removed.
control 'kernel-14' do
  title 'Ignore ARP messages from IP and interface mismatch'
  impact 'medium'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.arp_ignore
    net.ipv4.conf.default.arp_ignore
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
# Remaining network-stack hardening checks: ARP announcement scoping,
# IPv6 autoconfiguration/router-advertisement refusal, and rejection of
# ICMP redirects and source routing for both IPv4 and IPv6.
control 'kernel-15' do
  title 'Only respond to ARP messages on the appropriate interface'
  impact 'medium'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.arp_announce
    net.ipv4.conf.default.arp_announce
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 2 }
    end
  end
end
control 'kernel-16' do
  title 'Disable IPv6 autoconfiguration'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv6.conf.all.autoconf
    net.ipv6.conf.default.autoconf
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-17' do
  title 'Disable IPv6 neighbour solicitations'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv6.conf.all.dad_transmits
    net.ipv6.conf.default.dad_transmits
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-18' do
  title 'Assign one global unicast IPv6 address'
  impact 'medium'
  ref ref_file
  params = %w[
    net.ipv6.conf.all.max_addresses
    net.ipv6.conf.default.max_addresses
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 1 }
    end
  end
end
control 'kernel-19' do
  title 'Do not send ICMP redirects'
  impact 'medium'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.send_redirects
    net.ipv4.conf.default.send_redirects
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-20' do
  title 'Refuse IPv6 router advertisements'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv6.conf.all.accept_ra
    net.ipv6.conf.all.accept_ra_defrtr
    net.ipv6.conf.all.accept_ra_pinfo
    net.ipv6.conf.all.accept_ra_rtr_pref
    net.ipv6.conf.all.router_solicitations
    net.ipv6.conf.default.accept_ra
    net.ipv6.conf.default.accept_ra_defrtr
    net.ipv6.conf.default.accept_ra_pinfo
    net.ipv6.conf.default.accept_ra_rtr_pref
    net.ipv6.conf.default.router_solicitations
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-21' do
  title 'Refuse secure ICMP redirects'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.secure_redirects
    net.ipv4.conf.default.secure_redirects
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-22' do
  title 'kernel : Do not accept IP source routing'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.accept_source_route
    net.ipv4.conf.default.accept_source_route
    net.ipv6.conf.all.accept_source_route
    net.ipv6.conf.default.accept_source_route
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
control 'kernel-23' do
  title 'kernel : Do not accept ICMP redirects'
  impact 'low'
  ref ref_file
  params = %w[
    net.ipv4.conf.all.accept_redirects
    net.ipv4.conf.default.accept_redirects
    net.ipv6.conf.all.accept_redirects
    net.ipv6.conf.default.accept_redirects
  ]
  params.each do |param|
    describe kernel_parameter(param) do
      its('value') { should eq 0 }
    end
  end
end
| 22.536364 | 93 | 0.698669 |
21b9b20b5de0816ac076766cd0318a952ac26579 | 1,362 | require_relative "../test_client"
require "launchy"
module InfluxDB
  module Rails
    # RSpec matcher helpers for asserting on metrics captured by the
    # in-memory TestClient.
    module Matchers
      # Asserts that a metric with the given name (default "rails") and
      # matching tag/field options was recorded.
      def expect_metric(name: "rails", **options)
        expect(metrics).to include(
          a_hash_including(options.merge(name: name))
        )
      end
      # Negation of expect_metric.
      def expect_no_metric(name: "rails", **options)
        expect(metrics).not_to include(
          a_hash_including(options.merge(name: name))
        )
      end
      # Debug helper: dumps the captured metrics to tmp/metrics.json and
      # opens the file with Launchy.
      def save_and_open_metrics
        dir = File.join(File.dirname(__FILE__), "..", "..", "tmp")
        FileUtils.mkdir_p(dir)
        file_path = File.join(dir, "metrics.json")
        output = JSON.pretty_generate(metrics)
        File.write(file_path, output, mode: "wb")
        ::Launchy.open(file_path)
      end
      # All metrics recorded by the stubbed client so far.
      def metrics
        TestClient.metrics
      end
      # Runs at load time: resets configuration, swaps in the TestClient,
      # and clears captured metrics before each example.
      RSpec.configure do |config|
        config.before :each do
          InfluxDB::Rails.instance_variable_set :@configuration, nil
          InfluxDB::Rails.configure
          allow(InfluxDB::Rails).to receive(:client).and_return(InfluxDB::Rails::TestClient.new)
          allow_any_instance_of(InfluxDB::Rails::Configuration)
            .to receive(:ignored_environments).and_return(%w[development])
          InfluxDB::Rails::TestClient.metrics.clear
        end
        config.include InfluxDB::Rails::Matchers
      end
    end
  end
end
| 27.795918 | 96 | 0.629956 |
6a419357c4b91ae0fe2f6b0088e1834b39f4c818 | 283 | # DO NOT EDIT THIS FILE!
# This file is managed by ModuleSync.
#
# frozen_string_literal: true
require 'puppetlabs_spec_helper/module_spec_helper'
require 'rspec-puppet-facts'
include RspecPuppetFacts
# Point rspec-puppet at the fixture Hiera hierarchy.
RSpec.configure do |c|
  c.hiera_config = 'spec/fixtures/hiera/hiera.yaml'
end
| 20.214286 | 51 | 0.787986 |
616bced258df37717f599cb3c3826999e43da192 | 3,356 | module FHIR
# fhir/explanation_of_benefit_benefit_balance.rb
class ExplanationOfBenefitBenefitBalance < BackboneElement
include Mongoid::Document
embeds_one :category, class_name: 'FHIR::CodeableConcept'
embeds_one :excluded, class_name: 'FHIR::PrimitiveBoolean'
embeds_one :name, class_name: 'FHIR::PrimitiveString'
embeds_one :description, class_name: 'FHIR::PrimitiveString'
embeds_one :network, class_name: 'FHIR::CodeableConcept'
embeds_one :unit, class_name: 'FHIR::CodeableConcept'
embeds_one :term, class_name: 'FHIR::CodeableConcept'
embeds_many :financial, class_name: 'FHIR::ExplanationOfBenefitBenefitBalanceFinancial'
def as_json(*args)
result = super
unless self.category.nil?
result['category'] = self.category.as_json(*args)
end
unless self.excluded.nil?
result['excluded'] = self.excluded.value
serialized = Extension.serializePrimitiveExtension(self.excluded)
result['_excluded'] = serialized unless serialized.nil?
end
unless self.name.nil?
result['name'] = self.name.value
serialized = Extension.serializePrimitiveExtension(self.name)
result['_name'] = serialized unless serialized.nil?
end
unless self.description.nil?
result['description'] = self.description.value
serialized = Extension.serializePrimitiveExtension(self.description)
result['_description'] = serialized unless serialized.nil?
end
unless self.network.nil?
result['network'] = self.network.as_json(*args)
end
unless self.unit.nil?
result['unit'] = self.unit.as_json(*args)
end
unless self.term.nil?
result['term'] = self.term.as_json(*args)
end
unless self.financial.nil? || !self.financial.any?
result['financial'] = self.financial.map{ |x| x.as_json(*args) }
end
result.delete('id')
unless self.fhirId.nil?
result['id'] = self.fhirId
result.delete('fhirId')
end
result
end
def self.transform_json(json_hash, target = ExplanationOfBenefitBenefitBalance.new)
result = self.superclass.transform_json(json_hash, target)
result['category'] = CodeableConcept.transform_json(json_hash['category']) unless json_hash['category'].nil?
result['excluded'] = PrimitiveBoolean.transform_json(json_hash['excluded'], json_hash['_excluded']) unless json_hash['excluded'].nil?
result['name'] = PrimitiveString.transform_json(json_hash['name'], json_hash['_name']) unless json_hash['name'].nil?
result['description'] = PrimitiveString.transform_json(json_hash['description'], json_hash['_description']) unless json_hash['description'].nil?
result['network'] = CodeableConcept.transform_json(json_hash['network']) unless json_hash['network'].nil?
result['unit'] = CodeableConcept.transform_json(json_hash['unit']) unless json_hash['unit'].nil?
result['term'] = CodeableConcept.transform_json(json_hash['term']) unless json_hash['term'].nil?
result['financial'] = json_hash['financial'].map { |var| ExplanationOfBenefitBenefitBalanceFinancial.transform_json(var) } unless json_hash['financial'].nil?
result
end
end
end
| 47.942857 | 163 | 0.68236 |
bb2d051a45f3631a964ace20b50b9f01eede7ab7 | 2,423 | # Userใขใใซใฎใใชใใผใทใงใณใในใ๏ผname, email๏ผ
require 'rails_helper'
# Validation tests for the User model (name, email, password) and its
# before_save callback. Comments translated to English; two comment lines
# had lost their leading '#' during a bad encoding round-trip (they were
# bare text and broke parsing) — restored as comments below.
RSpec.describe User, type: :model do
  # The factory itself must build a valid record.
  it 'has a valid factory bot' do
    expect(build(:user)).to be_valid
  end
  # Attribute validations.
  describe 'validations' do
    it { is_expected.to validate_presence_of(:name) }
    it { is_expected.to validate_presence_of(:email) }
    it { is_expected.to validate_length_of(:name).is_at_most(50) }
    it { is_expected.to validate_length_of(:email).is_at_most(255) }
    it do
      is_expected.to allow_values('[email protected]',
                                  '[email protected]',
                                  '[email protected]',
                                  '[email protected]',
                                  '[email protected]').for(:email)
    end
    it do
      is_expected.to_not allow_values('user@example,com',
                                      'user_at_foo.org',
                                      'user.name@example.',
                                      'foo@bar_baz.com',
                                      'foo@bar+baz.com').for(:email)
    end
    # Password presence (has_secure_password rejects all-blank values).
    describe 'validate presence of password' do
      it 'is invalid with a blank password' do
        user = build(:user, password: ' ' * 6)
        expect(user).to_not be_valid
      end
    end
    it { is_expected.to validate_length_of(:password).is_at_least(6) }
  end
  # let! is used instead of let so the user is created eagerly (like a
  # before block). With lazy let the record would not be persisted before
  # the example runs and user.reload.email would be nil.
  describe 'validate unqueness of email' do
    let!(:user) { create(:user, email: '[email protected]') }
    it 'is invalid with a duplicate email' do
      user = build(:user, email: '[email protected]')
      expect(user).to_not be_valid
    end
    it 'is case insensitive in email' do
      user = build(:user, email: '[email protected]')
      expect(user).to_not be_valid
    end
  end
  # before_save callback (#email_downcase) must normalise the address.
  describe 'before_save' do
    describe '#email_downcase' do
      let!(:user) { create(:user, email: '[email protected]') }
      it 'makes email to low case' do
        expect(user.reload.email).to eq '[email protected]'
      end
    end
  end
end
| 36.712121 | 74 | 0.543541 |
03dd7cbbb84a4ddf60e361ebf3e7e5b12209297e | 1,105 | class GitHooksGo < Formula
desc "Git hooks manager"
homepage "https://git-hooks.github.io/git-hooks"
url "https://github.com/git-hooks/git-hooks/archive/v1.3.0.tar.gz"
sha256 "518eadf3229d9db16d603290634af8ae66461ec021edf646e8bca49deee81850"
license "MIT"
head "https://github.com/git-hooks/git-hooks.git"
bottle do
cellar :any_skip_relocation
sha256 "367a0e8d166a30749447132c88d5bf1c43fc8ab7c4316c91a990c13d4e887c32" => :catalina
sha256 "b61330cf67d4b8a572bb2f7a22434a00bb74d85bb93254ff6a60e8d3c8f12877" => :mojave
sha256 "95786772c28deeaaa6c979f93174e1f49bd6dd8370e8927861f6b950dd5b3910" => :high_sierra
sha256 "65b928768032bcc6085a23ed94d528dd586290da9f710746597d896af70123f9" => :x86_64_linux
end
depends_on "go" => :build
conflicts_with "git-hooks", :because => "both install `git-hooks` binaries"
def install
system "go", "build", *std_go_args, "-o", "#{bin}/git-hooks"
end
test do
system "git", "init"
system "git", "hooks", "install"
assert_match "Git hooks ARE installed in this repository.", shell_output("git hooks")
end
end
| 35.645161 | 94 | 0.753846 |
01d780a9e75c2592405f6facf7a4a3bf6253a59a | 1,706 | require 'test_helper'
class TwitterFeedRefresherTest < ActiveSupport::TestCase
setup do
@user = users(:ben)
@feed = @user.feeds.first
@keys = {"twitter_access_token" => "token", "twitter_access_secret" => "secret"}
@feed.update(feed_type: :twitter)
@user.update(@keys)
end
test "feed gets scheduled" do
Sidekiq::Worker.clear_all
assert_difference "TwitterFeedRefresher.jobs.size", +1 do
TwitterFeedRefresher.new().perform
end
args = [@feed.id, @feed.feed_url, [@keys]]
job = Sidekiq::Queues["feed_refresher_fetcher"].first
assert_equal args, job["args"]
assert(job.has_key?("at"), "job should have an 'at' parameter")
end
test "feed gets with passed user" do
Sidekiq::Worker.clear_all
assert_difference "Sidekiq::Queues['feed_refresher_fetcher_critical'].count", +1 do
TwitterFeedRefresher.new().enqueue_feed(@feed, @user)
end
args = [@feed.id, @feed.feed_url, [@keys]]
job = Sidekiq::Queues["feed_refresher_fetcher_critical"].first
assert_equal args, job["args"]
assert_not(job.has_key?("at"), "job should not have an 'at' parameter")
end
test "feed does not get scheduled because user doesn't match" do
Feed.class_eval do
def self.readonly_attributes
[]
end
end
@feed.update(feed_type: :twitter_home, feed_url: "https://twitter.com?screen_name=bsaid")
Sidekiq::Worker.clear_all
assert_no_difference "TwitterFeedRefresher.jobs.size" do
TwitterFeedRefresher.new().perform
end
@user.update(twitter_screen_name: "bsaid")
assert_difference "TwitterFeedRefresher.jobs.size", +1 do
TwitterFeedRefresher.new().perform
end
end
end | 29.929825 | 93 | 0.694021 |
61ff99132894b50e1caeba4c800e1498f0e29774 | 1,238 | # Copyright 2015 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "helper"
# A topic built from a gRPC resource is a full resource; one built only
# from a name is a lazy "reference" that has not been fetched yet.
describe Google::Cloud::PubSub::Topic, :reference, :mock_pubsub do
  let(:topic_name) { "topic-name-goes-here" }
  let(:topic_grpc) { Google::Cloud::PubSub::V1::Topic.new topic_hash(topic_name) }
  let(:topic) { Google::Cloud::PubSub::Topic.from_grpc topic_grpc, pubsub.service }
  it "is not reference when created with an HTTP method" do
    topic.wont_be :reference?
    topic.must_be :resource?
  end
  describe "reference topic" do
    # Overrides the outer `topic` with a name-only (unfetched) instance.
    let :topic do
      Google::Cloud::PubSub::Topic.from_name topic_name, pubsub.service
    end
    it "is reference" do
      topic.must_be :reference?
      topic.wont_be :resource?
    end
  end
end
| 32.578947 | 83 | 0.727787 |
1d380af389fd705bcd0397a94cdac14fe59b03e6 | 1,405 | class Dolt < Formula
desc "Git for Data"
homepage "https://github.com/liquidata-inc/dolt"
url "https://github.com/liquidata-inc/dolt/archive/v0.21.0.tar.gz"
sha256 "788aa1d906d4ee4e93ed576607a971f803b6fabf2aeb7394cf5b2b1940c4a8e9"
license "Apache-2.0"
livecheck do
url "https://github.com/liquidata-inc/dolt/releases/latest"
regex(%r{href=.*?/tag/v?(\d+(?:\.\d+)+)["' >]}i)
end
bottle do
cellar :any_skip_relocation
sha256 "7307ce20dc309692fea2e5f88e07bbb40c257cff03f3a5b3b47d6ec86a3ecfc6" => :catalina
sha256 "cfcf20596083a278fa12e04c22b2a1336604a9ddac12c35ab4e120d49ffca14f" => :mojave
sha256 "d8f5df86d585d9a62437776d043a1f9e481f29640b28bd6c9748dd0af7470d42" => :high_sierra
end
depends_on "go" => :build
def install
chdir "go" do
system "go", "build", *std_go_args, "./cmd/dolt"
system "go", "build", *std_go_args, "-o", bin/"git-dolt", "./cmd/git-dolt"
system "go", "build", *std_go_args, "-o", bin/"git-dolt-smudge", "./cmd/git-dolt-smudge"
end
end
test do
ENV["DOLT_ROOT_PATH"] = testpath
mkdir "state-populations" do
system bin/"dolt", "init", "--name", "test", "--email", "test"
system bin/"dolt", "sql", "-q", "create table state_populations ( state varchar(14), primary key (state) )"
assert_match "state_populations", shell_output("#{bin}/dolt sql -q 'show tables'")
end
end
end
| 35.125 | 113 | 0.683274 |
aba69a2014b97c0414c0240f0183b36b9d9f8901 | 2,524 | # frozen_string_literal: true
# See LICENSE.txt at root of repository
# GENERATED FILE - DO NOT EDIT!!
require 'ansible/ruby/modules/base'
module Ansible
module Ruby
module Modules
# Create, update and remove hosts.
class Cs_host < Base
# @return [String] Name of the host.
attribute :name
validates :name, presence: true, type: String
# @return [Object, nil] Url of the host used to create a host.,If not provided, C(http://) and param C(name) is used as url.,Only considered if C(state=present) and host does not yet exist.
attribute :url
# @return [Object, nil] Username for the host.,Required if C(state=present) and host does not yet exist.
attribute :username
# @return [Object, nil] Password for the host.,Required if C(state=present) and host does not yet exist.
attribute :password
# @return [String, nil] Name of the pod.,Required if C(state=present) and host does not yet exist.
attribute :pod
validates :pod, type: String
# @return [String, nil] Name of the cluster.
attribute :cluster
validates :cluster, type: String
# @return [:KVM, :VMware, :BareMetal, :XenServer, :LXC, :HyperV, :UCS, :OVM, :Simulator, nil] Name of the cluster.,Required if C(state=present) and host does not yet exist.
attribute :hypervisor
validates :hypervisor, expression_inclusion: {:in=>[:KVM, :VMware, :BareMetal, :XenServer, :LXC, :HyperV, :UCS, :OVM, :Simulator], :message=>"%{value} needs to be :KVM, :VMware, :BareMetal, :XenServer, :LXC, :HyperV, :UCS, :OVM, :Simulator"}, allow_nil: true
# @return [:enabled, :disabled, nil] Allocation state of the host.
attribute :allocation_state
validates :allocation_state, expression_inclusion: {:in=>[:enabled, :disabled], :message=>"%{value} needs to be :enabled, :disabled"}, allow_nil: true
# @return [Array<String>, String, nil] Tags of the host.
attribute :host_tags
validates :host_tags, type: TypeGeneric.new(String)
# @return [:present, :absent, nil] State of the host.
attribute :state
validates :state, expression_inclusion: {:in=>[:present, :absent], :message=>"%{value} needs to be :present, :absent"}, allow_nil: true
# @return [String, nil] Name of the zone in which the host should be deployed.,If not set, default zone is used.
attribute :zone
validates :zone, type: String
end
end
end
end
| 45.890909 | 266 | 0.654913 |
bb0603e7bc9624328d6d4002ae84a25a47d10de8 | 6,924 | require 'prime'
# Scratch exploration of Ruby's Prime library.
# NOTE(review): the block parameter |x| shadows the outer `x = 0`, so the
# outer variable never changes; the results of `x ** x` and
# `Prime::Generator23.new` are computed and discarded.
x = 0
10.times do |x|
  x ** x
  x+=1
  puts Math.log(x)
  puts x.prime_division
  Prime::Generator23.new
end
# Searches downward from `n` for the largest number whose prime
# factorisation is exactly p1**e1 * p2**e2 (no other prime factors).
# Returns [number, e1, e2], or [] when no such number exists at or above
# the lower search bound p1 * p2 * 2 (the original kata's cutoff).
def highest_biPrimefac(p1, p2, n)
  lower_bound = p1 * p2 * 2
  n.downto(lower_bound) do |candidate|
    factors = candidate.prime_division
    next unless factors.length == 2
    (base1, exp1), (base2, exp2) = factors
    return [candidate, exp1, exp2] if base1 == p1 && base2 == p2
  end
  []
end
#def highest_biPrimefac(p1, p2, n)
# # your code biggest num, smaller nums exp, larger num exp
# #until max >= n, (p1 ** (range)) * (p2 **(range))
# # p1 ** 1..? * p2 ** 1..?
# # if p2 **exp > max
# # p1 ** exp
# exp1 = 1
# exp2 = 1
# max = p1 * p2
# until p1 * p2 >= n
# # exp1 += 1
# # if p1 * p2 < n && p1 * p2 > p1 * p2 ** (exp1 -1)
# if p1 * p2 ** (exp2 +1) > max && p1 * p2 ** (exp2 +1) <= n
# exp2 +=1
# p p2 = p2 ** exp2
# puts max
# elsif p2 * p1 ** (exp1 +1) > max && p2 * p1 ** (exp1 +1) <= n
# p "P1"
# exp1 += 1
# p p1 = p1 ** exp1
# max = p1 **( exp1 + 1 )* p2 ** exp2
# # break
# elsif p1 ** (exp1 +1) * p2 ** (exp2 -1) > p1 ** (exp1 -1) * p2 ** (exp2 +1) && p2 * p1 ** (exp1 +1) <= n && p2 * p1 ** (exp1 +1) > max
# p "P2"
# p max = p1 **( exp1 + 1 )* p2 ** exp2
# exp2 -= 1
# end
# end
# puts max
# end
# highest_biPrimefac(2,3, 50)
# n.downto(p1) do |num|
# # if num.modulos(p1)[0] > num.modulos(p2)
# increment either primes exponent only if product closer to n , even if removing the other primes exponent gets closer to n without exceeding
# primes.inject(exp) do |product, (prime, index)|
# until product >= n
# prime **= exp
# exp+=1
# if prime ** (exp-1) * prime > product
# end
# end
# Trial-division primality test. Negative inputs are tested by absolute
# value; 0 and 1 are not prime.
def prime?(val)
  val = -val if val < 0
  return false if val < 2
  # BUG FIX: the original iterated `for num in 0..val`, so the very first
  # `val.divmod(0)` raised ZeroDivisionError for every val >= 2 (and
  # dividing by 1 would have mis-reported every number as composite).
  # Trial divisors must start at 2.
  num = 2
  loop do
    q, r = val.divmod(num)
    return true if q < num   # divisor passed sqrt(val): no factor found
    return false if r == 0   # exact divisor: composite
    num += 1
  end
end
# Rearranges a descending-sorted array into a "valley": elements at even
# indices walk down the left slope, elements at odd indices climb back up
# the right slope in reverse order.
def make_valley(arr)
  left_slope, right_slope = arr.partition.with_index { |_, idx| idx.even? }
  (left_slope + right_slope.reverse).flatten
end
# Codewars test cases for make_valley (requires the Codewars `Test`
# framework). NOTE(review): the `testing` helper these cases call is only
# defined much further down in this scratch file.
Test.describe("make_valley") do
  Test.it("Basic tests 1") do
    testing(make_valley([17, 17, 15, 14, 8, 7, 7, 5, 4, 4, 1]), [17, 15, 8, 7, 4, 1, 4, 5, 7, 14, 17])
    testing(make_valley([20, 7, 6, 2]), [20, 6, 2, 7])
    testing(make_valley([14, 10, 8]), [14, 8, 10])
    testing(make_valley([20, 18, 17, 13, 12, 12, 10, 9, 4, 2, 2, 1, 1]), [20, 17, 12, 10, 4, 2, 1, 1, 2, 9, 12, 13, 18])
    testing(make_valley([20, 16, 14, 10, 1]), [20, 14, 1, 10, 16])
    testing(make_valley([19, 19, 18, 14, 12, 11, 9, 7, 4]), [19, 18, 12, 9, 4, 7, 11, 14, 19])
    testing(make_valley([20, 18, 16, 15, 14, 14, 13, 13, 10, 9, 4, 4, 4, 1]), [20, 16, 14, 13, 10, 4, 4, 1, 4, 9, 13, 14, 15, 18])
    testing(make_valley([20, 20, 16, 14, 12, 12, 11, 10, 3, 2]), [20, 16, 12, 11, 3, 2, 10, 12, 14, 20])
    testing(make_valley([19, 17, 16, 15, 13, 8, 5, 5, 4, 4, 4]), [19, 16, 13, 5, 4, 4, 4, 5, 8, 15, 17])
    testing(make_valley([19, 8, 6]), [19, 6, 8])
  end
end
# NOTE(review): the next three definitions are successive unfinished
# drafts of the Codewars "play_pass" kata. The first draft is never
# closed (its `each_with_index do` block and `def` are left open), so the
# later defs end up nested inside it and the region does not parse as-is.
# Kept byte-for-byte as scratch history; only the last draft resembles
# the working version exercised by the asserts further below.
def play_pass(str, n)
  result_str = " "
  alpha = ("a".."z").to_a
  num = 9
  str.downcase.each_with_index do |l, i|
    if alpha.include?(l)
      letter = alpha.index(l) + n
      result_str << alpha[letter]
    elsif l.to_i
      result_str << (l - num).abs.to_s
    else
      result_str << l
    end
    result_str.reverse
  end
def play_pass(str, n)
  result_str = []
  alpha = ("a".."z").to_a
  num = 9
  str.downcase.each_char.with_index do |l, i|
    if alpha.include?(l)
      letter = alpha.index(l) + n
      i.even? ? l.upcase : l
      result_str << alpha[letter]
    elsif l.to_i == Numeric && l.to_i != 0
      result_str << (l.to_i - 9).abs.to_s
    else
      result_str << l
    end
  end
  result_str.map.with_index do |ch, i|
    if i.even? || i == 4
      ch.upcase!
    end
  end
  result_str.join('').reverse
end
def play_pass(str, n)
  result_str = []
  alpha = ("A".."Z").to_a
  numeric = (1..9).to_a
  num = 9
  result = []
  str.each_char.with_index do |l, i|
    if alpha.include?(l)
      letter = alpha.index(l) + n
      letter >= 26 ? result_str << alpha[letter-26] : result_str << alpha[letter]
    elsif numeric.include?(l.to_i) || l == '0'
      result_str << (l.to_i - num).abs.to_s
      p l.to_i
    else
      result_str << l
    end
  end
  result_str.map.with_index do |ch, i|
    i % 2 != 0 && ch != nil ? result << ch.downcase : result << ch
  end
  result.join('').reverse
end
# str.chars.map{|e| e.match(/\d/).nil? ? e : (9-e.to_i).to_s}.map{|x| x.match(/[A-Z]/).nil? ? x : ((x.ord+n-65)%26+65).chr}.map.with_index{|e, i| i.odd? ? e.downcase : e}.join.reverse
# BUG FIX: the two lines below were raw Codewars runner output pasted into
# the source, which is not valid Ruby; kept as comments so the file can
# parse. The two framework assertions after them are unchanged.
# Test Passed: Value == "4897 NkTrC Hq fT67 GjV Pq aP OqTh gOcE CoPcTi aO"
# Test Passed: Value == ".ySjWjKkNi jWf xIjJs wZtD JgDfR ...dJm yZg sJyKt tTy qTtY YcJy xNmY JxZ Y'StI N ZtD MyNb yXjStM Jg tY"
Test.assert_equals(play_pass("I LOVE YOU!!!", 1), "!!!vPz fWpM J")
Test.assert_equals(play_pass("MY GRANMA CAME FROM NY ON THE 23RD OF APRIL 2015", 2),
"4897 NkTrC Hq fT67 GjV Pq aP OqTh gOcE CoPcTi aO")
# def vert_mirror(strng)
# # Your code
# end
# def hor_mirror(strng)
# # Your code
# end
# def oper(fct, s)
# # Your code
# end Moves in squared strings (I)
# NOTE(review): first drafts, superseded by the "\n"-based versions
# defined further below (Ruby keeps the later definitions). These split on
# '/' rather than newlines, and `split('/').each.reverse` calls reverse
# on an Enumerator — presumably a NoMethodError at runtime; verify before
# reusing. `oper` here also ignores its fct argument and just prints s.
def vert_mirror(strng)
  strng.split('/').each.reverse
end
def hor_mirror(strng)
  strng.split('/').reverse
end
def oper(fct, s)
  p(s)
end
# Mirrors each row of a newline-delimited square string left-to-right.
def vert_mirror(strng)
  rows = strng.split("\n")
  rows.map { |row| row.reverse }.join("\n")
end
# Mirrors a newline-delimited square string top-to-bottom.
def hor_mirror(strng)
  rows = strng.split("\n")
  rows.reverse.join("\n")
end
# Applies the given transformation (a Method/Proc) to s.
def oper(fct, s)
  fct.(s)
end
# Thin wrapper around the Codewars assertion helper.
def testing(actual, expected)
  Test.assert_equals(actual, expected)
end
# Codewars test cases for the mirror operations (requires the Codewars
# `Test` framework).
Test.describe("opstrings") do
  Test.it("Basic tests vert_mirror") do
    testing(oper(method(:vert_mirror), "hSgdHQ\nHnDMao\nClNNxX\niRvxxH\nbqTVvA\nwvSyRu"), "QHdgSh\noaMDnH\nXxNNlC\nHxxvRi\nAvVTqb\nuRySvw")
    testing(oper(method(:vert_mirror), "IzOTWE\nkkbeCM\nWuzZxM\nvDddJw\njiJyHF\nPVHfSx"), "EWTOzI\nMCebkk\nMxZzuW\nwJddDv\nFHyJij\nxSfHVP")
  end
  Test.it("Basic tests hor_mirror") do
    testing(oper(method(:hor_mirror), "lVHt\nJVhv\nCSbg\nyeCt"), "yeCt\nCSbg\nJVhv\nlVHt")
    testing(oper(method(:hor_mirror), "njMK\ndbrZ\nLPKo\ncEYz"), "cEYz\nLPKo\ndbrZ\nnjMK")
  end
end
#Moves into squared strings
# NOTE(review): unfinished drafts for the "Moves in squared strings (II)"
# kata. `split('\n')` uses a single-quoted string, so it splits on the
# literal characters backslash-n, not on newlines — presumably a bug;
# verify before reusing. This `oper` redefinition (fct.call) supersedes
# the earlier print-only version.
def rot(strng)
  p strng.split('\n')[-1].reverse
end
def selfie_and_rot(strng)
  new = " "
  strng.split('/').each do |s|
    p new << s + "..."
  end
  new + new.reverse
end
def oper(fct, s)
  fct.call(s)
end
# Demo of closures capturing shared state: every Proc closes over the same
# `letters` array, so by the time any printer runs it prints the full
# accumulated string, once per character collected.
letters = []
printers = []
"Unbox Yourself".split('').each { |letter|
  letters << letter
  printers << Proc.new { puts letters.join }
}
# BUG FIX: the map call below sat on the same line as the closing `}` of
# the each block ("} printers.map{...}"), which is a syntax error; it is
# moved onto its own line. Both traversals are kept, as in the original.
printers.map { |printer| printer.call }
printers.each { |printer| printer.call }
| 26.128302 | 187 | 0.563836 |
28bb20d688021ea2dd758bbad0d4bb8211986031 | 384 | # frozen_string_literal: true
module ActionDispatch
  module Routing
    # Base class for route endpoints. Provides conservative defaults
    # (not a dispatcher, not a redirect, matches every request, serves
    # itself as the Rack app); subclasses override as needed.
    class Endpoint # :nodoc:
      def dispatcher?; false; end
      def redirect?; false; end
      def matches?(req); true; end
      def app; self; end
      def rack_app; app; end
      # True when the Rack app is a Rails::Engine subclass.
      def engine?
        rack_app.is_a?(Class) && rack_app < Rails::Engine
      end
    end
  end
end
| 21.333333 | 57 | 0.580729 |
794e6d3092404ec8a030669596945b3100b63053 | 559 | # frozen_string_literal: true
module WaterDrop
  # Namespace for all the things related with WaterDrop instrumentation process
  module Instrumentation
    class << self
      # Builds a manager for statistics callbacks
      # Memoized: the same manager instance is returned on every call.
      # NOTE(review): `||=` memoization is not synchronized — confirm this
      # is only initialized from a single thread.
      # @return [WaterDrop::CallbacksManager]
      def statistics_callbacks
        @statistics_callbacks ||= CallbacksManager.new
      end
      # Builds a manager for error callbacks
      # Memoized like statistics_callbacks.
      # @return [WaterDrop::CallbacksManager]
      def error_callbacks
        @error_callbacks ||= CallbacksManager.new
      end
    end
  end
end
| 26.619048 | 79 | 0.70483 |
f860f3a29c1da68e861edf821b48750de9a808d4 | 5,542 | =begin
#NSX-T Manager API
#VMware NSX-T Manager REST API
OpenAPI spec version: 2.5.1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.19
=end
require 'date'
module NSXT
  # Duplicate IP detection and control
  # Swagger-codegen model: one boolean flag plus the generic
  # serialization/deserialization plumbing shared by all generated models.
  class DuplicateIPDetection
    # Indicates whether duplicate IP detection should be enabled
    attr_accessor :duplicate_ip_detection_enabled

    # Attribute mapping from ruby-style variable name to JSON key.
    def self.attribute_map
      {
        :'duplicate_ip_detection_enabled' => :'duplicate_ip_detection_enabled'
      }
    end

    # Attribute type mapping.
    def self.swagger_types
      {
        :'duplicate_ip_detection_enabled' => :'BOOLEAN'
      }
    end

    # Initializes the object
    # Defaults duplicate_ip_detection_enabled to false when absent.
    # @param [Hash] attributes Model attributes in the form of hash
    def initialize(attributes = {})
      return unless attributes.is_a?(Hash)
      # convert string to symbol for hash key
      attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
      if attributes.has_key?(:'duplicate_ip_detection_enabled')
        self.duplicate_ip_detection_enabled = attributes[:'duplicate_ip_detection_enabled']
      else
        self.duplicate_ip_detection_enabled = false
      end
    end

    # Show invalid properties with the reasons. Usually used together with valid?
    # @return Array for valid properties with the reasons
    def list_invalid_properties
      invalid_properties = Array.new
      invalid_properties
    end

    # Check to see if the all the properties in the model are valid
    # @return true if the model is valid
    def valid?
      true
    end

    # Checks equality by comparing each attribute.
    # @param [Object] Object to be compared
    def ==(o)
      return true if self.equal?(o)
      self.class == o.class &&
          duplicate_ip_detection_enabled == o.duplicate_ip_detection_enabled
    end

    # @see the `==` method
    # @param [Object] Object to be compared
    def eql?(o)
      self == o
    end

    # Calculates hash code according to all attributes.
    # @return [Fixnum] Hash code
    def hash
      [duplicate_ip_detection_enabled].hash
    end

    # Builds the object from hash
    # @param [Hash] attributes Model attributes in the form of hash
    # @return [Object] Returns the model itself
    def build_from_hash(attributes)
      return nil unless attributes.is_a?(Hash)
      self.class.swagger_types.each_pair do |key, type|
        if type =~ /\AArray<(.*)>/i
          # check to ensure the input is an array given that the attribute
          # is documented as an array but the input is not
          if attributes[self.class.attribute_map[key]].is_a?(Array)
            self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
          end
        elsif !attributes[self.class.attribute_map[key]].nil?
          self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
        end # or else data not found in attributes(hash), not an issue as the data can be optional
      end
      self
    end

    # Deserializes the data based on type
    # @param string type Data type
    # @param string value Value to be deserialized
    # @return [Object] Deserialized data
    def _deserialize(type, value)
      case type.to_sym
      when :DateTime
        DateTime.parse(value)
      when :Date
        Date.parse(value)
      when :String
        value.to_s
      when :Integer
        value.to_i
      when :Float
        value.to_f
      when :BOOLEAN
        if value.to_s =~ /\A(true|t|yes|y|1)\z/i
          true
        else
          false
        end
      when :Object
        # generic object (usually a Hash), return directly
        value
      when /\AArray<(?<inner_type>.+)>\z/
        inner_type = Regexp.last_match[:inner_type]
        value.map { |v| _deserialize(inner_type, v) }
      when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
        k_type = Regexp.last_match[:k_type]
        v_type = Regexp.last_match[:v_type]
        {}.tap do |hash|
          value.each do |k, v|
            hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
          end
        end
      else # model
        temp_model = NSXT.const_get(type).new
        temp_model.build_from_hash(value)
      end
    end

    # Returns the string representation of the object
    # @return [String] String presentation of the object
    def to_s
      to_hash.to_s
    end

    # to_body is an alias to to_hash (backward compatibility)
    # @return [Hash] Returns the object in the form of hash
    def to_body
      to_hash
    end

    # Returns the object in the form of hash
    # @return [Hash] Returns the object in the form of hash
    def to_hash
      hash = {}
      self.class.attribute_map.each_pair do |attr, param|
        value = self.send(attr)
        next if value.nil?
        hash[param] = _to_hash(value)
      end
      hash
    end

    # Outputs non-array value in the form of hash
    # For object, use to_hash. Otherwise, just return the value
    # @param [Object] value Any valid value
    # @return [Hash] Returns the value in the form of hash
    def _to_hash(value)
      if value.is_a?(Array)
        value.compact.map { |v| _to_hash(v) }
      elsif value.is_a?(Hash)
        {}.tap do |hash|
          value.each { |k, v| hash[k] = _to_hash(v) }
        end
      elsif value.respond_to? :to_hash
        value.to_hash
      else
        value
      end
    end
  end
end
| 29.322751 | 107 | 0.634248 |
912e68b426f8cbaf4fb4e1106e0ef7d8d224b67e | 2,350 | #-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2013 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See doc/COPYRIGHT.rdoc for more details.
#++
# Creates an enumeration of the given type and name through the UI form.
# Expected table keys: "type" (human-readable enumeration type) and "name".
When(/^I create a new enumeration with the following:$/) do |table|
  attributes = table.rows_hash
  type = activity_type_from_string(attributes['type'])
  visit new_enumeration_path(:type => type)
  fill_in 'enumeration_name', :with => attributes['name']
  click_button(I18n.t(:button_create))
end
# Asserts that an enumeration with the given type and name is listed on
# the current page. Expected table keys: "type" and "name".
Then(/^I should see the enumeration:$/) do |table|
  attributes = table.rows_hash
  type = activity_type_from_string(attributes['type'])
  # as the html is not structured in any way we have to look for the first
  # h3 that contains the heading for the activity we are interested in
  # and then the td within the directly following table
  should have_selector("h3:contains('#{i18n_for_activity_type(type)}') + table td",
                       :text => attributes['name'])
end
# Maps a human-readable enumeration type name from a feature file
# (e.g. "Activity") to its enumeration class.
#
# @param string [String] the type name as written in the step table
# @return [Class] the matching enumeration class
# @raise [RuntimeError] when the name is not a known enumeration type
def activity_type_from_string(string)
  case string.gsub(/\s/, "_").camelcase
  when "Activity", "TimeEntryActivity"
    TimeEntryActivity
  else
    # Include the offending value so failing features are easy to debug.
    raise "Don't know the enumeration type #{string.inspect} yet"
  end
end
# Returns the localized section heading used for the given enumeration
# class on the enumerations page.
#
# @param type [Class] an enumeration class (currently only TimeEntryActivity)
# @return [String] the translated heading
# @raise [RuntimeError] when the class is not a known enumeration type
def i18n_for_activity_type(type)
  if type == TimeEntryActivity
    I18n.t(:enumeration_activities)
  else
    # Include the offending value so failing features are easy to debug.
    raise "Don't know the enumeration type #{type.inspect} yet"
  end
end
| 34.057971 | 91 | 0.742128 |
878ed74bb3589d89aacddbdae2292a0fe07e2734 | 27 | module CurationsHelper
end
| 9 | 22 | 0.888889 |
abf815c3923a23a57fbbf10d64d2a6f939d62215 | 7,423 | #! /usr/bin/env ruby
require 'spec_helper'
# Unit specs for Puppet::Parser::AST::Resource#evaluate: title flattening,
# virtual/exported flags, resource-type qualification and class resources.
describe Puppet::Parser::AST::Resource do
  # shorthand for the AST namespace used throughout these examples
  ast = Puppet::Parser::AST
  describe "for builtin types" do
    before :each do
      @title = Puppet::Parser::AST::String.new(:value => "mytitle")
      @compiler = Puppet::Parser::Compiler.new(Puppet::Node.new("mynode"))
      @scope = Puppet::Parser::Scope.new(@compiler)
      @scope.stubs(:resource).returns(stub_everything)
      @instance = ast::ResourceInstance.new(:title => @title, :parameters => ast::ASTArray.new(:children => []))
      @resource = ast::Resource.new(:type => "file", :instances => ast::ASTArray.new(:children => [@instance]))
      @resource.stubs(:qualified_type).returns("Resource")
    end
    it "should evaluate all its parameters" do
      param = stub 'param'
      param.expects(:safeevaluate).with(@scope).returns Puppet::Parser::Resource::Param.new(:name => "myparam", :value => "myvalue", :source => stub("source"))
      @instance.stubs(:parameters).returns [param]
      @resource.evaluate(@scope)
    end
    it "should evaluate its title" do
      @resource.evaluate(@scope)[0].title.should == "mytitle"
    end
    it "should flatten the titles array" do
      titles = []
      %w{one two}.each do |title|
        titles << Puppet::Parser::AST::String.new(:value => title)
      end
      array = Puppet::Parser::AST::ASTArray.new(:children => titles)
      @instance.title = array
      result = @resource.evaluate(@scope).collect { |r| r.title }
      result.should be_include("one")
      result.should be_include("two")
    end
    it "should create and return one resource objects per title" do
      titles = []
      %w{one two}.each do |title|
        titles << Puppet::Parser::AST::String.new(:value => title)
      end
      array = Puppet::Parser::AST::ASTArray.new(:children => titles)
      @instance.title = array
      result = @resource.evaluate(@scope).collect { |r| r.title }
      result.should be_include("one")
      result.should be_include("two")
    end
    it "should implicitly iterate over instances" do
      new_title = Puppet::Parser::AST::String.new(:value => "other_title")
      new_instance = ast::ResourceInstance.new(:title => new_title, :parameters => ast::ASTArray.new(:children => []))
      @resource.instances.push(new_instance)
      @resource.evaluate(@scope).collect { |r| r.title }.should == ["mytitle", "other_title"]
    end
    it "should handover resources to the compiler" do
      titles = []
      %w{one two}.each do |title|
        titles << Puppet::Parser::AST::String.new(:value => title)
      end
      array = Puppet::Parser::AST::ASTArray.new(:children => titles)
      @instance.title = array
      result = @resource.evaluate(@scope)
      result.each do |res|
        @compiler.catalog.resource(res.ref).should be_instance_of(Puppet::Parser::Resource)
      end
    end
    it "should generate virtual resources if it is virtual" do
      @resource.virtual = true
      result = @resource.evaluate(@scope)
      result[0].should be_virtual
    end
    it "should generate virtual and exported resources if it is exported" do
      @resource.exported = true
      result = @resource.evaluate(@scope)
      result[0].should be_virtual
      result[0].should be_exported
    end
    # Related to #806, make sure resources always look up the full path to the resource.
    describe "when generating qualified resources" do
      before do
        @scope = Puppet::Parser::Scope.new Puppet::Parser::Compiler.new(Puppet::Node.new("mynode"))
        @parser = Puppet::Parser::Parser.new(Puppet::Node::Environment.new)
        ["one", "one::two", "three"].each do |name|
          @parser.environment.known_resource_types.add(Puppet::Resource::Type.new(:definition, name, {}))
        end
        @twoscope = @scope.newscope(:namespace => "one")
        @twoscope.resource = @scope.resource
      end
      # helper: builds a Resource AST node titled "myresource" of the given type
      def resource(type, params = nil)
        params ||= Puppet::Parser::AST::ASTArray.new(:children => [])
        instance = Puppet::Parser::AST::ResourceInstance.new(
          :title => Puppet::Parser::AST::String.new(:value => "myresource"), :parameters => params)
        Puppet::Parser::AST::Resource.new(:type => type,
          :instances => Puppet::Parser::AST::ASTArray.new(:children => [instance]))
      end
      it "should be able to generate resources with fully qualified type information" do
        resource("two").evaluate(@twoscope)[0].type.should == "One::Two"
      end
      it "should be able to generate resources with unqualified type information" do
        resource("one").evaluate(@twoscope)[0].type.should == "One"
      end
      it "should correctly generate resources that can look up builtin types" do
        resource("file").evaluate(@twoscope)[0].type.should == "File"
      end
      it "should correctly generate resources that can look up defined classes by title" do
        @scope.known_resource_types.add_hostclass Puppet::Resource::Type.new(:hostclass, "Myresource", {})
        @scope.compiler.stubs(:evaluate_classes)
        res = resource("class").evaluate(@twoscope)[0]
        res.type.should == "Class"
        res.title.should == "Myresource"
      end
      it "should evaluate parameterized classes when they are instantiated" do
        @scope.known_resource_types.add_hostclass Puppet::Resource::Type.new(:hostclass, "Myresource", {})
        @scope.compiler.expects(:evaluate_classes).with(['myresource'],@twoscope,false,true)
        resource("class").evaluate(@twoscope)[0]
      end
      it "should fail for resource types that do not exist" do
        lambda { resource("nosuchtype").evaluate(@twoscope) }.should raise_error(Puppet::ParseError)
      end
    end
  end
  describe "for class resources" do
    before do
      @title = Puppet::Parser::AST::String.new(:value => "classname")
      @compiler = Puppet::Parser::Compiler.new(Puppet::Node.new("mynode"))
      @scope = Puppet::Parser::Scope.new(@compiler)
      @scope.stubs(:resource).returns(stub_everything)
      @instance = ast::ResourceInstance.new(:title => @title, :parameters => ast::ASTArray.new(:children => []))
      @resource = ast::Resource.new(:type => "Class", :instances => ast::ASTArray.new(:children => [@instance]))
      @resource.stubs(:qualified_type).returns("Resource")
      @type = Puppet::Resource::Type.new(:hostclass, "classname")
      @compiler.known_resource_types.add(@type)
    end
    it "should instantiate the class" do
      @compiler.stubs(:evaluate_classes)
      result = @resource.evaluate(@scope)
      result.length.should == 1
      result.first.ref.should == "Class[Classname]"
      @compiler.catalog.resource("Class[Classname]").should equal(result.first)
    end
    it "should cause its parent to be evaluated" do
      parent_type = Puppet::Resource::Type.new(:hostclass, "parentname")
      @compiler.stubs(:evaluate_classes)
      @compiler.known_resource_types.add(parent_type)
      @type.parent = "parentname"
      result = @resource.evaluate(@scope)
      result.length.should == 1
      result.first.ref.should == "Class[Classname]"
      @compiler.catalog.resource("Class[Classname]").should equal(result.first)
      @compiler.catalog.resource("Class[Parentname]").should be_instance_of(Puppet::Parser::Resource)
    end
  end
end
| 40.342391 | 159 | 0.645157 |
4ada8b26aad326e6c456f72f7aa9abaf1e15eb18 | 3,886 | module CandyCheck
module PlayStore
# Describes a successfully validated subscription
class Subscription
include Utils::AttributeReader
# @return [Hash] the raw attributes returned from the server
attr_reader :attributes
# The payment of the subscription is pending (payment_state)
PAYMENT_PENDING = 0
# The payment of the subscript is received (payment_state)
PAYMENT_RECEIVED = 1
# The subscription was canceled by the user (cancel_reason)
PAYMENT_CANCELED = 0
# The payment failed during processing (cancel_reason)
PAYMENT_FAILED = 1
# Free trial
PAYMENT_TRIAL = 2
# Initializes a new instance which bases on a JSON result
# from Google's servers
# @param attributes [Hash]
def initialize(attributes)
@attributes = attributes
end
# Check if the expiration date is passed
# @return [bool]
def expired?
overdue_days > 0
end
# Check if in trial
# @return [bool]
def trial?
payment_state == PAYMENT_TRIAL
end
# see if payment is ok
# @return [bool]
def payment_received?
payment_state == PAYMENT_RECEIVED
end
# see if payment is pending
# @return [bool]
def payment_pending?
payment_state == PAYMENT_PENDING
end
# see if payment has failed according to Google
# @return [bool]
def payment_failed?
cancel_reason == PAYMENT_FAILED
end
# see if this the user has canceled its subscription
# @return [bool]
def canceled_by_user?
cancel_reason == PAYMENT_CANCELED
end
# Get number of overdue days. If this is negative, it is not overdue.
# @return [Integer]
def overdue_days
(Date.today - expires_at.to_date).to_i
end
# Get the auto renewal status as given by Google
# @return [bool] true if renewing automatically, false otherwise
def auto_renewing?
read_bool(:auto_renewing)
end
# Get the payment state as given by Google
# @return [Integer]
def payment_state
read_integer(:payment_state)
end
# Get the price amount for the subscription in micros in the payed
# currency
# @return [Integer]
def price_amount_micros
read_integer(:price_amount_micros)
end
# Get the cancel reason, as given by Google
# @return [Integer]
def cancel_reason
read_integer(:cancel_reason)
end
# Get the kind of subscription as stored in the android publisher service
# @return [String]
def kind
read(:kind)
end
# Get developer-specified supplemental information about the order
# @return [String]
def developer_payload
read(:developer_payload)
end
# Get the currency code in ISO 4217 format, e.g. "GBP" for British pounds
# @return [String]
def price_currency_code
read(:price_currency_code)
end
# Get start time for subscription in milliseconds since Epoch
# @return [Integer]
def start_time_millis
read_integer(:start_time_millis)
end
# Get expiry time for subscription in milliseconds since Epoch
# @return [Integer]
def expiry_time_millis
read_integer(:expiry_time_millis)
end
# Get start time in UTC
# @return [DateTime]
def starts_at
read_datetime_from_millis(:start_time_millis)
end
# Get expiration time in UTC
# @return [DateTime]
def expires_at
read_datetime_from_millis(:expiry_time_millis)
end
# Get cancelation time in UTC
# @return [DateTime]
def canceled_at
read_datetime_from_millis(:user_cancellation_time_millis)
end
end
end
end
| 26.8 | 79 | 0.638446 |
f8e8cf7588111d1cdffdf504b39185e5188a3afb | 158 | class AddFirstLegislationNumberToDistrict < ActiveRecord::Migration[6.0]
def change
add_column :districts, :first_legislation_number, :string
end
end
| 26.333333 | 72 | 0.803797 |
f8188d3417b69d23347c3a67f20ad3ee187be9e8 | 2,695 | #
# Cookbook:: sc-mongodb
# Recipe:: mongodb_org_repo
#
# Copyright:: 2011, edelight GmbH
# Authors:
# Miquel Torres <[email protected]>
#
# Copyright:: 2016-2017, Grant Ridder
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Sets up the repositories for stable mongodb-org packages found here:
# http://www.mongodb.org/downloads#packages
# Configure the official mongodb-org package repository for the node's
# platform family (apt for Debian/Ubuntu, yum for RHEL-alikes).
node.override['mongodb']['package_name'] = 'mongodb-org'
# Repositories are laid out per major.minor version, e.g. ".../3.4/".
package_version_major = node['mongodb']['package_version'].to_f
# Yum base URL; only defined for the platforms listed below.
# NOTE(review): for other rhel-family platforms this stays nil and the
# yum_repository below would receive a nil baseurl -- confirm intended.
package_repo_url = case node['platform']
                   when 'redhat', 'oracle', 'centos'
                     "https://repo.mongodb.org/yum/redhat/$releasever/mongodb-org/#{package_version_major}/#{node['kernel']['machine'] =~ /x86_64/ ? 'x86_64' : 'i686'}"
                   when 'fedora'
                     "https://repo.mongodb.org/yum/redhat/7/mongodb-org/#{package_version_major}/#{node['kernel']['machine'] =~ /x86_64/ ? 'x86_64' : 'i686'}"
                   when 'amazon'
                     "https://repo.mongodb.org/yum/amazon/2013.03/mongodb-org/#{package_version_major}/x86_64/"
                   end
case node['platform_family']
when 'debian'
  # Ubuntu: https://docs.mongodb.com/manual/tutorial/install-mongodb-on-ubuntu/
  # Debian: https://docs.mongodb.com/manual/tutorial/install-mongodb-on-debian/
  apt_repository 'mongodb' do
    uri node['mongodb']['repo']
    distribution "#{node['lsb']['codename']}/mongodb-org/#{package_version_major}"
    components platform?('ubuntu') ? ['multiverse'] : ['main']
    key "https://www.mongodb.org/static/pgp/server-#{package_version_major}.asc"
  end
when 'amazon', 'fedora', 'rhel'
  # RHEL: https://docs.mongodb.com/manual/tutorial/install-mongodb-on-red-hat/
  # Amazon: https://docs.mongodb.com/manual/tutorial/install-mongodb-on-amazon/
  yum_repository 'mongodb' do
    description 'mongodb RPM Repository'
    baseurl package_repo_url
    gpgkey "https://www.mongodb.org/static/pgp/server-#{package_version_major}.asc"
    gpgcheck true
    sslverify true
    enabled true
  end
else
  # Fixed double negative in the original warning ("not yet not supported").
  Chef::Log.warn("Adding the #{node['platform_family']} mongodb-org repository is not yet supported by this cookbook")
end
| 42.109375 | 168 | 0.693506 |
79414a77b28fd9b89d6a1bb4c8e8d306a3bde5db | 388 | # frozen_string_literal: true
# Geo-aware overrides for design-repository access checks: when the request
# is served on a Geo node, both checks are skipped entirely.
module EE
  module Gitlab
    module GitAccessDesign
      extend ::Gitlab::Utils::Override

      private

      override :check_protocol!
      def check_protocol!
        # Geo replication traffic bypasses the protocol restriction.
        super unless geo?
      end

      override :check_can_create_design!
      def check_can_create_design!
        # Geo replication traffic bypasses the creation permission check.
        super unless geo?
      end
    end
  end
end
| 14.923077 | 40 | 0.621134 |
5d24a99c52d4430cee27d3b7649e9632e6a2b2d7 | 7,914 | module NOVAHawk::Providers
module Openstack
module RefreshParserCommon
module OrchestrationStacks
def stack_resources(stack)
return @resources[stack.id] if @resources && [email protected]_path(stack.id).blank?
@resources = {} unless @resources
@resources[stack.id] = safe_list { stack.resources }
end
def load_orchestration_stacks
process_collection(stacks, :orchestration_stacks) { |stack| parse_stack(stack) }
update_nested_stack_relations
end
private
def stacks
@stacks ||= detailed_stacks
end
def detailed_stacks
return [] unless @orchestration_service
# TODO(lsmola) We need a support of GET /{tenant_id}/stacks/detail in FOG, it was implemented here
# https://review.openstack.org/#/c/35034/, but never documented in API reference, so right now we
# can't get list of detailed stacks in one API call.
if @ems.kind_of?(NOVAHawk::Providers::Openstack::CloudManager) && ::Settings.ems.ems_openstack.refresh.heat.is_global_admin
@orchestration_service.handled_list(:stacks, {:show_nested => true, :global_tenant => true}, true).collect(&:details)
else
@orchestration_service.handled_list(:stacks, :show_nested => true).collect(&:details)
end
rescue Excon::Errors::Forbidden
# Orchestration service is detected but not open to the user
$log.warn("Skip refreshing stacks because the user cannot access the orchestration service")
[]
end
def parse_stack(stack)
uid = stack.id.to_s
resources = find_stack_resources(stack)
orchestration_stack_type = case @ems
when NOVAHawk::Providers::Openstack::CloudManager
"NOVAHawk::Providers::Openstack::CloudManager::OrchestrationStack"
when NOVAHawk::Providers::Openstack::InfraManager
"NOVAHawk::Providers::Openstack::InfraManager::OrchestrationStack"
else
"OrchestrationStack"
end
new_result = {
:type => orchestration_stack_type,
:ems_ref => uid,
:name => stack.stack_name,
:description => stack.description,
:status => stack.stack_status,
:status_reason => stack.stack_status_reason,
:parent_stack_id => stack.parent,
:resources => resources,
:outputs => find_stack_outputs(stack),
:parameters => find_stack_parameters(stack),
:orchestration_template => find_stack_template(stack),
:cloud_tenant => @data_index.fetch_path(:cloud_tenants, stack.service.current_tenant["id"])
}
return uid, new_result
end
def parse_stack_template(stack)
# Only need a temporary unique identifier for the template. Using the stack id is the cheapest way.
uid = stack.id
template = stack.template
template_type = template.format == "HOT" ? "OrchestrationTemplateHot" : "OrchestrationTemplateCfn"
new_result = {
:type => template_type,
:name => stack.stack_name,
:description => template.description,
:content => template.content,
:orderable => false
}
return uid, new_result
end
def parse_stack_parameter(param_key, param_val, stack_id)
uid = compose_ems_ref(stack_id, param_key)
new_result = {
:ems_ref => uid,
:name => param_key,
:value => param_val
}
return uid, new_result
end
def parse_stack_output(output, stack_id)
uid = compose_ems_ref(stack_id, output['output_key'])
new_result = {
:ems_ref => uid,
:key => output['output_key'],
:value => output['output_value'],
:description => output['description']
}
return uid, new_result
end
def parse_stack_resource(resource)
uid = resource.physical_resource_id
new_result = {
:ems_ref => uid,
:logical_resource => resource.logical_resource_id,
:physical_resource => uid,
:resource_category => resource.resource_type,
:resource_status => resource.resource_status,
:resource_status_reason => resource.resource_status_reason,
:last_updated => resource.updated_time
}
return uid, new_result
end
def get_stack_parameters(stack_id, parameters)
process_collection(parameters, :orchestration_stack_parameters) do |param_key, param_val|
parse_stack_parameter(param_key, param_val, stack_id)
end
end
def get_stack_outputs(stack_id, outputs)
process_collection(outputs, :orchestration_stack_outputs) do |output|
parse_stack_output(output, stack_id)
end
end
def get_stack_resources(resources)
process_collection(resources, :orchestration_stack_resources) { |resource| parse_stack_resource(resource) }
end
def get_stack_template(stack)
process_collection([stack], :orchestration_templates) { |the_stack| parse_stack_template(the_stack) }
end
def find_stack_parameters(stack)
raw_parameters = safe_list { stack.parameters }
get_stack_parameters(stack.id, raw_parameters)
raw_parameters.collect do |parameter|
@data_index.fetch_path(:orchestration_stack_parameters, compose_ems_ref(stack.id, parameter[0]))
end
end
def find_stack_template(stack)
get_stack_template(stack)
@data_index.fetch_path(:orchestration_templates, stack.id)
end
def find_stack_outputs(stack)
raw_outputs = safe_list { stack.outputs }
get_stack_outputs(stack.id, raw_outputs)
raw_outputs.collect do |output|
@data_index.fetch_path(:orchestration_stack_outputs, compose_ems_ref(stack.id, output['output_key']))
end
end
def find_stack_resources(stack)
# convert the AWS Resource Summary collection to an array to avoid the same API getting called twice
raw_resources = stack_resources(stack)
# physical_resource_id can be empty if the resource was not successfully created; ignore such
raw_resources.reject! { |r| r.physical_resource_id.nil? }
get_stack_resources(raw_resources)
raw_resources.collect do |resource|
physical_id = resource.physical_resource_id
@resource_to_stack[physical_id] = stack.id
@data_index.fetch_path(:orchestration_stack_resources, physical_id)
end
end
#
# Helper methods
#
# Remap from children to parent
def update_nested_stack_relations
@data[:orchestration_stacks].each do |stack|
parent_stack = @data_index.fetch_path(:orchestration_stacks, stack[:parent_stack_id])
stack[:parent] = parent_stack if parent_stack
stack.delete(:parent_stack_id)
end
end
# Compose an ems_ref combining some existing keys
def compose_ems_ref(*keys)
keys.join('_')
end
end
end
end
end
| 40.172589 | 133 | 0.592747 |
91701e9ea3c07cefc53edb605ae7c7ccb324bfc1 | 14,457 | # frozen_string_literal: true
# Use this hook to configure devise mailer, warden hooks and so forth.
# Many of these configuration options can be set straight in your model.
Devise.setup do |config|
# The secret key used by Devise. Devise uses this key to generate
# random tokens. Changing this key will render invalid all existing
# confirmation, reset password and unlock tokens in the database.
# Devise will use the `secret_key_base` as its `secret_key`
# by default. You can change it below and use your own secret key.
# config.secret_key = 'a6b3e54e7a604516091cf746eeaedeff7d4ebda67eb0ac8cbf286df66f83ef81572cffc9e865f6dbb057c8329967a6a2974e1d919a1f6ca790c475c641cad428'
# ==> Controller configuration
# Configure the parent class to the devise controllers.
# config.parent_controller = 'DeviseController'
# ==> Mailer Configuration
# Configure the e-mail address which will be shown in Devise::Mailer,
# note that it will be overwritten if you use your own mailer class
# with default "from" parameter.
config.mailer_sender = '[email protected]'
# Configure the class responsible to send e-mails.
# config.mailer = 'Devise::Mailer'
# Configure the parent class responsible to send e-mails.
# config.parent_mailer = 'ActionMailer::Base'
# ==> ORM configuration
# Load and configure the ORM. Supports :active_record (default) and
# :mongoid (bson_ext recommended) by default. Other ORMs may be
# available as additional gems.
require 'devise/orm/active_record'
# ==> Configuration for any authentication mechanism
# Configure which keys are used when authenticating a user. The default is
# just :email. You can configure it to use [:username, :subdomain], so for
# authenticating a user, both parameters are required. Remember that those
# parameters are used only when authenticating and not when retrieving from
# session. If you need permissions, you should implement that in a before filter.
# You can also supply a hash where the value is a boolean determining whether
# or not authentication should be aborted when the value is not present.
# config.authentication_keys = [:email]
# Configure parameters from the request object used for authentication. Each entry
# given should be a request method and it will automatically be passed to the
# find_for_authentication method and considered in your model lookup. For instance,
# if you set :request_keys to [:subdomain], :subdomain will be used on authentication.
# The same considerations mentioned for authentication_keys also apply to request_keys.
# config.request_keys = []
# Configure which authentication keys should be case-insensitive.
# These keys will be downcased upon creating or modifying a user and when used
# to authenticate or find a user. Default is :email.
config.case_insensitive_keys = [:email]
# Configure which authentication keys should have whitespace stripped.
# These keys will have whitespace before and after removed upon creating or
# modifying a user and when used to authenticate or find a user. Default is :email.
config.strip_whitespace_keys = [:email]
# Tell if authentication through request.params is enabled. True by default.
# It can be set to an array that will enable params authentication only for the
# given strategies, for example, `config.params_authenticatable = [:database]` will
# enable it only for database (email + password) authentication.
# config.params_authenticatable = true
# Tell if authentication through HTTP Auth is enabled. False by default.
# It can be set to an array that will enable http authentication only for the
# given strategies, for example, `config.http_authenticatable = [:database]` will
# enable it only for database authentication. The supported strategies are:
# :database = Support basic authentication with authentication key + password
# config.http_authenticatable = false
# If 401 status code should be returned for AJAX requests. True by default.
# config.http_authenticatable_on_xhr = true
# The realm used in Http Basic Authentication. 'Application' by default.
# config.http_authentication_realm = 'Application'
# It will change confirmation, password recovery and other workflows
# to behave the same regardless if the e-mail provided was right or wrong.
# Does not affect registerable.
# config.paranoid = true
# By default Devise will store the user in session. You can skip storage for
# particular strategies by setting this option.
# Notice that if you are skipping storage for all authentication paths, you
# may want to disable generating routes to Devise's sessions controller by
# passing skip: :sessions to `devise_for` in your config/routes.rb
config.skip_session_storage = [:http_auth]
# By default, Devise cleans up the CSRF token on authentication to
# avoid CSRF token fixation attacks. This means that, when using AJAX
# requests for sign in and sign up, you need to get a new CSRF token
# from the server. You can disable this option at your own risk.
# config.clean_up_csrf_token_on_authentication = true
# When false, Devise will not attempt to reload routes on eager load.
# This can reduce the time taken to boot the app but if your application
# requires the Devise mappings to be loaded during boot time the application
# won't boot properly.
# config.reload_routes = true
# ==> Configuration for :database_authenticatable
# For bcrypt, this is the cost for hashing the password and defaults to 11. If
# using other algorithms, it sets how many times you want the password to be hashed.
#
# Limiting the stretches to just one in testing will increase the performance of
# your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use
# a value less than 10 in other environments. Note that, for bcrypt (the default
# algorithm), the cost increases exponentially with the number of stretches (e.g.
# a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation).
config.stretches = Rails.env.test? ? 1 : 11
# Set up a pepper to generate the hashed password.
# config.pepper = 'ff896e7fbc8377024896b8809c2b80aecea03e1a2605dbd43c58815b13703c829ecb2b5cf927e7fa91caaedf0b218558561f1eac08a9ea5026da92a550a750e5'
# Send a notification to the original email when the user's email is changed.
# config.send_email_changed_notification = false
# Send a notification email when the user's password is changed.
# config.send_password_change_notification = false
# ==> Configuration for :confirmable
# A period that the user is allowed to access the website even without
# confirming their account. For instance, if set to 2.days, the user will be
# able to access the website for two days without confirming their account,
# access will be blocked just in the third day.
# You can also set it to nil, which will allow the user to access the website
# without confirming their account.
# Default is 0.days, meaning the user cannot access the website without
# confirming their account.
# config.allow_unconfirmed_access_for = 2.days
# A period that the user is allowed to confirm their account before their
# token becomes invalid. For example, if set to 3.days, the user can confirm
# their account within 3 days after the mail was sent, but on the fourth day
# their account can't be confirmed with the token any more.
# Default is nil, meaning there is no restriction on how long a user can take
# before confirming their account.
# config.confirm_within = 3.days
# If true, requires any email changes to be confirmed (exactly the same way as
# initial account confirmation) to be applied. Requires additional unconfirmed_email
# db field (see migrations). Until confirmed, new email is stored in
# unconfirmed_email column, and copied to email column on successful confirmation.
config.reconfirmable = true
# Defines which key will be used when confirming an account
# config.confirmation_keys = [:email]
# ==> Configuration for :rememberable
# The time the user will be remembered without asking for credentials again.
# config.remember_for = 2.weeks
# Invalidates all the remember me tokens when the user signs out.
config.expire_all_remember_me_on_sign_out = true
# If true, extends the user's remember period when remembered via cookie.
# config.extend_remember_period = false
# Options to be passed to the created cookie. For instance, you can set
# secure: true in order to force SSL only cookies.
# config.rememberable_options = {}
# ==> Configuration for :validatable
# Range for password length.
config.password_length = 6..128
# Email regex used to validate email formats. It simply asserts that
# one (and only one) @ exists in the given string. This is mainly
# to give user feedback and not to assert the e-mail validity.
config.email_regexp = /\A[^@\s]+@[^@\s]+\z/
# ==> Configuration for :timeoutable
# The time you want to timeout the user session without activity. After this
# time the user will be asked for credentials again. Default is 30 minutes.
# config.timeout_in = 30.minutes
# ==> Configuration for :lockable
# Defines which strategy will be used to lock an account.
# :failed_attempts = Locks an account after a number of failed attempts to sign in.
# :none = No lock strategy. You should handle locking by yourself.
# config.lock_strategy = :failed_attempts
# Defines which key will be used when locking and unlocking an account
# config.unlock_keys = [:email]
# Defines which strategy will be used to unlock an account.
# :email = Sends an unlock link to the user email
# :time = Re-enables login after a certain amount of time (see :unlock_in below)
# :both = Enables both strategies
# :none = No unlock strategy. You should handle unlocking by yourself.
# config.unlock_strategy = :both
# Number of authentication tries before locking an account if lock_strategy
# is failed attempts.
# config.maximum_attempts = 20
# Time interval to unlock the account if :time is enabled as unlock_strategy.
# config.unlock_in = 1.hour
# Warn on the last attempt before the account is locked.
# config.last_attempt_warning = true
# ==> Configuration for :recoverable
#
# Defines which key will be used when recovering the password for an account
# config.reset_password_keys = [:email]
# Time interval you can reset your password with a reset password key.
# Don't put a too small interval or your users won't have the time to
# change their passwords.
config.reset_password_within = 6.hours
# When set to false, does not sign a user in automatically after their password is
# reset. Defaults to true, so a user is signed in automatically after a reset.
# config.sign_in_after_reset_password = true
# ==> Configuration for :encryptable
# Allow you to use another hashing or encryption algorithm besides bcrypt (default).
# You can use :sha1, :sha512 or algorithms from others authentication tools as
# :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20
# for default behavior) and :restful_authentication_sha1 (then you should set
# stretches to 10, and copy REST_AUTH_SITE_KEY to pepper).
#
# Require the `devise-encryptable` gem when using anything other than bcrypt
# config.encryptor = :sha512
# ==> Scopes configuration
# Turn scoped views on. Before rendering "sessions/new", it will first check for
# "users/sessions/new". It's turned off by default because it's slower if you
# are using only default views.
# config.scoped_views = false
# Configure the default scope given to Warden. By default it's the first
# devise role declared in your routes (usually :user).
# config.default_scope = :user
# Set this configuration to false if you want /users/sign_out to sign out
# only the current scope. By default, Devise signs out all scopes.
# config.sign_out_all_scopes = true
# ==> Navigation configuration
# Lists the formats that should be treated as navigational. Formats like
# :html, should redirect to the sign in page when the user does not have
# access, but formats like :xml or :json, should return 401.
#
# If you have any extra navigational formats, like :iphone or :mobile, you
# should add them to the navigational formats lists.
#
# The "*/*" below is required to match Internet Explorer requests.
# config.navigational_formats = ['*/*', :html]
# The default HTTP method used to sign out a resource. Default is :delete.
config.sign_out_via = :delete
# ==> OmniAuth
# Add a new OmniAuth provider. Check the wiki for more information on setting
# up on your models and hooks.
# config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo'
# ==> Warden configuration
# If you want to use other strategies, that are not supported by Devise, or
# change the failure app, you can configure them inside the config.warden block.
#
# config.warden do |manager|
# manager.intercept_401 = false
# manager.default_strategies(scope: :user).unshift :some_external_strategy
# end
# ==> Mountable engine configurations
# When using Devise inside an engine, let's call it `MyEngine`, and this engine
# is mountable, there are some extra configurations to be taken into account.
# The following options are available, assuming the engine is mounted as:
#
# mount MyEngine, at: '/my_engine'
#
# The router that invoked `devise_for`, in the example above, would be:
# config.router_name = :my_engine
#
# When using OmniAuth, Devise cannot automatically set OmniAuth path,
# so you need to do it manually. For the users scope, it would be:
# config.omniauth_path_prefix = '/my_engine/users/auth'
# ==> Turbolinks configuration
# If your app is using Turbolinks, Turbolinks::Controller needs to be included to make redirection work correctly:
#
# ActiveSupport.on_load(:devise_failure_app) do
# include Turbolinks::Controller
# end
# ==> Configuration for :registerable
# When set to false, does not sign a user in automatically after their password is
# changed. Defaults to true, so a user is signed in automatically after changing a password.
# config.sign_in_after_change_password = true
end
| 48.19 | 154 | 0.751262 |
ac9ff154e042c90058f8c02cc62c8bbfc1f88e5a | 1,158 | #!/usr/bin/env ruby -w
require 'rmagick'
# RMagick demo: renders the same open polyline with each of the three
# stroke_linejoin styles ('miter', 'round', 'bevel') side by side, marks the
# apex of each, labels them, and writes the result to stroke_linejoin.gif.
imgl = Magick::ImageList.new
imgl.new_image(400, 150) { self.background_color = 'white' }
gc = Magick::Draw.new
# Thick black strokes with no fill so only the join shape is visible.
gc.stroke('black').stroke_width(15)
gc.fill_opacity(0)
gc.stroke_linecap('butt')
# Draw lines with miter join
gc.stroke_linejoin('miter')
gc.polyline(25, 100, 75, 25, 125, 100)
# Draw lines with round join
gc.stroke_linejoin('round')
gc.polyline(150, 100, 200, 25, 250, 100)
# Draw lines with bevel join
gc.stroke_linejoin('bevel')
gc.polyline(275, 100, 325, 25, 375, 100)
# Show line endpoints in pink
# (thin pink overlay traces the path itself over the thick strokes)
gc.fill('lightpink').fill_opacity(0)
gc.stroke('lightpink').stroke_width(2)
gc.stroke_linejoin('miter')
gc.polyline(25, 100, 75, 25, 125, 100)
gc.polyline(150, 100, 200, 25, 250, 100)
gc.polyline(275, 100, 325, 25, 375, 100)
# Small filled dots mark each polyline's apex.
gc.fill_opacity(1)
gc.circle(75, 25, 76, 26)
gc.circle(200, 25, 201, 26)
gc.circle(325, 25, 326, 26)
# Annotate
gc.fill('black')
gc.stroke('transparent')
gc.pointsize(14)
gc.font_weight(Magick::BoldWeight)
gc.text(35, 120, "\"'miter' join\"")
gc.text(160, 120, "\"'round' join\"")
gc.text(280, 120, "\"'bevel' join\"")
gc.draw(imgl)
imgl.write('stroke_linejoin.gif')
| 23.632653 | 60 | 0.699482 |
38dbe62fde72995fd310a4c596433e237d4b55c5 | 5,769 | # encoding: utf-8
module Mongoid #:nodoc:
# Observer classes respond to life cycle callbacks to implement trigger-like
# behavior outside the original class. This is a great way to reduce the
# clutter that normally comes when the model class is burdened with
# functionality that doesn't pertain to the core responsibility of the
# class. Mongoid's observers work similar to ActiveRecord's. Example:
#
# class CommentObserver < Mongoid::Observer
# def after_save(comment)
# Notifications.comment(
# "[email protected]", "New comment was posted", comment
# ).deliver
# end
# end
#
# This Observer sends an email when a Comment#save is finished.
#
# class ContactObserver < Mongoid::Observer
# def after_create(contact)
# contact.logger.info('New contact added!')
# end
#
# def after_destroy(contact)
# contact.logger.warn("Contact with an id of #{contact.id} was destroyed!")
# end
# end
#
# This Observer uses logger to log when specific callbacks are triggered.
#
# == Observing a class that can't be inferred
#
# Observers will by default be mapped to the class with which they share a
# name. So CommentObserver will be tied to observing Comment,
# ProductManagerObserver to ProductManager, and so on. If you want to
# name your observer differently than the class you're interested in
# observing, you can use the Observer.observe class method which takes
# either the concrete class (Product) or a symbol for that class (:product):
#
# class AuditObserver < Mongoid::Observer
# observe :account
#
# def after_update(account)
# AuditTrail.new(account, "UPDATED")
# end
# end
#
# If the audit observer needs to watch more than one kind of object,
# this can be specified with multiple arguments:
#
# class AuditObserver < Mongoid::Observer
# observe :account, :balance
#
# def after_update(record)
# AuditTrail.new(record, "UPDATED")
# end
# end
#
# The AuditObserver will now act on both updates to Account and Balance
# by treating them both as records.
#
# == Available callback methods
#
# * after_initialize
# * before_validation
# * after_validation
# * before_create
# * around_create
# * after_create
# * before_update
# * around_update
# * after_update
# * before_save
# * around_save
# * after_save
# * before_destroy
# * around_destroy
# * after_destroy
#
# == Storing Observers in Rails
#
# If you're using Mongoid within Rails, observer classes are usually stored
# in +app/models+ with the naming convention of +app/models/audit_observer.rb+.
#
# == Configuration
#
# In order to activate an observer, list it in the +config.mongoid.observers+
# configuration setting in your +config/application.rb+ file.
#
# config.mongoid.observers = :comment_observer, :signup_observer
#
# Observers will not be invoked unless you define them in your
# application configuration.
#
# == Loading
#
# Observers register themselves with the model class that they observe,
# since it is the class that notifies them of events when they occur.
# As a side-effect, when an observer is loaded, its corresponding model
# class is loaded.
#
# Observers are loaded after the application initializers, so that
# observed models can make use of extensions. If by any chance you are
# using observed models in the initialization, you can
# still load their observers by calling +ModelObserver.instance+ before.
# Observers are singletons and that call instantiates and registers them.
class Observer < ActiveModel::Observer
# Instantiate the new observer. Will add all child observers as well.
#
# @example Instantiate the observer.
# Mongoid::Observer.new
#
# @since 2.0.0.rc.8
def initialize
super and observed_descendants.each { |klass| add_observer!(klass) }
end
protected
# Get all the child observers.
#
# @example Get the children.
# observer.observed_descendants
#
# @return [ Array<Class> ] The children.
#
# @since 2.0.0.rc.8
def observed_descendants
observed_classes.sum([]) { |klass| klass.descendants }
end
# Adds the specified observer to the class.
#
# @example Add the observer.
# observer.add_observer!(Document)
#
# @param [ Class ] klass The child observer to add.
#
# @since 2.0.0.rc.8
def add_observer!(klass)
super and define_callbacks(klass)
end
# Defines all the callbacks for each observer of the model.
#
# @example Define all the callbacks.
# observer.define_callbacks(Document)
#
# @param [ Class ] klass The model to define them on.
#
# @since 2.0.0.rc.8
def define_callbacks(klass)
tap do |observer|
observer_name = observer.class.name.underscore.gsub('/', '__')
Mongoid::Callbacks::CALLBACKS.each do |callback|
next unless respond_to?(callback)
callback_meth = :"_notify_#{observer_name}_for_#{callback}"
unless klass.respond_to?(callback_meth)
klass.send(:define_method, callback_meth) do |&block|
if value = observer.update(callback, self, &block)
value
else
block.call if block
end
end
klass.send(callback, callback_meth)
end
end
end
end
def disabled_for?(object)
klass = object.class
return false unless klass.respond_to?(:observers)
klass.observers.disabled_for?(self) || Mongoid.observers.disabled_for?(self)
end
end
end
| 31.697802 | 83 | 0.664067 |
87088658bf37e2282e9a9f66e7b3e79b7b6ce7db | 356 | name 'disabled'
# Chef cookbook metadata for the "disabled" marker cookbook: its presence on
# a node's run list indicates the node should not run chef-client.
maintainer       'Josiah Kiehl'
maintainer_email '[email protected]'
license          'Apache 2.0'
description      'Prevents a node from running chef-client as having this on the run list indicates that the node is disabled'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version          '0.1.0'
| 39.555556 | 126 | 0.696629 |
edf3bcc0dcb7904a3bc6fcd4ec799139ce82b134 | 950 | # encoding: utf-8
# This file is distributed under New Relic's license terms.
# See https://github.com/newrelic/rpm/blob/master/LICENSE for complete details.
module NewRelic
  class Control
    # Value object holding the host/port pair for the remote server or
    # proxy server.
    #
    # Defined by assigning Struct.new to a constant rather than subclassing
    # an anonymous Struct: the class gets a proper name and re-loading this
    # file cannot raise a "superclass mismatch" error.
    Server = Struct.new(:name, :port) do
      # @return [String] "host:port", e.g. "collector.newrelic.com:80"
      def to_s; "#{name}:#{port}"; end
    end

    # Contains methods that deal with connecting to the server
    module ServerMethods
      # The collector server, memoized on first access.
      def server
        @remote_server ||= server_from_host(nil)
      end

      # the server we should contact for api requests, like uploading
      # deployments and the like
      def api_server
        @api_server ||= NewRelic::Control::Server.new(Agent.config[:api_host], Agent.config[:api_port])
      end

      # Build a Server from an explicit hostname, falling back to the
      # configured host when hostname is nil.
      def server_from_host(hostname=nil)
        NewRelic::Control::Server.new(hostname || Agent.config[:host], Agent.config[:port])
      end
    end

    include ServerMethods
  end
end
| 28.787879 | 103 | 0.681053 |
e8f591ec3b02c018eaa9037b2c23b244e91ece95 | 764 | require File.expand_path('test_helper', File.dirname(__FILE__))
class TestHTMLOutputter < MiniTest::Unit::TestCase
  # Parse one 835 (remittance) and one 837 (claim) sample EDI file per test.
  def setup
    @parser = Hippo_eyeDoc::Parser.new
    @sample_835 = @parser.parse_file('samples/005010X221A1_business_scenario_1.edi').first
    @sample_837 = @parser.parse_file('samples/005010X231A1_01.edi').first
  end
  # A single segment's HTML rendering is just its string form.
  def test_segment_to_html_returns_segment_to_s
    assert_equal @sample_835.BPR.to_s, @sample_835.BPR.to_html
  end
  def test_transaction_to_html_returns_string
    assert_kind_of String, @sample_835.to_html
  end
  # Golden-file comparison against known-good HTML output.
  def test_verify_output_against_known_good_html
    assert_equal File.read('samples/sample835.html'), @sample_835.to_html
    assert_equal File.read('samples/sample837.html'), @sample_837.to_html
  end
end
| 33.217391 | 90 | 0.790576 |
ed5ba5f4c6044b99ccacee5d86131dfde2fc4290 | 1,108 | require "jp_jis_code/mapping"
module JpJisCode
class Code
attr_accessor :code, :prefecture_name, :city_name, :prefecture_name_h, :city_name_h
def self.build(ret)
city = self.new
city.code = ret[:code]
city.prefecture_name = ret[:prefecture_name]
city.city_name = ret[:city_name]
city.prefecture_name_h = ret[:prefecture_name_h]
city.city_name_h = ret[:city_name_h]
city
end
def self.find(code)
n_code = normalized_code(code)
return unless n_code
ret = Mapping.data[n_code]
return unless ret
build(ret)
end
def self.normalized_code(code)
return false if code.nil?
return false unless code.is_a?(String)
return check_digit(code) if code.to_s.length == 5
return code if code.to_s.length == 6
false
end
def self.check_digit(code)
digit = code.to_s.chars.map(&:to_i)
last_digit = (((digit[0] * 6 + digit[1] * 5 + digit[2] * 4 + digit[3] * 3 + digit[4] * 2) % 11) - 11).abs % 10
code.to_s + last_digit.to_s
end
end
end | 27.7 | 117 | 0.617329 |
e8bd6b8d7643c05054488dc7f4907998d1f91c6d | 497 | class StripeReview < StripeModelCallbacks::ApplicationRecord
belongs_to :stripe_charge, optional: true, primary_key: "stripe_id"
def self.stripe_class
Stripe::Review
end
def assign_from_stripe(object)
check_object_is_stripe_class(object)
assign_attributes(
stripe_charge_id: object.charge
)
StripeModelCallbacks::AttributesAssignerService.execute!(
model: self, stripe_model: object,
attributes: %w[id created livemode open reason]
)
end
end
| 24.85 | 69 | 0.748491 |
f7cf2ec055c6c8ca3a50db9ba9b6cccb78ca3b06 | 4,933 | module RailsKindeditor
module Helper
def kindeditor_tag(name, content = nil, options = {})
id = sanitize_to_id(name)
input_html = { :id => id }.merge(options.delete(:input_html) || {})
input_html[:class] = "#{input_html[:class]} rails_kindeditor"
output = ActiveSupport::SafeBuffer.new
output << text_area_tag(name, content, input_html)
output << javascript_tag(js_replace(id, options))
end
def kindeditor(name, method, options = {})
# TODO: Refactory options: 1. kindeditor_option 2. html_option
input_html = (options.delete(:input_html) || {}).stringify_keys
output_buffer = ActiveSupport::SafeBuffer.new
output_buffer << build_text_area_tag(name, method, self, merge_assets_info(options), input_html)
output_buffer << javascript_tag(js_replace(input_html['id'], options))
end
def merge_assets_info(options)
owner = options.delete(:owner)
options[:class] = "#{options[:class]} rails_kindeditor"
if (!owner.nil?) && (!owner.id.nil?)
begin
owner_id = owner.id
owner_type = owner.class.name
options.reverse_merge!(owner_id: owner_id, owner_type: owner_type, data: {upload: kindeditor_upload_json_path(owner_id: owner_id, owner_type: owner_type), filemanager: kindeditor_file_manager_json_path})
return options
end
else
options.reverse_merge!(data: {upload: kindeditor_upload_json_path, filemanager: kindeditor_file_manager_json_path})
end
end
def kindeditor_upload_json_path(*args)
options = args.extract_options!
owner_id_query_string = options[:owner_id] ? "?owner_id=#{options[:owner_id]}" : ''
owner_type_query_string = options[:owner_type] ? "&owner_type=#{options[:owner_type]}" : ''
if owner_id_query_string == '' && owner_type_query_string == ''
"#{main_app_root_url}kindeditor/upload"
else
"#{main_app_root_url}kindeditor/upload#{owner_id_query_string}#{owner_type_query_string}"
end
end
def kindeditor_file_manager_json_path
"#{main_app_root_url}kindeditor/filemanager"
end
private
def main_app_root_url
begin
main_app.root_url.slice(0, main_app.root_url.rindex(main_app.root_path)) + '/'
rescue
'/'
end
end
def js_replace(dom_id, options = {})
editor_id = options[:editor_id].nil? ? '' : "#{options[:editor_id].to_s.downcase} = "
if options[:window_onload]
require 'securerandom'
random_name = SecureRandom.hex;
"var old_onload_#{random_name};
if(typeof window.onload == 'function') old_onload_#{random_name} = window.onload;
window.onload = function() {
KindEditor.basePath='#{RailsKindeditor.base_path}';
#{editor_id}KindEditor.create('##{dom_id}', #{get_options(options).to_json});
if(old_onload_#{random_name}) old_onload_#{random_name}();
}"
else
"KindEditor.basePath='#{RailsKindeditor.base_path}';
KindEditor.ready(function(K){
#{editor_id}K.create('##{dom_id}', #{get_options(options).to_json});
});"
end
end
def get_options(options)
options.delete(:editor_id)
options.delete(:window_onload)
options.reverse_merge!(:width => '100%')
options.reverse_merge!(:height => 300)
options.reverse_merge!(:allowFileManager => true)
options.reverse_merge!(:uploadJson => kindeditor_upload_json_path(:owner_id => options.delete(:owner_id), :owner_type => options.delete(:owner_type)))
options.reverse_merge!(:fileManagerJson => kindeditor_file_manager_json_path)
if options[:simple_mode] == true
options.merge!(:items => %w{fontname fontsize | forecolor hilitecolor bold italic underline removeformat | justifyleft justifycenter justifyright insertorderedlist insertunorderedlist | emoticons image link})
end
options.delete(:simple_mode)
options
end
def build_text_area_tag(name, method, template, options, input_html)
if Rails.version >= '4.0.0'
text_area_tag = ActionView::Helpers::Tags::TextArea.new(name, method, template, options)
text_area_tag.send(:add_default_name_and_id, input_html)
text_area_tag.render
elsif Rails.version >= '3.1.0'
text_area_tag = ActionView::Base::InstanceTag.new(name, method, template, options.delete(:object))
text_area_tag.send(:add_default_name_and_id, input_html)
text_area_tag.to_text_area_tag(input_html)
elsif Rails.version >= '3.0.0'
raise 'Please use rails_kindeditor v0.2.8 for Rails v3.0.x'
else
raise 'Please upgrade your Rails !'
end
end
end
module Builder
def kindeditor(method, options = {})
@template.send("kindeditor", @object_name, method, objectify_options(options))
end
end
end | 42.162393 | 216 | 0.672005 |
916cdac218cc87b8dcccf5acdfc4c41471c79536 | 492 | require 'bio-ucsc'
describe "Bio::Ucsc::Hg18::EncodeUppsalaChipUsf1" do
  describe "#find_by_interval" do
    context "given range chr1:1-150,000,000" do
      it 'returns a record (r.chrom == "chr1")' do
        # Connect to the UCSC hg18 database with default connection settings.
        Bio::Ucsc::Hg18::DBConnection.default
        Bio::Ucsc::Hg18::DBConnection.connect
        i = Bio::GenomicInterval.parse("chr1:1-150,000,000")
        r = Bio::Ucsc::Hg18::EncodeUppsalaChipUsf1.find_by_interval(i)
        r.chrom.should == "chr1"
      end
    end
  end
end
| 30.75 | 70 | 0.648374 |
91e81ae64272dc7b63e55301c1c3d259c00dabf1 | 3,105 | require 'fileutils'
require 'json'
require 'rake'
require 'rubygems/package'
require 'tmpdir'
require 'yaml'
require 'zlib'
load File.expand_path('../../../../lib/tasks/build/aws.rake', __FILE__)
describe 'Aws' do
  before(:each) do
    # Snapshot ENV so each example can mutate it freely; restored in after().
    @original_env = ENV.to_hash
    @build_dir = File.expand_path('../../../../build', __FILE__)
    @version_dir = Dir.mktmpdir('aws')
    @agent_dir = Dir.mktmpdir('aws')
    @base_amis_dir = Dir.mktmpdir('aws')
    @output_directory = 'bosh-windows-stemcell'
    FileUtils.rm_rf(@output_directory)
    # Rake tasks only run once per process by default; re-enable per example.
    Rake::Task['build:aws'].reenable
  end
  after(:each) do
    ENV.replace(@original_env)
    FileUtils.rm_rf(@output_directory)
    FileUtils.rm_rf(@version_dir)
    FileUtils.rm_rf(@agent_dir)
    FileUtils.rm_rf(@base_amis_dir)
  end
  it 'should build an aws stemcell' do
    Dir.mktmpdir('aws-stemcell-test') do |tmpdir|
      os_version = 'some-os-version'
      version = 'some-version'
      agent_commit = 'some-agent-commit'
      ENV['AWS_ACCESS_KEY'] = 'some-aws_access_key'
      ENV['AWS_SECRET_KEY'] = 'some-aws_secret_key'
      ENV['OS_VERSION'] = os_version
      # Prepend the fixture dir so stubbed CLI tools shadow the real ones.
      ENV['PATH'] = "#{File.join(File.expand_path('../../../..', __FILE__), 'spec', 'fixtures', 'aws')}:#{ENV['PATH']}"
      ENV['VERSION_DIR'] = @version_dir
      ENV['BASE_AMIS_DIR'] = @base_amis_dir
      File.write(
        File.join(@version_dir, 'number'),
        'some-version'
      )
      FileUtils.mkdir_p(File.join(@build_dir, 'compiled-agent'))
      File.write(
        File.join(@build_dir, 'compiled-agent', 'sha'),
        agent_commit
      )
      # Two base AMIs: the task should publish one AMI per region.
      File.write(
        File.join(@base_amis_dir, 'base-amis-1.json'),
        [
          {
            "name" => "us-east-1",
            "base_ami" => "base-east-1"
          },
          {
            "name" => "us-east-2",
            "base_ami" => "base-east-2"
          }
        ].to_json
      )
      Rake::Task['build:aws'].invoke
      stemcell = File.join(@output_directory, "light-bosh-stemcell-#{version}-aws-xen-hvm-#{os_version}-go_agent.tgz")
      stemcell_sha = File.join(@output_directory, "light-bosh-stemcell-#{version}-aws-xen-hvm-#{os_version}-go_agent.tgz.sha")
      # Verify the stemcell manifest, embedded apply spec and sha file.
      stemcell_manifest = YAML.load(read_from_tgz(stemcell, 'stemcell.MF'))
      expect(stemcell_manifest['version']).to eq(version)
      expect(stemcell_manifest['sha1']).to eq(EMPTY_FILE_SHA)
      expect(stemcell_manifest['operating_system']).to eq(os_version)
      expect(stemcell_manifest['cloud_properties']['infrastructure']).to eq('aws')
      expect(stemcell_manifest['cloud_properties']['ami']['us-east-1']).to eq('ami-east1id')
      expect(stemcell_manifest['cloud_properties']['ami']['us-east-2']).to eq('ami-east2id')
      apply_spec = JSON.parse(read_from_tgz(stemcell, 'apply_spec.yml'))
      expect(apply_spec['agent_commit']).to eq(agent_commit)
      expect(read_from_tgz(stemcell, 'updates.txt')).to eq('some-updates')
      # A light stemcell carries no image payload.
      expect(read_from_tgz(stemcell, 'image')).to be_nil
      expect(File.read(stemcell_sha)).to eq(Digest::SHA1.hexdigest(File.read(stemcell)))
    end
  end
end
| 33.75 | 126 | 0.633494 |
abc3f95cf21a52cf64d4b93ef4b0d83e5d506222 | 2,445 | # frozen_string_literal: true
require 'find'
require 'pathname'
module Reek
  module Source
    #
    # Locates Ruby source files on the filesystem, honouring the
    # configuration's exclusion rules.
    #
    class SourceLocator
      # Initialize with the paths we want to search.
      #
      # paths - a list of paths as Strings
      def initialize(paths, configuration: Configuration::AppConfiguration.default, options: Reek::CLI::Options.new)
        @options = options
        @paths = paths.flat_map do |raw|
          pathname = Pathname.new(raw)
          current_directory?(pathname) ? pathname.entries : pathname
        end
        @configuration = configuration
      end

      # Traverses all paths we initialized the SourceLocator with, finds
      # all relevant Ruby files and returns them as a list.
      #
      # @return [Array<Pathname>] - Ruby paths found
      def sources
        source_paths
      end

      private

      attr_reader :configuration, :paths, :options

      # Walk every configured path and collect the Ruby files that survive
      # the exclusion rules.
      # :reek:TooManyStatements: { max_statements: 7 }
      # :reek:NestedIterators: { max_allowed_nesting: 2 }
      def source_paths
        found = []
        paths.each do |given_path|
          unless given_path.exist?
            print_no_such_file_error(given_path)
            next
          end
          given_path.find do |candidate|
            if candidate.directory?
              # Stop descending into ignored directories entirely.
              Find.prune if ignore_path?(candidate)
            elsif ruby_file?(candidate) && !ignore_file?(candidate)
              found << candidate
            end
          end
        end
        found
      end

      # With force-exclusion enabled, a file is ignored when it or any of
      # its ancestor directories matches an exclude pattern.
      def ignore_file?(path)
        return false unless options.force_exclusion?
        path.ascend do |ancestor|
          break true if path_excluded?(ancestor)
          false
        end
      end

      def path_excluded?(path)
        configuration.path_excluded?(path)
      end

      # :reek:UtilityFunction
      def print_no_such_file_error(path)
        warn "Error: No such file - #{path}"
      end

      # :reek:UtilityFunction
      def hidden_directory?(path)
        path.basename.to_s.start_with? '.'
      end

      def ignore_path?(path)
        path_excluded?(path) || hidden_directory?(path)
      end

      # :reek:UtilityFunction
      def ruby_file?(path)
        path.extname == '.rb'
      end

      # :reek:UtilityFunction
      def current_directory?(path)
        [Pathname.new('.'), Pathname.new('./')].include?(path)
      end
    end
  end
end
| 25.46875 | 116 | 0.593047 |
e86bd10c2981cb2529245e3549f7d050578d8cd4 | 1,310 | module Cryptoexchange::Exchanges
module Bitsonic
module Services
class OrderBook < Cryptoexchange::Services::Market
class << self
def supports_individual_ticker_query?
true
end
end
def fetch(market_pair)
output = super(ticker_url(market_pair))
adapt(output["result"], market_pair)
end
def ticker_url(market_pair)
"#{Cryptoexchange::Exchanges::Bitsonic::Market::API_URL}/external/depth?symbol=#{market_pair.base}#{market_pair.target}"
end
def adapt(output, market_pair)
order_book = Cryptoexchange::Models::OrderBook.new
order_book.base = market_pair.base
order_book.target = market_pair.target
order_book.market = Bitsonic::Market::NAME
order_book.asks = adapt_orders output['asks']
order_book.bids = adapt_orders output['bids']
order_book.timestamp = Time.now.to_i
order_book.payload = output
order_book
end
def adapt_orders(orders)
orders.collect do |order_entry|
Cryptoexchange::Models::Order.new(price: order_entry[0],
amount: order_entry[1])
end
end
end
end
end
end
| 30.465116 | 130 | 0.6 |
5de2accafc44723d86ff0de3e679965f08f9a017 | 220 | # frozen_string_literal: true
module DropletKit
  # API representation of a DigitalOcean account; the attribute list mirrors
  # the fields returned by the /v2/account endpoint.
  class Account < BaseModel
    attribute :droplet_limit
    attribute :floating_ip_limit
    attribute :email
    attribute :uuid
    attribute :email_verified
  end
end
| 18.333333 | 32 | 0.754545 |
61103b833b03281bee71bab16c8ece3617fdd964 | 1,128 | #!/bin/ruby
require 'yaml'
require 'pp'
if ARGV.count < 1
puts "Usage: volume_planner.rb (path)"
puts " path == a directory containing docker swarm compose files"
puts ""
puts "This tool counts how many times volumes are shared between containers"
exit 1
end
volumes = []
Dir.glob("#{ARGV.first}/*.yaml") do |item|
next if item == '.' or item == '..'
next if item == './docker-compose.yaml'
puts "Processing #{item}..."
compose = YAML.load_file(item)
#volumes += compose['volumes'].keys
compose['services'].each do |name, keys|
next if keys['volumes'] == nil or keys['volumes'].count == 0
keys['volumes'].each do |volume|
next if volume[0,1] == "." or volume[0,1] == "/"
volumes.push(volume.split(":").first)
end
end
end
shared_volumes = Hash.new
all_volumes = Hash[volumes.group_by {|x| x}.map {|k,v| [k,v.count]}]
all_volumes.each do |name, count|
next if count == 1
shared_volumes.merge!({name => count})
end
puts ""
puts "These volumes are shared by mulitple containers:"
shared_volumes.each do |volume,count|
puts(" Volume: #{volume}, Attachments: #{count}")
end
| 27.512195 | 78 | 0.653369 |
086e78c125cdfe58dfb9f7c6b099c17588ec1e3b | 2,781 | module Kontena::Cli::Grids
class LogsCommand < Clamp::Command
include Kontena::Cli::Common
option ["-t", "--tail"], :flag, "Tail (follow) logs", default: false
option ["-s", "--search"], "SEARCH", "Search from logs"
option "--lines", "LINES", "Number of lines to show from the end of the logs"
option "--since", "SINCE", "Show logs since given timestamp"
option "--node", "NODE", "Filter by node name", multivalued: true
option "--service", "SERVICE", "Filter by service name", multivalued: true
option ["-c", "--container"], "CONTAINER", "Filter by container", multivalued: true
def execute
require_api_url
token = require_token
query_params = {}
query_params[:nodes] = node_list.join(",") unless node_list.empty?
query_params[:services] = service_list.join(",") unless service_list.empty?
query_params[:containers] = container_list.join(",") unless container_list.empty?
query_params[:search] = search if search
query_params[:limit] = lines if lines
query_params[:since] = since if since
if tail?
@buffer = ''
query_params[:follow] = 1
stream_logs(token, query_params)
else
list_logs(token, query_params)
end
end
def list_logs(token, query_params)
result = client(token).get("grids/#{current_grid}/container_logs", query_params)
result['logs'].each do |log|
color = color_for_container(log['name'])
prefix = ""
prefix << "#{log['created_at']} "
prefix << "#{log['name']}:"
prefix = prefix.colorize(color)
puts "#{prefix} #{log['data']}"
end
end
def stream_logs(token, query_params)
streamer = lambda do |chunk, remaining_bytes, total_bytes|
begin
unless @buffer.empty?
chunk = @buffer + chunk
end
unless chunk.empty?
log = JSON.parse(chunk)
end
@buffer = ''
rescue => exc
@buffer << chunk
end
if log
color = color_for_container(log['name'])
puts "#{log['name'].colorize(color)} | #{log['data']}"
end
end
result = client(token).get_stream(
"grids/#{current_grid}/container_logs", streamer, query_params
)
end
def color_for_container(container_id)
color_maps[container_id] = colors.shift unless color_maps[container_id]
color_maps[container_id].to_sym
end
def color_maps
@color_maps ||= {}
end
def colors
if(@colors.nil? || @colors.size == 0)
@colors = [:green, :yellow, :magenta, :cyan, :red,
:light_green, :light_yellow, :ligh_magenta, :light_cyan, :light_red]
end
@colors
end
end
end
| 31.602273 | 87 | 0.598706 |
33bcd501a7380288b91870ec0bf91185f3a7b820 | 358 | require 'fileutils'
include FileUtils
require 'rubygems'
# Verify the gems this Rakefile depends on are installed, printing install
# instructions and aborting if any are missing.
%w[rake hoe newgem rubigen].each do |req_gem|
  begin
    require req_gem
  rescue LoadError
    puts "This Rakefile requires the '#{req_gem}' RubyGem."
    puts "Installation: gem install #{req_gem} -y"
    exit
  end
end
# Make the project's lib/ directory loadable.
$:.unshift(File.join(File.dirname(__FILE__), %w[.. lib]))
require 'gitjour' | 21.058824 | 59 | 0.703911 |
796e40b6518b0f250f44be7920aa4d22f0adbd53 | 1,268 | class Sshs < Formula
desc "Graphical command-line client for SSH"
homepage "https://github.com/quantumsheep/sshs"
url "https://github.com/quantumsheep/sshs/archive/refs/tags/3.2.0.tar.gz"
sha256 "8b3127178c7bff19c1ea1a5fd2c5758137863c7e7cfbc4900cdf3fa2f13007a6"
license "MIT"
bottle do
root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/sshs"
sha256 cellar: :any_skip_relocation, mojave: "6c96444975c81c5da325fdd277bd4e2ae10411ab8ac901681f9b7e172a5df62a"
end
depends_on "go" => :build
def install
system "make", "build", "VERSION=#{version}", "OUTPUT=#{bin}/sshs"
end
test do
assert_equal "sshs version #{version}", shell_output(bin/"sshs --version").strip
(testpath/".ssh/config").write <<~EOS
Host "Test"
HostName example.com
User root
Port 22
EOS
require "pty"
require "io/console"
ENV["TERM"] = "xterm"
PTY.spawn(bin/"sshs") do |r, w, _pid|
r.winsize = [80, 40]
sleep 1
# Search for Test host
w.write "Test"
sleep 1
# Quit
w.write "\003"
sleep 1
begin
r.read
rescue Errno::EIO
# GNU/Linux raises EIO when read is done on closed pty
end
end
end
end
| 23.481481 | 115 | 0.647476 |
18716d633941306fe25c6a3e4333b1606e04f5ba | 765 | module Fog
module Parsers
module Zerigo
module DNS
class ListZones < Fog::Parsers::Base
def reset
@zone = {}
@response = { 'zones' => [] }
end
def end_element(name)
case name
when 'default-ttl', 'id', 'nx-ttl', 'hosts-count'
@zone[name] = @value.to_i
when 'created-at', 'custom-nameservers', 'custom-ns', 'domain', 'hostmaster', 'notes', 'ns1', 'ns-type', 'slave-nameservers', 'tag-list', 'updated-at', 'hosts', 'axfr-ips', 'restrict-axfr'
@zone[name] = @value
when 'zone'
@response['zones'] << @zone
@zone = {}
end
end
end
end
end
end
end
| 24.677419 | 200 | 0.470588 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.