hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1) |
---|---|---|---|---|---|
28b0387f7e3f17461df4fd4a8ecf7dc306add3db | 2,559 | require 'rspec'
require 'pry'
require 'word_definer'
require 'definition'
describe '#Word' do
before(:each) do
Word.clear
Definition.clear
end
describe('#save') do
it("saves a word") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word2 = Word.new({:name => "House", :id => nil})
word2.save
expect(Word.all).to(eq([word, word2]))
end
end
describe('.all') do
it("returns an empty array when there are no words") do
expect(Word.all).to(eq([]))
end
end
describe('.clear') do
it("clears all words") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word2 = Word.new({:name => "House", :id => nil})
word2.save
Word.clear
expect(Word.all).to(eq([]))
end
end
describe('#==') do
it("is the same word if it has the same attributes as another word") do
word = Word.new({:name => "Tree", :id => nil})
word2 = Word.new({:name => "Tree", :id => nil})
expect(word).to(eq(word2))
end
end
describe('.find') do
it("finds an word by id") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word2 = Word.new({:name => "House", :id => nil})
word2.save
expect(Word.find(word.id)).to(eq(word))
end
end
describe('#update') do
it("updates a word by id") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word.update("House")
expect(word.name).to(eq("House"))
end
end
describe('#delete') do
it("deletes a word by id") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word2 = Word.new({:name => "House", :id => nil})
word2.save
word.delete
expect(Word.all).to(eq([word2]))
end
end
describe('.search') do
it("allows user to search for a word") do
word = Word.new({:name => "Tree", :id => nil})
word.save
word2 = Word.new({:name => "House", :id => nil})
word2.save
expect(Word.search(word.name)).to(eq([word]))
end
end
describe('#definitions') do
it("returns a word's definitions") do
word = Word.new({:name => "Tree", :id => nil})
word.save
definition = Definition.new({:definition => "a large plant with leaves", :word_id => word.id, :id => nil})
definition.save
definition2 = Definition.new({:definition => "a four-legged canine", :word_id => word.id, :id => nil})
definition2.save
expect(word.definitions).to(eq([definition, definition2]))
end
end
end | 25.848485 | 112 | 0.558812 |
0399b6cc557e08238c57c25e82d374c8690f0ce4 | 338 | elasticsearch_user 'elasticsearch'
elasticsearch_install 'elasticsearch' do
type 'package'
end
elasticsearch_configure 'elasticsearch' do
allocated_memory '512m'
configuration ({
'cluster.name' => node['elasticsearch']['cluster']['name']
})
end
elasticsearch_service 'elasticsearch' do
service_actions [:enable, :start]
end
| 24.142857 | 62 | 0.766272 |
f72e8fdb138010eb35eceb3294dd79c409c95d75 | 21,002 | # frozen_string_literal: true
require 'carrierwave/orm/activerecord'
class Issue < ApplicationRecord
include AtomicInternalId
include IidRoutes
include Issuable
include Noteable
include Referable
include Spammable
include FasterCacheKeys
include RelativePositioning
include TimeTrackable
include ThrottledTouch
include LabelEventable
include IgnorableColumns
include MilestoneEventable
include WhereComposite
include StateEventable
include IdInOrdered
include Presentable
include IssueAvailableFeatures
include Todoable
include FromUnion
include EachBatch
extend ::Gitlab::Utils::Override
DueDateStruct = Struct.new(:title, :name).freeze
NoDueDate = DueDateStruct.new('No Due Date', '0').freeze
AnyDueDate = DueDateStruct.new('Any Due Date', '').freeze
Overdue = DueDateStruct.new('Overdue', 'overdue').freeze
DueThisWeek = DueDateStruct.new('Due This Week', 'week').freeze
DueThisMonth = DueDateStruct.new('Due This Month', 'month').freeze
DueNextMonthAndPreviousTwoWeeks = DueDateStruct.new('Due Next Month And Previous Two Weeks', 'next_month_and_previous_two_weeks').freeze
SORTING_PREFERENCE_FIELD = :issues_sort
# Types of issues that should be displayed on lists across the app
# for example, project issues list, group issues list and issue boards.
# Some issue types, like test cases, should be hidden by default.
TYPES_FOR_LIST = %w(issue incident).freeze
belongs_to :project
has_one :namespace, through: :project
belongs_to :duplicated_to, class_name: 'Issue'
belongs_to :closed_by, class_name: 'User'
belongs_to :iteration, foreign_key: 'sprint_id'
belongs_to :moved_to, class_name: 'Issue'
has_one :moved_from, class_name: 'Issue', foreign_key: :moved_to_id
has_internal_id :iid, scope: :project, track_if: -> { !importing? }
has_many :events, as: :target, dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :merge_requests_closing_issues,
class_name: 'MergeRequestsClosingIssues',
dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :issue_assignees
has_many :issue_email_participants
has_many :assignees, class_name: "User", through: :issue_assignees
has_many :zoom_meetings
has_many :user_mentions, class_name: "IssueUserMention", dependent: :delete_all # rubocop:disable Cop/ActiveRecordDependent
has_many :sent_notifications, as: :noteable
has_many :designs, class_name: 'DesignManagement::Design', inverse_of: :issue
has_many :design_versions, class_name: 'DesignManagement::Version', inverse_of: :issue do
def most_recent
ordered.first
end
end
has_one :issuable_severity
has_one :sentry_issue
has_one :alert_management_alert, class_name: 'AlertManagement::Alert'
has_and_belongs_to_many :self_managed_prometheus_alert_events, join_table: :issues_self_managed_prometheus_alert_events # rubocop: disable Rails/HasAndBelongsToMany
has_and_belongs_to_many :prometheus_alert_events, join_table: :issues_prometheus_alert_events # rubocop: disable Rails/HasAndBelongsToMany
has_many :prometheus_alerts, through: :prometheus_alert_events
accepts_nested_attributes_for :issuable_severity, update_only: true
accepts_nested_attributes_for :sentry_issue
validates :project, presence: true
validates :issue_type, presence: true
enum issue_type: {
issue: 0,
incident: 1,
test_case: 2, ## EE-only
requirement: 3 ## EE-only
}
alias_method :issuing_parent, :project
alias_attribute :external_author, :service_desk_reply_to
scope :in_projects, ->(project_ids) { where(project_id: project_ids) }
scope :not_in_projects, ->(project_ids) { where.not(project_id: project_ids) }
scope :with_due_date, -> { where.not(due_date: nil) }
scope :without_due_date, -> { where(due_date: nil) }
scope :due_before, ->(date) { where('issues.due_date < ?', date) }
scope :due_between, ->(from_date, to_date) { where('issues.due_date >= ?', from_date).where('issues.due_date <= ?', to_date) }
scope :due_tomorrow, -> { where(due_date: Date.tomorrow) }
scope :not_authored_by, ->(user) { where.not(author_id: user) }
scope :order_due_date_asc, -> { reorder(::Gitlab::Database.nulls_last_order('due_date', 'ASC')) }
scope :order_due_date_desc, -> { reorder(::Gitlab::Database.nulls_last_order('due_date', 'DESC')) }
scope :order_closest_future_date, -> { reorder(Arel.sql('CASE WHEN issues.due_date >= CURRENT_DATE THEN 0 ELSE 1 END ASC, ABS(CURRENT_DATE - issues.due_date) ASC')) }
scope :order_relative_position_asc, -> { reorder(::Gitlab::Database.nulls_last_order('relative_position', 'ASC')) }
scope :order_relative_position_desc, -> { reorder(::Gitlab::Database.nulls_first_order('relative_position', 'DESC')) }
scope :order_closed_date_desc, -> { reorder(closed_at: :desc) }
scope :order_created_at_desc, -> { reorder(created_at: :desc) }
scope :order_severity_asc, -> { includes(:issuable_severity).order('issuable_severities.severity ASC NULLS FIRST') }
scope :order_severity_desc, -> { includes(:issuable_severity).order('issuable_severities.severity DESC NULLS LAST') }
scope :preload_associated_models, -> { preload(:assignees, :labels, project: :namespace) }
scope :with_web_entity_associations, -> { preload(:author, project: [:project_feature, :route, namespace: :route]) }
scope :preload_awardable, -> { preload(:award_emoji) }
scope :with_label_attributes, ->(label_attributes) { joins(:labels).where(labels: label_attributes) }
scope :with_alert_management_alerts, -> { joins(:alert_management_alert) }
scope :with_prometheus_alert_events, -> { joins(:issues_prometheus_alert_events) }
scope :with_self_managed_prometheus_alert_events, -> { joins(:issues_self_managed_prometheus_alert_events) }
scope :with_api_entity_associations, -> {
preload(:timelogs, :closed_by, :assignees, :author, :labels,
milestone: { project: [:route, { namespace: :route }] },
project: [:route, { namespace: :route }])
}
scope :with_issue_type, ->(types) { where(issue_type: types) }
scope :public_only, -> { where(confidential: false) }
scope :confidential_only, -> { where(confidential: true) }
scope :counts_by_state, -> { reorder(nil).group(:state_id).count }
scope :service_desk, -> { where(author: ::User.support_bot) }
scope :inc_relations_for_view, -> { includes(author: :status, assignees: :status) }
# An issue can be uniquely identified by project_id and iid
# Takes one or more sets of composite IDs, expressed as hash-like records of
# `{project_id: x, iid: y}`.
#
# @see WhereComposite::where_composite
#
# e.g:
#
# .by_project_id_and_iid({project_id: 1, iid: 2})
# .by_project_id_and_iid([]) # returns ActiveRecord::NullRelation
# .by_project_id_and_iid([
# {project_id: 1, iid: 1},
# {project_id: 2, iid: 1},
# {project_id: 1, iid: 2}
# ])
#
scope :by_project_id_and_iid, ->(composites) do
where_composite(%i[project_id iid], composites)
end
after_commit :expire_etag_cache, unless: :importing?
after_save :ensure_metrics, unless: :importing?
after_create_commit :record_create_action, unless: :importing?
attr_spammable :title, spam_title: true
attr_spammable :description, spam_description: true
state_machine :state_id, initial: :opened, initialize: false do
event :close do
transition [:opened] => :closed
end
event :reopen do
transition closed: :opened
end
state :opened, value: Issue.available_states[:opened]
state :closed, value: Issue.available_states[:closed]
before_transition any => :closed do |issue, transition|
args = transition.args
issue.closed_at = issue.system_note_timestamp
next if args.empty?
next unless args.first.is_a?(User)
issue.closed_by = args.first
end
before_transition closed: :opened do |issue|
issue.closed_at = nil
issue.closed_by = nil
end
end
class << self
extend ::Gitlab::Utils::Override
# Alias to state machine .with_state_id method
# This needs to be defined after the state machine block to avoid errors
alias_method :with_state, :with_state_id
alias_method :with_states, :with_state_ids
override :order_upvotes_desc
def order_upvotes_desc
reorder(upvotes_count: :desc)
end
override :order_upvotes_asc
def order_upvotes_asc
reorder(upvotes_count: :asc)
end
end
def self.relative_positioning_query_base(issue)
projects = issue.project.group&.root_ancestor&.all_projects || issue.project
in_projects(projects)
end
def self.relative_positioning_parent_column
:project_id
end
def self.reference_prefix
'#'
end
# Pattern used to extract `#123` issue references from text
#
# This pattern supports cross-project references.
def self.reference_pattern
@reference_pattern ||= %r{
(#{Project.reference_pattern})?
#{Regexp.escape(reference_prefix)}#{Gitlab::Regex.issue}
}x
end
def self.link_reference_pattern
@link_reference_pattern ||= super("issues", Gitlab::Regex.issue)
end
def self.reference_valid?(reference)
reference.to_i > 0 && reference.to_i <= Gitlab::Database::MAX_INT_VALUE
end
def self.project_foreign_key
'project_id'
end
def self.simple_sorts
super.merge(
{
'closest_future_date' => -> { order_closest_future_date },
'closest_future_date_asc' => -> { order_closest_future_date },
'due_date' => -> { order_due_date_asc.with_order_id_desc },
'due_date_asc' => -> { order_due_date_asc.with_order_id_desc },
'due_date_desc' => -> { order_due_date_desc.with_order_id_desc },
'relative_position' => -> { order_relative_position_asc.with_order_id_desc },
'relative_position_asc' => -> { order_relative_position_asc.with_order_id_desc }
}
)
end
def self.sort_by_attribute(method, excluded_labels: [])
case method.to_s
when 'closest_future_date', 'closest_future_date_asc' then order_closest_future_date
when 'due_date', 'due_date_asc' then order_due_date_asc.with_order_id_desc
when 'due_date_desc' then order_due_date_desc.with_order_id_desc
when 'relative_position', 'relative_position_asc' then order_relative_position_asc.with_order_id_desc
when 'severity_asc' then order_severity_asc.with_order_id_desc
when 'severity_desc' then order_severity_desc.with_order_id_desc
else
super
end
end
# `with_cte` argument allows sorting when using CTE queries and prevents
# errors in postgres when using CTE search optimisation
def self.order_by_position_and_priority(with_cte: false)
order = Gitlab::Pagination::Keyset::Order.build([column_order_relative_position, column_order_highest_priority, column_order_id_desc])
order_labels_priority(with_cte: with_cte)
.reorder(order)
end
def self.column_order_relative_position
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'relative_position',
column_expression: arel_table[:relative_position],
order_expression: Gitlab::Database.nulls_last_order('issues.relative_position', 'ASC'),
reversed_order_expression: Gitlab::Database.nulls_last_order('issues.relative_position', 'DESC'),
order_direction: :asc,
nullable: :nulls_last,
distinct: false
)
end
def self.column_order_highest_priority
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'highest_priority',
column_expression: Arel.sql('highest_priorities.label_priority'),
order_expression: Gitlab::Database.nulls_last_order('highest_priorities.label_priority', 'ASC'),
reversed_order_expression: Gitlab::Database.nulls_last_order('highest_priorities.label_priority', 'DESC'),
order_direction: :asc,
nullable: :nulls_last,
distinct: false
)
end
def self.column_order_id_desc
Gitlab::Pagination::Keyset::ColumnOrderDefinition.new(
attribute_name: 'id',
order_expression: arel_table[:id].desc
)
end
def self.to_branch_name(*args)
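# Parameterize each argument (preserving case only for the first), drop blanks, and join with hyphens.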
branch_name = args.map(&:to_s).each_with_index.map do |arg, i|
arg.parameterize(preserve_case: i == 0).presence
end.compact.join('-')
if branch_name.length > 100
truncated_string = branch_name[0, 100]
# Delete everything dangling after the last hyphen so as not to risk
# existence of unintended words in the branch name due to mid-word split.
branch_name = truncated_string.sub(/-[^-]*\Z/, '')
end
branch_name
end
# Temporary disable moving null elements because of performance problems
# For more information check https://gitlab.com/gitlab-com/gl-infra/production/-/issues/4321
def check_repositioning_allowed!
if blocked_for_repositioning?
raise ::Gitlab::RelativePositioning::IssuePositioningDisabled, "Issue relative position changes temporarily disabled."
end
end
def blocked_for_repositioning?
resource_parent.root_namespace&.issue_repositioning_disabled?
end
def hook_attrs
Gitlab::HookData::IssueBuilder.new(self).build
end
# `from` argument can be a Namespace or Project.
def to_reference(from = nil, full: false)
reference = "#{self.class.reference_prefix}#{iid}"
"#{project.to_reference_base(from, full: full)}#{reference}"
end
def suggested_branch_name
return to_branch_name unless project.repository.branch_exists?(to_branch_name)
start_counting_from = 2
Uniquify.new(start_counting_from).string(-> (counter) { "#{to_branch_name}-#{counter}" }) do |suggested_branch_name|
project.repository.branch_exists?(suggested_branch_name)
end
end
# Returns boolean if a related branch exists for the current issue
# ignores merge request branches
def has_related_branch?
project.repository.branch_names.any? do |branch|
/\A#{iid}-(?!\d+-stable)/i =~ branch
end
end
# To allow polymorphism with MergeRequest.
def source_project
project
end
def moved?
!moved_to_id.nil?
end
def duplicated?
!duplicated_to_id.nil?
end
def can_move?(user, to_project = nil)
if to_project
return false unless user.can?(:admin_issue, to_project)
end
!moved? && persisted? &&
user.can?(:admin_issue, self.project)
end
alias_method :can_clone?, :can_move?
def to_branch_name
if self.confidential?
"#{iid}-confidential-issue"
else
self.class.to_branch_name(iid, title)
end
end
def related_issues(current_user, preload: nil)
related_issues = ::Issue
.select(['issues.*', 'issue_links.id AS issue_link_id',
'issue_links.link_type as issue_link_type_value',
'issue_links.target_id as issue_link_source_id',
'issue_links.created_at as issue_link_created_at',
'issue_links.updated_at as issue_link_updated_at'])
.joins("INNER JOIN issue_links ON
(issue_links.source_id = issues.id AND issue_links.target_id = #{id})
OR
(issue_links.target_id = issues.id AND issue_links.source_id = #{id})")
.preload(preload)
.reorder('issue_link_id')
related_issues = yield related_issues if block_given?
cross_project_filter = -> (issues) { issues.where(project: project) }
Ability.issues_readable_by_user(related_issues,
current_user,
filters: { read_cross_project: cross_project_filter })
end
def can_be_worked_on?
!self.closed? && !self.project.forked?
end
# Returns `true` if the current issue can be viewed by either a logged in User
# or an anonymous user.
def visible_to_user?(user = nil)
return false unless project && project.feature_available?(:issues, user)
return publicly_visible? unless user
return false unless readable_by?(user)
user.can_read_all_resources? ||
::Gitlab::ExternalAuthorization.access_allowed?(
user, project.external_authorization_classification_label)
end
def check_for_spam?(user:)
# content created via support bots is always checked for spam, EVEN if
# the issue is not publicly visible and/or confidential
return true if user.support_bot? && spammable_attribute_changed?
# Only check for spam on issues which are publicly visible (and thus indexed in search engines)
return false unless publicly_visible?
# Only check for spam if certain attributes have changed
spammable_attribute_changed?
end
def as_json(options = {})
super(options).tap do |json|
if options.key?(:labels)
json[:labels] = labels.as_json(
project: project,
only: [:id, :title, :description, :color, :priority],
methods: [:text_color]
)
end
end
end
def etag_caching_enabled?
true
end
def discussions_rendered_on_frontend?
true
end
# rubocop: disable CodeReuse/ServiceClass
def update_project_counter_caches
Projects::OpenIssuesCountService.new(project).refresh_cache
end
# rubocop: enable CodeReuse/ServiceClass
def merge_requests_count(user = nil)
::MergeRequestsClosingIssues.count_for_issue(self.id, user)
end
def labels_hook_attrs
labels.map(&:hook_attrs)
end
def previous_updated_at
previous_changes['updated_at']&.first || updated_at
end
def banzai_render_context(field)
super.merge(label_url_method: :project_issues_url)
end
def design_collection
@design_collection ||= ::DesignManagement::DesignCollection.new(self)
end
def from_service_desk?
author.id == User.support_bot.id
end
def issue_link_type
return unless respond_to?(:issue_link_type_value) && respond_to?(:issue_link_source_id)
type = IssueLink.link_types.key(issue_link_type_value) || IssueLink::TYPE_RELATES_TO
return type if issue_link_source_id == id
IssueLink.inverse_link_type(type)
end
def relocation_target
moved_to || duplicated_to
end
def supports_assignee?
issue_type_supports?(:assignee)
end
def supports_time_tracking?
issue_type_supports?(:time_tracking)
end
def email_participants_emails
issue_email_participants.pluck(:email)
end
def email_participants_emails_downcase
issue_email_participants.pluck(IssueEmailParticipant.arel_table[:email].lower)
end
def issue_assignee_user_ids
issue_assignees.pluck(:user_id)
end
def update_upvotes_count
self.lock!
self.update_column(:upvotes_count, self.upvotes)
end
private
def spammable_attribute_changed?
title_changed? ||
description_changed? ||
# NOTE: We need to check them for spam when issues are made non-confidential, because spam
# may have been added while they were confidential and thus not being checked for spam.
confidential_changed?(from: true, to: false)
end
# Ensure that the metrics association is safely created and respecting the unique constraint on issue_id
override :ensure_metrics
def ensure_metrics
if !association(:metrics).loaded? || metrics.blank?
metrics_record = Issue::Metrics.safe_find_or_create_by(issue: self)
self.metrics = metrics_record
end
metrics.record!
end
def record_create_action
Gitlab::UsageDataCounters::IssueActivityUniqueCounter.track_issue_created_action(author: author)
end
# Returns `true` if the given User can read the current Issue.
#
# This method duplicates the same check of issue_policy.rb
# for performance reasons, check commit: 002ad215818450d2cbbc5fa065850a953dc7ada8
# Make sure to sync this method with issue_policy.rb
def readable_by?(user)
if user.can_read_all_resources?
true
elsif project.owner == user
true
elsif confidential? && !assignee_or_author?(user)
project.team.member?(user, Gitlab::Access::REPORTER)
else
project.public? ||
project.internal? && !user.external? ||
project.team.member?(user)
end
end
# Returns `true` if this Issue is visible to everybody.
def publicly_visible?
project.public? && !confidential? && !::Gitlab::ExternalAuthorization.enabled?
end
def expire_etag_cache
key = Gitlab::Routing.url_helpers.realtime_changes_project_issue_path(project, self)
Gitlab::EtagCaching::Store.new.touch(key)
end
def could_not_move(exception)
# Symptom of running out of space - schedule rebalancing
IssueRebalancingWorker.perform_async(nil, *project.self_or_root_group_ids)
end
end
Issue.prepend_mod_with('Issue')
| 34.599671 | 168 | 0.71617 |
1dbacece23d28c53f6c2c030ad95e859500cba7f | 1,122 | class Climgur::CLI
attr_accessor :climgur
def initialize
@climgur = nil
end
def start
@climgur = Climgur::ImgurGet.new.scrape_main_page
puts "welcome to CLIMGUR, the command line imgur browser."
commands
end
def commands
puts "\ncommands:\n 'start' loads previews of the most popular images right now \n enter the number of a image to see it in full size \n 'exit' exits the program\n"
input = get_input
if input == 'start'
climgur.display_top_images
commands
elsif input == 'exit'
goodbye
elsif input == 'billions'
billions
else
climgur.display_large_image(input)
commands
end
end
def get_input
input = 0
while input != 'start' && input != 'exit' && !input.to_i.between?(1, climgur.images.count) && input != 'billions'
puts "\nenter command: "
input = gets.strip
end
input
end
def billions
puts climgur.large_image("//6336-presscdn-26-82.pagely.netdna-cdn.com/wp-content/uploads/2013/11/carl-sagan.jpg")
commands
end
def goodbye
puts "goodbye!"
end
end
| 22.897959 | 168 | 0.64795 |
6ae2f2b0a0ea7d6c3395c341ea3ce2b524fefc97 | 291 | # frozen_string_literal: true
require_relative "production.rb"
Rails.application.configure do
# Overwrite any production settings here, or if you want to start from
# scratch then remove line 1.
# Use AWS for active storage for staging
config.active_storage.service = :amazon
end
| 24.25 | 72 | 0.776632 |
bb894abb590a71f4c559ac9837f3f28f42a9655e | 1,750 | #
# Cookbook Name:: mongodb
# Recipe:: configserver
#
# Copyright 2011, edelight GmbH
# Authors:
# Markus Korn <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Removed to stop the mongod service from being created.
#include_recipe "mongodb"
package node[:mongodb][:package_name] do
action :install
version node[:mongodb][:package_version]
end
chef_gem 'bson_ext'
chef_gem 'mongo' do
version '1.10.2'
end
if not node['mongodb']['dbpath'].empty?
directory node['mongodb']['dbpath'] do
owner 'mongodb'
group 'mongodb'
recursive true
mode '0755'
action :create
end
execute "set ownership" do
command "chown -Rf mongodb:mongodb #{node['mongodb']['dbpath']}"
end
end
# we are not starting the configserver service with the --configsvr
# commandline option because right now this only changes the port it's
# running on, and we are overwriting this port anyway.
mongodb_instance "configserver" do
mongodb_type "configserver"
port node['mongodb']['port']
logpath node['mongodb']['logpath']
dbpath node['mongodb']['dbpath']
enable_rest node['mongodb']['enable_rest']
smallfiles node['mongodb']['smallfiles']
auth node['mongodb']['auth']
end
| 29.166667 | 74 | 0.716571 |
1c4aa048d8efc2ac8022a2a1ef440cffa42902a9 | 590 | require_relative 'cocoapods/mixbox_spec'
Mixbox::FrameworkSpec.new do |s|
s.name = 'MixboxIpcSbtuiClient'
s.platforms = [:ios]
s.dependency 'MixboxIpc'
s.dependency 'MixboxSBTUITestTunnelClient'
# for network mocks, kind of a kludge, but SBTUITestTunnel should be removed soon:
s.dependency 'MixboxTestsFoundation'
s.dependency 'MixboxUiTestsFoundation'
s.framework = "XCTest"
s.user_target_xcconfig = {
'FRAMEWORK_SEARCH_PATHS' => '$(PLATFORM_DIR)/Developer/Library/Frameworks'
}
s.xcconfig = {
'ENABLE_TESTING_SEARCH_PATHS' => 'YES'
}
end | 25.652174 | 85 | 0.723729 |
ace28426d68774f95be4b5d9fc2ddca37681f97a | 1,118 | require File.join(%W[#{File.dirname(__FILE__)} lib yard-appendix version])
Gem::Specification.new do |s|
s.name = 'yard-appendix'
s.summary = "A YARD plugin that adds support for Appendix sections."
s.description = <<-eof
yard-appendix is a plugin for YARD, the Ruby documentation generation tool,
that defines a special directive @!appendix for writing appendixes for your
code documentation, similar to appendixes you find in books.
Appendix entries can be referenced to by methods and classes in your docs
using the @see tag and inline-references, just like any other object.
eof
s.version = YARD::AppendixPlugin::VERSION
s.date = Time.now.strftime('%Y-%m-%d')
s.authors = ["Ahmad Amireh"]
s.email = '[email protected]'
s.homepage = 'https://github.com/amireh/yard-appendix'
s.files = Dir.glob("{lib,spec,templates}/**/*.{rb,erb}") +
['LICENSE', 'README.md', '.rspec', __FILE__]
s.has_rdoc = 'yard'
s.license = 'MIT'
s.add_dependency('yard', '>= 0.8.0')
s.add_development_dependency 'rspec'
end
| 43 | 79 | 0.663685 |
7ab1687f8ba3f486ee2f4b369f0615e4028b8b70 | 295 | # frozen_string_literal: true
class MockDeploymentService < DeploymentService
def title
'Mock deployment'
end
def description
'Mock deployment service'
end
def self.to_param
'mock_deployment'
end
# No terminals support
def terminals(environment)
[]
end
end
| 14.047619 | 47 | 0.715254 |
f89235d8f9a80058e64a0b16f21d3c6f06086cbd | 4,883 | require 'spec_helper'
describe SS::SortEmulator, dbscope: :example do
let!(:site) { cms_site }
let(:now) { Time.zone.now.change(usec: 0) }
shared_examples "sort enumerator (released) is" do
it do
array1 = ruby.to_a
array2 = mongo.to_a
expect(array1.length).to eq array2.length
array1.each_with_index do |item1, index|
item2 = array2[index]
expect(item1.id).to eq item2.id
end
end
end
context "with released" do
let!(:node) { create :cms_node_page, cur_site: site }
let!(:page1) do
created = now - rand(3.months..6.months)
first_released = created + rand(7.days..14.days)
updated = first_released + rand(7.days..14.days)
page = nil
Timecop.freeze(created) do
page = create(:cms_page, cur_site: site, cur_node: node, released_type: released_type, state: "closed")
end
Timecop.freeze(first_released) do
page.state = "public"
page.released = first_released
page.save!
end
Timecop.freeze(updated) do
page.description = Array.new(2) { unique_id }.join("\n")
page.save!
end
page
end
let!(:page2) do
created = now - rand(3.months..6.months)
first_released = created + rand(7.days..14.days)
updated = first_released + rand(7.days..14.days)
page = nil
Timecop.freeze(created) do
page = create(:cms_page, cur_site: site, cur_node: node, released_type: released_type, state: "closed")
end
Timecop.freeze(first_released) do
page.state = "public"
page.released = first_released
page.save!
end
Timecop.freeze(updated) do
page.description = Array.new(2) { unique_id }.join("\n")
page.save!
end
page
end
let!(:page3) do
created = page2.created
first_released = page2.first_released
updated = page2.updated
page = nil
Timecop.freeze(created) do
page = create(:cms_page, cur_site: site, cur_node: node, released_type: released_type, state: "closed")
end
Timecop.freeze(first_released) do
page.state = "public"
page.released = first_released
page.save!
end
Timecop.freeze(updated) do
page.description = Array.new(2) { unique_id }.join("\n")
page.save!
end
page
end
let!(:page4) { create :cms_page, cur_site: site, cur_node: node, released_type: released_type }
let(:criteria) { Cms::Page.all }
before do
page4.unset(:released, :first_released, :created, :updated)
end
context "when released_type is 'fixed'" do
let(:released_type) { "fixed" }
context "with asc" do
let(:ruby) { described_class.new(criteria, { "released" => 1 }) }
let(:mongo) { criteria.reorder(released: 1) }
it_behaves_like "sort enumerator (released) is"
end
context "with desc" do
let(:ruby) { described_class.new(criteria, { "released" => -1 }) }
let(:mongo) { criteria.reorder(released: -1) }
it_behaves_like "sort enumerator (released) is"
end
end
context "when released_type is 'same_as_updated'" do
let(:released_type) { "same_as_updated" }
context "with asc" do
let(:ruby) { described_class.new(criteria, { "released" => 1 }) }
let(:mongo) { criteria.reorder(updated: 1) }
it_behaves_like "sort enumerator (released) is"
end
context "with desc" do
let(:ruby) { described_class.new(criteria, { "released" => -1 }) }
let(:mongo) { criteria.reorder(updated: -1) }
it_behaves_like "sort enumerator (released) is"
end
end
context "when released_type is 'same_as_created'" do
let(:released_type) { "same_as_created" }
context "with asc" do
let(:ruby) { described_class.new(criteria, { "released" => 1 }) }
let(:mongo) { criteria.reorder(created: 1) }
it_behaves_like "sort enumerator (released) is"
end
context "with desc" do
let(:ruby) { described_class.new(criteria, { "released" => -1 }) }
let(:mongo) { criteria.reorder(created: -1) }
it_behaves_like "sort enumerator (released) is"
end
end
context "when released_type is 'same_as_first_released'" do
let(:released_type) { "same_as_first_released" }
context "with asc" do
let(:ruby) { described_class.new(criteria, { "released" => 1 }) }
let(:mongo) { criteria.reorder(first_released: 1) }
it_behaves_like "sort enumerator (released) is"
end
context "with desc" do
let(:ruby) { described_class.new(criteria, { "released" => -1 }) }
let(:mongo) { criteria.reorder(first_released: -1) }
it_behaves_like "sort enumerator (released) is"
end
end
end
end
| 30.329193 | 111 | 0.611305 |
b93ecb746e699699431ebe515cadd4e0b08e09f7 | 3,709 | require 'net/imap'
require "getoptlong"
$stdout.sync = true
$port = nil
$user = ENV["USER"] || ENV["LOGNAME"]
$auth = "login"
$ssl = false
$starttls = false
def usage
<<EOF
usage: #{$0} [options] <host>
--help print this message
--port=PORT specifies port
--user=USER specifies user
--auth=AUTH specifies auth type
--starttls use starttls
--ssl use ssl
EOF
end
begin
require 'io/console'
rescue LoadError
def _noecho(&block)
system("stty", "-echo")
begin
yield STDIN
ensure
system("stty", "echo")
end
end
else
def _noecho(&block)
STDIN.noecho(&block)
end
end
def get_password
print "password: "
begin
return _noecho(&:gets).chomp
ensure
puts
end
end
def get_command
printf("%s@%s> ", $user, $host)
if line = gets
return line.strip.split(/\s+/)
else
return nil
end
end
parser = GetoptLong.new
parser.set_options(['--debug', GetoptLong::NO_ARGUMENT],
['--help', GetoptLong::NO_ARGUMENT],
['--port', GetoptLong::REQUIRED_ARGUMENT],
['--user', GetoptLong::REQUIRED_ARGUMENT],
['--auth', GetoptLong::REQUIRED_ARGUMENT],
['--starttls', GetoptLong::NO_ARGUMENT],
['--ssl', GetoptLong::NO_ARGUMENT])
begin
parser.each_option do |name, arg|
case name
when "--port"
$port = arg
when "--user"
$user = arg
when "--auth"
$auth = arg
when "--ssl"
$ssl = true
when "--starttls"
$starttls = true
when "--debug"
Net::IMAP.debug = true
when "--help"
usage
exit
end
end
rescue
abort usage
end
$host = ARGV.shift
unless $host
abort usage
end
imap = Net::IMAP.new($host, :port => $port, :ssl => $ssl)
begin
imap.starttls if $starttls
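# Wrap the password prompt in a Method object that responds to #to_str, so the
# user is only prompted when authenticate coerces it to a string.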
class << password = method(:get_password)
alias to_str call
end
imap.authenticate($auth, $user, password)
while true
cmd, *args = get_command
break unless cmd
begin
case cmd
when "list"
for mbox in imap.list("", args[0] || "*")
if mbox.attr.include?(Net::IMAP::NOSELECT)
prefix = "!"
elsif mbox.attr.include?(Net::IMAP::MARKED)
prefix = "*"
else
prefix = " "
end
print prefix, mbox.name, "\n"
end
when "select"
imap.select(args[0] || "inbox")
print "ok\n"
when "close"
imap.close
print "ok\n"
when "summary"
unless messages = imap.responses["EXISTS"][-1]
puts "not selected"
next
end
if messages > 0
for data in imap.fetch(1..-1, ["ENVELOPE"])
print data.seqno, ": ", data.attr["ENVELOPE"].subject, "\n"
end
else
puts "no message"
end
when "fetch"
if args[0]
data = imap.fetch(args[0].to_i, ["RFC822.HEADER", "RFC822.TEXT"])[0]
puts data.attr["RFC822.HEADER"]
puts data.attr["RFC822.TEXT"]
else
puts "missing argument"
end
when "logout", "exit", "quit"
break
when "help", "?"
print <<EOF
list [pattern] list mailboxes
select [mailbox] select mailbox
close close mailbox
summary display summary
fetch [msgno] display message
logout logout
help, ? display help message
EOF
else
print "unknown command: ", cmd, "\n"
end
rescue Net::IMAP::Error
puts $!
end
end
ensure
imap.logout
imap.disconnect
end
| 22.077381 | 78 | 0.535724 |
793541eb17664c4ace365ef5a007835351f799e3 | 2,389 | RSpec.describe Api::V1::GeographicalAreasController, 'GET #countries' do
render_views
let!(:geographical_area1) do
create :geographical_area,
:with_description,
:country
end
let!(:geographical_area2) do
create :geographical_area,
:with_description,
:country
end
let!(:geographical_area3) do
create :geographical_area,
:with_description,
geographical_code: '2'
end
let(:pattern) do
[
{ id: String, description: String },
{ id: String, description: String },
{ id: String, description: String },
]
end
it 'returns rendered records' do
get :countries, format: :json
expect(response.body).to match_json_expression pattern
end
it 'includes geographical areas with code 2' do
get :countries, format: :json
expect(response.body.to_s).to include(
geographical_area3.geographical_area_id,
)
end
describe 'machine timed' do
let!(:geographical_area1) do
create :geographical_area,
:with_description,
:country,
validity_start_date: '2014-12-31 00:00:00',
validity_end_date: '2015-12-31 00:00:00'
end
let!(:geographical_area2) do
create :geographical_area,
:with_description,
:country,
validity_start_date: '2014-12-01 00:00:00',
validity_end_date: '2015-12-01 00:00:00'
end
let!(:geographical_area3) do
create :geographical_area,
:with_description,
geographical_code: '2',
validity_start_date: '2014-12-31 00:00:00',
validity_end_date: '2015-12-31 00:00:00'
end
let(:pattern) do
[
{ id: String, description: String },
{ id: String, description: String },
]
end
before do
get :countries,
params: { as_of: '2015-12-04' },
format: :json
end
it 'finds one area' do
expect(response.body).to match_json_expression pattern
end
it 'includes area 1' do
expect(response.body.to_s).to include(
"\"id\":\"#{geographical_area1.geographical_area_id}\"",
)
end
it "doesn't include area 2" do
expect(response.body.to_s).not_to include(
"\"id\":\"#{geographical_area2.geographical_area_id}\"",
)
end
end
end
| 25.147368 | 72 | 0.603181 |
010613b2be03ca749e807309f7073b2b84addfc9 | 734 | module Questionnaires::Validations
extend ActiveSupport::Concern
included do
validates :name, :organization, :email_subject, :email_text, :email_link,
presence: true
validates :name, uniqueness: true, allow_nil: true, allow_blank: true
validates :name, :email_subject, :email_text, :email_link,
:email_clarification, pdf_encoding: true
validates :name, :email_subject, :email_link, length: { maximum: 255 },
allow_nil: true, allow_blank: true
before_destroy :check_for_answered_polls, prepend: true
end
private
def check_for_answered_polls
if polls.answered(true).any?
errors.add(:base, :cannot_destroy_with_answered_poll)
throw :abort
end
end
end
| 29.36 | 77 | 0.716621 |
b9c4988194047ea8261c0a3f28958669b85000ae | 2,000 | class UsersController < ApplicationController
before_action :error_if_not_logged_in, only: [:update, :friend, :messages]
def create
user = User.new(user_params)
user.save
session[:user_id] = user.id
render json: user, status: 200
end
def update
user = User.find(params[:id].to_i)
user.update(user_params)
render json: user, status: 200
end
def show
user = User.find(params[:id])
render json: user, status: 200
end
def recipes
pagy, recipes = pagy_resp(params)
render json: recipes,
meta: pagy,
status: 200
end
def favorites
pagy, favorites = pagy_resp(params)
render json: favorites,
root: :favorites,
meta: pagy,
status: 200
end
def friends
pagy, friends = pagy_resp(params)
render json: friends,
root: :friends,
meta: pagy,
status: 200
end
def messages
message_scope = MessagesService.new(params, current_user)
pagy, messages = pagy(message_scope.filter, {items:10,
assets:params[:action],
scope: params[:scope]})
render json: messages,
include: "**",
meta: pagy,
status: 200
end
def search
meta, users = pagy(User.from_identifier(params[:query]), {
items: 2,
query:params[:query]
})
render json: users,
meta: meta,
status: 200
end
private
def user_params
params.permit(:username, :email, :password, :avatar)
end
def pagy_resp(params)
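# Looks up the user and pages the association named by the current action
# (:recipes, :favorites or :friends), returning [pagy_metadata, records].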
user = User.find(params[:user_id])
assets, items = if params[:preview]
[user.send(params[:action]).preview, {}]
else
[user.send(params[:action]), {items:5}]
end
pagy(assets, items)
end
end
| 22.222222 | 76 | 0.535 |
1a3895068adfceaa5e73f04e46ba3158c87fbeab | 1,366 | # frozen_string_literal: true
require 'application_system_test_case'
class Bookmark::PageTest < ApplicationSystemTestCase
setup do
@page = pages(:page1)
end
test 'show page bookmark on lists' do
visit_with_auth '/current_user/bookmarks', 'kimura'
assert_text @page.title
end
test 'show active button when bookmarked page' do
visit_with_auth "/pages/#{@page.id}", 'kimura'
assert_selector '#bookmark-button.is-active'
assert_no_selector '#bookmark-button.is-inactive'
end
test 'show inactive button when not bookmarked page' do
visit_with_auth "/pages/#{@page.id}", 'komagata'
assert_selector '#bookmark-button.is-inactive'
assert_no_selector '#bookmark-button.is-active'
end
test 'bookmark page' do
visit_with_auth "/pages/#{@page.id}", 'komagata'
find('#bookmark-button').click
assert_selector '#bookmark-button.is-active'
assert_no_selector '#bookmark-button.is-inactive'
visit '/current_user/bookmarks'
assert_text @page.title
end
test 'unbookmark page' do
visit_with_auth "/pages/#{@page.id}", 'kimura'
assert_selector '#bookmark-button.is-active'
find('#bookmark-button').click
assert_selector '#bookmark-button.is-inactive'
assert_no_selector '#bookmark-button.is-active'
visit '/current_user/bookmarks'
assert_no_text @page.title
end
end
| 28.458333 | 57 | 0.72328 |
79387a1d144ebef2c2b0c93d540fefb99c9801cd | 1,366 | require 'net/ssh'
module Sitefull
module Cloud
class Provider
include Mock
PROVIDERS = %w(amazon azure google)
attr_reader :type, :options
def initialize(type, options = {})
@type = type || 'base'
extend(provider_module)
@options = respond_to?(:process) ? process(options) : options
end
class << self
def all_required_options
PROVIDERS.map { |type| required_options_for(type) }.flatten
end
def required_options_for(type)
provider_class(type).const_get(:REQUIRED_OPTIONS)
end
def provider_class(type)
require "sitefull-cloud/provider/#{type}"
Kernel.const_get "Sitefull::Provider::#{type.capitalize}"
end
end
def auth
@auth ||= Sitefull::Cloud::Auth.new(type, options)
end
protected
def credentials
@credentials ||= auth.credentials
end
def key_data
@key_data ||= generate_key_data
end
private
def provider_module
return self.class.provider_class(:mock) if mocked?
@provider_module ||= self.class.provider_class(type)
end
def generate_key_data
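# to_blob appears to come from net/ssh's OpenSSL extensions (required above);
# pack('m0') base64-encodes the public key blob without newlines.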
key = OpenSSL::PKey::RSA.new 2048
{ public_key: [ key.to_blob ].pack('m0'), private_key: key.to_s }
end
end
end
end
| 22.766667 | 73 | 0.601757 |
f72273f142aef968fb398ab3aa64611f7ea2cc0a | 66 | FactoryGirl.define do
factory :category_item do
end
end
| 9.428571 | 27 | 0.712121 |
d5a12f75c6a346551b2b5e2b160ca5cc017a0e55 | 141 | # frozen_string_literal: true
$LOAD_PATH.unshift File.expand_path("../../lib", __FILE__)
require "show_and_tell"
require "minitest/autorun"
| 23.5 | 58 | 0.77305 |
1cbb87ac60fbb2f4a4098f1c85caeb6b37eb5a88 | 1,181 | # Copyright 2018 Noragh Analytics, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied.
#
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'test/unit'
require 'mocha/test_unit'
# DO NOT MODIFY THIS FILE
require_relative '../lib/armagh/custom_actions'
class TestCustomActions < Test::Unit::TestCase
def setup
end
def teardown
end
def test_name
assert_not_empty(Armagh::CustomActions::NAME, 'No NAME defined for CustomActions')
end
def test_version
assert_not_empty(Armagh::CustomActions::VERSION, 'No VERSION defined for CustomActions')
end
def test_defined_actions
assert_not_empty(Armagh::Actions.defined_actions, 'No available custom actions or dividers were discovered')
end
end | 27.465116 | 112 | 0.762066 |
622d9011e90e8b9f7c7151e9f720125ba62b383a | 215 | # frozen_string_literal: true
# NB: Keep this file here as it is executed by the "book-secure-move-metrics" container in each pod
require File.expand_path('../../config/environment', __dir__) unless defined? Rails
| 43 | 99 | 0.767442 |
1cd105aa9b0bc3057a21ea761939aa01c59f9762 | 707 | RSpec.describe Tatami::Models::HttpRequest do
describe '#to_s' do
let(:sut) { Tatami::Models::HttpRequest.new(
name: 'name',
base_uri: 'base_uri',
method: 'method',
user_agent: 'user_agent',
uri: 'uri',
headers: 'headers',
cookies: 'cookies',
path_infos: 'path_infos',
query_strings: 'query_strings',
fragment: 'fragment',
content: 'content'
) }
subject { sut.to_s }
it { is_expected.to eq 'name=name, base_uri=base_uri, method=method, user_agent=user_agent, uri=uri, headers=headers, cookies=cookies, path_infos=path_infos, query_strings=query_strings, fragment=fragment, content=content' }
end
end | 37.210526 | 228 | 0.640736 |
ab23e9500fb92691eb4fe4e8272f67ade60c4ef8 | 1,260 | ###############################################################################
# tc_isdst.rb
#
# Test case for the Time#isdst instance method, and the Time#dst? alias.
###############################################################################
require 'test/unit'
require 'test/helper'
class TC_Time_Isdst_InstanceMethod < Test::Unit::TestCase
include Test::Helper
def setup
@time = Time.local(2000, 1, 1)
end
def test_isdst_basic
assert_respond_to(@time, :isdst)
assert_nothing_raised{ @time.isdst }
assert_kind_of(Boolean, @time.isdst)
end
def test_dst_basic
assert_respond_to(@time, :dst?)
assert_nothing_raised{ @time.dst? }
assert_kind_of(Boolean, @time.dst?)
end
def test_isdst
#assert_equal(true, Time.local(2000, 7, 1).isdst)
#assert_equal(false, Time.local(2000, 1, 1).isdst)
end
def test_dst?
#assert_equal(true, Time.local(2000, 7, 1).dst?)
#assert_equal(false, Time.local(2000, 1, 1).dst?)
end
def test_isdst_expected_errors
assert_raise(ArgumentError){ @time.isdst(1) }
assert_raise(ArgumentError){ @time.dst?(1) }
assert_raise(NoMethodError){ @time.isdst = 1 }
end
def teardown
@time = nil
end
end
| 26.25 | 79 | 0.580952 |
6285586716c0bf2a8c1bad8c20012fcc9ea618d1 | 4,114 | # frozen_string_literal: true
require 'spec_helper'
require 'bolt/pal/yaml_plan'
require 'bolt/util'
require 'bolt_spec/files'
describe Bolt::Util do
include BoltSpec::Files
context "when creating a typed name from a modulepath" do
it "removes init from the typed name" do
expect(Bolt::Util.module_name('mymod/plans/init.pp')).to eq('mymod')
expect(Bolt::Util.module_name('mymod/tasks/init.html.erb')).to eq('mymod')
end
it "supports extended paths" do
expect(Bolt::Util.module_name('mymod/plans/subdir/plan.pp')).to eq('mymod::subdir::plan')
end
it "splits on the first plans or tasks directory" do
expect(Bolt::Util.module_name('mymod/plans/plans/myplan.pp')).to eq('mymod::plans::myplan')
expect(Bolt::Util.module_name('mymod/tasks/plans/mytask.rb')).to eq('mymod::plans::mytask')
end
context "#to_code" do
it "turns DoubleQuotedString types into code strings" do
string = Bolt::PAL::YamlPlan::DoubleQuotedString.new('doublebubble')
expect(Bolt::Util.to_code(string)).to eq("\"doublebubble\"")
end
context "turns BareString types into code strings" do
it 'with a preceding variable' do
string = Bolt::PAL::YamlPlan::BareString.new('$variable')
expect(Bolt::Util.to_code(string)).to eq('$variable')
end
it 'with no variable' do
string = Bolt::PAL::YamlPlan::BareString.new('nonvariable')
expect(Bolt::Util.to_code(string)).to eq("'nonvariable'")
end
end
it "turns CodeLiteral types into code strings" do
string = Bolt::PAL::YamlPlan::CodeLiteral.new('[$codelit].join()')
expect(Bolt::Util.to_code(string)).to eq('[$codelit].join()')
end
it "turns EvaluableString types into code strings" do
string = Bolt::PAL::YamlPlan::EvaluableString.new('ev@l$tr1ng')
expect(Bolt::Util.to_code(string)).to eq('ev@l$tr1ng')
end
it "turns Hashes into code strings" do
hash = { 'hash' => Bolt::PAL::YamlPlan::BareString.new('$brown') }
expect(Bolt::Util.to_code(hash)).to eq("{'hash' => $brown}")
end
it "turns Arrays into code string" do
array = ['a', 'r', 'r', Bolt::PAL::YamlPlan::BareString.new('$a'), 'y']
expect(Bolt::Util.to_code(array)).to eq("['a', 'r', 'r', $a, 'y']")
end
end
end
context "when parsing a yaml file with read_yaml_hash" do
it "raises an error with line and column number if the YAML has a syntax error" do
contents = <<-YAML
---
version: 2
config:
transport: winrm
ssl-verify: false
ssl: true
YAML
with_tempfile_containing('config_file_test', contents) do |file|
expect {
Bolt::Util.read_yaml_hash(file, 'inventory')
}.to raise_error(Bolt::FileError, /Error at line 2 column 14/)
end
end
it "returns an empty hash when the yaml file is empty" do
with_tempfile_containing('empty', '') do |file|
expect(Bolt::Util.read_yaml_hash(file, 'config')).to eq({})
end
end
it "errors when file does not exist and is required" do
expect {
Bolt::Util.read_yaml_hash('does-not-exist', 'config')
}.to raise_error(Bolt::FileError)
end
it "errors when a non-hash object is read from a yaml file" do
contents = <<-YAML
---
foo
YAML
with_tempfile_containing('config_file_test', contents) do |file|
expect {
Bolt::Util.read_yaml_hash(file, 'inventory')
}.to raise_error(Bolt::FileError, /should be a Hash or empty, not String/)
end
end
end
context "when parsing a yaml file with read_optional_yaml_hash" do
it "returns an empty hash when the yaml file does not exist" do
expect(Bolt::Util.read_optional_yaml_hash('does-not-exist', 'config')).to eq({})
end
end
describe '#deep_clone' do
it 'works with frozen hashes' do
hash = { key: 'value', boolean: true }
hash.freeze
expect(Bolt::Util.deep_clone(hash)).to eq(hash)
end
end
end
| 33.721311 | 97 | 0.631988 |
abe308dac5ef06584c5e14fd2d7fade93481c294 | 228 | class CreateInspectionFindings < ActiveRecord::Migration
def change
create_table :inspection_findings do |t|
t.integer :inspection_id
t.text :finding
t.string :label
t.timestamps
end
end
end
| 19 | 56 | 0.692982 |
e2642443fb12a6c37bd6561927e2cfadc7dabfd1 | 2,937 | require "jwt"
class ApplicationController < ActionController::Base
skip_before_action :verify_authenticity_token
before_action :snake_case_params
# Handle OPTIONS preflight checks
# Credit to jpbalarini https://gist.github.com/jpbalarini/54a1aa22ebb261af9d8bfd9a24e811f0
before_action :cors_set_access_control_headers
def cors_preflight_check
return unless request.method == "OPTIONS"
cors_set_access_control_headers
render json: {}
end
def auth_header
request.headers["Authorization"]
end
# check that the token is a real JWT
def validate_token_integrity(token)
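# A JWT is three dot-separated base64url segments (header.payload.signature); that shape is all this regex checks.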
/^[\w-]+\.[\w-]+\.[\w-]+$/.match?(token)
end
def decoded_token
if !auth_header
raise "Invalid Authorization Token"
end
auth = auth_header.split(" ")
# Header must be prefixed with "Bearer"
if auth[0] != "Bearer"
raise "Invalid Authorization Token"
end
token = auth[1]
if !validate_token_integrity(token)
logger.error "Invalid token: #{token}"
return { valid: false }
end
secret = Rails.application.credentials.twitch[:extension_secret]
algorithm = Rails.application.credentials.jwt_algorithm
node_path = Rails.application.credentials.node_path
begin
token_json = `#{node_path} app/javascript/verify_jwt.js #{token} #{secret}`
rescue
logger.error "Invalid token: #{token}"
return { valid: false }
end
verify = JSON.parse(token_json)
if !verify["valid"]
logger.error "Invalid token: #{token}"
end
return verify
end
def jwt_auth
begin
token = decoded_token
rescue
logger.error "Failed to decode token: #{token}"
return false
end
if !token["valid"]
return false
end
return token["result"]
end
def sign(payload)
secret = Rails.application.credentials.twitch[:extension_secret]
algorithm = Rails.application.credentials.jwt_algorithm
JWT.encode payload, secret, algorithm
end
protected
def cors_set_access_control_headers
response.headers["Access-Control-Allow-Origin"] = "*"
response.headers["Access-Control-Allow-Methods"] = "POST, GET, PUT, PATCH, DELETE, OPTIONS"
response.headers["Access-Control-Allow-Headers"] = "Origin, Content-Type, Accept, Authorization, Token, " \
"Auth-Token, Email, X-User-Token, X-User-Email, x-xsrf-token"
response.headers["Access-Control-Max-Age"] = "1728000"
response.headers["Access-Control-Allow-Credentials"] = true
end
def restrict_to_development
head(:bad_request) unless Rails.env.development?
end
private
# snake_case the query params and all other params
# This is probably more hacky than it needs to be
# TODO: investigate ActionController::Parameters class and see if we can override
def snake_case_params
params.deep_transform_keys!(&:underscore)
request.parameters.deep_transform_keys!(&:underscore)
end
end
| 25.763158 | 111 | 0.705482 |
bbebbfffe0e28128b4a7e579133765060388b52f | 1,300 | # frozen_string_literal: true
class PostReplacement < ApplicationRecord
belongs_to :post
belongs_to :creator, class_name: "User"
before_validation :initialize_fields, on: :create
before_create :process!
attr_accessor :replacement_file, :final_source, :tags
attribute :replacement_url, default: ""
def initialize_fields
self.original_url = post.source
self.old_file_ext = post.file_ext
self.old_file_size = post.file_size
self.old_image_width = post.image_width
self.old_image_height = post.image_height
self.old_md5 = post.md5
end
concerning :Search do
class_methods do
def search(params = {})
q = search_attributes(params, :id, :created_at, :updated_at, :md5, :old_md5, :file_ext, :old_file_ext, :original_url, :replacement_url, :creator, :post)
q.apply_default_order(params)
end
end
end
def process!
PostReplacementProcessor.new(post: post, replacement: self).process!
end
def suggested_tags_for_removal
tags = post.tag_array.select do |tag|
Danbooru.config.post_replacement_tag_removals.any? do |pattern|
tag.match?(/\A#{pattern}\z/i)
end
end
tags = tags.map { |tag| "-#{tag}" }
tags.join(" ")
end
def self.available_includes
[:creator, :post]
end
end
| 26 | 160 | 0.702308 |
181c46ef64e856d89d6b60b5df01ad0d7083ff75 | 152 | # Command to reload module/alias/etc. configs
# Use like "%admin/reload"
require 'java'
ctx.pctx.reload_configs
puts 'Configs successfully reloaded'
| 16.888889 | 45 | 0.769737 |
79796e67baec502da3037df5e0d0293d8c0eab91 | 3,167 | describe Bindl::Entry do
include FakeFS::SpecHelpers
before(:each) { @store = Bindl::Store.new('/test/').create! }
describe '.initialize' do
it 'should demand that the store exist' do
expect do
Bindl::Entry.new(Bindl::Store.new('/dne/'), 'entry.ext')
end.to raise_error Bindl::Store::StoreDoesNotExistError
end
it 'should demand a path that produces a valid name' do
FileUtils.touch 'invalid.'
expect do
Bindl::Entry.new @store, 'invalid.'
end.to raise_error Bindl::Name::NameError
end
it 'should require that the file exists' do
expect do
Bindl::Entry.new @store, 'dne.ext'
end.to raise_error Bindl::Entry::EntryDoesNotExistError
FileUtils.touch 'exists.ext'
expect do
Bindl::Entry.new @store, 'exists.ext'
end.to_not raise_error
end
end
describe '.create!' do
it 'should raise if the store does not exist' do
expect do
Bindl::Entry.create! Bindl::Store.new('/dne/'), 'entry'
end.to raise_error Bindl::Store::StoreDoesNotExistError
end
it 'should raise if the file exists' do
path = '/test/entry.yml'
FileUtils.touch path
expect do
Bindl::Entry.create! @store, 'entry'
end.to raise_error Bindl::Entry::EntryExistsError
end
it 'should raise if the entry would have an invalid name' do
path = '/test/entry.invalid.'
FileUtils.touch path
expect do
Bindl::Entry.create! @store, path
end.to raise_error Bindl::Name::NameError
end
it 'should create the file' do
name = 'entry'
expect do
Bindl::Entry.create! @store, name
end.to_not raise_error
expect(File.file?('/test/entry.yml')).to be true
end
it 'should encrypt if asked, but fail without a key' do
@store.meta.set(Bindl::ID_KEYPATH, 'nonsense')
# It should raise, since encrypted files need a key to exist.
expect do
Bindl::Entry.create! @store, 'enctest', encrypt: true
end.to raise_error Bindl::Encrypt::GPGError
end
end
describe '#delete!' do
it 'should delete the file' do
path = '/entry.ext'
FileUtils.touch path
entry = Bindl::Entry.new @store, path
expect(File.file?(path)).to be true
entry.delete!
expect(File.file?(path)).to be false
end
it 'should set path to nil' do
path = '/entry.ext'
FileUtils.touch path
entry = Bindl::Entry.new @store, path
expect(File.file?(path)).to be true
entry.delete!
expect(entry.path).to be nil
end
end
describe 'data' do
it 'should return the contents of the file' do
path = '/entry.ext'
data = 'this is a test string!'
File.write(path, data)
entry = Bindl::Entry.new @store, path
expect(entry.data).to eq data
end
end
describe 'data=' do
it 'should overwrite the file with the new data' do
path = '/entry.ext'
data = 'this is new data'
File.write(path, 'old data')
entry = Bindl::Entry.new @store, path
entry.data = data
expect(File.read(path)).to eq data.to_yaml
end
end
end
| 31.989899 | 67 | 0.629302 |
1ca3a037a7d5ba2c9ac8449030830e1b22269077 | 7,212 | class IvlNotices::IvlToCoverallTransitionNoticeBuilder < IvlNotice
include ApplicationHelper
def initialize(consumer_role, args = {})
@family = Family.find(args[:options][:family])
find_transition_people(args[:options][:result][:people])
args[:recipient] = @family.primary_applicant.person
args[:notice] = PdfTemplates::ConditionalEligibilityNotice.new
args[:market_kind] = 'individual'
args[:recipient_document_store]= @family.primary_applicant.person
args[:to] = @family.primary_applicant.person.work_email_or_best
self.header = "notices/shared/header_ivl.html.erb"
super(args)
end
def attach_required_documents
generate_custom_notice('notices/ivl/documents_section')
attach_blank_page(custom_notice_path)
join_pdfs [notice_path, custom_notice_path]
clear_tmp(custom_notice_path)
end
def deliver
append_hbe
build
generate_pdf_notice
attach_blank_page(notice_path)
attach_docs
attach_appeals
attach_non_discrimination
attach_taglines
upload_and_send_secure_message
if recipient.consumer_role.can_receive_electronic_communication?
send_generic_notice_alert
end
if recipient.consumer_role.can_receive_paper_communication?
store_paper_notice
end
clear_tmp(notice_path)
end
def attach_docs
attach_required_documents
end
def notice_filename
"#{subject.titleize.gsub("Dc", "DC").gsub(/[^0-9a-z]/i,'')}"
end
def build
notice.notification_type = self.event_name
notice.mpi_indicator = self.mpi_indicator
notice.primary_identifier = recipient.hbx_id
check_for_transitioned_individuals
check_for_unverified_individuals
append_unverified_individuals
notice.primary_fullname = recipient.full_name.titleize || ""
notice.primary_firstname = recipient.first_name.titleize || ""
notice.past_due_text = "PAST DUE"
if recipient.mailing_address
append_address(recipient.mailing_address)
else
raise 'mailing address not present'
end
end
def find_transition_people(people_ids)
@transition_people = []
people_ids.each do |person_id|
@transition_people << Person.find(person_id)
end
end
def check_for_transitioned_individuals
@transition_people.each do |person|
notice.individuals << PdfTemplates::Individual.new({
:first_name => person.first_name.titleize,
:last_name => person.last_name.titleize,
:age => calculate_age_by_dob(person.dob),
})
end
end
def check_for_unverified_individuals
family = recipient.primary_family
date = TimeKeeper.date_of_record
enrollments = HbxEnrollment.where(family_id: family.id).select do |hbx_en|
(!hbx_en.is_shop?) && (!["coverage_canceled", "shopping", "inactive"].include?(hbx_en.aasm_state)) &&
(hbx_en.terminated_on.blank? || hbx_en.terminated_on >= TimeKeeper.date_of_record)
end
enrollments.reject!{|e| e.coverage_terminated? }
hbx_enrollments = []
    en = enrollments.select{ |e| HbxEnrollment::ENROLLED_STATUSES.include?(e.aasm_state)}
health_enrollments = en.select{ |e| e.coverage_kind == "health"}.sort_by(&:effective_on)
dental_enrollments = en.select{ |e| e.coverage_kind == "dental"}.sort_by(&:effective_on)
hbx_enrollments << health_enrollments
hbx_enrollments << dental_enrollments
hbx_enrollments.flatten!
hbx_enrollments.compact!
hbx_enrollments.each do |enrollment|
notice.enrollments << append_enrollment_information(enrollment)
end
notice.coverage_year = hbx_enrollments.compact.first.effective_on.year
end
def append_unverified_individuals
@transition_people.each do |person|
person.consumer_role.expired_verification_types.each do |verification_type|
case verification_type.type_name
when "Social Security Number"
notice.ssa_unverified << PdfTemplates::Individual.new({ full_name: person.full_name.titleize, past_due_text: "PAST DUE", age: person.age_on(TimeKeeper.date_of_record) })
when "Immigration status"
notice.immigration_unverified << PdfTemplates::Individual.new({ full_name: person.full_name.titleize, past_due_text: "PAST DUE", age: person.age_on(TimeKeeper.date_of_record) })
when "Citizenship"
notice.dhs_unverified << PdfTemplates::Individual.new({ full_name: person.full_name.titleize, past_due_text: "PAST DUE", age: person.age_on(TimeKeeper.date_of_record) })
when "American Indian Status"
notice.american_indian_unverified << PdfTemplates::Individual.new({ full_name: person.full_name.titleize, past_due_text: "PAST DUE", age: person.age_on(TimeKeeper.date_of_record) })
when EnrollRegistry[:enroll_app].setting(:state_residency).item
notice.residency_inconsistency << PdfTemplates::Individual.new({ full_name: person.full_name.titleize, past_due_text: "PAST DUE", age: person.age_on(TimeKeeper.date_of_record) })
end
end
end
end
def append_enrollment_information(enrollment)
plan = PdfTemplates::Plan.new({
plan_name: enrollment.product.title,
is_csr: enrollment.product.is_csr?,
coverage_kind: enrollment.product.kind,
plan_carrier: enrollment.product.issuer_profile.organization.legal_name,
family_deductible: enrollment.product.family_deductible.split("|").last.squish,
deductible: enrollment.product.deductible
})
PdfTemplates::Enrollment.new({
created_at: enrollment.created_at,
premium: enrollment.total_premium.round(2),
aptc_amount: enrollment.applied_aptc_amount.round(2),
responsible_amount: (enrollment.total_premium - enrollment.applied_aptc_amount.to_f).round(2),
phone: phone_number(enrollment.product.issuer_profile.legal_name),
is_receiving_assistance: enrollment.applied_aptc_amount > 0 || enrollment.product.is_csr? ? true : false,
coverage_kind: enrollment.coverage_kind,
kind: enrollment.kind,
effective_on: enrollment.effective_on,
plan: plan,
enrollees: enrollment.hbx_enrollment_members.inject([]) do |enrollees, member|
enrollee = PdfTemplates::Individual.new({
full_name: member.person.full_name.titleize,
age: member.person.age_on(TimeKeeper.date_of_record)
})
enrollees << enrollee
end
})
end
def phone_number(legal_name)
case legal_name
when "BestLife"
"(800) 433-0088"
when "CareFirst"
"(855) 444-3119"
when "Delta Dental"
"(800) 471-0236"
when "Dominion"
"(855) 224-3016"
when "Kaiser"
"(844) 524-7370"
end
end
end | 40.745763 | 191 | 0.672629 |
e212552e303c5af632ef5d736671e51c60f25f89 | 2,668 | module Google # deviates from other bin stuff to accomodate gem
class << self
def class_for(key)
case key
when :compute
Fog::Compute::Google
when :dns
Fog::DNS::Google
when :monitoring
Fog::Google::Monitoring
when :storage
Fog::Storage::Google
when :sql
Fog::Google::SQL
else
raise ArgumentError, "Unsupported #{self} service: #{key}"
end
end
def [](service)
@@connections ||= Hash.new do |hash, key|
hash[key] = case key
when :compute
Fog::Logger.warning("Google[:compute] is not recommended, use Compute[:google] for portability")
Fog::Compute.new(:provider => 'Google')
when :dns
Fog::Logger.warning("Google[:dns] is not recommended, use DNS[:google] for portability")
Fog::DNS.new(:provider => 'Google')
when :monitoring
Fog::Google::Monitoring.new
when :sql
Fog::Google::SQL.new
when :storage
Fog::Logger.warning("Google[:storage] is not recommended, use Storage[:google] for portability")
Fog::Storage.new(:provider => 'Google')
else
raise ArgumentError, "Unrecognized service: #{key.inspect}"
end
end
@@connections[service]
end
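    # Illustrative lookup (assumes Google credentials are already present in
    # Fog.credentials); connections are memoised per service key:
    #
    #   Google[:storage]   # => a Fog::Storage connection for the Google provider
    #   Google[:compute]   # => a Fog::Compute connection (logs a portability warning)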
def account
@@connections[:compute].account
end
def services
Fog::Google.services
end
# based off of virtual_box.rb
def available?
# Make sure the gem we use is enabled.
availability = if Gem::Specification.respond_to?(:find_all_by_name)
!Gem::Specification.find_all_by_name('google-api-client').empty? # newest rubygems
else
!Gem.source_index.find_name('google-api-client').empty? # legacy
end
# Then make sure we have all of the requirements
for service in services
begin
service = self.class_for(service)
availability &&= service.requirements.all? { |requirement| Fog.credentials.include?(requirement) }
rescue ArgumentError => e
Fog::Logger.warning(e.message)
availability = false
rescue => e
availability = false
end
end
if availability
for service in services
for collection in self.class_for(service).collections
unless self.respond_to?(collection)
self.class_eval <<-EOS, __FILE__, __LINE__
def self.#{collection}
self[:#{service}].#{collection}
end
EOS
end
end
end
end
availability
end
end
end
| 29.644444 | 108 | 0.584708 |
393ebe4d08ef978a5d4e9b76fd890f8401ae1861 | 328 | Peatio::Blockchain.registry[:bitcoin] = Bitcoin::Blockchain
Peatio::Blockchain.registry[:geth] = Ethereum::Eth::Blockchain
Peatio::Blockchain.registry[:parity] = Ethereum::Eth::Blockchain
Peatio::Blockchain.registry[:"geth-bsc"] = Ethereum::Bsc::Blockchain
Peatio::Blockchain.registry[:"geth-heco"] = Ethereum::Heco::Blockchain
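# Illustrative lookup (not part of the original file): a configured blockchain
# resolves its adapter class through the registry, e.g.
#   Peatio::Blockchain.registry[:"geth-bsc"]   # => Ethereum::Bsc::Blockchain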
| 54.666667 | 70 | 0.77439 |
01dda4de844bd9976856bfb68158e4c79176d14f | 319 | require 'net/http'
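# Note (added for clarity, not in the original): this one-liner scrapes Google's
# search results page and appears to rely on Ruby 1.8 behaviour (Array#to_s
# joining its elements), so it is unlikely to run unmodified on a modern Ruby.
# Usage: pass a single US zip code as the only argument.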
puts ((ARGV.length != 1) ? "Usage: #$0 <zip code>" : (["The temperature
in"] + (/Weather<\/b> for <b>(.*)<\/b>.*\D(\d+)°F/.match(Net::HTTP.get(
URI.parse("http://www.google.com/search?hl=en&q=temperature+#{ARGV[0]}")))[1,2].collect!
{|x| " is " + x})).to_s.gsub!(/in is /, "in ") + " degree F")
| 53.166667 | 88 | 0.554859 |
ff81fe968d7232833738de345de1253172809ce3 | 5,394 | # frozen_string_literal: true
require "cose/algorithm"
require "cose/error"
require "cose/rsapkcs1_algorithm"
require "openssl"
require "webauthn/authenticator_data/attested_credential_data"
require "webauthn/error"
module WebAuthn
module AttestationStatement
class UnsupportedAlgorithm < Error; end
ATTESTATION_TYPE_NONE = "None"
ATTESTATION_TYPE_BASIC = "Basic"
ATTESTATION_TYPE_SELF = "Self"
ATTESTATION_TYPE_ATTCA = "AttCA"
ATTESTATION_TYPE_BASIC_OR_ATTCA = "Basic_or_AttCA"
ATTESTATION_TYPES_WITH_ROOT = [
ATTESTATION_TYPE_BASIC,
ATTESTATION_TYPE_BASIC_OR_ATTCA,
ATTESTATION_TYPE_ATTCA
].freeze
class Base
AAGUID_EXTENSION_OID = "1.3.6.1.4.1.45724.1.1.4"
def initialize(statement)
@statement = statement
end
def valid?(_authenticator_data, _client_data_hash)
raise NotImplementedError
end
def format
WebAuthn::AttestationStatement::FORMAT_TO_CLASS.key(self.class)
end
def attestation_certificate
certificates&.first
end
def certificate_chain
if certificates
certificates[1..-1]
end
end
def attestation_certificate_key_id
raw_subject_key_identifier&.unpack("H*")&.[](0)
end
private
attr_reader :statement
def matching_aaguid?(attested_credential_data_aaguid)
extension = attestation_certificate&.extensions&.detect { |ext| ext.oid == AAGUID_EXTENSION_OID }
if extension
# `extension.value` mangles data into ASCII, so we must manually compare bytes
# see https://github.com/ruby/openssl/pull/234
extension.to_der[-WebAuthn::AuthenticatorData::AttestedCredentialData::AAGUID_LENGTH..-1] ==
attested_credential_data_aaguid
else
true
end
end
def certificates
@certificates ||=
raw_certificates&.map do |raw_certificate|
OpenSSL::X509::Certificate.new(raw_certificate)
end
end
def algorithm
statement["alg"]
end
def raw_certificates
statement["x5c"]
end
def signature
statement["sig"]
end
def attestation_trust_path
if certificates&.any?
certificates
end
end
def trustworthy?(aaguid: nil, attestation_certificate_key_id: nil)
if ATTESTATION_TYPES_WITH_ROOT.include?(attestation_type)
configuration.acceptable_attestation_types.include?(attestation_type) &&
valid_certificate_chain?(aaguid: aaguid, attestation_certificate_key_id: attestation_certificate_key_id)
else
configuration.acceptable_attestation_types.include?(attestation_type)
end
end
def valid_certificate_chain?(aaguid: nil, attestation_certificate_key_id: nil)
attestation_root_certificates_store(
aaguid: aaguid,
attestation_certificate_key_id: attestation_certificate_key_id
).verify(attestation_certificate, attestation_trust_path)
end
def attestation_root_certificates_store(aaguid: nil, attestation_certificate_key_id: nil)
OpenSSL::X509::Store.new.tap do |store|
root_certificates(
aaguid: aaguid,
attestation_certificate_key_id: attestation_certificate_key_id
).each do |cert|
store.add_cert(cert)
end
end
end
def root_certificates(aaguid: nil, attestation_certificate_key_id: nil)
root_certificates =
configuration.attestation_root_certificates_finders.reduce([]) do |certs, finder|
if certs.empty?
finder.find(
attestation_format: format,
aaguid: aaguid,
attestation_certificate_key_id: attestation_certificate_key_id
) || []
else
certs
end
end
if root_certificates.empty? && respond_to?(:default_root_certificates, true)
default_root_certificates
else
root_certificates
end
end
def raw_subject_key_identifier
extension = attestation_certificate.extensions.detect { |ext| ext.oid == "subjectKeyIdentifier" }
return unless extension
ext_asn1 = OpenSSL::ASN1.decode(extension.to_der)
ext_value = ext_asn1.value.last
OpenSSL::ASN1.decode(ext_value.value).value
end
def valid_signature?(authenticator_data, client_data_hash, public_key = attestation_certificate.public_key)
raise("Incompatible algorithm and key") unless cose_algorithm.compatible_key?(public_key)
cose_algorithm.verify(
public_key,
signature,
verification_data(authenticator_data, client_data_hash)
)
rescue COSE::Error
false
end
def verification_data(authenticator_data, client_data_hash)
authenticator_data.data + client_data_hash
end
def cose_algorithm
@cose_algorithm ||=
COSE::Algorithm.find(algorithm).tap do |alg|
alg && configuration.algorithms.include?(alg.name) ||
raise(UnsupportedAlgorithm, "Unsupported algorithm #{algorithm}")
end
end
def configuration
WebAuthn.configuration
end
end
end
end
| 29.47541 | 116 | 0.661661 |
ab44c2c6dbc8eb8fe6b1500d0737d55bbc1fef3c | 1,750 | # frozen_string_literal: true
FactoryBot.define do
factory :deposit do
member { create(:member, :level_3) }
amount { Kernel.rand(100..10_000).to_d }
factory :deposit_btc, class: 'Deposits::Coin' do
currency { find_or_create :currency, :btc, id: :btc }
address { create(:payment_address, :btc_address).address }
txid { Faker::Lorem.characters(64) }
txout { 0 }
block_number { rand(1..1_349_999) }
end
factory :deposit_usd, class: 'Deposits::Fiat' do
currency { find_or_create :currency, :usd, id: :usd }
end
trait :deposit_btc do
type { Deposits::Coin }
currency { find_or_create :currency, :btc, id: :btc }
address { create(:payment_address, :btc_address).address }
txid { Faker::Lorem.characters(64) }
txout { 0 }
end
trait :deposit_eth do
type { Deposits::Coin }
currency { find_or_create :currency, :eth, id: :eth }
member { create(:member, :level_3, :barong) }
address { create(:payment_address, :eth_address).address }
txid { Faker::Lorem.characters(64) }
txout { 0 }
end
trait :deposit_trst do
type { Deposits::Coin }
currency { find_or_create :currency, :trst, id: :trst }
member { create(:member, :level_3, :barong) }
address { create(:payment_address, :trst_address).address }
txid { Faker::Lorem.characters(64) }
txout { 0 }
end
trait :deposit_ring do
type { Deposits::Coin }
currency { find_or_create :currency, :ring, id: :ring }
member { create(:member, :level_3, :barong) }
address { create(:payment_address, :trst_address).address }
txid { Faker::Lorem.characters(64) }
txout { 0 }
end
end
end
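# Usage sketch (illustrative, not part of the original file):
#   create(:deposit_btc)                        # coin deposit with a random amount
#   create(:deposit, :deposit_eth, amount: 5)   # trait switches currency and member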
| 31.25 | 65 | 0.624 |
ff925cac387a9ef5826cb119734fc35eb873336c | 3,164 | Rails.application.configure do
# Settings specified here will take precedence over those in config/application.rb.
# Code is not reloaded between requests.
config.cache_classes = true
# Eager load code on boot. This eager loads most of Rails and
# your application in memory, allowing both threaded web servers
# and those relying on copy on write to perform better.
# Rake tasks automatically ignore this option for performance.
config.eager_load = true
# Full error reports are disabled and caching is turned on.
config.consider_all_requests_local = false
config.action_controller.perform_caching = true
# Enable Rack::Cache to put a simple HTTP cache in front of your application
# Add `rack-cache` to your Gemfile before enabling this.
# For large-scale production use, consider using a caching reverse proxy like nginx, varnish or squid.
# config.action_dispatch.rack_cache = true
# Disable Rails's static asset server (Apache or nginx will already do this).
config.serve_static_assets = false
# Compress JavaScripts and CSS.
config.assets.js_compressor = :uglifier
# config.assets.css_compressor = :sass
# Do not fallback to assets pipeline if a precompiled asset is missed.
config.assets.compile = false
# Generate digests for assets URLs.
config.assets.digest = true
# `config.assets.precompile` and `config.assets.version` have moved to config/initializers/assets.rb
# Specifies the header that your server uses for sending files.
# config.action_dispatch.x_sendfile_header = "X-Sendfile" # for apache
# config.action_dispatch.x_sendfile_header = 'X-Accel-Redirect' # for nginx
# Force all access to the app over SSL, use Strict-Transport-Security, and use secure cookies.
# config.force_ssl = true
# Set to :debug to see everything in the log.
config.log_level = :debug
# Prepend all log lines with the following tags.
# config.log_tags = [ :subdomain, :uuid ]
# Use a different logger for distributed setups.
# config.logger = ActiveSupport::TaggedLogging.new(SyslogLogger.new)
# Use a different cache store in production.
# config.cache_store = :mem_cache_store
# Enable serving of images, stylesheets, and JavaScripts from an asset server.
# config.action_controller.asset_host = "http://assets.example.com"
# Ignore bad email addresses and do not raise email delivery errors.
# Set this to true and configure the email server for immediate delivery to raise delivery errors.
# config.action_mailer.raise_delivery_errors = false
# Enable locale fallbacks for I18n (makes lookups for any locale fall back to
# the I18n.default_locale when a translation cannot be found).
config.i18n.fallbacks = true
# Send deprecation notices to registered listeners.
config.active_support.deprecation = :notify
# Disable automatic flushing of the log to improve performance.
# config.autoflush_log = false
# Use default logging formatter so that PID and timestamp are not suppressed.
config.log_formatter = ::Logger::Formatter.new
# Do not dump schema after migrations.
config.active_record.dump_schema_after_migration = false
end
| 40.050633 | 104 | 0.762642 |
d59c932c6f23e73c3b73c7bc53a3897988bcf855 | 206 | class Category < ApplicationRecord
has_many :workouts
scope :filter_duplicates, -> { group(:name).having("count(*) >= 1")}
  # Orders categories by how many workouts they have, most first
  # (left_joins assumes Rails >= 5, which ApplicationRecord implies).
  def self.order_by_size
    left_joins(:workouts).group(:id).order(Arel.sql("COUNT(workouts.id) DESC"))
  end
end
| 15.846154 | 72 | 0.640777 |
18f6a95566aae347ac23f5c9a83c62979b0d1074 | 660 | require 'yaml'
class MinitestQueryHook < Mumukit::Templates::FileHook
isolated true
line_number_offset 2, include_extra: true
def tempfile_extension
'_test.yml'
end
def compile_file_content(request)
if request.query.strip != 'rake test'
raise Mumukit::CompilationError, t(:unrecognized_command)
end
"require 'minitest/autorun'\n#{request.extra}\n#{request.content}"
end
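  # Illustrative shape of the compiled test file (the request fields shown here
  # are placeholders supplied by the incoming Mumuki request):
  #
  #   require 'minitest/autorun'
  #   <request.extra>     # e.g. the code under test
  #   <request.content>   # e.g. a Minitest::Test subclass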
def command_line(filename)
"ruby #{filename} --seed 0 2>&1"
end
def post_process_file(file, result, status)
if result =~ /^.+\n\n(# Running\:\n\n.+?\n\n).+?\n\n(.+)$/m
["#{$1}#{$2}", status]
else
super
end
end
end
| 21.290323 | 70 | 0.651515 |
280aa52318a214d26e97add4af7431f83bb488b7 | 194 | class CreateAirlines < ActiveRecord::Migration[6.0]
def change
create_table :airlines do |t|
t.string :name
t.string :icao_code, limit: 4
t.timestamps
end
end
end
| 17.636364 | 51 | 0.654639 |
017690c9cc2e152d9fb0f433b7975bf1367980a8 | 552 | class CreateBridgeCacheDomains < ActiveRecord::Migration[5.0]
def change
return unless BridgeCache.use_internal_database
create_table :bridge_cache_domains do |t|
t.integer :bridge_id, limit: 8
t.string :name
t.integer :parent_id, limit: 8
t.datetime :created_at
t.datetime :updated_at
t.datetime :deleted_at
t.text :config
t.integer :owner_id, limit: 8
t.boolean :active
t.integer :users_count
t.integer :course_templates_count
t.integer :domain_type
end
end
end
| 27.6 | 61 | 0.684783 |
f7cb08bfc2c262892eca8c533b64fed9cf276bd2 | 594 | module Spina
class Blog::CategoriesController < ApplicationController
before_action :set_page
before_action :find_category
before_action :find_posts
def show
end
private
def find_category
@category = Spina::Blog::Category.friendly.find params[:id]
end
def find_posts
@posts = @category.posts.available.live.order(published_at: :desc).page(params[:page])
end
def set_page
@page = Spina::Page.find_or_create_by name: 'blog' do |page|
page.link_url = '/blog'
page.deletable = false
end
end
end
end
| 19.8 | 92 | 0.664983 |
1dd95805c3de220b3668983f54ea1186e99c4749 | 146 | module OrigenTesters
MAJOR = 0
MINOR = 49
BUGFIX = 0
DEV = 0
VERSION = [MAJOR, MINOR, BUGFIX].join(".") + (DEV ? ".pre#{DEV}" : '')
end
| 18.25 | 72 | 0.561644 |
26cec0d333e0dd2791479186140d01aa7cac5439 | 261 | #!/usr/bin/ruby
# Install with 'gem install droplet_kit'
require 'droplet_kit'
require './config.rb'
client = DropletKit::Client.new(access_token: @docean_token_v2)
client.ssh_keys.all().each { |x|
puts "[SSHKey] id: " + x.id.to_s + " , name: " + x.name
}
| 20.076923 | 63 | 0.678161 |
e99813d1cdef619ed848d9fba390261133a91d46 | 948 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2021_01_18_133130) do
create_table "products", force: :cascade do |t|
t.string "name"
t.string "tagline"
t.datetime "created_at", null: false
t.datetime "updated_at", null: false
end
end
| 41.217391 | 86 | 0.767932 |
abf874ba4a2dec27e58e391ec3e63eb6c6faa5ba | 423 | module ExecutableHooks
module Specification
def self.find
@executable_hooks_spec ||=
if Gem::Specification.respond_to?(:find_by_name)
Gem::Specification.find_by_name("executable-hooks")
else
Gem.source_index.find_name("executable-hooks").last
end
rescue Gem::LoadError
nil
end
def self.version
find ? find.version.to_s : nil
end
end
end
| 23.5 | 61 | 0.647754 |
28855cadd76a81a6958d0912e775621aa9f5b1f8 | 4,870 | # This file was generated by the `rails generate rspec:install` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
require 'simplecov'
require 'codecov'
SimpleCov.start
SimpleCov.formatter = SimpleCov::Formatter::Codecov
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = true
end
# This option will default to `:apply_to_host_groups` in RSpec 4 (and will
# have no way to turn it off -- the option exists only for backwards
# compatibility in RSpec 3). It causes shared context metadata to be
# inherited by the metadata hash of host groups and examples, rather than
# triggering implicit auto-inclusion in groups with matching metadata.
config.shared_context_metadata_behavior = :apply_to_host_groups
# The settings below are suggested to provide a good initial experience
# with RSpec, but feel free to customize to your heart's content.
=begin
# This allows you to limit a spec run to individual examples or groups
# you care about by tagging them with `:focus` metadata. When nothing
# is tagged with `:focus`, all examples get run. RSpec also provides
# aliases for `it`, `describe`, and `context` that include `:focus`
# metadata: `fit`, `fdescribe` and `fcontext`, respectively.
config.filter_run_when_matching :focus
# Allows RSpec to persist some state between runs in order to support
# the `--only-failures` and `--next-failure` CLI options. We recommend
# you configure your source control system to ignore this file.
config.example_status_persistence_file_path = "spec/examples.txt"
# Limits the available syntax to the non-monkey patched syntax that is
# recommended. For more details, see:
# - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/
# - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
# - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode
config.disable_monkey_patching!
# Many RSpec users commonly either run the entire suite or an individual
# file, and it's useful to allow more verbose output when running an
# individual spec file.
if config.files_to_run.one?
# Use the documentation formatter for detailed output,
# unless a formatter has already been configured
# (e.g. via a command-line flag).
config.default_formatter = "doc"
end
# Print the 10 slowest examples and example groups at the
# end of the spec run, to help surface which specs are running
# particularly slow.
config.profile_examples = 10
# Run specs in random order to surface order dependencies. If you find an
# order dependency and want to debug it, you can fix the order by providing
# the seed, which is printed after each run.
# --seed 1234
config.order = :random
# Seed global randomization in this process using the `--seed` CLI option.
# Setting this allows you to use `--seed` to deterministically reproduce
# test failures related to randomization by passing the same `--seed` value
# as the one that triggered the failure.
Kernel.srand config.seed
=end
end
| 46.826923 | 92 | 0.744969 |
bb20812851e634687f829d4b747f6550b1d0c1ae | 172 | include_recipe "curl"
package "python"
package "python-dev"
execute "curl http://python-distribute.org/distribute_setup.py | python" do
not_if "which easy_install"
end
| 19.111111 | 75 | 0.773256 |
ab99e5f5338828b8b25cd4709e71268b0eded9d7 | 722 | Pod::Spec.new do |s|
s.name = "DacteevMusic"
s.version = "0.0.1"
s.summary = "Dacteev Music SDK"
s.homepage = "https://github.com/Dacteev/dacteev-music-sdk-swift"
s.license = { :type => "MIT", :file => "LICENSE.md" }
s.authors = { "Axel Etcheverry" => "[email protected]" }
s.social_media_url = "http://twitter.com/euskadi31"
s.ios.deployment_target = "9.0"
s.osx.deployment_target = "10.11"
s.tvos.deployment_target = "9.0"
s.watchos.deployment_target = "2.0"
s.source = {
:git => "https://github.com/Dacteev/dacteev-music-sdk-swift.git",
:tag => "v#{s.version}"
}
s.source_files = "Sources/*.swift"
s.dependency "Alamofire", "~> 4.0"
end
| 30.083333 | 73 | 0.603878 |
e90096335040e28beb23f1cac9d20347a498e63b | 6,071 | # frozen_string_literal: true
require "openssl"
require "bundler/settings"
RSpec.describe Bundler::Env do
let(:git_proxy_stub) { Bundler::Source::Git::GitProxy.new(nil, nil, nil) }
describe "#report" do
it "prints the environment" do
out = described_class.report
expect(out).to include("Environment")
expect(out).to include(Bundler::VERSION)
expect(out).to include(Gem::VERSION)
expect(out).to include(described_class.send(:ruby_version))
expect(out).to include(described_class.send(:git_version))
expect(out).to include(OpenSSL::OPENSSL_VERSION)
end
describe "rubygems paths" do
it "prints gem home" do
with_clear_paths("GEM_HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("Gem Home /a/b/c")
end
end
it "prints gem path" do
with_clear_paths("GEM_PATH", "/a/b/c#{File::PATH_SEPARATOR}d/e/f") do
out = described_class.report
expect(out).to include("Gem Path /a/b/c#{File::PATH_SEPARATOR}d/e/f")
end
end
it "prints user home" do
skip "needs to use a valid HOME" if Gem.win_platform? && RUBY_VERSION < "2.6.0"
with_clear_paths("HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("User Home /a/b/c")
end
end
it "prints user path" do
skip "needs to use a valid HOME" if Gem.win_platform? && RUBY_VERSION < "2.6.0"
with_clear_paths("HOME", "/a/b/c") do
allow(File).to receive(:exist?)
allow(File).to receive(:exist?).with("/a/b/c/.gem").and_return(true)
out = described_class.report
expect(out).to include("User Path /a/b/c/.gem")
end
end
it "prints bin dir" do
with_clear_paths("GEM_HOME", "/a/b/c") do
out = described_class.report
expect(out).to include("Bin Dir /a/b/c/bin")
end
end
private
def with_clear_paths(env_var, env_value)
old_env_var = ENV[env_var]
ENV[env_var] = env_value
Gem.clear_paths
yield
ensure
ENV[env_var] = old_env_var
end
end
context "when there is a Gemfile and a lockfile and print_gemfile is true" do
before do
gemfile "gem 'rack', '1.0.0'"
lockfile <<-L
GEM
remote: #{file_uri_for(gem_repo1)}/
specs:
rack (1.0.0)
DEPENDENCIES
rack
BUNDLED WITH
1.10.0
L
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
end
let(:output) { described_class.report(:print_gemfile => true) }
it "prints the Gemfile" do
expect(output).to include("Gemfile")
expect(output).to include("'rack', '1.0.0'")
end
it "prints the lockfile" do
expect(output).to include("Gemfile.lock")
expect(output).to include("rack (1.0.0)")
end
end
context "when there no Gemfile and print_gemfile is true" do
let(:output) { described_class.report(:print_gemfile => true) }
it "prints the environment" do
expect(output).to start_with("## Environment")
end
end
context "when Gemfile contains a gemspec and print_gemspecs is true" do
let(:gemspec) do
strip_whitespace(<<-GEMSPEC)
Gem::Specification.new do |gem|
gem.name = "foo"
gem.author = "Fumofu"
end
GEMSPEC
end
before do
gemfile("gemspec")
File.open(bundled_app.join("foo.gemspec"), "wb") do |f|
f.write(gemspec)
end
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
end
it "prints the gemspec" do
output = described_class.report(:print_gemspecs => true)
expect(output).to include("foo.gemspec")
expect(output).to include(gemspec)
end
end
context "when eval_gemfile is used" do
it "prints all gemfiles" do
create_file bundled_app("other/Gemfile-other"), "gem 'rack'"
create_file bundled_app("other/Gemfile"), "eval_gemfile 'Gemfile-other'"
create_file bundled_app("Gemfile-alt"), <<-G
source "#{file_uri_for(gem_repo1)}"
eval_gemfile "other/Gemfile"
G
gemfile "eval_gemfile #{bundled_app("Gemfile-alt").to_s.dump}"
allow(Bundler::SharedHelpers).to receive(:find_gemfile).and_return(bundled_app_gemfile)
allow(Bundler::SharedHelpers).to receive(:pwd).and_return(bundled_app)
output = described_class.report(:print_gemspecs => true)
expect(output).to include(strip_whitespace(<<-ENV))
## Gemfile
### Gemfile
```ruby
eval_gemfile #{bundled_app("Gemfile-alt").to_s.dump}
```
### Gemfile-alt
```ruby
source "#{file_uri_for(gem_repo1)}"
eval_gemfile "other/Gemfile"
```
### other/Gemfile
```ruby
eval_gemfile 'Gemfile-other'
```
### other/Gemfile-other
```ruby
gem 'rack'
```
### Gemfile.lock
```
<No #{bundled_app_lock} found>
```
ENV
end
end
context "when the git version is OS specific" do
it "includes OS specific information with the version number" do
expect(git_proxy_stub).to receive(:git).with("--version").
and_return("git version 1.2.3 (Apple Git-BS)")
expect(Bundler::Source::Git::GitProxy).to receive(:new).and_return(git_proxy_stub)
expect(described_class.report).to include("Git 1.2.3 (Apple Git-BS)")
end
end
end
describe ".version_of" do
let(:parsed_version) { described_class.send(:version_of, "ruby") }
it "strips version of new line characters" do
expect(parsed_version).to_not end_with("\n")
end
end
end
| 28.502347 | 95 | 0.589853 |
d5637e5482f91aecf24a4b35357dbe39656615dc | 861 | class CreateBandEvents < ActiveRecord::Migration
def change
create_table :band_events do |t|
t.integer :band_id
t.integer :event_id
t.boolean :confirmed #(y/n) (validate when confirmed)
t.time :load_time
t.time :doors_time
t.time :show_time
t.time :set_time
t.integer :tour_id
t.boolean :food_comp
t.boolean :drink_comp
t.boolean :hotel_comp
# COMP: (tied to event, pull from most recent event at that venue, if it exists, and make it editable)
t.float :flat_rate
t.boolean :door_deal #(y/n)
t.string :door_deal_details #(if door deal, sub form appears.)
t.integer :stay_id
t.string :event_notes
# (make available after event ends:)
t.float :merch_total
t.float :tips_total
t.timestamps null: false
end
end
end
| 25.323529 | 108 | 0.641115 |
7abd5ca8772b26e17caba59e0fcc553b6ea05397 | 1,459 | # frozen_string_literal: true
require 'spec_helper'
RSpec.describe 'PipelineDestroy' do
include GraphqlHelpers
let_it_be(:project) { create(:project) }
let_it_be(:user) { project.first_owner }
let_it_be(:pipeline) { create(:ci_pipeline, :success, project: project, user: user) }
let(:mutation) do
variables = {
id: pipeline.to_global_id.to_s
}
graphql_mutation(:pipeline_destroy, variables, 'errors')
end
it 'returns an error if the user is not allowed to destroy the pipeline' do
post_graphql_mutation(mutation, current_user: create(:user))
expect(graphql_errors).not_to be_empty
end
it 'destroys a pipeline' do
post_graphql_mutation(mutation, current_user: user)
expect(response).to have_gitlab_http_status(:success)
expect { pipeline.reload }.to raise_error(ActiveRecord::RecordNotFound)
end
context 'when project is undergoing stats refresh' do
before do
create(:project_build_artifacts_size_refresh, :pending, project: pipeline.project)
end
it 'returns an error and does not destroy the pipeline' do
expect(Gitlab::ProjectStatsRefreshConflictsLogger)
.to receive(:warn_request_rejected_during_stats_refresh)
.with(pipeline.project.id)
post_graphql_mutation(mutation, current_user: user)
expect(graphql_mutation_response(:pipeline_destroy)['errors']).not_to be_empty
expect(pipeline.reload).to be_persisted
end
end
end
| 29.77551 | 88 | 0.740918 |
39f8c7667c13278338aabf664a9803f918d9a1dc | 1,352 | class RbacSetup < ActiveRecord::Migration
def self.up
create_table "permissions" do |t|
t.string "provider"
t.string "operation"
t.string "rule"
t.timestamps
end
create_table "permissions_roles", :id => false do |t|
t.integer "permission_id"
t.integer "role_id"
end
add_index "permissions_roles", ["permission_id"], :name => "index_permissions_roles_on_permission_id"
add_index "permissions_roles", ["role_id"], :name => "index_permissions_roles_on_role_id"
create_table "roles" do |t|
t.string "name"
end
create_table "roles_users", :id => false do |t|
t.integer "role_id"
t.integer "user_id"
end
add_index "roles_users", ["role_id"], :name => "index_roles_users_on_role_id"
add_index "roles_users", ["user_id"], :name => "index_roles_users_on_user_id"
create_table "users", :force => true do |t|
t.string "login"
t.string "email"
t.string "crypted_password", :limit => 40
t.string "salt", :limit => 40
t.datetime "created_at"
t.datetime "updated_at"
t.string "remember_token"
t.datetime "remember_token_expires_at"
t.string "prs_id"
t.string "first_name"
t.string "last_name"
end
end
def self.down
end
end
| 27.591837 | 105 | 0.620562 |
79de989621c4e59df2f3fd604a6c8a68d35b75f5 | 110 | Rails.application.routes.draw do
post '/graphql', to: 'graphql#execute'
devise_for :users, skip: :all
end | 22 | 40 | 0.727273 |
5de6ef88a864371c3fd435ecd9a478c2134b7a5b | 87 | $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
require 'ld4l_browser_data'
| 29 | 58 | 0.758621 |
6ae7fa3abed07f9cb1a51191358916e491bf1825 | 650 | Pod::Spec.new do |s|
s.name = 'QEDSDK'
s.version = '1.1.4'
s.summary = 'The QED REST API provides an easy way to leverage blockchain technology attaching immutable timestamps to your files.'
s.homepage = 'http://qed.digital/'
s.author = { 'Name' => '[email protected]' }
s.license = { :type => 'Apache-2.0', :file => 'LICENSE' }
s.platform = :ios
s.source = { :git => 'https://github.com/buzztechno/QEDSDK_POD.git', :tag => 'v1.1.4' }
s.ios.deployment_target = '11.0'
s.ios.vendored_frameworks = 'QEDSDK.framework'
end
| 40.625 | 145 | 0.550769 |
e25d5f8347394c3db4b2637ed11e6a3f90bbe1d4 | 689 | cask "font-iosevka-ss09" do
version "7.0.4"
sha256 "9749448e5b37d2fb7549b10a45b86e708d4c491a956a2abba57493d9359b4935"
url "https://github.com/be5invis/Iosevka/releases/download/v#{version}/ttc-iosevka-ss09-#{version}.zip"
name "Iosevka SS09"
desc "Sans-serif, slab-serif, monospace and quasi‑proportional typeface family"
homepage "https://github.com/be5invis/Iosevka/"
font "iosevka-ss09-bold.ttc"
font "iosevka-ss09-extrabold.ttc"
font "iosevka-ss09-extralight.ttc"
font "iosevka-ss09-heavy.ttc"
font "iosevka-ss09-light.ttc"
font "iosevka-ss09-medium.ttc"
font "iosevka-ss09-regular.ttc"
font "iosevka-ss09-semibold.ttc"
font "iosevka-ss09-thin.ttc"
end
| 34.45 | 105 | 0.756168 |
1d8ec850582bdf2473822a9cf57ae191b5e31af6 | 739 | # Copyright 2012, Dell
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
class BarclampImportHive < ActiveRecord::Migration
def up
Barclamp.import_1x 'hive'
end
def down
Barclamp.delete(Barclamp.find_by_name 'hive')
end
end
| 29.56 | 74 | 0.753721 |
1d62ceea1731e3df5f401f21345e260634cbeb22 | 189 | class TokyoMetro::Factory::Seed::Static::RailwayDirection::Hash < TokyoMetro::Factory::Seed::Static::MetaClass::Hash
include ::TokyoMetro::ClassNameLibrary::Static::RailwayDirection
end | 37.8 | 116 | 0.793651 |
6a3f948427fd2ffe764d18de16739868cd90d991 | 210 | require "test_helper"
class HttpclientxTest < Minitest::Test
def test_that_it_has_a_version_number
refute_nil ::Httpclientx::VERSION
end
def test_it_does_something_useful
assert false
end
end
| 17.5 | 39 | 0.790476 |
ff2b0672f7b6d0b207a5cde3fc05500382fe0aef | 1,553 | class Roda
module RodaPlugins
# The content_for plugin is designed to be used with the
# render plugin, allowing you to store content inside one
# template, and retrieve that content inside a separate
# template. Most commonly, this is so view templates
# can set content for the layout template to display outside
# of the normal content pane.
#
# The content_for template probably only works with erb
# templates, and requires that you don't override the
# +:outvar+ render option. In the template in which you
# want to store content, call content_for with a block:
#
# <% content_for :foo do %>
# Some content here.
# <% end %>
#
# In the template in which you want to retrieve content,
# call content_for without the block:
#
# <%= content_for :foo %>
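    #
    # A minimal setup sketch (illustrative, not from the original docs; the
    # template name is hypothetical):
    #
    #   plugin :render
    #   plugin :content_for
    #
    #   route do |r|
    #     r.root { view('index') }  # index.erb stores content for the layout
    #   end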
module ContentFor
module InstanceMethods
# If called with a block, store content enclosed by block
# under the given key. If called without a block, retrieve
# stored content with the given key, or return nil if there
# is no content stored with that key.
def content_for(key, &block)
if block
@_content_for ||= {}
buf_was = @_out_buf
@_out_buf = ''
yield
@_content_for[key] = @_out_buf
@_out_buf = buf_was
elsif @_content_for
@_content_for[key]
end
end
end
end
register_plugin(:content_for, ContentFor)
end
end
| 33.042553 | 67 | 0.619446 |
0153a1e6c15eccd45159c1b275d8ce28fc17ae62 | 194 | require "test_helper"
class AlpsTest < Minitest::Test
def test_casually
Alps.x(:test)
a = Class.new do
def a
1 + 1
end
end
a.new.a
sleep 2
end
end
| 11.411765 | 31 | 0.556701 |
08bf33c288cab6a59ec79e8bc079d2fb747f96dc | 173 | module ErrnoKnows
def knows? error
@constants ||= constants.map{|n| const_get(n)}
@constants.include? error.class
end
end
module Errno
extend ErrnoKnows
end
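# Usage sketch (illustrative, not part of the original file):
#   Errno.knows?(Errno::ENOENT.new)   # => true
#   Errno.knows?(RuntimeError.new)    # => false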
| 14.416667 | 50 | 0.710983 |
18048f0858434176acecbfdc694caf558e97c52f | 698 | # This migration comes from refinery_guides (originally 1)
class CreateGuidesGuides < ActiveRecord::Migration
def up
create_table :refinery_guides do |t|
t.string :title
t.text :description
t.text :raw_source
t.text :html
t.string :author
t.string :category
t.string :source_url
t.string :sha
t.integer :position
t.timestamps
end
end
def down
if defined?(::Refinery::UserPlugin)
::Refinery::UserPlugin.destroy_all({:name => "refinerycms-guides"})
end
if defined?(::Refinery::Page)
::Refinery::Page.delete_all({:link_url => "/guides/guides"})
end
drop_table :refinery_guides
end
end
| 19.942857 | 73 | 0.646132 |
f861dd0da199e248dd32bd1518a738ed0c296414 | 1,611 | # frozen_string_literal: true
class CommentsController < ApplicationController
before_action :authenticate_user!
before_action :set_comment, only: %i[show edit update destroy]
before_action :set_idea, only: %i[new create show edit update destroy]
# GET /comments
def index
@comments = Comment.includes(:user).where('idea_id = ?', params[:idea_id])
end
def show; end
def new
authorize @idea, policy_class: CommentPolicy
@comment = @idea.comments.build
end
def edit; end
def update
authorize @comment
if @comment.update(comment_params)
redirect_to idea_comment_path(@comment.idea, @comment), notice: 'Comment was successfully updated.'
else
render :edit
end
end
def destroy
authorize @comment
@comment.redacted = true
@comment.save!
redirect_to idea_path(@comment.idea), notice: 'Comment was successfully deleted.'
end
def create
authorize @idea, policy_class: CommentPolicy
create_comment
return if performed?
redirect_to idea_comment_path(@comment.idea, @comment), notice: 'Comment created'
end
private
def create_comment
@comment = @idea.comments.build(comment_params)
@comment.status_at_comment_time = Comment.status_at_comment_times[@idea.status.to_sym]
@comment.user = current_user
render :new unless @comment.save
end
def set_comment
@comment = Comment.includes(:user).find(params[:id])
end
def set_idea
@idea = Idea.includes(:votes).find(params[:idea_id])
end
def comment_params
params.require(:comment).permit(
:body
)
end
end
| 22.375 | 105 | 0.711359 |
f8ac9ba6fb9f5881f5cd9706f37155103985bbd6 | 336 | require 'test_helper'
class ProjectTest < ActiveSupport::TestCase
context 'associations' do
should have_many(:positions)
should belong_to(:organization)
end
context 'validations' do
should validate_length_of(:title).
is_at_least(3).is_at_most(20)
should validate_presence_of(:organization_id)
end
end
| 21 | 49 | 0.744048 |
e2ea5b7c38105fe201e091c32ac65799c8158be3 | 557 | class IGVTools < FPM::Cookery::Recipe
description 'The igvtools utility provides a set of tools for pre-processing data files. '
name 'igvtools'
version '2.3.94'
revision 0
homepage 'https://www.broadinstitute.org/igv/igvtools'
source "http://data.broadinstitute.org/igv/projects/downloads/igvtools_#{version}.zip"
md5 'b7faea83f121246a1e9bf03f26ba52ee'
# Let's install build dependencies first.
depends ['default-jre', 'default-jdk']
# Build:
def build
end
# Install:
def install
bin.install Dir["IGVTools/*"]
end
end
| 25.318182 | 92 | 0.723519 |
28ca93188ce6a5d9d71d70ba1cf2c59872365474 | 527 | module AwsAlertMonitor
class Config
attr_accessor :logger, :file
def initialize(args={})
@opts = args[:opts] ||= Hash.new
log_level = args[:log_level]
self.logger = AwsAlertMonitor::Logger.new :log_level => log_level
self.file = load_config_file
end
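    # Illustrative construction (only :log_level and :opts are read here; other
    # option names would be application-specific):
    #
    #   config = AwsAlertMonitor::Config.new :log_level => :debug
    #   config.file   # => parsed ~/.aws-alert-monitor.yml, or {} when absent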
private
def load_config_file
config_file = "#{ENV['HOME']}/.aws-alert-monitor.yml"
      if File.exist? config_file
YAML::load File.open config_file
else
{ }
end
end
end
end
| 19.518519 | 71 | 0.607211 |
e8420586ecdf7e753bae3ea9ee4ba41dee111f9e | 180 | class AddListenersToAlbumsAndArtists < ActiveRecord::Migration[7.0]
def change
add_column :artists, :listeners, :bigint
add_column :albums, :listeners, :bigint
end
end
| 25.714286 | 67 | 0.755556 |
8736b1af21e793d0441fe90f48cfd33f48b63122 | 365 | require 'twitter/geo'
module Twitter
class Geo
class Point < Twitter::Geo
      # @return [Float]
def latitude
coordinates[0]
end
alias_method :lat, :latitude
      # @return [Float]
def longitude
coordinates[1]
end
alias_method :long, :longitude
alias_method :lng, :longitude
end
end
end
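# Usage sketch (illustrative; assumes the parent Twitter::Geo exposes
# #coordinates from the attributes hash it is initialized with):
#   point = Twitter::Geo::Point.new(:coordinates => [37.78, -122.40])
#   point.lat   # => 37.78
#   point.lng   # => -122.40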
| 17.380952 | 36 | 0.59726 |
039ee0bb76e7b80b65a54e359e49f950a74fadea | 760 | $:.unshift File.expand_path("../lib", __FILE__)
require "deb/s3"
Gem::Specification.new do |gem|
gem.name = "deb-s3"
gem.version = Deb::S3::VERSION
gem.author = "Ken Robertson"
gem.email = "[email protected]"
gem.homepage = "http://invalidlogic.com/"
gem.summary = "Easily create and manage an APT repository on S3."
gem.description = gem.summary
gem.license = "MIT"
gem.executables = "deb-s3"
gem.files = Dir["**/*"].select { |d| d =~ %r{^(README|bin/|ext/|lib/)} }
gem.required_ruby_version = '>= 1.9.3'
gem.add_dependency "thor", "~> 0.19.0"
gem.add_dependency "aws-sdk", "~> 1.66"
gem.add_development_dependency "minitest", "~> 5"
gem.add_development_dependency "rake", "~> 11"
end
| 30.4 | 74 | 0.625 |
d5d6ef5a37633711ff89eab4ed9cf1709258f9dd | 2,100 | module Sudoku
class Generator
include PeersAndUnits
VALID_DIFFICULTIES = ["easy", "medium", "hard", "samuraj"]
attr_reader :difficulty_analyzer
def initialize
@difficulty_analyzer = DifficultyAnalyzer.new
end
def generate(difficulty = "easy")
check_difficulty!(difficulty)
grid = start_grid
keys = Hamster.list(randomized_keys)
search(grid, difficulty, keys)
end
def check_difficulty!(difficulty)
raise "Invalid difficulty" unless VALID_DIFFICULTIES.include?(difficulty)
end
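    # Descriptive note (added for clarity, not in the original source): starting
    # from a fully solved grid, #search clears one randomly chosen square at a
    # time, recursing only while the puzzle stays uniquely solvable, until the
    # grid has enough empty squares and matches the requested difficulty.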
def search(grid, difficulty, keys)
raise "Could not generate a grid" if keys.empty?
return grid if done_generating?(grid, difficulty)
keys.reduce(nil) do |found_grid, key|
new_grid = grid.clone
new_grid.reset(key)
if uniquely_solvable? new_grid
new_keys = Hamster.list(*new_grid.assigned_keys.to_a.shuffle)
return search(new_grid, difficulty, new_keys)
else
return false
end
end
end
def done_generating?(grid, difficulty)
enough_empty_squares?(grid) && correct_difficulty?(difficulty, grid) && uniquely_solvable?(grid)
end
def enough_empty_squares?(grid)
grid.empty_values.size > 40
end
def uniquely_solvable?(grid)
solver = Solver.new(grid.to_propagating_grid)
solver.solve && solver.unique_solution?
end
def randomized_keys
keys = SORTED_KEYS.clone
keys.shuffle
end
def correct_difficulty?(difficulty, grid)
difficulty_analyzer.grid = grid
difficulty_analyzer.difficulty == difficulty
end
def start_grid
solve(seed).to_non_propagating_grid
end
def solve(grid)
solver = Solver.new(grid)
solver.solve(check_uniqueness: false)
solver.solution
end
def seed
grid = NonPropagatingGrid.new
grid.set(linear_random_key, linear_random)
grid
end
def linear_random_key
ROW_KEYS[linear_random] + linear_random.to_s
end
def linear_random
rand(8) + 1
end
end
end | 24.418605 | 102 | 0.67 |
bf704ebef87d1f69f77052904bc4ab680fb7ea1a | 126 | class AddIndexesToIncidents < ActiveRecord::Migration[6.0]
def change
add_index :incidents, :occurred_on_date
end
end
| 21 | 58 | 0.777778 |
26b66df074d172491b749ba78e8498771db75ca4 | 2,379 | require "test_helper"
class PasswordResetsTest < ActionDispatch::IntegrationTest
def setup
ActionMailer::Base.deliveries.clear
@user = users(:user_a)
end
test "password resets" do
get new_password_reset_path
assert_template "password_resets/new"
assert_select "input[name=?]", "password_reset[email]"
# Invalid email
post password_resets_path, params: { password_reset: { email: "" } }
assert_not flash.empty?
assert_template "password_resets/new"
# Valid email
post password_resets_path,
params: { password_reset: { email: @user.email } }
assert_not_equal @user.reset_digest, @user.reload.reset_digest
assert_equal 1, ActionMailer::Base.deliveries.size
assert_not flash.empty?
assert_redirected_to root_url
# Password reset form
user = assigns(:user)
# Wrong email
get edit_password_reset_path(user.reset_token, email: "")
assert_redirected_to root_url
# Inactive user
user.toggle!(:activated)
get edit_password_reset_path(user.reset_token, email: user.email)
assert_redirected_to root_url
user.toggle!(:activated)
# Right email, wrong token
get edit_password_reset_path("wrong token", email: user.email)
assert_redirected_to root_url
# Right email, right token
get edit_password_reset_path(user.reset_token, email: user.email)
assert_template "password_resets/edit"
assert_select "input[name=email][type=hidden][value=?]", user.email
# Invalid password & confirmation
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "barquux" } }
assert_select "div#error_explanation"
# Empty password
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "",
password_confirmation: "" } }
assert_select "div#error_explanation"
# Valid password & confirmation
patch password_reset_path(user.reset_token),
params: { email: user.email,
user: { password: "foobaz",
password_confirmation: "foobaz" } }
assert logged_in?
assert_not flash.empty?
assert_redirected_to user
end
end
| 37.171875 | 72 | 0.664985 |
d54d20cc1c5b50741b8f7c1b2de024779a21db52 | 1,670 | # -*- coding: utf-8 -*- #
# frozen_string_literal: true
describe Rouge::Lexers::ConsoleLexer do
let(:subject) { Rouge::Lexers::ConsoleLexer.new }
let(:klass) { Rouge::Lexers::ConsoleLexer }
include Support::Lexing
it 'parses a basic prompt' do
assert_tokens_equal '$ foo',
['Generic.Prompt', '$'],
['Text.Whitespace', ' '],
['Text', 'foo']
end
it 'parses a custom prompt' do
subject_with_options = klass.new({ prompt: '%' })
assert_tokens_equal '% foo', subject_with_options,
['Generic.Prompt', '%'],
['Text.Whitespace', ' '],
['Text', 'foo']
end
it 'parses a custom error' do
subject_with_options = klass.new({ error: 'No command,Unhandled' })
assert_tokens_equal 'No command \'foo\' found, did you mean:', subject_with_options,
['Generic.Error', 'No command \'foo\' found, did you mean:']
assert_tokens_equal 'Unhandled condition in test.lisp', subject_with_options,
['Generic.Error', 'Unhandled condition in test.lisp']
assert_tokens_equal 'foo', subject_with_options,
['Generic.Output', 'foo']
end
it 'parses single-line comments' do
subject_with_options = klass.new({ comments: true })
assert_tokens_equal '# this is a comment', subject_with_options,
['Comment', '# this is a comment']
end
it 'ignores single-line comments' do
assert_tokens_equal '# this is not a comment',
['Generic.Prompt', '#'],
['Text.Whitespace', ' '],
['Text', 'this is not a comment']
end
describe 'guessing' do
include Support::Guessing
it 'guesses by filename' do
assert_guess :filename => 'foo.cap'
end
end
end
| 29.821429 | 88 | 0.643713 |
1abce1a79c7477224459790420fc73dfe5cd8d07 | 4,071 | require 'exact_target_client/exact_target_rest_client'
require 'exact_target_client/exact_target_soap_client'
module ExactTargetClient
class ExactTargetAPI
attr_accessor :oauth_token, :refresh_token
    class TimeOut < Exception; end
    class TokenExpired < Exception; end
    class ClientException < Exception; end
def initialize
yield self if block_given?
raise ArgumentError, 'block not given' unless block_given?
init_clients
end
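    # Illustrative construction (token values are placeholders); the block is
    # required because #init_clients needs the OAuth token:
    #
    #   api = ExactTargetClient::ExactTargetAPI.new do |c|
    #     c.oauth_token   = 'current-token'
    #     c.refresh_token = 'refresh-token'
    #   end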
def refresh_oauth_token
results = @rest_client.get_oauth_token(Conf.client_id, Conf.client_secret, refresh_token)
if results
@oauth_token = results['accessToken']
@refresh_token = results['refreshToken']
refresh_clients(results['accessToken'])
results
end
end
def get_emails(ids = nil)
properties = %w(ID Name HTMLBody)
if ids.present?
filter = {property: 'ID', value: ids}
end
response = soap_client.retrieve('Email', properties, filter)
check_response(response)
end
def create_email(email_name, subject, html_template)
response = soap_client.create('Email',
{'Name' => email_name,
'Subject' => subject,
'HTMLBody' => html_template}
)
check_response(response)
end
def update_email(email_id, name, html_template, subject = nil)
properties = {'ID' => email_id, 'Name' => name, 'HTMLBody' => html_template}
if subject.present?
properties['Subject'] = subject
end
response = soap_client.update('Email', properties)
check_response(response)
end
def delete_email(email_id)
response = soap_client.delete('Email', {'ID' => email_id})
check_response(response)
end
def create_content_area(name, content)
response = soap_client.create('ContentArea',
{'Name' => name,
'Content' => content}
)
check_response(response)
end
def update_content_area(content_area_id, name, content)
response = soap_client.update('ContentArea', {'ID' => content_area_id, 'Name' => name, 'Content' => content})
check_response(response)
end
def delete_content_area(content_area_id)
response = soap_client.delete('ContentArea', {'ID' => content_area_id})
check_response(response)
end
def create_data_extension(properties)
response = soap_client.create('DataExtension', properties)
check_response(response)
end
def upsert_data_extension_row(data_extension_customer_key, primary_key_name, primary_key_value, object_hash)
rest_client.upsert_data_extension_row(data_extension_customer_key, primary_key_name, primary_key_value, object_hash)
end
def increment_data_extension_row(data_extension_customer_key, primary_key_name, primary_key_value, column, step = 1)
rest_client.increment_data_extension_row(data_extension_customer_key, primary_key_name, primary_key_value, column, step)
end
def get_subscribers_by_email(email, properties)
response = soap_client.retrieve('Subscriber', properties, {property: 'EmailAddress', value: email})
check_response(response)
end
private
attr_accessor :soap_client, :rest_client
def init_clients
@soap_client = ExactTargetClient::ExactTargetSoapClient.new do |c|
c.oauth_token = oauth_token
c.wsdl = Conf.wsdl % {:instance => oauth_token[0]} # WSDL instance is determined by first char of token
end
@rest_client = ExactTargetClient::ExactTargetRestClient.new do |c|
c.oauth_token = oauth_token
end
end
def refresh_clients(token)
@soap_client.set_oauth_token(token)
@rest_client.set_oauth_token(token)
end
def check_response(response)
if response.success?
response.results
else
raise ClientException.new(response.message)
end
end
end
end
| 31.55814 | 126 | 0.669369 |
7936ad19689a9fae82da8ac5bf84fada7e9bdaa0 | 8,960 | require 'action_view'
require 'fileutils'
require 'cucumber/formatter/io'
require 'cucumber/formatter/duration'
require 'cucumber/core/ast/scenario'
require 'cucumber/multiline_argument/data_table'
require File.join(File.dirname(__FILE__), 'view_helper')
require File.join(File.dirname(__FILE__), 'report')
# Starting with ActionPack 4.1.1, the module Mime doesn't get initialized before it's needed by PrettyFace and so
# it would blow up with errors about uninitialized constants. We need to explicitly load it to prevent this problem.
require 'action_dispatch/http/mime_type'
module PrettyFace
module Formatter
class Html
include Cucumber::Formatter::Io
include Cucumber::Formatter::Duration
include ViewHelper
attr_reader :report, :logo
def initialize(step_mother, path_or_io, options)
@path = path_or_io
set_path_and_file(path_or_io)
@path_to_erb = File.join(File.dirname(__FILE__), '..', 'templates')
@step_mother = step_mother
@options = options
# The expand option is set to true by RubyMine and cannot be turned off using the IDE. This option causes
# a test run while using this gem to terminate.
@options[:expand] = false unless @options.nil?
@report = Report.new
@img_id = 0
@logo = 'face.png'
end
def set_path_and_file(path_or_io)
return if path_or_io.nil?
dir = File.dirname(path_or_io)
FileUtils.mkdir_p dir unless File.directory? dir
@io = ensure_io(path_or_io)
end
def embed(src, mime_type, label)
case(mime_type)
when /^image\/(png|gif|jpg|jpeg)/
embed_image(src, label)
end
end
def embed_image(src, label)
@report.current_scenario.image << src.split(separator).last
@report.current_scenario.image_label << label
@report.current_scenario.image_id << "img_#{@img_id}"
@img_id += 1
filename = "#{File.dirname(@path)}#{separator}images"
FileUtils.cp src, filename
end
def before_features(features)
make_output_directories
@tests_started = Time.now
end
def features_summary_file
parts = @io.path.split(separator)
parts[parts.length - 1]
end
def before_feature(feature)
@report.add_feature ReportFeature.new(feature, features_summary_file)
end
def after_feature(feature)
@report.current_feature.close(feature)
end
def before_background(background)
@report.begin_background
end
def after_background(background)
@report.end_background
@report.current_feature.background << ReportStep.new(background)
end
def before_feature_element(feature_element)
@report.add_scenario ReportScenario.new(feature_element)
end
def after_feature_element(feature_element)
process_scenario(feature_element)
end
def before_table_row(example_row)
@before_example_row = example_row
@report.add_scenario ReportScenario.new(example_row) unless info_row?(example_row)
end
def after_table_row(example_row)
unless info_row?(example_row)
@report.current_scenario.populate(example_row)
build_scenario_outline_steps(example_row)
end
populate_cells(example_row) if example_row.instance_of? Cucumber::Core::Ast::DataTable
end
def before_step(step)
@step_timer = Time.now
end
def after_step(step)
step = process_step(step) unless step_belongs_to_outline? step
if @cells
step.table = @cells
@cells = nil
end
end
def after_features(features)
@features = features
@duration = format_duration(Time.now - @tests_started)
copy_images
copy_stylesheets
generate_report
end
def features
@report.features
end
def custom_suite_header?
return false unless customization_directory
Dir.foreach(customization_directory) do |file|
return true if file == '_suite_header.erb'
end
false
end
def custom_feature_header?
return false unless customization_directory
Dir.foreach(customization_directory) do |file|
return true if file == '_feature_header.erb'
end
false
end
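      # Descriptive note (added for clarity, not in the original source): report
      # headers and the logo can be customised by placing _suite_header.erb,
      # _feature_header.erb and/or logo.(png|gif|jpg|jpeg) under
      # features/support/pretty_face.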
private
def generate_report
paths = [@path_to_erb, customization_directory.to_s]
renderer = ActionView::Base.new(paths)
filename = File.join(@path_to_erb, 'main')
@io.puts renderer.render(:file => filename, :locals => {:report => self, :logo => @logo})
features.each do |feature|
write_feature_file(feature)
end
end
def write_feature_file(feature)
paths = [@path_to_erb, customization_directory.to_s]
renderer = ActionView::Base.new(paths)
filename = File.join(@path_to_erb, 'feature')
output_file = "#{File.dirname(@path)}#{separator}#{feature.file}"
to_cut = output_file.split(separator).last
directory = output_file.sub("#{separator}#{to_cut}", '')
FileUtils.mkdir_p directory unless File.directory? directory
file = File.new(output_file, Cucumber.file_mode('w'))
file.puts renderer.render(:file => filename, :locals => {:feature => feature, :logo => @logo, :customize => custom_feature_header?})
file.flush
file.close
end
def make_output_directories
make_directory 'images'
make_directory 'stylesheets'
end
def make_directory(dir)
path = "#{File.dirname(@path)}#{separator}#{dir}"
FileUtils.mkdir_p path unless File.directory? path
end
def copy_directory(dir, file_names, file_extension)
path = "#{File.dirname(@path)}#{separator}#{dir}"
file_names.each do |file|
copy_file File.join(File.dirname(__FILE__), '..', 'templates', "#{file}.#{file_extension}"), path
end
end
def copy_file(source, destination)
FileUtils.cp source, destination
end
def copy_images
copy_directory 'images', %w(failed passed pending undefined skipped table_failed table_passed table_pending table_undefined table_skipped), "png"
logo = logo_file
copy_file logo, "#{File.join(File.dirname(@path), 'images')}" if logo
copy_directory 'images', ['face'], 'png' unless logo
end
def copy_stylesheets
copy_directory 'stylesheets', ['style'], 'css'
end
def logo_file
dir = customization_directory
Dir.foreach(dir) do |file|
if file =~ /^logo\.(png|gif|jpg|jpeg)$/
@logo = file
return File.join(dir, file)
end
end if dir
end
def customization_directory
dir = File.join(File.expand_path('features'), 'support', 'pretty_face')
        return dir if File.exist? dir
end
def process_scenario(scenario)
@report.current_scenario.populate(scenario)
end
def process_step(step, status=nil)
duration = Time.now - @step_timer
report_step = ReportStep.new(step)
report_step.duration = duration
report_step.status = status unless status.nil?
if step.background
@report.current_feature.background << report_step if @report.processing_background_steps?
else
@report.add_step report_step
end
report_step
end
def scenario_outline?(feature_element)
feature_element.is_a? Cucumber::Core::Ast::ScenarioOutline
end
def info_row?(example_row)
return example_row.scenario_outline.nil? if example_row.respond_to? :scenario_outline
return true if example_row.instance_of? Cucumber::Formatter::LegacyApi::Ast::DataTableRow
false
end
def step_belongs_to_outline?(step)
scenario = step.instance_variable_get "@feature_element"
not scenario.nil?
end
def build_scenario_outline_steps(example_row)
si = example_row.cells
si.each do |row|
duration = Time.now - @step_timer
report_step = ReportStep.new(@before_example_row, example_row)
report_step.duration = duration
@report.add_step report_step
# process_step(row, row.status)
end
end
      # Returns the exception when its last backtrace frame points at the given step, otherwise nil.
      def step_error(step, exception)
        return nil if exception.nil?
        exception.backtrace[-1] =~ /^#{step.file_colon_line}/ ? exception : nil
      end
def populate_cells(example_row)
@cells ||= []
values = []
example_row.to_a.each do |cell|
values << cell.value
end
@cells << values
end
def separator
File::ALT_SEPARATOR || File::SEPARATOR
end
end
end
end
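# A minimal usage sketch (the report path is an example): register the formatter
# on the cucumber command line and point it at an output file.
#
#   cucumber --format PrettyFace::Formatter::Html --out reports/report.html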
| 31.111111 | 153 | 0.643527 |
e99a245e41e73b579b56c2d31ef2f63561c8c0c1 | 107 | require 'rails_i18n/common_pluralizations/one_other'
::RailsI18n::Pluralization::OneOther.with_locale(:sw) | 35.666667 | 53 | 0.841121 |
b9671884ab478be44ef2a38979b13275084828cf | 481 | module FeaturesHelper
def ensure_on(path)
visit(path) unless current_path == path
end
def verify(call)
expect(call).to be_truthy
end
def refute(call)
expect(call).to be_falsey
end
def dom_id_for(model)
ActionView::RecordIdentifier.dom_id(model)
end
def dom_id_selector(model)
"##{dom_id_for(model)}"
end
def pagination_params(options = {})
{
page: 1,
per_page: 20,
total_count: 0,
}.merge(options)
end
end
| 16.033333 | 46 | 0.654886 |
e9351cf1ee6805be86668d3612126d3637f8357e | 383 | cask "font-digital-numbers" do
version :latest
sha256 :no_check
# github.com/google/fonts/ was verified as official when first introduced to the cask
url "https://github.com/google/fonts/raw/master/ofl/digitalnumbers/DigitalNumbers-Regular.ttf"
name "Digital Numbers"
homepage "https://fonts.google.com/specimen/Digital+Numbers"
font "DigitalNumbers-Regular.ttf"
end
| 31.916667 | 96 | 0.772846 |
266991b180443c71dea6b3c64c407b495c8c74de | 701 | # Read about factories at https://github.com/thoughtbot/factory_girl
FactoryGirl.define do
factory :team_question_bet, class: QuestionBet do
answer '42'
association :bet, factory: :bet, strategy: :build
association :question, factory: :team_question, strategy: :build
end
factory :player_question_bet, class: QuestionBet do
answer '42'
association :bet, factory: :bet, strategy: :build
association :question, factory: :player_question, strategy: :build
end
factory :boolean_question_bet, class: QuestionBet do
answer 'true'
association :bet, factory: :bet, strategy: :build
association :question, factory: :boolean_question, strategy: :build
end
end
| 31.863636 | 71 | 0.736091 |
21f3c2f09c9a93cafe221c57061ce15054eae4ae | 7,589 | =begin
#Adobe Experience Manager (AEM) API
#Swagger AEM is an OpenAPI specification for Adobe Experience Manager (AEM) API
The version of the OpenAPI document: 3.2.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.3.1-SNAPSHOT
=end
require 'date'
module SwaggerAemClient
class SamlConfigurationPropertyItemsString
# property name
attr_accessor :name
# True if optional
attr_accessor :optional
# True if property is set
attr_accessor :is_set
# Property type, 1=String, 3=long, 11=boolean, 12=Password
attr_accessor :type
# Property value
attr_accessor :value
# Property description
attr_accessor :description
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
:'name' => :'name',
:'optional' => :'optional',
:'is_set' => :'is_set',
:'type' => :'type',
:'value' => :'value',
:'description' => :'description'
}
end
# Attribute type mapping.
def self.openapi_types
{
:'name' => :'String',
:'optional' => :'Boolean',
:'is_set' => :'Boolean',
:'type' => :'Integer',
:'value' => :'String',
:'description' => :'String'
}
end
# List of attributes with nullable: true
def self.openapi_nullable
Set.new([
])
end
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
def initialize(attributes = {})
if (!attributes.is_a?(Hash))
fail ArgumentError, "The input argument (attributes) must be a hash in `SwaggerAemClient::SamlConfigurationPropertyItemsString` initialize method"
end
# check to see if the attribute exists and convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h|
if (!self.class.attribute_map.key?(k.to_sym))
fail ArgumentError, "`#{k}` is not a valid attribute in `SwaggerAemClient::SamlConfigurationPropertyItemsString`. Please check the name to make sure it's valid. List of attributes: " + self.class.attribute_map.keys.inspect
end
h[k.to_sym] = v
}
if attributes.key?(:'name')
self.name = attributes[:'name']
end
if attributes.key?(:'optional')
self.optional = attributes[:'optional']
end
if attributes.key?(:'is_set')
self.is_set = attributes[:'is_set']
end
if attributes.key?(:'type')
self.type = attributes[:'type']
end
if attributes.key?(:'value')
self.value = attributes[:'value']
end
if attributes.key?(:'description')
self.description = attributes[:'description']
end
end
# Show invalid properties with the reasons. Usually used together with valid?
# @return Array for valid properties with the reasons
def list_invalid_properties
invalid_properties = Array.new
invalid_properties
end
# Check to see if the all the properties in the model are valid
# @return true if the model is valid
def valid?
true
end
# Checks equality by comparing each attribute.
# @param [Object] Object to be compared
def ==(o)
return true if self.equal?(o)
self.class == o.class &&
name == o.name &&
optional == o.optional &&
is_set == o.is_set &&
type == o.type &&
value == o.value &&
description == o.description
end
# @see the `==` method
# @param [Object] Object to be compared
def eql?(o)
self == o
end
# Calculates hash code according to all attributes.
# @return [Integer] Hash code
def hash
[name, optional, is_set, type, value, description].hash
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def self.build_from_hash(attributes)
new.build_from_hash(attributes)
end
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.openapi_types.each_pair do |key, type|
if type =~ /\AArray<(.*)>/i
# check to ensure the input is an array given that the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
self.send("#{key}=", attributes[self.class.attribute_map[key]].map { |v| _deserialize($1, v) })
end
elsif !attributes[self.class.attribute_map[key]].nil?
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]]))
end # or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# Deserializes the data based on type
# @param string type Data type
# @param string value Value to be deserialized
# @return [Object] Deserialized data
def _deserialize(type, value)
case type.to_sym
when :DateTime
DateTime.parse(value)
when :Date
Date.parse(value)
when :String
value.to_s
when :Integer
value.to_i
when :Float
value.to_f
when :Boolean
if value.to_s =~ /\A(true|t|yes|y|1)\z/i
true
else
false
end
when :Object
# generic object (usually a Hash), return directly
value
when /\AArray<(?<inner_type>.+)>\z/
inner_type = Regexp.last_match[:inner_type]
value.map { |v| _deserialize(inner_type, v) }
when /\AHash<(?<k_type>.+?), (?<v_type>.+)>\z/
k_type = Regexp.last_match[:k_type]
v_type = Regexp.last_match[:v_type]
{}.tap do |hash|
value.each do |k, v|
hash[_deserialize(k_type, k)] = _deserialize(v_type, v)
end
end
else # model
SwaggerAemClient.const_get(type).build_from_hash(value)
end
end
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# to_body is an alias to to_hash (backward compatibility)
# @return [Hash] Returns the object in the form of hash
def to_body
to_hash
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = self.send(attr)
if value.nil?
is_nullable = self.class.openapi_nullable.include?(attr)
next if !is_nullable || (is_nullable && !instance_variable_defined?(:"@#{attr}"))
end
hash[param] = _to_hash(value)
end
hash
end
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
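# A minimal usage sketch of the generated model above; the attribute values are
# illustrative assumptions, not AEM defaults.
#
#   prop = SwaggerAemClient::SamlConfigurationPropertyItemsString.new(
#     name: 'idpUrl', optional: false, is_set: true,
#     type: 1, value: 'https://idp.example.com/saml', description: 'IdP POST URL'
#   )
#   prop.valid?  #=> true
#   prop.to_hash #=> {:name=>"idpUrl", :optional=>false, :is_set=>true, ...}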
| 29.414729 | 232 | 0.613783 |
26c50bca38e8cc9954471c76b725a4b4724446c0 | 1,834 | require 'test_helper'
class UsersControllerTest < ActionDispatch::IntegrationTest
def setup
@user = users(:michael)
@second_user = users(:archer)
end
test "should get new" do
get signup_path
assert_response :success
end
test "should redirect index when not logged in" do
get users_path
assert_redirected_to login_url
end
test "should redirect edit when not logged in" do
get edit_user_path(@user)
assert_not flash.empty?
assert_redirected_to login_url
end
test "should redirect update when not logged in" do
patch user_path(@user), params: { user: { name: @user.name,
email: @user.email } }
assert_not flash.empty?
assert_redirected_to login_url
end
test "should redirect edit when logged in as wrong user" do
log_in_as(@second_user)
get edit_user_path(@user)
assert flash.empty?
assert_redirected_to root_url
end
test "should redirect update when logged in as wrong user" do
log_in_as(@second_user)
patch user_path(@user), params: { user: { name: @user.name,
email: @user.email } }
assert flash.empty?
assert_redirected_to root_url
end
test "should redirect destroy when not logged in" do
assert_no_difference "User.count" do
delete user_path(@user)
end
assert_redirected_to login_url
end
test "should redirect destroy when logged in as non-admin" do
log_in_as(@second_user)
assert_no_difference "User.count" do
delete user_path(@user)
end
assert_redirected_to root_url
end
test "should redirect following when not logged in" do
get following_user_path(@user)
assert_redirected_to login_url
end
test "should redirect followers when not logged in" do
get followers_user_path(@user)
assert_redirected_to login_url
end
end
| 25.123288 | 63 | 0.715921 |
b96ef796e741845ab755f59cd155c0648e0cec16 | 591 | require File.dirname(__FILE__) + '/../test_helper'
class CoffeeStatusTest < Test::Unit::TestCase
def setup
@room = Tinder::Room.new(stub, 1, 'Test room')
@bot = Scout::Bot.new(@room)
@command = Scout::Commands::CoffeeStatus.new("ma", @bot, "coffeestatus", [])
end
def test_update_status
@command.stubs(:args).returns(["ya plz"])
@command.process
assert_not_nil @command.statuses["ma"]
end
def test_clear
test_update_status
@command.stubs(:args).returns(%w(clear))
@command.process
assert_equal Hash.new, @command.statuses
end
end | 25.695652 | 80 | 0.670051 |
8759e92e2ad024c59a981ffa4a371196957262f4 | 44 | require 'cocoapods-usource/command/usource'
| 22 | 43 | 0.840909 |
ed348ca67ae5cfd5da96c135acda8bbf34246839 | 170 | # frozen_string_literal: true
require "spec_helper"
RSpec.describe Code do
it "#foo returns :foo being passed 42" do
expect(subject.foo(42)).to eq :foo
end
end
| 17 | 43 | 0.723529 |
5daa8ffceb0ccc52d652c2dd35d1afcdbd07ada0 | 518 | require "test_helper"
require "generators/markerb/mailer/mailer_generator"
class GeneratorTest < Rails::Generators::TestCase
tests Markerb::Generators::MailerGenerator
destination File.expand_path("../tmp", __FILE__)
setup :prepare_destination
test "assert all views are properly created with given name" do
run_generator %w(notifier foo bar baz)
assert_file "app/views/notifier/foo.markerb"
assert_file "app/views/notifier/bar.markerb"
assert_file "app/views/notifier/baz.markerb"
end
end | 32.375 | 65 | 0.776062 |
1d8cf052e5fbc1a62fe80de219dbff735d1628b7 | 2,622 | describe Unidom::Shipment::Shipment, type: :model do
before :each do
end
after :each do
end
context do
model_attributes = {
sender_party_id: SecureRandom.uuid,
sender_party_type: 'Unidom::Shipment::SenderParty::Mock',
sender_agent_id: SecureRandom.uuid,
sender_agent_type: 'Unidom::Shipment::SenderAgent::Mock',
sender_location_id: SecureRandom.uuid,
sender_location_type: 'Unidom::Shipment::SenderLocation::Mock',
sender_contact_id: SecureRandom.uuid,
sender_contact_type: 'Unidom::Shipment::SenderContact::Mock',
receiver_party_id: SecureRandom.uuid,
receiver_party_type: 'Unidom::Shipment::ReceiverParty::Mock',
receiver_agent_id: SecureRandom.uuid,
receiver_agent_type: 'Unidom::Shipment::ReceiverAgent::Mock',
receiver_location_id: SecureRandom.uuid,
receiver_location_type: 'Unidom::Shipment::ReceiverLocation::Mock',
receiver_contact_id: SecureRandom.uuid,
receiver_contact_type: 'Unidom::Shipment::ReceiverContact::Mock',
conveyance_code: 'CSSP',
estimated_ready_on: Date.current+2.days,
estimated_shipped_on: Date.current+3.days,
estimated_arrived_on: Date.current+9.days,
last_cancellable_at: Time.now+2.days,
shipped_at: Time.now+3.days,
received_at: Time.now+9.days,
cancelled_at: nil,
estimated_amount: 1_000.00,
actual_amount: 1_050.00
}
it_behaves_like 'Unidom::Common::Concerns::ModelExtension', model_attributes
it_behaves_like 'validates numericality', model_attributes, :estimated_amount,
range: 0..1_000_000_000, minimum_inclusive: true, maximum_inclusive: true
it_behaves_like 'validates numericality', model_attributes, :actual_amount,
range: 0..1_000_000_000, minimum_inclusive: true, maximum_inclusive: true
shipment_item_1_attributes = {
shipped_id: SecureRandom.uuid,
shipped_type: 'Unidom::Shipment::Shipped::Mock',
ordinal: 1,
quantity: 10.00
}
shipment_item_2_attributes = {
shipped_id: SecureRandom.uuid,
shipped_type: 'Unidom::Shipment::Shipped::Mock',
ordinal: 2,
quantity: 20.00
}
it_behaves_like 'has_many', model_attributes, :items, Unidom::Shipment::ShipmentItem, [ shipment_item_1_attributes, shipment_item_2_attributes ]
it_behaves_like 'ProgneTapera::EnumCode', described_class.new(model_attributes), :conveyance, Unidom::Shipment::Conveyance
end
end
| 38.558824 | 148 | 0.683829 |
1177425a6a67a5ac0ef6c859a74836718e319dbe | 79 | Rails.application.routes.draw do
mount SwellBot::Engine => "/swell_bot"
end
| 15.8 | 40 | 0.746835 |
5d0a4a0aa1a875c48315604665e0970103d07945 | 2,262 | require 'alert_parser'
require 'faker'
require 'open-uri'
Dir[Pathname(__FILE__).dirname.join('shared/*.rb').to_s].each { |f| require f }
Dir["./spec/support/**/*.rb"].each {|f| require f }
# This file was generated by the `rspec --init` command. Conventionally, all
# specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
# The generated `.rspec` file contains `--require spec_helper` which will cause
# this file to always be loaded, without a need to explicitly require it in any
# files.
#
# Given that it is always loaded, you are encouraged to keep this file as
# light-weight as possible. Requiring heavyweight dependencies from this file
# will add to the boot time of your test suite on EVERY test run, even for an
# individual file that may not need all of that loaded. Instead, consider making
# a separate helper file that requires the additional dependencies and performs
# the additional setup, and require it from the spec files that actually need
# it.
#
# The `.rspec` file also contains a few flags that are not defaults but that
# users commonly want.
#
# See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
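# A minimal sketch of the pattern described above (file names and the capybara
# dependency are hypothetical examples): keep heavyweight requires out of this
# file and load them only from the specs that need them.
#
#   # spec/heavy_helper.rb
#   require 'spec_helper'
#   require 'capybara/rspec'
#
#   # spec/features/some_feature_spec.rb
#   require 'heavy_helper'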
RSpec.configure do |config|
# rspec-expectations config goes here. You can use an alternate
# assertion/expectation library such as wrong or the stdlib/minitest
# assertions if you prefer.
config.expect_with :rspec do |expectations|
# This option will default to `true` in RSpec 4. It makes the `description`
# and `failure_message` of custom matchers include text for helper methods
# defined using `chain`, e.g.:
# be_bigger_than(2).and_smaller_than(4).description
# # => "be bigger than 2 and smaller than 4"
# ...rather than:
# # => "be bigger than 2"
expectations.include_chain_clauses_in_custom_matcher_descriptions = true
end
# rspec-mocks config goes here. You can use an alternate test double
# library (such as bogus or mocha) by changing the `mock_with` option here.
config.mock_with :rspec do |mocks|
# Prevents you from mocking or stubbing a method that does not exist on
# a real object. This is generally recommended, and will default to
# `true` in RSpec 4.
mocks.verify_partial_doubles = false
end
end
| 44.352941 | 80 | 0.733864 |
ed1c20e1641435554d027008e7c03b27243b1708 | 1,476 | # Copyright (c) 2021 Target Brands, Inc. All rights reserved.
#
# Use of this source code is governed by the LICENSE file in this repository.
class Vela < Formula
# repository information
head "https://github.com/go-vela/cli.git"
homepage 'https://github.com/go-vela/cli'
# utility information
version 'v0.10.2'
# macOS
on_macos do
if Hardware::CPU.arm?
url "#{homepage}/releases/download/#{version}/vela_darwin_arm64.tar.gz"
sha256 '29c93600dd09975c30e67bc6134627f206fd543b6d1edfc397e9af090a818870'
else
url "#{homepage}/releases/download/#{version}/vela_darwin_amd64.tar.gz"
sha256 '4c2d467769b61ed16e930c9ed036422a68e60cc5e406a112d9598f29036d42d1'
end
end
# linux
on_linux do
if Hardware::CPU.arm?
if Hardware::CPU.is_64_bit?
url "#{homepage}/releases/download/#{version}/vela_linux_arm64.tar.gz"
sha256 '5e793ec30155dbe29df3c6822018f6e812e243c219f0dc14371106044919eae4'
else
url "#{homepage}/releases/download/#{version}/vela_linux_arm.tar.gz"
sha256 '22d7a12b819a195f674b26d81fa4d415da0b63457fe0f4edb35b1a4523b07dd6'
end
else
url "#{homepage}/releases/download/#{version}/vela_linux_amd64.tar.gz"
sha256 '295b40efcf0dd3f602bd562734ea2657edbb4a1bd0a88a532bacab7e343a13ed'
end
end
# install information
def install
bin.install 'vela'
end
# test information
test do
system "#{bin}/vela", "--version"
end
end
| 29.52 | 81 | 0.72019 |
2198310da7d2ca3bff4e5e6d15b91681fa0ddfff | 3,876 | # frozen_string_literal: true
require 'rails/generators/active_record'
require 'generators/devise/orm_helpers'
module ActiveRecord
module Generators
class DeviseGenerator < ActiveRecord::Generators::Base
argument :attributes, type: :array, default: [], banner: "field:type field:type"
class_option :primary_key_type, type: :string, desc: "The type for primary key"
include Devise::Generators::OrmHelpers
source_root File.expand_path("../templates", __FILE__)
def copy_devise_migration
if (behavior == :invoke && model_exists?) || (behavior == :revoke && migration_exists?(table_name))
migration_template "migration_existing.rb", "#{migration_path}/add_devise_to_#{table_name}.rb", migration_version: migration_version
else
migration_template "migration.rb", "#{migration_path}/devise_create_#{table_name}.rb", migration_version: migration_version
end
end
def generate_model
invoke "active_record:model", [name], migration: false unless model_exists? && behavior == :invoke
end
def inject_devise_content
content = model_contents
class_path = if namespaced?
class_name.to_s.split("::")
else
[class_name]
end
indent_depth = class_path.size - 1
content = content.split("\n").map { |line| " " * indent_depth + line } .join("\n") << "\n"
inject_into_class(model_path, class_path.last, content) if model_exists?
end
def migration_data
<<RUBY
## Database authenticatable
t.string :email, null: false, default: ""
t.string :encrypted_password, null: false, default: ""
## Recoverable
t.string :reset_password_token
t.datetime :reset_password_sent_at
## Rememberable
t.datetime :remember_created_at
## Trackable
# t.integer :sign_in_count, default: 0, null: false
# t.datetime :current_sign_in_at
# t.datetime :last_sign_in_at
# t.#{ip_column} :current_sign_in_ip
# t.#{ip_column} :last_sign_in_ip
## Confirmable
# t.string :confirmation_token
# t.datetime :confirmed_at
# t.datetime :confirmation_sent_at
# t.string :unconfirmed_email # Only if using reconfirmable
## Lockable
# t.integer :failed_attempts, default: 0, null: false # Only if lock strategy is :failed_attempts
# t.string :unlock_token # Only if unlock strategy is :email or :both
# t.datetime :locked_at
RUBY
end
def ip_column
# Padded with spaces so it aligns nicely with the rest of the columns.
"%-8s" % (inet? ? "inet" : "string")
end
def inet?
postgresql?
end
def rails5_and_up?
Rails::VERSION::MAJOR >= 5
end
def rails61_and_up?
Rails::VERSION::MAJOR > 6 || (Rails::VERSION::MAJOR == 6 && Rails::VERSION::MINOR >= 1)
end
def postgresql?
ar_config && ar_config['adapter'] == 'postgresql'
end
def ar_config
if ActiveRecord::Base.configurations.respond_to?(:configs_for)
if rails61_and_up?
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, name: "primary").configuration_hash
else
ActiveRecord::Base.configurations.configs_for(env_name: Rails.env, spec_name: "primary").config
end
else
ActiveRecord::Base.configurations[Rails.env]
end
end
def migration_version
if rails5_and_up?
"[#{Rails::VERSION::MAJOR}.#{Rails::VERSION::MINOR}]"
end
end
def primary_key_type
primary_key_string if rails5_and_up?
end
def primary_key_string
key_string = options[:primary_key_type]
", id: :#{key_string}" if key_string
end
end
end
end
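# A minimal usage sketch for the generator above (assumed invocation; the model
# name is an example):
#
#   rails generate devise User --primary-key-type=uuid
#
# With that flag, #primary_key_string appends `, id: :uuid` to the generated
# create_table call, and #migration_version stamps the migration class with the
# current Rails major.minor (e.g. `[6.1]`).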
| 30.761905 | 142 | 0.640093 |
1ae39c9af549db632c35d3c1ae956b08723326d5 | 1,502 | # app/models/mismo_enum/project_legal_structure_type.rb
# enum
class MismoEnum::ProjectLegalStructureType < MismoEnum::Base
validates_presence_of :name
validates_uniqueness_of :name
def self.description
'Specifies the form of ownership that defines the quality and quantity of'+
' ownership and rights to the individual unit owner.'
end
def self.seed
[[1, 'Condominium', 'A project that is legally formed as a condominium '+
'under the Condominium-Enabling Legislation of the state in which the'+
' project is located.'],
[2, 'Cooperative', 'A project in which a corporation or business trust '+
'holds title to the property and issues shares of stock as evidence '+
'of ownership in the corporation or business trust. The corporation or'+
' business trust grants occupancy rights to the shareholder tenants '+
'through proprietary leases. '],
[3, 'CommonInterestApartment', 'Any project or building that is owned by '+
'several owners as tenants in common or by a home owners association '+
'in which individuals have an undivided interest in a residential '+
'apartment building and land, and are the rights of exclusive '+
'occupancy of a specific apartment in the building.'],
[4, 'Unknown', '']
].each { |id, entry, desc| create(id: id,
name: entry,
description: desc) }
end
end
| 45.515152 | 80 | 0.659121 |
5dde8d38e3e564f3f5f87b0f8fdfbb838c6f66c8 | 342 | class CreateServiceManagers < ActiveRecord::Migration[6.1]
def change
create_table :service_managers do |t|
t.references :service, null: false, foreign_key: true
t.string :name
t.string :identify
t.integer :update_count
t.string :created_by
t.string :updated_by
t.timestamps
end
end
end
| 22.8 | 59 | 0.675439 |
4a83198c7bd3d032f1ed98c55015404048153653 | 90,048 | # frozen_string_literal: true
# WARNING ABOUT GENERATED CODE
#
# This file is generated. See the contributing guide for more information:
# https://github.com/aws/aws-sdk-ruby/blob/master/CONTRIBUTING.md
#
# WARNING ABOUT GENERATED CODE
require 'seahorse/client/plugins/content_length.rb'
require 'aws-sdk-core/plugins/credentials_configuration.rb'
require 'aws-sdk-core/plugins/logging.rb'
require 'aws-sdk-core/plugins/param_converter.rb'
require 'aws-sdk-core/plugins/param_validator.rb'
require 'aws-sdk-core/plugins/user_agent.rb'
require 'aws-sdk-core/plugins/helpful_socket_errors.rb'
require 'aws-sdk-core/plugins/retry_errors.rb'
require 'aws-sdk-core/plugins/global_configuration.rb'
require 'aws-sdk-core/plugins/regional_endpoint.rb'
require 'aws-sdk-core/plugins/endpoint_discovery.rb'
require 'aws-sdk-core/plugins/endpoint_pattern.rb'
require 'aws-sdk-core/plugins/response_paging.rb'
require 'aws-sdk-core/plugins/stub_responses.rb'
require 'aws-sdk-core/plugins/idempotency_token.rb'
require 'aws-sdk-core/plugins/jsonvalue_converter.rb'
require 'aws-sdk-core/plugins/client_metrics_plugin.rb'
require 'aws-sdk-core/plugins/client_metrics_send_plugin.rb'
require 'aws-sdk-core/plugins/transfer_encoding.rb'
require 'aws-sdk-core/plugins/http_checksum.rb'
require 'aws-sdk-core/plugins/signature_v4.rb'
require 'aws-sdk-core/plugins/protocols/rest_json.rb'
Aws::Plugins::GlobalConfiguration.add_identifier(:mediapackage)
module Aws::MediaPackage
# An API client for MediaPackage. To construct a client, you need to configure a `:region` and `:credentials`.
#
# client = Aws::MediaPackage::Client.new(
# region: region_name,
# credentials: credentials,
# # ...
# )
#
# For details on configuring region and credentials see
# the [developer guide](/sdk-for-ruby/v3/developer-guide/setup-config.html).
#
# See {#initialize} for a full list of supported configuration options.
class Client < Seahorse::Client::Base
include Aws::ClientStubs
@identifier = :mediapackage
set_api(ClientApi::API)
add_plugin(Seahorse::Client::Plugins::ContentLength)
add_plugin(Aws::Plugins::CredentialsConfiguration)
add_plugin(Aws::Plugins::Logging)
add_plugin(Aws::Plugins::ParamConverter)
add_plugin(Aws::Plugins::ParamValidator)
add_plugin(Aws::Plugins::UserAgent)
add_plugin(Aws::Plugins::HelpfulSocketErrors)
add_plugin(Aws::Plugins::RetryErrors)
add_plugin(Aws::Plugins::GlobalConfiguration)
add_plugin(Aws::Plugins::RegionalEndpoint)
add_plugin(Aws::Plugins::EndpointDiscovery)
add_plugin(Aws::Plugins::EndpointPattern)
add_plugin(Aws::Plugins::ResponsePaging)
add_plugin(Aws::Plugins::StubResponses)
add_plugin(Aws::Plugins::IdempotencyToken)
add_plugin(Aws::Plugins::JsonvalueConverter)
add_plugin(Aws::Plugins::ClientMetricsPlugin)
add_plugin(Aws::Plugins::ClientMetricsSendPlugin)
add_plugin(Aws::Plugins::TransferEncoding)
add_plugin(Aws::Plugins::HttpChecksum)
add_plugin(Aws::Plugins::SignatureV4)
add_plugin(Aws::Plugins::Protocols::RestJson)
# @overload initialize(options)
# @param [Hash] options
# @option options [required, Aws::CredentialProvider] :credentials
# Your AWS credentials. This can be an instance of any one of the
# following classes:
#
# * `Aws::Credentials` - Used for configuring static, non-refreshing
# credentials.
#
# * `Aws::InstanceProfileCredentials` - Used for loading credentials
# from an EC2 IMDS on an EC2 instance.
#
# * `Aws::SharedCredentials` - Used for loading credentials from a
# shared file, such as `~/.aws/config`.
#
# * `Aws::AssumeRoleCredentials` - Used when you need to assume a role.
#
# When `:credentials` are not configured directly, the following
# locations will be searched for credentials:
#
# * `Aws.config[:credentials]`
# * The `:access_key_id`, `:secret_access_key`, and `:session_token` options.
# * ENV['AWS_ACCESS_KEY_ID'], ENV['AWS_SECRET_ACCESS_KEY']
# * `~/.aws/credentials`
# * `~/.aws/config`
# * EC2 IMDS instance profile - When used by default, the timeouts are
# very aggressive. Construct and pass an instance of
    #     `Aws::InstanceProfileCredentials` to enable retries and extended
# timeouts.
#
# @option options [required, String] :region
# The AWS region to connect to. The configured `:region` is
# used to determine the service `:endpoint`. When not passed,
# a default `:region` is searched for in the following locations:
#
# * `Aws.config[:region]`
# * `ENV['AWS_REGION']`
# * `ENV['AMAZON_REGION']`
# * `ENV['AWS_DEFAULT_REGION']`
# * `~/.aws/credentials`
# * `~/.aws/config`
#
# @option options [String] :access_key_id
#
# @option options [Boolean] :active_endpoint_cache (false)
# When set to `true`, a thread polling for endpoints will be running in
# the background every 60 secs (default). Defaults to `false`.
#
# @option options [Boolean] :adaptive_retry_wait_to_fill (true)
# Used only in `adaptive` retry mode. When true, the request will sleep
    #   until there is sufficient client side capacity to retry the request.
# When false, the request will raise a `RetryCapacityNotAvailableError` and will
# not retry instead of sleeping.
#
# @option options [Boolean] :client_side_monitoring (false)
# When `true`, client-side metrics will be collected for all API requests from
# this client.
#
# @option options [String] :client_side_monitoring_client_id ("")
# Allows you to provide an identifier for this client which will be attached to
# all generated client side metrics. Defaults to an empty string.
#
# @option options [String] :client_side_monitoring_host ("127.0.0.1")
# Allows you to specify the DNS hostname or IPv4 or IPv6 address that the client
# side monitoring agent is running on, where client metrics will be published via UDP.
#
# @option options [Integer] :client_side_monitoring_port (31000)
# Required for publishing client metrics. The port that the client side monitoring
# agent is running on, where client metrics will be published via UDP.
#
# @option options [Aws::ClientSideMonitoring::Publisher] :client_side_monitoring_publisher (Aws::ClientSideMonitoring::Publisher)
# Allows you to provide a custom client-side monitoring publisher class. By default,
# will use the Client Side Monitoring Agent Publisher.
#
# @option options [Boolean] :convert_params (true)
# When `true`, an attempt is made to coerce request parameters into
# the required types.
#
# @option options [Boolean] :correct_clock_skew (true)
# Used only in `standard` and adaptive retry modes. Specifies whether to apply
# a clock skew correction and retry requests with skewed client clocks.
#
# @option options [Boolean] :disable_host_prefix_injection (false)
# Set to true to disable SDK automatically adding host prefix
# to default service endpoint when available.
#
# @option options [String] :endpoint
# The client endpoint is normally constructed from the `:region`
# option. You should only configure an `:endpoint` when connecting
# to test or custom endpoints. This should be a valid HTTP(S) URI.
#
# @option options [Integer] :endpoint_cache_max_entries (1000)
# Used for the maximum size limit of the LRU cache storing endpoints data
# for endpoint discovery enabled operations. Defaults to 1000.
#
# @option options [Integer] :endpoint_cache_max_threads (10)
# Used for the maximum threads in use for polling endpoints to be cached, defaults to 10.
#
# @option options [Integer] :endpoint_cache_poll_interval (60)
# When :endpoint_discovery and :active_endpoint_cache is enabled,
# Use this option to config the time interval in seconds for making
# requests fetching endpoints information. Defaults to 60 sec.
#
# @option options [Boolean] :endpoint_discovery (false)
# When set to `true`, endpoint discovery will be enabled for operations when available.
#
# @option options [Aws::Log::Formatter] :log_formatter (Aws::Log::Formatter.default)
# The log formatter.
#
# @option options [Symbol] :log_level (:info)
# The log level to send messages to the `:logger` at.
#
# @option options [Logger] :logger
# The Logger instance to send log messages to. If this option
# is not set, logging will be disabled.
#
# @option options [Integer] :max_attempts (3)
    #   An integer representing the maximum number of attempts that will be made for
# a single request, including the initial attempt. For example,
# setting this value to 5 will result in a request being retried up to
# 4 times. Used in `standard` and `adaptive` retry modes.
#
# @option options [String] :profile ("default")
# Used when loading credentials from the shared credentials file
# at HOME/.aws/credentials. When not specified, 'default' is used.
#
# @option options [Proc] :retry_backoff
# A proc or lambda used for backoff. Defaults to 2**retries * retry_base_delay.
# This option is only used in the `legacy` retry mode.
#
# @option options [Float] :retry_base_delay (0.3)
# The base delay in seconds used by the default backoff function. This option
# is only used in the `legacy` retry mode.
#
# @option options [Symbol] :retry_jitter (:none)
# A delay randomiser function used by the default backoff function.
# Some predefined functions can be referenced by name - :none, :equal, :full,
# otherwise a Proc that takes and returns a number. This option is only used
# in the `legacy` retry mode.
#
# @see https://www.awsarchitectureblog.com/2015/03/backoff.html
#
# @option options [Integer] :retry_limit (3)
# The maximum number of times to retry failed requests. Only
# ~ 500 level server errors and certain ~ 400 level client errors
# are retried. Generally, these are throttling errors, data
# checksum errors, networking errors, timeout errors, auth errors,
# endpoint discovery, and errors from expired credentials.
# This option is only used in the `legacy` retry mode.
#
# @option options [Integer] :retry_max_delay (0)
# The maximum number of seconds to delay between retries (0 for no limit)
# used by the default backoff function. This option is only used in the
# `legacy` retry mode.
#
# @option options [String] :retry_mode ("legacy")
# Specifies which retry algorithm to use. Values are:
#
# * `legacy` - The pre-existing retry behavior. This is default value if
# no retry mode is provided.
#
# * `standard` - A standardized set of retry rules across the AWS SDKs.
# This includes support for retry quotas, which limit the number of
# unsuccessful retries a client can make.
#
# * `adaptive` - An experimental retry mode that includes all the
# functionality of `standard` mode along with automatic client side
# throttling. This is a provisional mode that may change behavior
# in the future.
#
#
# @option options [String] :secret_access_key
#
# @option options [String] :session_token
#
# @option options [Boolean] :stub_responses (false)
# Causes the client to return stubbed responses. By default
# fake responses are generated and returned. You can specify
# the response data to return or errors to raise by calling
# {ClientStubs#stub_responses}. See {ClientStubs} for more information.
#
# ** Please note ** When response stubbing is enabled, no HTTP
# requests are made, and retries are disabled.
#
# @option options [Boolean] :validate_params (true)
# When `true`, request parameters are validated before
# sending the request.
#
# @option options [URI::HTTP,String] :http_proxy A proxy to send
# requests through. Formatted like 'http://proxy.com:123'.
#
# @option options [Float] :http_open_timeout (15) The number of
# seconds to wait when opening a HTTP session before raising a
# `Timeout::Error`.
#
# @option options [Integer] :http_read_timeout (60) The default
# number of seconds to wait for response data. This value can
# safely be set per-request on the session.
#
# @option options [Float] :http_idle_timeout (5) The number of
# seconds a connection is allowed to sit idle before it is
# considered stale. Stale connections are closed and removed
# from the pool before making a request.
#
# @option options [Float] :http_continue_timeout (1) The number of
# seconds to wait for a 100-continue response before sending the
# request body. This option has no effect unless the request has
# "Expect" header set to "100-continue". Defaults to `nil` which
# disables this behaviour. This value can safely be set per
# request on the session.
#
# @option options [Boolean] :http_wire_trace (false) When `true`,
# HTTP debug output will be sent to the `:logger`.
#
# @option options [Boolean] :ssl_verify_peer (true) When `true`,
# SSL peer certificates are verified when establishing a
# connection.
#
# @option options [String] :ssl_ca_bundle Full path to the SSL
# certificate authority bundle file that should be used when
# verifying peer certificates. If you do not pass
    #   `:ssl_ca_bundle` or `:ssl_ca_directory` the system default
# will be used if available.
#
# @option options [String] :ssl_ca_directory Full path of the
# directory that contains the unbundled SSL certificate
# authority files for verifying peer certificates. If you do
    #   not pass `:ssl_ca_bundle` or `:ssl_ca_directory` the
# system default will be used if available.
#
def initialize(*args)
super
end
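    # A minimal construction sketch tying together a few of the options documented
    # above; the region, retry mode and attempt count are illustrative assumptions.
    #
    #   client = Aws::MediaPackage::Client.new(
    #     region: 'us-east-1',
    #     retry_mode: 'standard',
    #     max_attempts: 5
    #   )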
# @!group API Operations
# Creates a new Channel.
#
# @option params [String] :description
#
# @option params [required, String] :id
#
# @option params [Hash<String,String>] :tags
# A collection of tags associated with a resource
#
# @return [Types::CreateChannelResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateChannelResponse#arn #arn} => String
# * {Types::CreateChannelResponse#description #description} => String
# * {Types::CreateChannelResponse#hls_ingest #hls_ingest} => Types::HlsIngest
# * {Types::CreateChannelResponse#id #id} => String
# * {Types::CreateChannelResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.create_channel({
# description: "__string",
# id: "__string", # required
# tags: {
# "__string" => "__string",
# },
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.description #=> String
# resp.hls_ingest.ingest_endpoints #=> Array
# resp.hls_ingest.ingest_endpoints[0].id #=> String
# resp.hls_ingest.ingest_endpoints[0].password #=> String
# resp.hls_ingest.ingest_endpoints[0].url #=> String
# resp.hls_ingest.ingest_endpoints[0].username #=> String
# resp.id #=> String
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/CreateChannel AWS API Documentation
#
# @overload create_channel(params = {})
# @param [Hash] params ({})
def create_channel(params = {}, options = {})
req = build_request(:create_channel, params)
req.send_request(options)
end
# Creates a new HarvestJob record.
#
# @option params [required, String] :end_time
#
# @option params [required, String] :id
#
# @option params [required, String] :origin_endpoint_id
#
# @option params [required, Types::S3Destination] :s3_destination
# Configuration parameters for where in an S3 bucket to place the
# harvested content
#
# @option params [required, String] :start_time
#
# @return [Types::CreateHarvestJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateHarvestJobResponse#arn #arn} => String
# * {Types::CreateHarvestJobResponse#channel_id #channel_id} => String
# * {Types::CreateHarvestJobResponse#created_at #created_at} => String
# * {Types::CreateHarvestJobResponse#end_time #end_time} => String
# * {Types::CreateHarvestJobResponse#id #id} => String
# * {Types::CreateHarvestJobResponse#origin_endpoint_id #origin_endpoint_id} => String
# * {Types::CreateHarvestJobResponse#s3_destination #s3_destination} => Types::S3Destination
# * {Types::CreateHarvestJobResponse#start_time #start_time} => String
# * {Types::CreateHarvestJobResponse#status #status} => String
#
# @example Request syntax with placeholder values
#
# resp = client.create_harvest_job({
# end_time: "__string", # required
# id: "__string", # required
# origin_endpoint_id: "__string", # required
# s3_destination: { # required
# bucket_name: "__string", # required
# manifest_key: "__string", # required
# role_arn: "__string", # required
# },
# start_time: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.channel_id #=> String
# resp.created_at #=> String
# resp.end_time #=> String
# resp.id #=> String
# resp.origin_endpoint_id #=> String
# resp.s3_destination.bucket_name #=> String
# resp.s3_destination.manifest_key #=> String
# resp.s3_destination.role_arn #=> String
# resp.start_time #=> String
# resp.status #=> String, one of "IN_PROGRESS", "SUCCEEDED", "FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/CreateHarvestJob AWS API Documentation
#
# @overload create_harvest_job(params = {})
# @param [Hash] params ({})
def create_harvest_job(params = {}, options = {})
req = build_request(:create_harvest_job, params)
req.send_request(options)
end
# Creates a new OriginEndpoint record.
#
# @option params [Types::Authorization] :authorization
# CDN Authorization credentials
#
# @option params [required, String] :channel_id
#
# @option params [Types::CmafPackageCreateOrUpdateParameters] :cmaf_package
# A Common Media Application Format (CMAF) packaging configuration.
#
# @option params [Types::DashPackage] :dash_package
# A Dynamic Adaptive Streaming over HTTP (DASH) packaging configuration.
#
# @option params [String] :description
#
# @option params [Types::HlsPackage] :hls_package
# An HTTP Live Streaming (HLS) packaging configuration.
#
# @option params [required, String] :id
#
# @option params [String] :manifest_name
#
# @option params [Types::MssPackage] :mss_package
# A Microsoft Smooth Streaming (MSS) packaging configuration.
#
# @option params [String] :origination
#
# @option params [Integer] :startover_window_seconds
#
# @option params [Hash<String,String>] :tags
# A collection of tags associated with a resource
#
# @option params [Integer] :time_delay_seconds
#
# @option params [Array<String>] :whitelist
#
# @return [Types::CreateOriginEndpointResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::CreateOriginEndpointResponse#arn #arn} => String
# * {Types::CreateOriginEndpointResponse#authorization #authorization} => Types::Authorization
# * {Types::CreateOriginEndpointResponse#channel_id #channel_id} => String
# * {Types::CreateOriginEndpointResponse#cmaf_package #cmaf_package} => Types::CmafPackage
# * {Types::CreateOriginEndpointResponse#dash_package #dash_package} => Types::DashPackage
# * {Types::CreateOriginEndpointResponse#description #description} => String
# * {Types::CreateOriginEndpointResponse#hls_package #hls_package} => Types::HlsPackage
# * {Types::CreateOriginEndpointResponse#id #id} => String
# * {Types::CreateOriginEndpointResponse#manifest_name #manifest_name} => String
# * {Types::CreateOriginEndpointResponse#mss_package #mss_package} => Types::MssPackage
# * {Types::CreateOriginEndpointResponse#origination #origination} => String
# * {Types::CreateOriginEndpointResponse#startover_window_seconds #startover_window_seconds} => Integer
# * {Types::CreateOriginEndpointResponse#tags #tags} => Hash<String,String>
# * {Types::CreateOriginEndpointResponse#time_delay_seconds #time_delay_seconds} => Integer
# * {Types::CreateOriginEndpointResponse#url #url} => String
# * {Types::CreateOriginEndpointResponse#whitelist #whitelist} => Array<String>
#
# @example Request syntax with placeholder values
#
# resp = client.create_origin_endpoint({
# authorization: {
# cdn_identifier_secret: "__string", # required
# secrets_role_arn: "__string", # required
# },
# channel_id: "__string", # required
# cmaf_package: {
# encryption: {
# key_rotation_interval_seconds: 1,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# hls_manifests: [
# {
# ad_markers: "NONE", # accepts NONE, SCTE35_ENHANCED, PASSTHROUGH, DATERANGE
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# id: "__string", # required
# include_iframe_only_stream: false,
# manifest_name: "__string",
# playlist_type: "NONE", # accepts NONE, EVENT, VOD
# playlist_window_seconds: 1,
# program_date_time_interval_seconds: 1,
# },
# ],
# segment_duration_seconds: 1,
# segment_prefix: "__string",
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# },
# dash_package: {
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# encryption: {
# key_rotation_interval_seconds: 1,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# manifest_layout: "FULL", # accepts FULL, COMPACT
# manifest_window_seconds: 1,
# min_buffer_time_seconds: 1,
# min_update_period_seconds: 1,
# period_triggers: ["ADS"], # accepts ADS
# profile: "NONE", # accepts NONE, HBBTV_1_5
# segment_duration_seconds: 1,
# segment_template_format: "NUMBER_WITH_TIMELINE", # accepts NUMBER_WITH_TIMELINE, TIME_WITH_TIMELINE, NUMBER_WITH_DURATION
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# suggested_presentation_delay_seconds: 1,
# },
# description: "__string",
# hls_package: {
# ad_markers: "NONE", # accepts NONE, SCTE35_ENHANCED, PASSTHROUGH, DATERANGE
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# encryption: {
# constant_initialization_vector: "__string",
# encryption_method: "AES_128", # accepts AES_128, SAMPLE_AES
# key_rotation_interval_seconds: 1,
# repeat_ext_x_key: false,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# include_iframe_only_stream: false,
# playlist_type: "NONE", # accepts NONE, EVENT, VOD
# playlist_window_seconds: 1,
# program_date_time_interval_seconds: 1,
# segment_duration_seconds: 1,
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# use_audio_rendition_group: false,
# },
# id: "__string", # required
# manifest_name: "__string",
# mss_package: {
# encryption: {
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# manifest_window_seconds: 1,
# segment_duration_seconds: 1,
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# },
# origination: "ALLOW", # accepts ALLOW, DENY
# startover_window_seconds: 1,
# tags: {
# "__string" => "__string",
# },
# time_delay_seconds: 1,
# whitelist: ["__string"],
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.authorization.cdn_identifier_secret #=> String
# resp.authorization.secrets_role_arn #=> String
# resp.channel_id #=> String
# resp.cmaf_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.cmaf_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.resource_id #=> String
# resp.cmaf_package.encryption.speke_key_provider.role_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.system_ids #=> Array
# resp.cmaf_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.cmaf_package.encryption.speke_key_provider.url #=> String
# resp.cmaf_package.hls_manifests #=> Array
# resp.cmaf_package.hls_manifests[0].ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.cmaf_package.hls_manifests[0].id #=> String
# resp.cmaf_package.hls_manifests[0].include_iframe_only_stream #=> Boolean
# resp.cmaf_package.hls_manifests[0].manifest_name #=> String
# resp.cmaf_package.hls_manifests[0].playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.cmaf_package.hls_manifests[0].playlist_window_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].program_date_time_interval_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].url #=> String
# resp.cmaf_package.segment_duration_seconds #=> Integer
# resp.cmaf_package.segment_prefix #=> String
# resp.cmaf_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.ad_triggers #=> Array
# resp.dash_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.dash_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.dash_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.dash_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.dash_package.encryption.speke_key_provider.resource_id #=> String
# resp.dash_package.encryption.speke_key_provider.role_arn #=> String
# resp.dash_package.encryption.speke_key_provider.system_ids #=> Array
# resp.dash_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.dash_package.encryption.speke_key_provider.url #=> String
# resp.dash_package.manifest_layout #=> String, one of "FULL", "COMPACT"
# resp.dash_package.manifest_window_seconds #=> Integer
# resp.dash_package.min_buffer_time_seconds #=> Integer
# resp.dash_package.min_update_period_seconds #=> Integer
# resp.dash_package.period_triggers #=> Array
# resp.dash_package.period_triggers[0] #=> String, one of "ADS"
# resp.dash_package.profile #=> String, one of "NONE", "HBBTV_1_5"
# resp.dash_package.segment_duration_seconds #=> Integer
# resp.dash_package.segment_template_format #=> String, one of "NUMBER_WITH_TIMELINE", "TIME_WITH_TIMELINE", "NUMBER_WITH_DURATION"
# resp.dash_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.suggested_presentation_delay_seconds #=> Integer
# resp.description #=> String
# resp.hls_package.ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.hls_package.ad_triggers #=> Array
# resp.hls_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.hls_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.hls_package.encryption.constant_initialization_vector #=> String
# resp.hls_package.encryption.encryption_method #=> String, one of "AES_128", "SAMPLE_AES"
# resp.hls_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.hls_package.encryption.repeat_ext_x_key #=> Boolean
# resp.hls_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.hls_package.encryption.speke_key_provider.resource_id #=> String
# resp.hls_package.encryption.speke_key_provider.role_arn #=> String
# resp.hls_package.encryption.speke_key_provider.system_ids #=> Array
# resp.hls_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.hls_package.encryption.speke_key_provider.url #=> String
# resp.hls_package.include_iframe_only_stream #=> Boolean
# resp.hls_package.playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.hls_package.playlist_window_seconds #=> Integer
# resp.hls_package.program_date_time_interval_seconds #=> Integer
# resp.hls_package.segment_duration_seconds #=> Integer
# resp.hls_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.hls_package.use_audio_rendition_group #=> Boolean
# resp.id #=> String
# resp.manifest_name #=> String
# resp.mss_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.mss_package.encryption.speke_key_provider.resource_id #=> String
# resp.mss_package.encryption.speke_key_provider.role_arn #=> String
# resp.mss_package.encryption.speke_key_provider.system_ids #=> Array
# resp.mss_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.mss_package.encryption.speke_key_provider.url #=> String
# resp.mss_package.manifest_window_seconds #=> Integer
# resp.mss_package.segment_duration_seconds #=> Integer
# resp.mss_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origination #=> String, one of "ALLOW", "DENY"
# resp.startover_window_seconds #=> Integer
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
# resp.time_delay_seconds #=> Integer
# resp.url #=> String
# resp.whitelist #=> Array
# resp.whitelist[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/CreateOriginEndpoint AWS API Documentation
#
# @overload create_origin_endpoint(params = {})
# @param [Hash] params ({})
def create_origin_endpoint(params = {}, options = {})
req = build_request(:create_origin_endpoint, params)
req.send_request(options)
end
# Deletes an existing Channel.
#
# @option params [required, String] :id
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_channel({
# id: "__string", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/DeleteChannel AWS API Documentation
#
# @overload delete_channel(params = {})
# @param [Hash] params ({})
def delete_channel(params = {}, options = {})
req = build_request(:delete_channel, params)
req.send_request(options)
end
# Deletes an existing OriginEndpoint.
#
# @option params [required, String] :id
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.delete_origin_endpoint({
# id: "__string", # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/DeleteOriginEndpoint AWS API Documentation
#
# @overload delete_origin_endpoint(params = {})
# @param [Hash] params ({})
def delete_origin_endpoint(params = {}, options = {})
req = build_request(:delete_origin_endpoint, params)
req.send_request(options)
end
# Gets details about a Channel.
#
# @option params [required, String] :id
#
# @return [Types::DescribeChannelResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeChannelResponse#arn #arn} => String
# * {Types::DescribeChannelResponse#description #description} => String
# * {Types::DescribeChannelResponse#hls_ingest #hls_ingest} => Types::HlsIngest
# * {Types::DescribeChannelResponse#id #id} => String
# * {Types::DescribeChannelResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.describe_channel({
# id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.description #=> String
# resp.hls_ingest.ingest_endpoints #=> Array
# resp.hls_ingest.ingest_endpoints[0].id #=> String
# resp.hls_ingest.ingest_endpoints[0].password #=> String
# resp.hls_ingest.ingest_endpoints[0].url #=> String
# resp.hls_ingest.ingest_endpoints[0].username #=> String
# resp.id #=> String
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/DescribeChannel AWS API Documentation
#
# @overload describe_channel(params = {})
# @param [Hash] params ({})
def describe_channel(params = {}, options = {})
req = build_request(:describe_channel, params)
req.send_request(options)
end
# Gets details about an existing HarvestJob.
#
# @option params [required, String] :id
#
# @return [Types::DescribeHarvestJobResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeHarvestJobResponse#arn #arn} => String
# * {Types::DescribeHarvestJobResponse#channel_id #channel_id} => String
# * {Types::DescribeHarvestJobResponse#created_at #created_at} => String
# * {Types::DescribeHarvestJobResponse#end_time #end_time} => String
# * {Types::DescribeHarvestJobResponse#id #id} => String
# * {Types::DescribeHarvestJobResponse#origin_endpoint_id #origin_endpoint_id} => String
# * {Types::DescribeHarvestJobResponse#s3_destination #s3_destination} => Types::S3Destination
# * {Types::DescribeHarvestJobResponse#start_time #start_time} => String
# * {Types::DescribeHarvestJobResponse#status #status} => String
#
# @example Request syntax with placeholder values
#
# resp = client.describe_harvest_job({
# id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.channel_id #=> String
# resp.created_at #=> String
# resp.end_time #=> String
# resp.id #=> String
# resp.origin_endpoint_id #=> String
# resp.s3_destination.bucket_name #=> String
# resp.s3_destination.manifest_key #=> String
# resp.s3_destination.role_arn #=> String
# resp.start_time #=> String
# resp.status #=> String, one of "IN_PROGRESS", "SUCCEEDED", "FAILED"
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/DescribeHarvestJob AWS API Documentation
#
# @overload describe_harvest_job(params = {})
# @param [Hash] params ({})
def describe_harvest_job(params = {}, options = {})
req = build_request(:describe_harvest_job, params)
req.send_request(options)
end
# Gets details about an existing OriginEndpoint.
#
# @option params [required, String] :id
#
# @return [Types::DescribeOriginEndpointResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::DescribeOriginEndpointResponse#arn #arn} => String
# * {Types::DescribeOriginEndpointResponse#authorization #authorization} => Types::Authorization
# * {Types::DescribeOriginEndpointResponse#channel_id #channel_id} => String
# * {Types::DescribeOriginEndpointResponse#cmaf_package #cmaf_package} => Types::CmafPackage
# * {Types::DescribeOriginEndpointResponse#dash_package #dash_package} => Types::DashPackage
# * {Types::DescribeOriginEndpointResponse#description #description} => String
# * {Types::DescribeOriginEndpointResponse#hls_package #hls_package} => Types::HlsPackage
# * {Types::DescribeOriginEndpointResponse#id #id} => String
# * {Types::DescribeOriginEndpointResponse#manifest_name #manifest_name} => String
# * {Types::DescribeOriginEndpointResponse#mss_package #mss_package} => Types::MssPackage
# * {Types::DescribeOriginEndpointResponse#origination #origination} => String
# * {Types::DescribeOriginEndpointResponse#startover_window_seconds #startover_window_seconds} => Integer
# * {Types::DescribeOriginEndpointResponse#tags #tags} => Hash<String,String>
# * {Types::DescribeOriginEndpointResponse#time_delay_seconds #time_delay_seconds} => Integer
# * {Types::DescribeOriginEndpointResponse#url #url} => String
# * {Types::DescribeOriginEndpointResponse#whitelist #whitelist} => Array<String>
#
# @example Request syntax with placeholder values
#
# resp = client.describe_origin_endpoint({
# id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.authorization.cdn_identifier_secret #=> String
# resp.authorization.secrets_role_arn #=> String
# resp.channel_id #=> String
# resp.cmaf_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.cmaf_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.resource_id #=> String
# resp.cmaf_package.encryption.speke_key_provider.role_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.system_ids #=> Array
# resp.cmaf_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.cmaf_package.encryption.speke_key_provider.url #=> String
# resp.cmaf_package.hls_manifests #=> Array
# resp.cmaf_package.hls_manifests[0].ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.cmaf_package.hls_manifests[0].id #=> String
# resp.cmaf_package.hls_manifests[0].include_iframe_only_stream #=> Boolean
# resp.cmaf_package.hls_manifests[0].manifest_name #=> String
# resp.cmaf_package.hls_manifests[0].playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.cmaf_package.hls_manifests[0].playlist_window_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].program_date_time_interval_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].url #=> String
# resp.cmaf_package.segment_duration_seconds #=> Integer
# resp.cmaf_package.segment_prefix #=> String
# resp.cmaf_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.ad_triggers #=> Array
# resp.dash_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.dash_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.dash_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.dash_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.dash_package.encryption.speke_key_provider.resource_id #=> String
# resp.dash_package.encryption.speke_key_provider.role_arn #=> String
# resp.dash_package.encryption.speke_key_provider.system_ids #=> Array
# resp.dash_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.dash_package.encryption.speke_key_provider.url #=> String
# resp.dash_package.manifest_layout #=> String, one of "FULL", "COMPACT"
# resp.dash_package.manifest_window_seconds #=> Integer
# resp.dash_package.min_buffer_time_seconds #=> Integer
# resp.dash_package.min_update_period_seconds #=> Integer
# resp.dash_package.period_triggers #=> Array
# resp.dash_package.period_triggers[0] #=> String, one of "ADS"
# resp.dash_package.profile #=> String, one of "NONE", "HBBTV_1_5"
# resp.dash_package.segment_duration_seconds #=> Integer
# resp.dash_package.segment_template_format #=> String, one of "NUMBER_WITH_TIMELINE", "TIME_WITH_TIMELINE", "NUMBER_WITH_DURATION"
# resp.dash_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.suggested_presentation_delay_seconds #=> Integer
# resp.description #=> String
# resp.hls_package.ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.hls_package.ad_triggers #=> Array
# resp.hls_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.hls_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.hls_package.encryption.constant_initialization_vector #=> String
# resp.hls_package.encryption.encryption_method #=> String, one of "AES_128", "SAMPLE_AES"
# resp.hls_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.hls_package.encryption.repeat_ext_x_key #=> Boolean
# resp.hls_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.hls_package.encryption.speke_key_provider.resource_id #=> String
# resp.hls_package.encryption.speke_key_provider.role_arn #=> String
# resp.hls_package.encryption.speke_key_provider.system_ids #=> Array
# resp.hls_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.hls_package.encryption.speke_key_provider.url #=> String
# resp.hls_package.include_iframe_only_stream #=> Boolean
# resp.hls_package.playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.hls_package.playlist_window_seconds #=> Integer
# resp.hls_package.program_date_time_interval_seconds #=> Integer
# resp.hls_package.segment_duration_seconds #=> Integer
# resp.hls_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.hls_package.use_audio_rendition_group #=> Boolean
# resp.id #=> String
# resp.manifest_name #=> String
# resp.mss_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.mss_package.encryption.speke_key_provider.resource_id #=> String
# resp.mss_package.encryption.speke_key_provider.role_arn #=> String
# resp.mss_package.encryption.speke_key_provider.system_ids #=> Array
# resp.mss_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.mss_package.encryption.speke_key_provider.url #=> String
# resp.mss_package.manifest_window_seconds #=> Integer
# resp.mss_package.segment_duration_seconds #=> Integer
# resp.mss_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origination #=> String, one of "ALLOW", "DENY"
# resp.startover_window_seconds #=> Integer
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
# resp.time_delay_seconds #=> Integer
# resp.url #=> String
# resp.whitelist #=> Array
# resp.whitelist[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/DescribeOriginEndpoint AWS API Documentation
#
# @overload describe_origin_endpoint(params = {})
# @param [Hash] params ({})
def describe_origin_endpoint(params = {}, options = {})
req = build_request(:describe_origin_endpoint, params)
req.send_request(options)
end
# Returns a collection of Channels.
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListChannelsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListChannelsResponse#channels #channels} => Array<Types::Channel>
# * {Types::ListChannelsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
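    #
    # Because the response pages, iterating it yields one page per underlying
    # API call. A minimal sketch (assumes the standard Aws::PageableResponse
    # behaviour; the printed ids are illustrative), which applies equally to
    # the other list_* operations below:
    #
    #   client.list_channels(max_results: 10).each do |page|
    #     page.channels.each { |channel| puts channel.id }
    #   end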
#
# @example Request syntax with placeholder values
#
# resp = client.list_channels({
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.channels #=> Array
# resp.channels[0].arn #=> String
# resp.channels[0].description #=> String
# resp.channels[0].hls_ingest.ingest_endpoints #=> Array
# resp.channels[0].hls_ingest.ingest_endpoints[0].id #=> String
# resp.channels[0].hls_ingest.ingest_endpoints[0].password #=> String
# resp.channels[0].hls_ingest.ingest_endpoints[0].url #=> String
# resp.channels[0].hls_ingest.ingest_endpoints[0].username #=> String
# resp.channels[0].id #=> String
# resp.channels[0].tags #=> Hash
# resp.channels[0].tags["__string"] #=> String
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/ListChannels AWS API Documentation
#
# @overload list_channels(params = {})
# @param [Hash] params ({})
def list_channels(params = {}, options = {})
req = build_request(:list_channels, params)
req.send_request(options)
end
# Returns a collection of HarvestJob records.
#
# @option params [String] :include_channel_id
#
# @option params [String] :include_status
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListHarvestJobsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListHarvestJobsResponse#harvest_jobs #harvest_jobs} => Array<Types::HarvestJob>
# * {Types::ListHarvestJobsResponse#next_token #next_token} => String
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_harvest_jobs({
# include_channel_id: "__string",
# include_status: "__string",
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.harvest_jobs #=> Array
# resp.harvest_jobs[0].arn #=> String
# resp.harvest_jobs[0].channel_id #=> String
# resp.harvest_jobs[0].created_at #=> String
# resp.harvest_jobs[0].end_time #=> String
# resp.harvest_jobs[0].id #=> String
# resp.harvest_jobs[0].origin_endpoint_id #=> String
# resp.harvest_jobs[0].s3_destination.bucket_name #=> String
# resp.harvest_jobs[0].s3_destination.manifest_key #=> String
# resp.harvest_jobs[0].s3_destination.role_arn #=> String
# resp.harvest_jobs[0].start_time #=> String
# resp.harvest_jobs[0].status #=> String, one of "IN_PROGRESS", "SUCCEEDED", "FAILED"
# resp.next_token #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/ListHarvestJobs AWS API Documentation
#
# @overload list_harvest_jobs(params = {})
# @param [Hash] params ({})
def list_harvest_jobs(params = {}, options = {})
req = build_request(:list_harvest_jobs, params)
req.send_request(options)
end
# Returns a collection of OriginEndpoint records.
#
# @option params [String] :channel_id
#
# @option params [Integer] :max_results
#
# @option params [String] :next_token
#
# @return [Types::ListOriginEndpointsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListOriginEndpointsResponse#next_token #next_token} => String
# * {Types::ListOriginEndpointsResponse#origin_endpoints #origin_endpoints} => Array<Types::OriginEndpoint>
#
# The returned {Seahorse::Client::Response response} is a pageable response and is Enumerable. For details on usage see {Aws::PageableResponse PageableResponse}.
#
# @example Request syntax with placeholder values
#
# resp = client.list_origin_endpoints({
# channel_id: "__string",
# max_results: 1,
# next_token: "__string",
# })
#
# @example Response structure
#
# resp.next_token #=> String
# resp.origin_endpoints #=> Array
# resp.origin_endpoints[0].arn #=> String
# resp.origin_endpoints[0].authorization.cdn_identifier_secret #=> String
# resp.origin_endpoints[0].authorization.secrets_role_arn #=> String
# resp.origin_endpoints[0].channel_id #=> String
# resp.origin_endpoints[0].cmaf_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.resource_id #=> String
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.role_arn #=> String
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.system_ids #=> Array
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.origin_endpoints[0].cmaf_package.encryption.speke_key_provider.url #=> String
# resp.origin_endpoints[0].cmaf_package.hls_manifests #=> Array
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].id #=> String
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].include_iframe_only_stream #=> Boolean
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].manifest_name #=> String
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].playlist_window_seconds #=> Integer
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].program_date_time_interval_seconds #=> Integer
# resp.origin_endpoints[0].cmaf_package.hls_manifests[0].url #=> String
# resp.origin_endpoints[0].cmaf_package.segment_duration_seconds #=> Integer
# resp.origin_endpoints[0].cmaf_package.segment_prefix #=> String
# resp.origin_endpoints[0].cmaf_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].cmaf_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].cmaf_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origin_endpoints[0].dash_package.ad_triggers #=> Array
# resp.origin_endpoints[0].dash_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.origin_endpoints[0].dash_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.origin_endpoints[0].dash_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.resource_id #=> String
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.role_arn #=> String
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.system_ids #=> Array
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.origin_endpoints[0].dash_package.encryption.speke_key_provider.url #=> String
# resp.origin_endpoints[0].dash_package.manifest_layout #=> String, one of "FULL", "COMPACT"
# resp.origin_endpoints[0].dash_package.manifest_window_seconds #=> Integer
# resp.origin_endpoints[0].dash_package.min_buffer_time_seconds #=> Integer
# resp.origin_endpoints[0].dash_package.min_update_period_seconds #=> Integer
# resp.origin_endpoints[0].dash_package.period_triggers #=> Array
# resp.origin_endpoints[0].dash_package.period_triggers[0] #=> String, one of "ADS"
# resp.origin_endpoints[0].dash_package.profile #=> String, one of "NONE", "HBBTV_1_5"
# resp.origin_endpoints[0].dash_package.segment_duration_seconds #=> Integer
# resp.origin_endpoints[0].dash_package.segment_template_format #=> String, one of "NUMBER_WITH_TIMELINE", "TIME_WITH_TIMELINE", "NUMBER_WITH_DURATION"
# resp.origin_endpoints[0].dash_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].dash_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].dash_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origin_endpoints[0].dash_package.suggested_presentation_delay_seconds #=> Integer
# resp.origin_endpoints[0].description #=> String
# resp.origin_endpoints[0].hls_package.ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.origin_endpoints[0].hls_package.ad_triggers #=> Array
# resp.origin_endpoints[0].hls_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.origin_endpoints[0].hls_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.origin_endpoints[0].hls_package.encryption.constant_initialization_vector #=> String
# resp.origin_endpoints[0].hls_package.encryption.encryption_method #=> String, one of "AES_128", "SAMPLE_AES"
# resp.origin_endpoints[0].hls_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.origin_endpoints[0].hls_package.encryption.repeat_ext_x_key #=> Boolean
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.resource_id #=> String
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.role_arn #=> String
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.system_ids #=> Array
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.origin_endpoints[0].hls_package.encryption.speke_key_provider.url #=> String
# resp.origin_endpoints[0].hls_package.include_iframe_only_stream #=> Boolean
# resp.origin_endpoints[0].hls_package.playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.origin_endpoints[0].hls_package.playlist_window_seconds #=> Integer
# resp.origin_endpoints[0].hls_package.program_date_time_interval_seconds #=> Integer
# resp.origin_endpoints[0].hls_package.segment_duration_seconds #=> Integer
# resp.origin_endpoints[0].hls_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].hls_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].hls_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origin_endpoints[0].hls_package.use_audio_rendition_group #=> Boolean
# resp.origin_endpoints[0].id #=> String
# resp.origin_endpoints[0].manifest_name #=> String
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.resource_id #=> String
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.role_arn #=> String
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.system_ids #=> Array
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.origin_endpoints[0].mss_package.encryption.speke_key_provider.url #=> String
# resp.origin_endpoints[0].mss_package.manifest_window_seconds #=> Integer
# resp.origin_endpoints[0].mss_package.segment_duration_seconds #=> Integer
# resp.origin_endpoints[0].mss_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].mss_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.origin_endpoints[0].mss_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origin_endpoints[0].origination #=> String, one of "ALLOW", "DENY"
# resp.origin_endpoints[0].startover_window_seconds #=> Integer
# resp.origin_endpoints[0].tags #=> Hash
# resp.origin_endpoints[0].tags["__string"] #=> String
# resp.origin_endpoints[0].time_delay_seconds #=> Integer
# resp.origin_endpoints[0].url #=> String
# resp.origin_endpoints[0].whitelist #=> Array
# resp.origin_endpoints[0].whitelist[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/ListOriginEndpoints AWS API Documentation
#
# @overload list_origin_endpoints(params = {})
# @param [Hash] params ({})
def list_origin_endpoints(params = {}, options = {})
req = build_request(:list_origin_endpoints, params)
req.send_request(options)
end
# @option params [required, String] :resource_arn
#
# @return [Types::ListTagsForResourceResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::ListTagsForResourceResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.list_tags_for_resource({
# resource_arn: "__string", # required
# })
#
# @example Response structure
#
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/ListTagsForResource AWS API Documentation
#
# @overload list_tags_for_resource(params = {})
# @param [Hash] params ({})
def list_tags_for_resource(params = {}, options = {})
req = build_request(:list_tags_for_resource, params)
req.send_request(options)
end
    # Changes the Channel's first IngestEndpoint's username and password.
    # WARNING: This API is deprecated. Please use
    # RotateIngestEndpointCredentials instead.
#
# @option params [required, String] :id
#
# @return [Types::RotateChannelCredentialsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::RotateChannelCredentialsResponse#arn #arn} => String
# * {Types::RotateChannelCredentialsResponse#description #description} => String
# * {Types::RotateChannelCredentialsResponse#hls_ingest #hls_ingest} => Types::HlsIngest
# * {Types::RotateChannelCredentialsResponse#id #id} => String
# * {Types::RotateChannelCredentialsResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.rotate_channel_credentials({
# id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.description #=> String
# resp.hls_ingest.ingest_endpoints #=> Array
# resp.hls_ingest.ingest_endpoints[0].id #=> String
# resp.hls_ingest.ingest_endpoints[0].password #=> String
# resp.hls_ingest.ingest_endpoints[0].url #=> String
# resp.hls_ingest.ingest_endpoints[0].username #=> String
# resp.id #=> String
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/RotateChannelCredentials AWS API Documentation
#
# @overload rotate_channel_credentials(params = {})
# @param [Hash] params ({})
def rotate_channel_credentials(params = {}, options = {})
req = build_request(:rotate_channel_credentials, params)
req.send_request(options)
end
# Rotate the IngestEndpoint's username and password, as specified by
# the IngestEndpoint's id.
#
# @option params [required, String] :id
#
# @option params [required, String] :ingest_endpoint_id
#
# @return [Types::RotateIngestEndpointCredentialsResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::RotateIngestEndpointCredentialsResponse#arn #arn} => String
# * {Types::RotateIngestEndpointCredentialsResponse#description #description} => String
# * {Types::RotateIngestEndpointCredentialsResponse#hls_ingest #hls_ingest} => Types::HlsIngest
# * {Types::RotateIngestEndpointCredentialsResponse#id #id} => String
# * {Types::RotateIngestEndpointCredentialsResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.rotate_ingest_endpoint_credentials({
# id: "__string", # required
# ingest_endpoint_id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.description #=> String
# resp.hls_ingest.ingest_endpoints #=> Array
# resp.hls_ingest.ingest_endpoints[0].id #=> String
# resp.hls_ingest.ingest_endpoints[0].password #=> String
# resp.hls_ingest.ingest_endpoints[0].url #=> String
# resp.hls_ingest.ingest_endpoints[0].username #=> String
# resp.id #=> String
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/RotateIngestEndpointCredentials AWS API Documentation
#
# @overload rotate_ingest_endpoint_credentials(params = {})
# @param [Hash] params ({})
def rotate_ingest_endpoint_credentials(params = {}, options = {})
req = build_request(:rotate_ingest_endpoint_credentials, params)
req.send_request(options)
end
# @option params [required, String] :resource_arn
#
# @option params [required, Hash<String,String>] :tags
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.tag_resource({
# resource_arn: "__string", # required
# tags: { # required
# "__string" => "__string",
# },
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/TagResource AWS API Documentation
#
# @overload tag_resource(params = {})
# @param [Hash] params ({})
def tag_resource(params = {}, options = {})
req = build_request(:tag_resource, params)
req.send_request(options)
end
# @option params [required, String] :resource_arn
#
# @option params [required, Array<String>] :tag_keys
#
# @return [Struct] Returns an empty {Seahorse::Client::Response response}.
#
# @example Request syntax with placeholder values
#
# resp = client.untag_resource({
# resource_arn: "__string", # required
# tag_keys: ["__string"], # required
# })
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/UntagResource AWS API Documentation
#
# @overload untag_resource(params = {})
# @param [Hash] params ({})
def untag_resource(params = {}, options = {})
req = build_request(:untag_resource, params)
req.send_request(options)
end
# Updates an existing Channel.
#
# @option params [String] :description
#
# @option params [required, String] :id
#
# @return [Types::UpdateChannelResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateChannelResponse#arn #arn} => String
# * {Types::UpdateChannelResponse#description #description} => String
# * {Types::UpdateChannelResponse#hls_ingest #hls_ingest} => Types::HlsIngest
# * {Types::UpdateChannelResponse#id #id} => String
# * {Types::UpdateChannelResponse#tags #tags} => Hash<String,String>
#
# @example Request syntax with placeholder values
#
# resp = client.update_channel({
# description: "__string",
# id: "__string", # required
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.description #=> String
# resp.hls_ingest.ingest_endpoints #=> Array
# resp.hls_ingest.ingest_endpoints[0].id #=> String
# resp.hls_ingest.ingest_endpoints[0].password #=> String
# resp.hls_ingest.ingest_endpoints[0].url #=> String
# resp.hls_ingest.ingest_endpoints[0].username #=> String
# resp.id #=> String
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/UpdateChannel AWS API Documentation
#
# @overload update_channel(params = {})
# @param [Hash] params ({})
def update_channel(params = {}, options = {})
req = build_request(:update_channel, params)
req.send_request(options)
end
# Updates an existing OriginEndpoint.
#
# @option params [Types::Authorization] :authorization
# CDN Authorization credentials
#
# @option params [Types::CmafPackageCreateOrUpdateParameters] :cmaf_package
# A Common Media Application Format (CMAF) packaging configuration.
#
# @option params [Types::DashPackage] :dash_package
# A Dynamic Adaptive Streaming over HTTP (DASH) packaging configuration.
#
# @option params [String] :description
#
# @option params [Types::HlsPackage] :hls_package
# An HTTP Live Streaming (HLS) packaging configuration.
#
# @option params [required, String] :id
#
# @option params [String] :manifest_name
#
# @option params [Types::MssPackage] :mss_package
# A Microsoft Smooth Streaming (MSS) packaging configuration.
#
# @option params [String] :origination
#
# @option params [Integer] :startover_window_seconds
#
# @option params [Integer] :time_delay_seconds
#
# @option params [Array<String>] :whitelist
#
# @return [Types::UpdateOriginEndpointResponse] Returns a {Seahorse::Client::Response response} object which responds to the following methods:
#
# * {Types::UpdateOriginEndpointResponse#arn #arn} => String
# * {Types::UpdateOriginEndpointResponse#authorization #authorization} => Types::Authorization
# * {Types::UpdateOriginEndpointResponse#channel_id #channel_id} => String
# * {Types::UpdateOriginEndpointResponse#cmaf_package #cmaf_package} => Types::CmafPackage
# * {Types::UpdateOriginEndpointResponse#dash_package #dash_package} => Types::DashPackage
# * {Types::UpdateOriginEndpointResponse#description #description} => String
# * {Types::UpdateOriginEndpointResponse#hls_package #hls_package} => Types::HlsPackage
# * {Types::UpdateOriginEndpointResponse#id #id} => String
# * {Types::UpdateOriginEndpointResponse#manifest_name #manifest_name} => String
# * {Types::UpdateOriginEndpointResponse#mss_package #mss_package} => Types::MssPackage
# * {Types::UpdateOriginEndpointResponse#origination #origination} => String
# * {Types::UpdateOriginEndpointResponse#startover_window_seconds #startover_window_seconds} => Integer
# * {Types::UpdateOriginEndpointResponse#tags #tags} => Hash<String,String>
# * {Types::UpdateOriginEndpointResponse#time_delay_seconds #time_delay_seconds} => Integer
# * {Types::UpdateOriginEndpointResponse#url #url} => String
# * {Types::UpdateOriginEndpointResponse#whitelist #whitelist} => Array<String>
#
# @example Request syntax with placeholder values
#
# resp = client.update_origin_endpoint({
# authorization: {
# cdn_identifier_secret: "__string", # required
# secrets_role_arn: "__string", # required
# },
# cmaf_package: {
# encryption: {
# key_rotation_interval_seconds: 1,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# hls_manifests: [
# {
# ad_markers: "NONE", # accepts NONE, SCTE35_ENHANCED, PASSTHROUGH, DATERANGE
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# id: "__string", # required
# include_iframe_only_stream: false,
# manifest_name: "__string",
# playlist_type: "NONE", # accepts NONE, EVENT, VOD
# playlist_window_seconds: 1,
# program_date_time_interval_seconds: 1,
# },
# ],
# segment_duration_seconds: 1,
# segment_prefix: "__string",
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# },
# dash_package: {
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# encryption: {
# key_rotation_interval_seconds: 1,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# manifest_layout: "FULL", # accepts FULL, COMPACT
# manifest_window_seconds: 1,
# min_buffer_time_seconds: 1,
# min_update_period_seconds: 1,
# period_triggers: ["ADS"], # accepts ADS
# profile: "NONE", # accepts NONE, HBBTV_1_5
# segment_duration_seconds: 1,
# segment_template_format: "NUMBER_WITH_TIMELINE", # accepts NUMBER_WITH_TIMELINE, TIME_WITH_TIMELINE, NUMBER_WITH_DURATION
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# suggested_presentation_delay_seconds: 1,
# },
# description: "__string",
# hls_package: {
# ad_markers: "NONE", # accepts NONE, SCTE35_ENHANCED, PASSTHROUGH, DATERANGE
# ad_triggers: ["SPLICE_INSERT"], # accepts SPLICE_INSERT, BREAK, PROVIDER_ADVERTISEMENT, DISTRIBUTOR_ADVERTISEMENT, PROVIDER_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_PLACEMENT_OPPORTUNITY, PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY, DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY
# ads_on_delivery_restrictions: "NONE", # accepts NONE, RESTRICTED, UNRESTRICTED, BOTH
# encryption: {
# constant_initialization_vector: "__string",
# encryption_method: "AES_128", # accepts AES_128, SAMPLE_AES
# key_rotation_interval_seconds: 1,
# repeat_ext_x_key: false,
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# include_iframe_only_stream: false,
# playlist_type: "NONE", # accepts NONE, EVENT, VOD
# playlist_window_seconds: 1,
# program_date_time_interval_seconds: 1,
# segment_duration_seconds: 1,
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# use_audio_rendition_group: false,
# },
# id: "__string", # required
# manifest_name: "__string",
# mss_package: {
# encryption: {
# speke_key_provider: { # required
# certificate_arn: "__string",
# resource_id: "__string", # required
# role_arn: "__string", # required
# system_ids: ["__string"], # required
# url: "__string", # required
# },
# },
# manifest_window_seconds: 1,
# segment_duration_seconds: 1,
# stream_selection: {
# max_video_bits_per_second: 1,
# min_video_bits_per_second: 1,
# stream_order: "ORIGINAL", # accepts ORIGINAL, VIDEO_BITRATE_ASCENDING, VIDEO_BITRATE_DESCENDING
# },
# },
# origination: "ALLOW", # accepts ALLOW, DENY
# startover_window_seconds: 1,
# time_delay_seconds: 1,
# whitelist: ["__string"],
# })
#
# @example Response structure
#
# resp.arn #=> String
# resp.authorization.cdn_identifier_secret #=> String
# resp.authorization.secrets_role_arn #=> String
# resp.channel_id #=> String
# resp.cmaf_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.cmaf_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.resource_id #=> String
# resp.cmaf_package.encryption.speke_key_provider.role_arn #=> String
# resp.cmaf_package.encryption.speke_key_provider.system_ids #=> Array
# resp.cmaf_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.cmaf_package.encryption.speke_key_provider.url #=> String
# resp.cmaf_package.hls_manifests #=> Array
# resp.cmaf_package.hls_manifests[0].ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.cmaf_package.hls_manifests[0].id #=> String
# resp.cmaf_package.hls_manifests[0].include_iframe_only_stream #=> Boolean
# resp.cmaf_package.hls_manifests[0].manifest_name #=> String
# resp.cmaf_package.hls_manifests[0].playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.cmaf_package.hls_manifests[0].playlist_window_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].program_date_time_interval_seconds #=> Integer
# resp.cmaf_package.hls_manifests[0].url #=> String
# resp.cmaf_package.segment_duration_seconds #=> Integer
# resp.cmaf_package.segment_prefix #=> String
# resp.cmaf_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.cmaf_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.ad_triggers #=> Array
# resp.dash_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.dash_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.dash_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.dash_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.dash_package.encryption.speke_key_provider.resource_id #=> String
# resp.dash_package.encryption.speke_key_provider.role_arn #=> String
# resp.dash_package.encryption.speke_key_provider.system_ids #=> Array
# resp.dash_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.dash_package.encryption.speke_key_provider.url #=> String
# resp.dash_package.manifest_layout #=> String, one of "FULL", "COMPACT"
# resp.dash_package.manifest_window_seconds #=> Integer
# resp.dash_package.min_buffer_time_seconds #=> Integer
# resp.dash_package.min_update_period_seconds #=> Integer
# resp.dash_package.period_triggers #=> Array
# resp.dash_package.period_triggers[0] #=> String, one of "ADS"
# resp.dash_package.profile #=> String, one of "NONE", "HBBTV_1_5"
# resp.dash_package.segment_duration_seconds #=> Integer
# resp.dash_package.segment_template_format #=> String, one of "NUMBER_WITH_TIMELINE", "TIME_WITH_TIMELINE", "NUMBER_WITH_DURATION"
# resp.dash_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.dash_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.dash_package.suggested_presentation_delay_seconds #=> Integer
# resp.description #=> String
# resp.hls_package.ad_markers #=> String, one of "NONE", "SCTE35_ENHANCED", "PASSTHROUGH", "DATERANGE"
# resp.hls_package.ad_triggers #=> Array
# resp.hls_package.ad_triggers[0] #=> String, one of "SPLICE_INSERT", "BREAK", "PROVIDER_ADVERTISEMENT", "DISTRIBUTOR_ADVERTISEMENT", "PROVIDER_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_PLACEMENT_OPPORTUNITY", "PROVIDER_OVERLAY_PLACEMENT_OPPORTUNITY", "DISTRIBUTOR_OVERLAY_PLACEMENT_OPPORTUNITY"
# resp.hls_package.ads_on_delivery_restrictions #=> String, one of "NONE", "RESTRICTED", "UNRESTRICTED", "BOTH"
# resp.hls_package.encryption.constant_initialization_vector #=> String
# resp.hls_package.encryption.encryption_method #=> String, one of "AES_128", "SAMPLE_AES"
# resp.hls_package.encryption.key_rotation_interval_seconds #=> Integer
# resp.hls_package.encryption.repeat_ext_x_key #=> Boolean
# resp.hls_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.hls_package.encryption.speke_key_provider.resource_id #=> String
# resp.hls_package.encryption.speke_key_provider.role_arn #=> String
# resp.hls_package.encryption.speke_key_provider.system_ids #=> Array
# resp.hls_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.hls_package.encryption.speke_key_provider.url #=> String
# resp.hls_package.include_iframe_only_stream #=> Boolean
# resp.hls_package.playlist_type #=> String, one of "NONE", "EVENT", "VOD"
# resp.hls_package.playlist_window_seconds #=> Integer
# resp.hls_package.program_date_time_interval_seconds #=> Integer
# resp.hls_package.segment_duration_seconds #=> Integer
# resp.hls_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.hls_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.hls_package.use_audio_rendition_group #=> Boolean
# resp.id #=> String
# resp.manifest_name #=> String
# resp.mss_package.encryption.speke_key_provider.certificate_arn #=> String
# resp.mss_package.encryption.speke_key_provider.resource_id #=> String
# resp.mss_package.encryption.speke_key_provider.role_arn #=> String
# resp.mss_package.encryption.speke_key_provider.system_ids #=> Array
# resp.mss_package.encryption.speke_key_provider.system_ids[0] #=> String
# resp.mss_package.encryption.speke_key_provider.url #=> String
# resp.mss_package.manifest_window_seconds #=> Integer
# resp.mss_package.segment_duration_seconds #=> Integer
# resp.mss_package.stream_selection.max_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.min_video_bits_per_second #=> Integer
# resp.mss_package.stream_selection.stream_order #=> String, one of "ORIGINAL", "VIDEO_BITRATE_ASCENDING", "VIDEO_BITRATE_DESCENDING"
# resp.origination #=> String, one of "ALLOW", "DENY"
# resp.startover_window_seconds #=> Integer
# resp.tags #=> Hash
# resp.tags["__string"] #=> String
# resp.time_delay_seconds #=> Integer
# resp.url #=> String
# resp.whitelist #=> Array
# resp.whitelist[0] #=> String
#
# @see http://docs.aws.amazon.com/goto/WebAPI/mediapackage-2017-10-12/UpdateOriginEndpoint AWS API Documentation
#
# @overload update_origin_endpoint(params = {})
# @param [Hash] params ({})
def update_origin_endpoint(params = {}, options = {})
req = build_request(:update_origin_endpoint, params)
req.send_request(options)
end
# @!endgroup
# @param params ({})
# @api private
def build_request(operation_name, params = {})
handlers = @handlers.for(operation_name)
context = Seahorse::Client::RequestContext.new(
operation_name: operation_name,
operation: config.api.operation(operation_name),
client: self,
params: params,
config: config)
context[:gem_name] = 'aws-sdk-mediapackage'
context[:gem_version] = '1.31.0'
Seahorse::Client::Request.new(handlers, context)
end
# @api private
# @deprecated
def waiter_names
[]
end
class << self
# @api private
attr_reader :identifier
# @api private
def errors_module
Errors
end
end
end
end
| 52.506122 | 317 | 0.685101 |
bfce5f2f9f6dac3ddf9b650e6c81ee6d28ca92bf | 463 | module UsersHelper
# Link to a user (default is by name).
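  # Illustrative calls (names here are hypothetical) covering the three
  # supported argument shapes:
  #   user_link(@user)                    # text defaults to @user.name
  #   user_link("View profile", @user)    # explicit link text
  #   user_link(@user, class: "author")   # a Hash is treated as html_options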
def user_link(text, user = nil, html_options = nil)
if user.nil?
user = text
text = user.name
elsif user.is_a?(Hash)
html_options = user
user = text
text = user.name
end
# We normally write link_to(..., user) for brevity, but that breaks
# activities_helper_spec due to an RSpec bug.
link_to(h(text), profile_path(user), html_options)
end
end | 27.235294 | 71 | 0.643629 |
38efffcb148086d8f6a1d5e9497898a3a64cb853 | 1,301 | $:.push File.expand_path("../lib", __FILE__)
# Maintain your gem's version:
require "spud_media/version"
# Describe your gem and declare its dependencies:
Gem::Specification.new do |s|
s.name = "spud_media"
s.version = Spud::Media::VERSION
s.authors = ["David Estes"]
s.email = ["[email protected]"]
s.homepage = "http://www.github.com/spud-rails/spud_media"
s.summary = "Spud File upload/management module"
s.description = "Spud Media allows you to upload files to your site and manage them in the spud administrative panel. It also uses paperclip and supports s3 storage"
s.files = Dir["{app,config,db,lib}/**/*"] + ["MIT-LICENSE", "Rakefile", "Readme.markdown"]
s.test_files = Dir["test/**/*"]
s.add_dependency "rails", ">= 4.0.0"
s.add_dependency 'spud_core', ">= 1.0.0"
s.add_dependency "paperclip", ">= 4.2.0"
s.add_development_dependency 'mysql2'
s.add_development_dependency 'rspec', '2.14.0'
s.add_development_dependency 'rspec-rails', '2.14.0'
s.add_development_dependency 'shoulda', '~> 3.0.1'
s.add_development_dependency 'factory_girl', '~> 3.0'
s.add_development_dependency 'database_cleaner', '1.0.0.RC1'
s.add_development_dependency 'mocha', '0.14.0'
s.add_development_dependency 'simplecov', '~> 0.6.4'
end
| 39.424242 | 167 | 0.691776 |
bb774e232e46deb801417e8762398758941b7b59 | 2,986 | require 'pxp-agent/test_helper.rb'
require 'yaml'
test_name 'Run puppet agent as non-root' do
agents.each do |agent|
platform = agent.platform
skip_test "Test is not compatible with #{platform}" if platform =~ /windows/
step 'create non-root user on all nodes' do
@user_name = 'foo'
@group_name = 'foobar'
if platform =~ /solaris/
@user_home_dir = "/export/home/#{@user_name}"
elsif platform =~ /osx/
@user_home_dir = "/Users/#{@user_name}"
else
@user_home_dir = "/home/#{@user_name}"
end
@user_puppetlabs_dir = "#{@user_home_dir}/.puppetlabs"
@user_puppet_dir = "#{@user_puppetlabs_dir}/etc/puppet"
@user_pxp_dir = "#{@user_puppetlabs_dir}/etc/pxp-agent"
on(agent, "mkdir -p #{@user_puppet_dir}")
on(agent, "mkdir -p #{@user_pxp_dir}")
on(agent, puppet("resource group #{@group_name} ensure=present"))
if platform =~ /eos/
        # On Arista EOS, create the user with useradd rather than the puppet user resource
on(agent, "useradd #{@user_name} -p p@ssw0rd")
else
on(agent, puppet("resource user #{@user_name} ensure=present home=#{@user_home_dir} groups=#{@group_name}"))
end
end
teardown do
get_process_pids(agent, 'pxp-agent').each do |pid|
on(agent, "kill -9 #{pid}", :accept_all_exit_codes => true)
end
on(agent, puppet("resource user #{@user_name} ensure=absent"))
on(agent, "rm -rf #{@user_home_dir}")
end
step 'Ensure pxp-agent is stopped' do
on(agent, puppet("resource service pxp-agent ensure=stopped enable=false"))
end
step 'Copy certs and keys to new users home directory' do
puppet_ssldir = on(agent, puppet('config print ssldir')).stdout.chomp
      # These become the ssl-key, ssl-ca-cert and ssl-cert settings in the pxp-agent config below
on(agent, "cp -R #{puppet_ssldir} #{@user_puppet_dir}")
end
step 'Attempt start of pxp-agent as non-root user' do
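      # Point pxp-agent at the certs copied into the non-root user's home above,
      # so the agent running as that user does not depend on root-owned files.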
ssl_config = {
:ssl_key => "#{@user_puppet_dir}/ssl/private_keys/#{agent}.pem",
:ssl_ca_cert => "#{@user_puppet_dir}/ssl/certs/ca.pem",
:ssl_cert => "#{@user_puppet_dir}/ssl/certs/#{agent}.pem"
}
create_remote_file(agent, "#{@user_pxp_dir}/pxp-agent.conf", pxp_config_hocon(master, agent, ssl_config).to_s)
on(agent, "chown -R #{@user_name}:#{@group_name} #{@user_home_dir}")
if platform =~ /solaris|aix|eos/
command = "cd #{@user_home_dir} && HOME=#{@user_home_dir} su #{@user_name} -c \"/opt/puppetlabs/puppet/bin/pxp-agent\""
else
command = "su -l #{@user_name} -c \"/opt/puppetlabs/puppet/bin/pxp-agent\""
end
on(agent, command, :accept_all_exit_codes => true) do |result|
assert_equal(0, result.exit_code, "The expected exit code was not observed \n #{result.output}")
end
assert(is_associated?(master, "pcp://#{agent}/agent"),
"At the start of the test, #{agent} (with PCP identity pcp://#{agent}/agent ) should be associated with pcp-broker")
end
end
end
| 38.779221 | 129 | 0.623912 |
f837e96b2bb26e84feb9ace0466cd17cc15888c4 | 2,236 | class PostfixLogLine < ActiveRecord::Base
belongs_to :delivery
after_save :update_status!
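  # The leading digit of the DSN determines the outcome: for a hypothetical
  # dsn of "2.0.0", #dsn_class is 2 and #status is "delivered"; "4.x.x" codes
  # map to "soft_bounce" and "5.x.x" codes to "hard_bounce".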
def dsn_class
match = dsn.match(/^(\d)\.(\d+)\.(\d+)/)
if match
match[1].to_i
else
raise "Unexpected form for dsn code"
end
end
def status
case dsn_class
when 2
"delivered"
when 4
"soft_bounce"
when 5
"hard_bounce"
else
raise "Unknown dsn class"
end
end
# My status has changed. Tell those effected.
def update_status!
delivery.update_status!
end
def self.create_from_line(line)
values = match_main_content(line)
program = values.delete(:program)
to = values.delete(:to)
queue_id = values.delete(:queue_id)
# Only log delivery attempts
if program == "smtp"
delivery = Delivery.joins(:email, :address).order("emails.created_at DESC").find_by("addresses.text" => to, postfix_queue_id: queue_id)
if delivery
# Don't resave duplicates
delivery.postfix_log_lines.find_or_create_by(values)
else
puts "Skipping address #{to} from postfix queue id #{queue_id} - it's not recognised: #{line}"
end
end
end
def self.match_main_content(line)
# Assume the log file was written using syslog and parse accordingly
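    # For a hypothetical smtp line such as
    #   "Apr  5 16:41:54 mail postfix/smtp[1234]: 3B98D1A: to=<[email protected]>, relay=example.org[192.0.2.1]:25, delay=1.2, delays=0.1/0/0.8/0.3, dsn=2.0.0, status=sent (250 ok)"
    # this should return :program => "smtp", :queue_id => "3B98D1A",
    # :to => "[email protected]", :dsn => "2.0.0" and :extended_status => "sent (250 ok)",
    # along with :time, :relay, :delay and :delays.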
p = SyslogProtocol.parse("<13>" + line)
content_match = p.content.match /^postfix\/(\w+)\[(\d+)\]: (([0-9A-F]+): )?(.*)/
program_content = content_match[5]
to_match = program_content.match(/to=<([^>]+)>/)
relay_match = program_content.match(/relay=([^,]+)/)
delay_match = program_content.match(/delay=([^,]+)/)
delays_match = program_content.match(/delays=([^,]+)/)
dsn_match = program_content.match(/dsn=([^,]+)/)
status_match = program_content.match(/status=(.*)$/)
result = {
:time => p.time,
:program => content_match[1],
:queue_id => content_match[4],
}
result[:to] = to_match[1] if to_match
result[:relay] = relay_match[1] if relay_match
result[:delay] = delay_match[1] if delay_match
result[:delays] = delays_match[1] if delays_match
result[:dsn] = dsn_match[1] if dsn_match
result[:extended_status] = status_match[1] if status_match
result
end
end
| 27.95 | 141 | 0.639982 |
39c09323164a0ce795d9c6749205b3379e8e8e20 | 4,278 | module Fog
module Network
class OpenStack
class Real
# Create a new security group
#
# ==== Parameters
# * options<~Hash>:
# * 'name'<~String> - Name of the security group
# * 'description'<~String> - Description of the security group
# * 'tenant_id'<~String> - TenantId different than the current user, that should own the security group. Only allowed if user has 'admin' role.
#
# ==== Returns
# * response<~Excon::Response>:
# * body<~Hash>:
# * 'security_groups'<~Array>:
# * 'id'<~String> - UUID of the security group
# * 'name'<~String> - Name of the security group
# * 'description'<~String> - Description of the security group
# * 'tenant_id'<~String> - Tenant id that owns the security group
# * 'security_group_rules'<~Array>: - Array of security group rules
# * 'id'<~String> - UUID of the security group rule
# * 'direction'<~String> - Direction of traffic, must be in ['ingress', 'egress']
# * 'port_range_min'<~Integer> - Start port for rule i.e. 22 (or -1 for ICMP wildcard)
# * 'port_range_max'<~Integer> - End port for rule i.e. 22 (or -1 for ICMP wildcard)
# * 'protocol'<~String> - IP protocol for rule, must be in ['tcp', 'udp', 'icmp']
# * 'ethertype'<~String> - Type of ethernet support, must be in ['IPv4', 'IPv6']
# * 'security_group_id'<~String> - UUID of the parent security group
# * 'remote_group_id'<~String> - UUID of the remote security group
# * 'remote_ip_prefix'<~String> - IP cidr range address i.e. '0.0.0.0/0'
# * 'tenant_id'<~String> - Tenant id that owns the security group rule
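        #
        # A minimal usage sketch (service construction and values are illustrative):
        #   network = Fog::Network.new(:provider => 'OpenStack', ...)
        #   network.create_security_group(:name => 'web', :description => 'allow inbound web traffic')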
def create_security_group(options = {})
data = {"security_group" => {}}
desired_options = [:name, :description, :tenant_id]
          selected_options = desired_options.select { |o| options[o] }
selected_options.each { |key| data["security_group"][key] = options[key] }
request(
:body => Fog::JSON.encode(data),
:expects => 201,
:method => "POST",
:path => "security-groups"
)
end
end
class Mock
def create_security_group(options = {})
# Spaces are NOT removed from name and description, as in case of compute sec groups
tenant_id = Fog::Mock.random_numbers(14).to_s
sec_group_id = Fog::UUID.uuid
response = Excon::Response.new
response.status = 201
# by default every security group will come setup with an egress rule to "allow all out"
data = {
"security_group_rules" => [
{ "remote_group_id" => nil,
"direction" => "egress",
"remote_ip_prefix" => nil,
"protocol" => nil,
"ethertype" => "IPv4",
"tenant_id" => tenant_id,
"port_range_max" => nil,
"port_range_min" => nil,
"id" => Fog::UUID.uuid,
"security_group_id" => sec_group_id
},
{ "remote_group_id" => nil,
"direction" => "egress",
"remote_ip_prefix" => nil,
"protocol" => nil,
"ethertype" => "IPv6",
"tenant_id" => tenant_id,
"port_range_max" => nil,
"port_range_min" => nil,
"id" => Fog::UUID.uuid,
"security_group_id" => sec_group_id
}
],
"id" => sec_group_id,
"tenant_id" => tenant_id,
"name" => options[:name] || "",
"description" => options[:description] || ""
}
self.data[:security_groups][data["id"]] = data
response.body = {"security_group" => data}
response
end
end
end
end
end
| 45.031579 | 153 | 0.491117 |
f76d1d6acb05529c6b6836516aef743e43105cc0 | 337 | # frozen_string_literal: true
require 'active_support/concern'
module BetterRecord
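  # Mixing this concern into a controller wires up JWT-based authentication.
  # A minimal sketch (the controller name is illustrative; check_user and
  # set_auth_header are assumed to be provided by
  # BetterRecord::JWT::ControllerMethods):
  #
  #   class ApplicationController < ActionController::Base
  #     include BetterRecord::Authenticatable
  #   end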
module Authenticatable
extend ActiveSupport::Concern
included do
include BetterRecord::JWT::ControllerMethods
before_action :check_user
if use_bearer_token
after_action :set_auth_header
end
end
end
end
| 17.736842 | 50 | 0.732938 |
7a90572dc56a8bd329e37f077d63816af1457b99 | 327 | require 'rails_helper'
require 'dwca/generator'
describe Dwca::Generator do
specify "stores a compressed file in rails' temp directory" do
path = Dwca::Generator.get_archive
    expect(File.exist?(path)).to be_truthy
File.delete(path)
expect(path.to_s.index(Rails.root.join('tmp').to_s)).to eq(0)
end
end
| 23.357143 | 65 | 0.718654 |