hexsha (stringlengths 40-40) | size (int64 2-1.01M) | content (stringlengths 2-1.01M) | avg_line_length (float64 1.5-100) | max_line_length (int64 2-1k) | alphanum_fraction (float64 0.25-1)
---|---|---|---|---|---|
03dd276e2aec13fad3012e81a67c059be55b274c | 3,006 | require "notiffany/notifier/base"
module Notiffany
class Notifier
# System notifications using the
# [growl](https://github.com/visionmedia/growl) gem.
#
# This gem is available for OS X and sends system notifications to
# [Growl](http://growl.info) through the
# [GrowlNotify](http://growl.info/downloads) executable.
#
# The `growlnotify` executable must be installed manually or by using
# [Homebrew](http://mxcl.github.com/homebrew/).
#
# Sending notifications with this notifier will not show the different
# notifications in the Growl preferences. Use the :gntp notifier if you
# want to customize each notification type in Growl.
#
# @example Install `growlnotify` with Homebrew
# brew install growlnotify
#
# @example Add the `growl` gem to your `Gemfile`
#   group :development do
# gem 'growl'
# end
#
# @example Add the `:growl` notifier to your `Guardfile`
# notification :growl
#
# @example Add the `:growl` notifier with configuration options to your `Guardfile`
#   notification :growl, sticky: true, host: '192.168.1.5', password: 'secret'
#
class Growl < Base
INSTALL_GROWLNOTIFY = "Please install the 'growlnotify' executable"\
" (available by installing the 'growl' gem)."
# Default options for the growl notifications.
DEFAULTS = {
sticky: false,
priority: 0
}
def _supported_hosts
%w(darwin)
end
def _check_available(_opts = {})
fail UnavailableError, INSTALL_GROWLNOTIFY unless ::Growl.installed?
end
# Shows a system notification.
#
# The documented options are for GrowlNotify 1.3, but the older options
# are also supported. Please see `growlnotify --help`.
#
# Priority can be one of the following named keys: `Very Low`,
# `Moderate`, `Normal`, `High`, `Emergency`. It can also be an integer
# between -2 and 2.
#
# @param [String] message the notification message body
# @param [Hash] opts additional notification library options
# @option opts [String] type the notification type. Either 'success',
# 'pending', 'failed' or 'notify'
# @option opts [String] title the notification title
# @option opts [String] image the path to the notification image
# @option opts [Boolean] sticky make the notification sticky
# @option opts [String, Integer] priority specify an int or named key
# (default is 0)
# @option opts [String] host the hostname or IP address to which to
# send a remote notification
# @option opts [String] password the password used for remote
# notifications
#
def _perform_notify(message, opts = {})
opts = { name: "Notiffany" }.merge(opts)
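# Keep only the options growlnotify actually understands before handing off to the growl gem.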
opts.select! { |k, _| ::Growl::Base.switches.include?(k) }
::Growl.notify(message, opts)
end
end
end
end
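# A minimal usage sketch (not part of the original file; it assumes the 'growl'
# gem and the growlnotify executable are installed, and that this notifier is
# driven through the Notifier::Base API it inherits):
#
#   require "notiffany/notifier/growl"
#
#   notifier = Notiffany::Notifier::Growl.new(title: "Notiffany")
#   notifier.notify("Build finished", type: "success", sticky: false)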
| 36.216867 | 78 | 0.643047 |
28231729568bf789a4d516cb25322360afde6741 | 5,341 | #
# Be sure to run `pod spec lint InformSDK.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
# ――― Spec Metadata ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# These will help people to find your library, and whilst it
# can feel like a chore to fill in it's definitely to your advantage. The
# summary should be tweet-length, and the description more in depth.
#
s.name = "InformSDKSpec"
s.version = "1.0.0"
s.summary = "InformSDK test build to test its integration with CocoaPods."
# This description is used to generate tags and improve search results.
# * Think: What does it do? Why did you write it? What is the focus?
# * Try to keep it short, snappy and to the point.
# * Write the description between the DESC delimiters below.
# * Finally, don't worry about the indent, CocoaPods strips it!
s.description = <<-DESC
Testing INFORM SDK. InformSDK test build to test its integration with CocoaPods.
DESC
s.homepage = "http://inform.com"
# s.screenshots = "www.example.com/screenshots_1.gif", "www.example.com/screenshots_2.gif"
# ――― Spec License ――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Licensing your code is important. See http://choosealicense.com for more info.
# CocoaPods will detect a license file if there is a named LICENSE*
# Popular ones are 'MIT', 'BSD' and 'Apache License, Version 2.0'.
#
#s.license = "MIT (example)"
s.license = { :type => "Commercial", :file => "LICENSE" }
# ――― Author Metadata ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the authors of the library, with email addresses. Email addresses
# of the authors are extracted from the SCM log. E.g. $ git log. CocoaPods also
# accepts just a name if you'd rather not provide an email address.
#
# Specify a social_media_url where others can refer to, for example a twitter
# profile URL.
#
s.author = { "Inform Inc" => "[email protected]" }
# Or just: s.author = "Mudit Jain"
# s.authors = { "Mudit Jain" => "[email protected]" }
# s.social_media_url = "http://twitter.com/Mudit Jain"
# ――― Platform Specifics ――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If this Pod runs only on iOS or OS X, then specify the platform and
# the deployment target. You can optionally include the target after the platform.
#
# s.platform = :ios
s.platform = :ios, "8.0"
# When using multiple platforms
# s.ios.deployment_target = "5.0"
# s.osx.deployment_target = "10.7"
# s.watchos.deployment_target = "2.0"
# s.tvos.deployment_target = "9.0"
# ――― Source Location ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Specify the location from where the source should be retrieved.
# Supports git, hg, bzr, svn and HTTP.
#
s.source = { :git => "https://github.com/inform-inc/inform-mobile-sdk-framework.git", :tag => "1.0.6" }
# ――― Source Code ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# CocoaPods is smart about how it includes source code. For source files
# giving a folder will include any swift, h, m, mm, c & cpp files.
# For header files it will include any header in the folder.
# Not including the public_header_files will make all headers public.
#
#s.source_files = "Classes", "Classes/**/*.{h,m}"
#s.exclude_files = "Classes/Exclude"
# s.public_header_files = "Classes/**/*.h"
# ――― Resources ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# A list of resources included with the Pod. These are copied into the
# target bundle with a build phase script. Anything else will be cleaned.
# You can preserve files from being cleaned, please don't preserve
# non-essential files like tests, examples and documentation.
#
# s.resource = "icon.png"
# s.resources = "Resources/*.png"
# s.preserve_paths = "FilesToSave", "MoreFilesToSave"
# ――― Project Linking ―――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# Link your library with frameworks, or libraries. Libraries do not include
# the lib prefix of their name.
#
# s.framework = "SomeFramework"
s.frameworks = "Foundation", "MediaPlayer", "UIKit"
# s.library = "iconv"
# s.libraries = "iconv", "xml2"
# ――― Project Settings ――――――――――――――――――――――――――――――――――――――――――――――――――――――――― #
#
# If your library depends on compiler flags you can set them in the xcconfig hash
# where they will only apply to your library. If you depend on other Podspecs
# you can include multiple dependencies to ensure it works.
# s.requires_arc = true
# s.xcconfig = { "HEADER_SEARCH_PATHS" => "$(SDKROOT)/usr/include/libxml2" }
s.dependency 'Alamofire', '~> 3.1.2'
s.dependency 'MBProgressHUD', '~> 0.9.2'
s.dependency 'Argo'
s.dependency 'Curry', '~> 1.4.0'
s.ios.vendored_framework = "dynamicFramework/V-1.0.0/InformSDK.framework"
end
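# A hypothetical Podfile entry consuming this spec (illustration only; it
# assumes the podspec has been pushed to a spec repo visible to the app, and
# 'MyApp' is a placeholder target name):
#
#   target 'MyApp' do
#     use_frameworks!
#     pod 'InformSDKSpec', '~> 1.0'
#   end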
| 36.834483 | 109 | 0.599888 |
abf44c724e75b765e08b0ea3991cee0a368a5cdb | 4,303 | require 'spec_helper'
describe 'datadog_agent::integrations::postgres' do
let(:facts) {{
operatingsystem: 'Ubuntu',
}}
let(:conf_dir) { '/etc/dd-agent/conf.d' }
let(:dd_user) { 'dd-agent' }
let(:dd_group) { 'root' }
let(:dd_package) { 'datadog-agent' }
let(:dd_service) { 'datadog-agent' }
let(:conf_file) { "#{conf_dir}/postgres.yaml" }
context 'with default parameters' do
it { should_not compile }
end
context 'with password set' do
let(:params) {{
password: 'abc123',
}}
it { should compile.with_all_deps }
it { should contain_file(conf_file).with(
owner: dd_user,
group: dd_group,
mode: '0600',
)}
it { should contain_file(conf_file).that_requires("Package[#{dd_package}]") }
it { should contain_file(conf_file).that_notifies("Service[#{dd_service}]") }
it { should contain_file(conf_file).with_content(/password: abc123/) }
context 'with default parameters' do
it { should contain_file(conf_file).with_content(%r{host: localhost}) }
it { should contain_file(conf_file).with_content(%r{dbname: postgres}) }
it { should contain_file(conf_file).with_content(%r{port: 5432}) }
it { should contain_file(conf_file).with_content(%r{username: datadog}) }
it { should contain_file(conf_file).without_content(%r{^\s*use_psycopg2: }) }
it { should contain_file(conf_file).without_content(%r{tags: })}
it { should contain_file(conf_file).without_content(%r{^[^#]*relations: }) }
end
context 'with use_psycopg2 enabled' do
let(:params) {{
use_psycopg2: true,
password: 'abc123',
}}
it { should contain_file(conf_file).with_content(%r{use_psycopg2: true}) }
end
context 'with parameters set' do
let(:params) {{
host: 'postgres1',
dbname: 'cats',
port: 4142,
username: 'monitoring',
password: 'abc123',
tags: %w{foo bar baz},
tables: %w{furry fuzzy funky}
}}
it { should contain_file(conf_file).with_content(%r{host: postgres1}) }
it { should contain_file(conf_file).with_content(%r{dbname: cats}) }
it { should contain_file(conf_file).with_content(%r{port: 4142}) }
it { should contain_file(conf_file).with_content(%r{username: monitoring}) }
it { should contain_file(conf_file).with_content(%r{^[^#]*tags:\s+- foo\s+- bar\s+- baz}) }
it { should contain_file(conf_file).with_content(%r{^[^#]*relations:\s+- furry\s+- fuzzy\s+- funky}) }
context 'with custom metric query missing %s' do
let(:params) {{
host: 'postgres1',
dbname: 'cats',
port: 4142,
username: 'monitoring',
password: 'abc123',
custom_metrics: {
'query_is_missing_%s' => {
'query' => 'select * from fuzz',
'metrics' => { },
}
}
}}
it do
expect {
is_expected.to compile
}.to raise_error(/custom_metrics require %s for metric substitution/)
end
end
context 'with custom metric query' do
let(:params) {{
host: 'postgres1',
dbname: 'cats',
port: 4142,
username: 'monitoring',
password: 'abc123',
custom_metrics: {
'foo_gooo_bar_query' => {
'query' => 'select foo, %s from bar',
'metrics' => {
"gooo" => ["custom_metric.tag.gooo", "GAUGE"]
},
'descriptors' => [["foo", "custom_metric.tag.foo"]]
}
}
}}
it { is_expected.to compile }
it { should contain_file(conf_file).with_content(%r{^[^#]*custom_metrics:}) }
it { should contain_file(conf_file).with_content(%r{\s+query:\s*['"]?select foo, %s from bar['"]?}) }
it { should contain_file(conf_file).with_content(%r{\s+metrics:}) }
it { should contain_file(conf_file).with_content(%r{\s+"gooo":\s+\[custom_metric.tag.gooo, GAUGE\]}) }
it { should contain_file(conf_file).with_content(%r{\s+query.*\n\s+relation:\s*false}) }
it { should contain_file(conf_file).with_content(%r{\s+descriptors.*\n\s+-\s+\[foo, custom_metric.tag.foo\]}) }
end
end
end
end
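# For reference, a rough sketch of the postgres.yaml these expectations describe
# (inferred from the regexes above, not taken from the module's actual template):
#
#   instances:
#     - host: postgres1
#       port: 4142
#       username: monitoring
#       password: abc123
#       dbname: cats
#       tags:
#         - foo
#         - bar
#         - baz
#       relations:
#         - furry
#         - fuzzy
#         - funky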
| 36.466102 | 119 | 0.5868 |
084a008fd2087696d6e091261871b1af66e43afd | 1,896 | Shindo.tests("Fog::Network[:openstack] | lb_health_monitor", ['openstack']) do
tests('success') do
before do
@lb_pool = Fog::Network[:openstack].lb_pools.create(:subnet_id => 'subnet_id',
:protocol => 'HTTP',
:lb_method => 'ROUND_ROBIN')
end
after do
@lb_pool.destroy
end
tests('#create').succeeds do
@instance = Fog::Network[:openstack].lb_health_monitors.create(:type => 'PING',
:delay => 1,
:timeout => 5,
:max_retries => 10,
:http_method => 'GET',
:url_path => '/',
:expected_codes => '200, 201',
:admin_state_up => true,
:tenant_id => 'tenant_id')
!@instance.id.nil?
end
tests('#update').succeeds do
@instance.delay = 5
@instance.timeout = 10
@instance.max_retries = 20
@instance.http_method = 'POST'
@instance.url_path = '/varz'
@instance.expected_codes = '200'
@instance.admin_state_up = false
@instance.update
end
tests('#associate_to_pool').succeeds do
@instance.associate_to_pool(@lb_pool.id)
end
tests('#disassociate_from_pool').succeeds do
@instance.disassociate_from_pool(@lb_pool.id)
end
tests('#destroy').succeeds do
@instance.destroy == true
end
end
end | 36.461538 | 99 | 0.41192 |
f72018465fbfcce6a5bc4dff5ba6eaf052d19308 | 5,007 | class ProtobufAT32 < Formula
desc "Protocol buffers (Google's data interchange format)"
homepage "https://github.com/google/protobuf/"
url "https://github.com/google/protobuf/archive/v3.2.0.tar.gz"
sha256 "2a25c2b71c707c5552ec9afdfb22532a93a339e1ca5d38f163fe4107af08c54c"
revision 2
head "https://github.com/google/protobuf.git"
# This will approximately double the build time if enabled
option "with-test", "Run build-time check"
option "without-python", "Build without python support"
deprecated_option "with-check" => "with-test"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "python" => :recommended if MacOS.version <= :snow_leopard
resource "appdirs" do
url "https://pypi.python.org/packages/48/69/d87c60746b393309ca30761f8e2b49473d43450b150cb08f3c6df5c11be5/appdirs-1.4.3.tar.gz"
sha256 "9e5896d1372858f8dd3344faf4e5014d21849c756c8d5701f78f8a103b372d92"
end
resource "packaging" do
url "https://files.pythonhosted.org/packages/c6/70/bb32913de251017e266c5114d0a645f262fb10ebc9bf6de894966d124e35/packaging-16.8.tar.gz"
sha256 "5d50835fdf0a7edf0b55e311b7c887786504efea1177abd7e69329a8e5ea619e"
end
resource "pyparsing" do
url "https://pypi.python.org/packages/3c/ec/a94f8cf7274ea60b5413df054f82a8980523efd712ec55a59e7c3357cf7c/pyparsing-2.2.0.tar.gz"
sha256 "0832bcf47acd283788593e7a0f542407bd9550a55a8a8435214a1960e04bcb04"
end
resource "six" do
url "https://files.pythonhosted.org/packages/b3/b2/238e2590826bfdd113244a40d9d3eb26918bd798fc187e2360a8367068db/six-1.10.0.tar.gz"
sha256 "105f8d68616f8248e24bf0e9372ef04d3cc10104f1980f54d57b2ce73a5ad56a"
end
resource "setuptools" do
url "https://pypi.python.org/packages/d5/b7/e52b7dccd3f91eec858309dcd931c1387bf70b6d458c86a9bfcb50134fbd/setuptools-34.3.3.zip"
sha256 "2cd244d3fca6ff7d0794a9186d1d19a48453e9813ae1d783edbfb8c348cde905"
end
resource "google-apputils" do
url "https://files.pythonhosted.org/packages/69/66/a511c428fef8591c5adfa432a257a333e0d14184b6c5d03f1450827f7fe7/google-apputils-0.4.2.tar.gz"
sha256 "47959d0651c32102c10ad919b8a0ffe0ae85f44b8457ddcf2bdc0358fb03dc29"
end
# Upstream's autogen script fetches this if not present
# but does no integrity verification & mandates being online to install.
resource "gmock" do
url "https://storage.googleapis.com/google-code-archive-downloads/v2/code.google.com/googlemock/gmock-1.7.0.zip"
mirror "https://dl.bintray.com/homebrew/mirror/gmock-1.7.0.zip"
sha256 "26fcbb5925b74ad5fc8c26b0495dfc96353f4d553492eb97e85a8a6d2f43095b"
end
needs :cxx11
def install
# Don't build in debug mode. See:
# https://github.com/Homebrew/homebrew/issues/9279
# https://github.com/google/protobuf/blob/5c24564811c08772d090305be36fae82d8f12bbe/configure.ac#L61
ENV.prepend "CXXFLAGS", "-DNDEBUG"
ENV.cxx11
(buildpath/"gmock").install resource("gmock")
system "./autogen.sh"
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}", "--with-zlib"
system "make"
system "make", "check" if build.with?("test") || build.bottle?
system "make", "install"
# Install editor support and examples
doc.install "editors", "examples"
Language::Python.each_python(build) do |python, version|
# google-apputils is a build-time dependency
ENV.prepend_create_path "PYTHONPATH", buildpath/"homebrew/lib/python#{version}/site-packages"
res = resources.map(&:name).to_set - ["gmock"]
res.each do |package|
resource(package).stage do
system python, *Language::Python.setup_install_args(buildpath/"homebrew")
end
end
# google is a namespace package and .pth files aren't processed from
# PYTHONPATH
touch buildpath/"homebrew/lib/python#{version}/site-packages/google/__init__.py"
chdir "python" do
ENV.append_to_cflags "-I#{include}"
ENV.append_to_cflags "-L#{lib}"
args = Language::Python.setup_install_args libexec
args << "--cpp_implementation"
system python, *args
end
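      # Write a homebrew-protobuf.pth into Homebrew's site-packages so Python adds the
      # libexec copy to sys.path and `import google.protobuf` works without PYTHONPATH changes.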
site_packages = "lib/python#{version}/site-packages"
pth_contents = "import site; site.addsitedir('#{libexec/site_packages}')\n"
(prefix/site_packages/"homebrew-protobuf.pth").write pth_contents
end
end
def caveats; <<-EOS.undent
Editor support and examples have been installed to:
#{doc}
EOS
end
test do
testdata = <<-EOS.undent
syntax = "proto3";
package test;
message TestCase {
string name = 4;
}
message Test {
repeated TestCase case = 1;
}
EOS
(testpath/"test.proto").write testdata
system bin/"protoc", "test.proto", "--cpp_out=."
system "python", "-c", "import google.protobuf" if build.with? "python"
system "python3", "-c", "import google.protobuf" if build.with? "python3"
end
end
| 39.117188 | 145 | 0.724386 |
870b6bb93267fc8375a069648328b8fd22e3610f | 31,037 | module Components
module Appointments
module Doctors
class Index < RW
expose
def self.wdays
["sun", "mon", "tue", "wed", "thur", "fri", "sat" ]
end
def get_initial_state
{
date: Moment.new.startOf('day'),
current_controll_component: Native(:div, {}).to_n,
current_view: "week"
}
end
def component_did_mount
init_week_view
end
def render
t(:div, {className: 'appointments_calendar'},
modal,
t(:h2, {className: 'view_title_date'}, "#{state.date.month() + 1}.#{state.date.year()}"),
t(:div, {className: 'view_controlls'},
#t(:button, {className: 'btn btn-primary btn-xs', onClick: ->{init_month_view}}, "month"),
t(:button, {className: 'btn btn-primary btn-xs', onClick: ->{init_week_view(state.date.clone())}}, "week"),
t(:button, {className: 'btn btn-primary btn-xs', onClick: ->{init_day_view(state.date.clone())}}, "day"),
t(:button, {className: 'btn btn-primary btn-xs', onClick: ->{set_state date: Moment.new}}, "go to today"),
),
t(:div, {},
state.current_controll_component.to_n
)
)
end
def init_week_view(track_day)
state.current_view = "week"
set_state current_controll_component: ->{Native(t(Week, {ref: "week", index: self, date: state.date}))}
end
# def init_month_view
# state.current_view = "month"
# set_state current_controll_component: ->{Native(t(Month, {ref: "month", index: self, date: state.date}))}
# end
def init_day_view(day)
state.current_view = "day"
state.date = day
set_state date: state.date,current_controll_component: ->{Native(t(WeekDay, {ref: "day", date: state.date, index: self}))}, current_view: "day"
end
def init_appointments_show(appointment)
modal_open(
"appointment",
t(Components::Appointments::Doctors::Show, {appointment: appointment})
)
end
def init_user_show(user_id)
modal_open(
'patient',
t(Components::Users::Show, {user_id: user_id})
)
end
def current_view
self.ref(state.current_view).rb
end
end
class MonthBox < RW
expose
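        # Computes the day just before the first cell of the month grid (the grid
        # starts on the Sunday on or before the 1st) and which of the six rendered
        # weeks contains the currently selected date.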
def prepare_dates
@cur_month = props.date.clone().startOf("month")
@first_wday = @cur_month.day()
@track_day = @cur_month.clone().subtract((@first_wday + 1), "days")
@current_week_num = `Math.ceil(#{props.date.diff(@track_day, 'days')} / 7)`
end
def render
prepare_dates
today = props.date.format('YYYY-MM-DD')
t(:div, {},
t(:div, {className: "month_box"},
t(:div, {className: "row week_row"},
*splat_each(Services::Calendar.wdays) do |wday_name|
t(:div, {className: "day"}, wday_name)
end,
),
*splat_each(0..5) do |week_num|
if week_num + 1 == @current_week_num
is_current = 'current_week'
else
is_current = ''
end
t_d = (@track_day).clone
t(:div, {className: "row week_row #{is_current}"},
*splat_each(0..6) do |d|
t_d_a = (@track_day.add(1, 'days')).clone()
is_today = (t_d_a.format('YYYY-MM-DD') == today) ? 'today' : ''
t(:div, {className: "day #{is_today}", onClick: ->{set_date(t_d_a)}},
t(:div, {},
t(:span, {}, @track_day.date())
)
)
end
)
end
)
)
end
def set_date(date)
props.index.set_state date: date
end
def prev_month
props.index.set_state date: (@date = props.index.state.date.subtract(1, "month"))
component_did_mount
end
def next_month
props.index.set_state date: (@date = props.index.state.date.add(1, "month"))
component_did_mount
end
end
# class Month < RW
# expose
# def prepare_dates
# @cur_month = props.date.clone().startOf("month")
# @first_wday = @cur_month.day()
# @track_day = @cur_month.clone().subtract((@first_wday + 1), "days")
# end
# def queries(date)
# date = date.clone()
# date.startOf("month")
# date = date.isBefore(x = Moment.new.set(hour: 0, min: 0)) ? x : date
# wd = date.day() + 1
# x = {}
# z = date.subtract((wd), "days")
# x[:to] = z.clone().add(weeks: 6, days: 1).format('YYYY-MM-DD')
# x[:from] = z.format('YYYY-MM-DD')
# x
# end
# def get_initial_state
# @date = props.date
# {
# appointment_availabilities: {}
# }
# end
# def component_did_mount
# AppointmentAvailability.index(component: self, payload: queries(props.date)).then do |users|
# props.index.prepare_availability_tree(self, users)
# end
# end
# def render
# prepare_dates
# t(:div, {},
# spinner,
# t(:button, {onClick: ->{prev_month}}, "<"),
# t(:button, {onClick: ->{next_month}}, ">"),
# t(:div, {className: "table", style: {display: "table", fontSize:"10px!important"}.to_n },
# t(:div, {className: "row", style: {display: "table-row"}.to_n },
# *splat_each(Services::Calendar.wdays) do |wday_name|
# t(:div, {className: "col-lg-1", style: {display: "table-cell", width: "12%"}.to_n}, wday_name)
# end,
# ),
# *splat_each(0..5) do |week_num|
# t_d = (@track_day).clone
# t(:div, {className: "row", style: {display: "table-row"}.to_n},
# t(:div, {},
# *splat_each(0..6) do |d|
# t_d_a = (@track_day.add(1, 'days')).clone()
# t(:div, {className: "col-lg-1", style: {"height" => "12em", display: "table-cell", width: "12%", overflow: "scroll"}.to_n},
# t(:div, {},
# t(:span, {}, @track_day.date())
# ),
# t(:div, {},
# *splat_each(props.index.fetch_appointments(self, @track_day.format("YYYY-MM-DD"))) do |k, v|
# t(:span, {},
# "#{k}",
# t(:br, {}),
# *splat_each(v[0].map) do |av|
# t(:span, {}, "#{av[0].format('HH:mm')} - #{av[1].format('HH:mm')}", t(:br, {}))
# end,
# "------------",
# t(:br, {})
# )
# end
# )
# )
# end
# )
# )
# end
# )
# )
# end
# def prev_month
# props.index.set_state date: (@date = props.index.state.date.subtract(1, "month"))
# component_did_mount
# end
# def next_month
# props.index.set_state date: (@date = props.index.state.date.add(1, "month"))
# component_did_mount
# end
# end
class Week < RW
expose
# Resolves the date range used for querying the appointments.
def queries(date)
p date.format
z = date.clone().startOf("week")
#z = date.isBefore(x = Moment.new.set(hour: 0, min: 0)) ? x : z
x = {}
x[:from] = z.format('YYYY-MM-DD')
x[:to] = z.clone().endOf('week').format('YYYY-MM-DD')
x
end
def get_initial_state
{
appointments: ModelCollection.new
}
end
# Fetches appointments.
# If anything other than fetching is added here, move it to a separate method, because #component_did_mount is also called again on update.
def component_did_mount
Appointment.index(component: self, namespace: "doctor", payload: {from: queries(props.date)[:from], to: queries(props.date)[:to], doctor_ids: [CurrentUser.user_instance.id]}).then do |appointments|
set_state appointments: appointments
end
end
# If the date has changed, fetch appointments for the new date.
def component_did_update(prev_props, prev_state)
if props.date.format != prev_props.date.format
component_did_mount
end
end
# Does NOT fetch from the server.
# Searches state.appointments (a ModelCollection) for appointments that start on the given date.
def fetch_appointments(t_d)
state.appointments.where do |a|
next if a == nil
a.attributes[:start_date].include? "#{t_d}"
end
end
def render
#TODO: move to flexbox
passed_day = ''
current_day = Moment.new
t_d = @track_day = props.date.clone().startOf('week').subtract(1, 'days')
t(:div, {},
spinner,
t(:div, {className: 'row'},
t(:div, {className: 'prev_next_controlls'},
t(:button, {onClick: ->{prev_week}}, "<"),
t(:button, {onClick: ->{next_week}}, ">"),
),
t(:div, {className: "col-lg-1 week_day_panel #{$DISPLAY_SIZE}"},
t(MonthBox, {date: props.date, index: props.index})
)
),
t(:div, {className: 'row'},
modal,
*splat_each(0..6) do |d|
t_d_a = (@track_day.add(1, 'days')).clone()
t(:div, {className: "col-lg-1 week_day_panel #{$DISPLAY_SIZE}"},
t(:div, {className: "day_heading #{passed_day}", onClick: ->{props.index.init_day_view(t_d_a)}},
t(:h4, {className: 'wday_name'},
Services::Calendar.wdays[d]
),
t(:p, {}, @track_day.date())
),
t(:div, {className: "day_body"},
*splat_each(fetch_appointments(@track_day.format("YYYY-MM-DD"))) do |appointment|
t(:div, {className: 'appointments_for_doctor'},
t(:p, {className: 'patient_name', onClick: ->{props.index.init_user_show(appointment.patient.id)}},
t(:a, {}, "#{appointment.patient.profile.name}")
),
t(:p, {className: 'appointment_time'},
"#{Moment.new(appointment.start_date).format("HH:mm")} - #{Moment.new(appointment.end_date).format("HH:mm")}"
),
t(:div, {className: 'controls'},
t(:button, {className: 'btn btn-xs', onClick: ->{props.index.init_appointments_show(appointment)}}, "more...")
)
)
end
)
)
end
)
)
end
# def init_appointments_proposals_new(date)
# modal_open(
# "book an appointment",
# t(Components::Appointments::Proposals::New, {date: date, appointment_availabilities: props.index.fetch_appointments(self, date.clone.format("YYYY-MM-DD")), user_accessor: state.user_accessor, on_appointment_proposal_created: event(->{modal_close})})
# )
# end
def prev_week
props.index.set_state date: (props.index.state.date.subtract(7, 'days'))
component_did_mount
end
def next_week
props.index.set_state date: (props.index.state.date.add(7, 'days'))
component_did_mount
end
end
class WeekDay < RW
expose
# date: the Moment the view is currently positioned on
def get_initial_state
{
appointments: ModelCollection.new
}
end
def component_did_mount
Appointment.index(component: self, namespace: "doctor", payload: {from: "#{props.date.format('YYYY-MM-DD')}", to: "#{props.date.clone().add(1, 'days').format('YYYY-MM-DD')}", doctor_ids: [CurrentUser.user_instance.id]}).then do |appointments|
set_state appointments: appointments
end
end
def component_did_update(prev_props, prev_state)
if props.date.format != prev_props.date.format
component_did_mount
end
end
def render
t(:div, {className: "row "},
spinner,
modal,
t(:div, {className: "col-lg-3"},
t(MonthBox, {date: props.date, index: props.index})
),
t(:div, {className: "col-lg-6 day_panel"},
t(:div, {className: 'prev_next_controlls'},
t(:button, {onClick: ->{prev_day}}, "<"),
t(:button, {onClick: ->{next_day}}, ">"),
),
t(:div, {className: "day_heading"},
t(:h4, {className: 'wday_name'},
Services::Calendar.wdays[props.date.day()]
),
t(:p, {}, props.date.format('DD'))
),
t(:div, {className: "day_body"},
*splat_each(state.appointments) do |appointment|
t(:div, {className: 'appointments_for_doctor'},
t(:p, {className: 'patient_name', onClick: ->{props.index.init_user_show(appointment.patient.id)}},
t(:a, {}, "#{appointment.patient.profile.name}")
),
t(:p, {className: 'appointment_time'},
"#{Moment.new(appointment.start_date).format("HH:mm")} - #{Moment.new(appointment.end_date).format("HH:mm")}"
),
t(:div, {className: 'controls'},
t(:button, {className: 'btn btn-xs', onClick: ->{props.index.init_appointments_show(appointment)}}, "more...")
)
)
end
)
),
t(:div, {className: 'col-lg-3'})
)
end
def init_appointments_proposals_new(date)
modal_open(
"book an appointment",
t(Components::Appointments::Proposals::New, {date: date, appointment_availabilities: props.index.fetch_appointments(self, date.clone.format("YYYY-MM-DD")), user_accessor: state.user_accessor, on_appointment_proposal_created: event(->{modal_close})})
)
end
def prev_day
props.index.set_state date: (props.index.state.date.subtract(1, 'day'))
component_did_mount
end
def next_day
props.index.set_state date: (props.index.state.date.add(1, 'day'))
component_did_mount
end
end
# OLD VERSION
# If creating appointments as a doctor ever becomes necessary, the methods can be taken from here.
# class Index < RW
# expose
# def self.wdays
# ["sun", "mon", "tue", "wed", "thur", "fri", "sat" ]
# end
# def get_initial_state
# {
# date: Moment.new,
# current_controll_component: 'div',
# current_view: "week"
# }
# end
# def component_did_mount
# init_week_view
# end
# def render
# t(:div, {},
# modal,
# t(:p, {}, "the month is #{state.date.month() + 1}, of year #{state.date.year()}"),
# t(:button, {onClick: ->{init_month_view}}, "month"),
# t(:button, {onClick: ->{init_week_view(state.date.clone())}}, "week"),
# t(:button, {onClick: ->{init_day_view}}, "day"),
# t(:button, {onClick: ->{set_state date: Moment.new}}, "go to today"),
# t(:br, {}),
# t(:div, {},
# state.current_controll_component.to_n
# )
# )
# end
# def init_week_view(track_day)
# state.current_view = "week"
# set_state current_controll_component: ->{Native(t(Week, {ref: "week", index: self, date: state.date}))}
# end
# def init_month_view
# state.current_view = "month"
# set_state current_controll_component: ->{Native(t(Month, {ref: "month", index: self, date: state.date}))}
# end
# def init_day_view
# state.current_view = "day"
# set_state current_controll_component: ->{Native(t(WeekDay, {ref: "day", date: state.date, index: self}))}, current_view: "day"
# end
# def init_appointments_new(date)
# modal_open(
# "create appointment",
# t(Components::Appointments::Doctors::New, {date: date, on_appointment_created: ->(appo){self.on_appointment_created(appo)}})
# )
# end
# def init_appointments_show(appointment)
# modal_open(
# "appointment",
# t(Components::Appointments::Doctors::Show, {appointment: appointment})
# )
# end
# def init_appointments_edit(appointment)
# modal_open(
# "edit",
# t(Components::Appointments::Doctors::Edit, {id: appointment.id, passed_appointment: appointment,
# on_appointment_updated: ->(a){on_appointment_updated(a)}})
# )
# end
# def current_view
# self.ref(state.current_view).rb
# end
# def delete_appointment(appointment)
# appointment.destroy(namespace: 'doctor').then do |_appointment|
# current_view.state.appointments.remove(appointment)
# current_view.set_state appointments: current_view.state.appointments
# current_view.prepare_availability if state.current_view == "day"
# end
# end
# def on_appointment_updated(appointment)
# current_view.set_state appointments: current_view.state.appointments
# current_view.prepare_availability if state.current_view == "day"
# end
# def on_appointment_created(appo)
# current_view.state.appointments << appo
# current_view.set_state appointments: current_view.state.appointments
# current_view.prepare_availability if state.current_view == "day"
# modal_close
# end
# end
# class Month < RW
# expose
# def prepare_dates
# @cur_month = props.date.clone().startOf("month")
# @first_wday = @cur_month.day()
# @track_day = @cur_month.clone().subtract((@first_wday + 1), "days")
# end
# def queries(date)
# date = @date.clone()
# date.startOf("month")
# wd = date.day() + 1
# x = {}
# z = date.subtract((wd), "days")
# x[:to] = z.clone().add(weeks: 6, days: 1).format('YYYY-MM-DD')
# x[:from] = z.format('YYYY-MM-DD')
# x
# end
# def get_initial_state
# @date = props.date
# {
# appointments: ModelCollection.new
# }
# end
# def component_did_mount
# Appointment.index(component: self, namespace: "doctor", payload: {from: queries(props.date)[:from], to: queries(props.date)[:to], doctor_ids: [CurrentUser.user_instance.id]}).then do |appointments|
# set_state appointments: appointments
# end
# end
# def render
# prepare_dates
# t(:div, {},
# spinner,
# t(:button, {onClick: ->{prev_month}}, "<"),
# t(:button, {onClick: ->{next_month}}, ">"),
# t(:div, {className: "table", style: {display: "table", fontSize:"10px!important"}.to_n },
# t(:div, {className: "row", style: {display: "table-row"}.to_n },
# *splat_each(Services::Calendar.wdays) do |wday_name|
# t(:div, {className: "col-lg-1", style: {display: "table-cell", width: "12%"}.to_n }, wday_name)
# end,
# ),
# *splat_each(0..5) do |week_num|
# t_d = (@track_day).clone
# t(:div, {className: "row", style: {display: "table-row"}.to_n },
# t(:div, {},
# *splat_each(0..6) do |d|
# t_d_a = (@track_day.add(1, 'days')).clone()
# t(:div, {className: "col-lg-1", style: {"height" => "12em", display: "table-cell", width: "12%", overflow: "scroll"}.to_n },
# t(:div, {},
# t(:span, {}, @track_day.date())#,
# #t(:button, {onClick: ->{props.index.init_appointments_new(t_d_a)}}, "add appointment")
# ),
# t(:div, {},
# *splat_each(fetch_appointments(@track_day.format("YYYY-MM-DD"))) do |appointment|
# t(:span, {},
# "#{Moment.new(appointment.start_date).format("HH:mm")} -
# #{Moment.new(appointment.end_date).format("HH:mm")}",
# t(:button, {onClick: ->{props.index.init_appointments_show(appointment)}}, "show this"),
# t(:button, {onClick: ->{props.index.init_appointments_edit(appointment)}}, "edit this"),
# t(:button, {onClick: ->{props.index.delete_appointment(appointment)}}, "delete this"),
# t(:br, {}),
# "#{appointment.patient.profile.name}",
# t(:br, {}),
# "------------",
# t(:br, {})
# )
# end
# )
# )
# end
# )
# )
# end
# )
# )
# end
# def fetch_appointments(t_d)
# state.appointments.where do |a|
# next if a == nil
# a.attributes[:start_date].include? "#{t_d}"
# end
# end
# def handle(track_day)
# props.on_init_week_view(track_day)
# end
# def prev_month
# props.index.set_state date: (@date = props.index.state.date.subtract(1, "month"))
# component_did_mount
# end
# def next_month
# props.index.set_state date: (@date = props.index.state.date.add(1, "month"))
# component_did_mount
# end
# end
# class Week < RW
# expose
# def queries(date)
# x = {}
# date = props.date.clone().startOf("week")
# x[:from] = date.format('YYYY-MM-DD')
# x[:to] = date.add(8, 'days').format('YYYY-MM-DD')
# x
# end
# def get_initial_state
# {
# appointments: ModelCollection.new
# }
# end
# def component_did_mount
# Appointment.index(component: self, namespace: "doctor", payload: {from: queries(props.date)[:from], to: queries(props.date)[:to], doctor_ids: [CurrentUser.user_instance.id]}).then do |appointments|
# set_state appointments: appointments
# end
# end
# def fetch_appointments(t_d)
# state.appointments.where do |a|
# next if a == nil
# a.attributes[:start_date].include? "#{t_d}"
# end
# end
# def render
# t_d = @track_day = props.date.clone().subtract(1, 'days')
# t(:div, {},
# spinner,
# t(:button, {onClick: ->{prev_week}}, "<"),
# t(:button, {onClick: ->{next_week}}, ">"),
# t(:div, {className: "table", style: {display: "table", fontSize:"10px!important"}.to_n },
# t(:div, {className: "row", style: {display: "table-row"}.to_n },
# *splat_each(Services::Calendar.wdays) do |wday_name|
# t(:div, {className: "col-lg-1", style: {display: "table-cell", width: "12%"}.to_n }, wday_name)
# end,
# ),
# t(:div, {className: "row", style: {display: "table-row"}.to_n },
# t(:div, {},
# *splat_each(0..6) do |d|
# t_d_a = (@track_day.add(1, 'days')).clone()
# t(:div, {className: "col-lg-1", style: {display: "table-cell", width: "12%"}.to_n },
# t(:div, {},
# t(:span, {}, @track_day.date())#,
# #t(:button, {onClick: ->{props.index.init_appointments_new(t_d_a)}}, "add appointment")
# ),
# t(:div, {},
# *splat_each(fetch_appointments(@track_day.format("YYYY-MM-DD"))) do |appointment|
# t(:span, {},
# "#{Moment.new(appointment.start_date).format("HH:mm")} -
# #{Moment.new(appointment.end_date).format("HH:mm")}",
# t(:button, {onClick: ->{props.index.init_appointments_show(appointment)}}, "show this"),
# t(:button, {onClick: ->{props.index.init_appointments_edit(appointment)}}, "edit this"),
# t(:button, {onClick: ->{props.index.delete_appointment(appointment)}}, "delete this"),
# t(:br, {}),
# "#{appointment.patient.profile.name}",
# t(:br, {}),
# "------------",
# t(:br, {})
# )
# end
# )
# )
# end
# )
# )
# )
# )
# end
# def prev_week
# props.index.set_state date: (props.index.state.date.subtract(7, 'days'))
# component_did_mount
# end
# def next_week
# props.index.set_state date: (props.index.state.date.add(7, 'days'))
# component_did_mount
# end
# end
# class WeekDay < RW
# expose
# #date: the moment the state is on
# def get_initial_state
# {
# appointments: ModelCollection.new,
# available: []
# }
# end
# def component_did_mount
# Appointment.index(component: self, namespace: "doctor", payload: {from: "#{props.date.format('YYYY-MM-DD')}", to: "#{props.date.clone().add(1, 'days').format('YYYY-MM-DD')}", doctor_ids: [CurrentUser.user_instance.id]}).then do |appointments|
# set_state appointments: appointments
# begin
# prepare_availability
# rescue Exception => e
# p e
# end
# end
# end
# def render
# t(:div, {className: "row"},
# spinner,
# t(:div, {className: "col-lg-6"},
# t(:button, {onClick: ->{prev_day}}, "<"),
# t(:button, {onClick: ->{next_day}}, ">"),
# t(:p, {}, "Today is #{props.date.format('YYYY-MM-DD HH:mm')}"),
# *splat_each(state.appointments) do |appointment|
# t(:span, {},
# "#{Moment.new(appointment.start_date).format("HH:mm")} -
# #{Moment.new(appointment.end_date).format("HH:mm")}",
# t(:button, {onClick: ->{props.index.init_appointments_show(appointment)}}, "show this"),
# t(:button, {onClick: ->{props.index.init_appointments_edit(appointment)}}, "edit this"),
# t(:button, {onClick: ->{props.index.delete_appointment(appointment)}}, "delete this"),
# t(:br, {}),
# "#{appointment.patient.profile.name}",
# t(:br, {}),
# "------------",
# t(:br, {})
# )
# end
# ),
# t(:div, {className: 'col-lg-6'},
# t(:p, {}, "here ll be appointment planning for day"),
# *splat_each(state.available) do |av|
# t(:p, {}, "#{av[:start].format("HH:mm")} - #{av[:end].format("HH:mm")}")
# end
# )
# )
# end
# def prepare_availability
# state.appointments.sort! do |x, y|
# x.end_date <=> y.start_date
# end
# available = []
# data = state.appointments.data
# if data.length > 0
# x = props.date.clone().set(hour: 9, minute: 0)
# y = Moment.new(data[0].start_date)
# d = y.diff(x, 'minutes')
# p "init dif : #{d}"
# if d > 20
# available << {start: x, end: y}
# end
# 0..data.length.times do |i|
# y = Moment.new(data[i].end_date)
# if (i + 1 == data.length)
# x = y.clone().set(hour: 19, minute: 0)
# else
# x = Moment.new(data[i + 1].start_date)
# end
# d = x.diff(y, 'minutes')
# if d >= 30
# available << {start: y, end: x}
# end
# end
# else
# available << {start: props.date.clone().set(hour: 9, minute: 0), end: props.date.clone().set(hour: 19, minute: 0)}
# end
# set_state available: available
# end
# def prev_day
# props.index.set_state date: (props.index.state.date.subtract(1, 'day'))
# component_did_mount
# end
# def next_day
# props.index.set_state date: (props.index.state.date.add(1, 'day'))
# component_did_mount
# end
# end
end
end
end
| 37.988984 | 263 | 0.4706 |
39adcede25d29fc88a1820d22b9899f6c332f865 | 10,553 | #encoding: us-ascii
=begin
Copyright (C) 2007 Stephan Maka <[email protected]>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
=end
require 'socket'
require 'resolv'
require 'socksify/debug'
class SOCKSError < RuntimeError
def initialize(msg)
Socksify::debug_error("#{self.class}: #{msg}")
super
end
class ServerFailure < SOCKSError
def initialize
super("general SOCKS server failure")
end
end
class NotAllowed < SOCKSError
def initialize
super("connection not allowed by ruleset")
end
end
class NetworkUnreachable < SOCKSError
def initialize
super("Network unreachable")
end
end
class HostUnreachable < SOCKSError
def initialize
super("Host unreachable")
end
end
class ConnectionRefused < SOCKSError
def initialize
super("Connection refused")
end
end
class TTLExpired < SOCKSError
def initialize
super("TTL expired")
end
end
class CommandNotSupported < SOCKSError
def initialize
super("Command not supported")
end
end
class AddressTypeNotSupported < SOCKSError
def initialize
super("Address type not supported")
end
end
def self.for_response_code(code)
case code
when 1
ServerFailure
when 2
NotAllowed
when 3
NetworkUnreachable
when 4
HostUnreachable
when 5
ConnectionRefused
when 6
TTLExpired
when 7
CommandNotSupported
when 8
AddressTypeNotSupported
else
self
end
end
end
class TCPSocket
@@socks_version ||= "5"
def self.socks_version
(@@socks_version == "4a" or @@socks_version == "4") ? "\004" : "\005"
end
def self.socks_version=(version)
@@socks_version = version.to_s
end
def self.socks_server
@@socks_server ||= nil
end
def self.socks_server=(host)
@@socks_server = host
end
def self.socks_port
@@socks_port ||= nil
end
def self.socks_port=(port)
@@socks_port = port
end
def self.socks_username
@@socks_username ||= nil
end
def self.socks_username=(username)
@@socks_username = username
end
def self.socks_password
@@socks_password ||= nil
end
def self.socks_password=(password)
@@socks_password = password
end
def self.socks_ignores
@@socks_ignores ||= %w(localhost)
end
def self.socks_ignores=(ignores)
@@socks_ignores = ignores
end
class SOCKSConnectionPeerAddress < String
attr_reader :socks_server, :socks_port
def initialize(socks_server, socks_port, peer_host)
@socks_server, @socks_port = socks_server, socks_port
super peer_host
end
def inspect
"#{to_s} (via #{@socks_server}:#{@socks_port})"
end
def peer_host
to_s
end
end
alias :initialize_tcp :initialize
# See http://tools.ietf.org/html/rfc1928
def initialize(host=nil, port=0, local_host=nil, local_port=nil)
if host.is_a?(SOCKSConnectionPeerAddress)
socks_peer = host
socks_server = socks_peer.socks_server
socks_port = socks_peer.socks_port
socks_ignores = []
host = socks_peer.peer_host
else
socks_server = self.class.socks_server
socks_port = self.class.socks_port
socks_ignores = self.class.socks_ignores
end
if socks_server and socks_port and not socks_ignores.include?(host)
Socksify::debug_notice "Connecting to SOCKS server #{socks_server}:#{socks_port}"
initialize_tcp socks_server, socks_port
socks_authenticate unless @@socks_version =~ /^4/
if host
socks_connect(host, port)
end
else
Socksify::debug_notice "Connecting directly to #{host}:#{port}"
initialize_tcp host, port, local_host, local_port
Socksify::debug_debug "Connected to #{host}:#{port}"
end
end
# Authentication
def socks_authenticate
if self.class.socks_username || self.class.socks_password
Socksify::debug_debug "Sending username/password authentication"
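      # SOCKS5 greeting: VER = 0x05, NMETHODS = 1, METHODS = [0x02] (username/password, RFC 1929)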
write "\005\001\002"
else
Socksify::debug_debug "Sending no authentication"
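      # SOCKS5 greeting: VER = 0x05, NMETHODS = 1, METHODS = [0x00] (no authentication required)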
write "\005\001\000"
end
Socksify::debug_debug "Waiting for authentication reply"
auth_reply = recv(2)
if auth_reply.empty?
raise SOCKSError.new("Server doesn't reply authentication")
end
if auth_reply[0..0] != "\004" and auth_reply[0..0] != "\005"
raise SOCKSError.new("SOCKS version #{auth_reply[0..0]} not supported")
end
if self.class.socks_username || self.class.socks_password
if auth_reply[1..1] != "\002"
raise SOCKSError.new("SOCKS authentication method #{auth_reply[1..1]} neither requested nor supported")
end
auth = "\001"
auth += self.class.socks_username.to_s.length.chr
auth += self.class.socks_username.to_s
auth += self.class.socks_password.to_s.length.chr
auth += self.class.socks_password.to_s
write auth
auth_reply = recv(2)
if auth_reply[1..1] != "\000"
raise SOCKSError.new("SOCKS authentication failed")
end
else
if auth_reply[1..1] != "\000"
raise SOCKSError.new("SOCKS authentication method #{auth_reply[1..1]} neither requested nor supported")
end
end
end
# Connect
def socks_connect(host, port)
port = Socket.getservbyname(port) if port.is_a?(String)
req = String.new
Socksify::debug_debug "Sending destination address"
req << TCPSocket.socks_version
Socksify::debug_debug TCPSocket.socks_version.unpack "H*"
req << "\001"
req << "\000" if @@socks_version == "5"
req << [port].pack('n') if @@socks_version =~ /^4/
if @@socks_version == "4"
host = Resolv::DNS.new.getaddress(host).to_s
end
Socksify::debug_debug host
if host =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/ # to IPv4 address
req << "\001" if @@socks_version == "5"
_ip = [$1.to_i,
$2.to_i,
$3.to_i,
$4.to_i
].pack('CCCC')
req << _ip
elsif host =~ /^[:0-9a-f]+$/ # to IPv6 address
raise "TCP/IPv6 over SOCKS is not yet supported (inet_pton missing in Ruby & not supported by Tor)"
req << "\004"
else # to hostname
if @@socks_version == "5"
req << "\003" + [host.size].pack('C') + host
else
req << "\000\000\000\001"
req << "\007\000"
Socksify::debug_notice host
req << host
req << "\000"
end
end
req << [port].pack('n') if @@socks_version == "5"
write req
socks_receive_reply
Socksify::debug_notice "Connected to #{host}:#{port} over SOCKS"
end
# returns [bind_addr: String, bind_port: Fixnum]
def socks_receive_reply
Socksify::debug_debug "Waiting for SOCKS reply"
if @@socks_version == "5"
connect_reply = recv(4)
if connect_reply.empty?
raise SOCKSError.new("Server doesn't reply")
end
Socksify::debug_debug connect_reply.unpack "H*"
if connect_reply[0..0] != "\005"
raise SOCKSError.new("SOCKS version #{connect_reply[0..0]} is not 5")
end
if connect_reply[1..1] != "\000"
raise SOCKSError.for_response_code(connect_reply.bytes.to_a[1])
end
Socksify::debug_debug "Waiting for bind_addr"
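      # ATYP byte of the reply: 0x01 = IPv4 (4 bytes), 0x03 = domain name (length-prefixed), 0x04 = IPv6 (16 bytes)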
bind_addr_len = case connect_reply[3..3]
when "\001"
4
when "\003"
recv(1).bytes.first
when "\004"
16
else
raise SOCKSError.for_response_code(connect_reply.bytes.to_a[3])
end
bind_addr_s = recv(bind_addr_len)
bind_addr = case connect_reply[3..3]
when "\001"
bind_addr_s.bytes.to_a.join('.')
when "\003"
bind_addr_s
when "\004" # Untested!
i = 0
ip6 = ""
bind_addr_s.each_byte do |b|
if i > 0 and i % 2 == 0
ip6 += ":"
end
i += 1
ip6 += b.to_s(16).rjust(2, '0')
end
end
bind_port = recv(bind_addr_len + 2)
[bind_addr, bind_port.unpack('n')]
else
connect_reply = recv(8)
unless connect_reply[0] == "\000" and connect_reply[1] == "\x5A"
Socksify::debug_debug connect_reply.unpack 'H'
raise SOCKSError.new("Failed while connecting througth socks")
end
end
end
end
module Socksify
def self.resolve(host)
s = TCPSocket.new
begin
req = String.new
Socksify::debug_debug "Sending hostname to resolve: #{host}"
req << "\005"
if host =~ /^(\d+)\.(\d+)\.(\d+)\.(\d+)$/ # to IPv4 address
req << "\xF1\000\001" + [$1.to_i,
$2.to_i,
$3.to_i,
$4.to_i
].pack('CCCC')
elsif host =~ /^[:0-9a-f]+$/ # to IPv6 address
raise "TCP/IPv6 over SOCKS is not yet supported (inet_pton missing in Ruby & not supported by Tor)"
req << "\004"
else # to hostname
req << "\xF0\000\003" + [host.size].pack('C') + host
end
req << [0].pack('n') # Port
s.write req
addr, _port = s.socks_receive_reply
Socksify::debug_notice "Resolved #{host} as #{addr} over SOCKS"
addr
ensure
s.close
end
end
def self.proxy(server, port)
default_server = TCPSocket::socks_server
default_port = TCPSocket::socks_port
begin
TCPSocket::socks_server = server
TCPSocket::socks_port = port
yield
ensure
TCPSocket::socks_server = default_server
TCPSocket::socks_port = default_port
end
end
end
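# A minimal usage sketch (assumes a SOCKS5 proxy, e.g. Tor, listening on
# localhost:9050; the host and port are illustrative):
#
#   require 'socksify'
#
#   TCPSocket.socks_server = "127.0.0.1"
#   TCPSocket.socks_port   = 9050
#   TCPSocket.new("example.com", 80)       # tunnelled through the proxy
#
#   # ...or scope the proxy settings to a block:
#   Socksify.proxy("127.0.0.1", 9050) do
#     TCPSocket.new("example.com", 80)
#   end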
| 28.912329 | 111 | 0.604188 |
d585a00e1bc6c83c23d161fec50ed4f350bfc421 | 358 | require 'fileutils'
include FileUtils
require 'rubygems'
%w[rake hoe newgem rubigen madeleine chronic].each do |req_gem|
begin
require req_gem
rescue LoadError
puts "This Rakefile requires the '#{req_gem}' RubyGem."
puts "Installation: gem install #{req_gem} -y"
exit
end
end
$:.unshift(File.join(File.dirname(__FILE__), %w[.. lib]))
| 22.375 | 63 | 0.709497 |
01230512c11e33a781565a76ee2d7849655dc143 | 1,286 | class Links < Formula
desc "Lynx-like WWW browser that supports tables, menus, etc."
homepage "http://links.twibright.com/"
url "http://links.twibright.com/download/links-2.18.tar.bz2"
sha256 "678cc1ab347cc90732b1925a11db7fbe12ce883fcca631f91696453a83819057"
bottle do
root_url "https://linuxbrew.bintray.com/bottles"
cellar :any
sha256 "eaa37994f1e8b6c4b940e2d9ef8bb54e87ad78a9abf2e2318f3b876793b18e7c" => :mojave
sha256 "8a2c300d607c3ded56d50d7c953ec9b574de9d734701ab4399cce3a472c95996" => :high_sierra
sha256 "d069f957d246521cafefeabc64104504f286a8634e97731f729ab6592acd47dd" => :sierra
sha256 "9a026ae16f4dc5b24205433e2d88f841c2c07559ad8e3f7da52d4930d1d1dbb4" => :x86_64_linux
end
depends_on "pkg-config" => :build
depends_on "jpeg"
depends_on "librsvg"
depends_on "libtiff"
depends_on "openssl"
depends_on "linuxbrew/xorg/xorg" unless OS.mac?
def install
args = %W[
--disable-debug
--disable-dependency-tracking
--prefix=#{prefix}
--mandir=#{man}
--with-ssl=#{Formula["openssl"].opt_prefix}
--without-lzma
]
system "./configure", *args
system "make", "install"
doc.install Dir["doc/*"]
end
test do
system bin/"links", "-dump", "https://duckduckgo.com"
end
end
| 30.619048 | 94 | 0.722395 |
03db0faefffd25fb76fe2c82f9712e7f24f3585a | 1,820 | module Voltron
module Upload
module Generators
class InstallGenerator < Rails::Generators::Base
source_root File.expand_path("../../../templates", __FILE__)
desc "Add Voltron Upload initializer"
def inject_initializer
voltron_initializer_path = Rails.root.join("config", "initializers", "voltron.rb")
unless File.exist? voltron_initializer_path
unless system("cd #{Rails.root.to_s} && rails generate voltron:install")
puts "Voltron initializer does not exist. Please ensure you have the 'voltron' gem installed and run `rails g voltron:install` to create it"
return false
end
end
current_initializer = File.read voltron_initializer_path
unless current_initializer.match(Regexp.new(/# === Voltron Upload Configuration ===/))
inject_into_file(voltron_initializer_path, after: "Voltron.setup do |config|\n") do
<<-CONTENT
# === Voltron Upload Configuration ===
# Whether or not calls to file_field should generate markup for dropzone uploads
# If false, simply returns what file_field would return normally
# config.upload.enabled = true
# Global defaults for Dropzone's with a defined preview template
# Should be a hash of keys matching a preview partial name,
# with a value hash containing any of the Dropzone configuration options
# found at http://www.dropzonejs.com/#configuration-options
config.upload.previews = {
vertical_tile: {
thumbnailWidth: 200,
thumbnailHeight: 175,
dictRemoveFile: 'Remove',
dictCancelUpload: 'Cancel'
},
horizontal_tile: {
dictRemoveFile: 'Remove',
dictCancelUpload: 'Cancel'
}
}
CONTENT
end
end
end
end
end
end
end | 31.929825 | 154 | 0.669231 |
1d9193dcabb69719a5b63651fc02c0955ac1dc8e | 38,757 | # typed: false
# frozen_string_literal: true
require "formula"
require "keg"
require "tab"
require "utils/bottles"
require "caveats"
require "cleaner"
require "formula_cellar_checks"
require "install_renamed"
require "sandbox"
require "development_tools"
require "cache_store"
require "linkage_checker"
require "install"
require "messages"
require "cask/cask_loader"
require "cmd/install"
require "find"
require "utils/spdx"
require "deprecate_disable"
require "unlink"
require "service"
# Installer for a formula.
#
# @api private
class FormulaInstaller
extend T::Sig
include FormulaCellarChecks
extend Predicable
attr_reader :formula
attr_accessor :options, :link_keg
attr_predicate :installed_as_dependency?, :installed_on_request?
attr_predicate :show_summary_heading?, :show_header?
attr_predicate :force_bottle?, :ignore_deps?, :only_deps?, :interactive?, :git?, :force?, :keep_tmp?
attr_predicate :verbose?, :debug?, :quiet?
# TODO: Remove when removed from `test-bot`.
attr_writer :build_bottle
def initialize(
formula,
link_keg: false,
installed_as_dependency: false,
installed_on_request: true,
show_header: false,
build_bottle: false,
force_bottle: false,
bottle_arch: nil,
ignore_deps: false,
only_deps: false,
include_test_formulae: [],
build_from_source_formulae: [],
env: nil,
git: false,
interactive: false,
keep_tmp: false,
cc: nil,
options: Options.new,
force: false,
debug: false,
quiet: false,
verbose: false
)
@formula = formula
@env = env
@force = force
@keep_tmp = keep_tmp
@link_keg = !formula.keg_only? || link_keg
@show_header = show_header
@ignore_deps = ignore_deps
@only_deps = only_deps
@build_from_source_formulae = build_from_source_formulae
@build_bottle = build_bottle
@bottle_arch = bottle_arch
@formula.force_bottle ||= force_bottle
@force_bottle = @formula.force_bottle
@include_test_formulae = include_test_formulae
@interactive = interactive
@git = git
@cc = cc
@verbose = verbose
@quiet = quiet
@debug = debug
@installed_as_dependency = installed_as_dependency
@installed_on_request = installed_on_request
@options = options
@requirement_messages = []
@poured_bottle = false
@start_time = nil
end
def self.attempted
@attempted ||= Set.new
end
sig { void }
def self.clear_attempted
@attempted = Set.new
end
def self.installed
@installed ||= Set.new
end
sig { void }
def self.clear_installed
@installed = Set.new
end
sig { returns(T::Boolean) }
def build_from_source?
@build_from_source_formulae.include?(formula.full_name)
end
sig { returns(T::Boolean) }
def include_test?
@include_test_formulae.include?(formula.full_name)
end
sig { returns(T::Boolean) }
def build_bottle?
return false unless @build_bottle
!formula.bottle_disabled?
end
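  # Whether this install can use a prebuilt bottle: there must be a bottle for
  # this tag (or a local bottle file), no build-from-source/interactive/custom
  # option flags may be in effect, and the bottle's cellar and prefix must be
  # compatible with this installation's locations.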
sig { params(output_warning: T::Boolean).returns(T::Boolean) }
def pour_bottle?(output_warning: false)
return false if !formula.bottle_tag? && !formula.local_bottle_path
return true if force_bottle?
return false if build_from_source? || build_bottle? || interactive?
return false if @cc
return false unless options.empty?
return false if formula.bottle_disabled?
unless formula.pour_bottle?
if output_warning && formula.pour_bottle_check_unsatisfied_reason
opoo <<~EOS
Building #{formula.full_name} from source:
#{formula.pour_bottle_check_unsatisfied_reason}
EOS
end
return false
end
bottle = formula.bottle_specification
unless bottle.compatible_locations?
if output_warning
opoo <<~EOS
Building #{formula.full_name} from source as the bottle needs:
- HOMEBREW_CELLAR: #{bottle.cellar} (yours is #{HOMEBREW_CELLAR})
- HOMEBREW_PREFIX: #{bottle.prefix} (yours is #{HOMEBREW_PREFIX})
EOS
end
return false
end
true
end
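  # A dependency can still be poured from a bottle even when the top-level
  # formula is built from source, provided the dependency itself was not
  # requested from source, has a bottle built without custom options, and that
  # bottle fits this prefix.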
sig { params(dep: Formula, build: BuildOptions).returns(T::Boolean) }
def install_bottle_for?(dep, build)
return pour_bottle? if dep == formula
@build_from_source_formulae.exclude?(dep.full_name) &&
dep.bottle.present? &&
dep.pour_bottle? &&
build.used_options.empty? &&
dep.bottle&.compatible_locations?
end
sig { void }
def prelude
type, reason = DeprecateDisable.deprecate_disable_info formula
if type.present?
case type
when :deprecated
if reason.present?
opoo "#{formula.full_name} has been deprecated because it #{reason}!"
else
opoo "#{formula.full_name} has been deprecated!"
end
when :disabled
if reason.present?
raise CannotInstallFormulaError, "#{formula.full_name} has been disabled because it #{reason}!"
end
raise CannotInstallFormulaError, "#{formula.full_name} has been disabled!"
end
end
Tab.clear_cache
verify_deps_exist unless ignore_deps?
forbidden_license_check
check_install_sanity
end
sig { void }
def verify_deps_exist
begin
compute_dependencies
rescue TapFormulaUnavailableError => e
raise if e.tap.installed?
e.tap.install
retry
end
rescue FormulaUnavailableError => e
e.dependent = formula.full_name
raise
end
def check_install_sanity
raise FormulaInstallationAlreadyAttemptedError, formula if self.class.attempted.include?(formula)
if force_bottle? && !pour_bottle?
raise CannotInstallFormulaError, "--force-bottle passed but #{formula.full_name} has no bottle!"
end
if Homebrew.default_prefix? &&
# TODO: re-enable this on Linux when we merge linuxbrew-core into
# homebrew-core and have full bottle coverage.
(OS.mac? || ENV["CI"]) &&
!build_from_source? && !build_bottle? && !formula.head? &&
formula.tap&.core_tap? && !formula.bottle_unneeded? &&
# Integration tests override homebrew-core locations
ENV["HOMEBREW_TEST_TMPDIR"].nil? &&
!pour_bottle?
message = if !formula.pour_bottle? && formula.pour_bottle_check_unsatisfied_reason
formula_message = formula.pour_bottle_check_unsatisfied_reason
formula_message[0] = formula_message[0].downcase
<<~EOS
#{formula}: #{formula_message}
EOS
# don't want to complain about no bottle available if doing an
# upgrade/reinstall/dependency install (but do in the case the bottle
# check fails)
elsif !Homebrew::EnvConfig.developer? &&
(!installed_as_dependency? || !formula.any_version_installed?) &&
(!OS.mac? || !OS::Mac.outdated_release?)
<<~EOS
#{formula}: no bottle available!
EOS
end
if message
message += <<~EOS
You can try to install from source with:
brew install --build-from-source #{formula}
Please note building from source is unsupported. You will encounter build
failures with some formulae. If you experience any issues please create pull
requests instead of asking for help on Homebrew's GitHub, Twitter or any other
official channels.
EOS
raise CannotInstallFormulaError, message
end
end
return if ignore_deps?
if Homebrew::EnvConfig.developer?
# `recursive_dependencies` trims cyclic dependencies, so we do one level and take the recursive deps of that.
# Mapping direct dependencies to deeper dependencies in a hash is also useful for the cyclic output below.
recursive_dep_map = formula.deps.to_h { |dep| [dep, dep.to_formula.recursive_dependencies] }
cyclic_dependencies = []
recursive_dep_map.each do |dep, recursive_deps|
if [formula.name, formula.full_name].include?(dep.name)
cyclic_dependencies << "#{formula.full_name} depends on itself directly"
elsif recursive_deps.any? { |rdep| [formula.name, formula.full_name].include?(rdep.name) }
cyclic_dependencies << "#{formula.full_name} depends on itself via #{dep.name}"
end
end
if cyclic_dependencies.present?
raise CannotInstallFormulaError, <<~EOS
#{formula.full_name} contains a recursive dependency on itself:
#{cyclic_dependencies.join("\n ")}
EOS
end
# Merge into one list
recursive_deps = recursive_dep_map.flat_map { |dep, rdeps| [dep] + rdeps }
Dependency.merge_repeats(recursive_deps)
else
recursive_deps = formula.recursive_dependencies
end
invalid_arch_dependencies = []
pinned_unsatisfied_deps = []
recursive_deps.each do |dep|
if (tab = Tab.for_formula(dep.to_formula)) && tab.arch.present? && tab.arch.to_s != Hardware::CPU.arch.to_s
invalid_arch_dependencies << "#{dep} was built for #{tab.arch}"
end
next unless dep.to_formula.pinned?
next if dep.satisfied?(inherited_options_for(dep))
next if dep.build? && pour_bottle?
pinned_unsatisfied_deps << dep
end
if invalid_arch_dependencies.present?
raise CannotInstallFormulaError, <<~EOS
#{formula.full_name} dependencies not built for the #{Hardware::CPU.arch} CPU architecture:
#{invalid_arch_dependencies.join("\n ")}
EOS
end
return if pinned_unsatisfied_deps.empty?
raise CannotInstallFormulaError,
"You must `brew unpin #{pinned_unsatisfied_deps * " "}` as installing " \
"#{formula.full_name} requires the latest version of pinned dependencies"
end
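  # Snapshots the contents of etc/var before building a bottle so that any files
  # added during the install can be copied under the bottle prefix afterwards.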
def build_bottle_preinstall
@etc_var_dirs ||= [HOMEBREW_PREFIX/"etc", HOMEBREW_PREFIX/"var"]
@etc_var_preinstall = Find.find(*@etc_var_dirs.select(&:directory?)).to_a
end
def build_bottle_postinstall
@etc_var_postinstall = Find.find(*@etc_var_dirs.select(&:directory?)).to_a
(@etc_var_postinstall - @etc_var_preinstall).each do |file|
Pathname.new(file).cp_path_sub(HOMEBREW_PREFIX, formula.bottle_prefix)
end
end
sig { void }
def install
lock
start_time = Time.now
if !formula.bottle_unneeded? && !pour_bottle? && DevelopmentTools.installed?
Homebrew::Install.perform_build_from_source_checks
end
    # This is not done in initialize so that upgrade can unlink the active keg
    # before calling this method (but after instantiating this class), avoiding
    # an unnecessary relink of the active keg where possible (relinking is slow).
if formula.linked_keg.directory?
message = <<~EOS
#{formula.name} #{formula.linked_version} is already installed
EOS
if formula.outdated? && !formula.head?
message += <<~EOS
To upgrade to #{formula.pkg_version}, run:
brew upgrade #{formula.full_name}
EOS
elsif only_deps?
message = nil
else
# some other version is already installed *and* linked
message += <<~EOS
To install #{formula.pkg_version}, first run:
brew unlink #{formula.name}
EOS
end
raise CannotInstallFormulaError, message if message
end
# Warn if a more recent version of this formula is available in the tap.
begin
if formula.pkg_version < (v = Formulary.factory(formula.full_name, force_bottle: force_bottle?).pkg_version)
opoo "#{formula.full_name} #{v} is available and more recent than version #{formula.pkg_version}."
end
rescue FormulaUnavailableError
nil
end
check_conflicts
raise UnbottledError, [formula] if !pour_bottle? && !formula.bottle_unneeded? && !DevelopmentTools.installed?
unless ignore_deps?
deps = compute_dependencies
if ((pour_bottle? && !DevelopmentTools.installed?) || build_bottle?) &&
(unbottled = unbottled_dependencies(deps)).presence
# Check that each dependency in deps has a bottle available, terminating
        # abnormally with an UnbottledError if one or more don't.
raise UnbottledError, unbottled
end
install_dependencies(deps)
end
return if only_deps?
formula.deprecated_flags.each do |deprecated_option|
old_flag = deprecated_option.old_flag
new_flag = deprecated_option.current_flag
opoo "#{formula.full_name}: #{old_flag} was deprecated; using #{new_flag} instead!"
end
options = display_options(formula).join(" ")
oh1 "Installing #{Formatter.identifier(formula.full_name)} #{options}".strip if show_header?
unless formula.tap&.private?
action = "#{formula.full_name} #{options}".strip
Utils::Analytics.report_event("install", action)
Utils::Analytics.report_event("install_on_request", action) if installed_on_request?
end
self.class.attempted << formula
if pour_bottle?
begin
pour
rescue Exception # rubocop:disable Lint/RescueException
# any exceptions must leave us with nothing installed
ignore_interrupts do
begin
formula.prefix.rmtree if formula.prefix.directory?
rescue Errno::EACCES, Errno::ENOTEMPTY
odie <<~EOS
Could not remove #{formula.prefix.basename} keg! Do so manually:
sudo rm -rf #{formula.prefix}
EOS
end
formula.rack.rmdir_if_possible
end
raise
else
@poured_bottle = true
end
end
puts_requirement_messages
build_bottle_preinstall if build_bottle?
unless @poured_bottle
build
clean
# Store the formula used to build the keg in the keg.
formula_contents = if formula.local_bottle_path
Utils::Bottles.formula_contents formula.local_bottle_path, name: formula.name
else
formula.path.read
end
s = formula_contents.gsub(/ bottle do.+?end\n\n?/m, "")
brew_prefix = formula.prefix/".brew"
brew_prefix.mkdir
Pathname(brew_prefix/"#{formula.name}.rb").atomic_write(s)
keg = Keg.new(formula.prefix)
tab = Tab.for_keg(keg)
tab.installed_as_dependency = installed_as_dependency?
tab.installed_on_request = installed_on_request?
tab.write
end
build_bottle_postinstall if build_bottle?
opoo "Nothing was installed to #{formula.prefix}" unless formula.latest_version_installed?
end_time = Time.now
Homebrew.messages.formula_installed(formula, end_time - start_time)
end
def check_conflicts
return if force?
conflicts = formula.conflicts.select do |c|
f = Formulary.factory(c.name)
rescue TapFormulaUnavailableError
# If the formula name is a fully-qualified name let's silently
# ignore it as we don't care about things used in taps that aren't
# currently tapped.
false
rescue FormulaUnavailableError => e
# If the formula name doesn't exist any more then complain but don't
# stop installation from continuing.
opoo <<~EOS
#{formula}: #{e.message}
'conflicts_with \"#{c.name}\"' should be removed from #{formula.path.basename}.
EOS
raise if Homebrew::EnvConfig.developer?
$stderr.puts "Please report this issue to the #{formula.tap} tap (not Homebrew/brew or Homebrew/core)!"
false
else
f.linked_keg.exist? && f.opt_prefix.exist?
end
raise FormulaConflictError.new(formula, conflicts) unless conflicts.empty?
end
# Compute and collect the dependencies needed by the formula currently
# being installed.
def compute_dependencies
@compute_dependencies ||= begin
check_requirements(expand_requirements)
expand_dependencies
end
end
def unbottled_dependencies(deps)
deps.map(&:first).map(&:to_formula).reject do |dep_f|
next false unless dep_f.pour_bottle?
dep_f.bottle_unneeded? || dep_f.bottled?
end
end
def compute_and_install_dependencies
deps = compute_dependencies
install_dependencies(deps)
end
def check_requirements(req_map)
@requirement_messages = []
fatals = []
req_map.each_pair do |dependent, reqs|
reqs.each do |req|
next if dependent.latest_version_installed? && req.name == "maximummacos"
@requirement_messages << "#{dependent}: #{req.message}"
fatals << req if req.fatal?
end
end
return if fatals.empty?
puts_requirement_messages
raise UnsatisfiedRequirements, fatals
end
def runtime_requirements(formula)
runtime_deps = formula.runtime_formula_dependencies(undeclared: false)
recursive_requirements = formula.recursive_requirements do |dependent, _|
Requirement.prune unless runtime_deps.include?(dependent)
end
(recursive_requirements.to_a + formula.requirements.to_a).reject(&:build?).uniq
end
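  # Walks the recursive requirements of the formula and collects those that are
  # unsatisfied, pruning build/test requirements that are not needed for this
  # install. Returns a Hash mapping each dependent formula to its unsatisfied
  # requirements.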
def expand_requirements
unsatisfied_reqs = Hash.new { |h, k| h[k] = [] }
formulae = [formula]
formula_deps_map = formula.recursive_dependencies
.index_by(&:name)
while (f = formulae.pop)
runtime_requirements = runtime_requirements(f)
f.recursive_requirements do |dependent, req|
build = effective_build_options_for(dependent)
install_bottle_for_dependent = install_bottle_for?(dependent, build)
keep_build_test = false
keep_build_test ||= runtime_requirements.include?(req)
keep_build_test ||= req.test? && include_test? && dependent == f
keep_build_test ||= req.build? && !install_bottle_for_dependent && !dependent.latest_version_installed?
if req.prune_from_option?(build) ||
req.satisfied?(env: @env, cc: @cc, build_bottle: @build_bottle, bottle_arch: @bottle_arch) ||
((req.build? || req.test?) && !keep_build_test) ||
formula_deps_map[dependent.name]&.build?
Requirement.prune
else
unsatisfied_reqs[dependent] << req
end
end
end
unsatisfied_reqs
end
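  # Expands the formula's dependency tree, pruning build/test dependencies that
  # are not needed, skipping dependencies that are already satisfied and adding
  # Homebrew's bottle/relocation dependencies when a bottle will be poured.
  # Returns an array of [dependency, inherited options] pairs.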
def expand_dependencies
inherited_options = Hash.new { |hash, key| hash[key] = Options.new }
pour_bottle = pour_bottle?
# Cache for this expansion only. FormulaInstaller has a lot of inputs which can alter expansion.
cache_key = "FormulaInstaller-#{formula.full_name}-#{Time.now.to_f}"
expanded_deps = Dependency.expand(formula, cache_key: cache_key) do |dependent, dep|
inherited_options[dep.name] |= inherited_options_for(dep)
build = effective_build_options_for(
dependent,
inherited_options.fetch(dependent.name, []),
)
keep_build_test = false
keep_build_test ||= dep.test? && include_test? && @include_test_formulae.include?(dependent.full_name)
keep_build_test ||= dep.build? && !install_bottle_for?(dependent, build) &&
(formula.head? || !dependent.latest_version_installed?)
if dep.prune_from_option?(build) || ((dep.build? || dep.test?) && !keep_build_test)
Dependency.prune
elsif dep.satisfied?(inherited_options[dep.name])
Dependency.skip
else
pour_bottle ||= install_bottle_for?(dep.to_formula, build)
end
end
if pour_bottle && !Keg.bottle_dependencies.empty?
bottle_deps = if Keg.bottle_dependencies.exclude?(formula.name)
Keg.bottle_dependencies
elsif Keg.relocation_formulae.exclude?(formula.name)
Keg.relocation_formulae
else
[]
end
bottle_deps = bottle_deps.map { |formula| Dependency.new(formula) }
.reject do |dep|
inherited_options[dep.name] |= inherited_options_for(dep)
dep.satisfied? inherited_options[dep.name]
end
expanded_deps = Dependency.merge_repeats(bottle_deps + expanded_deps)
end
expanded_deps.map { |dep| [dep, inherited_options[dep.name]] }
end
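  # The effective build options for a dependent formula: previously used options
  # plus the requested (or inherited) options, restricted to the options the
  # dependent actually defines.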
def effective_build_options_for(dependent, inherited_options = [])
args = dependent.build.used_options
args |= (dependent == formula) ? options : inherited_options
args |= Tab.for_formula(dependent).used_options
args &= dependent.options
BuildOptions.new(args, dependent.options)
end
def display_options(formula)
options = if formula.head?
["--HEAD"]
else
[]
end
options += effective_build_options_for(formula).used_options.to_a
options
end
sig { params(dep: Dependency).returns(Options) }
def inherited_options_for(dep)
inherited_options = Options.new
u = Option.new("universal")
if (options.include?(u) || formula.require_universal_deps?) && !dep.build? && dep.to_formula.option_defined?(u)
inherited_options << u
end
inherited_options
end
sig { params(deps: T::Array[[Formula, Options]]).void }
def install_dependencies(deps)
if deps.empty? && only_deps?
puts "All dependencies for #{formula.full_name} are satisfied."
elsif !deps.empty?
oh1 "Installing dependencies for #{formula.full_name}: " \
"#{deps.map(&:first).map(&Formatter.method(:identifier)).to_sentence}",
truncate: false
deps.each { |dep, options| install_dependency(dep, options) }
end
@show_header = true unless deps.empty?
end
sig { params(dep: Dependency).void }
def fetch_dependency(dep)
df = dep.to_formula
fi = FormulaInstaller.new(
df,
force_bottle: false,
# When fetching we don't need to recurse the dependency tree as it's already
# been done for us in `compute_dependencies` and there's no requirement to
# fetch in a particular order.
ignore_deps: true,
installed_as_dependency: true,
include_test_formulae: @include_test_formulae,
build_from_source_formulae: @build_from_source_formulae,
keep_tmp: keep_tmp?,
force: force?,
debug: debug?,
quiet: quiet?,
verbose: verbose?,
)
fi.prelude
fi.fetch
end
sig { params(dep: Dependency, inherited_options: Options).void }
def install_dependency(dep, inherited_options)
df = dep.to_formula
if df.linked_keg.directory?
linked_keg = Keg.new(df.linked_keg.resolved_path)
tab = Tab.for_keg(linked_keg)
keg_had_linked_keg = true
keg_was_linked = linked_keg.linked?
linked_keg.unlink
end
if df.latest_version_installed?
installed_keg = Keg.new(df.prefix)
tab ||= Tab.for_keg(installed_keg)
tmp_keg = Pathname.new("#{installed_keg}.tmp")
installed_keg.rename(tmp_keg)
end
if df.tap.present? && tab.present? && (tab_tap = tab.source["tap"].presence) &&
df.tap.to_s != tab_tap.to_s
odie <<~EOS
#{df} is already installed from #{tab_tap}!
        Please `brew uninstall #{df}` first.
EOS
end
options = Options.new
options |= tab.used_options if tab.present?
options |= Tab.remap_deprecated_options(df.deprecated_options, dep.options)
options |= inherited_options
options &= df.options
fi = FormulaInstaller.new(
df,
**{
options: options,
link_keg: keg_had_linked_keg ? keg_was_linked : nil,
installed_as_dependency: true,
installed_on_request: df.any_version_installed? && tab.present? && tab.installed_on_request,
force_bottle: false,
include_test_formulae: @include_test_formulae,
build_from_source_formulae: @build_from_source_formulae,
keep_tmp: keep_tmp?,
force: force?,
debug: debug?,
quiet: quiet?,
verbose: verbose?,
},
)
oh1 "Installing #{formula.full_name} dependency: #{Formatter.identifier(dep.name)}"
fi.install
fi.finish
rescue Exception => e # rubocop:disable Lint/RescueException
ignore_interrupts do
tmp_keg.rename(installed_keg) if tmp_keg && !installed_keg.directory?
linked_keg.link(verbose: verbose?) if keg_was_linked
end
raise unless e.is_a? FormulaInstallationAlreadyAttemptedError
# We already attempted to install f as part of another formula's
# dependency tree. In that case, don't generate an error, just move on.
nil
else
ignore_interrupts { tmp_keg.rmtree if tmp_keg&.directory? }
end
sig { void }
def caveats
return if only_deps?
audit_installed if Homebrew::EnvConfig.developer?
return if !installed_on_request? || installed_as_dependency?
caveats = Caveats.new(formula)
return if caveats.empty?
@show_summary_heading = true
ohai "Caveats", caveats.to_s
Homebrew.messages.record_caveats(formula, caveats)
end
sig { void }
def finish
return if only_deps?
ohai "Finishing up" if verbose?
install_service
keg = Keg.new(formula.prefix)
link(keg)
fix_dynamic_linkage(keg) if !@poured_bottle || !formula.bottle_specification.skip_relocation?
if build_bottle?
ohai "Not running 'post_install' as we're building a bottle"
puts "You can run it manually using:"
puts " brew postinstall #{formula.full_name}"
else
post_install
end
# Updates the cache for a particular formula after doing an install
CacheStoreDatabase.use(:linkage) do |db|
break unless db.created?
LinkageChecker.new(keg, formula, cache_db: db, rebuild_cache: true)
end
# Update tab with actual runtime dependencies
tab = Tab.for_keg(keg)
Tab.clear_cache
f_runtime_deps = formula.runtime_dependencies(read_from_tab: false)
tab.runtime_dependencies = Tab.runtime_deps_hash(formula, f_runtime_deps)
tab.write
# let's reset Utils::Git.available? if we just installed git
Utils::Git.clear_available_cache if formula.name == "git"
# use installed curl when it's needed and available
if formula.name == "curl" &&
!DevelopmentTools.curl_handles_most_https_certificates?
ENV["HOMEBREW_CURL"] = formula.opt_bin/"curl"
end
caveats
ohai "Summary" if verbose? || show_summary_heading?
puts summary
self.class.installed << formula
ensure
unlock
end
sig { returns(String) }
def summary
s = +""
s << "#{Homebrew::EnvConfig.install_badge} " unless Homebrew::EnvConfig.no_emoji?
s << "#{formula.prefix.resolved_path}: #{formula.prefix.abv}"
s << ", built in #{pretty_duration build_time}" if build_time
s.freeze
end
def build_time
@build_time ||= Time.now - @start_time if @start_time && !interactive?
end
sig { returns(T::Array[String]) }
def sanitized_argv_options
args = []
args << "--ignore-dependencies" if ignore_deps?
if build_bottle?
args << "--build-bottle"
args << "--bottle-arch=#{@bottle_arch}" if @bottle_arch
end
args << "--git" if git?
args << "--interactive" if interactive?
args << "--verbose" if verbose?
args << "--debug" if debug?
args << "--cc=#{@cc}" if @cc
args << "--keep-tmp" if keep_tmp?
if @env.present?
args << "--env=#{@env}"
elsif formula.env.std? || formula.deps.select(&:build?).any? { |d| d.name == "scons" }
args << "--env=std"
end
args << "--HEAD" if formula.head?
args
end
sig { returns(T::Array[String]) }
def build_argv
sanitized_argv_options + options.as_flags
end
sig { void }
def build
FileUtils.rm_rf(formula.logs)
@start_time = Time.now
# 1. formulae can modify ENV, so we must ensure that each
    # installation has a pristine ENV when it starts; forking now is
# the easiest way to do this
args = [
"nice",
*HOMEBREW_RUBY_EXEC_ARGS,
"--",
HOMEBREW_LIBRARY_PATH/"build.rb",
formula.specified_path,
].concat(build_argv)
Utils.safe_fork do
if Sandbox.available?
sandbox = Sandbox.new
formula.logs.mkpath
sandbox.record_log(formula.logs/"build.sandbox.log")
sandbox.allow_write_path(ENV["HOME"]) if interactive?
sandbox.allow_write_temp_and_cache
sandbox.allow_write_log(formula)
sandbox.allow_cvs
sandbox.allow_fossil
sandbox.allow_write_xcode
sandbox.allow_write_cellar(formula)
sandbox.exec(*args)
else
exec(*args)
end
end
formula.update_head_version
raise "Empty installation" if !formula.prefix.directory? || Keg.new(formula.prefix).empty_installation?
rescue Exception => e # rubocop:disable Lint/RescueException
if e.is_a? BuildError
e.formula = formula
e.options = display_options(formula)
end
ignore_interrupts do
# any exceptions must leave us with nothing installed
formula.update_head_version
formula.prefix.rmtree if formula.prefix.directory?
formula.rack.rmdir_if_possible
end
raise e
end
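  # Links the new keg into HOMEBREW_PREFIX (or only creates the opt link when
  # linking is disabled, e.g. for keg-only formulae). Conflicting files that the
  # formula is allowed to overwrite are backed up first and restored if an
  # unexpected error occurs.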
sig { params(keg: Keg).void }
def link(keg)
Formula.clear_cache
unless link_keg
begin
keg.optlink(verbose: verbose?)
rescue Keg::LinkError => e
ofail "Failed to create #{formula.opt_prefix}"
puts "Things that depend on #{formula.full_name} will probably not build."
puts e
end
return
end
cask_installed_with_formula_name = begin
Cask::CaskLoader.load(formula.name).installed?
rescue Cask::CaskUnavailableError, Cask::CaskInvalidError
false
end
if cask_installed_with_formula_name
ohai "#{formula.name} cask is installed, skipping link."
return
end
if keg.linked?
opoo "This keg was marked linked already, continuing anyway"
keg.remove_linked_keg_record
end
Homebrew::Unlink.unlink_versioned_formulae(formula, verbose: verbose?)
link_overwrite_backup = {} # Hash: conflict file -> backup file
backup_dir = HOMEBREW_CACHE/"Backup"
begin
keg.link(verbose: verbose?)
rescue Keg::ConflictError => e
conflict_file = e.dst
if formula.link_overwrite?(conflict_file) && !link_overwrite_backup.key?(conflict_file)
backup_file = backup_dir/conflict_file.relative_path_from(HOMEBREW_PREFIX).to_s
backup_file.parent.mkpath
FileUtils.mv conflict_file, backup_file
link_overwrite_backup[conflict_file] = backup_file
retry
end
ofail "The `brew link` step did not complete successfully"
puts "The formula built, but is not symlinked into #{HOMEBREW_PREFIX}"
puts e
puts
puts "Possible conflicting files are:"
keg.link(dry_run: true, overwrite: true, verbose: verbose?)
@show_summary_heading = true
rescue Keg::LinkError => e
ofail "The `brew link` step did not complete successfully"
puts "The formula built, but is not symlinked into #{HOMEBREW_PREFIX}"
puts e
puts
puts "You can try again using:"
puts " brew link #{formula.name}"
@show_summary_heading = true
rescue Exception => e # rubocop:disable Lint/RescueException
ofail "An unexpected error occurred during the `brew link` step"
puts "The formula built, but is not symlinked into #{HOMEBREW_PREFIX}"
puts e
puts e.backtrace if debug?
@show_summary_heading = true
ignore_interrupts do
keg.unlink
link_overwrite_backup.each do |origin, backup|
origin.parent.mkpath
FileUtils.mv backup, origin
end
end
raise
end
return if link_overwrite_backup.empty?
opoo "These files were overwritten during the `brew link` step:"
puts link_overwrite_backup.keys
puts
puts "They have been backed up to: #{backup_dir}"
@show_summary_heading = true
end
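  # Writes the formula's service files: a systemd unit when a service block is
  # defined, plus a launchd plist generated from the service block or taken from
  # a legacy plist definition.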
sig { void }
def install_service
if formula.service? && formula.plist
ofail "Formula specified both service and plist"
return
end
if formula.service?
service_path = formula.systemd_service_path
service_path.atomic_write(formula.service.to_systemd_unit)
service_path.chmod 0644
end
service = if formula.service?
formula.service.to_plist
elsif formula.plist
formula.plist
end
return unless service
plist_path = formula.plist_path
plist_path.atomic_write(service)
plist_path.chmod 0644
log = formula.var/"log"
log.mkpath if service.include? log.to_s
rescue Exception => e # rubocop:disable Lint/RescueException
ofail "Failed to install service files"
odebug e, e.backtrace
end
sig { params(keg: Keg).void }
def fix_dynamic_linkage(keg)
keg.fix_dynamic_linkage
rescue Exception => e # rubocop:disable Lint/RescueException
ofail "Failed to fix install linkage"
puts "The formula built, but you may encounter issues using it or linking other"
puts "formulae against it."
odebug e, e.backtrace
@show_summary_heading = true
end
sig { void }
def clean
ohai "Cleaning" if verbose?
Cleaner.new(formula).clean
rescue Exception => e # rubocop:disable Lint/RescueException
opoo "The cleaning step did not complete successfully"
puts "Still, the installation was successful, so we will link it into your prefix."
odebug e, e.backtrace
Homebrew.failed = true
@show_summary_heading = true
end
sig { void }
def post_install
args = %W[
nice #{RUBY_PATH}
#{ENV["HOMEBREW_RUBY_WARNINGS"]}
-I #{$LOAD_PATH.join(File::PATH_SEPARATOR)}
--
#{HOMEBREW_LIBRARY_PATH}/postinstall.rb
#{formula.path}
]
Utils.safe_fork do
if Sandbox.available?
sandbox = Sandbox.new
formula.logs.mkpath
sandbox.record_log(formula.logs/"postinstall.sandbox.log")
sandbox.allow_write_temp_and_cache
sandbox.allow_write_log(formula)
sandbox.allow_write_xcode
sandbox.deny_write_homebrew_repository
sandbox.allow_write_cellar(formula)
Keg::KEG_LINK_DIRECTORIES.each do |dir|
sandbox.allow_write_path "#{HOMEBREW_PREFIX}/#{dir}"
end
sandbox.exec(*args)
else
exec(*args)
end
end
rescue Exception => e # rubocop:disable Lint/RescueException
opoo "The post-install step did not complete successfully"
puts "You can try again using:"
puts " brew postinstall #{formula.full_name}"
odebug e, e.backtrace, always_display: Homebrew::EnvConfig.developer?
Homebrew.failed = true
@show_summary_heading = true
end
sig { void }
def fetch_dependencies
return if ignore_deps?
deps = compute_dependencies
return if deps.empty?
deps.each { |dep, _options| fetch_dependency(dep) }
end
sig { void }
def fetch
fetch_dependencies
return if only_deps?
if pour_bottle?(output_warning: true)
formula.fetch_bottle_tab
else
formula.fetch_patches
formula.resources.each(&:fetch)
end
downloader.fetch
end
def downloader
if (bottle_path = formula.local_bottle_path)
LocalBottleDownloadStrategy.new(bottle_path)
elsif pour_bottle?
formula.bottle
else
formula
end
end
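  # Unpacks the downloaded bottle into the Cellar, refreshes the keg's tab with
  # current metadata and rewrites placeholder paths inside the bottle to the
  # local cellar/prefix locations.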
sig { void }
def pour
HOMEBREW_CELLAR.cd do
downloader.stage
end
Tab.clear_cache
tab = Utils::Bottles.load_tab(formula)
# fill in missing/outdated parts of the tab
# keep in sync with Tab#to_bottle_json
tab.used_options = []
tab.unused_options = []
tab.built_as_bottle = true
tab.poured_from_bottle = true
tab.installed_as_dependency = installed_as_dependency?
tab.installed_on_request = installed_on_request?
tab.time = Time.now.to_i
tab.aliases = formula.aliases
tab.arch = Hardware::CPU.arch
tab.source["versions"]["stable"] = formula.stable.version.to_s
tab.source["versions"]["version_scheme"] = formula.version_scheme
tab.source["path"] = formula.specified_path.to_s
tab.source["tap_git_head"] = formula.tap&.git_head
tab.tap = formula.tap
tab.write
keg = Keg.new(formula.prefix)
skip_linkage = formula.bottle_specification.skip_relocation?
# TODO: Remove `with_env` when bottles are built with RPATH relocation enabled
# https://github.com/Homebrew/brew/issues/11329
with_env(HOMEBREW_RELOCATE_RPATHS: "1") do
keg.replace_placeholders_with_locations tab.changed_files, skip_linkage: skip_linkage
end
end
sig { params(output: T.nilable(String)).void }
def problem_if_output(output)
return unless output
opoo output
@show_summary_heading = true
end
def audit_installed
unless formula.keg_only?
problem_if_output(check_env_path(formula.bin))
problem_if_output(check_env_path(formula.sbin))
end
super
end
def self.locked
@locked ||= []
end
private
attr_predicate :hold_locks?
sig { void }
def lock
return unless self.class.locked.empty?
unless ignore_deps?
formula.recursive_dependencies.each do |dep|
self.class.locked << dep.to_formula
end
end
self.class.locked.unshift(formula)
self.class.locked.uniq!
self.class.locked.each(&:lock)
@hold_locks = true
end
sig { void }
def unlock
return unless hold_locks?
self.class.locked.each(&:unlock)
self.class.locked.clear
@hold_locks = false
end
def puts_requirement_messages
return unless @requirement_messages
return if @requirement_messages.empty?
$stderr.puts @requirement_messages
end
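  # Raises CannotInstallFormulaError if the formula, or any dependency being
  # installed, is licensed only under licenses listed in
  # Homebrew::EnvConfig.forbidden_licenses.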
sig { void }
def forbidden_license_check
forbidden_licenses = Homebrew::EnvConfig.forbidden_licenses.to_s.dup
SPDX::ALLOWED_LICENSE_SYMBOLS.each do |s|
pattern = /#{s.to_s.tr("_", " ")}/i
forbidden_licenses.sub!(pattern, s.to_s)
end
forbidden_licenses = forbidden_licenses.split.to_h do |license|
[license, SPDX.license_version_info(license)]
end
return if forbidden_licenses.blank?
return if ignore_deps?
compute_dependencies.each do |dep, _|
dep_f = dep.to_formula
next unless SPDX.licenses_forbid_installation? dep_f.license, forbidden_licenses
raise CannotInstallFormulaError, <<~EOS
The installation of #{formula.name} has a dependency on #{dep.name} where all its licenses are forbidden:
#{SPDX.license_expression_to_string dep_f.license}.
EOS
end
return if only_deps?
return unless SPDX.licenses_forbid_installation? formula.license, forbidden_licenses
raise CannotInstallFormulaError, <<~EOS
#{formula.name}'s licenses are all forbidden: #{SPDX.license_expression_to_string formula.license}.
EOS
end
end
| 30.589582 | 115 | 0.673685 |
ab51412e6ba44fddcd2f2605477e3d80480d39b9 | 397 | #!/usr/bin/env ruby
# This script simulates a flaky test by randomly
# "failing" approximately 1/8th of the time.
puts
puts 'Running test!'
def test_ok
puts 'Test OK!'
exit 0
end
def test_fail
puts 'The test failed! All is lost!'
exit 1
end
test_ok if ENV['ALWAYS_PASS'] == 'true'
test_fail if ENV['ALWAYS_FAIL'] == 'true'
if Random.rand(8) != 7
test_ok
else
test_fail
end
| 14.703704 | 48 | 0.68262 |
bf647ddd2f0cd57abdb576e801056dc66eef75c6 | 2,725 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Signalr::Mgmt::V2018_03_01_preview
module Models
#
# Object that describes a specific usage of SignalR resources.
#
class SignalRUsage
include MsRestAzure
# @return [String] Fully qualified ARM resource id
attr_accessor :id
# @return [Integer] Current value for the usage quota.
attr_accessor :current_value
# @return [Integer] The maximum permitted value for the usage quota. If
# there is no limit, this value will be -1.
attr_accessor :limit
# @return [SignalRUsageName] Localizable String object containing the
# name and a localized value.
attr_accessor :name
# @return [String] Representing the units of the usage quota. Possible
# values are: Count, Bytes, Seconds, Percent, CountPerSecond,
# BytesPerSecond.
attr_accessor :unit
#
# Mapper for SignalRUsage class as Ruby Hash.
# This will be used for serialization/deserialization.
#
def self.mapper()
{
client_side_validation: true,
required: false,
serialized_name: 'SignalRUsage',
type: {
name: 'Composite',
class_name: 'SignalRUsage',
model_properties: {
id: {
client_side_validation: true,
required: false,
serialized_name: 'id',
type: {
name: 'String'
}
},
current_value: {
client_side_validation: true,
required: false,
serialized_name: 'currentValue',
type: {
name: 'Number'
}
},
limit: {
client_side_validation: true,
required: false,
serialized_name: 'limit',
type: {
name: 'Number'
}
},
name: {
client_side_validation: true,
required: false,
serialized_name: 'name',
type: {
name: 'Composite',
class_name: 'SignalRUsageName'
}
},
unit: {
client_side_validation: true,
required: false,
serialized_name: 'unit',
type: {
name: 'String'
}
}
}
}
}
end
end
end
end
| 28.385417 | 77 | 0.502385 |
03ce489028b0b955a1e047d411036ae7e76bb772 | 1,298 | #
# a language data file for Ruby/CLDR
#
# Generated by: CLDR::Generator
#
# CLDR version: 1.3
#
# Original file name: common/main/vi_VN.xml
# Original file revision: 1.23 $
#
# Copyright (C) 2006 Masao Mutoh
#
# This file is distributed under the same license as the Ruby/CLDR.
#
private
def init_data
@format = "#,##0.00 ¤"
@before_match = "[:letter:]"
@before_match_surrounding = "[:digit:]"
@before_insertbetween = " "
@after_match = "[:letter:]"
@after_match_surrounding = "[:digit:]"
@after_insertbetween = " "
@currencies = {}
@currencies["EUR"] = "EUR"
@currencies["GBP"] = "GBP"
@currencies["INR"] = "INR"
@currencies["ITL"] = "ITL"
@currencies["JPY"] = "JPY"
@currencies["USD"] = "USD"
@currencies["VND"] = "đồng"
@symbols = {}
@symbols["EUR"] = "€"
@symbols["GBP"] = "£"
@symbols["INR"] = "=0#Rs.|1#Re.|1<Rs."
@symbols["ITL"] = "₤"
@symbols["JPY"] = "¥"
@symbols["USD"] = "$"
@symbols["VND"] = "đ"
end
public
attr_reader :format
attr_reader :before_match
attr_reader :before_match_surrounding
attr_reader :before_insertbetween
attr_reader :after_match
attr_reader :after_match_surrounding
attr_reader :after_insertbetween
attr_reader :currencies
attr_reader :symbols
| 24.490566 | 67 | 0.622496 |
ab361e20526d65dda19dde5c23cb0e8d90873fbd | 6,947 | module ModuleSpecs
CONST = :plain_constant
class Subclass < Module
end
class SubclassSpec
end
class RemoveClassVariable
end
module LookupModInMod
INCS = :ethereal
end
module LookupMod
include LookupModInMod
MODS = :rockers
end
class Lookup
include LookupMod
LOOKIE = :lookie
end
class LookupChild < Lookup
end
class Parent
# For private_class_method spec
def self.private_method; end
private_class_method :private_method
def undefed_method() end
undef_method :undefed_method
def parent_method; end
def another_parent_method; end
# For public_class_method spec
private
def self.public_method; end
public_class_method :public_method
public
def public_parent() end
protected
def protected_parent() end
private
def private_parent() end
end
module Basic
def public_module() end
protected
def protected_module() end
private
def private_module() end
end
module Super
include Basic
def public_super_module() end
protected
def protected_super_module() end
private
def private_super_module() end
def super_included_method; end
class SuperChild
end
end
module Internal
end
class Child < Parent
include Super
class << self
include Internal
end
attr_accessor :accessor_method
def undefed_child() end
def public_child() end
undef_method :parent_method
undef_method :another_parent_method
protected
def protected_child() end
private
def private_child() end
end
class Grandchild < Child
undef_method :super_included_method
end
class Child2 < Parent
attr_reader :foo
end
  # Be careful touching the Counts* classes as they're used for testing
  # private_instance_methods, public_instance_methods, etc. Adding or removing
  # a method will break those tests.
module CountsMixin
def public_3; end
public :public_3
def private_3; end
private :private_3
def protected_3; end
protected :protected_3
end
class CountsParent
include CountsMixin
def public_2; end
private
def private_2; end
protected
def protected_2; end
end
class CountsChild < CountsParent
def public_1; end
private
def private_1; end
protected
def protected_1; end
end
module AddConstant
end
module A
CONSTANT_A = :a
OVERRIDE = :a
def ma(); :a; end
def self.cma(); :a; end
end
module B
CONSTANT_B = :b
OVERRIDE = :b
include A
def mb(); :b; end
def self.cmb(); :b; end
end
class C
OVERRIDE = :c
include B
end
module Z
MODULE_SPEC_TOPLEVEL_CONSTANT = 1
end
module Alias
def report() :report end
alias publish report
end
class Allonym
include ModuleSpecs::Alias
end
class Aliasing
def self.make_alias(*a)
alias_method(*a)
end
def public_one; 1; end
def public_two(n); n * 2; end
private
def private_one; 1; end
protected
def protected_one; 1; end
end
module ReopeningModule
def foo; true; end
module_function :foo
private :foo
end
# Yes, we want to re-open the module
module ReopeningModule
alias :foo2 :foo
module_function :foo2
end
module Nesting
@tests = {}
def self.[](name); @tests[name]; end
def self.[]=(name, val); @tests[name] = val; end
def self.meta; class << self; self; end; end
Nesting[:basic] = Module.nesting
module ::ModuleSpecs
Nesting[:open_first_level] = Module.nesting
end
class << self
Nesting[:open_meta] = Module.nesting
end
def self.called_from_module_method
Module.nesting
end
class NestedClass
Nesting[:nest_class] = Module.nesting
def self.called_from_class_method
Module.nesting
end
def called_from_inst_method
Module.nesting
end
end
end
Nesting[:first_level] = Module.nesting
module InstanceMethMod
def bar(); :bar; end
end
class InstanceMeth
include InstanceMethMod
def foo(); :foo; end
end
class InstanceMethChild < InstanceMeth
end
module ClassVars
class A
@@a_cvar = :a_cvar
end
module M
@@m_cvar = :m_cvar
end
class B < A
include M
@@b_cvar = :b_cvar
end
end
class CVars
@@cls = :class
class << self
def cls
@@cls
end
@@meta = :meta
end
def self.meta
@@meta
end
def meta
@@meta
end
end
module MVars
@@mvar = :mvar
end
class SubModule < Module
attr_reader :special
def initialize
@special = 10
end
end
module MA; end
module MB
include MA
end
module MC; end
class MultipleIncludes
include MB
end
# empty modules
module M1; end
module M2; end
module Autoload
def self.use_ex1
begin
begin
raise "test exception"
rescue ModuleSpecs::Autoload::EX1
end
rescue RuntimeError
return :good
end
end
end
  # This class isn't inherited from or included anywhere. It exists to test
# 1.9's constant scoping rules
class Detached
DETATCHED_CONSTANT = :d
end
class ParentPrivateMethodRedef
private
def private_method_redefined
:before_redefinition
end
end
class ChildPrivateMethodMadePublic < ParentPrivateMethodRedef
public :private_method_redefined
end
class ParentPrivateMethodRedef
def private_method_redefined
:after_redefinition
end
end
module CyclicAppendA
end
module CyclicAppendB
include CyclicAppendA
end
module ExtendObject
C = :test
def test_method
"hello test"
end
end
module ExtendObjectPrivate
class << self
def extend_object(obj)
ScratchPad.record :extended
end
private :extend_object
end
end
end
class Object
def module_specs_public_method_on_object; end
def module_specs_private_method_on_object; end
private :module_specs_private_method_on_object
def module_specs_protected_method_on_object; end
protected :module_specs_private_method_on_object
def module_specs_private_method_on_object_for_kernel_public; end
private :module_specs_private_method_on_object_for_kernel_public
def module_specs_public_method_on_object_for_kernel_protected; end
def module_specs_public_method_on_object_for_kernel_private; end
end
module Kernel
def module_specs_public_method_on_kernel; end
alias_method :module_specs_alias_on_kernel, :module_specs_public_method_on_object
public :module_specs_private_method_on_object_for_kernel_public
protected :module_specs_public_method_on_object_for_kernel_protected
private :module_specs_public_method_on_object_for_kernel_private
end
ModuleSpecs::Nesting[:root_level] = Module.nesting
| 16.86165 | 83 | 0.685188 |
f79ae55ba15b8f884b2753be7835863ace44dda2 | 585 | module Admin
class DegreesController < Admin::ApplicationController
# To customize the behavior of this controller,
# you can overwrite any of the RESTful actions. For example:
#
# def index
# super
# @resources = Degree.
# page(params[:page]).
# per(10)
# end
# Define a custom finder by overriding the `find_resource` method:
# def find_resource(param)
# Degree.find_by!(slug: param)
# end
# See https://administrate-prototype.herokuapp.com/customizing_controller_actions
# for more information
end
end
| 26.590909 | 85 | 0.663248 |
1cd107b501b12ac242dbbe6d3262c60265247cbe | 263 | module Fitbark
module Data
# Defines structure for dog medical condition.
#
# Original attribute names from source API:
# - *id*
# - *name*
class MedicalCondition < StrictOpenStruct
include Fitbark::Data::Shared
end
end
end
| 20.230769 | 50 | 0.653992 |
39081a19f35c32ebabe51936005fcba0b5ccee93 | 749 | # frozen_string_literal: true
module Api
class RatesController < ::ApplicationController
def historical
dates = (params.require(:start_date).to_date..params.require(:end_date).to_date).to_a
return unless stale? CityHash.hash32([dates.first, dates.last]), template: false
rates = FetchHistoricalRate.new
render json: HistoricalRateSerializer.render(rates.perform(dates))
end
def live
timestamp = Time.zone.now.beginning_of_day - params.require(:utc_offset).to_i.minutes
rates = Rate.where('live_timestamp >= ?', timestamp)
return unless stale? rates.cache_key, template: false
rates = rates.order(:live_timestamp)
render json: LiveRateSerializer.render(rates)
end
end
end
| 32.565217 | 91 | 0.720961 |
21af81093f8bb2884fb188160cd5c343c29fa65d | 1,957 | # coding: utf-8
lib = File.expand_path("lib", __dir__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require "metanorma/iso/version"
Gem::Specification.new do |spec|
spec.name = "metanorma-iso"
spec.version = Metanorma::ISO::VERSION
spec.authors = ["Ribose Inc."]
spec.email = ["[email protected]"]
spec.summary = "metanorma-iso lets you write ISO standards "\
"in AsciiDoc."
spec.description = <<~DESCRIPTION
metanorma-iso lets you write ISO standards in AsciiDoc syntax.
This gem is in active development.
Formerly known as asciidoctor-iso.
DESCRIPTION
spec.homepage = "https://github.com/metanorma/metanorma-iso"
spec.license = "BSD-2-Clause"
spec.bindir = "bin"
spec.require_paths = ["lib"]
spec.files = `git ls-files`.split("\n")
spec.test_files = `git ls-files -- {spec}/*`.split("\n")
spec.required_ruby_version = Gem::Requirement.new(">= 2.5.0")
spec.add_dependency "isodoc", "~> 1.7.0"
spec.add_dependency "metanorma-standoc", "~> 1.10.0"
spec.add_dependency "mnconvert", "~> 1.8.0"
spec.add_dependency "ruby-jing"
spec.add_dependency "tokenizer", "~> 0.3.0"
spec.add_dependency "twitter_cldr"
spec.add_development_dependency "byebug"
spec.add_development_dependency "equivalent-xml", "~> 0.6"
spec.add_development_dependency "guard", "~> 2.14"
spec.add_development_dependency "guard-rspec", "~> 4.7"
spec.add_development_dependency "iev", "~> 0.2.0"
spec.add_development_dependency "rake", "~> 13.0"
spec.add_development_dependency "rspec", "~> 3.6"
spec.add_development_dependency "rubocop", "~> 1.5.2"
spec.add_development_dependency "sassc", "2.4.0"
spec.add_development_dependency "simplecov", "~> 0.15"
spec.add_development_dependency "timecop", "~> 0.9"
spec.add_development_dependency "vcr", "~> 5.0.0"
spec.add_development_dependency "webmock"
end
| 36.924528 | 69 | 0.677568 |
03152f11c88fafd806ee8d7acac9d9847a005d55 | 206 | class User < ApplicationRecord
has_secure_password
validates :username, :email, presence: true
validates :username, :email, uniqueness: true
has_many :lost_pets
has_many :found_pets
end
| 25.75 | 49 | 0.742718 |
011ed1f0d2a9daea4d4798fb570570180229bf89 | 35,009 | require 'test_helper'
class PersonTest < ActiveSupport::TestCase
fixtures :users, :people
# Replace this with your real tests.
def test_work_groups
p=Factory(:person_in_multiple_projects)
assert_equal 3,p.work_groups.size
end
test "registered user's profile can be edited by" do
admin = Factory(:admin)
project_administrator = Factory(:project_administrator)
project_administrator2 = Factory(:project_administrator)
person = Factory :person,:group_memberships=>[Factory(:group_membership,:work_group=>project_administrator.group_memberships.first.work_group)]
another_person = Factory :person
assert_equal person.projects,project_administrator.projects
assert_not_equal person.projects,project_administrator2.projects
assert person.can_be_edited_by?(person.user)
assert !person.can_be_edited_by?(project_administrator.user),"should not be editable by the project administrator of the same project, as user is registered"
assert person.can_be_edited_by?(admin.user)
assert !person.can_be_edited_by?(another_person.user)
    assert !person.can_be_edited_by?(project_administrator2.user),"should not be editable by the project administrator of another project"
assert person.can_be_edited_by?(person), "You can also ask by passing in a person"
assert !person.can_be_edited_by?(project_administrator),"You can also ask by passing in a person"
end
test "userless profile can be edited by" do
admin = Factory(:admin)
project_administrator = Factory(:project_administrator)
project_administrator2 = Factory(:project_administrator)
profile = Factory :brand_new_person,:group_memberships=>[Factory(:group_membership,:work_group=>project_administrator.group_memberships.first.work_group)]
another_person = Factory :person
assert_equal profile.projects,project_administrator.projects
assert_not_equal profile.projects,project_administrator2.projects
assert profile.can_be_edited_by?(project_administrator.user),"should be editable by the project administrator of the same project, as user is not registered"
assert profile.can_be_edited_by?(admin.user)
assert !profile.can_be_edited_by?(another_person.user)
    assert !profile.can_be_edited_by?(project_administrator2.user),"should not be editable by the project administrator of another project"
assert profile.can_be_edited_by?(project_administrator),"You can also ask by passing in a person"
end
test "me?" do
person = Factory(:person)
refute person.me?
User.current_user = person.user
assert person.me?
person = Factory(:brand_new_person)
assert_nil person.user
refute person.me?
User.current_user = nil
refute person.me?
end
test "programmes" do
person1=Factory(:person)
prog = Factory(:programme,:projects=>person1.projects)
prog2 = Factory(:programme)
assert_includes person1.programmes,prog
refute_includes person1.programmes,prog2
end
test "can be administered by" do
admin = Factory(:admin)
admin2 = Factory(:admin)
project_administrator = Factory(:project_administrator)
person_in_same_project = Factory :person,:group_memberships=>[Factory(:group_membership,:work_group=>project_administrator.group_memberships.first.work_group)]
person_in_different_project = Factory :person
assert admin.can_be_administered_by?(admin.user),"admin can administer themself"
assert admin2.can_be_administered_by?(admin.user),"admin can administer another admin"
assert project_administrator.can_be_administered_by?(admin.user),"admin should be able to administer another project administrator"
assert person_in_same_project.can_be_administered_by?(project_administrator.user),"project administrator should be able to administer someone from same project"
assert person_in_different_project.can_be_administered_by?(project_administrator.user),"project administrator should be able to administer someone from another project"
assert !project_administrator.can_be_administered_by?(person_in_same_project.user),"a normal person cannot administer someone else"
assert !project_administrator.can_be_administered_by?(project_administrator.user),"project administrator should not administer himself"
assert !person_in_same_project.can_be_administered_by?(person_in_same_project.user), "person should not administer themself"
assert !person_in_same_project.can_be_administered_by?(nil)
assert project_administrator.can_be_administered_by?(admin),"you can also ask by passing a person"
assert person_in_same_project.can_be_administered_by?(project_administrator),"you can also ask by passing a person"
#can be administered by a programme administrator
pa = Factory :programme_administrator
assert Factory(:person).can_be_administered_by?(pa.user)
end
test "project administrator cannot edit an admin within their project" do
admin = Factory(:admin)
project_administrator = Factory(:project_administrator,:group_memberships=>[Factory(:group_membership,:work_group=>admin.group_memberships.first.work_group)])
assert !(admin.projects & project_administrator.projects).empty?
assert !admin.can_be_edited_by?(project_administrator)
end
  #checks the updated_at doesn't get artificially changed between creation and reloading
def test_updated_at
person = Factory(:person, :updated_at=>1.week.ago)
updated_at = person.updated_at
person = Person.find(person.id)
assert_equal updated_at.to_s,person.updated_at.to_s
end
test "to_rdf" do
object = Factory :person, :skype_name=>"skypee",:email=>"[email protected]"
Factory(:study,:contributor=>object)
Factory(:investigation,:contributor=>object)
Factory(:assay,:contributor=>object)
Factory(:assay,:contributor=>object)
Factory(:assets_creator,:creator=>object)
Factory(:assets_creator,:asset=>Factory(:sop),:creator=>object)
object.web_page="http://google.com"
disable_authorization_checks do
object.save!
end
object.reload
rdf = object.to_rdf
RDF::Reader.for(:rdfxml).new(rdf) do |reader|
assert reader.statements.count > 1
assert_equal RDF::URI.new("http://localhost:3000/people/#{object.id}"), reader.statements.first.subject
assert reader.has_triple? ["http://localhost:3000/people/#{object.id}",RDF::FOAF.mbox_sha1sum,"b507549e01d249ee5ed98bd40e4d86d1470a13b8"]
end
end
test "orcid id validation" do
p = Factory :person
p.orcid = nil
assert p.valid?
p.orcid = "sdff-1111-1111-1111"
assert !p.valid?
p.orcid = "1111111111111111"
assert !p.valid?
p.orcid = "0000-0002-1694-2339"
assert !p.valid?,"checksum doesn't match"
p.orcid = "0000-0002-1694-233X"
assert p.valid?
p.orcid = "http://orcid.org/0000-0002-1694-233X"
assert p.valid?
p.orcid = "http://orcid.org/0000-0003-2130-0865"
assert p.valid?
end
test "orcid_uri" do
disable_authorization_checks do
p = Factory :person
p.orcid = "http://orcid.org/0000-0003-2130-0865"
assert p.valid?
p.save!
p.reload
assert_equal "http://orcid.org/0000-0003-2130-0865",p.orcid_uri
p.orcid = "0000-0002-1694-233X"
p.save!
p.reload
assert_equal "http://orcid.org/0000-0002-1694-233X",p.orcid_uri
p.orcid=nil
p.save!
p.reload
assert_nil p.orcid_uri
p.orcid=""
p.save!
p.reload
assert_nil p.orcid_uri
end
end
test "email uri" do
p = Factory :person, :email=>"sfkh^[email protected]"
assert_equal "mailto:sfkh%[email protected]",p.email_uri
end
test "only first admin person" do
Person.delete_all
person = Factory :admin
assert person.only_first_admin_person?
person.is_admin=false
disable_authorization_checks{person.save!}
assert !person.only_first_admin_person?
person.is_admin=true
disable_authorization_checks{person.save!}
assert person.only_first_admin_person?
Factory :person
assert !person.only_first_admin_person?
end
def test_active_ordered_by_updated_at_and_avatar_not_null
Person.delete_all
avatar = Factory :avatar
people = []
people << Factory(:person,:avatar=>avatar, :updated_at=>1.week.ago)
people << Factory(:person,:avatar=>avatar, :updated_at=>1.minute.ago)
people << Factory(:person,:updated_at=>1.day.ago)
people << Factory(:person,:updated_at=>1.hour.ago)
people << Factory(:person,:updated_at=>2.minutes.ago)
sorted = Person.all.sort do |x,y|
if x.avatar.nil? == y.avatar.nil?
y.updated_at <=> x.updated_at
else
if x.avatar.nil?
1
else
-1
end
end
end
assert_equal sorted, Person.active
end
def test_ordered_by_last_name
sorted = Person.all.sort_by do |p|
lname = "" || p.last_name.try(:downcase)
fname = "" || p.first_name.try(:downcase)
lname+fname
end
assert_equal sorted, Person.all
end
def test_is_asset
assert !Person.is_asset?
assert !people(:quentin_person).is_asset?
assert !people(:quentin_person).is_downloadable_asset?
end
def test_member_of
p=Factory :person
proj = Factory :project
assert !p.projects.empty?
assert p.member_of?(p.projects.first)
assert !p.member_of?(proj)
end
def test_avatar_key
p=people(:quentin_person)
assert_nil p.avatar_key
assert p.defines_own_avatar?
end
def test_first_person_is_admin
assert Person.count>0 #should already be people from fixtures
p=Person.new(:first_name=>"XXX",:email=>"[email protected]")
p.save!
assert !p.is_admin?, "Should not automatically be admin, since people already exist"
Person.delete_all
assert_equal 0,Person.count #no people should exist
p=Person.new(:first_name=>"XXX",:email=>"[email protected]")
p.save
p.reload
assert p.is_admin?, "Should automatically be admin, since it is the first created person"
end
test "first person in default project" do
Factory(:person) #make sure there is a person, project and institution registered
assert Person.count>0
assert Project.count>0
p=Person.new(:first_name=>"XXX",:email=>"[email protected]")
p.save!
assert !p.is_admin?, "Should not automatically be admin, since people already exist"
assert_empty p.projects
assert_empty p.institutions
Person.delete_all
project = Project.first
institution = project.institutions.first
refute_nil project
refute_nil institution
assert_equal 0,Person.count #no people should exist
p=Person.new(:first_name=>"XXX",:email=>"[email protected]")
p.save!
p.reload
assert_equal [project],p.projects
assert_equal [institution],p.institutions
end
def test_registered
registered=Person.registered
registered.each do |p|
assert_not_nil p.user
end
assert registered.include?(people(:quentin_person))
assert !registered.include?(people(:person_without_user))
end
def test_duplicates
dups=Person.duplicates
assert !dups.empty?
assert dups.include?(people(:duplicate_1))
assert dups.include?(people(:duplicate_2))
end
test "without group" do
no_group = Factory(:brand_new_person)
in_group = Factory(:person)
assert no_group.projects.empty?
assert !in_group.projects.empty?
all = Person.without_group
assert !all.include?(in_group)
assert all.include?(no_group)
end
test "with group" do
no_group = Factory(:brand_new_person)
in_group = Factory(:person)
assert no_group.projects.empty?
assert !in_group.projects.empty?
all = Person.with_group
assert all.include?(in_group)
assert !all.include?(no_group)
end
def test_expertise
p=Factory :person
Factory :expertise,:value=>"golf",:annotatable=>p
Factory :expertise,:value=>"fishing",:annotatable=>p
Factory :tool,:value=>"sbml",:annotatable=>p
assert_equal 2, p.expertise.size
p=Factory :person
Factory :expertise,:value=>"golf",:annotatable=>p
Factory :tool,:value=>"sbml",:annotatable=>p
assert_equal 1, p.expertise.size
assert_equal "golf",p.expertise[0].text
end
def test_tools
p=Factory :person
Factory :tool,:value=>"sbml",:annotatable=>p
Factory :tool,:value=>"java",:annotatable=>p
Factory :expertise,:value=>"sbml",:annotatable=>p
assert_equal 2, p.tools.size
p=Factory :person
Factory :tool,:value=>"sbml",:annotatable=>p
Factory :expertise,:value=>"fishing",:annotatable=>p
assert_equal 1, p.tools.size
assert_equal "sbml",p.tools[0].text
end
def test_assign_expertise
p=Factory :person
User.with_current_user p.user do
assert_equal 0,p.expertise.size
assert_difference("Annotation.count",2) do
assert_difference("TextValue.count",2) do
p.expertise = ["golf","fishing"]
end
end
assert_equal 2,p.expertise.size
assert p.expertise.collect{|e| e.text}.include?("golf")
assert p.expertise.collect{|e| e.text}.include?("fishing")
assert_difference("Annotation.count",-1) do
assert_no_difference("TextValue.count") do
p.expertise = ["golf"]
end
end
assert_equal 1,p.expertise.size
assert_equal "golf",p.expertise[0].text
p2=Factory :person
assert_difference("Annotation.count") do
assert_no_difference("TextValue.count") do
p2.expertise = ["golf"]
end
end
end
end
def test_assigns_tools
p=Factory :person
User.with_current_user p.user do
assert_equal 0,p.tools.size
assert_difference("Annotation.count",2) do
assert_difference("TextValue.count",2) do
p.tools = ["golf","fishing"]
end
end
assert_equal 2,p.tools.size
assert p.tools.collect{|e| e.text}.include?("golf")
assert p.tools.collect{|e| e.text}.include?("fishing")
assert_difference("Annotation.count",-1) do
assert_no_difference("TextValue.count") do
p.tools = ["golf"]
end
end
assert_equal 1,p.tools.size
assert_equal "golf",p.tools[0].text
p2=Factory :person
assert_difference("Annotation.count") do
assert_no_difference("TextValue.count") do
p2.tools = ["golf"]
end
end
end
end
def test_removes_previously_assigned
p=Factory :person
User.with_current_user p.user do
p.tools = ["one","two"]
assert_equal 2,p.tools.size
p.tools = ["three"]
assert_equal 1,p.tools.size
assert_equal "three",p.tools[0].text
p=Factory :person
p.expertise = ["aaa","bbb"]
assert_equal 2,p.expertise.size
p.expertise = ["ccc"]
assert_equal 1,p.expertise.size
assert_equal "ccc",p.expertise[0].text
end
end
def test_expertise_and_tools_with_same_name
p=Factory :person
User.with_current_user p.user do
assert_difference("Annotation.count",2) do
assert_difference("TextValue.count",2) do
p.tools = ["golf","fishing"]
end
end
assert_difference("Annotation.count",2) do
assert_no_difference("TextValue.count") do
p.expertise = ["golf","fishing"]
end
end
end
end
def test_institutions
person = Factory(:person_in_multiple_projects)
institution = person.group_memberships.first.work_group.institution
institution2 = Factory(:institution)
assert_equal 3,person.institutions.count
assert person.institutions.include?(institution)
assert !person.institutions.include?(institution2)
end
def test_projects
p=Factory(:person_in_multiple_projects)
assert_equal 3,p.projects.size
end
test "not registered" do
peeps=Person.not_registered
assert_not_nil peeps
assert peeps.size>0,"There should be some userless people"
assert_nil(peeps.find{|p| !p.user.nil?},"There should be no people with a non nil user")
p=people(:three)
assert_not_nil(peeps.find{|person| p.id==person.id},"Person :three should be userless and therefore in the list")
p=people(:quentin_person)
    assert_nil(peeps.find{|person| p.id==person.id},"Person :quentin_person should have a user and not be in the list")
end
def test_name
p=people(:quentin_person)
assert_equal "Quentin Jones", p.name
p.first_name="Tom"
assert_equal "Tom Jones", p.name
end
def test_email_with_name
p=people(:quentin_person)
assert_equal("Quentin Jones <[email protected]>",p.email_with_name)
end
def test_email_with_name_no_last_name
p=Person.new(:first_name=>"Fred",:email=>"[email protected]")
assert_equal("Fred <[email protected]>",p.email_with_name)
end
def test_capitalization_with_nil_last_name
p=people(:no_first_name)
assert_equal "Lastname",p.name
end
def test_capitalization_with_nil_first_name
p=people(:no_last_name)
assert_equal "Firstname",p.name
end
def test_double_firstname_capitalised
p=people(:double_firstname)
assert_equal "Fred David Bloggs", p.name
end
def test_double_lastname_capitalised
p=people(:double_lastname)
assert_equal "Fred Smith Jones",p.name
end
def test_double_barrelled_lastname_capitalised
p=people(:double_barrelled_lastname)
assert_equal "Fred Smith-Jones",p.name
end
def test_valid
p=people(:quentin_person)
assert p.valid?
p.email=nil
assert !p.valid?
p.email="sdf"
assert !p.valid?
p.email="sdf@"
assert !p.valid?
p.email="[email protected]"
assert p.valid?
p.web_page=nil
assert p.valid?
p.web_page=""
assert p.valid?
p.web_page="sdfsdf"
assert !p.valid?
p.web_page="http://google.com"
assert p.valid?
p.web_page="https://google.com"
assert p.valid?
p.web_page="http://google.com/fred"
assert p.valid?
p.web_page="http://google.com/fred?param=bob"
assert p.valid?
p.web_page="http://www.mygrid.org.uk/dev/issues/secure/IssueNavigator.jspa?reset=true&mode=hide&sorter/order=DESC&sorter/field=priority&resolution=-1&pid=10051&fixfor=10110"
assert p.valid?
end
def test_email_with_capitalise_valid
p=people(:quentin_person)
assert p.valid?
p.email="[email protected]"
assert p.valid?
p.email="[email protected]"
assert p.valid?,"Capitals in email should be valid"
end
def test_email_unique
p=people(:quentin_person)
newP=Person.new(:first_name=>"Fred",:email=>p.email)
assert !newP.valid?,"Should not be valid as email is not unique"
newP.email = p.email.capitalize
assert !newP.valid?,"Should not be valid as email is not case sensitive"
newP.email="[email protected]"
assert newP.valid?
end
def test_disciplines
p = Factory :person,:disciplines=>[Factory(:discipline,:title=>"A"),Factory(:discipline, :title=>"B")]
p.reload
assert_equal 2,p.disciplines.size
assert_equal "A",p.disciplines[0].title
assert_equal "B",p.disciplines[1].title
end
def test_positions_association
position = Factory(:project_position)
p=Factory :person
p.group_memberships.first.project_positions << position
assert_equal 1, p.project_positions.size
assert p.project_positions.include?(position)
end
def test_update_first_letter
p=Person.new(:first_name=>"Fred",:last_name=>"Monkhouse",:email=>"[email protected]")
assert p.valid?,"The new person should be valid"
p.save
assert_equal "M",p.first_letter
p=Person.new(:first_name=>"Freddy",:email=>"[email protected]")
assert p.valid?,"The new person should be valid"
p.save
assert_equal "F",p.first_letter
p=Person.new(:first_name=>"Zebedee",:email=>"[email protected]")
assert p.valid?,"The new person should be valid"
p.save
assert_equal "Z",p.first_letter
end
def test_update_first_letter_blank_last_name
p=Person.new(:first_name=>"Zebedee",:last_name=>"",:email=>"[email protected]")
assert p.valid?,"The new person should be valid"
p.save
assert_equal "Z",p.first_letter
end
def test_notifiee_info_inserted
p=Person.new(:first_name=>"Zebedee",:last_name=>"",:email=>"[email protected]")
assert_nil p.notifiee_info
assert_difference("NotifieeInfo.count") do
p.save!
end
p=Person.find(p.id)
assert_not_nil p.notifiee_info
assert p.receive_notifications?
end
def test_dependent_notifiee_info_is_destroyed_with_person
p=Person.new(:first_name=>"Zebedee",:last_name=>"",:email=>"[email protected]")
p.save!
assert_not_nil p.notifiee_info
assert_difference("NotifieeInfo.count",-1) do
p.destroy
end
end
def test_user_is_destroyed_with_person
p=people(:quentin_person)
u=users(:quentin)
assert_difference("Person.count",-1) do
assert_difference("User.count",-1) do
p.destroy
end
end
assert_nil User.find_by_id(u.id)
p=people(:random_userless_person)
assert_difference("Person.count",-1) do
assert_no_difference("User.count") do
p.destroy
end
end
end
def test_updated_not_changed_when_adding_notifiee_info
p=people(:modeller_person)
up_at=p.updated_at
sleep(2)
p.check_for_notifiee_info
assert_equal up_at,p.updated_at
end
test "test uuid generated" do
p = people(:modeller_person)
assert_nil p.attributes["uuid"]
p.save
assert_not_nil p.attributes["uuid"]
end
test "uuid doesn't change" do
x = people(:modeller_person)
x.save
uuid = x.attributes["uuid"]
x.save
assert_equal x.uuid, uuid
end
test 'projects method notices changes via both group_memberships and work_groups' do
person = Factory.build(:person, :group_memberships => [Factory(:group_membership)])
group_membership_projects = person.group_memberships.map(&:work_group).map(&:project).uniq.sort_by(&:title)
work_group_projects = person.work_groups.map(&:project).uniq.sort_by(&:title)
assert_equal (group_membership_projects | work_group_projects), person.projects.sort_by(&:title)
end
test 'should retrieve the list of people who have the manage right on the item' do
user = Factory(:user)
person = user.person
data_file = Factory(:data_file, :contributor => user)
people_can_manage = data_file.people_can_manage
assert_equal 1, people_can_manage.count
assert_equal person.id, people_can_manage.first[0]
new_person = Factory(:person_in_project)
policy = data_file.policy
policy.permissions.build(:contributor => new_person, :access_type => Policy::MANAGING)
policy.save
people_can_manage = data_file.people_can_manage
assert_equal 2, people_can_manage.count
people_ids = people_can_manage.collect{|p| p[0]}
assert people_ids.include? person.id
assert people_ids.include? new_person.id
end
test "related resource" do
user = Factory :user
person = user.person
User.with_current_user(user) do
AssetsCreator.create :asset=>Factory(:data_file),:creator=> person
AssetsCreator.create :asset=>Factory(:model),:creator=> person
AssetsCreator.create :asset=>Factory(:sop),:creator=> person
Factory :event,:contributor=>user
AssetsCreator.create :asset=>Factory(:presentation),:creator=> person
AssetsCreator.create :asset=>Factory(:publication),:creator=>person
assert_equal person.created_data_files, person.related_data_files
assert_equal person.created_models, person.related_models
assert_equal person.created_sops, person.related_sops
assert_equal user.events, person.related_events
assert_equal person.created_presentations, person.related_presentations
assert_equal person.created_publications, person.related_publications
end
end
test 'related isa' do
person = Factory(:person)
AssetsCreator.create :asset=>(inv1=Factory(:investigation)),:creator=> person
inv2=Factory(:investigation,:contributor=>person)
assert_equal [inv1,inv2].sort, person.related_investigations.sort
AssetsCreator.create :asset=>(study1=Factory(:study)),:creator=> person
study2=Factory(:study,:contributor=>person)
assert_equal [study1,study2].sort, person.related_studies.sort
AssetsCreator.create :asset=>(assay1=Factory(:assay)),:creator=> person
assay2=Factory(:assay,:contributor=>person)
assert_equal [assay1,assay2].sort, person.related_assays.sort
end
test "get the correct investigations and studides" do
p = Factory(:person)
u = p.user
inv1 = Factory(:investigation, :contributor=>p)
inv2 = Factory(:investigation, :contributor=>u)
study1 = Factory(:study, :contributor=>p)
study2 = Factory(:study, :contributor=>u)
p = Person.find(p.id)
assert_equal [study1,study2],p.studies.sort_by(&:id)
assert_equal [inv1,inv2],p.investigations.sort_by(&:id)
end
test "should be able to remove the workgroup whose project is not subcribed" do
p=Factory :person
wg = Factory :work_group
p.work_groups = [wg]
p.project_subscriptions.delete_all
assert p.project_subscriptions.empty?
p.work_groups = []
p.save
assert_empty p.work_groups
assert_empty p.projects
end
test "add to project and institution subscribes to project" do
    person = Factory(:brand_new_person)
inst = Factory(:institution)
proj = Factory(:project)
assert_empty person.project_subscriptions
person.add_to_project_and_institution(proj,inst)
person.save!
person.reload
assert_includes person.project_subscriptions.map(&:project),proj
end
test "shares programme?" do
person1 = Factory(:person)
person2 = Factory(:person)
person3 = Factory(:person)
prog1 = Factory :programme,:projects=>(person1.projects | person2.projects)
prog2 = Factory :programme,:projects=>person3.projects
assert person1.shares_programme?(person2)
assert person2.shares_programme?(person1)
refute person3.shares_programme?(person1)
refute person3.shares_programme?(person2)
refute person1.shares_programme?(person3)
refute person2.shares_programme?(person3)
#also with project rather than person
assert person1.shares_programme?(person2.projects.first)
refute person2.shares_programme?(person3.projects.first)
end
test "shares project?" do
person1 = Factory(:person)
project = person1.projects.first
person2 = Factory(:person,:work_groups=>[project.work_groups.first])
person3 = Factory(:person)
assert person1.shares_project?(person2)
refute person1.shares_project?(person3)
assert person1.shares_project?(project)
refute person1.shares_project?(person3.projects.first)
assert person1.shares_project?([project])
assert person1.shares_project?([project,Factory(:project)])
refute person1.shares_project?([person3.projects.first])
refute person1.shares_project?([person3.projects.first,Factory(:project)])
end
test "add to project and institution" do
proj1=Factory :project
proj2=Factory :project
inst1=Factory :institution
inst2=Factory :institution
p1=Factory :brand_new_person
p2=Factory :brand_new_person
assert_difference("WorkGroup.count",1) do
assert_difference("GroupMembership.count",1) do
p1.add_to_project_and_institution(proj1,inst1)
p1.save!
end
end
p1.reload
assert_equal 1,p1.projects.count
assert_include p1.projects,proj1
assert_equal 1,p1.institutions.count
assert_include p1.institutions,inst1
assert_no_difference("WorkGroup.count") do
assert_difference("GroupMembership.count",1) do
p2.add_to_project_and_institution(proj1,inst1)
end
end
p2.reload
assert_equal 1,p2.projects.count
assert_include p2.projects,proj1
assert_equal 1,p2.institutions.count
assert_include p2.institutions,inst1
assert_difference("WorkGroup.count",1) do
assert_difference("GroupMembership.count",1) do
p1.add_to_project_and_institution(proj2,inst1)
end
end
assert_difference("WorkGroup.count",1) do
assert_difference("GroupMembership.count",1) do
p1.add_to_project_and_institution(proj1,inst2)
end
end
p1.reload
assert_equal 2,p1.projects.count
assert_include p1.projects,proj2
assert_equal 2,p1.institutions.count
assert_include p1.institutions,inst2
assert_no_difference("WorkGroup.count") do
assert_no_difference("GroupMembership.count") do
p1.add_to_project_and_institution(proj1,inst1)
end
end
end
test "cache-key changes with workgroup" do
person = Factory :person
refute_empty person.projects
cachekey = person.cache_key
person.add_to_project_and_institution(Factory(:project),Factory(:institution))
refute_equal cachekey,person.cache_key
end
test "can create" do
User.current_user=Factory(:project_administrator).user
assert Person.can_create?
User.current_user=Factory(:admin).user
assert Person.can_create?
User.current_user=Factory(:brand_new_user)
refute User.current_user.registration_complete?
assert Person.can_create?
User.current_user = nil
refute Person.can_create?
User.current_user=Factory(:person).user
refute Person.can_create?
User.current_user=Factory(:pal).user
refute Person.can_create?
User.current_user=Factory(:asset_gatekeeper).user
refute Person.can_create?
User.current_user=Factory(:asset_housekeeper).user
refute Person.can_create?
User.current_user=Factory(:programme_administrator).user
assert Person.can_create?
end
test "administered programmes" do
pa = Factory(:programme_administrator)
admin = Factory(:admin)
other_prog = Factory(:programme)
progs = pa.programmes
assert_equal progs.sort,pa.administered_programmes.sort
refute_includes pa.administered_programmes,other_prog
assert_empty Factory(:person).administered_programmes
assert_equal Programme.all.sort,admin.administered_programmes
end
test "not_registered_with_matching_email" do
3.times do
Factory :person
end
p1 = Factory :brand_new_person, :email=>"[email protected]"
p2 = Factory :person, :email=>"[email protected]"
refute p1.registered?
assert p2.registered?
assert_includes Person.not_registered_with_matching_email("[email protected]"),p1
assert_includes Person.not_registered_with_matching_email("[email protected]"),p1
refute_includes Person.not_registered_with_matching_email("[email protected]"),p2
assert_empty Person.not_registered_with_matching_email("[email protected]")
end
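  # with_config_value is assumed to be a test helper that temporarily overrides the
  # named Seek::Config setting for the duration of the block, restoring it afterwards.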
test "orcid required for new person" do
with_config_value(:orcid_required, true) do
assert_nothing_raised do
has_orcid = Factory :brand_new_person, :email => "[email protected]",
:orcid => 'http://orcid.org/0000-0002-0048-3300'
assert has_orcid.valid?
assert_empty has_orcid.errors[:orcid]
end
assert_raises ActiveRecord::RecordInvalid do
no_orcid = Factory :brand_new_person, :email => "[email protected]"
assert !no_orcid.valid?
assert_not_empty no_orcid.errors[:orcid]
end
assert_raises ActiveRecord::RecordInvalid do
bad_orcid = Factory :brand_new_person, :email => "[email protected]",
:orcid => 'banana'
assert !bad_orcid.valid?
assert_not_empty bad_orcid.errors[:orcid]
end
end
end
test "orcid not required for existing person" do
no_orcid = Factory :brand_new_person, :email => "[email protected]"
with_config_value(:orcid_required, true) do
assert_nothing_raised do
no_orcid.update_attributes(:email => "[email protected]")
assert no_orcid.valid?
end
end
end
test "orcid must be valid even if not required" do
bad_orcid = Factory :brand_new_person, :email => "[email protected]"
with_config_value(:orcid_required, true) do
bad_orcid.update_attributes(:email => "[email protected]", :orcid => 'big mac')
assert !bad_orcid.valid?
assert_not_empty bad_orcid.errors[:orcid]
end
with_config_value(:orcid_required, false) do
assert_raises ActiveRecord::RecordInvalid do
another_bad_orcid = Factory :brand_new_person, :email => "[email protected]", :orcid => 'こんにちは'
assert !another_bad_orcid.valid?
assert_not_empty bad_orcid.errors[:orcid]
end
end
end
test "ensures full orcid uri is stored" do
semi_orcid = Factory :brand_new_person, :email => "[email protected]",
:orcid => '0000-0002-0048-3300'
full_orcid = Factory :brand_new_person, :email => "[email protected]",
:orcid => 'http://orcid.org/0000-0002-0048-3300'
assert_equal 'http://orcid.org/0000-0002-0048-3300', semi_orcid.orcid
assert_equal 'http://orcid.org/0000-0002-0048-3300', full_orcid.orcid
end
test "can flag has having left a project" do
person = Factory(:person)
project = person.projects.first
assert_not_includes person.former_projects, project
assert_includes person.current_projects, project
assert_includes person.projects, project
gm = person.group_memberships.first
gm.time_left_at = 1.day.ago
gm.save
assert gm.has_left
person.reload
assert_includes person.former_projects, project
assert_not_includes person.current_projects, project
assert_includes person.projects, project
end
test "can flag has leaving a project" do
person = Factory(:person)
project = person.projects.first
assert_not_includes person.former_projects, project
assert_includes person.current_projects, project
assert_includes person.projects, project
gm = person.group_memberships.first
gm.time_left_at = 1.day.from_now
gm.save
assert !gm.has_left
person.reload
assert_not_includes person.former_projects, project
assert_includes person.current_projects, project
assert_includes person.projects, project
end
test 'trim spaces from email, first_name, last_name' do
person = Factory(:brand_new_person)
person.email = ' [email protected] '
person.first_name = ' bob '
person.last_name = ' monkhouse '
person.web_page = ' http://fish.com '
assert person.valid?
disable_authorization_checks do
person.save!
end
person.reload
assert_equal '[email protected]',person.email
assert_equal 'bob',person.first_name
assert_equal 'monkhouse',person.last_name
assert_equal 'http://fish.com',person.web_page
end
end
| 31.9134 | 177 | 0.712731 |
6231da4f833e0e5708f8debb3ee9734786c5371d | 3,591 | # frozen_string_literal: true
module Blacklight
class Install < Rails::Generators::Base
source_root File.expand_path('../templates', __FILE__)
argument :model_name , type: :string , default: "user"
argument :controller_name, type: :string , default: "catalog"
argument :document_name, type: :string , default: "solr_document"
argument :search_builder_name, type: :string , default: "search_builder"
argument :solr_version, type: :string , default: "latest"
class_option :devise , type: :boolean, default: false, aliases: "-d", desc: "Use Devise as authentication logic."
class_option :jettywrapper, type: :boolean, default: false, desc: "Use jettywrapper to download and control Jetty"
    class_option :marc , type: :boolean, default: false, aliases: "-m", desc: "Generate MARC-based demo."
desc """
This generator makes the following changes to your application:
1. Generates blacklight:models
2. Generates utilities for working with solr
3. Adds globalid to the Gemfile
4. Creates a number of public assets, including images, stylesheets, and javascript
5. Injects behavior into your user application_controller.rb
6. Adds example configurations for dealing with MARC-like data
7. Adds Blacklight routes to your ./config/routes.rb
Thank you for Installing Blacklight.
"""
def add_solr_wrapper
if options[:jettywrapper]
generate 'blacklight:solr4'
elsif solr_version == 'latest'
generate 'blacklight:solr5'
else
generate "blacklight:solr#{solr_version}"
end
end
def add_globalid_gem
gem "globalid"
end
def bundle_install
Bundler.with_clean_env do
run "bundle install"
end
end
# Copy all files in templates/public/ directory to public/
# Call external generator in AssetsGenerator, so we can
    # leave that callable separately too.
def copy_public_assets
generate "blacklight:assets"
end
def generate_blacklight_document
generate 'blacklight:document', document_name
end
def generate_search_builder
generate 'blacklight:search_builder', search_builder_name
end
def generate_blacklight_models
generate 'blacklight:models'
end
def generate_blacklight_user
generator_args = [model_name]
if options[:devise]
generator_args << "--devise #{options[:devise]}"
end
generate 'blacklight:user', generator_args.join(" ")
end
def generate_controller
generate 'blacklight:controller', controller_name
end
def add_default_catalog_route
route("root to: \"#{controller_name}#index\"")
end
def add_sass_configuration
insert_into_file "config/application.rb", :after => "config.assets.enabled = true" do <<EOF
# Default SASS Configuration, check out https://github.com/rails/sass-rails for details
config.assets.compress = !Rails.env.development?
EOF
end
end
def inject_blacklight_i18n_strings
copy_file "blacklight.en.yml", "config/locales/blacklight.en.yml"
end
def generate_blacklight_marc_demo
if options[:marc]
blacklight_marc = String.new('blacklight-marc')
gem blacklight_marc, '~> 6.0'
Bundler.with_clean_env do
run "bundle install"
end
generate 'blacklight:marc:install'
end
end
def add_routes
route <<-EOF
mount Blacklight::Engine => '/'
EOF
end
end
end
| 29.677686 | 122 | 0.674742 |
1a20968614b7a479ede2cd70df9ebc1b9703a9c0 | 499 | module Cms
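  # Renders a Bootstrap-style panel from a Liquid template. Usage sketch, where the
  # markup after the tag name becomes the optional panel heading:
  #   {% panel My heading %}Panel body content{% endpanel %}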
class PanelTag < Liquid::Block
def initialize(tag_name, markup, tokens)
super
@header = markup
end
def render(context)
if @header
"<div class=\"panel panel-default\"><div class=\"panel-heading\">#{@header}</div><div class=\"panel-body\">#{super}</div></div>"
else
"<div class=\"panel panel-default\"><div class=\"panel-body\">#{super}</div></div>"
end
end
end
end
Liquid::Template.register_tag('panel', Cms::PanelTag)
| 26.263158 | 136 | 0.607214 |
610de4ccafebe818c8c34b1222113ad5923e1688 | 715 | # frozen_string_literal: true
module Html2Text
module Parsers
class Html
      # Parses an image node, returning the supplied output
# including the text
class ImageNode < NodeParser
def call
image_text
end
        # Parses the supplied node and returns
# a text representation of the image
#
# @return [String] text replacement for an image
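        # e.g. an <img> with title="Logo" yields "[Logo]", one with only
        # alt="Photo" yields "[Photo]", and one with neither yields ""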
def image_text
if node.attribute('title')
'[' + node.attribute('title').to_s + ']'
elsif node.attribute('alt')
'[' + node.attribute('alt').to_s + ']'
else
''
end
end
end
end
end
end
| 23.833333 | 62 | 0.54965 |
33d63b3bac976354319216c69f76f69a739829ee | 172 | class CreateConnections < ActiveRecord::Migration
def change
create_table :connections do |t|
t.string :facebook_page_url
t.timestamps
end
end
end
| 17.2 | 49 | 0.709302 |
621d0edcc4cba9d36487200bd5377129039877f0 | 1,056 | class SdlRtf < Formula
desc "Sample library to display Rich Text Format (RTF) documents"
homepage "https://www.libsdl.org/projects/SDL_rtf/"
url "https://www.libsdl.org/projects/SDL_rtf/release/SDL_rtf-0.1.0.tar.gz"
sha256 "3dc0274b666e28010908ced24844ca7d279e07b66f673c990d530d4ea94b757e"
head "https://hg.libsdl.org/SDL_rtf", :using => :hg
bottle do
cellar :any
sha256 "310bcc2756a0ba5dd9287af9159809c2519609830e07e4ef0773edfc51c8bda5" => :mojave
sha256 "319fe65012c94d20675b0b3dc3c9e4df59838ccca7496b81a425bded94e3c9fc" => :high_sierra
sha256 "c34abb198f384916d7b2a09a88c69cb84f29674031329bb7a1733e8a5ed39255" => :sierra
sha256 "6c7e9f7459ff062fbb48ee1a383a4fd4acc2c29f5ee9b57dea93710c94ccda11" => :el_capitan
sha256 "8dd89df32c9ea02bcab36932c2f22bcb6de58d6002bd6fb9e95f9bbfe5ccf41e" => :yosemite
sha256 "9d077d10fc0102738e3c7d445cf2c8290150f98b4fb92e1b72bb3e5857dc3b3e" => :mavericks
end
depends_on "sdl"
def install
system "./configure", "--prefix=#{prefix}"
system "make", "install"
end
end
| 42.24 | 93 | 0.789773 |
219dd6ff593ae1051f8c82a40306f49b7dbe6ea4 | 349 | module LapisConstants
class ErrorCodes
UNAUTHORIZED = 1
MISSING_PARAMETERS = 2
ID_NOT_FOUND = 3
INVALID_VALUE = 4
UNKNOWN = 5
AUTH = 6
WARNING = 7
MISSING_OBJECT = 8
DUPLICATED = 9
ALL = %w(UNAUTHORIZED MISSING_PARAMETERS ID_NOT_FOUND INVALID_VALUE UNKNOWN AUTH WARNING MISSING_OBJECT DUPLICATED)
end
end
| 23.266667 | 119 | 0.710602 |
f7cd2c189cbcf4a3f89797ee5f7ce3a4a84f562f | 1,373 | # frozen_string_literal: true
# Copyright 2015 Australian National Botanic Gardens
#
# This file is part of the NSL Editor.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require "test_helper"
load "test/models/search/users.rb"
load "test/models/search/on_name/test_helper.rb"
# Single Search model test.
class SearchOnNameNameStatusNomInvalSimpleTest < ActiveSupport::TestCase
test "search on name name status is nom inval category simple" do
params = ActiveSupport::HashWithIndifferentAccess.new(
query_target: "name",
query_string: "name-status-nom-inval:",
current_user: build_edit_user
)
search = Search::Base.new(params)
confirm_results_class(search.executed_query.results)
assert !search.executed_query.results.empty?,
"Expected at least one search result for name-status-nom-inval"
end
end
| 36.131579 | 76 | 0.747269 |
d59067219c5324295f792f034a18c307bd8c3801 | 1,639 | module Services
class Payment < Base
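    # Posts an initiate request to PayGate PayWeb3 and returns the parsed response
    # details together with the order. Usage sketch (the return URL is hypothetical):
    #   Services::Payment.new(clean_request.id, "https://shop.example/").fetch!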
def initialize(order_id, url)
@order_id = order_id
@return_url = url
end
def fetch!
connection = Faraday.new
@payment_params = set_payment_params
checksum = generate_checksum
response = connection.post do |req|
req.url URI.encode("https://secure.paygate.co.za/payweb3/initiate.trans")
req.headers['Content-Type'] = 'application/x-www-form-urlencoded'
req.body = URI.encode_www_form(@payment_params.merge(CHECKSUM: Digest::MD5.hexdigest("#{checksum}#{key}")))
end.body
pay_request_id = response.split("&PAY_REQUEST_ID=")[1].split("&REFERENCE")[0]
checkusm_from_response = response.split("&CHECKSUM=")[1]
      return {
:pay_request_id => pay_request_id,
:checkusm_from_response => checkusm_from_response,
:order => set_order
}
end
def set_payment_params
{
PAYGATE_ID: paygate_id,
REFERENCE: set_order.user.full_name,
AMOUNT: set_order.price.to_i * 100,
CURRENCY: "ZAR",
RETURN_URL: "#{@return_url}payments/update_payment/#{@order_id}",
TRANSACTION_DATE: Time.now.strftime("%Y-%m-%d %H:%M:%S"),
LOCALE: "en-za",
COUNTRY: "ZAF",
EMAIL: set_order.user.email
}
end
def generate_checksum
set_payment_params.map{|k,v| "#{v}"}.join('')
end
def key
"AXtyAK4E3FEPsHLWjGoqEC4cvus4"
end
def paygate_id
"1029122100018"
end
def set_order
order = CleanRequest.find(@order_id)
return order
end
end
end | 28.258621 | 115 | 0.625991 |
8774cf904f0ad7d94719a62372eaecb0d5a0544e | 13,837 | # frozen_string_literal: true
module Types
class ProjectType < BaseObject
graphql_name 'Project'
authorize :read_project
expose_permissions Types::PermissionTypes::Project
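    # Field names are exposed to GraphQL clients in camelCase (e.g. `star_count`
    # becomes `starCount`). A query selecting this type might look like the sketch
    # below, assuming the standard root `project(fullPath:)` field:
    #   { project(fullPath: "group/name") { name starCount forksCount } }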
field :id, GraphQL::ID_TYPE, null: false,
description: 'ID of the project'
field :full_path, GraphQL::ID_TYPE, null: false,
description: 'Full path of the project'
field :path, GraphQL::STRING_TYPE, null: false,
description: 'Path of the project'
field :name_with_namespace, GraphQL::STRING_TYPE, null: false,
description: 'Full name of the project with its namespace'
field :name, GraphQL::STRING_TYPE, null: false,
description: 'Name of the project (without namespace)'
field :description, GraphQL::STRING_TYPE, null: true,
description: 'Short description of the project'
markdown_field :description_html, null: true
field :tag_list, GraphQL::STRING_TYPE, null: true,
description: 'List of project topics (not Git tags)'
field :ssh_url_to_repo, GraphQL::STRING_TYPE, null: true,
description: 'URL to connect to the project via SSH'
field :http_url_to_repo, GraphQL::STRING_TYPE, null: true,
description: 'URL to connect to the project via HTTPS'
field :web_url, GraphQL::STRING_TYPE, null: true,
description: 'Web URL of the project'
field :star_count, GraphQL::INT_TYPE, null: false,
description: 'Number of times the project has been starred'
field :forks_count, GraphQL::INT_TYPE, null: false, calls_gitaly: true, # 4 times
description: 'Number of times the project has been forked'
field :created_at, Types::TimeType, null: true,
description: 'Timestamp of the project creation'
field :last_activity_at, Types::TimeType, null: true,
description: 'Timestamp of the project last activity'
field :archived, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates the archived status of the project'
field :visibility, GraphQL::STRING_TYPE, null: true,
description: 'Visibility of the project'
field :container_registry_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if the project stores Docker container images in a container registry'
field :shared_runners_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if shared runners are enabled for the project'
field :lfs_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if the project has Large File Storage (LFS) enabled'
field :merge_requests_ff_only_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if no merge commits should be created and all merges should instead be fast-forwarded, which means that merging is only allowed if the branch could be fast-forwarded.'
field :service_desk_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if the project has service desk enabled.'
field :service_desk_address, GraphQL::STRING_TYPE, null: true,
description: 'E-mail address of the service desk.'
field :avatar_url, GraphQL::STRING_TYPE, null: true, calls_gitaly: true,
description: 'URL to avatar image file of the project',
resolve: -> (project, args, ctx) do
project.avatar_url(only_path: false)
end
%i[issues merge_requests wiki snippets].each do |feature|
field "#{feature}_enabled", GraphQL::BOOLEAN_TYPE, null: true,
description: "Indicates if #{feature.to_s.titleize.pluralize} are enabled for the current user",
resolve: -> (project, args, ctx) do
project.feature_available?(feature, ctx[:current_user])
end
end
field :jobs_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if CI/CD pipeline jobs are enabled for the current user',
resolve: -> (project, args, ctx) do
project.feature_available?(:builds, ctx[:current_user])
end
field :public_jobs, GraphQL::BOOLEAN_TYPE, method: :public_builds, null: true,
description: 'Indicates if there is public access to pipelines and job details of the project, including output logs and artifacts'
field :open_issues_count, GraphQL::INT_TYPE, null: true,
description: 'Number of open issues for the project',
resolve: -> (project, args, ctx) do
project.open_issues_count if project.feature_available?(:issues, ctx[:current_user])
end
field :import_status, GraphQL::STRING_TYPE, null: true,
description: 'Status of import background job of the project'
field :jira_import_status, GraphQL::STRING_TYPE, null: true,
description: 'Status of Jira import background job of the project'
field :only_allow_merge_if_pipeline_succeeds, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if merge requests of the project can only be merged with successful jobs'
field :allow_merge_on_skipped_pipeline, GraphQL::BOOLEAN_TYPE, null: true,
description: 'If `only_allow_merge_if_pipeline_succeeds` is true, indicates if merge requests of the project can also be merged with skipped jobs'
field :request_access_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if users can request member access to the project'
field :only_allow_merge_if_all_discussions_are_resolved, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if merge requests of the project can only be merged when all the discussions are resolved'
field :printing_merge_request_link_enabled, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if a link to create or view a merge request should display after a push to Git repositories of the project from the command line'
field :remove_source_branch_after_merge, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if `Delete source branch` option should be enabled by default for all new merge requests of the project'
field :autoclose_referenced_issues, GraphQL::BOOLEAN_TYPE, null: true,
description: 'Indicates if issues referenced by merge requests and commits within the default branch are closed automatically'
field :suggestion_commit_message, GraphQL::STRING_TYPE, null: true,
description: 'The commit message used to apply merge request suggestions'
field :namespace, Types::NamespaceType, null: true,
description: 'Namespace of the project'
field :group, Types::GroupType, null: true,
description: 'Group of the project'
field :statistics, Types::ProjectStatisticsType,
null: true,
description: 'Statistics of the project',
resolve: -> (obj, _args, _ctx) { Gitlab::Graphql::Loaders::BatchProjectStatisticsLoader.new(obj.id).find }
field :repository, Types::RepositoryType, null: true,
description: 'Git repository of the project'
field :merge_requests,
Types::MergeRequestType.connection_type,
null: true,
description: 'Merge requests of the project',
extras: [:lookahead],
resolver: Resolvers::MergeRequestsResolver
field :merge_request,
Types::MergeRequestType,
null: true,
description: 'A single merge request of the project',
resolver: Resolvers::MergeRequestsResolver.single
field :issues,
Types::IssueType.connection_type,
null: true,
description: 'Issues of the project',
extras: [:lookahead],
resolver: Resolvers::IssuesResolver
field :issue_status_counts,
Types::IssueStatusCountsType,
null: true,
description: 'Counts of issues by status for the project',
extras: [:lookahead],
resolver: Resolvers::IssueStatusCountsResolver
field :milestones, Types::MilestoneType.connection_type, null: true,
description: 'Milestones of the project',
resolver: Resolvers::ProjectMilestonesResolver
field :project_members,
Types::MemberInterface.connection_type,
description: 'Members of the project',
resolver: Resolvers::ProjectMembersResolver
field :environments,
Types::EnvironmentType.connection_type,
null: true,
description: 'Environments of the project',
resolver: Resolvers::EnvironmentsResolver
field :environment,
Types::EnvironmentType,
null: true,
description: 'A single environment of the project',
resolver: Resolvers::EnvironmentsResolver.single
field :issue,
Types::IssueType,
null: true,
description: 'A single issue of the project',
resolver: Resolvers::IssuesResolver.single
field :packages, Types::PackageType.connection_type, null: true,
description: 'Packages of the project',
resolver: Resolvers::PackagesResolver
field :pipelines,
Types::Ci::PipelineType.connection_type,
null: true,
description: 'Build pipelines of the project',
resolver: Resolvers::ProjectPipelinesResolver
field :pipeline,
Types::Ci::PipelineType,
null: true,
description: 'Build pipeline of the project',
resolver: Resolvers::ProjectPipelineResolver
field :sentry_detailed_error,
Types::ErrorTracking::SentryDetailedErrorType,
null: true,
description: 'Detailed version of a Sentry error on the project',
resolver: Resolvers::ErrorTracking::SentryDetailedErrorResolver
field :grafana_integration,
Types::GrafanaIntegrationType,
null: true,
description: 'Grafana integration details for the project',
resolver: Resolvers::Projects::GrafanaIntegrationResolver
field :snippets,
Types::SnippetType.connection_type,
null: true,
description: 'Snippets of the project',
resolver: Resolvers::Projects::SnippetsResolver
field :sentry_errors,
Types::ErrorTracking::SentryErrorCollectionType,
null: true,
description: 'Paginated collection of Sentry errors on the project',
resolver: Resolvers::ErrorTracking::SentryErrorCollectionResolver
field :boards,
Types::BoardType.connection_type,
null: true,
description: 'Boards of the project',
max_page_size: 2000,
resolver: Resolvers::BoardsResolver
field :board,
Types::BoardType,
null: true,
description: 'A single board of the project',
resolver: Resolvers::BoardsResolver.single
field :jira_imports,
Types::JiraImportType.connection_type,
null: true,
description: 'Jira imports into the project',
resolver: Resolvers::Projects::JiraImportsResolver
field :services,
Types::Projects::ServiceType.connection_type,
null: true,
description: 'Project services',
resolver: Resolvers::Projects::ServicesResolver
field :alert_management_alerts,
Types::AlertManagement::AlertType.connection_type,
null: true,
description: 'Alert Management alerts of the project',
extras: [:lookahead],
resolver: Resolvers::AlertManagement::AlertResolver
field :alert_management_alert,
Types::AlertManagement::AlertType,
null: true,
description: 'A single Alert Management alert of the project',
resolver: Resolvers::AlertManagement::AlertResolver.single
field :alert_management_alert_status_counts,
Types::AlertManagement::AlertStatusCountsType,
null: true,
description: 'Counts of alerts by status for the project',
resolver: Resolvers::AlertManagement::AlertStatusCountsResolver
field :releases,
Types::ReleaseType.connection_type,
null: true,
description: 'Releases of the project',
resolver: Resolvers::ReleasesResolver
field :release,
Types::ReleaseType,
null: true,
description: 'A single release of the project',
resolver: Resolvers::ReleasesResolver.single,
authorize: :download_code
field :container_expiration_policy,
Types::ContainerExpirationPolicyType,
null: true,
description: 'The container expiration policy of the project'
field :label,
Types::LabelType,
null: true,
description: 'A label available on this project' do
argument :title, GraphQL::STRING_TYPE,
required: true,
description: 'Title of the label'
end
def label(title:)
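      # Batch the lookups by title (keyed per project) via BatchLoader, so resolving
      # many `label` fields in one query results in a single LabelsFinder call.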
BatchLoader::GraphQL.for(title).batch(key: project) do |titles, loader, args|
LabelsFinder
.new(current_user, project: args[:key], title: titles)
.execute
.each { |label| loader.call(label.title, label) }
end
end
field :labels,
Types::LabelType.connection_type,
null: true,
description: 'Labels available on this project' do
argument :search_term, GraphQL::STRING_TYPE,
required: false,
description: 'A search term to find labels with'
end
def labels(search_term: nil)
LabelsFinder
.new(current_user, project: project, search: search_term)
.execute
end
private
def project
@project ||= object.respond_to?(:sync) ? object.sync : object
end
end
end
Types::ProjectType.prepend_if_ee('::EE::Types::ProjectType')
| 41.930303 | 201 | 0.674135 |
33ea670d192f3a4523bb414f7318b0ccb1d04635 | 273 | cask 'container-rar' do
version '1.2.3'
sha256 '419af7864c0e1f125515c49b08bd22e0f7de39f5285897c440fe03c714871763'
url "file://#{TEST_FIXTURE_DIR}/cask/container.rar"
homepage 'https://example.com/container-rar'
depends_on formula: 'unar'
app 'container'
end
| 22.75 | 75 | 0.761905 |
287e05d2760304e5ead552a87866651c613bfa83 | 448 | require 'rails_helper'
# Specs in this file have access to a helper object that includes
# the Students::CompletedLessonPartsHelper. For example:
#
# describe Students::CompletedLessonPartsHelper do
# describe "string concat" do
# it "concats two strings with spaces" do
# expect(helper.concat_strings("this","that")).to eq("this that")
# end
# end
# end
RSpec.describe Students::CompletedLessonPartsHelper, type: :helper do
end
| 29.866667 | 71 | 0.741071 |
d5ea491496bfd20692b1af1360bc11769d718b5a | 4,387 | require File.expand_path(File.dirname(__FILE__) + '/../test_helper')
module Beetle
class RedisAssumptionsTest < Minitest::Test
def setup
@r = DeduplicationStore.new.redis
@r.flushdb
end
test "trying to delete a non existent key doesn't throw an error" do
assert [email protected]?("hahahaha")
assert_equal 0, @r.del("hahahaha")
end
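    # MSETNX only sets the given keys when none of them already exist, so the
    # second call below must return false and leave "a" and "b" unchanged.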
test "msetnx returns a boolean" do
assert_equal true, @r.msetnx("a", 1, "b", 2)
assert_equal "1", @r.get("a")
assert_equal "2", @r.get("b")
assert_equal false, @r.msetnx("a", 3, "b", 4)
assert_equal "1", @r.get("a")
assert_equal "2", @r.get("b")
end
end
class RedisServerStringTest < Minitest::Test
def setup
@original_redis_server = Beetle.config.redis_server
@store = DeduplicationStore.new
@server_string = "my_test_host_from_file:9999"
Beetle.config.redis_server = @server_string
end
def teardown
Beetle.config.redis_server = @original_redis_server
end
test "redis should match the redis server string" do
assert_equal @server_string, @store.redis.server
end
end
class RedisServerFileTest < Minitest::Test
def setup
@original_redis_server = Beetle.config.redis_server
@original_system_name = Beetle.config.system_name
@store = DeduplicationStore.new
@server_string = "my_test_host_from_file:6379"
Beetle.config.redis_server = redis_test_master_file(@server_string)
end
def teardown
Beetle.config.redis_server = @original_redis_server
Beetle.config.system_name = @original_system_name
end
test "redis should match the redis master file" do
assert_equal @server_string, @store.redis.server
end
test "redis should be nil if the redis master file is blank" do
redis_test_master_file("")
assert_nil @store.redis
end
test "should keep using the current redis if the redis master file hasn't changed since the last request" do
@store.expects(:read_master_file).once.returns("localhost:1")
2.times { @store.redis }
end
test "should blow up if the master file doesn't exist" do
Beetle.config.redis_server = "/tmp/__i_don_not_exist__.txt"
assert_raises(Errno::ENOENT) { @store.redis_master_from_master_file }
end
test "should retrieve the redis master for the configured system name if the master file contains a mapping for it" do
redis_test_master_file("blabber/localhost:2\nblubber/localhost:1")
Beetle.config.system_name = "blubber"
assert_equal "localhost:1", @store.redis.server
end
test "should retrieve the redis master for the default system name if the master file contains a simple host:port entry" do
redis_test_master_file("localhost:2\nblubber/localhost:1")
assert_equal "localhost:2", @store.redis.server
end
private
def redis_test_master_file(server_string)
tmp_dir = File.expand_path("../../../tmp", __FILE__)
Dir.mkdir(tmp_dir) unless File.exist?(tmp_dir)
path = tmp_dir + "/redis-master-for-unit-tests"
File.open(path, "w"){|f| f.puts server_string}
path
end
end
class RedisFailoverTest < Minitest::Test
def setup
@store = DeduplicationStore.new
Beetle.config.expects(:redis_failover_timeout).returns(1)
end
test "a redis operation protected with a redis failover block should succeed if it can find a new master" do
redis1 = stub("redis 1")
redis2 = stub("redis 2")
s = sequence("redis accesses")
@store.expects(:redis).returns(redis1).in_sequence(s)
redis1.expects(:get).with("foo:x").raises("disconnected").in_sequence(s)
@store.expects(:redis).returns(redis2).in_sequence(s)
redis2.expects(:get).with("foo:x").returns("42").in_sequence(s)
@store.logger.expects(:info)
@store.logger.expects(:error)
assert_equal("42", @store.get("foo", "x"))
end
test "a redis operation protected with a redis failover block should fail if it cannot find a new master" do
redis1 = stub()
@store.stubs(:redis).returns(redis1)
redis1.stubs(:get).with("foo:x").raises("disconnected")
@store.stubs(:sleep)
@store.logger.stubs(:info)
@store.logger.stubs(:error)
assert_raises(NoRedisMaster) { @store.get("foo", "x") }
end
end
end
| 34.273438 | 127 | 0.682243 |
bb27aa32075c75a465b0b71f9abe974468ab7f42 | 1,220 | class ThemesController < ApplicationController
before_action :set_theme, only: [:show, :edit, :update, :destroy]
# GET /themes
def index
@themes = Theme.all
end
# GET /themes/1
def show
end
# GET /themes/new
def new
@theme = Theme.new
end
# GET /themes/1/edit
def edit
end
# POST /themes
def create
@theme = Theme.new(theme_params)
if @theme.save
redirect_to @theme, notice: 'Theme was successfully created.'
else
render :new
end
end
# PATCH/PUT /themes/1
def update
if @theme.update(theme_params)
redirect_to @theme, notice: 'Theme was successfully updated.'
else
render :edit
end
end
# DELETE /themes/1
def destroy
@theme.destroy
redirect_to themes_url, notice: 'Theme was successfully destroyed.'
end
def change_themes
Theme.change_theme(params[:id], session[:user_id])
redirect_to "/settings"
end
private
# Use callbacks to share common setup or constraints between actions.
def set_theme
@theme = Theme.find(params[:id])
end
# Only allow a trusted parameter "white list" through.
def theme_params
params.require(:theme).permit(:name)
end
end
| 19.0625 | 73 | 0.659836 |
f7d2a683059851ae115efae02341db6a7bdf863f | 2,320 | # frozen_string_literal: true
require_relative "helper"
require "rack"
class TestRackServer < Minitest::Test
parallelize_me!
class ErrorChecker
def initialize(app)
@app = app
@exception = nil
end
attr_reader :exception, :env
def call(env)
begin
@app.call(env)
rescue Exception => e
@exception = e
[ 500, {}, ["Error detected"] ]
end
end
end
class ServerLint < Rack::Lint
def call(env)
check_env env
@app.call(env)
end
end
def setup
@simple = lambda { |env| [200, { "X-Header" => "Works" }, ["Hello"]] }
@server = Puma::Server.new @simple
@server.add_tcp_listener "127.0.0.1", 0
@stopped = false
end
def stop
@server.stop(true)
@stopped = true
end
def teardown
@server.stop(true) unless @stopped
end
def test_lint
@checker = ErrorChecker.new ServerLint.new(@simple)
@server.app = @checker
@server.run
hit(["http://127.0.0.1:#{ @server.connected_ports[0] }/test"])
stop
refute @checker.exception, "Checker raised exception"
end
def test_large_post_body
@checker = ErrorChecker.new ServerLint.new(@simple)
@server.app = @checker
@server.run
big = "x" * (1024 * 16)
Net::HTTP.post_form URI.parse("http://127.0.0.1:#{ @server.connected_ports[0] }/test"),
{ "big" => big }
stop
refute @checker.exception, "Checker raised exception"
end
def test_path_info
input = nil
@server.app = lambda { |env| input = env; @simple.call(env) }
@server.run
hit(["http://127.0.0.1:#{ @server.connected_ports[0] }/test/a/b/c"])
stop
assert_equal "/test/a/b/c", input['PATH_INFO']
end
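  # Handlers pushed onto env['rack.after_reply'] are run by the server once the
  # request has been handled, which is what the assertion below relies on.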
def test_after_reply
closed = false
@server.app = lambda do |env|
env['rack.after_reply'] << lambda { closed = true }
@simple.call(env)
end
@server.run
hit(["http://127.0.0.1:#{ @server.connected_ports[0] }/test"])
stop
assert_equal true, closed
end
def test_common_logger
log = StringIO.new
logger = Rack::CommonLogger.new(@simple, log)
@server.app = logger
@server.run
hit(["http://127.0.0.1:#{ @server.connected_ports[0] }/test"])
stop
assert_match %r!GET /test HTTP/1\.1!, log.string
end
end
| 18.412698 | 91 | 0.602586 |
03e9b047691f3c4b48813a135c4c4e23f69739d1 | 485 | # Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::ApiManagement::Mgmt::V2019_12_01
module Models
#
# Defines values for AuthorizationMethod
#
module AuthorizationMethod
HEAD = "HEAD"
OPTIONS = "OPTIONS"
TRACE = "TRACE"
GET = "GET"
POST = "POST"
PUT = "PUT"
PATCH = "PATCH"
DELETE = "DELETE"
end
end
end
| 22.045455 | 70 | 0.63299 |
0847d8ece9fecba5a14e0242fb28bf57845505e8 | 186 | Delayed::Web::Engine.routes.draw do
root to: 'jobs#index'
resources :jobs, only: [:destroy, :index, :show] do
put :queue, on: :member
post :batch, on: :collection
end
end
| 20.666667 | 53 | 0.650538 |
ed5c72adead0b658e90e4da4e341630ef7d17553 | 925 |
Pod::Spec.new do |spec|
spec.name = 'YFTestKit'
spec.version = '1.0'
spec.summary = 'Guide:'
spec.description = <<-DESC
Guide for private pods
DESC
spec.homepage = 'https://github.com/yfGit/'
spec.license = { :type => 'MIT', :file => 'LICENSE' }
spec.author = { 'YFTestKit' => '[email protected]' }
spec.source = { :git => 'https://github.com/yfGit/YFTestKit.git', :tag => spec.version.to_s }
spec.ios.deployment_target = '8.0'
spec.source_files = 'YFTestKit/Classes/*.{h,m}', 'YFTestKit/Classes/ThirdParty/*.{h}'
  # .a static libraries and their dependencies
spec.vendored_libraries = 'YFTestKit/Classes/ThirdParty/*.{a}'
spec.frameworks = 'SystemConfiguration','CoreGraphics','CoreTelephony','Security','CoreLocation','JavaScriptCore'
spec.libraries = 'iconv','sqlite3','stdc++','z'
  # pod dependencies
#spec.dependency 'AFNetworking'
end
| 34.259259 | 113 | 0.603243 |
1a568ef69393d0e0a11a9cde1385324055c8ff4c | 1,676 | # encoding: utf-8
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require '../azure_mgmt_bot_service/lib/module_definition'
require '../azure_mgmt_bot_service/lib/version'
Gem::Specification.new do |spec|
spec.name = 'azure_mgmt_bot_service'
spec.version = Azure::BotService::Mgmt::VERSION
spec.authors = 'Microsoft Corporation'
spec.email = '[email protected]'
spec.homepage = 'https://aka.ms/azure-sdk-for-ruby'
spec.summary = 'Official Ruby client library to consume BotService'
spec.license = 'MIT'
spec.metadata = {
'bug_tracker_uri' => 'https://github.com/Azure/azure-sdk-for-ruby/issues',
'changelog_uri' => 'https://github.com/Azure/azure-sdk-for-ruby/blob/master/ChangeLog.md',
'documentation_uri' => 'https://azure.microsoft.com/en-us/develop/ruby/',
'homepage_uri' => 'https://aka.ms/azure-sdk-for-ruby'
}
spec.files = Dir["LICENSE.txt", "lib/**/*"]
spec.files.reject! { |fn| fn.include? "build.json" }
spec.bindir = 'bin'
spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
spec.require_paths = ['lib']
spec.required_ruby_version = '>= 2.0.0'
spec.add_development_dependency 'bundler', '~> 1.9'
spec.add_development_dependency 'rake', '~> 10'
spec.add_development_dependency 'rspec', '~> 3'
spec.add_development_dependency 'dotenv', '~> 2'
spec.add_runtime_dependency 'ms_rest_azure', '~> 0.12.0'
end
| 40.878049 | 94 | 0.681981 |
084cef0b0b1a8ec94e1b57e54f4cc37b6f21d6b8 | 496 | # Be sure to restart your server when you modify this file.
# Your secret key for verifying the integrity of signed cookies.
# If you change this key, all old signed cookies will become invalid!
# Make sure the secret is at least 30 characters and all random,
# no regular words or you'll be exposed to dictionary attacks.
Dummy::Application.config.secret_token = '9bbf8df4647276c9c31d053aa509b1ce7aa9f99744765f892a0509745f5c3b627eb050097c7a5f517d3728b26bf9e56eedd2e758c71410a00a54317c266e1650'
| 62 | 171 | 0.832661 |
7a1bb7efdc95ee300b60b07db1ddd9a59a93615f | 2,574 | # frozen_string_literal: true
require "rails/generators/active_record"
module ActiveRecord
module Generators # :nodoc:
class ModelGenerator < Base # :nodoc:
argument :attributes, type: :array, default: [], banner: "field[:type][:index] field[:type][:index]"
check_class_collision
class_option :migration, type: :boolean
class_option :timestamps, type: :boolean
class_option :parent, type: :string, desc: "The parent class for the generated model"
class_option :indexes, type: :boolean, default: true, desc: "Add indexes for references and belongs_to columns"
class_option :primary_key_type, type: :string, desc: "The type for primary key"
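      # Example invocation (hypothetical attribute names):
      #   bin/rails generate model Post title:string author:references --primary-key-type=uuid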
# creates the migration file for the model.
def create_migration_file
return unless options[:migration] && options[:parent].nil?
attributes.each { |a| a.attr_options.delete(:index) if a.reference? && !a.has_index? } if options[:indexes] == false
migration_template "../../migration/templates/create_table_migration.rb", File.join(db_migrate_path, "create_#{table_name}.rb")
end
def create_model_file
generate_application_record
template "model.rb", File.join("app/models", class_path, "#{file_name}.rb")
end
def create_module_file
return if regular_class_path.empty?
generate_application_record
template "module.rb", File.join("app/models", "#{class_path.join('/')}.rb") if behavior == :invoke
end
hook_for :test_framework
private
def attributes_with_index
attributes.select { |a| !a.reference? && a.has_index? }
end
# FIXME: Change this file to a symlink once RubyGems 2.5.0 is required.
def generate_application_record
if behavior == :invoke && !application_record_exist?
template "application_record.rb", application_record_file_name
end
end
# Used by the migration template to determine the parent name of the model
def parent_class_name
options[:parent] || "ApplicationRecord"
end
def application_record_exist?
file_exist = nil
in_root { file_exist = File.exist?(application_record_file_name) }
file_exist
end
def application_record_file_name
@application_record_file_name ||= if mountable_engine?
"app/models/#{namespaced_path}/application_record.rb"
else
"app/models/application_record.rb"
end
end
end
end
end
| 35.75 | 135 | 0.662005 |
b91919a5fc4edd0615580ad579e5032b640228d6 | 203 | # frozen_string_literal: true
module Works
# Draws a popup for selecting work type and subtype
class WorkTypeModalComponent < ApplicationComponent
def types
WorkType.all
end
end
end
| 18.454545 | 53 | 0.748768 |
1a8586f96eb478bea516342bb37b1850c47cc78e | 4,878 | module Slack
module Presenters
module Formatting
def render_id(id) = "<@#{id}>"
def render_quote(text) = text.each_line.map { |line| "> #{line.strip}" }.join("\n")
def one_or_many(quantity, one_val)
# TODO: Maybe a slightly better error?
raise ArgumentError, "Error: `one_or_many` requires `quantity` >= 1" if quantity < 1
if quantity == 1
one_val
else
yield quantity
end
end
def with_reason(point, message)
reason = point.reason
if reason.present?
yield [reason]
else
message
end
end
end
module AwardAnnouncement
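      # Skeleton for a new announcement style: implement .render(point, total_points)
      # and add the class to KLASSES below. Example itself is not listed in KLASSES,
      # so it never appears in the random rotation.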
class Example
extend Slack::Presenters::Formatting
def self.render(point, total_points)
:FIXME
end
end
class Lorem
extend Slack::Presenters::Formatting
def self.render(point, total_points) =
<<~MSG
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et #{render_id point.to_id} habet #{total_points} Spidermanus Pointiae™
Sorry you got this one, it's pretty stupid, but at least #{render_id point.from_id} had this nice thing to say about you:
#{render_quote point.reason}
MSG
end
class Classic
extend Slack::Presenters::Formatting
def self.render(point, total_points)
message = "#{render_id point.from_id} has awarded ONE (1) Spiderman Point™ to #{render_id point.to_id}!"
message = with_reason(point, message) do
<<~MSG
#{message} Why?
#{render_quote point.reason}
MSG
end
points_token = one_or_many(
total_points,
'your first Spiderman Point™'
) { |many| "#{many} Spiderman Points™" }
<<~MSG
#{message} 🎉 Congratulations, #{render_id point.to_id}, you now have #{points_token}! We're all so proud of you, keep it up!!!
MSG
end
end
class PizzaTime
extend Slack::Presenters::Formatting
def self.render(point, total_points)
points_token = one_or_many(
total_points,
'your first Spiderman Point™'
) { |many| "#{many} Spiderman Points™" }
<<~MSG
🍕 PIZZA TIME!!!! 🍕 One hot, fresh Spiderman Point™ coming up for #{render_id point.to_id}, courtesy of #{render_id point.from_id}! That makes #{points_token}! How does it feel?"
In between bites of stuffed crust, #{render_id point.from_id} managed to give this reason for awarding you your latest Spiderman Point™:
#{render_quote point.reason}
MSG
end
end
class Mfw
extend Slack::Presenters::Formatting
def self.render(point, total_points)
r_to = render_id point.to_id
r_from = render_id point.from_id
<<~MSG
mfw #{r_from} just gave #{r_to} a Spiderman Point™... (btw #{r_to}, you have #{total_points} of them now). #{r_from}'s reason for this Spiderman Point™:
#{render_quote point.reason}
MSG
end
end
class Hey
extend Slack::Presenters::Formatting
def self.render(point, total_points)
message = "Hey #{render_id point.to_id}, have a Spiderman Point™! Don't spend it all in one place!"
with_reason(point, message) do
<<~MSG
#{message} Oh, btw #{render_id point.from_id} also wanted me to tell you this:
#{render_quote point.reason}
MSG
end
end
end
class HaveYouEver
extend Slack::Presenters::Formatting
def self.render(point, total_points)
message = <<~MSG
Has Anyone Really Been Far Even as Decided to Use Even Go Want to do Spideman Points™? #{render_id point.to_id} sure has!!!
MSG
with_reason(point, message) do
<<~MSG
#{message} When asked for comment, #{render_id point.from_id} said this:
#{render_quote point.reason}
MSG
end
end
end
class AndSoCanYou
extend Slack::Presenters::Formatting
def self.render(point, total_points) =
<<~MSG
#{render_id point.to_id} is Spiderman Point™ and so can you! #{total_points} points sure is nothing to sneeze at...
#{render_id point.from_id} give Spiderman Point™ reason why?
#{render_quote point.reason}
MSG
end
KLASSES = [
AndSoCanYou,
Classic,
HaveYouEver,
Hey,
Lorem,
Mfw,
PizzaTime,
]
def self.random(...) = KLASSES.sample.render(...)
end
end
end
| 29.385542 | 189 | 0.575236 |
91855708f0049f50019c7cdff2dedf3b6b5334c2 | 3,576 | # frozen_string_literal: true
module Parslet
module Atoms
class Infix < Parslet::Atoms::Base
attr_reader :element, :operations, :reducer
def initialize(element, operations, &reducer)
super()
@element = element
@operations = operations
@reducer = reducer || ->(left, op, right) { { l: left, o: op, r: right } }
end
def try(source, context, consume_all)
catch(:error) do
return succ(
produce_tree(
precedence_climb(source, context, consume_all)
)
)
end
end
# Turns an array of the form ['1', '+', ['2', '*', '3']] into a hash that
# reflects the same structure.
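      # With the default reducer the example above becomes
      #   { l: '1', o: '+', r: { l: '2', o: '*', r: '3' } }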
#
def produce_tree(ary)
return ary unless ary.is_a? Array
left = ary.shift
until ary.empty?
op, right = ary.shift(2)
# p [left, op, right]
left = if right.is_a? Array
# Subexpression -> Subhash
reducer.call(left, op, produce_tree(right))
else
reducer.call(left, op, right)
end
end
left
end
# A precedence climbing algorithm married to parslet, as described here
# http://eli.thegreenplace.net/2012/08/02/parsing-expressions-by-precedence-climbing/
#
      # @note Error handling in this routine is done by throwing :error, with the
      #   error to return to parslet as the thrown value. This avoids cluttering
# the recursion logic here with parslet error handling.
#
def precedence_climb(source, context, consume_all, current_prec = 1, _needs_element = false)
result = []
# To even begin parsing an arithmetic expression, there needs to be
# at least one @element.
success, value = @element.apply(source, context, false)
unless success
throw :error, context.err(self, source, "#{@element.inspect} was expected", [value])
end
result << flatten(value, true)
# Loop until we fail on operator matching or until input runs out.
loop do
op_pos = source.bytepos
op_match, prec, assoc = match_operation(source, context, false)
# If no operator could be matched here, one of several cases
# applies:
#
# - end of file
# - end of expression
# - syntax error
#
# We abort matching the expression here.
break unless op_match
if prec >= current_prec
next_prec = assoc == :left ? prec + 1 : prec
result << op_match
result << precedence_climb(
source, context, consume_all, next_prec, true
)
else
source.bytepos = op_pos
return unwrap(result)
end
end
unwrap(result)
end
def unwrap(expr)
expr.size == 1 ? expr.first : expr
end
def match_operation(source, context, consume_all)
errors = []
@operations.each do |op_atom, prec, assoc|
success, value = op_atom.apply(source, context, consume_all)
return flatten(value, true), prec, assoc if success
# assert: this was in fact an error, accumulate
errors << value
end
nil
end
def to_s_inner(_prec)
ops = @operations.map { |o, _, _| o.inspect }.join(', ')
"infix_expression(#{@element.inspect}, [#{ops}])"
end
end
end
end
| 28.608 | 98 | 0.555089 |
0131f7297ff902df1688edfbfe1269af5b6569c5 | 1,335 | require "speed_read/version"
require "colorize"
module SpeedRead
  ORP_VISUAL_POS = 20
class << self
trap("INT") { puts "\nGoodbye!"; exit;}
def start(words_per_minute)
puts " " * ORP_VISUAL_POS + "v".colorize(:red)
ARGF.each do |line|
words = tokenize(line)
words.each do |word|
# pad the end of your lines with spaces if they might be shorter than the previous line.
        orp = find_ORP(word)
output = " " * (ORP_VISUAL_POS-orp) + colorize_word(word,orp)
print output.ljust(80, " ") + "#{words_per_minute} wpm\r"
$stdout.flush
sleep (60.0 / words_per_minute.to_i)
end
end
puts
end
def tokenize(input)
input.force_encoding("utf-8").chomp.split(/(?:-|\s)+/).compact.reject{|e| e.empty?}
end
# ORP: Optical Recognition Point (the red-colored alignment pilot),
# # the way Spritz probably does it.
def find_ORP(word)
return 0 if word.nil?
return 4 if word.length > 13
      return [0, 0, 1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3][word.size]
end
def colorize_word(word, i)
return "" unless word
pre = word[0...i]
pivot = word[i] ? word[i].colorize(:red) : ""
suffix = word[i+1..-1]
"#{pre}#{pivot}#{suffix}"
end
end
end
| 27.244898 | 99 | 0.570787 |
61e6aea72074e51b65cbededf589f6059e84566b | 241 | # -*- coding: utf-8 -*-
require "sixarm_ruby_markdown_test"
describe SixArm::Markdown::String do
describe "#new" do
it "ok" do
expect(SixArm::Markdown::String.new).must_be_kind_of(SixArm::Markdown::String)
end
end
end
| 16.066667 | 84 | 0.680498 |
e224a6dbb418980eb0a3851488ca2e037ae270d2 | 1,311 | # encoding: utf-8
require 'spec_helper'
require 'eval_helper'
describe 'EvalHelper' do
context :if_code_after do
class EvalHelperAfterTest
include EvalHelper
def hoge(hash)
msg = hash[:input]
code = if_code_after(hash[:if_cond], hash[:if_proc])
        ret = 'default'
instance_eval code
ret
end
end
cases = [
{
case_no: 1,
case_title: 'if case',
input: 'test',
if_cond: "msg == 'test'",
if_proc: 'ret = "true"',
expected: 'true'
},
{
case_no: 2,
case_title: 'else case',
input: 'not_test',
if_cond: "msg == 'test'",
if_proc: 'ret = "true"',
        expected: 'default'
}
]
cases.each do |c|
it "|case_no=#{c[:case_no]}|case_title=#{c[:case_title]}" do
begin
case_before c
# -- given --
eval_helper = EvalHelperAfterTest.new
# -- when --
actual = eval_helper.hoge(c)
# -- then --
expect(actual).to eq(c[:expected])
ensure
case_after c
end
end
def case_before(c)
# implement each case before
end
def case_after(c)
# implement each case after
end
end
end
end
| 20.169231 | 66 | 0.505721 |
39aeca196a6693f869edbc0f9fab4ec419a8ad50 | 977 |
require 'test_helper'
class UsersSignupTest < ActionDispatch::IntegrationTest
test "invalid signup information" do
get signup_path
assert_no_difference 'User.count' do
post users_path, params: { user: { name: "",
email: "user@invalid",
password: "foo",
password_confirmation: "bar" } }
end
assert_template 'users/new'
end
test "valid signup information" do
get signup_path
assert_difference 'User.count', 1 do
post users_path, params: { user: { name: "Example User",
email: "[email protected]",
password: "password",
password_confirmation: "password" } }
end
follow_redirect!
assert_template 'users/show'
assert is_logged_in?
end
end | 32.566667 | 78 | 0.501535 |
79a1cbb2bc2c3223252da47d65e72d0b8ef09c19 | 1,948 | # frozen_string_literal: true
namespace :jobs do
desc 'Import USAJobs XML file'
task :import_usajobs_xml, [:filename] => :environment do |_t, args|
if args.filename.nil?
puts 'usage: rake jobs:import_usajobs_xml[filename.xml]'
else
importer = UsajobsData.new(args.filename)
importer.import
end
end
desc 'Import Neogov YAML file containing agency info'
task :import_neogov_rss, [:yaml_filename] => :environment do |_t, args|
begin
YAML.safe_load(File.read(args.yaml_filename)).each do |config|
agency, details = config
tags = details['tags']
organization_id = details['organization_id']
organization_name = details['organization_name']
if agency.blank? || tags.blank? || organization_id.blank?
puts 'Agency, tags, and organization ID are required for each record. Skipping record....'
else
importer = NeogovData.new(agency, tags, organization_id, organization_name)
importer.import
puts "Imported jobs for #{agency} at #{Time.now}"
end
end
rescue StandardError => e
puts "Trouble running import script: #{e}"
puts e.backtrace
puts '-' * 80
puts 'usage: rake jobs:import_neogov_rss[yaml_filename]'
puts 'Example YAML file syntax:'
puts 'bloomingtonmn:'
puts "\ttags: city tag_2"
puts "\torganization_id: US-MN:CITY-BLOOMINGTON"
puts "\torganization_name: City of Bloomington"
puts 'ohio:'
puts "\ttags: state tag_3"
puts "\torganization_id: US-OH"
end
end
desc 'Recreate position openings index'
task recreate_index: :environment do
PositionOpening.delete_search_index if PositionOpening.search_index_exists?
PositionOpening.create_search_index
end
desc 'Delete expired position openings'
task delete_expired_position_openings: :environment do
PositionOpening.delete_expired_docs
end
end
| 34.175439 | 100 | 0.685832 |
ed0a8a43d0e0fe4c879819e9f75d6d34f1eac3b4 | 7,820 |
if defined?(Wice::Defaults)
# Default number of rows to show per page.
Wice::Defaults::PER_PAGE = 20
# Default order direction
Wice::Defaults::ORDER_DIRECTION = 'asc'
# Default name for a grid. A grid name is the basis for a lot of
# names including parameter names, DOM IDs, etc
# The shorter the name is the shorter the request URI will be.
Wice::Defaults::GRID_NAME = 'grid'
  # If REUSE_LAST_COLUMN_FOR_FILTER_ICONS is true and the last column has neither a filter nor a column name, it will be used
# for filter related icons (filter icon, reset icon, show/hide icon), otherwise an additional table column is added.
Wice::Defaults::REUSE_LAST_COLUMN_FOR_FILTER_ICONS = true
# The label of the first option of a custom dropdown list meaning 'All items'
Wice::Defaults::CUSTOM_FILTER_ALL_LABEL = '--'
# A list of classes for the table tag of the grid
Wice::Defaults::DEFAULT_TABLE_CLASSES = ['table', 'table-bordered', 'table-striped', 'list']
# Allow switching between a single and multiple selection modes in custom filters (dropdown boxes)
Wice::Defaults::ALLOW_MULTIPLE_SELECTION = true
# Show the upper pagination panel by default or not
Wice::Defaults::SHOW_UPPER_PAGINATION_PANEL = false
# Disabling CSV export by default
Wice::Defaults::ENABLE_EXPORT_TO_CSV = false
# Default CSV field separator
Wice::Defaults::CSV_FIELD_SEPARATOR = ','
# The strategy when to show the filter.
# * <tt>:when_filtered</tt> - when the table is the result of filtering
# * <tt>:always</tt> - show the filter always
# * <tt>:no</tt> - never show the filter
Wice::Defaults::SHOW_FILTER = :always
# A boolean value specifying if a change in a filter triggers reloading of the grid.
Wice::Defaults::AUTO_RELOAD = false
# SQL operator used for matching strings in string filters.
Wice::Defaults::STRING_MATCHING_OPERATOR = 'LIKE'
# STRING_MATCHING_OPERATOR = 'ILIKE' # Use this for Postgresql case-insensitive matching.
  # Defining one string matching operator globally for the whole application turns out not to be enough
# when you connect to two databases one of which is MySQL and the other is Postgresql.
# If the key for an adapter is missing it will fall back to Wice::Defaults::STRING_MATCHING_OPERATOR
Wice::Defaults::STRING_MATCHING_OPERATORS = {
'ActiveRecord::ConnectionAdapters::MysqlAdapter' => 'LIKE',
'ActiveRecord::ConnectionAdapters::PostgreSQLAdapter' => 'ILIKE'
}
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Advanced Filters #
# Switch of the negation checkbox in all text filters
Wice::Defaults::NEGATION_IN_STRING_FILTERS = false
# Each WiceGrid filter column is defined in two classes, one used for rendering the filter, the other
# for generating query conditions. All these columns are in lib/wice/columns/*.rb .
# File lib/wice/columns/column_processor_index.rb lists all predefined processors.
# In most cases a processor is chosen automatically based on the DB column type,
# for example, integer columns
  # can have two processors, the default one with one input field, and a processor called "range",
# with 2 input fields. In this case it is possible to specify a processor in the column definition:
#
# g.column filter_type: :range
#
# It is also possible to define you own processors:
#
# Wice::Defaults::ADDITIONAL_COLUMN_PROCESSORS = {
# some_key_identifying_new_column_type: ['AViewColumnProcessorClass', 'ConditionsGeneratorClass'],
# another_key_identifying_new_column_type: ['AnotherViewColumnProcessorClass', 'AnotherConditionsGeneratorClass']
# }
#
# Column processor keys/names should not coincide with the existing keys/names (see lib/wice/columns/column_processor_index.rb)
# the value is a 2-element array with 2 strings, the first should be a name of view processor class inherited from
# Wice::Columns::ViewColumn, the second should be a name of conditions generator class inherited from
# Wice::Columns::ConditionsGeneratorColumn .
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Showing All Queries #
# Enable or disable showing all queries (non-paginated table)
Wice::Defaults::ALLOW_SHOWING_ALL_QUERIES = true
Wice::Defaults::ALLOW_SHOWING_ALL_RECORDS = true
# If number of all queries is more than this value, the user will be given a warning message
Wice::Defaults::START_SHOWING_WARNING_FROM = 200
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Saving Queries #
# ActiveRecord model to store queries. Read the documentation for details
# QUERY_STORE_MODEL = 'WiceGridSerializedQuery'
Wice::Defaults::QUERY_STORE_MODEL = 'WiceGridSerializedQuery'
# # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # #
# Here go settings related to the calendar helpers #
# The default style of the date and datetime helper
# * <tt>:calendar</tt> - JS calendar
# * <tt>:html5</tt> - HTML5 date input field
# * <tt>:standard</tt> - standard Rails date and datetime helpers
Wice::Defaults::HELPER_STYLE = :calendar
# Format of the datetime displayed.
# If you change the format, make sure to check if +DATETIME_PARSER+ can still parse this string.
Wice::Defaults::DATETIME_FORMAT = "%Y-%m-%d %H:%M"
# Format of the date displayed.
# If you change the format, make sure to check if +DATE_PARSER+ can still parse this string.
Wice::Defaults::DATE_FORMAT = "%Y-%m-%d"
# Format of the date displayed in jQuery's Datepicker
# If you change the format, make sure to check if +DATE_PARSER+ can still parse this string.
Wice::Defaults::DATE_FORMAT_JQUERY = "yy-mm-dd"
# With Calendar helpers enabled the parameter sent is the string displayed. This lambda will be given a date string in the
# format defined by +DATETIME_FORMAT+ and must generate a DateTime object.
# In many cases <tt>Time.zone.parse</tt> is enough, for instance, <tt>%Y-%m-%d</tt>. If you change the format, make sure to check this code
# and modify it if needed.
Wice::Defaults::DATETIME_PARSER = lambda{|datetime_string|
if datetime_string.blank?
nil
elsif Time.zone
Time.zone.parse(datetime_string)
else
Time.parse(datetime_string)
end
}
# The range of years to display in jQuery Datepicker.
# It can always be changed dynamically with the following javascript:
# $( ".hasDatepicker" ).datepicker( "option", "yearRange", "2000:2042" );
Wice::Defaults::DATEPICKER_YEAR_RANGE = (from = Date.current.year - 10).to_s + ':' + (from + 15).to_s
# With Calendar helpers enabled the parameter sent is the string displayed. This lambda will be given a date string in the
  # format defined by +DATE_FORMAT+ and must generate a Date object.
# In many cases <tt>Date.parse</tt> is enough, for instance, <tt>%Y-%m-%d</tt>. If you change the format, make sure to check this code
# and modify it if needed.
Wice::Defaults::DATE_PARSER = lambda{|date_string|
if date_string.blank?
nil
else
Date.parse(date_string)
end
}
# Icon to popup the calendar.
Wice::Defaults::CALENDAR_ICON = '/plugin_assets/redmine_login_audit/images/wice_grid/calendar_view_month.png'
# popup calendar will be shown relative to the popup trigger element or to the mouse pointer
Wice::Defaults::POPUP_PLACEMENT_STRATEGY = :trigger # :pointer
# The name of the page method (should correspond to Kaminari.config.page_method_name)
Wice::Defaults::PAGE_METHOD_NAME = :page
end | 44.942529 | 142 | 0.685806 |
ff18da69e897f82cc88c1abffe4dcc200acdb814 | 986 | # -*- encoding: utf-8 -*-
$:.push File.expand_path('../lib', __FILE__)
require 'whois/parser/version'
Gem::Specification.new do |s|
s.name = 'whois-parser'
s.version = Whois::Parser::VERSION
s.authors = ['Simone Carletti']
s.email = ['[email protected]']
s.homepage = 'https://whoisrb.org/'
s.summary = 'A pure Ruby WHOIS parser.'
s.description = 'Whois Parser is a WHOIS parser written in pure Ruby. It can parse and convert responses into easy-to-use Ruby objects.'
s.license = 'MIT'
s.required_ruby_version = ">= 2.3"
s.require_paths = %w( lib )
s.files = `git ls-files`.split("\n")
s.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
s.extra_rdoc_files = %w( LICENSE.txt .yardopts )
s.add_dependency "whois", ">= 4.1.0"
s.add_dependency "activesupport", ">= 4"
s.add_development_dependency "rake"
s.add_development_dependency "rspec"
s.add_development_dependency "yard"
end | 35.214286 | 138 | 0.64503 |
1d719b68858e2efb08e7ed6d816023a1fe53f92d | 480 | class CompanyMailer < ApplicationMailer
def company_registered(company)
@company = company
mail(to: "[email protected]",
subject: "Company Registration Request")
end
def company_approved(company)
@company = company
mail(to: @company.email,
subject: "#{@company.name} is live on UpStage Community!")
@company.company_members.each do |member|
mail(to: member.user.email,
subject: "#{@company.name} is live on UpStage Community!")
end
end
end
| 24 | 62 | 0.725 |
9152e04fe4cdfe6f6ec4d13f06fc66b1fbbc74a0 | 1,129 | require 'spec_helper_acceptance'
broken = false
if fact('osfamily') == 'windows'
puts "Not implemented on Windows"
broken = true
elsif fact('osfamily') == 'RedHat'
docker_args = "repo_opt => '--enablerepo=localmirror-extras'"
end
describe 'docker network', :win_broken => broken do
command = 'docker'
before(:all) do
install_code = "class { 'docker': #{docker_args}}"
apply_manifest(install_code, :catch_failures=>true)
end
describe command("#{command} network --help") do
its(:exit_status) { should eq 0 }
end
context 'with a local bridge network described in Puppet' do
before(:all) do
@name = 'test-network'
@pp = <<-code
docker_network { '#{@name}':
ensure => present,
}
code
apply_manifest(@pp, :catch_failures=>true)
end
it 'should be idempotent' do
apply_manifest(@pp, :catch_changes=>true)
end
it 'should have created a network' do
shell("#{command} network inspect #{@name}", :acceptable_exit_codes => [0])
end
after(:all) do
shell("#{command} network rm #{@name}")
end
end
end
| 23.520833 | 81 | 0.632418 |
edb82f8343d8142b56212df2cc8e1bc324ffeeaa | 1,691 | require File.expand_path('../../../spec_helper', __FILE__)
ruby_version_is "1.8.7" do
require 'securerandom'
describe "SecureRandom.hex" do
    it "generates a random hex string of length twice the specified argument" do
(1..64).each do |idx|
hex = SecureRandom.hex(idx)
hex.should be_kind_of(String)
hex.length.should == 2 * idx
end
      hex = SecureRandom.hex(5.5)
      hex.should be_kind_of(String)
      hex.length.should eql(10)
end
it "returns an empty string when argument is 0" do
SecureRandom.hex(0).should == ""
end
it "generates different hex strings with subsequent invocations" do
# quick and dirty check, but good enough
values = []
256.times do
hex = SecureRandom.hex
# make sure the random values are not repeating
values.include?(hex).should == false
values << hex
end
end
it "generates a random hex string of length 32 if no argument is provided" do
SecureRandom.hex.should be_kind_of(String)
SecureRandom.hex.length.should == 32
end
    it "treats a nil argument as the default and generates a random hex string of length 32" do
SecureRandom.hex(nil).should be_kind_of(String)
SecureRandom.hex(nil).length.should == 32
end
it "raises ArgumentError on negative arguments" do
lambda {
SecureRandom.hex(-1)
}.should raise_error(ArgumentError)
end
it "tries to convert the passed argument to an Integer using #to_int" do
obj = mock("to_int")
obj.should_receive(:to_int).and_return(5)
SecureRandom.hex(obj).size.should eql(10)
end
end
end
| 29.666667 | 93 | 0.655825 |
f84e6e26a8c8d52240ead8fd69d47293db15b6f2 | 5,780 | require 'spec_helper'
module Bosh::Director
describe Jobs::Helpers::StemcellDeleter do
let(:blobstore) { instance_double(Bosh::Blobstore::BaseClient) }
let(:cloud) { instance_double(Bosh::Clouds::ExternalCpi) }
let(:cloud_factory) { instance_double(BD::CloudFactory) }
let(:stemcell_deleter) { Jobs::Helpers::StemcellDeleter.new(logger) }
let(:stemcell) { Models::Stemcell.make(name: 'test_stemcell', version: 'test_version', cid: 'stemcell_cid') }
before do
fake_locks
allow(Bosh::Director::CloudFactory).to receive(:create).and_return(cloud_factory)
allow(cloud_factory).to receive(:get).with('').and_return(cloud)
end
context 'when stemcell deletion fails' do
it "should raise error if CPI can't delete the stemcell" do
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid').and_raise('error')
expect {
stemcell_deleter.delete(stemcell)
}.to raise_error(/error/)
end
it 'should raise error if the deployments still reference this stemcell' do
deployment_1 = Models::Deployment.make(name: 'test-1')
deployment_1.add_stemcell(stemcell)
deployment_2 = Models::Deployment.make(name: 'test-2')
deployment_2.add_stemcell(stemcell)
expect {
stemcell_deleter.delete(stemcell)
}.to raise_error StemcellInUse, "Stemcell 'test_stemcell/test_version' is still in use by: test-1, test-2"
end
end
context 'when CPI raises an error AND the "force" option is used' do
it 'should not raise an error' do
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid').and_raise('error')
expect { stemcell_deleter.delete(stemcell, 'force' => true) }.not_to raise_error
end
it 'should delete stemcell metadata' do
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid').and_raise('error')
stemcell_deleter.delete(stemcell, 'force' => true)
expect(Models::Stemcell.all).to be_empty
end
it 'should NOT delete associated compiled packages, but set stemcell_id to nil' do
associated_package = Models::CompiledPackage.make(
package: Models::Package.make,
blobstore_id: 'compiled-package-blb-1',
stemcell_os: 'Plan 9',
stemcell_version: '9'
)
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid').and_raise('error')
expect(blobstore).not_to receive(:delete).with('compiled-package-blb-1')
stemcell_deleter.delete(stemcell, 'force' => true)
expect(Models::CompiledPackage[associated_package.id]).to eq(associated_package)
end
end
context 'when stemcell deletion succeeds' do
let(:stemcell_stage) { instance_double(Bosh::Director::EventLog::Stage) }
let(:stemcell_metadata_stage) { instance_double(Bosh::Director::EventLog::Stage) }
let(:compiled_package_stage) { instance_double(Bosh::Director::EventLog::Stage) }
it 'should delete the stemcell models if the CPI deleted the stemcell' do
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid')
stemcell_deleter.delete(stemcell)
expect(Models::Stemcell.all).to be_empty
end
it 'should NOT delete the associated compiled packages, but set stemcell_id to nil' do
associated_package = Models::CompiledPackage.make(
package: Models::Package.make(name: 'package-name', version: 'version'),
blobstore_id: 'compiled-package-blb-1',
stemcell_os: 'AIX',
stemcell_version: '7.1'
)
unassociated_package = Models::CompiledPackage.make(
package: Models::Package.make,
blobstore_id: 'compiled-package-blb-2',
stemcell_os: 'AIX',
stemcell_version: '7.2'
)
expect(cloud).to receive(:delete_stemcell).with('stemcell_cid')
expect(blobstore).not_to receive(:delete).with('compiled-package-blb-1')
stemcell_deleter.delete(stemcell)
expect(Models::CompiledPackage[associated_package.id]).to eq(associated_package)
expect(Models::CompiledPackage[unassociated_package.id]).to eq(unassociated_package)
end
end
describe 'looking up clouds for a stemcell' do
let(:cloud_factory) { instance_double(BD::CloudFactory) }
before {
allow(BD::CloudFactory).to receive(:create).and_return(cloud_factory)
}
context 'if no cpi is set on stemcell' do
let(:stemcell) { Models::Stemcell.make(name: 'test_stemcell', version: 'test_version', cid: 'stemcell_cid', cpi: '') }
it 'calls the default cloud' do
cloud = instance_double(Bosh::Clouds::ExternalCpi)
expect(cloud_factory).to receive(:get).with('').and_return(cloud)
expect(cloud).to receive(:delete_stemcell)
stemcell_deleter.delete(stemcell)
end
end
context 'if a certain cpi is set on a stemcell' do
let(:stemcell) { Models::Stemcell.make(name: 'test_stemcell', version: 'test_version', cid: 'stemcell_cid', cpi: 'cpi1') }
it 'calls the cloud that cloud factory returns' do
cloud = instance_double(Bosh::Clouds::ExternalCpi)
expect(cloud_factory).to receive(:get).with('cpi1').and_return(cloud)
expect(cloud).to receive(:delete_stemcell)
stemcell_deleter.delete(stemcell)
end
it 'fails if cloud factory does not return a cloud for the cpi' do
expect(cloud_factory).to receive(:get).with('cpi1').and_return(nil)
expect{
stemcell_deleter.delete(stemcell)
}.to raise_error /Stemcell has CPI defined \(cpi1\) that is not configured anymore./
end
end
end
end
end
| 40.41958 | 130 | 0.669377 |
1defdef5ce5e73abb890505355e3dd2e136e3f37 | 45 | fput "go glowing exit"
move "go glowing exit" | 22.5 | 22 | 0.755556 |
01324a7ae9a0e5a89e7894aaf8aa5df03975ed2a | 195 | require 'test_helper'
class SomethingWrongControllerTest < ActionDispatch::IntegrationTest
test "should get index" do
get something_wrong_index_url
assert_response :success
end
end
| 19.5 | 68 | 0.8 |
5dcedaa6d9567e7acda24e878c8f06a0b0301cc9 | 196 | class MadlibsController < ApplicationController
# INDEX -- index route for all completed mad libs (stories)
get '/madlibs' do
@madlibs = Madlib.all
erb :'madlibs/madlibs'
end
end
| 21.777778 | 61 | 0.704082 |
03177d9954276aedcaf27fb75b3a043f177ee7c8 | 171 | desc "Restart app by touching tmp/restart.txt"
task :restart do
verbose(false) do
mkdir_p "tmp"
touch "tmp/restart.txt"
rm_f "tmp/pids/server.pid"
end
end
| 19 | 46 | 0.690058 |
2841f47b7ccb85dae666acf2bf4a2270b38a4523 | 2,585 | # frozen_string_literal: true
module RuboCop
module Cop
module Lint
# This cop checks for nested method definitions.
#
# @example
#
# # bad
#
# # `bar` definition actually produces methods in the same scope
# # as the outer `foo` method. Furthermore, the `bar` method
# # will be redefined every time `foo` is invoked.
# def foo
# def bar
# end
# end
#
# @example
#
# # good
#
# def foo
# bar = -> { puts 'hello' }
# bar.call
# end
#
# @example
#
# # good
#
# def foo
# self.class_eval do
# def bar
# end
# end
# end
#
# def foo
# self.module_exec do
# def bar
# end
# end
# end
#
# @example
#
# # good
#
# def foo
# class << self
# def bar
# end
# end
# end
class NestedMethodDefinition < Cop
include OnMethodDef
extend RuboCop::NodePattern::Macros
MSG = 'Method definitions must not be nested. ' \
'Use `lambda` instead.'.freeze
def on_method_def(node, _method_name, _args, _body)
find_nested_defs(node) do |nested_def_node|
add_offense(nested_def_node, :expression)
end
end
def find_nested_defs(node, &block)
node.each_child_node do |child|
if child.def_type?
yield child
elsif child.defs_type?
subject, = *child
next if subject.lvar_type?
yield child
elsif !scoping_method_call?(child)
find_nested_defs(child, &block)
end
end
end
private
def scoping_method_call?(child)
eval_call?(child) || exec_call?(child) || child.sclass_type? ||
class_or_module_or_struct_new_call?(child)
end
def_node_matcher :eval_call?, <<-PATTERN
(block (send _ {:instance_eval :class_eval :module_eval} ...) ...)
PATTERN
def_node_matcher :exec_call?, <<-PATTERN
(block (send _ {:instance_exec :class_exec :module_exec} ...) ...)
PATTERN
def_node_matcher :class_or_module_or_struct_new_call?, <<-PATTERN
(block (send (const nil {:Class :Module :Struct}) :new ...) ...)
PATTERN
end
end
end
end
| 24.386792 | 76 | 0.494778 |
1d6acc72bfa709de2acf5c0326ff7e2d42d15e9b | 1,769 | =begin
Swagger Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
License: Apache 2.0
http://www.apache.org/licenses/LICENSE-2.0.html
Terms of Service: http://swagger.io/terms/
=end
require 'spec_helper'
require 'json'
# Unit tests for Petstore::FakeApi
# Automatically generated by swagger-codegen (github.com/swagger-api/swagger-codegen)
# Please update as you see appropriate
describe 'FakeApi' do
before do
# run before each test
@instance = Petstore::FakeApi.new
end
after do
# run after each test
end
describe 'test an instance of FakeApi' do
it 'should create an instance of FakeApi' do
expect(@instance).to be_instance_of(Petstore::FakeApi)
end
end
# unit tests for test_endpoint_parameters
# Fake endpoint for testing various parameters 假端點 偽のエンドポイント 가짜 엔드 포인트
# Fake endpoint for testing various parameters 假端點 偽のエンドポイント 가짜 엔드 포인트
# @param number None
# @param double None
# @param string None
# @param byte None
# @param [Hash] opts the optional parameters
# @option opts [Integer] :integer None
# @option opts [Integer] :int32 None
# @option opts [Integer] :int64 None
# @option opts [Float] :float None
# @option opts [String] :binary None
# @option opts [Date] :date None
# @option opts [DateTime] :date_time None
# @option opts [String] :password None
# @return [nil]
describe 'test_endpoint_parameters test' do
it "should work" do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 28.079365 | 130 | 0.726399 |
8753e083e8791014b32a6e92acba33419830cca2 | 71 | class Author
include MongoMapper::Document
key :handle, String
end
| 14.2 | 31 | 0.774648 |
1c48e48d7a85288f5b1a9b19b57b62d38db139a3 | 1,068 | require File.expand_path('../../spec_helper', __FILE__)
describe RightApi::Client, :unit=>true do
context 'when the RightScale API misbehaves by sending empty bodies with 200 response' do
before(:each) do
given_user_facing_client
@result = Net::HTTPOK.new('1.1', '200', 'OK')
@result.set_content_type('application/vnd.rightscale.server+json')
@request = RestClient::Request.new(:method => 'GET', :headers => {}, :url => '/api/servers/1')
@response = RestClient::Response.create('', @result, {}, @request)
flexmock(@rest_client).should_receive(:get).with(Hash, Proc).and_yield(@response, @request, @result)
flexmock(@rest_client).should_receive(:post).with(Hash, Hash, Proc).and_yield(@response, @request, @result)
end
it 'raises an empty body error for a GET' do
expect { @client.servers(:id => 1).show }.to raise_error(RightApi::EmptyBodyError)
end
it 'raises an empty body error for a POST' do
expect { @client.servers.create }.to raise_error(RightApi::EmptyBodyError)
end
end
end
| 44.5 | 113 | 0.683521 |
2184a550a5466e86350999f1159e35550ccf1974 | 15,066 | Sketchup::require "geores_src/geores_import/geores_rexml/validation/validation.rb"
Sketchup::require "geores_src/geores_import/geores_rexml/parsers/baseparser.rb"
module REXML
module Validation
# Implemented:
# * empty
# * element
# * attribute
# * text
# * optional
# * choice
# * oneOrMore
# * zeroOrMore
# * group
# * value
# * interleave
# * mixed
# * ref
# * grammar
# * start
# * define
#
# Not implemented:
# * data
# * param
# * include
# * externalRef
# * notAllowed
# * anyName
# * nsName
# * except
# * name
class RelaxNG
include Validator
INFINITY = 1.0 / 0.0
EMPTY = Event.new( nil )
TEXT = [:start_element, "text"]
attr_accessor :current
attr_accessor :count
attr_reader :references
# FIXME: Namespaces
def initialize source
parser = REXML::Parsers::BaseParser.new( source )
@count = 0
@references = {}
@root = @current = Sequence.new(self)
@root.previous = true
states = [ @current ]
begin
event = parser.pull
case event[0]
when :start_element
case event[1]
when "empty"
when "element", "attribute", "text", "value"
states[-1] << event
when "optional"
states << Optional.new( self )
states[-2] << states[-1]
when "choice"
states << Choice.new( self )
states[-2] << states[-1]
when "oneOrMore"
states << OneOrMore.new( self )
states[-2] << states[-1]
when "zeroOrMore"
states << ZeroOrMore.new( self )
states[-2] << states[-1]
when "group"
states << Sequence.new( self )
states[-2] << states[-1]
when "interleave"
states << Interleave.new( self )
states[-2] << states[-1]
when "mixed"
states << Interleave.new( self )
states[-2] << states[-1]
states[-1] << TEXT
when "define"
states << [ event[2]["name"] ]
when "ref"
states[-1] << Ref.new( event[2]["name"] )
when "anyName"
states << AnyName.new( self )
states[-2] << states[-1]
when "nsName"
when "except"
when "name"
when "data"
when "param"
when "include"
when "grammar"
when "start"
when "externalRef"
when "notAllowed"
end
when :end_element
case event[1]
when "element", "attribute"
states[-1] << event
when "zeroOrMore", "oneOrMore", "choice", "optional",
"interleave", "group", "mixed"
states.pop
when "define"
ref = states.pop
@references[ ref.shift ] = ref
#when "empty"
end
when :end_document
states[-1] << event
when :text
states[-1] << event
end
end while event[0] != :end_document
end
def receive event
validate( event )
end
end
class State
def initialize( context )
@previous = []
@events = []
@current = 0
@count = context.count += 1
@references = context.references
@value = false
end
def reset
return if @current == 0
@current = 0
@events.each {|s| s.reset if s.kind_of? State }
end
def previous=( previous )
@previous << previous
end
def next( event )
#print "In next with #{event.inspect}. "
#puts "Next (#@current) is #{@events[@current]}"
#p @previous
return @previous.pop.next( event ) if @events[@current].nil?
expand_ref_in( @events, @current ) if @events[@current].class == Ref
if ( @events[@current].kind_of? State )
@current += 1
@events[@current-1].previous = self
return @events[@current-1].next( event )
end
#puts "Current isn't a state"
if ( @events[@current].matches?(event) )
@current += 1
if @events[@current].nil?
#puts "#{inspect[0,5]} 1RETURNING #{@previous.inspect[0,5]}"
return @previous.pop
elsif @events[@current].kind_of? State
@current += 1
#puts "#{inspect[0,5]} 2RETURNING (#{@current-1}) #{@events[@current-1].inspect[0,5]}; on return, next is #{@events[@current]}"
@events[@current-1].previous = self
return @events[@current-1]
else
#puts "#{inspect[0,5]} RETURNING self w/ next(#@current) = #{@events[@current]}"
return self
end
else
return nil
end
end
def to_s
# Abbreviated:
self.class.name =~ /(?:::)(\w)\w+$/
# Full:
#self.class.name =~ /(?:::)(\w+)$/
"#$1.#@count"
end
def inspect
"< #{to_s} #{@events.collect{|e|
pre = e == @events[@current] ? '#' : ''
pre + e.inspect unless self == e
}.join(', ')} >"
end
def expected
return [@events[@current]]
end
def <<( event )
add_event_to_arry( @events, event )
end
protected
def expand_ref_in( arry, ind )
new_events = []
@references[ arry[ind].to_s ].each{ |evt|
add_event_to_arry(new_events,evt)
}
arry[ind,1] = new_events
end
def add_event_to_arry( arry, evt )
evt = generate_event( evt )
if evt.kind_of? String
arry[-1].event_arg = evt if arry[-1].kind_of? Event and @value
@value = false
else
arry << evt
end
end
def generate_event( event )
return event if event.kind_of? State or event.class == Ref
evt = nil
arg = nil
case event[0]
when :start_element
case event[1]
when "element"
evt = :start_element
arg = event[2]["name"]
when "attribute"
evt = :start_attribute
arg = event[2]["name"]
when "text"
evt = :text
when "value"
evt = :text
@value = true
end
when :text
return event[1]
when :end_document
return Event.new( event[0] )
else # then :end_element
case event[1]
when "element"
evt = :end_element
when "attribute"
evt = :end_attribute
end
end
return Event.new( evt, arg )
end
end
class Sequence < State
def matches?(event)
@events[@current].matches?( event )
end
end
class Optional < State
def next( event )
if @current == 0
rv = super
return rv if rv
@prior = @previous.pop
return @prior.next( event )
end
super
end
def matches?(event)
@events[@current].matches?(event) ||
(@current == 0 and @previous[-1].matches?(event))
end
def expected
return [ @prior.expected, @events[0] ].flatten if @current == 0
return [@events[@current]]
end
end
class ZeroOrMore < Optional
def next( event )
expand_ref_in( @events, @current ) if @events[@current].class == Ref
if ( @events[@current].matches?(event) )
@current += 1
if @events[@current].nil?
@current = 0
return self
elsif @events[@current].kind_of? State
@current += 1
@events[@current-1].previous = self
return @events[@current-1]
else
return self
end
else
@prior = @previous.pop
return @prior.next( event ) if @current == 0
return nil
end
end
def expected
return [ @prior.expected, @events[0] ].flatten if @current == 0
return [@events[@current]]
end
end
class OneOrMore < State
def initialize context
super
@ord = 0
end
def reset
super
@ord = 0
end
def next( event )
expand_ref_in( @events, @current ) if @events[@current].class == Ref
if ( @events[@current].matches?(event) )
@current += 1
@ord += 1
if @events[@current].nil?
@current = 0
return self
elsif @events[@current].kind_of? State
@current += 1
@events[@current-1].previous = self
return @events[@current-1]
else
return self
end
else
return @previous.pop.next( event ) if @current == 0 and @ord > 0
return nil
end
end
def matches?( event )
@events[@current].matches?(event) ||
(@current == 0 and @ord > 0 and @previous[-1].matches?(event))
end
def expected
if @current == 0 and @ord > 0
return [@previous[-1].expected, @events[0]].flatten
else
return [@events[@current]]
end
end
end
class Choice < State
def initialize context
super
@choices = []
end
def reset
super
@events = []
@choices.each { |c| c.each { |s| s.reset if s.kind_of? State } }
end
def <<( event )
add_event_to_arry( @choices, event )
end
def next( event )
# Make the choice if we haven't
if @events.size == 0
c = 0 ; max = @choices.size
while c < max
if @choices[c][0].class == Ref
expand_ref_in( @choices[c], 0 )
@choices += @choices[c]
@choices.delete( @choices[c] )
max -= 1
else
c += 1
end
end
@events = @choices.find { |evt| evt[0].matches? event }
# Remove the references
# Find the events
end
#puts "In next with #{event.inspect}."
#puts "events is #{@events.inspect}"
unless @events
@events = []
return nil
end
#puts "current = #@current"
super
end
def matches?( event )
return @events[@current].matches?( event ) if @events.size > 0
        !@choices.find{|evt| evt[0].matches?(event)}.nil?
end
def expected
#puts "IN CHOICE EXPECTED"
#puts "EVENTS = #{@events.inspect}"
return [@events[@current]] if @events.size > 0
return @choices.collect do |x|
if x[0].kind_of? State
x[0].expected
else
x[0]
end
end.flatten
end
def inspect
"< #{to_s} #{@choices.collect{|e| e.collect{|f|f.to_s}.join(', ')}.join(' or ')} >"
end
protected
def add_event_to_arry( arry, evt )
if evt.kind_of? State or evt.class == Ref
arry << [evt]
elsif evt[0] == :text
if arry[-1] and
arry[-1][-1].kind_of?( Event ) and
arry[-1][-1].event_type == :text and @value
arry[-1][-1].event_arg = evt[1]
@value = false
end
else
arry << [] if evt[0] == :start_element
arry[-1] << generate_event( evt )
end
end
end
class Interleave < Choice
def initialize context
super
@choice = 0
end
def reset
@choice = 0
end
def next_current( event )
# Expand references
c = 0 ; max = @choices.size
while c < max
if @choices[c][0].class == Ref
expand_ref_in( @choices[c], 0 )
@choices += @choices[c]
@choices.delete( @choices[c] )
max -= 1
else
c += 1
end
end
@events = @choices[@choice..-1].find { |evt| evt[0].matches? event }
@current = 0
if @events
# reorder the choices
old = @choices[@choice]
idx = @choices.index( @events )
@choices[@choice] = @events
@choices[idx] = old
@choice += 1
end
#puts "In next with #{event.inspect}."
#puts "events is #{@events.inspect}"
@events = [] unless @events
end
def next( event )
# Find the next series
next_current(event) unless @events[@current]
return nil unless @events[@current]
expand_ref_in( @events, @current ) if @events[@current].class == Ref
#puts "In next with #{event.inspect}."
#puts "Next (#@current) is #{@events[@current]}"
if ( @events[@current].kind_of? State )
@current += 1
@events[@current-1].previous = self
return @events[@current-1].next( event )
end
#puts "Current isn't a state"
return @previous.pop.next( event ) if @events[@current].nil?
if ( @events[@current].matches?(event) )
@current += 1
if @events[@current].nil?
#puts "#{inspect[0,5]} 1RETURNING self" unless @choices[@choice].nil?
return self unless @choices[@choice].nil?
#puts "#{inspect[0,5]} 1RETURNING #{@previous[-1].inspect[0,5]}"
return @previous.pop
elsif @events[@current].kind_of? State
@current += 1
#puts "#{inspect[0,5]} 2RETURNING (#{@current-1}) #{@events[@current-1].inspect[0,5]}; on return, next is #{@events[@current]}"
@events[@current-1].previous = self
return @events[@current-1]
else
#puts "#{inspect[0,5]} RETURNING self w/ next(#@current) = #{@events[@current]}"
return self
end
else
return nil
end
end
def matches?( event )
return @events[@current].matches?( event ) if @events[@current]
!@choices[@choice..-1].find{|evt| evt[0].matches?(event)}.nil?
end
def expected
#puts "IN CHOICE EXPECTED"
#puts "EVENTS = #{@events.inspect}"
return [@events[@current]] if @events[@current]
return @choices[@choice..-1].collect do |x|
if x[0].kind_of? State
x[0].expected
else
x[0]
end
end.flatten
end
def inspect
"< #{to_s} #{@choices.collect{|e| e.collect{|f|f.to_s}.join(', ')}.join(' and ')} >"
end
end
class Ref
def initialize value
@value = value
end
def to_s
@value
end
def inspect
"{#{to_s}}"
end
end
end
end
| 26.903571 | 139 | 0.484601 |
bf9d675c3cb8bc125b093bff002cce915464943b | 4,337 | require "spec_helper"
require "sauce/connect"
Sauce.config do |c|
c[:start_tunnel] = false
end
describe "Sauce::Utilities::Connect" do
before :each do
@mock_tunnel = double()
end
after :each do
Sauce::Utilities::Connect.instance_variable_set(:@tunnel, nil)
end
describe "##start" do
it "should call Sauce Connect when included" do
@mock_tunnel.stub(:connect).and_return true
@mock_tunnel.stub(:wait_until_ready).and_return true
Sauce::Connect.should_receive(:new).with(anything) {@mock_tunnel}
Sauce::Utilities::Connect.start
end
it "should throw an exception when Sauce Connect is not included" do
Object.should_receive(:require).with("sauce/connect").and_raise LoadError
lambda {Sauce::Utilities::Connect.start}.should raise_error SystemExit
end
it "should connect the new tunnel" do
@mock_tunnel.should_receive(:connect).with().and_return(true)
@mock_tunnel.should_receive(:wait_until_ready).and_return(true)
Sauce::Connect.stub(:new).with(anything).and_return @mock_tunnel
Sauce::Utilities::Connect.start
end
it "should return the tunnel when done" do
@mock_tunnel.stub(:connect).and_return true
@mock_tunnel.stub(:wait_until_ready).and_return true
Sauce::Connect.should_receive(:new).with(anything) {@mock_tunnel}
tunnel = Sauce::Utilities::Connect.start
tunnel.should be @mock_tunnel
end
it "only opens one tunnel" do
@mock_tunnel.stub(:connect).and_return true
@mock_tunnel.stub(:wait_until_ready).and_return true
Sauce::Connect.should_receive(:new).with(anything) {@mock_tunnel}
tunnel = Sauce::Utilities::Connect.start
tunnel_2 = Sauce::Utilities::Connect.start
tunnel.should be tunnel_2
end
end
describe "#close" do
it "makes the tunnel nil when terminated" do
@mock_tunnel.stub(:connect).and_return true
@mock_tunnel.stub(:wait_until_ready).and_return true
@mock_tunnel.should_receive(:disconnect).and_return true
Sauce::Connect.stub(:new).with(anything) {@mock_tunnel}
Sauce::Utilities::Connect.start
Sauce::Utilities::Connect.close
Sauce::Utilities::Connect.instance_variable_get(:@tunnel).should be nil
end
it "calls disconnect" do
@mock_tunnel.stub(:connect).and_return true
@mock_tunnel.stub(:wait_until_ready).and_return true
@mock_tunnel.should_receive(:disconnect).and_return true
Sauce::Connect.stub(:new).with(anything) {@mock_tunnel}
tunnel = Sauce::Utilities::Connect.start
Sauce::Utilities::Connect.close
end
it "does not error if no tunnel exists" do
Sauce::Utilities::Connect.close
end
end
describe "##warn_if_suspect_misconfiguration" do
it "does not fire if no Selenium sessions have been fired" do
end
end
describe "##incorrectly_integrated_warning" do
it "should return rspec warning by default" do
Sauce::Utilities.incorrectly_integrated_warning.should eq rspec_warning
end
it "should allow for the cucumber warning to be selected" do
Sauce::Utilities.incorrectly_integrated_warning(:cuke).should eq cuke_warning
end
end
end
def rspec_warning
return <<-stringend
===============================================================================
Your specs used the Sauce Selenium driver, but not the RSpec integration.
This may result in undesired behaviour, such as configured platforms being
skipped.
You can correct this by tagging specs intended for Sauce with
':sauce => true'.
You can disable this message by setting the 'warn_on_skipped_integration'
config option to false.
===============================================================================
stringend
end
def cuke_warning
return <<-stringend
===============================================================================
Your features used the Sauce Selenium driver, but not the Cucumber integration.
This may result in undesired behaviour, such as configured platforms being
skipped.
You can correct this by tagging features intended for Sauce with
'@selenium'.
You can disable this message by setting the 'warn_on_skipped_integration'
config option to false.
===============================================================================
stringend
end | 31.656934 | 83 | 0.677427 |
39ef9d23b5014db32369057c1442f43bc0348972 | 162 | # frozen_string_literal: true
folders = %w[config infrastructure domain application workers]
folders.each do |folder|
require_relative "#{folder}/init.rb"
end
| 23.142857 | 62 | 0.783951 |
f7892bae49cfb43d6a1f7641802fab1a7bff3cc9 | 3,113 |
module Kzen
module DbHelpers
def self.included(base) #:nodoc:
super(base)
base.extend ClassMethods
end
module ClassMethods
end
private
#### DB SQLITE HELPERS
def db_sqlite?
confs.fetch('db.type') === 'sqlite'
end
alias_method :sqlite?, :db_sqlite?
def db_sqlite_exists?(dbname = 'database')
res = File.exists?("database/#{dbname}.sqlite")
if res
logger.info("SQLite DB: #{bg(dbname)} exists")
return true
else
logger.warn("SQLite DB: #{bdy(dbname)} does NOT exist")
return false
end
end
def db_sqlite_create(dbname = 'database')
run("touch database/#{dbname}.sqlite", debug_opts)
logger.success("created SQLite DB: #{dbname}")
end
def db_sqlite_drop(dbname)
run("rm database/#{dbname}.sqlite", debug_opts)
logger.success("deleted SQLite DB: #{dbname}")
end
#### DB MYSQL HELPERS
def db_mysql?
confs.fetch('db.type') === 'mysql'
end
alias_method :mysql?, :db_mysql?
def db_mysql_exists?(dbname = confs.fetch('db.database'))
res = `/usr/bin/mysql -u #{confs.fetch('db.username')} -e "SHOW DATABASES" | grep #{confs.fetch('db.database')}`.chomp!
if res.nil?
logger.warn("MySQL DB: #{bdy(confs.fetch('db.database'))} does NOT exist")
return false
else
logger.info("MySQL DB: #{bbg(confs.fetch('db.database'))} exists")
return true
end
end
def db_mysql_create(dbname, dbuser)
run("/usr/bin/mysql -u #{dbuser} -e \"CREATE DATABASE IF NOT EXISTS #{dbname}\";", debug_opts)
logger.success("created MySQL DB: #{dbname}")
end
def db_mysql_drop(dbname, dbuser)
run("/usr/bin/mysql -u #{dbuser} -e \"DROP DATABASE IF EXISTS #{dbname}\";", debug_opts)
logger.success("dropped MySQL DB: #{dbname}")
end
#### DB PGSQL HELPERS
def db_pgsql?
confs.fetch('db.type') === 'pgsql'
end
alias_method :pgsql?, :db_pgsql?
def db_pgsql_exists?(dbname = confs.fetch('db.database'))
res = `/usr/bin/psql -U #{confs.fetch('db.username')} -l | grep #{confs.fetch('db.database')} | wc -l`.chomp!
logger.warn("db_pgsql_exists? => [#{res}]")
if res.to_s === '1'
logger.info("PostgreSQL DB: #{bbg(confs.fetch('db.database'))} exists")
return true
else
        logger.warn("PostgreSQL DB: #{bdy(confs.fetch('db.database'))} does NOT exist")
return false
end
end
def db_pgsql_create(dbname, dbuser)
logger.info("about to create a PostgresSQL DB with dbname=[#{dbname}], user=[#{dbuser}]")
run("/usr/bin/createdb -U #{dbuser} #{dbname}", debug_opts)
logger.success("created PostgresSQL DB: #{dbname}")
end
def db_pgsql_drop(dbname, dbuser)
run("/usr/bin/dropdb -U #{dbuser} #{dbname}", debug_opts)
logger.success("dropped PostgresSQL DB: #{dbname}")
end
end
end
| 28.559633 | 127 | 0.578542 |
08a0ca98ec87a3776f1e2746805c01c8bd115b8c | 984 | # -*- encoding : utf-8 -*-
require 'active_support/core_ext/module/aliasing'
module ActiveResource
class Connection
attr_reader :response
def handle_response_with_response_capture(response)
@response = handle_response_without_response_capture(response)
end
def request_with_detailed_log_subscriber(method, path, *arguments)
result = request_without_detailed_log_subscriber(method, path, *arguments)
detailed_log_subscriber(result, arguments)
result
rescue => e
detailed_log_subscriber(e.response, arguments) if e.respond_to?(:response)
raise
end
def detailed_log_subscriber(response, arguments)
ActiveSupport::Notifications.instrument("request.active_resource_detailed") do |payload|
payload[:response] = response
payload[:data] = arguments
end
end
alias_method_chain :handle_response, :response_capture
alias_method_chain :request, :detailed_log_subscriber
end
end
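
# Illustrative only, not part of this patch: one way to consume the
# "request.active_resource_detailed" notification emitted above, e.g. from an
# initializer. Rails.logger is an assumption here; use whatever logger you have.
#
#   ActiveSupport::Notifications.subscribe("request.active_resource_detailed") do |_name, _start, _finish, _id, payload|
#     Rails.logger.debug("ActiveResource #{payload[:data].inspect} -> #{payload[:response].code}")
#   end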
| 29.818182 | 94 | 0.74187 |
abcd2cac3511e9fed6bf694633269c2b14b51582 | 1,255 | class GstLibav < Formula
desc "GStreamer plugins for Libav (a fork of FFmpeg)"
homepage "https://gstreamer.freedesktop.org/"
url "https://gstreamer.freedesktop.org/src/gst-libav/gst-libav-1.8.2.tar.xz"
sha256 "b5f3c7a27b39b5f5c2f0bfd546b0c655020faf6b38d27b64b346c43e5ebf687a"
bottle do
sha256 "bee838b91f4f275a1ed5a42338217ca8220a49e1d6452240fa445466742587ad" => :el_capitan
sha256 "0885f86ea9b76c6c63858b8119c85e992ae26eea92aad512d7b40eb6671cf06e" => :yosemite
sha256 "d7f0ce837f63e3eef73e092aa3d38e38b39d080ab277d87ed9d853f02b7b0a01" => :mavericks
end
head do
url "https://anongit.freedesktop.org/git/gstreamer/gst-libav.git"
depends_on "autoconf" => :build
depends_on "automake" => :build
depends_on "libtool" => :build
depends_on "gettext"
end
depends_on "pkg-config" => :build
depends_on "yasm" => :build
depends_on "gst-plugins-base"
depends_on "xz" # For LZMA
def install
if build.head?
ENV["NOCONFIGURE"] = "yes"
system "./autogen.sh"
end
system "./configure", "--prefix=#{prefix}", "--disable-dependency-tracking"
system "make"
system "make", "install"
end
test do
system "#{Formula["gstreamer"].opt_bin}/gst-inspect-1.0", "libav"
end
end
| 29.880952 | 92 | 0.717928 |
f7aa6fd63c832e4c342d33ce93405b72d06b64eb | 769 | # frozen_string_literal: true
require 'archival'
RSpec.describe Archival::Config do
context 'init' do
it 'inits with defaults when not provided a config' do
config = Archival::Config.new
expect(config).to be_a(Archival::Config)
expect(config.pages_dir).to be_a(String)
expect(config.objects_dir).to be_a(String)
expect(config.root).to be_a(String)
expect(config.build_dir).to be_a(String)
expect(config.helper_port).to be_a(Integer)
end
it 'is not :dev_mode by default' do
config = Archival::Config.new
expect(config.dev_mode).to be(false)
end
it 'accepts a :dev_mode symbol' do
config = Archival::Config.new(dev_mode: true)
expect(config.dev_mode).to be(true)
end
end
end
| 29.576923 | 58 | 0.685306 |
d50d0b5a6580a5516cf20f1febcd31461cadba31 | 8,782 | module TrackerApi
class Client
USER_AGENT = "Ruby/#{RUBY_VERSION} (#{RUBY_PLATFORM}; #{RUBY_ENGINE}) TrackerApi/#{TrackerApi::VERSION} Faraday/#{Faraday::VERSION}".freeze
# Header keys that can be passed in options hash to {#get},{#paginate}
CONVENIENCE_HEADERS = Set.new([:accept, :content_type])
attr_reader :url, :api_version, :token, :logger, :connection, :auto_paginate, :last_response
# Create Pivotal Tracker API client.
#
# @param [Hash] options the connection options
# @option options [String] :token API token to use for requests
# @option options [String] :url Main HTTP API root
# @option options [Boolean] :auto_paginate Client should perform pagination automatically. Default true.
# @option options [String] :api_version The API version URL path
# @option options [String] :logger Custom logger
# @option options [String] :adapter Custom http adapter to configure Faraday with
# @option options [String] :connection_options Connection options to pass to Faraday
#
# @example Creating a Client
# Client.new token: 'my-super-special-token'
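    # @example Customizing the Faraday stack via the optional block
    #   # (:logger is a standard Faraday response middleware, used purely as an illustration)
    #   Client.new(token: 'my-super-special-token') do |builder|
    #     builder.response :logger
    #   end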
def initialize(options={}, &block)
url = options.fetch(:url, 'https://www.pivotaltracker.com')
@url = Addressable::URI.parse(url).to_s
@api_version = options.fetch(:api_version, '/services/v5')
@logger = options.fetch(:logger, ::Logger.new(nil))
adapter = options.fetch(:adapter, :excon)
connection_options = options.fetch(:connection_options, { ssl: { verify: true } })
@auto_paginate = options.fetch(:auto_paginate, true)
@token = options[:token]
raise 'Missing required options: :token' unless @token
@faraday_block = block if block_given?
@connection = Faraday.new({ url: @url }.merge(connection_options)) do |builder|
# response
builder.use Faraday::Response::RaiseError
builder.response :json
# request
builder.request :multipart
builder.request :json
builder.use TrackerApi::Logger, @logger
@faraday_block.call(builder) if @faraday_block
builder.adapter adapter
end
end
# Make a HTTP GET request
#
# @param path [String] The path, relative to api endpoint
# @param options [Hash] Query and header params for request
# @return [Faraday::Response]
def get(path, options = {})
request(:get, parse_query_and_convenience_headers(path, options))
end
# Make a HTTP POST request
#
# @param path [String] The path, relative to api endpoint
# @param options [Hash] Query and header params for request
# @return [Faraday::Response]
def post(path, options = {})
request(:post, parse_query_and_convenience_headers(path, options))
end
# Make a HTTP PUT request
#
# @param path [String] The path, relative to api endpoint
# @param options [Hash] Query and header params for request
# @return [Faraday::Response]
def put(path, options = {})
request(:put, parse_query_and_convenience_headers(path, options))
end
# Make one or more HTTP GET requests, optionally fetching
# the next page of results from information passed back in headers
# based on value in {#auto_paginate}.
#
# @param path [String] The path, relative to {#api_endpoint}
# @param options [Hash] Query and header params for request
# @param block [Block] Block to perform the data concatenation of the
# multiple requests. The block is called with two parameters, the first
# contains the contents of the requests so far and the second parameter
# contains the latest response.
# @return [Array]
def paginate(path, options = {}, &block)
opts = parse_query_and_convenience_headers path, options.dup
auto_paginate = opts[:params].delete(:auto_paginate) { |k| @auto_paginate }
@last_response = request :get, opts
data = @last_response.body
raise TrackerApi::Errors::UnexpectedData, 'Array expected' unless data.is_a? Array
if auto_paginate
pager = Pagination.new @last_response.headers
while pager.more?
opts[:params].update(pager.next_page_params)
@last_response = request :get, opts
pager = Pagination.new @last_response.headers
if block_given?
yield(data, @last_response)
else
data.concat(@last_response.body) if @last_response.body.is_a?(Array)
end
end
end
data
end
# Get projects
#
# @param [Hash] params
# @return [Array[TrackerApi::Resources::Project]]
def projects(params={})
Endpoints::Projects.new(self).get(params)
end
# Get project
#
# @param [Hash] params
# @return [TrackerApi::Resources::Project]
def project(id, params={})
Endpoints::Project.new(self).get(id, params)
end
# Get information about the authenticated user
#
# @return [TrackerApi::Resources::Me]
def me
Endpoints::Me.new(self).get
end
# Get information about a client story without knowing what project the story belongs to
#
# @param [String] story_id
# @param [Hash] params
# @return [TrackerApi::Resources::Story]
def story(story_id, params={})
Endpoints::Story.new(self).get_story(story_id, params)
end
# Get information about an epic without knowing what project the epic belongs to
#
# @param [String] epic_id
# @param [Hash] params
# @return [TrackerApi::Resources::Epic]
def epic(epic_id, params={})
Endpoints::Epic.new(self).get_epic(epic_id, params)
end
# Get notifications for the authenticated person
#
# @param [Hash] params
# @return [Array[TrackerApi::Resources::Notification]]
def notifications(params={})
Endpoints::Notifications.new(self).get(params)
end
# Provides a list of all the activity performed the authenticated person.
#
# @param [Hash] params
# @return [Array[TrackerApi::Resources::Activity]]
def activity(params={})
Endpoints::Activity.new(self).get(params)
end
private
def parse_query_and_convenience_headers(path, options)
raise 'Path can not be blank.' if path.to_s.empty?
opts = { body: options[:body] }
opts[:url] = options[:url] || File.join(@url, @api_version, path.to_s)
opts[:method] = options[:method] || :get
opts[:params] = options[:params] || {}
opts[:token] = options[:token] || @token
headers = { 'User-Agent' => USER_AGENT,
'X-TrackerToken' => opts.fetch(:token) }.merge(options.fetch(:headers, {}))
CONVENIENCE_HEADERS.each do |h|
if header = options[h]
headers[h] = header
end
end
opts[:headers] = headers
opts
end
def request(method, options = {})
url = options.fetch(:url)
params = options[:params] || {}
body = options[:body]
headers = options[:headers]
if (method == :post || method == :put) && options[:body].blank?
body = MultiJson.dump(params)
headers['Content-Type'] = 'application/json'
params = {}
end
@last_response = response = connection.send(method) do |req|
req.url(url)
req.headers.merge!(headers)
req.params.merge!(params)
req.body = body
end
response
rescue Faraday::Error::ClientError => e
raise TrackerApi::Error.new(e)
end
class Pagination
attr_accessor :headers, :total, :limit, :offset, :returned
def initialize(headers)
@headers = headers
@total = headers['x-tracker-pagination-total'].to_i
@limit = headers['x-tracker-pagination-limit'].to_i
@offset = headers['x-tracker-pagination-offset'].to_i
@returned = headers['x-tracker-pagination-returned'].to_i
# if offset is negative (e.g. Iterations Endpoint).
# For the 'Done' scope, negative numbers can be passed, which
# specifies the number of iterations preceding the 'Current' iteration.
# then need to adjust the negative offset to account for a smaller total,
# and set total to zero since we are paginating from -X to 0.
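        # Worked example: offset -12 with limit 10 and total 5 becomes
        # offset -5 and total 0, so #more? returns false and paging stops.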
if @offset < 0
@offset = -@total if @offset.abs > @total
@total = 0
end
end
def more?
(offset + limit) < total
end
def next_offset
offset + limit
end
def next_page_params
{ limit: limit, offset: next_offset }
end
end
end
end
| 34.439216 | 152 | 0.629583 |
1d1472b17e94f550fd9a9afc1732096bb37c9885 | 2,825 | #! /your/favourite/path/to/ruby
# -*- mode: ruby; coding: utf-8; indent-tabs-mode: nil; ruby-indent-level 2 -*-
# Copyright (c) 2015 Urabe, Shyouhei
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
require_relative 'test_helper'
require 'sized_parallel'
class Test001SizedParallel < Test::Unit::TestCase
def test_class
assert { SizedParallel.is_a? Class }
end
def test_const
assert { SizedParallel.const_get('Pool').is_a? Class }
assert { SizedParallel.const_get('Task').is_a? Class }
assert { SizedParallel.const_get('VERSION') =~ /\A\d+\.\d+\.\d+\z/ }
end
def test_new
assert { SizedParallel.new.is_a? SizedParallel }
assert { SizedParallel.new(4).is_a? SizedParallel }
end
def test_new_with_block
# In case a method _ignores_ a block, which is the default behaviour for a
# ruby method, no exception happens. So if you want to check sanity of a
# block-accepting method, you should intentionally raise an exception
# inside, then check to see if that exception _you_ raised is seen, not
# others.
assert_raise_message(/foo/) { SizedParallel.new { raise 'foo' } }
assert_raise_message(/foo/) { SizedParallel.new(4) { raise 'foo' } }
assert_nothing_raised {
SizedParallel.new { |sp|
assert { sp.is_a? SizedParallel }
}
}
assert_nothing_raised {
SizedParallel.new(4) { |sp|
assert { sp.is_a? SizedParallel }
}
}
end
def test_start
this = SizedParallel.new
assert_raise(ArgumentError) { this.start }
assert { this.start { false }.is_a? SizedParallel::Task }
end
def test_wait
this = SizedParallel.new
assert { this.wait == this }
this.start { true }
assert { this.wait == this }
end
end
| 36.217949 | 79 | 0.697345 |
03ee0c604e6d987c4361f5e834d0e7c489c388b4 | 1,914 | # frozen_string_literal: true
require_relative '../lib/eiscp/dictionary'
require 'minitest/autorun'
class TestDictionary < MiniTest::Test
def test_zone_from_command
assert_equal(EISCP::Dictionary.zone_from_command('PWR'), 'main')
assert_equal(EISCP::Dictionary.zone_from_command('ZPW'), 'zone2')
assert_equal(EISCP::Dictionary.zone_from_command('CDS'), 'dock')
end
def test_command_to_name
assert_equal(EISCP::Dictionary.command_to_name('PWR'), 'system-power')
assert_equal(EISCP::Dictionary.command_to_name('ZPW'), 'power2')
assert_equal(EISCP::Dictionary.command_to_name('PW3'), 'power3')
assert_equal(EISCP::Dictionary.command_to_name('PW4'), 'power4')
end
def test_command_name_to_command
assert_equal(EISCP::Dictionary.command_name_to_command('system-power'), 'PWR')
assert_equal(EISCP::Dictionary.command_name_to_command('master-volume'), 'MVL')
assert_equal(EISCP::Dictionary.command_name_to_command('power2'), 'ZPW')
end
def test_command_value_to_value_name
assert_equal(EISCP::Dictionary.command_value_to_value_name('PWR', '01'), 'on')
assert_equal(EISCP::Dictionary.command_value_to_value_name('PWR', 'QSTN'), 'query')
end
def test_command_value_name_to_value
assert_equal(EISCP::Dictionary.command_value_name_to_value('PWR', 'on'), '01')
assert_equal(EISCP::Dictionary.command_value_name_to_value('ZPW', 'on'), '01')
end
def test_description_from_command_name
assert_equal(EISCP::Dictionary.description_from_command_name('system-power', 'main'), 'System Power Command')
assert_equal(EISCP::Dictionary.description_from_command_name('power2', 'zone2'), 'Zone2 Power Command')
end
def test_description_from_command
assert_equal(EISCP::Dictionary.description_from_command('PWR'), 'System Power Command')
assert_equal(EISCP::Dictionary.description_from_command('ZPW'), 'Zone2 Power Command')
end
end
| 41.608696 | 113 | 0.768025 |
87cc89ac406c2919b8ab4e7ef909c281d3dc7d8f | 451 | # frozen_string_literal: true
# This is a Rack middleware that we use in testing. It injects headers
# that simulate mod_shib so we can test.
# This is certainly not thread safe as it uses class level variables
class TestShibbolethHeaders
class_attribute :user, :groups
def initialize(app)
@app = app
end
def call(env)
env['REMOTE_USER'] = user
env['eduPersonEntitlement'] = Array(groups).join(';')
@app.call(env)
end
end
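# A minimal usage sketch (hypothetical, not part of the original file):
#
#   TestShibbolethHeaders.user = 'jdoe'
#   TestShibbolethHeaders.groups = %w[admin editor]
#   app = TestShibbolethHeaders.new(->(env) { [200, {}, [env['REMOTE_USER']]] })
#   app.call(Rack::MockRequest.env_for('/'))
#   # the wrapped app then sees REMOTE_USER = 'jdoe' and
#   # eduPersonEntitlement = 'admin;editor'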
| 23.736842 | 70 | 0.718404 |
1d7b9ce84dbaf80c51e8cfe68a50b5040b1dee9b | 1,560 | # Copyright (c) 2018-2019 VMware, Inc. All Rights Reserved.
# SPDX-License-Identifier: MIT
# DO NOT MODIFY. THIS CODE IS GENERATED. CHANGES WILL BE OVERWRITTEN.
# appliance - The vCenter Server Appliance is a preconfigured Linux-based virtual machine optimized for running vCenter Server and associated services.
require 'spec_helper'
require 'json'
# Unit tests for VSphereAutomation::Appliance::UpdateStagedApi
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'UpdateStagedApi' do
before do
# run before each test
@instance = VSphereAutomation::Appliance::UpdateStagedApi.new
end
after do
# run after each test
end
describe 'test an instance of UpdateStagedApi' do
it 'should create an instance of UpdateStagedApi' do
expect(@instance).to be_instance_of(VSphereAutomation::Appliance::UpdateStagedApi)
end
end
# unit tests for delete
# Deletes the staged update
# @param [Hash] opts the optional parameters
# @return [nil]
describe 'delete test' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
# unit tests for get
# Gets the current status of the staged update
# @param [Hash] opts the optional parameters
# @return [ApplianceUpdateStagedResult]
describe 'get test' do
it 'should work' do
# assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
end
end
end
| 30 | 151 | 0.740385 |
035fd1a968b1b17f9e5d760facd305f5a734fdb9 | 182 | class AddClassToUsers < ActiveRecord::Migration
def change
change_table :users do |t|
t.boolean :guide, default: false
t.boolean :admin, default: false
end
end
end
| 20.222222 | 47 | 0.708791 |
e8255c65373fc15d98326ebd02026d29aaadd8ad | 248 | require 'rom/sql/extensions/postgres/commands'
require 'rom/sql/extensions/postgres/types'
require 'rom/sql/extensions/postgres/type_builder'
require 'rom/sql/extensions/postgres/type_serializer'
require 'rom/plugins/relation/sql/postgres/explain'
| 41.333333 | 53 | 0.834677 |
ac0c8d3f071aef941780bc8d8cfe85b47085d56f | 157 | # frozen_string_literal: true
class LabelComponent < ViewComponent::Base
def initialize(form:)
@form = form
end
private
attr_reader :form
end
| 13.083333 | 42 | 0.726115 |
d5ff44d6d0fe22ef22e1b2fd743c54482cc37692 | 1,876 | benefit_sponsorships = BenefitSponsors::BenefitSponsorships::BenefitSponsorship.where(
:"source_kind" => :mid_plan_year_conversion,
:"benefit_applications.aasm_state" => :imported
)
benefit_sponsorships.each do |sponsorship|
myc_application = sponsorship.benefit_applications.where(aasm_state: :active).first
imported_application = sponsorship.benefit_applications.where(aasm_state: :imported).first
if myc_application.blank? || imported_application.blank?
puts "No MYC Plan year present ER: #{sponsorship.organization.legal_name}"
next
end
sponsorship.census_employees.each do |census_employee|
census_employee.benefit_group_assignments.where(
:"benefit_package_id".in => imported_application.benefit_packages.map(&:id),
:"is_active" => true
).each do |bga|
if bga.update_attributes(is_active: false)
puts "Disabling BGA for #{census_employee.full_name} of ER: #{sponsorship.organization.legal_name}"
end
end
if census_employee.active_benefit_group_assignment.blank? || imported_application.benefit_packages.map(&:id).include?(census_employee.active_benefit_group_assignment.id)
myc_benefit_package = myc_application.benefit_packages.first
myc_bga = census_employee.benefit_group_assignments.where(
:"benefit_package_id".in => myc_application.benefit_packages.map(&:id)
).first || census_employee.benefit_group_assignments.build(benefit_package_id: myc_benefit_package.id, start_on: myc_benefit_package.start_on)
if myc_bga.persisted?
puts "Set is active true on existing myc BGA EE: #{census_employee.full_name} ER: #{sponsorship.organization.legal_name}"
else
puts "Created new BGA for myc. EE: #{census_employee.full_name} ER: #{sponsorship.organization.legal_name}"
end
myc_bga.update_attributes(is_active: true)
end
end
end
| 48.102564 | 173 | 0.765458 |
e2e0588266ffcdfc1c883d43e60f2f45ef953b33 | 978 | #
# Be sure to run `pod spec lint WSTwo_Category.podspec' to ensure this is a
# valid spec and to remove all comments including this before submitting the spec.
#
# To learn more about Podspec attributes see http://docs.cocoapods.org/specification.html
# To see working Podspecs in the CocoaPods repo see https://github.com/CocoaPods/Specs/
#
Pod::Spec.new do |s|
s.name = "WSTwo_Category"
s.version = "4"
s.summary = "WSTwo_Category."
s.description = <<-DESC
this is WSTwo_Category.
DESC
s.homepage = "https://github.com/greatLock/WSTwo_Category"
s.license = "MIT"
s.author = { "WS" => "[email protected]" }
s.platform = :ios, "8.0"
s.source = { :git => "https://github.com/greatLock/WSTwo_Category.git", :tag => "#{s.version}" }
s.source_files = "WSTwo_Category/WSTwo_Category/**/*.{h,m}"
s.framework = "UIKit", "Foundation"
s.dependency "CTMediator"
end
| 25.736842 | 104 | 0.637014 |
2112c63cb8764c84c253c974998db1566c6dde64 | 348 | require "rubocop/extension/generator/version"
require "rubocop/extension/generator/cli"
require "rubocop/extension/generator/generator"
require 'active_support'
require 'active_support/core_ext/string/inflections'
require 'optparse'
require 'pathname'
require 'fileutils'
module RuboCop
module Extension
module Generator
end
end
end
| 19.333333 | 52 | 0.804598 |
7937157cd65ca089530a944907774adb6e32a497 | 2,979 | # This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `rails
# db:schema:load`. When creating a new database, `rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.
ActiveRecord::Schema.define(version: 2020_06_13_075753) do
create_table "entries", force: :cascade do |t|
t.integer "user_id"
t.integer "event_id"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["event_id"], name: "index_entries_on_event_id"
t.index ["user_id", "event_id"], name: "index_entries_on_user_id_and_event_id", unique: true
t.index ["user_id"], name: "index_entries_on_user_id"
end
create_table "events", force: :cascade do |t|
t.text "content"
t.datetime "start_date"
t.datetime "end_date"
t.integer "user_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "title"
t.integer "capacity", default: 30
t.index ["user_id", "created_at"], name: "index_events_on_user_id_and_created_at"
t.index ["user_id"], name: "index_events_on_user_id"
end
create_table "infos", force: :cascade do |t|
t.text "message"
t.integer "event_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "title"
t.index ["event_id", "created_at"], name: "index_infos_on_event_id_and_created_at"
t.index ["event_id"], name: "index_infos_on_event_id"
end
create_table "inquiries", force: :cascade do |t|
t.text "message"
t.integer "user_id", null: false
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.index ["user_id"], name: "index_inquiries_on_user_id"
end
create_table "users", force: :cascade do |t|
t.string "name"
t.string "email"
t.datetime "created_at", precision: 6, null: false
t.datetime "updated_at", precision: 6, null: false
t.string "password_digest"
t.string "remember_digest"
t.string "activation_digest"
t.boolean "activated", default: false
t.datetime "activated_at"
t.string "reset_digest"
t.datetime "reset_send_at"
t.text "message"
t.index ["email"], name: "index_users_on_email", unique: true
end
add_foreign_key "events", "users"
add_foreign_key "infos", "events"
add_foreign_key "inquiries", "users"
end
| 39.197368 | 96 | 0.713998 |
1d54c66425ca0e9faa87da860a77fc55837fc2be | 7,238 | # coding: utf-8
require 'rails_helper'
RSpec.describe Reward, type: :model do
let(:reward){ create(:reward, description: 'envie um email para [email protected]') }
describe "Log modifications" do
describe "when change something" do
before do
reward.update_attributes(description: 'foo')
end
it "should save the last changes" do
expect(reward.last_changes).to eq("{\"description\":[\"envie um email para [email protected]\",\"foo\"]}")
end
end
end
describe "Associations" do
it{ is_expected.to belong_to :project }
it{ is_expected.to have_many :contributions }
it{ is_expected.to have_many(:payments).through(:contributions) }
end
it "should have a minimum value" do
r = build(:reward, minimum_value: nil)
expect(r).not_to be_valid
end
describe "check_if_is_destroyable" do
before do
create(:confirmed_contribution, project: reward.project, reward: reward)
reward.reload
reward.destroy
end
it { expect(reward.persisted?).to eq(true) }
end
it "should have a greater than 10.00 minimum value" do
r = build(:reward)
r.minimum_value = -0.01
expect(r).not_to be_valid
r.minimum_value = 9.99
expect(r).not_to be_valid
r.minimum_value = 10.00
expect(r).to be_valid
r.minimum_value = 10.01
expect(r).to be_valid
end
it "should have a description" do
r = build(:reward, description: nil)
expect(r).not_to be_valid
end
it "should have integer maximum contributions" do
r = build(:reward)
r.maximum_contributions = 10.01
expect(r).not_to be_valid
r.maximum_contributions = 10
expect(r).to be_valid
end
it "should not allow delivery in the past" do
r = build(:reward, project: create(:project, online_date: nil))
r.deliver_at = Time.current - 1.month
expect(r).not_to be_valid
r.deliver_at = Time.current + 1.month
expect(r).to be_valid
end
it "should have maximum contributions > 0" do
r = build(:reward)
r.maximum_contributions = -1
expect(r).not_to be_valid
r.maximum_contributions = 0
expect(r).not_to be_valid
r.maximum_contributions = 1
expect(r).to be_valid
end
describe '.remaining' do
let(:project){ create(:project) }
subject { Reward.remaining }
before do
project.rewards.first.destroy!
@remaining = create(:reward, maximum_contributions: 3, project: project)
create(:confirmed_contribution, reward: @remaining, project: @remaining.project)
create(:pending_contribution, reward: @remaining, project: @remaining.project)
payment = create(:pending_contribution, reward: @remaining, project: @remaining.project).payments.first
payment.update_column(:created_at, 9.days.ago)
@sold_out = create(:reward, maximum_contributions: 2, project: project)
create(:confirmed_contribution, reward: @sold_out, project: @sold_out.project)
create(:pending_contribution, reward: @sold_out, project: @sold_out.project)
end
it{ is_expected.to eq([@remaining]) }
end
describe "#valid?" do
subject{ reward.valid? }
context "when we have online_date in project and deliver_at is after expires_at" do
let(:project){ create(:project, online_date: Time.now, online_days: 60) }
let(:reward){ build(:reward, project: project, deliver_at: project.expires_at + 1.day) }
it{ is_expected.to eq true }
end
context "when we have online_date in project and deliver_at is before expires_at month" do
let(:reward){ build(:reward, project: project, deliver_at: project.expires_at - 1.month) }
let(:project){ create(:project, online_date: Time.now, online_days: 60) }
it{ is_expected.to eq false }
end
context "when online_date in project is nil and deliver_at is after current month" do
let(:reward){ build(:reward, project: project, deliver_at: Time.now + 1.month) }
let(:project){ create(:project, online_date: nil) }
it{ is_expected.to eq true }
end
context "when online_date in project is nil and deliver_at is before current month" do
let(:reward){ build(:reward, project: project, deliver_at: Time.now - 1.month) }
let(:project){ create(:project, online_date: nil) }
it{ is_expected.to eq false }
end
end
describe "#total_contributions" do
before do
@remaining = create(:reward, maximum_contributions: 20)
create(:confirmed_contribution, reward: @remaining, project: @remaining.project)
create(:pending_contribution, reward: @remaining, project: @remaining.project)
create(:refunded_contribution, reward: @remaining, project: @remaining.project)
end
context "get total of paid and peding contributions" do
subject { @remaining.total_contributions %w(paid pending)}
it { is_expected.to eq(2) }
end
context "get total of refunded contributions" do
subject { @remaining.total_contributions %w(refunded)}
it { is_expected.to eq(1) }
end
context "get tota of pending contributions" do
subject { @remaining.total_contributions %w(pending)}
it { is_expected.to eq(1) }
end
end
describe "#total_compromised" do
before do
@remaining = create(:reward, maximum_contributions: 20)
create(:confirmed_contribution, reward: @remaining, project: @remaining.project)
create(:pending_contribution, reward: @remaining, project: @remaining.project)
payment = create(:pending_contribution, reward: @remaining, project: @remaining.project).payments.first
payment.update_column(:created_at, 8.days.ago)
end
subject { @remaining.total_compromised }
it { is_expected.to eq(2) }
end
describe "#in_time_to_confirm" do
before do
@remaining = create(:reward, maximum_contributions: 20)
create(:confirmed_contribution, reward: @remaining, project: @remaining.project)
create(:pending_contribution, reward: @remaining, project: @remaining.project)
payment = create(:pending_contribution, reward: @remaining, project: @remaining.project).payments.first
payment.update_column(:created_at, 8.days.ago)
end
subject { @remaining.in_time_to_confirm }
it { is_expected.to eq(1) }
end
describe '#sold_out?' do
let(:reward) { create(:reward, maximum_contributions: 3) }
subject { reward.sold_out? }
context 'when reward not have limits' do
let(:reward) { create(:reward, maximum_contributions: nil) }
it { is_expected.to eq(nil) }
end
context 'when reward contributions waiting confirmation and confirmed are greater than limit' do
before do
2.times { create(:confirmed_contribution, reward: reward, project: reward.project) }
create(:pending_contribution, reward: reward, project: reward.project)
end
it { is_expected.to eq(true) }
end
context 'when reward contributions waiting confirmation and confirmed are lower than limit' do
before do
create(:confirmed_contribution, reward: reward, project: reward.project)
create(:pending_contribution, reward: reward, project: reward.project)
end
it { is_expected.to eq(false) }
end
end
end
| 33.665116 | 109 | 0.690799 |
ac3ac0e9f1a6eb49d7e50e3b5e1e14a5613bc42b | 133 | class ChangeTeacherIndexToString < ActiveRecord::Migration[5.1]
def change
change_column :users, :teacher_id, :string
end
end
| 22.166667 | 63 | 0.774436 |
bfb30f4a99cdbf3031510f3d182fe15bd27ca9d0 | 6,060 | require_relative '../../test_helper'
require 'cqm/models'
module QRDA
module Cat1
class PatientRoundTripTest < MiniTest::Test
include QRDA::Cat1
def setup
@importer = Cat1::PatientImporter.instance
bd = 75.years.ago
@patient = QDM::Patient.new(birthDatetime: bd, givenNames: %w['First Middle'], familyName: 'Family', bundleId: '1')
@patient.extendedData = { 'medical_record_number' => '123', 'insurance_providers' => nil }
@patient.dataElements << QDM::PatientCharacteristicBirthdate.new(birthDatetime: bd)
@patient.dataElements << QDM::PatientCharacteristicRace.new(dataElementCodes: [QDM::Code.new('2106-3', 'Race & Ethnicity - CDC', 'White', '2.16.840.1.113883.6.238')])
@patient.dataElements << QDM::PatientCharacteristicEthnicity.new(dataElementCodes: [QDM::Code.new('2186-5', 'Race & Ethnicity - CDC', 'Not Hispanic or Latino', '2.16.840.1.113883.6.238')])
@patient.dataElements << QDM::PatientCharacteristicSex.new(dataElementCodes: [QDM::Code.new('M', 'Administrative sex (HL7)', 'Male', '2.16.840.1.113883.12.1')])
end
def create_test_measure
mes = QDM::Measure.new
mes.hqmf_id = 'b794a9c2-8e83-11e8-9eb6-529269fb1459'
mes.hqmf_set_id = 'bdfa0e38-8e83-11e8-9eb6-529269fb1459'
mes.description = 'Test Measure'
mes.populations = [{"IPP" => "IPP"}]
mes.elm = []
mes.save
end
def generate_doc(patient)
measures = QDM::Measure.all
options = { start_time: Date.new(2012, 1, 1), end_time: Date.new(2012, 12, 31) }
rawxml = Qrda1R5.new(patient, measures, options).render
xml = Tempfile.new(['test_patient', '.xml'])
xml.write rawxml
xml.close
doc = Nokogiri::XML(File.read(xml.path))
doc.root.add_namespace_definition('cda', 'urn:hl7-org:v3')
doc.root.add_namespace_definition('sdtc', 'urn:hl7-org:sdtc')
doc
end
def add_adverse_event(patient)
@adverse_event_author_time = Time.new(2012, 1, 1, 4, 0, 0)
@adverse_event_relevant_period = QDM::Interval.new(Time.new(2012, 1, 2, 4, 0, 0), Time.new(2012, 1, 2, 5, 0, 0))
@adverse_event_codes = [QDM::Code.new('E08.311', 'ICD-10-CM'), QDM::Code.new('362.01', 'ICD-9-CM'), QDM::Code.new('4855003', 'SNOMED-CT')]
patient.dataElements << QDM::AdverseEvent.new(authorDatetime: @adverse_event_author_time,
relevantPeriod: @adverse_event_relevant_period,
dataElementCodes: @adverse_event_codes)
end
def add_allergy_intolerance(patient)
@allergy_intolerance_author_time = Time.new(2012, 1, 1, 4, 0, 0)
@allergy_intolerance_prevalence_period = QDM::Interval.new(Time.new(2012, 1, 2, 4, 0, 0), Time.new(2012, 1, 2, 5, 0, 0))
@allergy_intolerance_codes = [QDM::Code.new('E08.311', 'ICD-10-CM'), QDM::Code.new('362.01', 'ICD-9-CM'), QDM::Code.new('4855003', 'SNOMED-CT')]
patient.dataElements << QDM::AllergyIntolerance.new(authorDatetime: @allergy_intolerance_author_time,
prevalencePeriod: @allergy_intolerance_prevalence_period,
dataElementCodes: @allergy_intolerance_codes)
end
def confirm_adverse_event(doc)
assert_equal 1, doc.xpath("//cda:entry/cda:observation[cda:templateId/@root = '2.16.840.1.113883.10.20.24.3.146']").size
end
def confirm_allergy_intolerance(doc)
assert_equal 1, doc.xpath("//cda:entry/cda:observation[cda:templateId/@root = '2.16.840.1.113883.10.20.24.3.147']").size
end
def compare_adverse_event(imported_patient)
adverse_events = imported_patient.get_data_elements('adverse_event')
assert_equal 1, adverse_events.size
adverse_event = adverse_events[0]
compare_time(@adverse_event_author_time, adverse_event.authorDatetime)
compare_interval(@adverse_event_relevant_period, adverse_event.relevantPeriod)
compare_codes(@adverse_event_codes, adverse_event.dataElementCodes)
end
def compare_allergy_intolerance(imported_patient)
allergy_intolerances = imported_patient.get_data_elements('allergy', 'intolerance')
assert_equal 1, allergy_intolerances.size
allergy_intolerance = allergy_intolerances[0]
compare_time(@allergy_intolerance_author_time, allergy_intolerance.authorDatetime)
compare_interval(@allergy_intolerance_prevalence_period, allergy_intolerance.prevalencePeriod)
compare_codes(@allergy_intolerance_codes, allergy_intolerance.dataElementCodes)
end
def test_patient_roundtrip
add_adverse_event(@patient)
add_allergy_intolerance(@patient)
exported_qrda = generate_doc(@patient)
confirm_adverse_event(exported_qrda)
confirm_allergy_intolerance(exported_qrda)
imported_patient = Cat1::PatientImporter.instance.parse_cat1(exported_qrda)
compare_adverse_event(imported_patient)
compare_allergy_intolerance(imported_patient)
end
def compare_time(exported_time, imported_time)
assert_equal exported_time, imported_time
end
def compare_interval(exported_interval, imported_interval)
assert_equal exported_interval.low, imported_interval.low
assert_equal exported_interval.high, imported_interval.high
end
def compare_codes(exported_codes, imported_codes)
assert_equal exported_codes.size, imported_codes.size
exported_codes.each do |ec|
assert imported_codes.collect { |ic| ic[:code] == ec.code && ic[:codeSystem] == ec.codeSystem }.include? true
end
end
def compare_code(exported_code, imported_code)
        assert_equal exported_code.code, imported_code[:code]
        assert_equal exported_code.codeSystem, imported_code[:codeSystem]
end
end
end
end | 48.870968 | 196 | 0.670462 |
b99d2e4e347cdd4035b3c6265e0b644aef655eb6 | 1,393 | require 'rubygems'
require 'bundler'
Bundler.setup
require "minitest/autorun"
require 'kyotocabinet_ffi'
require 'tempfile'
class FileHashTest < Minitest::Test
def test_paths
file = Tempfile.new(['db', '.kch'])
begin
db = KyotoCabinet::Db::FileHash.new file.path, :writer, :create
assert db.file_path.end_with? '.kch'
close_result = db.close
assert close_result, "close error: #{db.last_error_message} (#{db.last_error_code})"
ensure
file.close(true)
file.unlink
end
end
def test_full
file = Tempfile.new(['db', '.kch'])
begin
db = KyotoCabinet::Db::FileHash.new file.path, :writer, :create
assert_equal 0, db.last_error_code
start = "A"
100.times do |key|
set_result = (db[key] = start)
assert set_result, "set error: #{db.last_error_message} (#{db.last_error_code})"
assert_equal start, db[key]
start.next
end
assert !db.empty?
assert_equal 100, db.size
clear_result = db.clear
assert clear_result, "clear error: #{db.last_error_message} (#{db.last_error_code})"
assert db.empty?
assert_equal 0, db.size
close_result = db.close
assert close_result, "close error: #{db.last_error_message} (#{db.last_error_code})"
ensure
file.close(true)
file.unlink
end
end
end
# vim: ts=2 sts=2 sw=2
| 24.875 | 90 | 0.642498 |
7a093faadd7e971423f4d1486baddbee93a362c8 | 1,257 | #!/usr/bin/env ruby
require 'json'
require 'yaml'
require 'plist'
require 'fog-aws'
#
# Interfacing with the builds bucket on S3
#
def fog
@fog ||= Fog::Storage.new({
provider: 'AWS',
aws_secret_access_key: ENV['AWS_SECRET_ACCESS_KEY'],
aws_access_key_id: ENV['AWS_ACCESS_KEY_ID']
})
end
def bucket_name
'ios-ksr-builds'
end
def bucket
@bucket ||= fog.directories.new(key: bucket_name)
end
def current_builds
@current_builds ||= YAML::load(bucket.files.get('builds.yaml').body)
end
#
# Parsing app plist
#
def plist_path
@plist_path ||= File.join('./../', 'Kickstarter-iOS', 'Info.plist')
end
def plist
@plist ||= Plist::parse_xml(plist_path)
end
def plist_version
plist["CFBundleShortVersionString"]
end
def plist_build
plist["CFBundleVersion"].to_i
end
#
# Library
#
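# illustrative example:
#   strip_commented_lines("keep\n# drop\nkeep too") # => "keep\nkeep too"
# (lines whose first non-blank character is '#' are removed)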
def strip_commented_lines(str)
str.split("\n").select {|line| line.strip[0] != '#'}.join("\n")
end
#
# Script
#
file_name = '.RELEASE_NOTES.tmp'
changelog = strip_commented_lines(File.read(file_name)).strip
build = {
'build' => plist_build,
'changelog' => changelog,
}
bucket.files.create({
key: 'builds.yaml',
  body: (current_builds.select {|b| b['build'] != plist_build} + [build]).to_yaml,
public: false
}).save
| 16.539474 | 81 | 0.680191 |
018aa1db2be0a066c31425af1b765ff514f303e1 | 969 | # frozen_string_literal: true
FactoryBot.define do
factory :access_grant, class: "Doorkeeper::AccessGrant" do
sequence(:resource_owner_id) { |n| n }
application
redirect_uri { "https://app.com/callback" }
expires_in { 100 }
scopes { "public write" }
end
factory :access_token, class: "Doorkeeper::AccessToken" do
sequence(:resource_owner_id) { |n| n }
application
expires_in { 2.hours }
factory :clientless_access_token do
application { nil }
end
end
factory :application, class: "Doorkeeper::Application" do
sequence(:name) { |n| "Application #{n}" }
redirect_uri { "https://app.com/callback" }
factory :application_with_owner, class: "ApplicationWithOwner"
end
# do not name this factory :user, otherwise it will conflict with factories
# from applications that use doorkeeper factories in their own tests
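  # (hypothetical spec usage: the :resource_owner alias keeps
  # `FactoryBot.create(:resource_owner)` working while the factory itself
  # stays namespaced as :doorkeeper_testing_user)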
factory :doorkeeper_testing_user, class: :user, aliases: [:resource_owner]
end
| 29.363636 | 77 | 0.705882 |
0194ef1aabc07fef16f404d4144cbf5167d1643e | 247 | require File.dirname(__FILE__) + '/../../spec_helper'
require File.dirname(__FILE__) + '/fixtures/classes'
require File.dirname(__FILE__) + '/shared/find_all'
describe "Enumerable#select" do
it_behaves_like(:enumerable_find_all , :select)
end
| 30.875 | 53 | 0.757085 |
282fdddc24964ead4a2de7188a3bc19cd9613686 | 4,605 | #-- copyright
# OpenProject is an open source project management software.
# Copyright (C) 2012-2021 the OpenProject GmbH
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License version 3.
#
# OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows:
# Copyright (C) 2006-2013 Jean-Philippe Lang
# Copyright (C) 2010-2013 the ChiliProject Team
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# See docs/COPYRIGHT.rdoc for more details.
#++
require 'spec_helper'
##
# The concern is implemented in the ApplicationController and is therefore applicable
# in every controller. We're just using this particular one for the test.
describe MyController, type: :controller do
render_views
let(:sso_config) do
{
header: header,
secret: secret
}
end
let(:header) { "X-Remote-User" }
let(:secret) { "42" }
let!(:auth_source) { DummyAuthSource.create name: "Dummy LDAP" }
let!(:user) { FactoryBot.create :user, login: login, auth_source_id: auth_source.id }
let(:login) { "h.wurst" }
shared_examples "auth source sso failure" do
def attrs(user)
user.attributes.slice(:login, :mail, :auth_source_id)
end
it "should redirect to AccountController#sso to show the error" do
expect(response).to redirect_to "/sso"
failure = session[:auth_source_sso_failure]
expect(failure).to be_present
expect(attrs(failure[:user])).to eq attrs(user)
expect(failure[:login]).to eq login
expect(failure[:back_url]).to eq "http://test.host/my/account"
expect(failure[:ttl]).to eq 1
end
context 'when the config is marked optional' do
let(:sso_config) do
{
header: header,
secret: secret,
optional: true
}
end
it "should redirect to login" do
expect(response).to redirect_to("/login?back_url=http%3A%2F%2Ftest.host%2Fmy%2Faccount")
end
end
end
before do
if sso_config
allow(OpenProject::Configuration)
.to receive(:auth_source_sso)
.and_return(sso_config)
end
separator = secret ? ':' : ''
request.headers[header] = "#{login}#{separator}#{secret}"
end
describe 'login' do
before do
get :account
end
it "should log in given user" do
expect(response.body.squish).to have_content("Username h.wurst")
end
context 'when the secret being null' do
let(:secret) { nil }
it "should log in given user" do
expect(response.body.squish).to have_content("Username h.wurst")
end
end
context 'when the user is invited' do
let!(:user) do
FactoryBot.create :user, login: login, status: Principal.statuses[:invited], auth_source_id: auth_source.id
end
it "should log in given user and activate it" do
expect(response.body.squish).to have_content("Username h.wurst")
expect(user.reload).to be_active
end
end
context "with no auth source sso configured" do
let(:sso_config) { nil }
it "should redirect to login" do
expect(response).to redirect_to("/login?back_url=http%3A%2F%2Ftest.host%2Fmy%2Faccount")
end
end
context "with a non-active user user" do
let(:user) { FactoryBot.create :user, login: login, auth_source_id: auth_source.id, status: 2 }
it_should_behave_like "auth source sso failure"
end
context "with an invalid user" do
let(:auth_source) { DummyAuthSource.create name: "Onthefly LDAP", onthefly_register: true }
let!(:duplicate) { FactoryBot.create :user, mail: "[email protected]" }
let(:login) { "dummy_dupuser" }
let(:user) do
FactoryBot.build :user, login: login, mail: duplicate.mail, auth_source_id: auth_source.id
end
it_should_behave_like "auth source sso failure"
end
end
end
| 30.496689 | 115 | 0.681216 |
d5fe549845c6ae0b5849c24095e8606b7d5f1fbb | 17,317 | # frozen_string_literal: true
module Jekyll
class Document
include Comparable
extend Forwardable
attr_reader :path, :site, :extname, :collection, :type
attr_accessor :content, :output
def_delegator :self, :read_post_data, :post_read
YAML_FRONT_MATTER_REGEXP = %r!\A(---\s*\n.*?\n?)^((---|\.\.\.)\s*$\n?)!m.freeze
DATELESS_FILENAME_MATCHER = %r!^(?:.+/)*(.*)(\.[^.]+)$!.freeze
DATE_FILENAME_MATCHER = %r!^(?>.+/)*?(\d{2,4}-\d{1,2}-\d{1,2})-([^/]*)(\.[^.]+)$!.freeze
SASS_FILE_EXTS = %w(.sass .scss).freeze
YAML_FILE_EXTS = %w(.yaml .yml).freeze
#
# Class-wide cache to stash and retrieve regexp to detect "super-directories"
# of a particular Jekyll::Document object.
#
# dirname - The *special directory* for the Document.
# e.g. "_posts" or "_drafts" for Documents from the `site.posts` collection.
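    #
    # Example (illustrative of the regexp built below):
    #   Document.superdirs_regex("_posts") # => %r!_posts.*!
    #   "es/_posts/2021-01-01-a.md".sub(Document.superdirs_regex("_posts"), "")
    #   # => "es/", which is how #categories_from_path recovers the "es" superdir.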
def self.superdirs_regex(dirname)
@superdirs_regex ||= {}
@superdirs_regex[dirname] ||= %r!#{dirname}.*!
end
#
# Create a new Document.
#
# path - the path to the file
# relations - a hash with keys :site and :collection, the values of which
# are the Jekyll::Site and Jekyll::Collection to which this
# Document belong.
#
# Returns nothing.
def initialize(path, relations = {})
@site = relations[:site]
@path = path
@extname = File.extname(path)
@collection = relations[:collection]
@type = @collection.label.to_sym
@has_yaml_header = nil
if draft?
categories_from_path("_drafts")
else
categories_from_path(collection.relative_directory)
end
data.default_proc = proc do |_, key|
site.frontmatter_defaults.find(relative_path, type, key)
end
trigger_hooks(:post_init)
end
# Fetch the Document's data.
#
# Returns a Hash containing the data. An empty hash is returned if
# no data was read.
def data
@data ||= {}
end
# Merge some data in with this document's data.
#
# Returns the merged data.
def merge_data!(other, source: "YAML front matter")
merge_categories!(other)
Utils.deep_merge_hashes!(data, other)
merge_date!(source)
data
end
# Returns the document date. If metadata is not present then calculates it
# based on Jekyll::Site#time or the document file modification time.
#
# Return document date string.
def date
data["date"] ||= (draft? ? source_file_mtime : site.time)
end
# Return document file modification time in the form of a Time object.
#
# Return document file modification Time object.
def source_file_mtime
File.mtime(path)
end
# Returns whether the document is a draft. This is only the case if
# the document is in the 'posts' collection but in a different
# directory than '_posts'.
#
# Returns whether the document is a draft.
def draft?
data["draft"] ||= relative_path.index(collection.relative_directory).nil? &&
collection.label == "posts"
end
# The path to the document, relative to the collections_dir.
#
# Returns a String path which represents the relative path from the collections_dir
# to this document.
def relative_path
@relative_path ||= path.sub("#{site.collections_path}/", "")
end
# The output extension of the document.
#
# Returns the output extension
def output_ext
renderer.output_ext
end
# The base filename of the document, without the file extname.
#
# Returns the basename without the file extname.
def basename_without_ext
@basename_without_ext ||= File.basename(path, ".*")
end
# The base filename of the document.
#
# Returns the base filename of the document.
def basename
@basename ||= File.basename(path)
end
def renderer
@renderer ||= Jekyll::Renderer.new(site, self)
end
# Produces a "cleaned" relative path.
# The "cleaned" relative path is the relative path without the extname
# and with the collection's directory removed as well.
# This method is useful when building the URL of the document.
#
# NOTE: `String#gsub` removes all trailing periods (in comparison to `String#chomp`)
#
# Examples:
# When relative_path is "_methods/site/generate...md":
# cleaned_relative_path
# # => "/site/generate"
#
# Returns the cleaned relative path of the document.
def cleaned_relative_path
@cleaned_relative_path ||=
relative_path[0..-extname.length - 1]
.sub(collection.relative_directory, "")
.gsub(%r!\.*\z!, "")
end
# Determine whether the document is a YAML file.
#
# Returns true if the extname is either .yml or .yaml, false otherwise.
def yaml_file?
YAML_FILE_EXTS.include?(extname)
end
# Determine whether the document is an asset file.
# Asset files include CoffeeScript files and Sass/SCSS files.
#
# Returns true if the extname belongs to the set of extensions
# that asset files use.
def asset_file?
sass_file? || coffeescript_file?
end
# Determine whether the document is a Sass file.
#
# Returns true if extname == .sass or .scss, false otherwise.
def sass_file?
SASS_FILE_EXTS.include?(extname)
end
# Determine whether the document is a CoffeeScript file.
#
# Returns true if extname == .coffee, false otherwise.
def coffeescript_file?
extname == ".coffee"
end
# Determine whether the file should be rendered with Liquid.
#
# Returns false if the document is either an asset file or a yaml file,
# or if the document doesn't contain any Liquid Tags or Variables,
# true otherwise.
def render_with_liquid?
return false if data["render_with_liquid"] == false
!(coffeescript_file? || yaml_file? || !Utils.has_liquid_construct?(content))
end
# Determine whether the file should be rendered with a layout.
#
# Returns true if the Front Matter specifies that `layout` is set to `none`.
def no_layout?
data["layout"] == "none"
end
# Determine whether the file should be placed into layouts.
#
# Returns false if the document is set to `layouts: none`, or is either an
# asset file or a yaml file. Returns true otherwise.
def place_in_layout?
!(asset_file? || yaml_file? || no_layout?)
end
# The URL template where the document would be accessible.
#
# Returns the URL template for the document.
def url_template
collection.url_template
end
# Construct a Hash of key-value pairs which contain a mapping between
# a key in the URL template and the corresponding value for this document.
#
# Returns the Hash of key-value pairs for replacement in the URL.
def url_placeholders
@url_placeholders ||= Drops::UrlDrop.new(self)
end
# The permalink for this Document.
# Permalink is set via the data Hash.
#
# Returns the permalink or nil if no permalink was set in the data.
def permalink
data && data.is_a?(Hash) && data["permalink"]
end
# The computed URL for the document. See `Jekyll::URL#to_s` for more details.
#
# Returns the computed URL for the document.
def url
@url ||= URL.new(
:template => url_template,
:placeholders => url_placeholders,
:permalink => permalink
).to_s
end
def [](key)
data[key]
end
# The full path to the output file.
#
# base_directory - the base path of the output directory
#
# Returns the full path to the output file of this document.
def destination(base_directory)
@destination ||= {}
@destination[base_directory] ||= begin
path = site.in_dest_dir(base_directory, URL.unescape_path(url))
if url.end_with? "/"
path = File.join(path, "index.html")
else
path << output_ext unless path.end_with? output_ext
end
path
end
end
# Write the generated Document file to the destination directory.
#
# dest - The String path to the destination dir.
#
# Returns nothing.
def write(dest)
path = destination(dest)
FileUtils.mkdir_p(File.dirname(path))
Jekyll.logger.debug "Writing:", path
File.write(path, output, :mode => "wb")
trigger_hooks(:post_write)
end
# Whether the file is published or not, as indicated in YAML front-matter
#
# Returns 'false' if the 'published' key is specified in the
# YAML front-matter and is 'false'. Otherwise returns 'true'.
def published?
!(data.key?("published") && data["published"] == false)
end
# Read in the file and assign the content and data based on the file contents.
# Merge the frontmatter of the file with the frontmatter default
# values
#
# Returns nothing.
def read(opts = {})
Jekyll.logger.debug "Reading:", relative_path
if yaml_file?
@data = SafeYAML.load_file(path)
else
begin
merge_defaults
read_content(**opts)
read_post_data
rescue StandardError => e
handle_read_error(e)
end
end
end
# Create a Liquid-understandable version of this Document.
#
# Returns a Hash representing this Document's data.
def to_liquid
@to_liquid ||= Drops::DocumentDrop.new(self)
end
# The inspect string for this document.
# Includes the relative path and the collection label.
#
# Returns the inspect string for this document.
def inspect
"#<#{self.class} #{relative_path} collection=#{collection.label}>"
end
# The string representation for this document.
#
# Returns the content of the document
def to_s
output || content || "NO CONTENT"
end
# Compare this document against another document.
# Comparison is a comparison between the 2 paths of the documents.
#
# Returns -1, 0, +1 or nil depending on whether this doc's path is less than,
# equal or greater than the other doc's path. See String#<=> for more details.
def <=>(other)
return nil unless other.respond_to?(:data)
cmp = data["date"] <=> other.data["date"]
cmp = path <=> other.path if cmp.nil? || cmp.zero?
cmp
end
# Determine whether this document should be written.
# Based on the Collection to which it belongs.
#
# True if the document has a collection and if that collection's #write?
# method returns true, and if the site's Publisher will publish the document.
# False otherwise.
#
# rubocop:disable Naming/MemoizedInstanceVariableName
def write?
return @write_p if defined?(@write_p)
@write_p = collection&.write? && site.publisher.publish?(self)
end
# rubocop:enable Naming/MemoizedInstanceVariableName
# The Document excerpt_separator, from the YAML Front-Matter or site
# default excerpt_separator value
#
# Returns the document excerpt_separator
def excerpt_separator
@excerpt_separator ||= (data["excerpt_separator"] || site.config["excerpt_separator"]).to_s
end
# Whether to generate an excerpt
#
# Returns true if the excerpt separator is configured.
def generate_excerpt?
!excerpt_separator.empty?
end
def next_doc
pos = collection.docs.index { |post| post.equal?(self) }
collection.docs[pos + 1] if pos && pos < collection.docs.length - 1
end
def previous_doc
pos = collection.docs.index { |post| post.equal?(self) }
collection.docs[pos - 1] if pos && pos.positive?
end
def trigger_hooks(hook_name, *args)
Jekyll::Hooks.trigger collection.label.to_sym, hook_name, self, *args if collection
Jekyll::Hooks.trigger :documents, hook_name, self, *args
end
def id
@id ||= File.join(File.dirname(url), (data["slug"] || basename_without_ext).to_s)
end
# Calculate related posts.
#
# Returns an Array of related Posts.
def related_posts
@related_posts ||= Jekyll::RelatedPosts.new(self).build
end
# Override of method_missing to check in @data for the key.
def method_missing(method, *args, &blck)
if data.key?(method.to_s)
Jekyll::Deprecator.deprecation_message "Document##{method} is now a key "\
"in the #data hash."
Jekyll::Deprecator.deprecation_message "Called by #{caller(0..0)}."
data[method.to_s]
else
super
end
end
def respond_to_missing?(method, *)
data.key?(method.to_s) || super
end
# Add superdirectories of the special_dir to categories.
# In the case of es/_posts, 'es' is added as a category.
# In the case of _posts/es, 'es' is NOT added as a category.
#
# Returns nothing.
def categories_from_path(special_dir)
if relative_path.start_with?(special_dir)
superdirs = []
else
superdirs = relative_path.sub(Document.superdirs_regex(special_dir), "")
superdirs = superdirs.split(File::SEPARATOR)
superdirs.reject! { |c| c.empty? || c == special_dir || c == basename }
end
merge_data!({ "categories" => superdirs }, :source => "file path")
end
def populate_categories
categories = Array(data["categories"]) + Utils.pluralized_array_from_hash(
data, "category", "categories"
)
categories.map!(&:to_s)
categories.flatten!
categories.uniq!
merge_data!({ "categories" => categories })
end
def populate_tags
tags = Utils.pluralized_array_from_hash(data, "tag", "tags")
tags.flatten!
merge_data!({ "tags" => tags })
end
private
def merge_categories!(other)
if other.key?("categories") && !other["categories"].nil?
other["categories"] = other["categories"].split if other["categories"].is_a?(String)
if data["categories"].is_a?(Array)
other["categories"] = data["categories"] | other["categories"]
end
end
end
def merge_date!(source)
if data.key?("date")
data["date"] = Utils.parse_date(
data["date"].to_s,
"Document '#{relative_path}' does not have a valid date in the #{source}."
)
end
end
def merge_defaults
defaults = @site.frontmatter_defaults.all(relative_path, type)
merge_data!(defaults, :source => "front matter defaults") unless defaults.empty?
end
def read_content(**opts)
self.content = File.read(path, **Utils.merged_file_read_opts(site, opts))
if content =~ YAML_FRONT_MATTER_REGEXP
self.content = Regexp.last_match.post_match
data_file = SafeYAML.load(Regexp.last_match(1))
merge_data!(data_file, :source => "YAML front matter") if data_file
end
end
def read_post_data
populate_title
populate_categories
populate_tags
generate_excerpt
end
def handle_read_error(error)
if error.is_a? Psych::SyntaxError
Jekyll.logger.error "Error:", "YAML Exception reading #{path}: #{error.message}"
else
Jekyll.logger.error "Error:", "could not read file #{path}: #{error.message}"
end
if site.config["strict_front_matter"] || error.is_a?(Jekyll::Errors::FatalException)
raise error
end
end
def populate_title
if relative_path =~ DATE_FILENAME_MATCHER
date, slug, ext = Regexp.last_match.captures
modify_date(date)
elsif relative_path =~ DATELESS_FILENAME_MATCHER
slug, ext = Regexp.last_match.captures
end
# `slug` will be nil for documents without an extension since the regex patterns
# above tests for an extension as well.
# In such cases, assign `basename_without_ext` as the slug.
slug ||= basename_without_ext
# slugs shouldn't end with a period
# `String#gsub!` removes all trailing periods (in comparison to `String#chomp!`)
slug.gsub!(%r!\.*\z!, "")
# Try to ensure the user gets a title.
data["title"] ||= Utils.titleize_slug(slug)
# Only overwrite slug & ext if they aren't specified.
data["slug"] ||= slug
data["ext"] ||= ext
end
def modify_date(date)
if !data["date"] || data["date"].to_i == site.time.to_i
merge_data!({ "date" => date }, :source => "filename")
end
end
def generate_excerpt
data["excerpt"] ||= Jekyll::Excerpt.new(self) if generate_excerpt?
end
end
end
| 31.774312 | 98 | 0.618525 |
4ad087bb53c4ce0b3aad8b37dd989c77ed466571 | 1,473 | # coding: utf-8
lib = File.expand_path('../lib', __FILE__)
$LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
require 'icharger/log/version'
Gem::Specification.new do |spec|
spec.name = 'icharger-log'
spec.version = ICharger::Log::VERSION
spec.authors = ['Nick Veys']
spec.email = ['[email protected]']
spec.description = %q{Read and interpret iCharger log files.}
spec.summary = %q{iCharger log file reader}
spec.homepage = 'http://github.com/code-lever/icharger-log'
spec.license = 'MIT'
spec.files = `git ls-files`.split($/)
spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
spec.require_paths = ['lib']
spec.add_development_dependency 'awesome_print'
spec.add_development_dependency 'bundler', '~> 1.8'
spec.add_development_dependency 'ci_reporter_rspec', '~> 1.0'
spec.add_development_dependency 'rake', '~> 10.0'
spec.add_development_dependency 'rspec', '~> 3.3'
spec.add_development_dependency 'rspec-collection_matchers'
spec.add_development_dependency 'rspec-its'
spec.add_development_dependency 'rubocop'
spec.add_development_dependency 'rubocop-checkstyle_formatter'
spec.add_development_dependency 'simplecov'
spec.add_development_dependency 'simplecov-gem-adapter'
spec.add_development_dependency 'simplecov-rcov'
spec.add_development_dependency 'yard'
end
| 42.085714 | 74 | 0.718262 |
1822475d5d1477a8d6755aa84934dc88465746ea | 442 | # frozen_string_literal: true
# :nocov:
class MyUniqueJobWithFilterMethod
include Sidekiq::Worker
sidekiq_options backtrace: true,
lock: :until_executed,
queue: :customqueue,
retry: true,
unique_args: :filtered_args
def perform(*)
# NO-OP
end
def self.filtered_args(args)
options = args.extract_options!
[args.first, options['type']]
end
end
| 20.090909 | 45 | 0.61086 |
ede02d820c94f995280307c6193a467883df012e | 233 | RSpec.describe TestGem do
it "has a version number" do
expect(TestGem::VERSION).not_to be nil
end
describe '#greet' do
it 'returns "Hello World!"' do
expect(TestGem.greet).to eq('Hello World!')
end
end
end
| 19.416667 | 49 | 0.660944 |
613ab3bad2a5dad0088399e4e0a189e37752bc4c | 417 | # encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
module Azure::Compute::Mgmt::V2020_09_30
module Models
#
# Defines values for NetworkAccessPolicy
#
module NetworkAccessPolicy
AllowAll = "AllowAll"
AllowPrivate = "AllowPrivate"
DenyAll = "DenyAll"
end
end
end
| 23.166667 | 70 | 0.702638 |
ff871b477a1694a4c074a455d36acc3413034713 | 16,492 |
require 'rubygems'
require 'pathname'
module HgtClusPar
def init
puts "in HgtParClus init"
#active record object initialization
super
end
def initialize()
puts "in HgtParClus initialize"
#initialize included modules
end
def scan_hgt_par_fens()
puts "in scan_hgt_par_fens()..."
sql=<<-EOF
truncate table #{HgtParFen.table_name}
EOF
puts "#{sql}"
@conn.execute sql
#sleep 5
@conn.execute <<-EOF
truncate table #{HgtParFenStat.table_name}
EOF
@genes.each { |gn|
@gene = gn
#debugging
#172 = secE, the smallest gene, without transfers
#132 = thrC, the second smallest gene, with transfers
#152 = oppA, problem in window 10/12-275-313 with all gaps
#next if @gene.name != "thrC"
#
puts "gn.name: #{@gene.name}, gn.id: #{@gene.id}"
[10,25,50].each { |win_size|
@win_size = win_size
puts "alix_gene_hgt_par_raxml: #{alix_gene_hgt_par_raxml}"
alix_gene_hgt_par_raxml.children.each { |pn|
if pn.directory?
dir = pn.basename
elems = dir.to_s.split("-")
#puts elems.inspect
@fen_no = elems[0]
@fen_idx_min = elems[1]
@fen_idx_max = elems[2]
#puts "fen_no: #{fen_no}, idx_min: #{idx_min}, idx_max: #{idx_max}"
treat_window
end
}
#
#next
}
}
end
def gen_hgt_par_fens()
puts "in gen_hgt_par_fens()..."
sql=<<-EOF
truncate table #{HgtParFen.table_name}
EOF
puts "#{sql}"
@conn.execute sql
#sleep 5
@genes.each { |gn|
@gene = gn
#debugging
#172 = secE, the smallest gene, without transfers
#132 = thrC, the second smallest gene, with transfers
#152 = oppA, problem in window 10/12-275-313 with all gaps
#next if @gene.name != "thrC"
#
#get whole alignment
oa = @ud.fastafile_to_original_alignment fasta_align_f(:orig)
oa_len=oa.alignment_length
puts "gn.name: #{@gene.name}, gn.id: #{@gene.id}, oa.length: #{oa_len}"
[10,25,50].each { |win_size|
@win_size = win_size
win_wide = (oa_len * win_size / 100).to_i
@win_wide = win_wide
win_step = [25,15,10,5,1].select{|v| v < win_wide}.max
@win_step = win_step
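        #worked example (illustrative): oa_len = 300 and win_size = 10 give
        #win_wide = 30 and win_step = 25, i.e. windows 0-29, 25-54, ..., 250-279
        #plus the final right-aligned window 270-299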
fen_no = 0
idx_min = 0
idx_max = idx_min + win_wide - 1
idx_max = [idx_max,oa_len -1].min
while idx_max <= oa_len -1
fen_no += 1
treat_window(fen_no,idx_min,idx_max)
#next
idx_min += win_step
idx_max += win_step
end
        #last window right aligned; skip when the sliding loop already ended
        #exactly on the last alignment column, to avoid emitting a duplicate window
        if (idx_max - win_step) != oa_len - 1
idx_max = oa_len -1
idx_min = idx_max - win_wide + 1
fen_no += 1
treat_window(fen_no,idx_min,idx_max)
end
#puts "------------------------"
#sleep 2
}
}
end
def test_fen_over_thr
max_hgt_bs = 0
if alix_output_fen_hgt_par_raxml.exist?
alix_output_fen_hgt_par_raxml.open("r") { |hci|
#parse results file
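        #illustrative lines this parser expects (inferred from the regexes
        #below, not copied from an actual output file):
        #  | Iteration #1 :
        #  | HGT 1 / 3  Regular  (bootstrap value = 72.00% inverse = 28.00%)
        #  | From subtree (1, 2, 3) to subtree (7, 8)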
hci.each { |ln|
#puts ln
if ln =~ /^\|\sIteration\s\#(\d+)\s:/
#puts "------------#{$1}---------->#{ln}"
@iter_no = $1
elsif ln =~ /^\|\sHGT\s(\d+)\s\/\s(\d+)\s+Trivial\s+\(bootstrap\svalue\s=\s([\d|\.]+)\%\sinverse\s=\s([\d|\.]+)\%\)/
#puts "Trivial: ------#{$1}--#{$3}--#{$4}---------->#{ln}"
@hgt_fragm_type = "Trivial"
@hgt_no= $1
@bs_direct = $3.to_f
@bs_inverse = $4.to_f
elsif ln =~ /^\|\sHGT\s(\d+)\s\/\s(\d+)\s+Regular\s+\(bootstrap\svalue\s=\s([\d|\.]+)\%\sinverse\s=\s([\d|\.]+)\%\)/
#puts "Regular: ------#{$1}--#{$3}--#{$4}---------->#{ln}"
@hgt_fragm_type = "Regular"
@hgt_no= $1
@bs_direct = $3.to_f
@bs_inverse = $4.to_f
elsif ln =~ /^\|\sFrom\ssubtree\s\(([\d|\,|\s]+)\)\sto\ssubtree\s\(([\d|\,|\s]+)\)/
#puts "------#{$1}---#{$2}----------->#{ln}"
@from_subtree=$1
@to_subtree = $2
#insert row in interior loop
#calculate weights
@bs_val=@bs_direct+@bs_inverse
@weight_direct=@bs_direct/@bs_val
@weight_inverse=@bs_inverse/@bs_val
            #if transfer is worthy, bootstrap value better than threshold
#also if hgt_type is included in selected @hgt_type_avail_db
if hgt_type_avail_db.include? @hgt_fragm_type
#puts "@bs_val: #{@bs_val}"
max_hgt_bs = [max_hgt_bs,@bs_val].max
#insert direct transfer, with weight_inverse information for inverse weight
#@hpf_ins_pstmt.set_int(1,@gene.id)
#@hpf_ins_pstmt.set_int(2,@fen_no.to_i)
#@hpf_ins_pstmt.set_int(3,@fen_idx_min.to_i)
#@hpf_ins_pstmt.set_int(4,@fen_idx_max.to_i)
#@hpf_ins_pstmt.set_int(5,@iter_no.to_i)
#@hpf_ins_pstmt.set_int(6,@hgt_no.to_i)
#@hpf_ins_pstmt.set_string(7,@hgt_fragm_type)
#@hpf_ins_pstmt.set_string(8,@from_subtree)
#@hpf_ins_pstmt.set_int(9,@from_subtree.split(",").length)
#@hpf_ins_pstmt.set_string(10,@to_subtree)
#@hpf_ins_pstmt.set_int(11,@to_subtree.split(",").length)
#@hpf_ins_pstmt.set_double(12,@bs_val.to_f)
#@hpf_ins_pstmt.set_double(13,@bs_direct)
#@hpf_ins_pstmt.set_double(14,@bs_inverse)
#@hpf_ins_pstmt.set_int(15,win_size)
#@hpf_ins_pstmt.add_batch()
end
else
#puts ln
end #end if ln
} # each ln
} #each hci
else
max_hgt_bs = 0
end
#puts "max_hgt_bs: #{max_hgt_bs}, over: #{max_hgt_bs >= self.thres}"
return (max_hgt_bs >= self.thres)
end
#win_status phylo_design
def treat_window
#puts "gene_id: #{@gene.id}, win_step: #{@win_step}, win_size: #{@win_size}, win_wide: #{@win_wide}, fen_no: #{@fen_no}, @fen_idx_min: #{@fen_idx_min}, @fen_idx_max: #{@fen_idx_max}"
win_status = case test_fen_over_thr
when true then "alix_design"
else
"alix_err_th50all"
end
fen = HgtParFen.new
fen.gene_id = @gene.id
fen.win_size = @win_size
fen.fen_no = @fen_no
fen.fen_idx_min = @fen_idx_min
fen.fen_idx_max = @fen_idx_max
#fen.win_wide = @win_wide
#fen.win_step = @win_step
#fen.win_status = "phylo_design"
fen.save
#update status based on self.thres
hpfs = HgtParFenStat.new
hpfs.hgt_par_fen_id = fen.id
hpfs.win_status = win_status
hpfs.save
end
#skip windows with no hgt results: output.txt
def iterate_over_win(fen_stage,win_status,&aproc)
[10,25,50].each { |win_size|
@win_size = win_size
sql=<<-EOF
select hpf.id,
hpf.fen_no,
hpf.fen_idx_min,
hpf.fen_idx_max
from hgt_par_fens hpf
join HGT_PAR_FEN_STATS hpfs on hpfs.HGT_PAR_FEN_ID = hpf.id
where hpfs.fen_stage_id = #{fen_stage} and
hpfs.WIN_STATUS = '#{win_status.to_s}' and
hpf.gene_id = #{@gene.id} and
hpf.win_size = #{win_size}
order by hpf.fen_no
EOF
#puts "sql: #{sql}"
fens = HgtParFen.find_by_sql(sql)
#puts "fens: #{fens.length}"
#sleep 2
fens.each {|fen|
@fen_id = fen.id
@fen_no = fen.fen_no
@fen_idx_min = fen.fen_idx_min
@fen_idx_max = fen.fen_idx_max
@win_dir = "#{@fen_no}-#{@fen_idx_min}-#{@fen_idx_max}"
#puts "fen_no: #{@fen_no}, fen_idx_min: #{@fen_idx_min}, fen_idx_max: #{@fen_idx_max}"
#skip if no results in window
#next if not File.exists?(hgt_results_f)
aproc.call "win_size: #{@win_size}, fen_no: #{@fen_no}, fen_idx_min: #{@fen_idx_min}, fen_idx_max: #{@fen_idx_max}"
}
} #win size
end
  #takes a Pathname to a Newick tree file
  #returns the mean bootstrap value over all nodes that carry one
def med_nwk_bootstrap(nwk_tree)
s = nwk_tree.read #File.open(_tr_unrooted_f, 'rb') { |f| f.read }
puts "s: #{s}"
tr1 = Bio::Newick.new(s).tree
#bs_arr = tr1.nodes.select {|nd| nd.bootstrap_string.nil? }
bs_arr = tr1.nodes.select{|nd| !nd.bootstrap_string.nil? }.compact.collect{|nd| nd.bootstrap_string.to_f}
#puts "name: #{nd.name}, bootstrap_string: >#{nd.bootstrap_string}<"
bs_val = bs_arr.sum / bs_arr.length
puts "bs_arr: #{bs_arr.inspect}, bs_val: #{bs_val}"
bs_val
end
def section_create_work_folder(recreate = false)
#puts "fen_d(:work): #{fen_d(:work)}"
#first time
#sys "mkdir -p #{fen_d(:work)}"
#thereafter
#create folder if non existant
#puts "fen_d(:work): #{fen_d(:work)}"
#puts "fen_d(:work).exist?: #{fen_d(:work).exist?}"
#recreate work folder
fen_d(:work).rmtree if recreate == true and fen_d(:work).exist?
#create on first use
FileUtils.mkdir_p fen_d(:work) unless fen_d(:work).exist?
fen_d(:work).chmod "755".to_i(8)
#sys "rm -fr #{fen_d(:work)}" if recreate == true
end
def merge_fen_stat(fen_stage, win_status)
#equivalent of merge
#delete status
sql=<<-EOF
delete from hgt_par_fen_stats hpfs
      where hpfs.hgt_par_fen_id = #{@fen_id} and
hpfs.fen_stage_id = #{fen_stage} and
hpfs.win_status = '#{win_status}'
EOF
puts "sql: #{sql}"
#sleep 5
@conn.execute sql
#insert status
sql=<<-EOF
insert into hgt_par_fen_stats hpfs
(hgt_par_fen_id,fen_stage_id,win_status,created_at,updated_at)
values
(#{@fen_id},#{fen_stage},'#{win_status}',current_timestamp,current_timestamp)
EOF
puts "sql: #{sql}"
#sleep 5
@conn.execute sql
end
#update selector(for jobs) based on fen_stage and status
def update_sel_from_status(fen_stage, status)
@conn.execute <<-EOF
update HGT_PAR_FENS hpf
set hpf.WIN_SEL = '#{status}'
where id in (select hpfs.HGT_PAR_FEN_ID
from HGT_PAR_FEN_STATS hpfs
where hpfs.FEN_STAGE_ID = #{fen_stage} and
hpfs.WIN_STATUS = '#{status}')
EOF
end
def update_fen_win_section_phylo_result
status_s = nil
#@fen_bootstrap_limit = 50.0
#discard windows with insufficient bootstrap
#if fen_nwk_re(:res).exist?
# bs_val = med_nwk_bootstrap(fen_nwk_re(:res))
# puts "bs_val: #{bs_val}"
#end
#if fen_nwk_re(:res).exist? and bs_val >= @fen_bootstrap_limit
# status_s = "phylo_result"
#elsif fen_nwk_re(:res).exist? and bs_val < @fen_bootstrap_limit
# status_s = "phylo_err_lowbs"
#else
# status_s = "phylo_err_nocalc"
#end
if fen_nwk_re(:res).exist?
status_s = "phylo_result"
else
status_s = "phylo_err_nocalc"
end
    merge_fen_stat(self.fen_stage, status_s)
end
#update current fen_stage window status
def update_fen_win_section_hgt_result
status_s = nil
if fen_hgt_output(:res).exist?
status_s = "result"
else
status_s = "err_nocalc"
end
#update current fen_stage
merge_fen_stat(self.fen_stage,status_s)
end
def section_prep_inp_files
puts "in section_prep_inp_files..."
section_prep_phylo_phy_align if calc_section == :phylo
section_prep_timing_files if calc_section == :timing
end
def section_parse_out_files
puts "in section_parse_out_files..."
update_fen_win_section_phylo_result if calc_section == :phylo
update_fen_win_section_hgt_result if calc_section == :hgt
# for beast & treepl
parse_output_files if calc_section == :timing
end
def section_prep_all_inp_files(win_status = :result)
iterate_over_exec_elem(self.prev_fen_stage,win_status){ |i|
#debugging
#172 = secE, the smallest gene, without transfers
#132 = thrC, the second smallest gene, with transfers
#152 = oppA, problem in window 10/12-275-313 with all gaps
#next if @gene.name != "dnaK"
#next if @win_size == 10
#next if @win_size == 25
puts "active exec elem: #{i}"
#recreate = true
section_create_work_folder(true)
#recreate = false
#section_create_work_folder(true)
#order by m.time_max asc /time_med
filter_mrcas
section_prep_inp_files
}
end
#parse results annotations
def section_parse_all_out_files(win_status = :result)
iterate_over_exec_elem(self.prev_fen_stage,win_status){ |i|
#debugging
#next if @gene.name != 'fabG'
#next if @win_size != 50
#next if @fen_no != "7"
#next if @gene.name != "thrC"
puts "active exec elem: #{i}"
#puts "timed_tr_annot_f(:res): #{timed_tr_annot_f(:res)}"
#do the work for existing files
#next if not File.exists? timed_tr_annot_f(:res)
section_parse_out_files
}
end
#load alignment in memory for constraints checking as @win_seq_hsh
#also write it to file
def section_prep_phylo_phy_align
#get original whole alignment
#puts "fasta_align_f(:orig): #{fasta_align_f(:orig)}"
oa = @ud.fastafile_to_original_alignment fasta_align_f(:orig)
#slice
@win_seq_hsh = oa.alignment_collect() { |str|
str[self.fen_idx_min..self.fen_idx_max]
}
#save
#puts "fen_phy_align_f(:work): #{fen_phy_align_f(:work)}"
@ud.seqshash_to_phylipfile(@win_seq_hsh,fen_phy_align_f(:work))
end
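#Illustrative example (hypothetical window coordinates): with fen_idx_min = 100 and
#fen_idx_max = 149, every aligned sequence is sliced to str[100..149], i.e. an inclusive
#50-column window, before being written to the PHYLIP file.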
def section_prep_timing_files
puts "in section_prep_timing_files..."
prepare_root_file
exp_mrcas_yaml if @timing_prog == :treepl
prepare_exec_files if @timing_prog == :beast
end
#export genes by nb_contins by window
def section_exp_tasks_yaml(tasks_page_dim, task_start_nb, win_sel)
#tasks = tasks_already_worked_out
#contr = ""
#tasks.each {|tsk|
# contr += "(#{tsk[0]},#{tsk[1]},#{tsk[2]}),"
#}
#contr.chomp! ","
#puts contr
sql=<<-EOF
select hpf.GENE_ID,
gn.NAME,
hpf.WIN_SIZE,
hpf.FEN_NO,
hpf.FEN_IDX_MIN,
hpf.FEN_IDX_MAX
from hgt_par_fens hpf
join genes gn on gn.id = hpf.gene_id
where hpf.win_sel = '#{win_sel}'
EOF
#puts "sql: #{sql}"
#sleep 5
#add limit 20
tasks = HgtParFen.find_by_sql(sql)
#mrca_ids = mrcas.collect{|m| m.id}
#puts tasks.inspect
puts "length: #{tasks.length}"
tasks_len = tasks.length
#tasks_len = 1
nb_pages = tasks_len / tasks_page_dim
nb_pages_rem = tasks_len % tasks_page_dim
puts "nb_pages: #{nb_pages}, nb_pages_rem: #{nb_pages_rem}"
page_ranges = []
#whole pages
nb_pages.times { |pn|
#puts "page number: #{pn}"
range_min = pn * tasks_page_dim
range_max = range_min + tasks_page_dim - 1
#puts "range_min: #{range_min}, range_max: #{range_max}"
page_ranges << [range_min,range_max]
}
#adds remainder
if nb_pages_rem != 0
range_min = nb_pages * tasks_page_dim
range_max = range_min + nb_pages_rem - 1
#puts "range_min: #{range_min}, range_max: #{range_max}"
page_ranges << [range_min,range_max]
end
page_ranges.each_index { |i|
rng_min = page_ranges[i][0]
rng_max = page_ranges[i][1]
job_tasks = tasks[rng_min..rng_max]
job_s = "%05d" % (task_start_nb + i )
File.open("#{compart_d(:jobs)}/job-tasks-#{job_s}.yaml", "w+") do |f|
f.puts job_tasks.to_yaml()
end
}
end
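#Illustrative paging example (hypothetical numbers): with 23 selected windows,
#tasks_page_dim = 10 and task_start_nb = 0, page_ranges becomes
#[[0, 9], [10, 19], [20, 22]] and three files are written:
#job-tasks-00000.yaml, job-tasks-00001.yaml and job-tasks-00002.yaml.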
#floc = :exec, :jobs, :work, :res
def section_rsync_folder(floc)
case floc
when :exec, :jobs, :work
sys "rsync -avzx --del #{compart_d(floc)}/ #{AppConfig.remote_server}:#{remote_compart_d(floc)}/"
#puts "rsync -avzx --del #{compart_d(floc)}/ #{AppConfig.remote_server}:#{remote_compart_d(floc)}/"
when :res
sys "rsync -avzx --del #{AppConfig.remote_server}:#{remote_compart_d(floc)}/ #{compart_d(floc)}/ "
#puts "rsync -avzx --del #{AppConfig.remote_server}:#{remote_compart_d(floc)}/ #{compart_d(floc)}/ "
else
raise "Please review rsync options !"
end
end
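#Illustrative usage (assumes AppConfig.remote_server and the remote_compart_d helper
#are configured): section_rsync_folder(:jobs) pushes the local jobs folder to the
#remote host, while section_rsync_folder(:res) pulls results back from the remote
#host; any other symbol raises.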
end
| 24.28866 | 186 | 0.588952 |
6a25f59497058c90bec934f5c71c2698e1db8fc6 | 352 | Spree::Core::Engine.routes.draw do
namespace :blog do
get '/', to: 'contents#index', as: 'index'
get '/:slug', to: 'contents#show', as: 'content'
end
namespace :api do
resources :posts
resources :tags
end
namespace :admin do
resources :authors
resources :categories
resources :posts
resources :tags
end
end
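# A sketch of the route helpers these declarations should generate (assuming the
# engine is reached through the usual `spree` routing proxy in the host app):
#   spree.blog_index_path          # GET /blog
#   spree.blog_content_path(slug)  # GET /blog/:slug
#   spree.api_posts_path           # GET /api/posts
#   spree.admin_posts_path         # GET /admin/posts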
| 18.526316 | 52 | 0.647727 |
62449d27c3fec4823a24ce1d2548de9f3e0784c2 | 267 | require 'spec_helper'
describe RubyAck::Invoker do
it "should invoke ack with --column as a default for the given parameter" do
IO.should_receive(:popen).with("ack jackpot --column")
invoker = RubyAck::Invoker.new
invoker.invoke("jackpot")
end
end
| 22.25 | 78 | 0.719101 |
7a4d32973f861a08a6c8a0e6cf1aecc2d36e66cc | 1,428 | # mundi_api
#
# This file was automatically generated by APIMATIC v2.0 (
# https://apimatic.io ).
module MundiApi
# Response model for listing subscription items
class ListSubscriptionItemsResponse < BaseModel
# The subscription items
# @return [List of GetSubscriptionItemResponse]
attr_accessor :data
# Paging object
# @return [PagingResponse]
attr_accessor :paging
# A mapping from model property names to API property names.
def self.names
@_hash = {} if @_hash.nil?
@_hash['data'] = 'data'
@_hash['paging'] = 'paging'
@_hash
end
def initialize(data = nil,
paging = nil)
@data = data
@paging = paging
end
# Creates an instance of the object from a hash.
def self.from_hash(hash)
return nil unless hash
# Extract variables from the hash.
# Parameter is an array, so we need to iterate through it
data = nil
unless hash['data'].nil?
data = []
hash['data'].each do |structure|
data << (GetSubscriptionItemResponse.from_hash(structure) if structure)
end
end
paging = PagingResponse.from_hash(hash['paging']) if hash['paging']
# Create object from extracted values.
ListSubscriptionItemsResponse.new(data,
paging)
end
end
end
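# Illustrative round-trip (hypothetical payload shape; field names follow this model's
# `names` mapping, not necessarily the live API):
#   resp = MundiApi::ListSubscriptionItemsResponse.from_hash(
#     'data'   => [{ 'id' => 'si_1' }],
#     'paging' => { 'total' => 1 }
#   )
#   resp.data    # => array of GetSubscriptionItemResponse objects
#   resp.paging  # => a PagingResponse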
| 27.461538 | 82 | 0.59944 |
acd68e165c072bc87fcd16248b5fe3dbb631ace0 | 1,008 | class Popt < Formula
desc "Library like getopt(3) with a number of enhancements"
homepage "http://rpm5.org"
url "http://rpm5.org/files/popt/popt-1.16.tar.gz"
sha256 "e728ed296fe9f069a0e005003c3d6b2dde3d9cad453422a10d6558616d304cc8"
bottle do
rebuild 1
sha256 "ceae94fc8e588309670a7a045186eee8ff3a9966a68650f044a14d101267b7b2" => :sierra
sha256 "60a7f19e8fecafd92a5beb7d6438efac915e8f3afe3d83575fb64bb4a6190aab" => :el_capitan
sha256 "56d1104516e23bb314a248904b8ec85afe2fdbf71555417eb8f91edc1286e6da" => :yosemite
sha256 "ba122e7f34b9b03ab5a32ab01124b61eb608c29e0c0d023462953ed03782dd2a" => :mavericks
sha256 "6d95c3530a7bd4d7099d91f448669b53bb51a071c5e9a8b9915cdc750bd72aec" => :mountain_lion
sha256 "fbfb8248da18982c7b9e80c1a3ef7016c5f4e2afdbf8e0e1ba8ca086c87a5d55" => :x86_64_linux
end
def install
system "./configure", "--disable-debug", "--disable-dependency-tracking",
"--prefix=#{prefix}"
system "make", "install"
end
end
| 43.826087 | 95 | 0.774802 |