Dataset columns:

  _id                string   (length 2 to 6)
  title              string   (length 9 to 130)
  partition          string   (3 distinct values)
  text               string   (length 30 to 4.3k)
  language           string   (1 distinct value)
  meta_information   dict

Each record below lists its fields in this order: _id, title, partition, text (shown truncated), language, meta_information.
q26500
Barby.Code93.c_checksum
test
def c_checksum sum = 0 checksum_values.each_with_index do
ruby
{ "resource": "" }
q26501
Barby.Code93.k_checksum
test
def k_checksum sum = 0 checksum_values_with_c_checksum.each_with_index do |value, index|
ruby
{ "resource": "" }
q26502
Barby.PDFWriterOutputter.annotate_pdf
test
def annotate_pdf(pdf, options={}) with_options options do xpos, ypos = x, y orig_xpos = xpos if barcode.two_dimensional? boolean_groups.reverse_each do |groups| groups.each do |bar,amount| if bar pdf.move_to(xpos, ypos). line_to(xpos, ypos+xdim). line_to(xpos+(xdim*amount), ypos+xdim). line_to(xpos+(xdim*amount), ypos). line_to(xpos, ypos). fill end xpos += (xdim*amount) end xpos = orig_xpos ypos += xdim end else boolean_groups.each
ruby
{ "resource": "" }
q26503
Barby.Code39.characters
test
def characters chars = raw_characters extended ? chars.map{|c|
ruby
{ "resource": "" }
q26504
Barby.Code128.characters
test
def characters chars = data.split(//n) if type == 'C' result = [] count = 0 while count < chars.size if chars[count] =~ /^\d$/ #If encountering a digit, next char/byte *must* be second digit in pair. I.e. if chars[count] is 5, #chars[count+1] must be /[0-9]/, otherwise it's not valid
ruby
{ "resource": "" }
q26505
Barby.Code128.checksum
test
def checksum pos = 0 (numbers+extra_numbers).inject(start_num) do |sum,number| pos += 1
ruby
{ "resource": "" }
q26506
Barby.Code25.encoding_for_bars
test
def encoding_for_bars(*bars) wide, narrow, space = wide_encoding, narrow_encoding, space_encoding
ruby
{ "resource": "" }
q26507
Barby.CairoOutputter.render_to_cairo_context
test
def render_to_cairo_context(context, options={}) if context.respond_to?(:have_current_point?) and context.have_current_point? current_x, current_y = context.current_point else current_x = x(options) || margin(options) current_y = y(options) || margin(options) end _xdim = xdim(options) _height = height(options) original_current_x = current_x context.save do context.set_source_color(:black) context.fill do if barcode.two_dimensional? boolean_groups.each do |groups| groups.each do |bar,amount| current_width = _xdim * amount if bar context.rectangle(current_x, current_y, current_width, _xdim) end current_x += current_width end
ruby
{ "resource": "" }
q26508
Barby.CairoOutputter.to_png
test
def to_png(options={}) output_to_string_io do |io| Cairo::ImageSurface.new(options[:format], full_width(options),
ruby
{ "resource": "" }
q26509
Barby.CairoOutputter.to_ps
test
def to_ps(options={}) output_to_string_io do |io| Cairo::PSSurface.new(io, full_width(options), full_height(options)) do |surface|
ruby
{ "resource": "" }
q26510
Barby.CairoOutputter.to_pdf
test
def to_pdf(options={}) output_to_string_io do |io| Cairo::PDFSurface.new(io, full_width(options),
ruby
{ "resource": "" }
q26511
Barby.CairoOutputter.to_svg
test
def to_svg(options={}) output_to_string_io do |io| Cairo::SVGSurface.new(io, full_width(options),
ruby
{ "resource": "" }
q26512
CMSScanner.Browser.max_threads=
test
def max_threads=(number) @max_threads = number.to_i.positive? && throttle.zero? ? number.to_i : 1
ruby
{ "resource": "" }
q26513
CMSScanner.WebSite.online?
test
def online?(path = nil) NS::Browser.get(url(path)
ruby
{ "resource": "" }
q26514
CMSScanner.WebSite.head_and_get
test
def head_and_get(path, codes = [200], params = {}) url_to_get = url(path) head_params = (params[:head] || {}).merge(head_or_get_params) head_res = NS::Browser.forge_request(url_to_get, head_params).run
ruby
{ "resource": "" }
q26515
GHTorrent.Mirror.db
test
def db return @db unless @db.nil? Sequel.single_threaded = true @db = Sequel.connect(config(:sql_url), :encoding => 'utf8') #@db.loggers << Logger.new(STDOUT)
ruby
{ "resource": "" }
q26516
GHTorrent.Mirror.ensure_commit
test
def ensure_commit(repo, sha, user, comments = true) ensure_repo(user, repo) c = retrieve_commit(repo, sha, user) if c.nil? warn "Commit #{user}/#{repo} -> #{sha} does not exist" return end stored = store_commit(c, repo, user) ensure_parents(c) if
ruby
{ "resource": "" }
q26517
GHTorrent.Mirror.ensure_parents
test
def ensure_parents(commit) commits = db[:commits] parents = db[:commit_parents] commit['parents'].map do |p| save do url = p['url'].split(/\//) this = commits.first(:sha => commit['sha']) parent = commits.first(:sha => url[7]) if parent.nil? c = retrieve_commit(url[5], url[7], url[4]) if c.nil? warn "Could not retrieve commit_parent #{url[4]}/#{url[5]} -> #{url[7]} to #{this[:sha]}" next end parent = store_commit(c, url[5], url[4]) end if parent.nil? warn "Could not find #{url[4]}/#{url[5]} -> #{url[7]}, parent to commit #{this[:sha]}" next end if parents.first(:commit_id => this[:id], :parent_id => parent[:id]).nil?
ruby
{ "resource": "" }
q26518
GHTorrent.Mirror.ensure_user_followers
test
def ensure_user_followers(followed) curuser = ensure_user(followed, false, false) followers = db.from(:followers, :users).\ where(Sequel.qualify('followers', 'follower_id') => Sequel.qualify('users', 'id')).\ where(Sequel.qualify('followers', 'user_id') => curuser[:id]).select(:login).all retrieve_user_followers(followed).reduce([]) do |acc, x|
ruby
{ "resource": "" }
q26519
GHTorrent.Mirror.ensure_user_follower
test
def ensure_user_follower(followed, follower, date_added = nil) follower_user = ensure_user(follower, false, false) followed_user = ensure_user(followed, false, false) if followed_user.nil? or follower_user.nil? warn "Could not find follower #{follower} or user #{followed}" return end followers = db[:followers] follower_id = follower_user[:id] followed_id = followed_user[:id] follower_exists = followers.first(:user_id => followed_id, :follower_id => follower_id) if follower_exists.nil? added = if date_added.nil? max(follower_user[:created_at], followed_user[:created_at]) else date_added end retrieved = retrieve_user_follower(followed, follower) if retrieved.nil? warn "Could not retrieve follower #{follower} for #{followed}" return end followers.insert(:user_id => followed_id,
ruby
{ "resource": "" }
q26520
GHTorrent.Mirror.ensure_user_byemail
test
def ensure_user_byemail(email, name) users = db[:users] usr = users.first(:email => email) if usr.nil? u = retrieve_user_byemail(email, name) if u.nil? or u['login'].nil? warn "Could not retrieve user #{email} through search API query" login = (0...8).map { 65.+(rand(25)).chr }.join users.insert(:email => email, :name => name, :login => login, :fake => true, :deleted => false, :created_at => Time.now) info "Added user fake #{login} -> #{email}" users.first(:login => login) else in_db = users.first(:login => u['login']) geo = geolocate(location: u['location']) if in_db.nil? users.insert(:login => u['login'], :name => u['name'], :company => u['company'], :email => u['email'], :long => geo[:long], :lat => geo[:lat], :country_code => geo[:country_code], :state => geo[:state], :city => geo[:city], :fake => false, :deleted => false, :created_at => date(u['created_at']))
ruby
{ "resource": "" }
q26521
GHTorrent.Mirror.ensure_repo
test
def ensure_repo(user, repo, recursive = false) repos = db[:projects] curuser = ensure_user(user, false, false) if curuser.nil? warn "Could not find user #{user}" return end currepo = repos.first(:owner_id => curuser[:id], :name => repo) unless currepo.nil? debug "Repo #{user}/#{repo} exists" return refresh_repo(user, repo, currepo) end r = retrieve_repo(user, repo, true) if r.nil? warn "Could not retrieve repo #{user}/#{repo}" return end if r['owner']['login'] != curuser[:login] info "Repo changed owner from #{curuser[:login]} to #{r['owner']['login']}" curuser = ensure_user(r['owner']['login'], false, false) end repos.insert(:url => r['url'], :owner_id => curuser[:id], :name => r['name'], :description => unless r['description'].nil? then r['description'][0..254] else nil end, :language => r['language'], :created_at => date(r['created_at']), :updated_at => date(Time.now), :etag => unless r['etag'].nil? then r['etag'] end) unless r['parent'].nil? parent_owner = r['parent']['owner']['login'] parent_repo = r['parent']['name'] parent = ensure_repo(parent_owner, parent_repo)
ruby
{ "resource": "" }
q26522
GHTorrent.Mirror.ensure_languages
test
def ensure_languages(owner, repo) currepo = ensure_repo(owner, repo) langs = retrieve_languages(owner, repo) if langs.nil? or langs.empty? warn "Could not find languages for repo #{owner}/#{repo}" return end ts = Time.now langs.keys.each do |lang|
ruby
{ "resource": "" }
q26523
GHTorrent.Mirror.ensure_fork_commits
test
def ensure_fork_commits(owner, repo, parent_owner, parent_repo) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repo #{owner}/#{repo}" return end parent = ensure_repo(parent_owner, parent_repo) if parent.nil? warn "Could not find repo #{parent_owner}/#{parent_repo}, parent of #{owner}/#{repo}" return end strategy = case when config(:fork_commits).match(/all/i) :all when config(:fork_commits).match(/fork_point/i) :fork_point when config(:fork_commits).match(/none/i) :none else :fork_point end fork_commit = ensure_fork_point(owner, repo) if fork_commit.nil? or fork_commit.empty? warn "Could not find fork commit for repo #{owner}/#{repo}. Retrieving all commits." return ensure_commits(owner, repo, fork_all: true) end debug "Retrieving commits for fork #{owner}/#{repo}: strategy is #{strategy}" return if strategy == :none if strategy == :fork_point # Retrieve commits up to fork point (fork_commit strategy) info "Retrieving commits for #{owner}/#{repo} until fork commit #{fork_commit[:sha]}" master_branch = retrieve_default_branch(parent_owner, parent_repo) return if master_branch.nil? sha = master_branch found = false while not found commits = retrieve_commits(repo, sha, owner, 1) # This means that we retrieved no commits if commits.size == 0 break end # This means we retrieved the last page again if commits.size == 1 and commits[0]['sha'] == sha break end for c in commits ensure_commit(repo, c['sha'], owner) sha = c['sha'] if c['sha'] == fork_commit[:sha] found = true break
ruby
{ "resource": "" }
q26524
GHTorrent.Mirror.ensure_fork_point
test
def ensure_fork_point(owner, repo) fork = ensure_repo(owner, repo, false) if fork[:forked_from].nil? warn "Repo #{owner}/#{repo} is not a fork" return nil end # Return commit if already specified unless fork[:forked_commit_id].nil? commit = db[:commits].where(:id => fork[:forked_commit_id]).first return commit unless commit.nil? end parent = db.from(:projects, :users).\ where(Sequel.qualify('projects', 'owner_id') => Sequel.qualify('users', 'id')).\ where(Sequel.qualify('projects', 'id') => fork[:forked_from]).\ select(Sequel.qualify('users', 'login'), Sequel.qualify('projects','name')).first if parent.nil? warn "Unknown parent for repo #{owner}/#{repo}" return nil end default_branch = retrieve_default_branch(parent[:login], parent[:name]) # Retrieve diff between parent and fork master branch diff = retrieve_master_branch_diff(owner, repo, default_branch, parent[:login], parent[:name], default_branch) if diff.nil? or diff.empty? # Try a bit harder by refreshing the default branch default_branch = retrieve_default_branch(parent[:login], parent[:name], true) diff = retrieve_master_branch_diff(owner, repo, default_branch, parent[:login], parent[:name], default_branch) end if diff.nil? or diff.empty? # This means that the are no common ancestors between the repos # This can apparently happen when the parent repo was renamed or force-pushed # example: https://github.com/openzipkin/zipkin/compare/master...aa1wi:master warn
ruby
{ "resource": "" }
q26525
GHTorrent.Mirror.ensure_orgs
test
def ensure_orgs(user) retrieve_orgs(user).map{|o|
ruby
{ "resource": "" }
q26526
GHTorrent.Mirror.ensure_participation
test
def ensure_participation(user, organization, members = true) org = ensure_org(organization, members) if org.nil? warn "Could not find organization #{organization}" return end usr = ensure_user(user, false, false) org_members = db[:organization_members] participates = org_members.first(:user_id => usr[:id], :org_id => org[:id]) if participates.nil? org_members.insert(:user_id => usr[:id], :org_id => org[:id]) info
ruby
{ "resource": "" }
q26527
GHTorrent.Mirror.ensure_org
test
def ensure_org(organization, members = true) org = db[:users].first(:login => organization, :type => 'org') if org.nil? org = ensure_user(organization, false, false) # Not an organization, don't go ahead if org[:type] != 'ORG'
ruby
{ "resource": "" }
q26528
GHTorrent.Mirror.ensure_commit_comments
test
def ensure_commit_comments(user, repo, sha) commit_id = db[:commits].first(:sha => sha)[:id] stored_comments = db[:commit_comments].filter(:commit_id => commit_id) commit_comments = retrieve_commit_comments(user, repo, sha) not_saved = commit_comments.reduce([]) do |acc, x| if stored_comments.find{|y| y[:comment_id] == x['id']}.nil?
ruby
{ "resource": "" }
q26529
GHTorrent.Mirror.ensure_watchers
test
def ensure_watchers(owner, repo) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repo #{owner}/#{repo} for retrieving watchers" return end watchers = db.from(:watchers, :users).\ where(Sequel.qualify('watchers', 'user_id') => Sequel.qualify('users', 'id')).\ where(Sequel.qualify('watchers', 'repo_id') => currepo[:id]).select(:login).all retrieve_watchers(owner, repo).reduce([]) do |acc, x|
ruby
{ "resource": "" }
q26530
GHTorrent.Mirror.ensure_pull_requests
test
def ensure_pull_requests(owner, repo, refresh = false) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repo #{owner}/#{repo} for retrieving pull requests" return end raw_pull_reqs = if refresh retrieve_pull_requests(owner, repo, refresh = true) else pull_reqs = db[:pull_requests].filter(:base_repo_id => currepo[:id]).all retrieve_pull_requests(owner, repo).reduce([]) do |acc, x| if
ruby
{ "resource": "" }
q26531
GHTorrent.Mirror.ensure_pull_request_history
test
def ensure_pull_request_history(id, ts, act, actor) user = unless actor.nil? ensure_user(actor, false, false) end pull_req_history = db[:pull_request_history] entry = if ['opened', 'merged'].include? act pull_req_history.first(:pull_request_id => id, :action => act) else pull_req_history.first(:pull_request_id => id, :created_at => (ts - 3)..(ts + 3), :action => act) end if entry.nil? pull_req_history.insert(:pull_request_id => id, :created_at => ts, :action => act, :actor_id => unless user.nil? then user[:id] end)
ruby
{ "resource": "" }
q26532
GHTorrent.Mirror.pr_is_intra_branch
test
def pr_is_intra_branch(req) return false unless pr_has_head_repo(req) if req['head']['repo']['owner']['login'] == req['base']['repo']['owner']['login'] and
ruby
{ "resource": "" }
q26533
GHTorrent.Mirror.ensure_forks
test
def ensure_forks(owner, repo) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repo #{owner}/#{repo} for retrieving forks" return end existing_forks = db.from(:projects, :users).\ where(Sequel.qualify('users', 'id') => Sequel.qualify('projects', 'owner_id')). \ where(Sequel.qualify('projects', 'forked_from') => currepo[:id]).\
ruby
{ "resource": "" }
q26534
GHTorrent.Mirror.ensure_fork
test
def ensure_fork(owner, repo, fork_id) fork = retrieve_fork(owner, repo, fork_id) if fork.nil? warn "Could not retrieve fork #{owner}/#{repo} -> #{fork_id}" return end fork_name = if fork['full_name'].nil? then fork['url'].split(/\//)[4..5].join('/') else fork['full_name'] end fork_owner = fork_name.split(/\//)[0] fork_name = fork_name.split(/\//)[1] r = ensure_repo(fork_owner, fork_name, true)
ruby
{ "resource": "" }
q26535
GHTorrent.Mirror.ensure_issues
test
def ensure_issues(owner, repo) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repo #{owner}/#{repo} for retrieving issues" return end issues = db[:issues].filter(:repo_id => currepo[:id]).all raw_issues = retrieve_issues(owner, repo).reduce([]) do |acc, x| if issues.find { |y| y[:issue_id] == x['number'] }.nil? acc << x
ruby
{ "resource": "" }
q26536
GHTorrent.Mirror.ensure_issue
test
def ensure_issue(owner, repo, issue_id, events = true, comments = true, labels = true) issues = db[:issues] repository = ensure_repo(owner, repo) if repository.nil? warn "Could not find repo #{owner}/#{repo} for retrieving issue #{issue_id}" return end cur_issue = issues.first(:issue_id => issue_id, :repo_id => repository[:id]) retrieved = retrieve_issue(owner, repo, issue_id) if retrieved.nil? warn "Could not retrieve issue #{owner}/#{repo} -> #{issue_id}" return end # Pull requests and issues share the same issue_id pull_req = unless retrieved['pull_request'].nil? or retrieved['pull_request']['patch_url'].nil? debug "Issue #{owner}/#{repo}->#{issue_id} is a pull request" ensure_pull_request(owner, repo, issue_id, false, false, false) end if cur_issue.nil? reporter = ensure_user(retrieved['user']['login'], false, false) assignee = unless retrieved['assignee'].nil? ensure_user(retrieved['assignee']['login'], false, false) end issues.insert(:repo_id => repository[:id], :assignee_id => unless assignee.nil? then assignee[:id] end, :reporter_id => reporter[:id],
ruby
{ "resource": "" }
q26537
GHTorrent.Mirror.ensure_issue_events
test
def ensure_issue_events(owner, repo, issue_id) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repository #{owner}/#{repo} for retrieving events for issue #{issue_id}" return end issue = ensure_issue(owner, repo, issue_id, false, false, false) if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving events" return end
ruby
{ "resource": "" }
q26538
GHTorrent.Mirror.ensure_issue_event
test
def ensure_issue_event(owner, repo, issue_id, event_id) issue = ensure_issue(owner, repo, issue_id, false, false, false) if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving event #{event_id}" return end issue_event_str = "#{owner}/#{repo} -> #{issue_id}/#{event_id}" curevent = db[:issue_events].first(:issue_id => issue[:id], :event_id => event_id) if curevent.nil? retrieved = retrieve_issue_event(owner, repo, issue_id, event_id) if retrieved.nil? warn "Could not retrieve issue_event #{owner}/#{repo} -> #{issue_id}/#{issue_event_str}" return elsif retrieved['actor'].nil? warn "Could not find issue_event_actor #{owner}/#{repo} -> #{issue_id}/#{issue_event_str}" return end actor = ensure_user(retrieved['actor']['login'], false, false) action_specific = case retrieved['event'] when "referenced" then retrieved['commit_id'] when "merged" then retrieved['commit_id'] when "closed" then retrieved['commit_id'] else nil end if retrieved['event'] == 'assigned' def update_assignee(owner, repo, issue, actor) db[:issues].first(:id => issue[:id]).update(:assignee_id => actor[:id]) info "Updated #{owner}/#{repo} -> #{issue[:id]}, assignee -> #{actor[:id]}" end if issue[:assignee_id].nil? then update_assignee(owner, repo, issue, actor) else existing = db[:issue_events].\ filter(:issue_id => issue[:id],:action => 'assigned').\ order(Sequel.desc(:created_at)).first if existing.nil?
ruby
{ "resource": "" }
q26539
GHTorrent.Mirror.ensure_issue_comments
test
def ensure_issue_comments(owner, repo, issue_id, pull_req_id = nil) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find repository #{owner}/#{repo} for retrieving issue comments for issue #{issue_id}" return end issue = if pull_req_id.nil? ensure_issue(owner, repo, issue_id, false, false, false) else db[:issues].first(:pull_request_id => pull_req_id) end if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving issue comments" return end retrieve_issue_comments(owner, repo,
ruby
{ "resource": "" }
q26540
GHTorrent.Mirror.ensure_issue_comment
test
def ensure_issue_comment(owner, repo, issue_id, comment_id, pull_req_id = nil) issue = if pull_req_id.nil? ensure_issue(owner, repo, issue_id, false, false, false) else db[:issues].first(:pull_request_id => pull_req_id) end if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving comment #{comment_id}" return end issue_comment_str = "#{owner}/#{repo} -> #{issue_id}/#{comment_id}" curcomment = db[:issue_comments].first(:issue_id => issue[:id], :comment_id => comment_id) if curcomment.nil? retrieved = retrieve_issue_comment(owner, repo, issue_id, comment_id) if retrieved.nil? warn "Could not retrieve issue_comment #{issue_comment_str}" return end user = ensure_user(retrieved['user']['login'], false, false) db[:issue_comments].insert( :comment_id
ruby
{ "resource": "" }
q26541
GHTorrent.Mirror.ensure_labels
test
def ensure_labels(owner, repo) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find #{owner}/#{repo} for retrieving issue labels" return end repo_labels = db[:repo_labels].filter(:repo_id => currepo[:id]).all retrieve_repo_labels(owner, repo).reduce([]) do |acc, x|
ruby
{ "resource": "" }
q26542
GHTorrent.Mirror.ensure_repo_label
test
def ensure_repo_label(owner, repo, name) currepo = ensure_repo(owner, repo) if currepo.nil? warn "Could not find #{owner}/#{repo} for retrieving label #{name}" return end label = db[:repo_labels].first(:repo_id => currepo[:id], :name => name) if label.nil? retrieved = retrieve_repo_label(owner, repo, name) if retrieved.nil? warn "Could not retrieve repo_label #{owner}/#{repo} -> #{name}" return end db[:repo_labels].insert(
ruby
{ "resource": "" }
q26543
GHTorrent.Mirror.ensure_issue_labels
test
def ensure_issue_labels(owner, repo, issue_id) issue = ensure_issue(owner, repo, issue_id, false, false, false) if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} for retrieving labels" return end issue_labels = db.from(:issue_labels, :repo_labels)\ .where(Sequel.qualify('issue_labels', 'label_id') => Sequel.qualify('repo_labels', 'id'))\
ruby
{ "resource": "" }
q26544
GHTorrent.Mirror.ensure_issue_label
test
def ensure_issue_label(owner, repo, issue_id, name) issue = ensure_issue(owner, repo, issue_id, false, false, false) if issue.nil? warn "Could not find issue #{owner}/#{repo} -> #{issue_id} to assign label #{name}" return end label = ensure_repo_label(owner, repo, name) if label.nil? warn "Could not find repo label #{owner}/#{repo} -> #{name}" return end
ruby
{ "resource": "" }
q26545
GHTorrent.Mirror.transaction
test
def transaction(&block) db persister result = nil start_time = Time.now begin db.transaction(:rollback => :reraise, :isolation => :repeatable, :retry_on => @retry_on_error, :num_retries => 3) do
ruby
{ "resource": "" }
q26546
GHTorrent.Mirror.store_commit
test
def store_commit(c, repo, user) commits = db[:commits] commit = commits.first(:sha => c['sha']) if commit.nil? author = commit_user(c['author'], c['commit']['author']) commiter = commit_user(c['committer'], c['commit']['committer']) repository = ensure_repo(user, repo) if repository.nil? warn "Could not find repo #{user}/#{repo} for storing commit #{c['sha']}" end commits.insert(:sha => c['sha'], :author_id => author[:id],
ruby
{ "resource": "" }
q26547
GHTorrent.Logging.log
test
def log(level, msg) case level when :fatal then loggerr.fatal (retrieve_caller + msg) when :error then loggerr.error (retrieve_caller + msg) when :warn then loggerr.warn (retrieve_caller + msg) when :info then
ruby
{ "resource": "" }
q26548
GHTorrent.APIClient.paged_api_request
test
def paged_api_request(url, pages = config(:mirror_history_pages_back), last = nil) url = ensure_max_per_page(url) data = api_request_raw(url) return [] if data.nil? unless data.meta['link'].nil? links = parse_links(data.meta['link']) last = links['last'] if last.nil? if pages > 0 pages = pages - 1 if pages == 0 return parse_request_result(data)
ruby
{ "resource": "" }
q26549
GHTorrent.APIClient.last_updated
test
def last_updated(url, etag) begin ts = Time.now response = do_request(url, '', etag) info "Successful etag request. URL: #{url}, Etag: #{etag}, Remaining: #{@remaining}, Total: #{Time.now.to_ms - ts.to_ms} ms" rescue OpenURI::HTTPError => e response = e.io if response.status.first != '304'
ruby
{ "resource": "" }
q26550
GHTorrent.APIClient.num_pages
test
def num_pages(url) url = ensure_max_per_page(url) data = api_request_raw(url) if data.nil? or data.meta.nil? or data.meta['link'].nil? return 1 end links = parse_links(data.meta['link']) if links.nil? or links['last'].nil?
ruby
{ "resource": "" }
q26551
GHTorrent.APIClient.parse_links
test
def parse_links(links) links.split(/,/).reduce({}) do |acc, x| matches = x.strip.match(/<(.*)>; rel=\"(.*)\"/)
ruby
{ "resource": "" }
q26552
GHTorrent.APIClient.parse_request_result
test
def parse_request_result(result) if result.nil? [] else json = result.read if json.nil? [] else r = JSON.parse(json)
ruby
{ "resource": "" }
q26553
GHTorrent.APIClient.api_request_raw
test
def api_request_raw(url, media_type = '') begin start_time = Time.now contents = do_request(url, media_type) total = Time.now.to_ms - start_time.to_ms info "Successful request. URL: #{url}, Remaining: #{@remaining}, Total: #{total} ms" contents rescue OpenURI::HTTPError => e @remaining = e.io.meta['x-ratelimit-remaining'].to_i @reset = e.io.meta['x-ratelimit-reset'].to_i case e.io.status[0].to_i # The following indicate valid Github return codes when 400, # Bad request 403, # Forbidden 404, # Not found 409, # Conflict -- returned on gets of empty repos 422 then # Unprocessable entity warn request_error_msg(url, e) return nil when 401 # Unauthorized warn request_error_msg(url, e) warn "Unauthorised request with token: #{@token}" raise e when 451 # DMCA takedown warn request_error_msg(url, e) warn "Repo was taken down (DMCA)" return nil
ruby
{ "resource": "" }
q26554
GHTorrent.APIClient.attach_to
test
def attach_to(ip) TCPSocket.instance_eval do (class << self; self; end).instance_eval do alias_method :original_open, :open case RUBY_VERSION when /1.8/, /1.9/ define_method(:open) do |conn_address, conn_port| original_open(conn_address, conn_port, ip) end else define_method(:open) do |conn_address, conn_port, local_host, local_port| original_open(conn_address, conn_port, ip, local_port) end end
ruby
{ "resource": "" }
q26555
GHTorrent.Persister.connect
test
def connect(adapter, settings) driver = ADAPTERS[adapter.intern]
ruby
{ "resource": "" }
q26556
GHTorrent.Retriever.retrieve_commit
test
def retrieve_commit(repo, sha, user) commit = persister.find(:commits, {'sha' => "#{sha}"}) if commit.empty? url = ghurl "repos/#{user}/#{repo}/commits/#{sha}" c = api_request(url) if c.nil? or c.empty? return
ruby
{ "resource": "" }
q26557
GHTorrent.Retriever.retrieve_commits
test
def retrieve_commits(repo, sha, user, pages = -1) url = if sha.nil? ghurl "repos/#{user}/#{repo}/commits" else ghurl "repos/#{user}/#{repo}/commits?sha=#{sha}" end
ruby
{ "resource": "" }
q26558
GHTorrent.Retriever.retrieve_orgs
test
def retrieve_orgs(user) url = ghurl "users/#{user}/orgs"
ruby
{ "resource": "" }
q26559
GHTorrent.Retriever.retrieve_watchers
test
def retrieve_watchers(user, repo) repo_bound_items(user, repo, :watchers, ["repos/#{user}/#{repo}/stargazers"],
ruby
{ "resource": "" }
q26560
GHTorrent.Retriever.retrieve_watcher
test
def retrieve_watcher(user, repo, watcher) repo_bound_item(user, repo, watcher, :watchers,
ruby
{ "resource": "" }
q26561
GHTorrent.Retriever.get_repo_events
test
def get_repo_events(owner, repo) url = ghurl("repos/#{owner}/#{repo}/events") r = paged_api_request(url) r.each do |e| unless get_event(e['id']).empty? debug "Repository event #{owner}/#{repo} -> #{e['type']}-#{e['id']} already exists" else persister.store(:events, e)
ruby
{ "resource": "" }
q26562
GHTorrent.Retriever.retrieve_master_branch_diff
test
def retrieve_master_branch_diff(owner, repo, branch, parent_owner, parent_repo, parent_branch) branch = retrieve_default_branch(owner, repo) if branch.nil? parent_branch = retrieve_default_branch(parent_owner, parent_repo) if parent_branch.nil? return nil if branch.nil? or parent_branch.nil?
ruby
{ "resource": "" }
q26563
GHTorrent.Retriever.retrieve_default_branch
test
def retrieve_default_branch(owner, repo, refresh = false) retrieved = retrieve_repo(owner, repo, refresh) return nil if retrieved.nil? master_branch = 'master' if retrieved['default_branch'].nil? # The currently stored repo entry has been created before the # default_branch field was added to the schema
ruby
{ "resource": "" }
q26564
GHTorrent.Command.process_options
test
def process_options command = self @options = Trollop::options(command.args) do command.prepare_options(self) banner <<-END Standard options: END opt :config, 'config.yaml file location', :short => 'c', :default => 'config.yaml' opt :verbose, 'verbose mode', :short => 'v' opt :addr, 'IP address to use for performing requests', :short => 'a', :type => String opt :token, 'GitHub OAuth token', :type => String, :short => 't'
ruby
{ "resource": "" }
q26565
GHTorrent.Command.validate
test
def validate if options[:config].nil? unless (File.exist?("config.yaml")) Trollop::die "No config file in default location (#{Dir.pwd}). You need to specify the #{:config} parameter. Read the documentation on how to create a config.yaml file." end else Trollop::die "Cannot find file #{options[:config]}" \ unless File.exist?(options[:config]) end unless @options[:user].nil? if not Process.uid == 0
ruby
{ "resource": "" }
q26566
GHTorrent.Command.queue_client
test
def queue_client(queue, key = queue, ack = :after, block) stopped = false while not stopped begin conn = Bunny.new(:host => config(:amqp_host), :port => config(:amqp_port), :username => config(:amqp_username), :password => config(:amqp_password)) conn.start ch = conn.create_channel debug "Queue setting prefetch to #{config(:amqp_prefetch)}" ch.prefetch(config(:amqp_prefetch)) debug "Queue connection to #{config(:amqp_host)} succeeded" x = ch.topic(config(:amqp_exchange), :durable => true, :auto_delete => false) q = ch.queue(queue, :durable => true) q.bind(x, :routing_key => key) q.subscribe(:block => true, :manual_ack => true) do |delivery_info, properties, msg| if ack == :before ch.acknowledge(delivery_info.delivery_tag) end begin block.call(msg) ensure
ruby
{ "resource": "" }
q26567
GHTorrent.Utils.read_value
test
def read_value(from, key) return from if key.nil? or key == "" key.split(/\./).reduce({}) do |acc, x| unless acc.nil? if acc.empty? # Initial run acc = from[x] else if acc.has_key?(x) acc = acc[x] else # Some intermediate key does not exist
ruby
{ "resource": "" }
q26568
GHTorrent.Geolocator.location_filter
test
def location_filter(location) return nil if location.nil? location.\ strip.\ downcase.\ tr('#"<>[]', '').\
ruby
{ "resource": "" }
q26569
Karafka.BaseResponder.validate_usage!
test
def validate_usage! registered_topics = self.class.topics.map do |name, topic| topic.to_h.merge!( usage_count: messages_buffer[name]&.count || 0 ) end used_topics = messages_buffer.map do |name, usage| topic = self.class.topics[name] || Responders::Topic.new(name, registered: false) topic.to_h.merge!(usage_count: usage.count) end result = Karafka::Schemas::ResponderUsage.call(
ruby
{ "resource": "" }
q26570
Karafka.BaseResponder.validate_options!
test
def validate_options! return true unless self.class.options_schema messages_buffer.each_value do |messages_set| messages_set.each do |message_data| result = self.class.options_schema.call(message_data.last)
ruby
{ "resource": "" }
q26571
Karafka.BaseResponder.deliver!
test
def deliver! messages_buffer.each_value do |data_elements| data_elements.each do |data, options| # We map this topic name, so it will match namespaced/etc topic in Kafka # @note By default will not change topic (if default mapper used)
ruby
{ "resource": "" }
q26572
Karafka.Process.notice_signal
test
def notice_signal(signal) Thread.new do Karafka.monitor.instrument('process.notice_signal',
ruby
{ "resource": "" }
q26573
TensorStream.EmbeddingLookup.embedding_lookup
test
def embedding_lookup(params, ids, partition_strategy: "mod", name: nil, validate_indices: true, max_norm: nil) _embedding_lookup_and_transform(params, ids,
ruby
{ "resource": "" }
q26574
TensorStream.EmbeddingLookup._embedding_lookup_and_transform
test
def _embedding_lookup_and_transform(params, ids, partition_strategy: "mod", name: nil, max_norm: nil, transform_fn: nil) raise TensorStream::ValueError, "Need at least one param" if params.nil? params = [params] unless params.is_a?(Array) TensorStream.name_scope(name, "embedding_lookup", values: params + [ids]) do |name| np = params.size ids = TensorStream.convert_to_tensor(ids, name: "ids") if (np == 1) && (transform_fn.nil? || (ids.shape.size == 1)) result = nil TensorStream.colocate_with(params[0]) do result = _clip(TensorStream.gather(params[0], ids, name: name), ids, max_norm) result = transform_fn.call(result) if transform_fn end return TensorStream.identity(result) else flat_ids = TensorStream.reshape(ids, [-1]) original_indices = TensorStream.range(TensorStream.size(flat_ids)) p_assignments = nil new_ids = nil if partition_strategy == "mod" p_assignments = flat_ids % np new_ids = floor_div(flat_ids, np) elsif partition_strategy == "div" raise "not yet supported!" else raise TensorStream::ValueError, "Unrecognized partition strategy: " + partition_strategy end p_assignments = TensorStream.cast(p_assignments, :int32) gather_ids = TensorStream.dynamic_partition(new_ids, p_assignments, np) pindices = TensorStream.dynamic_partition(original_indices, p_assignments, np) partitioned_result = [] (0...np).each do |p| pids = gather_ids[p] result = nil TensorStream.colocate_with(params[p]) do result = TensorStream.gather(params[p], pids) if transform_fn # If transform_fn is provided, the clip_by_norm precedes # the transform and hence must be co-located. See below # for the counterpart if transform_fn is not proveded. result = transform_fn.call(_clip(result, pids, max_norm)) end end partitioned_result << result end
ruby
{ "resource": "" }
q26575
TensorStream.Protobuf.load
test
def load(pbfile) f = File.new(pbfile, "r") lines = [] while !f.eof? && (str = f.readline.strip)
ruby
{ "resource": "" }
q26576
TensorStream.Ops.assert_equal
test
def assert_equal(x, y, data: nil, summarize: nil, message: nil, name: nil) _op(:assert_equal, x, y,
ruby
{ "resource": "" }
q26577
TensorStream.Ops.gradients
test
def gradients(tensor_ys, wrt_xs, name: "gradients", stop_gradients: nil) tensor_ys = tensor_ys.op gs = wrt_xs.map(&:op).collect { |x| stops = stop_gradients ? stop_gradients.map(&:name).join("_") : "" gradient_program_name = "grad_#{tensor_ys.name}_#{x.name}_#{stops}".to_sym tensor_graph = tensor_ys.graph tensor_program = if tensor_graph.node_added?(gradient_program_name) tensor_graph.get_node(gradient_program_name) else tensor_graph.name_scope("gradient_wrt_#{x.name}") do
ruby
{ "resource": "" }
q26578
TensorStream.Ops.random_normal
test
def random_normal(shape, dtype: :float32, mean: 0.0, stddev: 1.0, seed: nil, name: nil) options = {dtype: dtype, mean: mean, stddev:
ruby
{ "resource": "" }
q26579
TensorStream.Ops.eye
test
def eye(num_rows, num_columns: nil, dtype: :float32, name: nil) _op(:eye, num_rows, num_columns
ruby
{ "resource": "" }
q26580
TensorStream.Ops.glorot_uniform_initializer
test
def glorot_uniform_initializer(seed: nil, dtype: nil) TensorStream::Initializer.new(->
ruby
{ "resource": "" }
q26581
TensorStream.Ops.random_uniform_initializer
test
def random_uniform_initializer(minval: 0, maxval: 1, seed: nil, dtype: nil) TensorStream::Initializer.new(-> { _op(:random_uniform,
ruby
{ "resource": "" }
q26582
TensorStream.Ops.slice
test
def slice(input, start, size, name: nil)
ruby
{ "resource": "" }
q26583
TensorStream.Ops.ones
test
def ones(shape, dtype: :float32, name: nil)
ruby
{ "resource": "" }
q26584
TensorStream.Ops.logical_and
test
def logical_and(input_a, input_b, name: nil) check_data_types(input_a,
ruby
{ "resource": "" }
q26585
TensorStream.Ops.reduce_mean
test
def reduce_mean(input_tensor, axis = nil, keepdims: false, name: nil) reduce(:mean, input_tensor,
ruby
{ "resource": "" }
q26586
TensorStream.Ops.concat
test
def concat(values, axis, name: "concat") if values.is_a?(Array) _op(:concat, axis, *values, name: name) else
ruby
{ "resource": "" }
q26587
TensorStream.Ops.dynamic_partition
test
def dynamic_partition(data, partitions, num_partitions, name: nil) result = _op(:dynamic_partition, data, partitions, num_partitions: num_partitions, name: nil)
ruby
{ "resource": "" }
q26588
TensorStream.Ops.where
test
def where(condition, true_t = nil, false_t = nil, name:
ruby
{ "resource": "" }
q26589
TensorStream.Ops.asin
test
def asin(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26590
TensorStream.Ops.acos
test
def acos(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26591
TensorStream.Ops.atan
test
def atan(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26592
TensorStream.Ops.cast
test
def cast(input, dtype, name: nil) input = convert_to_tensor(input) return input if input.data_type == dtype
ruby
{ "resource": "" }
q26593
TensorStream.Ops.print
test
def print(input, data, message: nil, name: nil) _op(:print, input, data,
ruby
{ "resource": "" }
q26594
TensorStream.Ops.sec
test
def sec(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26595
TensorStream.Ops.sqrt
test
def sqrt(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26596
TensorStream.Ops.log
test
def log(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26597
TensorStream.Ops.exp
test
def exp(input, name: nil) check_allowed_types(input,
ruby
{ "resource": "" }
q26598
TensorStream.Ops.pad
test
def pad(tensor, paddings, mode: "CONSTANT", name: nil) _op(:pad, tensor,
ruby
{ "resource": "" }
q26599
TensorStream.Ops.gather
test
def gather(params, indices, validate_indices: nil, name: nil, axis: 0) _op(:gather, params,
ruby
{ "resource": "" }