Dataset columns:

| column | type |
|---|---|
| _id | string (length 2–6) |
| title | string (length 9–130) |
| partition | string (3 distinct values) |
| text | string (length 30–4.3k) |
| language | string (1 distinct value) |
| meta_information | dict |
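Each record pairs a snippet identifier and dotted method title with a Ruby source excerpt, a partition label, and a metadata dict. As a rough illustration of how a table with this schema might be consumed, the sketch below assumes the dataset is hosted on the Hugging Face Hub under a hypothetical repository ID (`org/ruby-code-snippets`) and uses the `datasets` library to tally partition values and print one record's fields.

```python
# Minimal sketch of reading records with this schema via the Hugging Face
# `datasets` library. The repository ID "org/ruby-code-snippets" is a
# placeholder (hypothetical), not the dataset's real name.
from datasets import load_dataset

ds_dict = load_dataset("org/ruby-code-snippets")  # hypothetical repository ID

# Count snippets per partition value and keep one record to inspect.
counts = {}
example = None
for split in ds_dict.values():
    for record in split:
        counts[record["partition"]] = counts.get(record["partition"], 0) + 1
        if example is None:
            example = record

print(counts)                           # e.g. {"validation": ..., "test": ...}
if example:
    print(example["_id"])               # e.g. "q26300"
    print(example["title"])             # dotted Class.method name, e.g. "Grenache.Base.announce"
    print(example["language"])          # "ruby" for every row in this excerpt
    print(example["text"][:80])         # start of the (possibly truncated) method body
    print(example["meta_information"])  # dict, e.g. {"resource": ""}
```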
q26300
|
Grenache.Base.announce
|
validation
|
def announce(key, port, opts={}, &block)
payload = [key,port]
link.send 'announce', payload, opts, &block
|
ruby
|
{
"resource": ""
}
|
q26301
|
Validation.Condition._logical_operator
|
validation
|
def _logical_operator(delegated, *conditions)
unless conditions.all?{|c|conditionable? c}
raise TypeError, 'wrong object for condition'
end
|
ruby
|
{
"resource": ""
}
|
q26302
|
RightHook.SpecHelpers.post_with_signature
|
validation
|
def post_with_signature(opts)
path = opts.fetch(:path)
payload = opts.fetch(:payload)
secret = opts.fetch(:secret)
post path, {payload: payload},
|
ruby
|
{
"resource": ""
}
|
q26303
|
Rothko.Drawing.get_height
|
validation
|
def get_height(img)
new_height = (img.height
|
ruby
|
{
"resource": ""
}
|
q26304
|
Rothko.Drawing.create_color_string
|
validation
|
def create_color_string
(0...img.height).map do |y|
(0...img.width).map do |x|
pix = self.img[x,y]
pix_vals
|
ruby
|
{
"resource": ""
}
|
q26305
|
Rothko.Drawing.find_closest_term_color
|
validation
|
def find_closest_term_color(pixel_values)
color = ""
lil_dist = 195075
@@palette.each do |col_name, col_values|
dist = find_distance(col_values, pixel_values)
|
ruby
|
{
"resource": ""
}
|
q26306
|
Rothko.Drawing.draw_line
|
validation
|
def draw_line(pixels)
pix_line = ""
pixels.each do |pixel|
pix_line = pix_line + "
|
ruby
|
{
"resource": ""
}
|
q26307
|
Jail.Cdnjs.tree
|
validation
|
def tree
@tree and return @tree
@tree = []
file_set = version_files
while child = file_set.shift
tree << child #if child.dir?
if child.type == "dir"
|
ruby
|
{
"resource": ""
}
|
q26308
|
NasaApod.Client.search
|
validation
|
def search(options = {})
self.date = options[:date] || date
self.hd = options[:hd] || hd
|
ruby
|
{
"resource": ""
}
|
q26309
|
QuackConcurrency.Sleeper.process_timeout
|
validation
|
def process_timeout(timeout)
unless timeout == nil
raise TypeError, "'timeout' must be nil or a Numeric" unless timeout.is_a?(Numeric)
|
ruby
|
{
"resource": ""
}
|
q26310
|
RightHook.Authenticator.find_or_create_authorization_by_note
|
validation
|
def find_or_create_authorization_by_note(note)
found_auth = list_authorizations.find {|auth| auth.note == note}
if found_auth
|
ruby
|
{
"resource": ""
}
|
q26311
|
Grenache.Link.send
|
validation
|
def send(type, payload, opts = {}, &block)
res = http_send type, Oj.dump({"rid" => uuid, "data"
|
ruby
|
{
"resource": ""
}
|
q26312
|
Jquids.IncludesHelper.jquids_includes
|
validation
|
def jquids_includes(options = {})
# Set the format for the datepickers
Jquids.format = options[:format] if options.has_key?(:format)
html_out = ""
if options.has_key?(:style)
html_out << stylesheet_link_tag(jq_ui_stylesheet(options[:style])) + "\n" unless options[:style] == nil or options[:style] == :none or options[:style] == false
else
html_out << stylesheet_link_tag(jq_ui_stylesheet) + "\n"
end
jq_vrs = options.has_key?(:jQuery) ? options[:jQuery] : Jquids::JQVersions.last
ui_vrs = options.has_key?(:jQueryUI) ? options[:jQueryUI] : Jquids::UIVersions.last
trtp_vrs = options.has_key?(:TRTimepicker) ? options[:TRTimepicker] : :none
# A little bit of css of the timepicker, and it is not added if the
# timepicker javascript file is not included
unless trtp_vrs == :none or trtp_vrs == false or trtp_vrs == nil
html_out << "<style type=\"text/css\">.ui-timepicker-div .ui-widget-header{margin-bottom:8px;}.ui-timepicker-div dl{text-align:left;}.ui-timepicker-div dl dt{height:25px;}.ui-timepicker-div dl dd{margin:-25px 0 10px 65px;}.ui-timepicker-div td{font-size:90%;}</style>\n"
end
html_out << javascript_include_tag(jq_ui_javascripts(jq_vrs, ui_vrs, trtp_vrs)) + "\n"
options[:datepicker_options] ||= {}
# Some opiniated defaults (basically an attempt to make the jQuery
# datepicker similar to the calendar_date_select with out making
# modifications or having local dependencies)
options[:datepicker_options][:showOtherMonths] = true if options[:datepicker_options][:showOtherMonths].nil?
options[:datepicker_options][:selectOtherMonths] = true if options[:datepicker_options][:selectOtherMonths].nil?
options[:datepicker_options][:changeMonth] = true if options[:datepicker_options][:changeMonth].nil?
options[:datepicker_options][:changeYear] = true if options[:datepicker_options][:changeYear].nil?
options[:datepicker_options][:dateFormat] = Jquids.format[:js_date]
Jquids.jquids_process_options(options)
# Decides whether the 'to_json' method exists (part of rails 3) or if the
# gem needs to us the json gem
datepicker_options =
if options[:datepicker_options].respond_to?(:to_json)
options.delete(:datepicker_options).to_json
else
begin
JSON.unparse(options.delete(:datepicker_options))
rescue
""
end
end
html_out << '<script type="text/javascript">$.datepicker.setDefaults(' + datepicker_options + ');'
unless trtp_vrs == :none or trtp_vrs == false or trtp_vrs == nil
options[:timepicker_options] ||= {}
# Some opiniated defaults (basically an attempt to make the jQuery
# datepicker similar to the calendar_date_select with out making
# modifications or having local dependencies)
# Sets the time format based off of the current format
options[:timepicker_options][:ampm] = Jquids.format[:ampm]
options[:timepicker_options][:timeFormat] = Jquids.format[:tr_js_time]
|
ruby
|
{
"resource": ""
}
|
q26313
|
Euler.ConfigOptions.method_missing
|
validation
|
def method_missing method, *args, &block
if args.empty?
@config.send(method)
else
|
ruby
|
{
"resource": ""
}
|
q26314
|
HijriUmmAlqura.Hijri.to_s
|
validation
|
def to_s
today = arabno_to_hindi(day) + " "
today = today + HijriUmmAlqura::MONTHNAMES[month] + " "
|
ruby
|
{
"resource": ""
}
|
q26315
|
HijriUmmAlqura.Hijri.jd
|
validation
|
def jd(date = self)
index = (12 * (date.year - 1)) + date.month - 16260
mcjdn
|
ruby
|
{
"resource": ""
}
|
q26316
|
HijriUmmAlqura.Hijri.gd
|
validation
|
def gd(date = self)
j_date = jd(date)
|
ruby
|
{
"resource": ""
}
|
q26317
|
HijriUmmAlqura.Hijri.add
|
validation
|
def add(date = self, offset, period)
y = period == 'y' ? (date.year + offset) : date.year
m = period == 'm' ? (month_of_year(date.year, date.month) + offset) : month_of_year(date.year, date.month)
d = date.day
begin
if (period == 'd' || period == 'w')
week_days = period == 'w' ? 7 : 1
j_date = jd
j_date = j_date + offset * week_days
result = HijriUmmAlqura.jd(j_date)
return result
elsif (period == 'm')
rys
|
ruby
|
{
"resource": ""
}
|
q26318
|
HijriUmmAlqura.Hijri.+
|
validation
|
def + (n)
case n
when Numeric then
j_date = jd + n * 1
|
ruby
|
{
"resource": ""
}
|
q26319
|
QuackConcurrency.Future.raise
|
validation
|
def raise(exception = nil)
exception = case
when exception == nil then StandardError.new
when exception.is_a?(Exception) then exception
when Exception >= exception then exception.new
else
Kernel.raise(TypeError, "'exception' must be nil or an instance of or descendant of Exception")
end
|
ruby
|
{
"resource": ""
}
|
q26320
|
Megam.ScmmAccount.to_hash
|
validation
|
def to_hash
index_hash = Hash.new
index_hash["json_claz"] = self.class.name
index_hash["creationDate"] = creationDate
index_hash["admin"] = admin
index_hash["type"] = type
|
ruby
|
{
"resource": ""
}
|
q26321
|
Mingle4r.CommonClassMethods.site=
|
validation
|
def site=(site)
if site != self.site
@site = site
uri = URI.parse(site)
@user = URI.decode(uri.user) if(uri.user)
|
ruby
|
{
"resource": ""
}
|
q26322
|
Mingle4r.CommonClassMethods.find
|
validation
|
def find(*args)
scope = args.slice!(0)
options = args.slice!(0) || {}
|
ruby
|
{
"resource": ""
}
|
q26323
|
Mingle4r.CommonClassMethods.create_resource_class
|
validation
|
def create_resource_class
raise "Please set the site for #{self} class." unless(self.site)
|
ruby
|
{
"resource": ""
}
|
q26324
|
Bixby.App.run!
|
validation
|
def run!
# load agent from config or cli opts
agent = load_agent()
fix_ownership()
# debug mode, stay in front
if @config[:debug] then
Logging::Logger.root.add_appenders("stdout")
return start_websocket_client()
end
# start daemon
validate_argv()
|
ruby
|
{
"resource": ""
}
|
q26325
|
Bixby.App.start_websocket_client
|
validation
|
def start_websocket_client
# make sure log level is still set correctly here
Bixby::Log.setup_logger(:level => Logging.appenders["file"].level)
|
ruby
|
{
"resource": ""
}
|
q26326
|
Bixby.App.fix_ownership
|
validation
|
def fix_ownership
return if Process.uid != 0
begin
uid = Etc.getpwnam("bixby").uid
gid = Etc.getgrnam("bixby").gid
# user/group exists, chown
File.chown(uid,
|
ruby
|
{
"resource": ""
}
|
q26327
|
KubernetesDeploy.DeferredSummaryLogging.print_summary
|
test
|
def print_summary(status)
status_string = status.to_s.humanize.upcase
if status == :success
heading("Result: ", status_string, :green)
level = :info
elsif status == :timed_out
heading("Result: ", status_string, :yellow)
level = :fatal
else
|
ruby
|
{
"resource": ""
}
|
q26328
|
KubernetesDeploy.DeployTask.find_bad_files_from_kubectl_output
|
test
|
def find_bad_files_from_kubectl_output(line)
# stderr often contains one or more lines like the following, from which we can extract the file path(s):
# Error from server (TypeOfError): error when creating "/path/to/service-gqq5oh.yml": Service "web" is invalid:
line.scan(%r{"(/\S+\.ya?ml\S*)"}).each_with_object([]) do |matches, bad_files|
|
ruby
|
{
"resource": ""
}
|
q26329
|
KubernetesDeploy.DeployTask.confirm_ejson_keys_not_prunable
|
test
|
def confirm_ejson_keys_not_prunable
secret = ejson_provisioner.ejson_keys_secret
return unless secret.dig("metadata", "annotations", KubernetesResource::LAST_APPLIED_ANNOTATION)
@logger.error("Deploy cannot proceed because protected resource " \
"Secret/#{EjsonSecretProvisioner::EJSON_KEYS_SECRET} would be
|
ruby
|
{
"resource": ""
}
|
q26330
|
Omnibus.Compressor.for_current_system
|
test
|
def for_current_system(compressors)
family = Ohai["platform_family"]
if family == "mac_os_x"
if compressors.include?(:dmg)
return DMG
end
if compressors.include?(:tgz)
return TGZ
end
|
ruby
|
{
"resource": ""
}
|
q26331
|
Omnibus.Packager::BFF.create_bff_file
|
test
|
def create_bff_file
# We are making the assumption that sudo exists.
# Unforunately, the owner of the file in the staging directory is what
# will be on the target machine, and mkinstallp can't tell you if that
# is a bad thing (it usually is).
# The match is so we only pick the lowest level of the project dir.
# This implies that if we are in /tmp/staging/project/dir/things,
# we will chown from 'project' on, rather than 'project/dir', which leaves
# project owned by the build user (which is incorrect)
# First - let's find out who we are.
shellout!("sudo chown -Rh 0:0 #{File.join(staging_dir, project.install_dir.match(/^\/?(\w+)/).to_s)}")
log.info(log_key) { "Creating .bff file" }
# Since
|
ruby
|
{
"resource": ""
}
|
q26332
|
Omnibus.FileSyncer.glob
|
test
|
def glob(pattern)
pattern = Pathname.new(pattern).cleanpath.to_s
Dir.glob(pattern, File::FNM_DOTMATCH).sort.reject do |file|
|
ruby
|
{
"resource": ""
}
|
q26333
|
Omnibus.FileSyncer.sync
|
test
|
def sync(source, destination, options = {})
unless File.directory?(source)
raise ArgumentError, "`source' must be a directory, but was a " \
"`#{File.ftype(source)}'! If you just want to sync a file, use " \
"the `copy' method instead."
end
source_files = all_files_under(source, options)
# Ensure the destination directory exists
FileUtils.mkdir_p(destination) unless File.directory?(destination)
# Copy over the filtered source files
source_files.each do |source_file|
relative_path = relative_path_for(source_file, source)
# Create the parent directory
parent = File.join(destination, File.dirname(relative_path))
FileUtils.mkdir_p(parent) unless File.directory?(parent)
case File.ftype(source_file).to_sym
when :directory
FileUtils.mkdir_p("#{destination}/#{relative_path}")
when :link
target = File.readlink(source_file)
Dir.chdir(destination) do
FileUtils.ln_sf(target, "#{destination}/#{relative_path}")
end
when :file
source_stat = File.stat(source_file)
# Detect 'files' which are hard links and use ln instead of cp to
# duplicate them, provided their source is in place already
if hardlink? source_stat
if existing = hardlink_sources[[source_stat.dev, source_stat.ino]]
FileUtils.ln(existing, "#{destination}/#{relative_path}", force: true)
else
begin
FileUtils.cp(source_file, "#{destination}/#{relative_path}")
rescue Errno::EACCES
FileUtils.cp_r(source_file, "#{destination}/#{relative_path}", remove_destination: true)
end
hardlink_sources.store([source_stat.dev, source_stat.ino], "#{destination}/#{relative_path}")
end
else
# First attempt a regular copy. If we don't have write
# permission on the File, open will probably fail with
# EACCES (making it hard to sync files with permission
# r--r--r--). Rescue this error and use cp_r's
# :remove_destination option.
begin
|
ruby
|
{
"resource": ""
}
|
q26334
|
Omnibus.FileSyncer.relative_path_for
|
test
|
def relative_path_for(path, parent)
Pathn
|
ruby
|
{
"resource": ""
}
|
q26335
|
Omnibus.Compressor::DMG.clean_disks
|
test
|
def clean_disks
log.info(log_key) { "Cleaning previously mounted disks" }
existing_disks = shellout!("mount | grep \"/Volumes/#{volume_name}\" | awk '{print $1}'")
existing_disks.stdout.lines.each do |existing_disk|
|
ruby
|
{
"resource": ""
}
|
q26336
|
Omnibus.Compressor::DMG.copy_assets_to_dmg
|
test
|
def copy_assets_to_dmg
log.info(log_key) { "Copying assets into dmg" }
FileSyncer.glob("#{resources_dir}/*").each do |file|
|
ruby
|
{
"resource": ""
}
|
q26337
|
Omnibus.Packager::IPS.write_transform_file
|
test
|
def write_transform_file
render_template(resource_path("doc-transform.erb"),
destination: transform_file,
variables: {
|
ruby
|
{
"resource": ""
}
|
q26338
|
Omnibus.Packager::IPS.write_pkg_metadata
|
test
|
def write_pkg_metadata
render_template(resource_path("gen.manifestfile.erb"),
destination: pkg_metadata_file,
variables: {
name: safe_base_package_name,
fmri_package_name: fmri_package_name,
description: project.description,
summary: project.friendly_name,
arch: safe_architecture,
}
)
# Append the contents of symlinks_file if it exists
if symlinks_file
File.open(pkg_metadata_file, "a") do |symlink|
|
ruby
|
{
"resource": ""
}
|
q26339
|
Omnibus.HealthCheck.health_check_otool
|
test
|
def health_check_otool
current_library = nil
bad_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | egrep '\.(dylib|bundle)$' | xargs otool -L") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
when /^\s+(.+) \(.+\)$/
|
ruby
|
{
"resource": ""
}
|
q26340
|
Omnibus.HealthCheck.health_check_aix
|
test
|
def health_check_aix
current_library = nil
bad_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f | xargs file | grep \"RISC System\" | awk -F: '{print $1}' | xargs -n 1 ldd") do |line|
case line
when /^(.+) needs:$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+)$/
name = Regexp.last_match[1]
|
ruby
|
{
"resource": ""
}
|
q26341
|
Omnibus.HealthCheck.health_check_ldd
|
test
|
def health_check_ldd
regexp_ends = ".*(" + IGNORED_ENDINGS.map { |e| e.gsub(/\./, '\.') }.join("|") + ")$"
regexp_patterns = IGNORED_PATTERNS.map { |e| ".*" + e.gsub(/\//, '\/') + ".*" }.join("|")
regexp = regexp_ends + "|" + regexp_patterns
current_library = nil
bad_libs = {}
read_shared_libs("find #{project.install_dir}/ -type f -regextype posix-extended ! -regex '#{regexp}' | xargs ldd") do |line|
case line
when /^(.+):$/
current_library = Regexp.last_match[1]
log.debug(log_key) { "Analyzing dependencies for #{current_library}" }
when /^\s+(.+) \=\>\s+(.+)( \(.+\))?$/
name = Regexp.last_match[1]
linked = Regexp.last_match[2]
bad_libs = check_for_bad_library(bad_libs, current_library, name, linked)
|
ruby
|
{
"resource": ""
}
|
q26342
|
Omnibus.HealthCheck.read_shared_libs
|
test
|
def read_shared_libs(command)
cmd = shellout(command)
|
ruby
|
{
"resource": ""
}
|
q26343
|
Omnibus.HealthCheck.check_for_bad_library
|
test
|
def check_for_bad_library(bad_libs, current_library, name, linked)
safe = nil
whitelist_libs = case Ohai["platform"]
when "arch"
ARCH_WHITELIST_LIBS
when "mac_os_x"
MAC_WHITELIST_LIBS
when "solaris2"
SOLARIS_WHITELIST_LIBS
when "smartos"
SMARTOS_WHITELIST_LIBS
when "freebsd"
FREEBSD_WHITELIST_LIBS
when "aix"
AIX_WHITELIST_LIBS
else
WHITELIST_LIBS
end
whitelist_libs.each do |reg|
safe ||= true if reg.match(name)
end
whitelist_files.each do |reg|
safe ||= true if reg.match(current_library)
end
log.debug(log_key) { " --> Dependency: #{name}" }
log.debug(log_key) {
|
ruby
|
{
"resource": ""
}
|
q26344
|
Omnibus.Digestable.digest
|
test
|
def digest(path, type = :md5)
digest = digest_from_type(type)
|
ruby
|
{
"resource": ""
}
|
q26345
|
Omnibus.Digestable.update_with_file_contents
|
test
|
def update_with_file_contents(digest, filename)
File.open(filename) do |io|
while (chunk = io.read(1024 * 8))
|
ruby
|
{
"resource": ""
}
|
q26346
|
Omnibus.Publisher.packages
|
test
|
def packages
@packages ||= begin
publish_packages = Array.new
build_packages = FileSyncer.glob(@pattern).map { |path| Package.new(path) }
if @options[:platform_mappings]
# the platform map is a simple hash with publish to build platform mappings
@options[:platform_mappings].each_pair do |build_platform, publish_platforms|
# Splits `ubuntu-12.04` into `ubuntu` and `12.04`
build_platform, build_platform_version = build_platform.rpartition("-") - %w{ - }
# locate the package for the build platform
packages = build_packages.select do |p|
p.metadata[:platform] == build_platform &&
p.metadata[:platform_version] == build_platform_version
end
if packages.empty?
log.warn(log_key) do
"Could not locate a package for build platform #{build_platform}-#{build_platform_version}. " \
"Publishing will be skipped for: #{publish_platforms.join(', ')}"
end
end
publish_platforms.each do |publish_platform|
publish_platform, publish_platform_version = publish_platform.rpartition("-") - %w{ - }
packages.each do |p|
# create a copy of our package before mucking with its metadata
|
ruby
|
{
"resource": ""
}
|
q26347
|
Omnibus.Packager::PKG.write_distribution_file
|
test
|
def write_distribution_file
render_template(resource_path("distribution.xml.erb"),
destination: "#{staging_dir}/Distribution",
mode: 0600,
variables: {
friendly_name: project.friendly_name,
|
ruby
|
{
"resource": ""
}
|
q26348
|
Omnibus.ArtifactoryPublisher.artifact_for
|
test
|
def artifact_for(artifact)
md5 = artifact.respond_to?(:metadata) ? artifact.metadata[:md5] : digest(artifact.path, :md5)
sha1 = artifact.respond_to?(:metadata) ? artifact.metadata[:sha1] : digest(artifact.path, :sha1)
Artifactory::Resource::Artifact.new(
|
ruby
|
{
"resource": ""
}
|
q26349
|
Omnibus.ArtifactoryPublisher.build_for
|
test
|
def build_for(packages)
metadata = packages.first.metadata
name = metadata[:name]
# Attempt to load the version manifest data from the packages metadata
manifest = if version_manifest = metadata[:version_manifest]
Manifest.from_hash(version_manifest)
else
Manifest.new(
metadata[:version],
# we already know the `version_manifest` entry is
# missing so we can't pull in the `build_git_revision`
nil,
metadata[:license]
)
end
# Upload the actual package
log.info(log_key) { "Saving build info for #{name}, Build ##{manifest.build_version}" }
Artifactory::Resource::Build.new(
client: client,
|
ruby
|
{
"resource": ""
}
|
q26350
|
Omnibus.ArtifactoryPublisher.client
|
test
|
def client
@client ||= Artifactory::Client.new(
endpoint: Config.artifactory_endpoint,
username: Config.artifactory_username,
password: Config.artifactory_password,
ssl_pem_file:
|
ruby
|
{
"resource": ""
}
|
q26351
|
Omnibus.ArtifactoryPublisher.metadata_properties_for
|
test
|
def metadata_properties_for(package)
metadata = {
"omnibus.project" => package.metadata[:name],
"omnibus.platform" => package.metadata[:platform],
"omnibus.platform_version" => package.metadata[:platform_version],
"omnibus.architecture" => package.metadata[:arch],
"omnibus.version" => package.metadata[:version],
"omnibus.iteration" => package.metadata[:iteration],
"omnibus.license"
|
ruby
|
{
"resource": ""
}
|
q26352
|
Omnibus.ArtifactoryPublisher.remote_path_for
|
test
|
def remote_path_for(package)
File.join(
Config.artifactory_base_path,
|
ruby
|
{
"resource": ""
}
|
q26353
|
Omnibus.Packager::MSI.parameters
|
test
|
def parameters(val = NULL)
if null?(val)
@parameters || {}
else
unless val.is_a?(Hash)
|
ruby
|
{
"resource": ""
}
|
q26354
|
Omnibus.Packager::MSI.wix_light_extension
|
test
|
def wix_light_extension(extension)
unless extension.is_a?(String)
raise InvalidValue.new(:wix_light_extension, "be an
|
ruby
|
{
"resource": ""
}
|
q26355
|
Omnibus.Packager::MSI.wix_light_delay_validation
|
test
|
def wix_light_delay_validation(val = false)
unless val.is_a?(TrueClass) || val.is_a?(FalseClass)
raise InvalidValue.new(:iwix_light_delay_validation, "be
|
ruby
|
{
"resource": ""
}
|
q26356
|
Omnibus.Packager::MSI.wix_candle_extension
|
test
|
def wix_candle_extension(extension)
unless extension.is_a?(String)
raise InvalidValue.new(:wix_candle_extension, "be an
|
ruby
|
{
"resource": ""
}
|
q26357
|
Omnibus.Packager::MSI.write_localization_file
|
test
|
def write_localization_file
render_template(resource_path("localization-#{localization}.wxl.erb"),
destination: "#{staging_dir}/localization-#{localization}.wxl",
|
ruby
|
{
"resource": ""
}
|
q26358
|
Omnibus.Packager::MSI.write_parameters_file
|
test
|
def write_parameters_file
render_template(resource_path("parameters.wxi.erb"),
destination: "#{staging_dir}/parameters.wxi",
variables: {
name: project.package_name,
friendly_name: project.friendly_name,
maintainer: project.maintainer,
upgrade_code: upgrade_code,
|
ruby
|
{
"resource": ""
}
|
q26359
|
Omnibus.Packager::MSI.write_source_file
|
test
|
def write_source_file
paths = []
# Remove C:/
install_dir = project.install_dir.split("/")[1..-1].join("/")
# Grab all parent paths
Pathname.new(install_dir).ascend do |path|
paths << path.to_s
end
# Create the hierarchy
hierarchy = paths.reverse.inject({}) do |hash, path|
hash[File.basename(path)] = path.gsub(/[^[:alnum:]]/, "").upcase + "LOCATION"
hash
end
# The last item in the path MUST be named PROJECTLOCATION or else space
# robots will cause permanent damage to you and your family.
hierarchy[hierarchy.keys.last] = "PROJECTLOCATION"
# If the path hierarchy is > 1, the customizable installation directory
# should default to the second-to-last item in the hierarchy. If the
# hierarchy is smaller than that, then just use the system drive.
wix_install_dir = if hierarchy.size > 1
hierarchy.to_a[-2][1]
else
|
ruby
|
{
"resource": ""
}
|
q26360
|
Omnibus.Packager::MSI.write_bundle_file
|
test
|
def write_bundle_file
render_template(resource_path("bundle.wxs.erb"),
destination: "#{staging_dir}/bundle.wxs",
variables: {
name: project.package_name,
friendly_name: project.friendly_name,
maintainer: project.maintainer,
upgrade_code: upgrade_code,
parameters: parameters,
|
ruby
|
{
"resource": ""
}
|
q26361
|
Omnibus.BuildVersionDSL.resolve
|
test
|
def resolve(dependency)
if from_dependency? && version_dependency == dependency.name
|
ruby
|
{
"resource": ""
}
|
q26362
|
Omnibus.BuildVersionDSL.maybe_append_timestamp
|
test
|
def maybe_append_timestamp(version)
if Config.append_timestamp && !has_timestamp?(version)
[version,
|
ruby
|
{
"resource": ""
}
|
q26363
|
Omnibus.BuildVersionDSL.has_timestamp?
|
test
|
def has_timestamp?(version)
_ver, build_info = version.split("+")
return false if build_info.nil?
build_info.split(".").any? do |part|
begin
|
ruby
|
{
"resource": ""
}
|
q26364
|
Omnibus.BuildVersionDSL.construct_build_version
|
test
|
def construct_build_version(version_source = nil)
case source_type
when :git
version = if version_source
Omnibus::BuildVersion.new(version_source.project_dir)
else
Omnibus::BuildVersion.new
|
ruby
|
{
"resource": ""
}
|
q26365
|
Omnibus.Templating.render_template_content
|
test
|
def render_template_content(source, variables = {})
template = ERB.new(File.read(source), nil, "-")
struct =
if variables.empty?
|
ruby
|
{
"resource": ""
}
|
q26366
|
Omnibus.Logger.deprecated
|
test
|
def deprecated(progname, &block)
meta = Proc.new { "DEPRECATED: #{yield}" }
|
ruby
|
{
"resource": ""
}
|
q26367
|
Omnibus.Logger.add
|
test
|
def add(severity, progname, &block)
return true if io.nil? || severity < level
message = format_message(severity,
|
ruby
|
{
"resource": ""
}
|
q26368
|
Omnibus.Builder.command
|
test
|
def command(command, options = {})
warn_for_shell_commands(command)
build_commands <<
|
ruby
|
{
"resource": ""
}
|
q26369
|
Omnibus.Builder.make
|
test
|
def make(*args)
options = args.last.is_a?(Hash) ? args.pop : {}
make = options.delete(:bin) ||
# Prefer gmake on non-windows environments.
if !windows? && Omnibus.which("gmake")
|
ruby
|
{
"resource": ""
}
|
q26370
|
Omnibus.Builder.appbundle
|
test
|
def appbundle(software_name, lockdir: nil, gem: nil, without: nil, extra_bin_files: nil , **options)
build_commands << BuildCommand.new("appbundle `#{software_name}'") do
bin_dir = "#{install_dir}/bin"
appbundler_bin = embedded_bin("appbundler")
lockdir ||=
begin
app_software = project.softwares.find do |p|
p.name == software_name
end
if app_software.nil?
raise "could not find software definition for #{software_name}, add a dependency to it, or pass a lockdir argument to appbundle command."
end
app_software.project_dir
end
command = [ appbundler_bin, "'#{lockdir}'", "'#{bin_dir}'" ]
|
ruby
|
{
"resource": ""
}
|
q26371
|
Omnibus.Builder.rake
|
test
|
def rake(command, options = {})
build_commands << BuildCommand.new("rake `#{command}'") do
bin = embedded_bin("rake")
|
ruby
|
{
"resource": ""
}
|
q26372
|
Omnibus.Builder.touch
|
test
|
def touch(file, options = {})
build_commands << BuildCommand.new("touch `#{file}'") do
Dir.chdir(software.project_dir) do
parent = File.dirname(file)
|
ruby
|
{
"resource": ""
}
|
q26373
|
Omnibus.Builder.delete
|
test
|
def delete(path, options = {})
build_commands << BuildCommand.new("delete `#{path}'") do
Dir.chdir(software.project_dir) do
FileSyncer.glob(path).each do |file|
|
ruby
|
{
"resource": ""
}
|
q26374
|
Omnibus.Builder.copy
|
test
|
def copy(source, destination, options = {})
command = "copy `#{source}' to `#{destination}'"
build_commands << BuildCommand.new(command) do
Dir.chdir(software.project_dir) do
files = FileSyncer.glob(source)
if files.empty?
log.warn(log_key) { "no matched files
|
ruby
|
{
"resource": ""
}
|
q26375
|
Omnibus.Builder.update_config_guess
|
test
|
def update_config_guess(target: ".", install: [:config_guess, :config_sub])
build_commands << BuildCommand.new("update_config_guess `target: #{target} install: #{install.inspect}'") do
config_guess_dir = "#{install_dir}/embedded/lib/config_guess"
%w{config.guess config.sub}.each do |c|
unless File.exist?(File.join(config_guess_dir, c))
|
ruby
|
{
"resource": ""
}
|
q26376
|
Omnibus.Compressor::TGZ.write_tgz
|
test
|
def write_tgz
# Grab the contents of the gzipped tarball for reading
contents = gzipped_tarball
# Write the .tar.gz into the staging directory
File.open("#{staging_dir}/#{package_name}", "wb") do |tgz|
while chunk = contents.read(1024)
tgz.write(chunk)
end
|
ruby
|
{
"resource": ""
}
|
q26377
|
Omnibus.Compressor::TGZ.tarball
|
test
|
def tarball
tarfile = StringIO.new("")
Gem::Package::TarWriter.new(tarfile) do |tar|
path = "#{staging_dir}/#{packager.package_name}"
name = packager.package_name
mode = File.stat(path).mode
tar.add_file(name, mode) do |tf|
File.open(path,
|
ruby
|
{
"resource": ""
}
|
q26378
|
Omnibus.NetFetcher.clean
|
test
|
def clean
needs_cleaning = File.exist?(project_dir)
if needs_cleaning
log.info(log_key) { "Cleaning project
|
ruby
|
{
"resource": ""
}
|
q26379
|
Omnibus.NetFetcher.deploy
|
test
|
def deploy
if downloaded_file.end_with?(*ALL_EXTENSIONS)
log.info(log_key) { "Extracting `#{safe_downloaded_file}' to `#{safe_project_dir}'" }
extract
else
log.info(log_key) { "`#{safe_downloaded_file}' is not an archive - copying to `#{safe_project_dir}'" }
if File.directory?(downloaded_file)
# If the file itself was a directory, copy the whole thing over. This
# seems unlikely, because I do not think it is a possible to download
# a folder, but better safe than sorry.
FileUtils.cp_r("#{downloaded_file}/.", project_dir)
else
|
ruby
|
{
"resource": ""
}
|
q26380
|
Omnibus.NetFetcher.extract
|
test
|
def extract
# Only used by tar
compression_switch = ""
compression_switch = "z" if downloaded_file.end_with?("gz")
compression_switch = "--lzma -" if downloaded_file.end_with?("lzma")
compression_switch = "j" if downloaded_file.end_with?("bz2")
compression_switch = "J" if downloaded_file.end_with?("xz")
if Ohai["platform"] == "windows"
if downloaded_file.end_with?(*TAR_EXTENSIONS) && source[:extract] != :seven_zip
returns = [0]
returns << 1 if source[:extract] == :lax_tar
shellout!("tar #{compression_switch}xf #{safe_downloaded_file} -C#{safe_project_dir}", returns: returns)
elsif downloaded_file.end_with?(*COMPRESSED_TAR_EXTENSIONS)
Dir.mktmpdir do |temp_dir|
log.debug(log_key) { "Temporarily extracting `#{safe_downloaded_file}' to `#{temp_dir}'" }
shellout!("7z.exe x #{safe_downloaded_file} -o#{windows_safe_path(temp_dir)} -r -y")
fname = File.basename(downloaded_file, File.extname(downloaded_file))
|
ruby
|
{
"resource": ""
}
|
q26381
|
Omnibus.NetFetcher.digest_type
|
test
|
def digest_type
DIGESTS.each do |digest|
return digest if source.key? digest
|
ruby
|
{
"resource": ""
}
|
q26382
|
Omnibus.NetFetcher.verify_checksum!
|
test
|
def verify_checksum!
log.info(log_key) { "Verifying checksum" }
expected = checksum
actual
|
ruby
|
{
"resource": ""
}
|
q26383
|
Omnibus.Packager::WindowsBase.signing_identity
|
test
|
def signing_identity(thumbprint = NULL, params = NULL)
unless null?(thumbprint)
@signing_identity = {}
unless thumbprint.is_a?(String)
raise InvalidValue.new(:signing_identity, "be a String")
end
@signing_identity[:thumbprint] = thumbprint
if !null?(params)
unless params.is_a?(Hash)
raise InvalidValue.new(:params, "be a Hash")
end
valid_keys = [:store, :timestamp_servers, :machine_store, :algorithm]
invalid_keys = params.keys - valid_keys
unless invalid_keys.empty?
raise InvalidValue.new(:params, "contain keys from [#{valid_keys.join(',
|
ruby
|
{
"resource": ""
}
|
q26384
|
Omnibus.Packager::WindowsBase.sign_package
|
test
|
def sign_package(package_file)
success = false
timestamp_servers.each do |ts|
success = try_sign(package_file, ts)
break
|
ruby
|
{
"resource": ""
}
|
q26385
|
Omnibus.Packager::WindowsBase.certificate_subject
|
test
|
def certificate_subject
return "CN=#{project.package_name}" unless signing_identity
store = machine_store? ? "LocalMachine" : "CurrentUser"
cmd = Array.new.tap do |arr|
arr << "powershell.exe"
arr << "-ExecutionPolicy Bypass"
arr << "-NoProfile"
|
ruby
|
{
"resource": ""
}
|
q26386
|
Omnibus.Software.manifest_entry
|
test
|
def manifest_entry
@manifest_entry ||= if manifest
log.info(log_key) { "Using user-supplied manifest entry for #{name}" }
manifest.entry_for(name)
else
|
ruby
|
{
"resource": ""
}
|
q26387
|
Omnibus.Software.source
|
test
|
def source(val = NULL)
unless null?(val)
unless val.is_a?(Hash)
raise InvalidValue.new(:source,
"be a kind of `Hash', but was `#{val.class.inspect}'")
end
val = canonicalize_source(val)
extra_keys = val.keys - [
:git, :file, :path, :url, # fetcher types
:md5, :sha1, :sha256, :sha512, # hash type - common to all fetchers
:cookie, :warning, :unsafe, :extract, :cached_name, :authorization, # used by net_fetcher
:options, # used by path_fetcher
:submodules # used by git_fetcher
]
unless extra_keys.empty?
raise InvalidValue.new(:source,
|
ruby
|
{
"resource": ""
}
|
q26388
|
Omnibus.Software.version
|
test
|
def version(val = NULL, &block)
final_version = apply_overrides(:version)
if block_given?
if val.equal?(NULL)
raise InvalidValue.new(:version,
"pass a block when given a version argument")
else
if val == final_version
#
# Unfortunately we need to make a specific logic here for license files.
# We support multiple calls `license_file` and we support overriding the
# license files inside a version block. We can not differentiate whether
# `license_file` is being called from a version block or not. So we need
# to check if the license files are being overridden during the call to
# block.
#
# If so we use the new set, otherwise we restore the old license files.
#
current_license_files = @license_files
@license_files = []
yield
|
ruby
|
{
"resource": ""
}
|
q26389
|
Omnibus.Software.whitelist_file
|
test
|
def whitelist_file(file)
file = Regexp.new(file) unless
|
ruby
|
{
"resource": ""
}
|
q26390
|
Omnibus.Software.project_file
|
test
|
def project_file
if fetcher && fetcher.is_a?(NetFetcher)
log.deprecated(log_key) do
"project_file (DSL). This is a property of the NetFetcher and will " \
"not be publically exposed in the next major release. In general, " \
"you should not be using this method in your software definitions " \
"as it is an internal implementation detail of the NetFetcher. If " \
"you disagree with this statement, you should open an issue on the " \
"Omnibus repository on GitHub an explain your use case. For now, " \
"I will return the path to the downloaded file on disk, but please " \
"rethink the problem you are trying to solve :)."
end
|
ruby
|
{
"resource": ""
}
|
q26391
|
Omnibus.Software.prepend_path
|
test
|
def prepend_path(*paths)
path_values = Array(paths)
path_values << ENV[path_key]
|
ruby
|
{
"resource": ""
}
|
q26392
|
Omnibus.Software.overrides
|
test
|
def overrides
if null?(@overrides)
# lazily initialized because we need the 'name' to be parsed first
@overrides = {}
|
ruby
|
{
"resource": ""
}
|
q26393
|
Omnibus.Software.version_for_cache
|
test
|
def version_for_cache
@version_for_cache ||= if fetcher.version_for_cache
fetcher.version_for_cache
elsif version
version
else
log.warn(log_key) do
"No version given! This is probably a bad thing. I am going to " \
|
ruby
|
{
"resource": ""
}
|
q26394
|
Omnibus.Software.fetcher
|
test
|
def fetcher
@fetcher ||=
if source_type == :url && File.basename(source[:url], "?*").end_with?(*NetFetcher::ALL_EXTENSIONS)
Fetcher.fetcher_class_for_source(source).new(manifest_entry, fetch_dir, build_dir)
else
|
ruby
|
{
"resource": ""
}
|
q26395
|
Omnibus.Software.shasum
|
test
|
def shasum
@shasum ||= begin
digest = Digest::SHA256.new
update_with_string(digest, project.shasum)
update_with_string(digest, builder.shasum)
update_with_string(digest, name)
update_with_string(digest, version_for_cache)
update_with_string(digest, FFI_Yajl::Encoder.encode(overrides))
|
ruby
|
{
"resource": ""
}
|
q26396
|
Omnibus.Software.canonicalize_source
|
test
|
def canonicalize_source(source)
if source.is_a?(Hash) && source[:github]
source = source.dup
|
ruby
|
{
"resource": ""
}
|
q26397
|
Omnibus.Packager::Makeself.write_makeselfinst
|
test
|
def write_makeselfinst
makeselfinst_staging_path = File.join(staging_dir, "makeselfinst")
render_template(resource_path("makeselfinst.erb"),
destination: makeselfinst_staging_path,
|
ruby
|
{
"resource": ""
}
|
q26398
|
Omnibus.GitCache.create_cache_path
|
test
|
def create_cache_path
if File.directory?(cache_path)
false
else
create_directory(File.dirname(cache_path))
git_cmd("init -q")
|
ruby
|
{
"resource": ""
}
|
q26399
|
Omnibus.GitCache.tag
|
test
|
def tag
return @tag if @tag
log.internal(log_key) { "Calculating tag" }
# Accumulate an array of all the software projects that come before
# the name and version we are tagging. So if you have
#
# build_order = [ 1, 2, 3, 4, 5 ]
#
# And we are tagging 3, you would get dep_list = [ 1, 2 ]
dep_list = software.project.library.build_order.take_while do |dep|
if dep.name == software.name && dep.version == software.version
false
else
true
end
end
log.internal(log_key) { "dep_list:
|
ruby
|
{
"resource": ""
}
|