hash: stringlengths (40 to 40)
diff: stringlengths (131 to 114k)
message: stringlengths (7 to 980)
project: stringlengths (5 to 67)
split: stringclasses (1 value)
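Each record below pairs a git diff with the commit message that describes it, plus the source project and split. As a minimal sketch of how rows with this schema could be consumed — assuming the data has been exported to a local JSON Lines file (the file name, and the use of JSONL at all, are illustrative assumptions, not something stated by this preview) — iteration might look like:

```python
import json

# Hypothetical local path; the actual file name and format are assumptions
# made for illustration, not part of the dataset preview above.
DATA_PATH = "commit_messages.train.jsonl"

def iter_records(path):
    """Yield one record per line; each record carries the fields
    hash, diff, message, project, and split listed in the schema above."""
    with open(path, encoding="utf-8") as f:
        for line in f:
            line = line.strip()
            if line:
                yield json.loads(line)

if __name__ == "__main__":
    for record in iter_records(DATA_PATH):
        # e.g. pair each diff with its target commit message
        print(record["project"], "->", record["message"][:80])
```

Only the five field names come from the schema; everything else in the sketch (path, file format, printing) is hypothetical.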
da90e2eabaa596ba15c3321e7733e8eaee36f87d
diff --git a/lib/build/development/configuration.rb b/lib/build/development/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/build/development/configuration.rb +++ b/lib/build/development/configuration.rb @@ -11,15 +11,15 @@ module RhoDevelopment end def self.handledNetworkExceptions - return [Errno::ECONNREFUSED, Errno::EHOSTDOWN, Errno::EHOSTUNREACH, Net.const_defined?(:OpenTimeout) ? Net::OpenTimeout : Timeout::Error] + [Errno::ECONNREFUSED, Errno::EHOSTDOWN, Errno::EHOSTUNREACH, Net.const_defined?(:OpenTimeout) ? Net::OpenTimeout : Timeout::Error] end def self.own_ip_addresses - return Socket.ip_address_list.select { |each| each.ipv4? and !each.ipv4_loopback? and !each.ipv4_multicast? }.map { |each| each.ip_address }.uniq + Socket.ip_address_list.select { |each| each.ipv4? and !each.ipv4_loopback? and !each.ipv4_multicast? }.map { |each| each.ip_address }.uniq end def self.own_ip_address - return self.own_ip_addresses.first + self.own_ip_addresses.first end def self.webserver_alive_request @@ -31,7 +31,17 @@ module RhoDevelopment end def self.webserver_uri - return URI("http://#{self.own_ip_address}:#{self.webserver_port}") + URI("http://#{self.webserver_ip}:#{self.webserver_port}") + end + + def self.webserver_ip + config = self.read_configuration + web_server_config = config['webserver'] + if web_server_config.nil? || web_server_config['ip'].nil? + self.own_ip_address + else + web_server_config['ip'] + end end def self.webserver_port @@ -39,16 +49,15 @@ module RhoDevelopment end - def self.config_filename File.join(self.application_root, 'dev-config.yml') end def self.read_configuration if File.exist?(self.config_filename) - return YAML.load_file(self.config_filename) + YAML.load_file(self.config_filename) end - return {} + {} end def self.subscribers @@ -103,13 +112,13 @@ module RhoDevelopment def self.full_bundle_name 'upgrade_bundle.zip' end - + def self.document_root=(aString) config = self.read_configuration config['webserver'] = {'documentRoot' => aString} yml = config.to_yaml File.open(self.config_filename, 'w') { |file| file.write yml } - end + end def self.document_root config = self.read_configuration @@ -121,12 +130,10 @@ module RhoDevelopment document_root = web_server_config['documentRoot'] end FileUtils.mkpath(document_root) unless File.exist?(document_root) - return document_root + document_root end - - end end \ No newline at end of file diff --git a/lib/build/development/web_server.rb b/lib/build/development/web_server.rb index <HASH>..<HASH> 100644 --- a/lib/build/development/web_server.rb +++ b/lib/build/development/web_server.rb @@ -60,13 +60,15 @@ module RhoDevelopment def initialize document_root = Configuration::document_root - puts "Path '#{document_root}' will be used as web server document root".primary + + puts "Webserver URL: #{Configuration::webserver_ip}:#{Configuration::webserver_port}".primary + puts "Webserver document root: #{document_root}".primary print 'Cleaning document root directory... '.primary FileUtils.rm_rf("#{document_root}/.", secure: true) puts 'done'.success @tasks = Queue.new @web_server = WEBrick::HTTPServer.new( - :BindAddress => Configuration::own_ip_address, + :BindAddress => Configuration::webserver_ip, :Port => Configuration::webserver_port, :DocumentRoot => document_root, :ServerType => WEBrick::SimpleServer
[server side] Reading webserver IP on startup from dev-config.yml. If the setting is absent, use the first network interface
rhomobile_rhodes
train
9a0cebb9a7c9f7a0accbfcc5469212954f294a26
diff --git a/OpenStreetMapViewer/src/main/java/org/osmdroid/samplefragments/tileproviders/OfflinePickerSample.java b/OpenStreetMapViewer/src/main/java/org/osmdroid/samplefragments/tileproviders/OfflinePickerSample.java index <HASH>..<HASH> 100644 --- a/OpenStreetMapViewer/src/main/java/org/osmdroid/samplefragments/tileproviders/OfflinePickerSample.java +++ b/OpenStreetMapViewer/src/main/java/org/osmdroid/samplefragments/tileproviders/OfflinePickerSample.java @@ -304,8 +304,9 @@ public class OfflinePickerSample extends BaseSampleFragment implements View.OnCl mMapView.zoomToBoundingBox(src.getBoundsOsmdroid(), true); } else if (strName instanceof GeopackageRasterTileSource) { GeopackageRasterTileSource src = (GeopackageRasterTileSource) strName; - mMapView.zoomToBoundingBox(src.getBounds(), true); + mMapView.getController().setZoom(src.getMinimumZoomLevel()); + mMapView.zoomToBoundingBox(src.getBounds(), true); } dialog.dismiss();
#<I> minor fix to get the set center correct
osmdroid_osmdroid
train
165a7502f8c8429c770e469f91c1889c7f5bda71
diff --git a/user/view.php b/user/view.php index <HASH>..<HASH> 100644 --- a/user/view.php +++ b/user/view.php @@ -230,7 +230,7 @@ ' height="16" width="16" /></a>'); } if ($user->yahoo && !isset($hiddenfields['yahooid'])) { - print_row(get_string('yahooid').':', '<a href="http://edit.yahoo.com/config/send_webmesg?.target='.s($user->yahoo).'&amp;.src=pg">'.s($user->yahoo).'</a>'); + print_row(get_string('yahooid').':', '<a href="http://edit.yahoo.com/config/send_webmesg?.target='.urlencode($user->yahoo).'&amp;.src=pg">'.s($user->yahoo)." <img border=0 src=\"http://opi.yahoo.com/online?u=".urlencode($user->yahoo)."&m=g&t=0\" width=\"12\" height=\"12\" alt=\"\"></a>"); } if ($user->aim && !isset($hiddenfields['aimid'])) { print_row(get_string('aimid').':', '<a href="aim:goim?screenname='.s($user->aim).'">'.s($user->aim).'</a>');
Bug #<I> - add Yahoo online status to user profile; merged from MOODLE_<I>_STABLE
moodle_moodle
train
ec02f844839b06db348bc791a036045eaa120e69
diff --git a/push50.py b/push50.py index <HASH>..<HASH> 100644 --- a/push50.py +++ b/push50.py @@ -67,19 +67,25 @@ def local(slug, tool, update=True): # pull new commits if update=True if update: - _run(git("pull")) + _run(git("fetch")) else: # clone repo to local_path _run(f"git clone -b {slug.branch} https://github.com/{slug.org}/{slug.repo} {local_path}") problem_path = (local_path / slug.problem).absolute() + if not problem_path.exists(): + raise InvalidSlug(f"{slug.problem} does not exist at {slug.org}/{slug.repo}") + # get tool_yaml - with open(problem_path / ".cs50.yaml", "r") as f: - try: - tool_yaml = yaml.safe_load(f.read())[tool] - except KeyError: - raise InvalidSlug("Invalid slug for {}, did you mean something else?".format(tool)) + try: + with open(problem_path / ".cs50.yaml", "r") as f: + try: + tool_yaml = yaml.safe_load(f.read())[tool] + except KeyError: + raise InvalidSlug("Invalid slug for {}, did you mean something else?".format(tool)) + except FileNotFoundError: + raise InvalidSlug("Invalid slug, did you mean something else?") # if problem is not referencing root of repo if slug.problem != Path("."): @@ -577,6 +583,5 @@ if __name__ == "__main__": push("check50", "cs50/problems2/master/hello", "check50", prompt=prompt) - #global LOCAL_PATH #LOCAL_PATH = "./test" #print(local("cs50/problems2/master/hello", "check50"))
push->fetch, some additional error handling
cs50_lib50
train
656c0350d1bfc67bf2f25aab8033dd491eddfbc5
diff --git a/Doctrineum/Scalar/ScalarEnum.php b/Doctrineum/Scalar/ScalarEnum.php index <HASH>..<HASH> 100644 --- a/Doctrineum/Scalar/ScalarEnum.php +++ b/Doctrineum/Scalar/ScalarEnum.php @@ -78,7 +78,7 @@ class ScalarEnum extends StrictObject implements ScalarEnumInterface * @param string|int|float|bool $key * @return string */ - protected static function createKey($key) + protected static function createKey($key): string { return serialize($key); }
An inner method got a hardcoded return type
doctrineum_doctrineum-scalar
train
5facefebd9c8721346ed029cec09184a62443fad
diff --git a/lib/faraday/adapter/patron.rb b/lib/faraday/adapter/patron.rb index <HASH>..<HASH> 100644 --- a/lib/faraday/adapter/patron.rb +++ b/lib/faraday/adapter/patron.rb @@ -14,6 +14,13 @@ module Faraday if req = env[:request] session.timeout = session.connect_timeout = req[:timeout] if req[:timeout] session.connect_timeout = req[:open_timeout] if req[:open_timeout] + + if proxy = req[:proxy] + session.proxy = "http://#{proxy[:uri].host}:#{proxy[:uri].port}" + if proxy[:username] && proxy[:password] + session.proxy.insert(7, "#{proxy[:username]}:#{proxy[:password]}@") + end + end end response = begin
Add HTTP proxy support to Patron adapter
lostisland_faraday
train
b1b851ac5e25143db16204cfbc65d7f8b0aebab9
diff --git a/src/drivers/bookmarklet/driver.js b/src/drivers/bookmarklet/driver.js index <HASH>..<HASH> 100644 --- a/src/drivers/bookmarklet/driver.js +++ b/src/drivers/bookmarklet/driver.js @@ -33,8 +33,15 @@ .filter(s => s.src) .map(s => s.src); + var html = new XMLSerializer().serializeToString(document).split('\n'); + + html = html + .slice(0, 1000).concat(html.slice(html.length - 1000)) + .map(line => line.substring(0, 1000)) + .join('\n'); + wappalyzer.analyze(url, { - html: document.documentElement.innerHTML, + html: html, env: env, scripts: scripts });
Fix for #<I> (#<I>) Use the same code as in the WebExtension driver to extract HTML
AliasIO_Wappalyzer
train
24ba712aa5ae47d95ec45cc964b53882ee6e8c06
diff --git a/script/lib/config.py b/script/lib/config.py index <HASH>..<HASH> 100644 --- a/script/lib/config.py +++ b/script/lib/config.py @@ -8,7 +8,7 @@ import sys BASE_URL = os.getenv('LIBCHROMIUMCONTENT_MIRROR') or \ 'http://gh-contractor-zcbenz.s3.amazonaws.com/libchromiumcontent' -LIBCHROMIUMCONTENT_COMMIT = '8c7f5b9adb9372130a9295b7e0fb19355f613cf9' +LIBCHROMIUMCONTENT_COMMIT = 'dd51a41b42246b0b5159bfad5e327c8cf10bc585' PLATFORM = { 'cygwin': 'win32',
Update libchromiumcontent to have WebScopedRunV8Script
electron_electron
train
716135a7962e6530723e0012ef020b191456ad06
diff --git a/resources/views/subscribe/subscribe.blade.php b/resources/views/subscribe/subscribe.blade.php index <HASH>..<HASH> 100644 --- a/resources/views/subscribe/subscribe.blade.php +++ b/resources/views/subscribe/subscribe.blade.php @@ -1,5 +1,7 @@ @extends('layout.master') +@section('title', trans('cachet.subscriber.subscribe'). " | ". $site_title)) + @section('description', trans('cachet.meta.description.subscribe', ['app' => $site_title])) @section('content')
Configure a title for the subscribe page
CachetHQ_Cachet
train
5d55136ed16ce6db557105397e9d999a754faed2
diff --git a/analysis/api.py b/analysis/api.py index <HASH>..<HASH> 100644 --- a/analysis/api.py +++ b/analysis/api.py @@ -1421,8 +1421,9 @@ def coarsegrain(P, n): ################################################################################ def _showSparseConversionWarning(): - warnings.warn('Converting input to dense, since sensitivity is ' - 'currently only impled for dense types.', UserWarning) + msg = ("Converting input to dense, since this method is\n" + "currently only implemented for dense arrays") + warnings.warn(msg, UserWarning) def eigenvalue_sensitivity(T, k): r"""Sensitivity matrix of a specified eigenvalue.
[msm/analysis] Modified docstring in conversion warning. The old docstring suggested that a sensitivity method was called, but this warning is also issued for pcca
markovmodel_msmtools
train
b4c8ddceecfb2798bc40b4e813f4c84ac524fdd5
diff --git a/ford/__init__.py b/ford/__init__.py index <HASH>..<HASH> 100755 --- a/ford/__init__.py +++ b/ford/__init__.py @@ -120,7 +120,8 @@ LICENSES = { def convert_to_bool(name, option): - """Convert value 'option' to a bool, with a nice error message on failure""" + """Convert value 'option' to a bool, with a nice error message on + failure. Expects a list from the markdown meta-data extension""" if len(option) > 1: raise ValueError( f"Could not convert option '{name}' to bool: expected a single value but got a list ({option})" @@ -395,6 +396,9 @@ FORD will look in the provided paths for a modules.json file. if isinstance(default_type, bool): proj_data[option] = convert_to_bool(option, proj_data[option]) elif not isinstance(default_type, list): + # If it's not supposed to be a list, then it's + # probably supposed to be a single big block of text, + # like a description proj_data[option] = "\n".join(proj_data[option]) else: proj_data[option] = default
Tweak some comments on converting options
Fortran-FOSS-Programmers_ford
train
d0160c595a85285f60760e0d1d4afcbd7ab48a26
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index <HASH>..<HASH> 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -6,7 +6,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - python-version: [3.5, 3.6, 3.7, 3.8] + python-version: [3.6, 3.7, 3.8] steps: - name: Checkout repository uses: actions/checkout@v2 diff --git a/moj_irat/__init__.py b/moj_irat/__init__.py index <HASH>..<HASH> 100644 --- a/moj_irat/__init__.py +++ b/moj_irat/__init__.py @@ -0,0 +1,3 @@ +VERSION = (0, 5) +__version__ = '.'.join(map(str, VERSION)) +__author__ = 'Ministry of Justice Digital & Technology' diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -1,8 +1,14 @@ #!/usr/bin/env python +import importlib import os +import sys +import warnings from setuptools import setup +if sys.version_info[0:2] < (3, 6): + warnings.warn('This package is tested with Python version 3.6+') + root_path = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(root_path, 'README.rst')) as readme: @@ -11,17 +17,19 @@ with open(os.path.join(root_path, 'README.rst')) as readme: install_requires = ['Django>=2.2,<4', 'requests'] tests_require = ['flake8', 'responses'] +package_info = importlib.import_module('moj_irat') + setup( name='django-moj-irat', - version='0.5', - author='Ministry of Justice Digital & Technology', + version=package_info.__version__, + author=package_info.__author__, author_email='[email protected]', url='https://github.com/ministryofjustice/django-moj-irat', packages=['moj_irat'], include_package_data=True, license='MIT', - description="Tools to support adding a Django-based service to " - "Ministry of Justice's Incidence Response and Tuning", + description='Tools to support adding a Django-based service to ' + 'Ministry of Justice’s Incidence Response and Tuning', long_description=README, keywords='moj django irat monitoring', classifiers=[ @@ -32,7 +40,6 @@ setup( 'Natural Language :: English', 'Operating System :: OS Independent', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: 3.8',
Move to supporting python<I>+ only as prior versions have reached end-of-life
ministryofjustice_django-moj-irat
train
8307b2cb48f1831a200d9e6acd6febd2d82b1798
diff --git a/packages/node_modules/@webex/plugin-meetings/src/meeting/index.js b/packages/node_modules/@webex/plugin-meetings/src/meeting/index.js index <HASH>..<HASH> 100644 --- a/packages/node_modules/@webex/plugin-meetings/src/meeting/index.js +++ b/packages/node_modules/@webex/plugin-meetings/src/meeting/index.js @@ -5775,4 +5775,38 @@ export default class Meeting extends StatelessWebexPlugin { return isSuccess; } + + /** + * disableBNR API + * @returns {Promise<Boolean>} + * @public + * @memberof Meeting + */ + async disableBNR() { + LoggerProxy.logger.info('Meeting:index#disableBNR. Disable BNR called'); + let isSuccess = false; + + try { + if (typeof this.mediaProperties === 'undefined' || typeof this.mediaProperties.audioTrack === 'undefined') { + throw new Error("Meeting doesn't have an audioTrack attached"); + } + this.mediaProperties.audioTrack = WebRTCMedia.Effects.BNR.disableBNR(this.mediaProperties.audioTrack); + const audioStream = MediaUtil.createMediaStream([this.mediaProperties.audioTrack]); + + LoggerProxy.logger.info('Meeting:index#disableBNR. Raw media track obtained from WebRTC & sent to updateAudio'); + await this.updateAudio({ + sendAudio: true, + receiveAudio: true, + stream: audioStream + }); + this.isBnrEnabled = false; + isSuccess = true; + } + catch (error) { + LoggerProxy.logger.error(`Meeting:index#disableBNR. ${error}`); + throw error; + } + + return isSuccess; + } }
feat(plugin-meetings): disable BNR api included
webex_spark-js-sdk
train
0275feea9655f43248f0ec0b3bfaccde740b6937
diff --git a/src/preloadjs/TagLoader.js b/src/preloadjs/TagLoader.js index <HASH>..<HASH> 100644 --- a/src/preloadjs/TagLoader.js +++ b/src/preloadjs/TagLoader.js @@ -131,7 +131,7 @@ if (this.isAudio) { // Handlers for audio tags tag.onstalled = PreloadJS.proxy(this._handleStalled, this); - tag.addEventListener("canplaythrough", this.tagCompleteProxy); //LM: oncanplaythrough callback does not work in Chrome. + tag.addEventListener("canplaythrough", this.tagCompleteProxy, false); //LM: oncanplaythrough callback does not work in Chrome. } else { // Handlers for non-audio tags tag.onload = PreloadJS.proxy(this._handleTagLoad, this); @@ -178,7 +178,7 @@ // Delete handlers. var tag = this.getItem().tag; tag.onload = null; - tag.removeEventListener("canplaythrough", this.tagCompleteProxy); + tag.removeEventListener("canplaythrough", this.tagCompleteProxy, false); tag.onstalled = null; tag.onprogress = null; tag.onerror = null;
Added useCapture parameter on events to play nice with Firefox.
CreateJS_PreloadJS
train
8c6f7dad93a060b40bbb51ae7815eb89b59ece66
diff --git a/lib/filter_factory/active_record/condition.rb b/lib/filter_factory/active_record/condition.rb index <HASH>..<HASH> 100644 --- a/lib/filter_factory/active_record/condition.rb +++ b/lib/filter_factory/active_record/condition.rb @@ -29,11 +29,11 @@ module FilterFactory fail NotImplementedError, "all operator is not available for ActiveRecord" end - def in(obj) + def is_in(obj) obj.where("#{field_name} IN (?)", value) end - def nin(obj) + def not_in(obj) obj.where("#{field_name} NOT IN (?)", value) end diff --git a/lib/filter_factory/filter.rb b/lib/filter_factory/filter.rb index <HASH>..<HASH> 100644 --- a/lib/filter_factory/filter.rb +++ b/lib/filter_factory/filter.rb @@ -8,7 +8,7 @@ module FilterFactory attr_reader :fields - CONDITIONS = [:eq, :ne, :lt, :lte, :gt, :gte, :all, :in, :nin, :regex, :exists, :presents].freeze + CONDITIONS = [:eq, :ne, :lt, :lte, :gt, :gte, :all, :is_in, :not_in, :regex, :exists, :presents].freeze # Initializes new instance of Filter class. def initialize diff --git a/lib/filter_factory/mongoid/condition.rb b/lib/filter_factory/mongoid/condition.rb index <HASH>..<HASH> 100644 --- a/lib/filter_factory/mongoid/condition.rb +++ b/lib/filter_factory/mongoid/condition.rb @@ -29,11 +29,11 @@ module FilterFactory obj.where(field_name => { '$all' => value }) end - def in(obj) + def is_in(obj) obj.where(field_name => { '$in' => value }) end - def nin(obj) + def not_in(obj) obj.where(field_name => { '$nin' => value }) end diff --git a/spec/filter_factory/active_record/model_spec.rb b/spec/filter_factory/active_record/model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/filter_factory/active_record/model_spec.rb +++ b/spec/filter_factory/active_record/model_spec.rb @@ -105,7 +105,7 @@ RSpec.describe ARPost do sample = posts.sample(3) filter = FilterFactory.create do - field :id, :in + is_in :id end filter.id = sample.map(&:id) @@ -116,7 +116,7 @@ RSpec.describe ARPost do it 'returns records with column values not in specified values' do sample = posts.sample(3) - filter = FilterFactory.create { nin :id } + filter = FilterFactory.create { not_in :id } filter.id = sample.map(&:id) expected_result = (posts.map(&:id) - sample.map(&:id)).sort diff --git a/spec/filter_factory/mongoid/model_spec.rb b/spec/filter_factory/mongoid/model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/filter_factory/mongoid/model_spec.rb +++ b/spec/filter_factory/mongoid/model_spec.rb @@ -96,7 +96,7 @@ RSpec.describe MPost do sample = posts.sample(3) filter = FilterFactory.create do - field :id, :in + is_in :id end filter.id = sample.map(&:id) @@ -107,7 +107,7 @@ RSpec.describe MPost do it 'returns records with column values not in specified values' do sample = posts.sample(3) - filter = FilterFactory.create { nin :id } + filter = FilterFactory.create { not_in :id } filter.id = sample.map(&:id) expected_result = (posts.map(&:id) - sample.map(&:id)).sort
Rename `in` and `nin` filter conditions to `is_in` and `not_in`.
hck_filter_factory
train
c777eba2c1dd39d383ea71baf55f25e4637906a3
diff --git a/salt/modules/file.py b/salt/modules/file.py index <HASH>..<HASH> 100644 --- a/salt/modules/file.py +++ b/salt/modules/file.py @@ -4005,7 +4005,7 @@ def extract_hash(hash_fn, hash_matched = True except IndexError: pass - elif re.match(source_hash_name.replace('.', r'\.') + r'\s+', + elif re.match(re.escape(file_name) + r'\s+', line): _add_to_matches(found, line, 'source_hash_name', source_hash_name, matched) @@ -4023,7 +4023,7 @@ def extract_hash(hash_fn, hash_matched = True except IndexError: pass - elif re.match(file_name.replace('.', r'\.') + r'\s+', line): + elif re.match(re.escape(file_name) + r'\s+', line): _add_to_matches(found, line, 'file_name', file_name, matched) hash_matched = True
Use re.escape to escape paths before handing them to re.match. Addresses #<I>
saltstack_salt
train
9f47423a29b6de897c2b84e894bcc7707a6287b0
diff --git a/lib/pronto/config.rb b/lib/pronto/config.rb index <HASH>..<HASH> 100644 --- a/lib/pronto/config.rb +++ b/lib/pronto/config.rb @@ -40,7 +40,7 @@ module Pronto end def max_warnings - ENV['PRONTO_MAX_WARNINGS'] || @config_hash['max_warnings'] + ENV['PRONTO_MAX_WARNINGS'] && Integer(ENV['PRONTO_MAX_WARNINGS']) || @config_hash['max_warnings'] end def message_format(formatter) diff --git a/spec/pronto/config_spec.rb b/spec/pronto/config_spec.rb index <HASH>..<HASH> 100644 --- a/spec/pronto/config_spec.rb +++ b/spec/pronto/config_spec.rb @@ -56,6 +56,35 @@ module Pronto end end + describe '#max_warnings' do + subject { config.max_warnings } + + context 'from env variable' do + context 'with a valid value' do + before { stub_const('ENV', 'PRONTO_MAX_WARNINGS' => '20') } + it { should == 20 } + end + + context 'with an invalid value' do + before { stub_const('ENV', 'PRONTO_MAX_WARNINGS' => 'twenty') } + + specify do + -> { subject }.should raise_error(ArgumentError) + end + end + end + + context 'from config hash' do + let(:config_hash) { { 'max_warnings' => 40 } } + it { should == 40 } + end + + context 'default' do + let(:config_hash) { ConfigFile::EMPTY } + it { should == nil } + end + end + describe '#message_format' do subject { config.message_format('whatever') }
Convert `PRONTO_MAX_WARNINGS` to Integer (#<I>) * Convert `PRONTO_MAX_WARNINGS` to Integer ENV variables are always read as Strings. When trying to set this from an environment variable I got an exception at [`Pronto::Runners#exceeds_max?`](<URL>) since it tried to compare an Integer to a String. * Reject invalid `max_warnings` from ENV
prontolabs_pronto
train
3f21efb8d891c873572ed2192e81525d5c05a057
diff --git a/src/jquery.fancytree.js b/src/jquery.fancytree.js index <HASH>..<HASH> 100644 --- a/src/jquery.fancytree.js +++ b/src/jquery.fancytree.js @@ -1902,7 +1902,7 @@ Fancytree.prototype = /**@lends Fancytree*/{ * data was rendered. */ nodeLoadChildren: function(ctx, source) { - var children, + var children = null, tree = ctx.tree, node = ctx.node, dfd; @@ -1910,7 +1910,6 @@ Fancytree.prototype = /**@lends Fancytree*/{ if($.isFunction(source)){ source = source(); } -// alert("nodeLoadChildren() source = " + JSON.stringify(source)); if(source.url || $.isFunction(source.done)){ tree.nodeSetStatus(ctx, "loading"); if(source.url){ @@ -1941,8 +1940,19 @@ Fancytree.prototype = /**@lends Fancytree*/{ } dfd.done(function(data, textStatus, jqXHR){ tree.nodeSetStatus(ctx, "ok"); + if(typeof data === "string"){ $.error("Ajax request returned a string (did you get the JSON dataType wrong?)."); } +// alert("nodeLoadChildren() source = " + JSON.stringify(source)); + // postProcess is similar to the standard dataFilter hook, + // but it is also called for JSONP + if( ctx.options.postProcess ){ + // TODO: enable and test +// data = options.postProcess.call(this, data, this.dataType); + } else if (data && data.hasOwnProperty("d") && ctx.options.enableAspx ) { + // Process ASPX WebMethod JSON object inside "d" property + data = (typeof data.d === "string") ? $.parseJSON(data.d) : data.d; + } children = data; - if(typeof children === "string"){ $.error("Ajax request returned a string (did you get the JSON dataType wrong?)."); } + }).fail(function(jqXHR, textStatus, errorThrown){ tree.nodeSetStatus(ctx, "error", textStatus, jqXHR.status + ": " + errorThrown); alert("error: " + textStatus + " (" + jqXHR.status + ": " + (errorThrown.message || errorThrown) + ")"); @@ -3100,6 +3110,7 @@ $.widget("ui.fancytree", clickFolderMode: 4, // TODO: required anymore? disabled: false, + enableAspx: true, // TODO: document extensions: [], fx: { height: "toggle", duration: 200 }, // hooks: {}, @@ -3143,7 +3154,8 @@ $.widget("ui.fancytree", lastsib: "fancytree-lastsib" }, // events - lazyload: null + lazyload: null, + postProcess: null }, /* Set up the widget, Called on first $().fancytree() */ _create: function() {
#<I>: Request: Reinstate ".d" property parsing as dynatree used to
mar10_fancytree
train
03866e828877bc4666d709d29ea5d1a1557c2619
diff --git a/raven/raven.go b/raven/raven.go index <HASH>..<HASH> 100644 --- a/raven/raven.go +++ b/raven/raven.go @@ -142,8 +142,8 @@ func (client Client) CaptureMessagef(format string, a ...interface{}) (result st // sends a packet to the sentry server with a given timestamp func (client Client) send(packet []byte, timestamp time.Time) (response *http.Response, err error) { apiURL := *client.URL - apiURL.Path = path.Join(apiURL.Path, "/api/"+client.Project+"/store/") - apiURL.User = nil + apiURL.Path = path.Join(apiURL.Path, "/api/"+client.Project+"/store") + apiURL.Path += "/" location := apiURL.String() // for loop to follow redirects
fix the trailing slash issue.
kisielk_raven-go
train
39aac6ba46ba1c33d4502f31a32093d2636949ad
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -3,6 +3,7 @@ language: go go: # NB: order matters - matrix items that don't specify will use the # first value (ditto for `os` below) + - 1.11.x - 1.9.x - 1.8.x diff --git a/pkg/kubecfg/delete.go b/pkg/kubecfg/delete.go index <HASH>..<HASH> 100644 --- a/pkg/kubecfg/delete.go +++ b/pkg/kubecfg/delete.go @@ -80,7 +80,7 @@ func (c DeleteCmd) Run(apiObjects []*unstructured.Unstructured) error { return fmt.Errorf("Error deleting %s: %s", desc, err) } - log.Debugf("Deleted object: ", obj) + log.Debug("Deleted object: ", obj) } return nil diff --git a/pkg/kubecfg/diff.go b/pkg/kubecfg/diff.go index <HASH>..<HASH> 100644 --- a/pkg/kubecfg/diff.go +++ b/pkg/kubecfg/diff.go @@ -51,7 +51,7 @@ func (c DiffCmd) Run(apiObjects []*unstructured.Unstructured, out io.Writer) err diffFound := false for _, obj := range apiObjects { desc := fmt.Sprintf("%s %s", utils.ResourceNameFor(c.Discovery, obj), utils.FqName(obj)) - log.Debugf("Fetching ", desc) + log.Debug("Fetching ", desc) client, err := utils.ClientForResource(c.ClientPool, c.Discovery, obj, c.DefaultNamespace) if err != nil {
Fix test with go<I>. Go test got more fussy, now it runs vet.
bitnami_kubecfg
train
61e1ea8ebcf1f614a50229e29eb27d00bcad6520
diff --git a/blueocean-rest-impl/src/main/java/io/jenkins/blueocean/service/embedded/rest/ActionProxiesImpl.java b/blueocean-rest-impl/src/main/java/io/jenkins/blueocean/service/embedded/rest/ActionProxiesImpl.java index <HASH>..<HASH> 100644 --- a/blueocean-rest-impl/src/main/java/io/jenkins/blueocean/service/embedded/rest/ActionProxiesImpl.java +++ b/blueocean-rest-impl/src/main/java/io/jenkins/blueocean/service/embedded/rest/ActionProxiesImpl.java @@ -23,7 +23,7 @@ public class ActionProxiesImpl extends BlueActionProxy { private final Action action; private final Reachable parent; private static final Logger logger = LoggerFactory.getLogger(ActionProxiesImpl.class); - private static final ImmutableSet<String> BANNED_ACTIONS = ImmutableSet.of("org.jenkinsci.plugins.workflow.job.views.FlowGraphAction"); + private static final ImmutableSet<String> BANNED_ACTIONS = ImmutableSet.of("org.jenkinsci.plugins.workflow.job.views.FlowGraphAction", "hudson.plugins.jobConfigHistory.JobConfigHistoryProjectAction"); public ActionProxiesImpl(Action action, Reachable parent) { this.action = action;
JENKINS-<I># Exclude job history config action (#<I>)
jenkinsci_blueocean-plugin
train
9af9e35482bb8fe45a0fa9b1ffb6f4558324122f
diff --git a/projects/impl/src/main/java/org/jboss/forge/addon/projects/ui/BuildCommand.java b/projects/impl/src/main/java/org/jboss/forge/addon/projects/ui/BuildCommand.java index <HASH>..<HASH> 100644 --- a/projects/impl/src/main/java/org/jboss/forge/addon/projects/ui/BuildCommand.java +++ b/projects/impl/src/main/java/org/jboss/forge/addon/projects/ui/BuildCommand.java @@ -83,7 +83,7 @@ public class BuildCommand extends AbstractProjectCommand builder.runTests(false); } - if (profile.getValue() != null) + if (profile.hasValue()) { builder.addArguments("-P" + profile.getValue()); }
Profile input should use hasValue to test for contents
forge_core
train
b20e3259bc4289eec9d94065122e2e71c782a687
diff --git a/test/test-boolean-default.js b/test/test-boolean-default.js index <HASH>..<HASH> 100644 --- a/test/test-boolean-default.js +++ b/test/test-boolean-default.js @@ -47,7 +47,7 @@ describe('templates with boolean, 0, or "" default values', function () { }; var r = j2j.run(template, { - id: 'test', + id: 'test' }); expect(r).to.deep.equal({ @@ -71,7 +71,7 @@ describe('templates with boolean, 0, or "" default values', function () { }; var r = j2j.run(template, { - id: 'test', + id: 'test' }); expect(r).to.deep.equal({ @@ -95,7 +95,7 @@ describe('templates with boolean, 0, or "" default values', function () { }; var r = j2j.run(template, { - id: 'test', + id: 'test' }); expect(r).to.deep.equal({ diff --git a/test/test-content.js b/test/test-content.js index <HASH>..<HASH> 100644 --- a/test/test-content.js +++ b/test/test-content.js @@ -22,6 +22,15 @@ describe('content', function () { } }); + it('case-content-0: basic default for array with function', function () { + var template = case_0.template; + template.default = function () { + return []; + }; + var actual = engine.run(template, []); + expect(actual).to.deep.equal([]); + }); + it('case-content-1: array', function () { var template = case_1.template; var n = case_1.inputs.length;
Modified test-content.js, added a test for template.default for array.
amida-tech_jsonapter
train
59a7d10e8520ba62db239c6260148d6237e1560e
diff --git a/core/src/main/java/org/seedstack/business/assembler/modelmapper/ModelMapperAssembler.java b/core/src/main/java/org/seedstack/business/assembler/modelmapper/ModelMapperAssembler.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/org/seedstack/business/assembler/modelmapper/ModelMapperAssembler.java +++ b/core/src/main/java/org/seedstack/business/assembler/modelmapper/ModelMapperAssembler.java @@ -10,10 +10,14 @@ package org.seedstack.business.assembler.modelmapper; import net.jodah.typetools.TypeResolver; import org.modelmapper.ModelMapper; import org.seedstack.business.assembler.AbstractBaseAssembler; +import org.seedstack.business.assembler.AssemblerErrorCodes; import org.seedstack.business.assembler.BaseAssembler; import org.seedstack.business.domain.AggregateRoot; +import org.seedstack.seed.SeedException; import org.seedstack.seed.core.utils.SeedReflectionUtils; +import java.lang.reflect.Type; + /** * This assembler automatically assembles aggregates in DTO and vice versa. * @@ -31,7 +35,7 @@ public abstract class ModelMapperAssembler<A extends AggregateRoot<?>, D> extend public ModelMapperAssembler() { initModelMappers(); Class<? extends BaseAssembler> class1 = (Class<? extends BaseAssembler>) SeedReflectionUtils.cleanProxy(getClass()); - dtoClass = (Class<D>) TypeResolver.resolveRawArguments(class1.getGenericSuperclass(), class1)[1]; + dtoClass = (Class<D>) TypeResolver.resolveRawArguments(getGenericType(class1), class1)[1]; } public ModelMapperAssembler(Class<D> dtoClass) { @@ -54,7 +58,7 @@ public abstract class ModelMapperAssembler<A extends AggregateRoot<?>, D> extend mergeModelMapper.map(sourceDto, targetAggregate); } - void initModelMappers() { + private void initModelMappers() { this.assembleModelMapper = new ModelMapper(); configureAssembly(assembleModelMapper); @@ -62,6 +66,19 @@ public abstract class ModelMapperAssembler<A extends AggregateRoot<?>, D> extend configureMerge(mergeModelMapper); } + private Type getGenericType(Class<?> aClass) { + Class<?> superclass = aClass.getSuperclass(); + while (!ModelMapperAssembler.class.equals(superclass) && superclass != null) { + superclass = superclass.getSuperclass(); + } + + if (superclass == null) { + throw SeedException.createNew(AssemblerErrorCodes.UNABLE_TO_RESOLVE_GENERIC_TYPE); + } + + return superclass.getGenericSuperclass(); + } + protected abstract void configureAssembly(ModelMapper modelMapper); protected abstract void configureMerge(ModelMapper modelMapper); diff --git a/core/src/test/java/org/seedstack/business/internal/assembler/modelmapper/ModelMapperAssemblerTest.java b/core/src/test/java/org/seedstack/business/internal/assembler/modelmapper/ModelMapperAssemblerTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/org/seedstack/business/internal/assembler/modelmapper/ModelMapperAssemblerTest.java +++ b/core/src/test/java/org/seedstack/business/internal/assembler/modelmapper/ModelMapperAssemblerTest.java @@ -13,8 +13,8 @@ import org.junit.Before; import org.junit.Test; import org.modelmapper.ModelMapper; import org.modelmapper.PropertyMap; -import org.seedstack.business.domain.BaseAggregateRoot; import org.seedstack.business.assembler.modelmapper.ModelMapperAssembler; +import org.seedstack.business.domain.BaseAggregateRoot; import org.seedstack.business.internal.assembler.DefaultModelMapperAssembler; import java.util.HashMap; @@ -46,6 +46,19 @@ public class ModelMapperAssemblerTest { } } + static abstract class AbstractAutoAssembler<T> extends ModelMapperAssembler<Order, T> { + } + + 
static class InheritingAutoAssembler extends AbstractAutoAssembler<DummyDTO> { + @Override + protected void configureAssembly(ModelMapper modelMapper) { + } + + @Override + protected void configureMerge(ModelMapper modelMapper) { + } + } + @Before public void before() { modelMapperAssembler = new AutoAssembler(); @@ -53,6 +66,11 @@ public class ModelMapperAssemblerTest { } @Test + public void testInheritingAssembler() { + new InheritingAutoAssembler(); + } + + @Test public void testAssembleDtoFromAggregate() { Order order = new Order(new Customer(new Name("John", "Doe")), new Address("main street", "bevillecity"), null, null); @@ -266,6 +284,10 @@ public class ModelMapperAssemblerTest { } } + static class DummyDTO { + + } + static class OrderDTO { String customerFirstName; String customerLastName; diff --git a/specs/src/main/java/org/seedstack/business/assembler/AssemblerErrorCodes.java b/specs/src/main/java/org/seedstack/business/assembler/AssemblerErrorCodes.java index <HASH>..<HASH> 100644 --- a/specs/src/main/java/org/seedstack/business/assembler/AssemblerErrorCodes.java +++ b/specs/src/main/java/org/seedstack/business/assembler/AssemblerErrorCodes.java @@ -14,5 +14,6 @@ import org.seedstack.seed.ErrorCode; */ public enum AssemblerErrorCodes implements ErrorCode { UNABLE_TO_FIND_ASSEMBLER_WITH_QUALIFIER, - UNABLE_TO_FIND_ASSEMBLER + UNABLE_TO_FIND_ASSEMBLER, + UNABLE_TO_RESOLVE_GENERIC_TYPE }
Fix bug on ModelMapper assemblers with inheritance
seedstack_business
train
f6221cd9c8e615ed86729d74b09cfb65d9f3faa0
diff --git a/core/src/test/java/smile/classification/MLPTest.java b/core/src/test/java/smile/classification/MLPTest.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/smile/classification/MLPTest.java +++ b/core/src/test/java/smile/classification/MLPTest.java @@ -179,7 +179,7 @@ public class MLPTest { System.out.format("----- epoch %d -----%n", epoch); int[] permutation = MathEx.permutate(x.length); int i = 0; - for (; i < x.length-batch;) { + while (i < x.length-batch) { for (int j = 0; j < batch; j++, i++) { batchx[j] = x[permutation[i]]; batchy[j] = Segment.y[permutation[i]]; @@ -239,9 +239,9 @@ public class MLPTest { smile.data.Serialize.read(temp); } - @Test(expected = Test.None.class) - public void testUSPSMiniBatch() throws Exception { - System.out.println("USPS Mini-Batch Learning"); + @Test + public void testUSPSMiniBatch() { + System.out.println("USPS Mini-Batch"); MathEx.setSeed(19650218); // to get repeatable results. @@ -258,18 +258,23 @@ public class MLPTest { Layer.mle(k, OutputFunction.SIGMOID) ); - model.setLearningRate(TimeFunction.linear(0.01, 20000, 0.001)); + model.setLearningRate( + TimeFunction.piecewise( + new int[] {1000, 2000, 3000, 4000, 5000}, + new double[]{0.01, 0.009, 0.008, 0.007, 0.006, 0.005} + ) + ); model.setRMSProp(0.9, 1E-7); int batch = 20; double[][] batchx = new double[batch][]; int[] batchy = new int[batch]; int error = 0; - for (int epoch = 1; epoch <= 15; epoch++) { + for (int epoch = 1; epoch <= 5; epoch++) { System.out.format("----- epoch %d -----%n", epoch); int[] permutation = MathEx.permutate(x.length); int i = 0; - for (; i < x.length-batch;) { + while (i < x.length-batch) { for (int j = 0; j < batch; j++, i++) { batchx[j] = x[permutation[i]]; batchy[j] = USPS.y[permutation[i]]; @@ -286,6 +291,6 @@ public class MLPTest { System.out.println("Test Error = " + error); } - assertEquals(127, error); + assertEquals(168, error); } } \ No newline at end of file
tune USPS minibatch learning rate
haifengl_smile
train
1eae2bd2cb8d133a851184bd5bddbc874bf9493d
diff --git a/lib/pidgin2adium/log_generator.rb b/lib/pidgin2adium/log_generator.rb index <HASH>..<HASH> 100644 --- a/lib/pidgin2adium/log_generator.rb +++ b/lib/pidgin2adium/log_generator.rb @@ -2,7 +2,7 @@ module Pidgin2Adium class LogGenerator include Pidgin2Adium def initialize(service, user_SN, partner_SN, adium_chat_time_start, dest_dir_base, force=false) - @service = service + # service is used below @user_SN = user_SN @partner_SN = partner_SN @adium_chat_time_start = adium_chat_time_start @@ -20,19 +20,20 @@ module Pidgin2Adium 'msn' => 'MSN', 'yahoo' => 'Yahoo'} - service_name = @SERVICE_NAME_MAP[@service.downcase] - dest_dir_real = File.join(@dest_dir_base, "#{service_name}.#{@user_SN}", @partner_SN, "#{@partner_SN} (#{@adium_chat_time_start}).chatlog") + @service_name = @SERVICE_NAME_MAP[service.downcase] + dest_dir_real = File.join(@dest_dir_base, "#{@service_name}.#{@user_SN}", @partner_SN, "#{@partner_SN} (#{@adium_chat_time_start}).chatlog") FileUtils.mkdir_p(dest_dir_real) @dest_file_path = dest_dir_real << '/' << "#{@partner_SN} (#{@adium_chat_time_start}).xml" end + # :nodoc: def file_exists? return File.exist?(@dest_file_path) end + # Given an array of Message, Status, and/or Event objects created by LogParser, generates # Returns path of output file. def generate(chat_array) - # chat_array is an array of Message, Status, and Event objects created by LogParser if not @force return FILE_EXISTS if file_exists? end @@ -40,12 +41,11 @@ module Pidgin2Adium # TODO: inject? map! ? chat_array.each { |obj| all_msgs << obj.to_s } - # no \n before </chat> because all_msgs has it already - ret = sprintf('<?xml version="1.0" encoding="UTF-8" ?>'<<"\n"+ - '<chat xmlns="http://purl.org/net/ulf/ns/0.4-02" account="%s" service="%s">'<<"\n"<<'%s</chat>', @user_SN, service_name, all_msgs) - outfile = File.new(@dest_file_path, 'w') - outfile.puts(ret) + # no \n before </chat> because all_msgs has it already + outfile.printf('<?xml version="1.0" encoding="UTF-8" ?>'<<"\n"+ + '<chat xmlns="http://purl.org/net/ulf/ns/0.4-02" account="%s" service="%s">'<<"\n"<<'%s</chat>', + @user_SN, @service_name, all_msgs) outfile.close return @dest_file_path end
Added documentation. service_name is now an instance variable (@service_name) so that it's actually used instead of being translated as a blank string
gabebw_pipio
train
95e0d8d8be8412454ac7332877582c534d7f9c5a
diff --git a/src/Caouecs/Sirtrevorjs/SirTrevorJs.php b/src/Caouecs/Sirtrevorjs/SirTrevorJs.php index <HASH>..<HASH> 100644 --- a/src/Caouecs/Sirtrevorjs/SirTrevorJs.php +++ b/src/Caouecs/Sirtrevorjs/SirTrevorJs.php @@ -221,6 +221,7 @@ class SirTrevorJs 'language' => self::defineParam('language', $params, $config), 'uploadUrl' => self::defineParam('uploadUrl', $params, $config), 'tweetUrl' => self::defineParam('tweetUrl', $params, $config), + 'version' => $config['version'], ]; } diff --git a/src/config/sir-trevor-js.php b/src/config/sir-trevor-js.php index <HASH>..<HASH> 100644 --- a/src/config/sir-trevor-js.php +++ b/src/config/sir-trevor-js.php @@ -62,4 +62,9 @@ return [ // View //'view' => 'sirtrevorjs' + + // Version of Sir Trevor JS + // + // for i18n + 'version' => '0.4' ]; diff --git a/src/views/js.blade.php b/src/views/js.blade.php index <HASH>..<HASH> 100644 --- a/src/views/js.blade.php +++ b/src/views/js.blade.php @@ -1,7 +1,9 @@ <script type="text/javascript"> $(function(){ -@if ($config['language'] != "en") +@if ($config['language'] != "en" && $config['version'] == '0.3') + SirTrevor.LANGUAGE = '{!! $config['language'] !!}'; +@elseif ($config['language'] != "en") SirTrevor.config.language = '{!! $config['language'] !!}'; @endif
fix: language and old version of Sir Trevor Js
caouecs_Laravel-SirTrevorJS
train
920b9e00f4cf548035318ec2a81f7dead58f335d
diff --git a/lib/change.js b/lib/change.js index <HASH>..<HASH> 100644 --- a/lib/change.js +++ b/lib/change.js @@ -55,7 +55,11 @@ function Change(options) { self._modification = attr; return; } - Object.keys(attr).forEach(function(k) { + var keys = Object.keys(attr); + if (keys.length > 1) + throw new Error('Only one attribute per Change allowed'); + + keys.forEach(function(k) { var _attr = new Attribute({type: k}); if (Array.isArray(attr[k])) { attr[k].forEach(function(v) { diff --git a/lib/client.js b/lib/client.js index <HASH>..<HASH> 100644 --- a/lib/client.js +++ b/lib/client.js @@ -435,17 +435,41 @@ Client.prototype.exop = function(name, value, controls, callback) { Client.prototype.modify = function(name, change, controls, callback) { if (typeof(name) !== 'string') throw new TypeError('name (string) required'); - if (!Array.isArray(change) && !(change instanceof Change)) + if (typeof(change) !== 'object') throw new TypeError('change (Change) required'); - if (!Array.isArray(change)) { - var save = change; - change = []; - change.push(save); + + var changes = []; + + function changeFromObject(change) { + if (!change.operation && !change.type) + throw new Error('change.operation required'); + if (typeof(change.modification) !== 'object') + throw new Error('change.modification (object) required'); + + Object.keys(change.modification).forEach(function(k) { + var mod = {}; + mod[k] = change.modification[k]; + changes.push(new Change({ + operation: change.operation || change.type, + modification: mod + })); + }); } - change.forEach(function(c) { - if (!(c instanceof Change)) - throw new TypeError('change ([Change]) required'); - }); + + if (change instanceof Change) { + changes.push(change); + } else if (Array.isArray(change)) { + change.forEach(function(c) { + if (c instanceof Change) { + changes.push(c); + } else { + changeFromObject(c); + } + }); + } else { + changeFromObject(change); + } + if (typeof(controls) === 'function') { callback = controls; controls = []; @@ -457,7 +481,7 @@ Client.prototype.modify = function(name, change, controls, callback) { var req = new ModifyRequest({ object: dn.parse(name), - changes: change, + changes: changes, controls: controls }); diff --git a/tst/change.test.js b/tst/change.test.js index <HASH>..<HASH> 100644 --- a/tst/change.test.js +++ b/tst/change.test.js @@ -50,6 +50,23 @@ test('new with args', function(t) { }); +test('GH-31 (multiple attributes per Change)', function(t) { + try { + var change = new Change({ + operation: 'replace', + modification: { + cn: 'foo', + sn: 'bar' + } + }); + t.fail('should have thrown'); + } catch (e) { + t.ok(e); + t.end(); + } +}); + + test('toBer', function(t) { var change = new Change({ operation: 'Add', diff --git a/tst/client.test.js b/tst/client.test.js index <HASH>..<HASH> 100644 --- a/tst/client.test.js +++ b/tst/client.test.js @@ -324,6 +324,23 @@ test('modify array success', function(t) { }); +test('modify change plain object success (GH-31)', function(t) { + var change = { + type: 'replace', + modification: { + cn: 'test', + sn: 'bar' + } + }; + client.modify('cn=modify, ' + SUFFIX, change, function(err, res) { + t.ifError(err); + t.ok(res); + t.equal(res.status, 0); + t.end(); + }); +}); + + test('modify DN new RDN only', function(t) { client.modifyDN('cn=old, ' + SUFFIX, 'cn=new', function(err, res) { t.ifError(err);
GH-<I> allow changing multiple fields in a single ldap_modify with better object syntax
joyent_node-ldapjs
train
7d6f39409e52ba3c52549612bfba558d6b6f9563
diff --git a/safe_qgis/keywords_dialog.py b/safe_qgis/keywords_dialog.py index <HASH>..<HASH> 100644 --- a/safe_qgis/keywords_dialog.py +++ b/safe_qgis/keywords_dialog.py @@ -685,3 +685,7 @@ class KeywordsDialog(QtGui.QDialog, Ui_KeywordsDialogBase): if idxKey > -1: self.cboKeyword.setCurrentIndex(idxKey) self.lePredefinedValue.setText(myTempValue) + else: + self.radUserDefined.setChecked(True) + self.leKey.setText(myTempKey) + self.leValue.setText(myTempValue)
If keyword is not in combo box, go to user defined UI per #<I>.
inasafe_inasafe
train
937e3a3dd41b86881965cb91acc29bf3b2f612e7
diff --git a/mollie/api/client.py b/mollie/api/client.py index <HASH>..<HASH> 100644 --- a/mollie/api/client.py +++ b/mollie/api/client.py @@ -17,12 +17,12 @@ from . import resources class Client(object): CLIENT_VERSION = '2.0.0a0' - API_ENDPOINT = 'https://api.mollie.com' - API_VERSION = 'v2' - UNAME = ' '.join(platform.uname()) - USER_AGENT = ' '.join(vs.replace(r'\s+', '-') for vs in [ - 'Mollie/' + CLIENT_VERSION, - 'Python/' + sys.version.split(' ')[0], + API_ENDPOINT = 'https://api.mollie.com' + API_VERSION = 'v2' + UNAME = ' '.join(platform.uname()) + USER_AGENT = ' '.join(vs.replace(r'\s+', '-') for vs in [ + 'Mollie/' + CLIENT_VERSION, + 'Python/' + sys.version.split(' ')[0], 'OpenSSL/' + ssl.OPENSSL_VERSION.split(' ')[1], ])
Fix pycodestyle issues in client
mollie_mollie-api-python
train
62f625e725e4f0639a529b7a98b7d61d22f2b235
diff --git a/knights/klass.py b/knights/klass.py index <HASH>..<HASH> 100644 --- a/knights/klass.py +++ b/knights/klass.py @@ -52,21 +52,43 @@ def kompile(src): ], value=ast.Name(id='context', ctx=ast.Load()), ), - ast.Expr(value=ast.YieldFrom( + ast.Return( value=ast.Call( func=ast.Attribute( - value=ast.Name(id='self', ctx=ast.Load()), - attr='_root', + value=ast.Str(s=''), + attr='join', ctx=ast.Load() ), args=[ - ast.Name(id='context', ctx=ast.Load()) + ast.GeneratorExp( + elt=ast.Call( + func=ast.Name(id='str', ctx=ast.Load()), + args=[ + ast.Name(id='x', ctx=ast.Load()), + ], + keywords=[], starargs=None, kwargs=None + ), + generators=[ + ast.comprehension( + target=ast.Name(id='x', ctx=ast.Store()), + iter=ast.Call( + func=ast.Attribute( + value=ast.Name(id='self', ctx=ast.Load()), + attr='_root', + ctx=ast.Load() + ), + args=[ + ast.Name(id='context', ctx=ast.Load()), + ], keywords=[], starargs=None, kwargs=None + ), + ifs=[] + ), + ] + ), ], - keywords=[], - starargs=None, - kwargs=None, + keywords=[], starargs=None, kwargs=None ) - )), + ), ], decorator_list=[], ) @@ -150,6 +172,19 @@ def build_method(state, name): return func +class VarVisitor(ast.NodeTransformer): + def visit_Name(self, node): + return ast.Subscript( + value=ast.Attribute( + value=ast.Name(id='self', ctx=ast.Load()), + attr='context', + ctx=ast.Load() + ), + slice=ast.Index(value=ast.Str(s=node.id)), + ctx=ast.Load(), + ) + + def parse_node(state): for mode, token in state['stream']: if mode == Token.load: @@ -159,6 +194,7 @@ def parse_node(state): node = ast.Yield(value=ast.Str(s=token)) elif mode == Token.var: code = ast.parse(token, mode='eval') + VarVisitor().visit(code) node = ast.Yield(value=code.body) elif mode == Token.block: #
Add VarVisitor to make var expressions use self.context. Rewrite call method to '' join
funkybob_knights-templater
train
bef8e90772d73e76109d41da65e113921e1b1145
diff --git a/lib/prerender_rails.rb b/lib/prerender_rails.rb index <HASH>..<HASH> 100644 --- a/lib/prerender_rails.rb +++ b/lib/prerender_rails.rb @@ -110,7 +110,9 @@ module Rack def get_prerendered_page_response(env) begin - Net::HTTP.get_response(URI.parse(build_api_url(env))) + url = URI.parse(build_api_url(env)) + req = Net::HTTP::Get.new(url.request_uri, { 'User-Agent' => env['HTTP_USER_AGENT'] }) + response = Net::HTTP.start(url.host, url.port) { |http| http.request(req) } rescue nil end diff --git a/prerender_rails.gemspec b/prerender_rails.gemspec index <HASH>..<HASH> 100644 --- a/prerender_rails.gemspec +++ b/prerender_rails.gemspec @@ -2,7 +2,7 @@ Gem::Specification.new do |spec| spec.name = "prerender_rails" - spec.version = "0.1.4" + spec.version = "0.1.5" spec.authors = ["Todd Hooper"] spec.email = ["[email protected]"] spec.description = %q{Rails middleware to prerender your javascript heavy pages on the fly by a phantomjs service} diff --git a/test/lib/prerender_rails.rb b/test/lib/prerender_rails.rb index <HASH>..<HASH> 100644 --- a/test/lib/prerender_rails.rb +++ b/test/lib/prerender_rails.rb @@ -15,7 +15,7 @@ describe Rack::Prerender do it "should return a prerendered response for a crawler with the returned status code" do request = Rack::MockRequest.env_for "/", "HTTP_USER_AGENT" => bot - stub_request(:get, @prerender.build_api_url(request)).to_return(:body => "<html></html>", :status => 201) + stub_request(:get, @prerender.build_api_url(request)).with(:headers => { 'User-Agent' => bot }).to_return(:body => "<html></html>", :status => 201) response = Rack::Prerender.new(@app).call(request) assert_equal response[2].body, ["<html></html>"] @@ -24,7 +24,7 @@ describe Rack::Prerender do it "should return a prerendered reponse if user is a bot by checking for _escaped_fragment_" do request = Rack::MockRequest.env_for "/path?_escaped_fragment_=", "HTTP_USER_AGENT" => user - stub_request(:get, @prerender.build_api_url(request)).to_return(:body => "<html></html>") + stub_request(:get, @prerender.build_api_url(request)).with(:headers => { 'User-Agent' => user }).to_return(:body => "<html></html>") response = Rack::Prerender.new(@app).call(request) assert_equal response[2].body, ["<html></html>"]
added ability to pass prerender token header
prerender_prerender_rails
train
02ca66c5243aa3c2dc116c95b7eac27dbf79bcaf
diff --git a/yoconfigurator/base.py b/yoconfigurator/base.py index <HASH>..<HASH> 100644 --- a/yoconfigurator/base.py +++ b/yoconfigurator/base.py @@ -20,7 +20,7 @@ else: class DetectMissingEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, MissingValue): - raise ValueError("Missing Value found in config: %s" % obj.name) + raise ValueError('Missing Value found in config: %s' % obj.name) return super(DetectMissingEncoder, self).default(obj) diff --git a/yoconfigurator/dicts.py b/yoconfigurator/dicts.py index <HASH>..<HASH> 100644 --- a/yoconfigurator/dicts.py +++ b/yoconfigurator/dicts.py @@ -101,10 +101,10 @@ class MissingValue(object): self.name = name def __getattr__(self, k): - raise AttributeError("No value provided for %s" % self.name) + raise AttributeError('No value provided for %s' % self.name) def get(self, k, default=None): - raise KeyError("No value provided for %s" % self.name) + raise KeyError('No value provided for %s' % self.name) __getitem__ = get diff --git a/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py b/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py index <HASH>..<HASH> 100644 --- a/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py +++ b/yoconfigurator/tests/samples/public-config/deploy/configuration/public-data.py @@ -6,8 +6,8 @@ from yoconfigurator.dicts import filter_dict def filter(config): """The subset of configuration keys to be made public.""" keys = [ - "myapp.hello", - "myapp.some.deeply.nested.value", - "myapp.oz" + 'myapp.hello', + 'myapp.some.deeply.nested.value', + 'myapp.oz' ] return filter_dict(config, keys) diff --git a/yoconfigurator/tests/test_configurator.py b/yoconfigurator/tests/test_configurator.py index <HASH>..<HASH> 100644 --- a/yoconfigurator/tests/test_configurator.py +++ b/yoconfigurator/tests/test_configurator.py @@ -42,24 +42,24 @@ class TestConfigurator(unittest.TestCase): def test_creates_a_config_that_looks_as_expected(self): expected = { - "yoconfigurator": { - "app": "myapp" + 'yoconfigurator': { + 'app': 'myapp' }, - "myapp": { - "secret": "sauce", - "some": { - "deeply": { - "nested": { - "value": "Stefano likes beer" + 'myapp': { + 'secret': 'sauce', + 'some': { + 'deeply': { + 'nested': { + 'value': 'Stefano likes beer' } } }, - "hello": "world", - "oz": { - "bears": True, - "tigers": True, - "lions": True, - "zebras": False + 'hello': 'world', + 'oz': { + 'bears': True, + 'tigers': True, + 'lions': True, + 'zebras': False } } }
switch " to ' for flake8 compliance
yola_yoconfigurator
train
db6ba60af6ea8a5b0c6f3114c271eec4636f7a6f
diff --git a/src/main/java/net/dv8tion/jda/internal/requests/ratelimit/BotRateLimiter.java b/src/main/java/net/dv8tion/jda/internal/requests/ratelimit/BotRateLimiter.java index <HASH>..<HASH> 100644 --- a/src/main/java/net/dv8tion/jda/internal/requests/ratelimit/BotRateLimiter.java +++ b/src/main/java/net/dv8tion/jda/internal/requests/ratelimit/BotRateLimiter.java @@ -266,11 +266,12 @@ public class BotRateLimiter extends RateLimiter // Handle hard rate limit, pretty much just log that it happened else { - boolean firstHit = hitRatelimit.add(baseRoute); + boolean firstHit = hitRatelimit.add(baseRoute) && retryAfter < 60000; // Update the bucket to the new information bucket.remaining = 0; bucket.reset = getNow() + retryAfter; // don't log warning if we hit the rate limit for the first time, likely due to initialization of the bucket + // unless its a long retry-after delay (more than a minute) if (firstHit) log.debug("Encountered 429 on route {} with bucket {} Retry-After: {} ms", baseRoute, bucket.bucketId, retryAfter); else @@ -377,6 +378,11 @@ public class BotRateLimiter extends RateLimiter requests.addFirst(request); } + private boolean isGlobalRateLimit() + { + return requester.getJDA().getSessionController().getGlobalRatelimit() > getNow(); + } + public long getRateLimit() { long now = getNow(); @@ -440,7 +446,11 @@ public class BotRateLimiter extends RateLimiter if (rateLimit > 0L) { // We need to backoff since we ran out of remaining uses or hit the global rate limit - log.debug("Backing off {} ms for bucket {}", rateLimit, bucketId); + Request request = requests.peekFirst(); // this *should* not be null + String baseRoute = request != null ? request.getRoute().getBaseRoute().toString() : "N/A"; + if (!isGlobalRateLimit() && rateLimit >= 1000 * 60 * 30) // 30 minutes + log.warn("Encountered long {} minutes Rate-Limit on route {}", TimeUnit.MILLISECONDS.toMinutes(rateLimit), baseRoute); + log.debug("Backing off {} ms for bucket {} on route {}", rateLimit, bucketId, baseRoute); break; }
Improve some logging for large rate limits (#<I>)
DV8FromTheWorld_JDA
train
f21ed12b043278398d28e7b9016032c2a1f84d7e
diff --git a/django_countries/tests/forms.py b/django_countries/tests/forms.py index <HASH>..<HASH> 100644 --- a/django_countries/tests/forms.py +++ b/django_countries/tests/forms.py @@ -8,3 +8,10 @@ class PersonForm(forms.ModelForm): class Meta: model = models.Person fields = ['country'] + + +class LegacyForm(forms.ModelForm): + + class Meta: + model = models.Legacy + fields = ['default', 'default_callable'] diff --git a/django_countries/tests/models.py b/django_countries/tests/models.py index <HASH>..<HASH> 100644 --- a/django_countries/tests/models.py +++ b/django_countries/tests/models.py @@ -2,6 +2,10 @@ from django.db import models from django_countries.fields import CountryField +def en_zed(): + return 'NZ' + + class Person(models.Model): name = models.CharField(max_length=50) country = CountryField() @@ -11,3 +15,8 @@ class Person(models.Model): class AllowNull(models.Model): country = CountryField(null=True) + + +class Legacy(models.Model): + default = CountryField(default='AU', null=True) + default_callable = CountryField(default=en_zed) diff --git a/django_countries/tests/test_fields.py b/django_countries/tests/test_fields.py index <HASH>..<HASH> 100644 --- a/django_countries/tests/test_fields.py +++ b/django_countries/tests/test_fields.py @@ -14,7 +14,11 @@ except: from django_countries import fields, countries from django_countries.tests import forms -from django_countries.tests.models import Person, AllowNull +from django_countries.tests.models import Person, AllowNull, en_zed + +skipUnlessLegacy = skipIf( + django.VERSION >= (1, 5), + "Legacy tests only necessary in Django < 1.5") class TestCountryField(TestCase): @@ -174,8 +178,30 @@ class TestModelForm(TestCase): translation.activate('eo') form = forms.PersonForm() try: + # This is just to prove that the language changed. self.assertEqual(list(countries)[0][1], 'Afganio') + # If the choices aren't lazy, this wouldn't be translated. It's the + # second choice because the first one is the initial blank option. self.assertEqual( form.fields['country'].choices[1][1], 'Afganio') finally: translation.activate(lang) + + @skipUnlessLegacy + def test_legacy_default(self): + self.assertEqual( + forms.LegacyForm.base_fields['default'].initial, 'AU') + + @skipUnlessLegacy + def test_legacy_default_callable(self): + self.assertEqual( + forms.LegacyForm.base_fields['default_callable'].initial, en_zed) + form = forms.LegacyForm() + self.assertEqual(form['default_callable'].value(), 'NZ') + + @skipUnlessLegacy + def test_legacy_empty_value(self): + self.assertEqual( + forms.LegacyForm.base_fields['default'].empty_value, None) + self.assertEqual( + forms.LegacyForm.base_fields['default_callable'].empty_value, '')
Form tests for Django LTS
SmileyChris_django-countries
train
869bab43f074f2a7c5687c1c1b5cca13bf99e5e1
diff --git a/src/Ouzo/Goodies/Utilities/Strings.php b/src/Ouzo/Goodies/Utilities/Strings.php index <HASH>..<HASH> 100644 --- a/src/Ouzo/Goodies/Utilities/Strings.php +++ b/src/Ouzo/Goodies/Utilities/Strings.php @@ -759,8 +759,7 @@ class Strings */ public static function uppercaseFirst($string, $encoding = 'UTF-8') { - $length = mb_strlen($string, $encoding); $first = mb_substr($string, 0, 1, $encoding); - return mb_strtoupper($first, $encoding) . mb_substr($string, 1, $length - 1, $encoding); + return mb_strtoupper($first, $encoding) . mb_substr($string, 1, null, $encoding); } }
Cleaned Strings::uppercaseFirst()
letsdrink_ouzo
train
3b30e59dd237e16f4c7314dad606031711c161b8
diff --git a/src/Publisher/Builder/DocResolver.js b/src/Publisher/Builder/DocResolver.js index <HASH>..<HASH> 100644 --- a/src/Publisher/Builder/DocResolver.js +++ b/src/Publisher/Builder/DocResolver.js @@ -247,10 +247,10 @@ export default class DocResolver { let superClassDoc = this._builder._findByName(doc.extends[0])[0]; - // this is circular extends - if (superClassDoc.longname === selfDoc.longname) { break; } - if (superClassDoc) { + // this is circular extends + if (superClassDoc.longname === selfDoc.longname) { break; } + chains.push(superClassDoc.longname); doc = superClassDoc; } else {
fix(extends): npe
esdoc_esdoc
train
8ec660e0e25aae3267d8a51c6c6b628fe278f324
diff --git a/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategy.java b/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategy.java index <HASH>..<HASH> 100644 --- a/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategy.java +++ b/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategy.java @@ -26,8 +26,9 @@ public class GlobTextMatchingStrategy implements TextMatchingStrategy { public boolean isAMatch(String compareThis, String with) { String regex = compareThis.replace(".", "\\.").replace("*", ".*").replace("?", ".?"); Pattern pattern = Pattern.compile(regex, Pattern.MULTILINE); - Matcher matcher = pattern.matcher(with); + String mutatedWith = with.replaceAll("\u00a0", " "); + Matcher matcher = pattern.matcher(mutatedWith); - return matcher.find(); + return matcher.find() || with.contains(compareThis); } } diff --git a/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/htmlutils.js b/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/htmlutils.js index <HASH>..<HASH> 100644 --- a/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/htmlutils.js +++ b/selenium/src/java/org/openqa/selenium/internal/seleniumemulation/htmlutils.js @@ -174,7 +174,7 @@ getTextContent: function(element, preformatted) { if (!preformatted) { text = text.replace(/\n|\r|\t/g, " "); } - return text; + return text.replace(/&nbsp/, " "); } if (element.nodeType == 1 /*Node.ELEMENT_NODE*/ && element.nodeName != 'SCRIPT') { var childrenPreformatted = preformatted || (element.tagName == "PRE"); @@ -193,7 +193,7 @@ getTextContent: function(element, preformatted) { if (element.tagName == "P" || element.tagName == "BR" || element.tagName == "HR" || element.tagName == "DIV") { text += "\n"; } - return text; + return text.replace(/&nbsp/, " "); } return ''; } diff --git a/selenium/test/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategyTest.java b/selenium/test/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategyTest.java index <HASH>..<HASH> 100644 --- a/selenium/test/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategyTest.java +++ b/selenium/test/java/org/openqa/selenium/internal/seleniumemulation/GlobTextMatchingStrategyTest.java @@ -54,4 +54,13 @@ public class GlobTextMatchingStrategyTest extends TestCase { boolean result = glob.isAMatch("This * test", "\t\r\nThis is a test of the open command."); assertTrue(result); } + + public void testShouldMatchEvenWhenTextIsAtTheStartOfAString() { + GlobTextMatchingStrategy glob = new GlobTextMatchingStrategy(); + + // The second text contains the nbsp character. + boolean result = glob.isAMatch("this is the span", + "this is the�span first option second option third,,option Line 1 Line 2 th1th2abcdf1f2�"); + assertTrue(result); + } }
SimonStewart: When doing a globbed match, the verifications weren't handling a non-breaking space properly. Fixing. r<I>
SeleniumHQ_selenium
train
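The same idea as the Java fix above, sketched in Python: convert a simple glob to a regex and normalize non-breaking spaces (often introduced by &nbsp;) before matching, with a plain substring check as fallback. This illustrates the technique only; it is not Selenium's code.

import re

def glob_matches(pattern, text):
    regex = re.escape(pattern).replace(r"\*", ".*").replace(r"\?", ".?")
    normalized = text.replace("\u00a0", " ")   # non-breaking space -> plain space
    return re.search(regex, normalized, re.MULTILINE) is not None or pattern in text

print(glob_matches("This * test", "intro\u00a0This is a test of the open command."))  # True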
e289dfa787b4ecafb84b966eced9ec7f23a51531
diff --git a/ts/blueprints/ember-cli-typescript/index.js b/ts/blueprints/ember-cli-typescript/index.js index <HASH>..<HASH> 100644 --- a/ts/blueprints/ember-cli-typescript/index.js +++ b/ts/blueprints/ember-cli-typescript/index.js @@ -27,7 +27,7 @@ function buildTemplateDeclarations(projectName, layout) { case 'classic': return `${comment} declare module '${projectName}/templates/*' { ${moduleBody}}`; case 'pods': return `${comment} -declare module '${projectName}/components/*/template' { ${moduleBody}}`; +declare module '${projectName}/*/template' { ${moduleBody}}`; case 'mu': return `${comment} declare module '${projectName}/ui/components/*/template' { ${moduleBody}}`; default: throw new Error(`Unexpecte project layout type: "${layout}"`);
chore: more conservative pods approach for compiled template exports
typed-ember_ember-cli-typescript
train
8c9785e6c7b5a6e5a5b4206c352d7dd8526df097
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -21,6 +21,15 @@ module.exports = function (grunt) { } } }, + watch: { + scripts: { + files: ['src/**/*'], + tasks: ['deploy'], + options: { + spawn: false, + }, + }, + }, clean: { all: ['dist/*'] },
Add watch task to automatically run "deploy" task when files in /src change
openshift_origin-web-common
train
c447e4c131f35c8666d775405a7e34629014545c
diff --git a/ipyrad/assemble/utils.py b/ipyrad/assemble/utils.py index <HASH>..<HASH> 100644 --- a/ipyrad/assemble/utils.py +++ b/ipyrad/assemble/utils.py @@ -19,9 +19,6 @@ import pandas as pd import numpy as np import ipyrad -import logging -LOGGER = logging.getLogger(__name__) - ### custom Exception classes class IPyradParamsError(Exception): @@ -198,7 +195,7 @@ def chroms2ints(data, intkeys): Parse .fai to get a dict with {chroms/scaffolds: ints}, or reversed. """ fai = pd.read_csv( - data.paramsdict["reference_sequence"] + ".fai", + data.param.reference_sequence + ".fai", names=['scaffold', 'length', 'start', 'a', 'b'], sep="\t", ) @@ -465,7 +462,6 @@ def clustdealer(pairdealer, optim): taker = takewhile(lambda x: x[0] != b"//\n", pairdealer) oneclust = [b"".join(next(taker))] except StopIteration: - #LOGGER.debug('last chunk %s', chunk) return 1, chunk ## load one cluster @@ -493,7 +489,7 @@ def get_threaded_view(ipyclient, split=True): ## group ids into a dict by their hostnames ## e.g., {a: [0, 1, 4], b: [2, 3], c: [5, 6, 7, 8]} - hostdict = {i:[] for i in hosts} # defaultdict(list) + hostdict = {i: [] for i in hosts} # defaultdict(list) for host, eid in zip(hosts, eids): hostdict[host].append(eid) @@ -525,7 +521,6 @@ def get_threaded_view(ipyclient, split=True): ## make sure split numbering is correct #threaded = hostdict.values() #assert len(ipyclient.ids) <= len(list(itertools.chain(*threaded))) - LOGGER.info("threaded_view: %s", dict(hostdict)) return hostdict diff --git a/ipyrad/core/assembly.py b/ipyrad/core/assembly.py index <HASH>..<HASH> 100644 --- a/ipyrad/core/assembly.py +++ b/ipyrad/core/assembly.py @@ -824,7 +824,7 @@ class Hackers: @property def bwa_args(self): - return self._bwa_args + return self.data["bwa_args"] @bwa_args.setter def bwa_args(self, value): self._data["bwa_args"] = str(value)
bugfix for hackersonly and remove loggers from utils
dereneaton_ipyrad
train
b2c191de6af34d908f534d0c6c6ab24c797c7f3e
diff --git a/tests/test_helpers.py b/tests/test_helpers.py index <HASH>..<HASH> 100644 --- a/tests/test_helpers.py +++ b/tests/test_helpers.py @@ -179,7 +179,7 @@ def test_multi_daemon(pyscript): assert len(pgids) == 4 assert set(pids) == pgids - result = script.run('status', '--json') + result = script.run('status', '--json', '--fields=pid,name') assert result.returncode == 0 statuses = json.loads(result.stdout.decode('ascii').rstrip('\n')) for n, status in enumerate(statuses): @@ -224,18 +224,20 @@ def test_multi_daemon(pyscript): def test_multi_daemon_basic(pyscript): script = pyscript(""" + import sys import time from daemonocle.helpers import MultiDaemon def worker(): time.sleep(10) - MultiDaemon( + multi_daemon = MultiDaemon( name='foo_{n}', worker=worker, pid_file='foo.{n}.pid', num_workers=2, - ).cli() + ) + multi_daemon.do_action(sys.argv[1]) """) result = script.run('start') @@ -245,6 +247,15 @@ def test_multi_daemon_basic(pyscript): b'Starting foo_0 ... OK\n' b'Starting foo_1 ... OK\n') assert result.stderr == b'' + + result = script.run('restart') + assert result.returncode == 0 + assert result.stdout == ( + b'Stopping foo_0 ... OK\n' + b'Stopping foo_1 ... OK\n' + b'Starting foo_0 ... OK\n' + b'Starting foo_1 ... OK\n') + assert result.stderr == b'' finally: result = script.run('stop') assert result.returncode == 0
Add some variation to the tests to cover more code paths
jnrbsn_daemonocle
train
caaac3996b308942fab4d4bd1dc416a443a7da90
diff --git a/plugin/exo/Entity/Attempt/Answer.php b/plugin/exo/Entity/Attempt/Answer.php index <HASH>..<HASH> 100644 --- a/plugin/exo/Entity/Attempt/Answer.php +++ b/plugin/exo/Entity/Attempt/Answer.php @@ -48,7 +48,7 @@ class Answer * * @ORM\Column(type="text", nullable=true) */ - private $feedback; + private $feedback = ''; /** * @var int @@ -158,6 +158,10 @@ class Answer */ public function getFeedback() { + if (!$this->feedback) { + return ''; + } + return $this->feedback; } diff --git a/plugin/exo/Library/Model/FeedbackTrait.php b/plugin/exo/Library/Model/FeedbackTrait.php index <HASH>..<HASH> 100644 --- a/plugin/exo/Library/Model/FeedbackTrait.php +++ b/plugin/exo/Library/Model/FeedbackTrait.php @@ -14,7 +14,7 @@ trait FeedbackTrait * * @ORM\Column(name="feedback", type="text", nullable=true) */ - private $feedback = null; + private $feedback = ''; /** * Sets feedback. @@ -33,6 +33,10 @@ trait FeedbackTrait */ public function getFeedback() { + if (!$this->feedback) { + return ''; + } + return $this->feedback; } }
[ExoBundle] Feedback default return is now empty string. (#<I>)
claroline_Distribution
train
1a86ff865195f975aee14cb2b8a78ea14294e05c
diff --git a/src/main/java/org/filteredpush/qc/date/DateUtils.java b/src/main/java/org/filteredpush/qc/date/DateUtils.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/filteredpush/qc/date/DateUtils.java +++ b/src/main/java/org/filteredpush/qc/date/DateUtils.java @@ -415,7 +415,39 @@ public class DateUtils { logger.debug(e.getMessage()); } } - if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && verbatimEventDate.matches("^[A-Za-z]{3,9}[.]{0,1}[-/ ][0-9]{4}$")) { + if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && + verbatimEventDate.matches("^[0-9]{4}$")) { + try { + DateTimeParser[] parsers = { + DateTimeFormat.forPattern("yyyy").getParser(), + }; + DateTimeFormatter formatter = new DateTimeFormatterBuilder().append( null, parsers ).toFormatter(); + DateMidnight parseDate = LocalDate.parse(verbatimEventDate,formatter).toDateMidnight(); + resultDate = parseDate.toString("yyyy"); + result.setResultState(EventResult.EventQCResultState.RANGE); + result.setResult(resultDate); + } catch (Exception e) { + logger.debug(e.getMessage()); + } + } + if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && + verbatimEventDate.matches("^[12][0-9]{2}0s$")) { + try { + DateTimeParser[] parsers = { + DateTimeFormat.forPattern("yyyy's").getParser(), + }; + DateTimeFormatter formatter = new DateTimeFormatterBuilder().append( null, parsers ).toFormatter(); + DateMidnight parseDate = LocalDate.parse(verbatimEventDate,formatter).toDateMidnight(); + DateMidnight endDate = parseDate.plusYears(10).minusDays(1); + resultDate = parseDate.toString("yyyy") + "-01-01/" + endDate.toString("yyyy") + "-12-31"; + result.setResultState(EventResult.EventQCResultState.RANGE); + result.setResult(resultDate); + } catch (Exception e) { + logger.debug(e.getMessage()); + } + } + if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && + verbatimEventDate.matches("^[A-Za-z]{3,9}[.]{0,1}[-/ ][0-9]{4}$")) { try { DateTimeParser[] parsers = { DateTimeFormat.forPattern("MMM-yyyy").getParser(), @@ -570,7 +602,8 @@ public class DateUtils { logger.debug(e.getMessage()); } } - if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && verbatimEventDate.matches("^[0-9]{4}[-][0-9]{2}$")) { + if (result.getResultState().equals(EventResult.EventQCResultState.NOT_RUN) && + verbatimEventDate.matches("^[0-9]{4}[-][0-9]{2}$")) { try { String century = verbatimEventDate.substring(0,2); String startBit = verbatimEventDate.substring(0,4); diff --git a/src/test/java/org/filteredpush/qc/date/DateUtilsTest.java b/src/test/java/org/filteredpush/qc/date/DateUtilsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/filteredpush/qc/date/DateUtilsTest.java +++ b/src/test/java/org/filteredpush/qc/date/DateUtilsTest.java @@ -877,8 +877,8 @@ public class DateUtilsTest { // assertEquals("1974", result.getResult()); result = DateUtils.extractDateFromVerbatimER("1980s"); - //assertEquals(EventResult.EventQCResultState.RANGE, result.getResultState()); - //assertEquals("1980-01-01/1989-12-31", result.getResult()); + assertEquals(EventResult.EventQCResultState.RANGE, result.getResultState()); + assertEquals("1980-01-01/1989-12-31", result.getResult()); result = DateUtils.extractDateFromVerbatimER("19-21.vii.1990"); //assertEquals(EventResult.EventQCResultState.RANGE, result.getResultState()); @@ -932,7 +932,7 @@ public class DateUtilsTest { result = DateUtils.extractDateFromVerbatimER("1930"); 
assertEquals(EventResult.EventQCResultState.RANGE, result.getResultState()); - // assertEquals("1930", result.getResult()); + assertEquals("1930", result.getResult()); result = DateUtils.extractDateFromVerbatimER("17 Oct., 1993"); assertEquals(EventResult.EventQCResultState.DATE, result.getResultState());
ISSUE: #6 PURPOSE: Supporting more verbatim date forms found in VertNet. DESCRIPTION: Passing test cases for <I>s and <I>.
FilteredPush_event_date_qc
train
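A small Python analogue of the verbatim-date handling added above: a bare four-digit year is returned as-is and a decade such as "1980s" becomes a ten-year ISO interval. Purely illustrative; it ignores the Joda-Time plumbing of the original.

import re

def parse_verbatim(value):
    """Return an ISO year or date range for a bare year or a decade like '1980s'."""
    if re.fullmatch(r"\d{4}", value):
        return value                               # "1930" -> "1930"
    m = re.fullmatch(r"([12]\d{2}0)s", value)
    if m:
        start = int(m.group(1))
        return f"{start}-01-01/{start + 9}-12-31"  # "1980s" -> "1980-01-01/1989-12-31"
    return None

print(parse_verbatim("1930"))    # 1930
print(parse_verbatim("1980s"))   # 1980-01-01/1989-12-31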
2c67b164dd7c27607fcc7a34d8b9f1276846e8e0
diff --git a/lib/lightstreamer/subscription.rb b/lib/lightstreamer/subscription.rb index <HASH>..<HASH> 100644 --- a/lib/lightstreamer/subscription.rb +++ b/lib/lightstreamer/subscription.rb @@ -163,7 +163,7 @@ module Lightstreamer # Returns whether the specified line of stream data is an overflow message for this subscription. Currently nothing # is done with overflow messages if they occur. def overflow_message?(line) - line =~ /^\d,\d,OV\d$/ + line.match Regexp.new("^#{id},\\d+,OV\\d+$") end # Returns the regular expression that will match a single line of data in the incoming stream that is relevant to diff --git a/spec/lightstreamer/subscription_spec.rb b/spec/lightstreamer/subscription_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lightstreamer/subscription_spec.rb +++ b/spec/lightstreamer/subscription_spec.rb @@ -34,7 +34,8 @@ describe Lightstreamer::Subscription do expect(subscription.process_stream_data("#{subscription.id},1|a|b")).to be true expect(subscription.process_stream_data("#{subscription.id},2|c|")).to be true - expect(subscription.process_stream_data("#{subscription.id},2,OV2")).to be true + expect(subscription.process_stream_data("#{subscription.id},2,OV5")).to be true + expect(subscription.process_stream_data("#{subscription.id},99,OV123")).to be true expect(subscription.process_stream_data('0,3|d|e')).to be false expect(calls.count).to eq(4)
Fixed failure to recognize some overflow messages
richard-viney_lightstreamer
train
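The fix above anchors the overflow pattern on the actual subscription id and allows multi-digit counters instead of assuming single digits. The same check in Python, with illustrative names:

import re

def is_overflow_message(subscription_id, line):
    return re.match(rf"^{subscription_id},\d+,OV\d+$", line) is not None

print(is_overflow_message(7, "7,99,OV123"))   # True
print(is_overflow_message(7, "8,99,OV123"))   # False: different subscription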
a07cd30157facdd3301dfb822d52290812c4edf3
diff --git a/lib/sensu/server.rb b/lib/sensu/server.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/server.rb +++ b/lib/sensu/server.rb @@ -76,6 +76,7 @@ module Sensu end @amq = AMQP::Channel.new(@rabbitmq) @amq.auto_recovery = true + @amq.prefetch(10) @amq.on_error do |channel, channel_close| @logger.fatal('rabbitmq channel closed', { :error => { @@ -90,7 +91,7 @@ module Sensu def setup_keepalives @logger.debug('subscribing to keepalives') @keepalive_queue = @amq.queue('keepalives') - @keepalive_queue.subscribe do |payload| + @keepalive_queue.subscribe(:ack => true) do |header, payload| client = JSON.parse(payload, :symbolize_names => true) @logger.debug('received keepalive', { :client => client @@ -98,6 +99,9 @@ module Sensu @redis.set('client:' + client[:name], client.to_json).callback do @redis.sadd('clients', client[:name]) end + EM::next_tick do + header.ack + end end end @@ -434,12 +438,15 @@ module Sensu def setup_results @logger.debug('subscribing to results') @result_queue = @amq.queue('results') - @result_queue.subscribe do |payload| + @result_queue.subscribe(:ack => true) do |header, payload| result = JSON.parse(payload, :symbolize_names => true) @logger.debug('received result', { :result => result }) process_result(result) + EM::next_tick do + header.ack + end end end
[prefetch] began qos experiments
sensu_sensu
train
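The commit above sets a prefetch window and switches the queues to explicit acknowledgement. The equivalent pattern in Python, sketched with the pika client (the Ruby amqp gem used in the diff has a different API); the queue name and prefetch value mirror the diff, everything else is illustrative.

import pika

connection = pika.BlockingConnection(pika.ConnectionParameters("localhost"))
channel = connection.channel()
channel.basic_qos(prefetch_count=10)      # at most 10 unacknowledged deliveries
channel.queue_declare(queue="keepalives")

def handle(ch, method, properties, body):
    print("processing", body)                        # do the work first...
    ch.basic_ack(delivery_tag=method.delivery_tag)   # ...then acknowledge

channel.basic_consume(queue="keepalives", on_message_callback=handle)
channel.start_consuming()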
4a4968128a89cc323efa1621c5e7f49d9eb45c04
diff --git a/rb/lib/selenium/webdriver/common/service.rb b/rb/lib/selenium/webdriver/common/service.rb index <HASH>..<HASH> 100644 --- a/rb/lib/selenium/webdriver/common/service.rb +++ b/rb/lib/selenium/webdriver/common/service.rb @@ -135,12 +135,12 @@ module Selenium end def connect_to_server - http = Selenium::WebDriver::Remote::Http::Default.new - http.open_timeout = STOP_TIMEOUT / 2 - http.read_timeout = STOP_TIMEOUT / 2 - http.server_url = uri - yield http - http.close + Net::HTTP.start(@host, @port) do |http| + http.open_timeout = STOP_TIMEOUT / 2 + http.read_timeout = STOP_TIMEOUT / 2 + + yield http + end end def find_free_port @@ -164,7 +164,10 @@ module Selenium def stop_server return if process_exited? - connect_to_server { |http| http.call(:get, '/shutdown', nil) } + connect_to_server do |http| + headers = WebDriver::Remote::Http::Common::DEFAULT_HEADERS.dup + http.get('/shutdown', headers) + end end def process_running?
Revert using HTTP default client for service shutdown. It doesn't play nicely with IEDriverServer which responds with text/html content-type, while client only supports application/json. Basically reverts <I> while ensuring that default headers are used so that User-Agent is set properly.
SeleniumHQ_selenium
train
796ff35a0494568be645f4e4c5bcc1e8102c00db
diff --git a/jquery.placeholder.js b/jquery.placeholder.js index <HASH>..<HASH> 100644 --- a/jquery.placeholder.js +++ b/jquery.placeholder.js @@ -1,4 +1,10 @@ -/*! http://mths.be/placeholder v2.1.3 by @mathias */ +/*! + * jQuery Placeholder Plugin v2.1.3 + * https://github.com/mathiasbynens/jquery-placeholder + * + * Copyright 2011, 2015 Mathias Bynens + * Released under the MIT license + */ (function(factory) { if (typeof define === 'function' && define.amd) { // AMD @@ -262,4 +268,4 @@ return document.activeElement; } catch (exception) {} } -})); \ No newline at end of file +}));
Added MIT license in source file
mathiasbynens_jquery-placeholder
train
7a243af8be2652f5dbe6e7ddade35943cdd11193
diff --git a/lib/Skeleton/Object/Get.php b/lib/Skeleton/Object/Get.php index <HASH>..<HASH> 100644 --- a/lib/Skeleton/Object/Get.php +++ b/lib/Skeleton/Object/Get.php @@ -84,8 +84,13 @@ trait Get { $db = self::trait_get_database(); $where = ''; - if (property_exists(get_class(), 'class_configuration') AND isset(self::$class_configuration['soft_delete']) AND self::$class_configuration['soft_delete'] === TRUE) { - $where = ' AND archived IS NULL'; + $field_archived = self::trait_get_table_field_archived(); + foreach (self::get_object_fields() as $field) { + if ($field['field'] != $field_archived) { + continue; + } + + $where = ' AND ' . $field_archived . ' IS NULL'; } if (is_null($sort)) { diff --git a/lib/Skeleton/Object/Model.php b/lib/Skeleton/Object/Model.php index <HASH>..<HASH> 100644 --- a/lib/Skeleton/Object/Model.php +++ b/lib/Skeleton/Object/Model.php @@ -433,6 +433,20 @@ trait Model { } /** + * trait_get_table_field_archived: get the field that is used for 'archived' + * + * @access private + * @return string $updated + */ + private static function trait_get_table_field_archived() { + if (property_exists(get_class(), 'class_configuration') AND isset(self::$class_configuration['table_field_archived'])) { + return self::$class_configuration['table_field_archived']; + } else { + return 'archived'; + } + } + + /** * Trait_get_link_tables * * @access private
Fix use of archived in get_all
tigron_skeleton-object
train
2dcc6966fe7b82bb81aad813fa42742e63d4784d
diff --git a/logging/logfile.go b/logging/logfile.go index <HASH>..<HASH> 100644 --- a/logging/logfile.go +++ b/logging/logfile.go @@ -93,23 +93,27 @@ func (l *LogFile) pruneFiles() error { if l.MaxFiles == 0 { return nil } - pattern := l.fileNamePattern() - //get all the files that match the log file pattern - globExpression := filepath.Join(l.logPath, fmt.Sprintf(pattern, "*")) - matches, err := filepath.Glob(globExpression) + + pattern := filepath.Join(l.logPath, fmt.Sprintf(l.fileNamePattern(), "*")) + matches, err := filepath.Glob(pattern) if err != nil { return err } - var stale int - if l.MaxFiles <= -1 { - // Prune everything - stale = len(matches) - } else { - // Prune if there are more files stored than the configured max - stale = len(matches) - l.MaxFiles + + switch { + case l.MaxFiles < 0: + return removeFiles(matches) + case len(matches) < l.MaxFiles: + return nil } - for i := 0; i < stale; i++ { - if err := os.Remove(matches[i]); err != nil { + + last := len(matches) - l.MaxFiles + return removeFiles(matches[:last]) +} + +func removeFiles(files []string) error { + for _, file := range files { + if err := os.Remove(file); err != nil { return err } } diff --git a/logging/logger.go b/logging/logger.go index <HASH>..<HASH> 100644 --- a/logging/logger.go +++ b/logging/logger.go @@ -92,6 +92,9 @@ func Setup(config Config, out io.Writer) (hclog.InterceptLogger, error) { MaxBytes: config.LogRotateBytes, MaxFiles: config.LogRotateMaxFiles, } + if err := logFile.pruneFiles(); err != nil { + return nil, fmt.Errorf("Failed to prune log files: %w", err) + } if err := logFile.openNew(); err != nil { return nil, fmt.Errorf("Failed to setup logging: %w", err) }
logging: call pruneFiles on startup. To ensure that files are pruned before a new one is created. Also clean up the logic in pruneFiles
hashicorp_consul
train
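An illustrative Python version of the pruning logic above: delete rotated log files matching a pattern, keeping at most max_files of the newest ones, removing everything when max_files is negative, and doing nothing when it is zero. It assumes timestamped file names that sort chronologically.

import glob
import os

def prune_files(pattern, max_files):
    if max_files == 0:
        return                                   # pruning disabled
    matches = sorted(glob.glob(pattern))         # oldest first for timestamped names
    if max_files < 0:
        stale = matches                          # prune everything
    elif len(matches) <= max_files:
        return
    else:
        stale = matches[:len(matches) - max_files]
    for path in stale:
        os.remove(path)

# Example: prune_files("/var/log/app/app-*.log", max_files=5)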
ecb515772cc5ce572f0bfed5a51b4c5e187444cd
diff --git a/source/functions/resolveInlineOptions.js b/source/functions/resolveInlineOptions.js index <HASH>..<HASH> 100644 --- a/source/functions/resolveInlineOptions.js +++ b/source/functions/resolveInlineOptions.js @@ -5,7 +5,7 @@ function resolveInlineOptions(format, options={}) { delete newOptions._ format = format._ } - Object.assign({newOptions, options}) + Object.assign(newOptions, options) return {options: newOptions, format} } diff --git a/source/sanitize/number.js b/source/sanitize/number.js index <HASH>..<HASH> 100644 --- a/source/sanitize/number.js +++ b/source/sanitize/number.js @@ -1,7 +1,6 @@ function sanitizeNumber(input, options) { - const {allowNaN, finite, min, max, even, odd} = options - if (typeof input != 'number') return 'Expected Number' + const {allowNaN, finite, min, max, even, odd} = options if (options.hasOwnProperty('allowNaN')) { if (typeof allowNaN != 'boolean') throw new Error('Invalid AllowNaN Option')
fix syntax error in resolveInlineOptions
L1lith_Sandhands
train
0277f59c31e96d96fea132c72e479a049f161fd4
diff --git a/test/phpunit/SessionStoreTest.php b/test/phpunit/SessionStoreTest.php index <HASH>..<HASH> 100644 --- a/test/phpunit/SessionStoreTest.php +++ b/test/phpunit/SessionStoreTest.php @@ -13,7 +13,7 @@ use SessionHandler; class SessionStoreTest extends TestCase { use KeyValuePairProvider; - public function setUp():void { + protected function setUp():void { FunctionMocker::mock("session_start"); FunctionMocker::mock("session_id"); } diff --git a/test/phpunit/SessionTest.php b/test/phpunit/SessionTest.php index <HASH>..<HASH> 100644 --- a/test/phpunit/SessionTest.php +++ b/test/phpunit/SessionTest.php @@ -15,7 +15,7 @@ class SessionTest extends TestCase { use StringProvider; use ConfigProvider; - public function setUp():void { + protected function setUp():void { FunctionMocker::mock("session_start"); FunctionMocker::mock("session_id"); FunctionMocker::mock("session_destroy");
Improve PHPUnit fixtures (#<I>)
PhpGt_Session
train
e754d3d28a711f772f2fb0ca1098b29a1c73662d
diff --git a/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamRecordWriter.java b/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamRecordWriter.java index <HASH>..<HASH> 100644 --- a/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamRecordWriter.java +++ b/flink-streaming-java/src/main/java/org/apache/flink/streaming/runtime/io/StreamRecordWriter.java @@ -106,6 +106,8 @@ public class StreamRecordWriter<T extends IOReadableWritable> extends RecordWrit } catch (InterruptedException e) { // ignore on close + // restore interrupt flag to fast exit further blocking calls + Thread.currentThread().interrupt(); } } }
[hotfix] [runtime] Restore interruption flag in StreamRecordWriter.close()
apache_flink
train
ad1305b7a0f811eefe277f5938235949f124e7c6
diff --git a/redis/connection.py b/redis/connection.py index <HASH>..<HASH> 100644 --- a/redis/connection.py +++ b/redis/connection.py @@ -378,7 +378,6 @@ class UnixDomainSocketConnection(Connection): (exception.args[0], self.path, exception.args[1]) -# TODO: add ability to block waiting on a connection to be released class ConnectionPool(object): "Generic connection pool" def __init__(self, connection_class=Connection, max_connections=None,
remove TODO, it is implemented by BlockingConnectionPool without a timeout
andymccurdy_redis-py
train
13cb1b7893befacc3c7ab321062f27d7e4ccef8c
diff --git a/ui/api/src/main/java/org/jboss/forge/ui/UICommandMetadata.java b/ui/api/src/main/java/org/jboss/forge/ui/UICommandMetadata.java index <HASH>..<HASH> 100644 --- a/ui/api/src/main/java/org/jboss/forge/ui/UICommandMetadata.java +++ b/ui/api/src/main/java/org/jboss/forge/ui/UICommandMetadata.java @@ -7,6 +7,8 @@ package org.jboss.forge.ui; +import java.net.URL; + /** * @author <a href="mailto:[email protected]">Lincoln Baxter, III</a> */ @@ -17,4 +19,12 @@ public interface UICommandMetadata String getDescription(); UICategory getCategory(); + + /** + * Returns the location of the documentation of this command + * + * @return null if no documentation was found + */ + URL getDocLocation(); + } diff --git a/ui/api/src/main/java/org/jboss/forge/ui/base/UICommandMetadataBase.java b/ui/api/src/main/java/org/jboss/forge/ui/base/UICommandMetadataBase.java index <HASH>..<HASH> 100644 --- a/ui/api/src/main/java/org/jboss/forge/ui/base/UICommandMetadataBase.java +++ b/ui/api/src/main/java/org/jboss/forge/ui/base/UICommandMetadataBase.java @@ -1,5 +1,7 @@ package org.jboss.forge.ui.base; +import java.net.URL; + import org.jboss.forge.ui.UICategory; import org.jboss.forge.ui.UICommandMetadata; @@ -7,21 +9,31 @@ public class UICommandMetadataBase implements UICommandMetadata { private final String name; private final String description; - private UICategory category; + private final UICategory category; + private final URL docLocation; public UICommandMetadataBase(String name, String description) { - super(); - this.name = name; - this.description = description; + this(name, description, null, null); } public UICommandMetadataBase(String name, String description, UICategory category) { + this(name, description, category, null); + } + + public UICommandMetadataBase(String name, String description, URL docLocation) + { + this(name, description, null, docLocation); + } + + public UICommandMetadataBase(String name, String description, UICategory category, URL docLocation) + { super(); this.name = name; this.description = description; this.category = category; + this.docLocation = docLocation; } @Override @@ -41,4 +53,10 @@ public class UICommandMetadataBase implements UICommandMetadata { return category; } + + @Override + public URL getDocLocation() + { + return docLocation; + } } diff --git a/ui/tests/src/test/java/org/jboss/forge/ui/MyFirstWizard.java b/ui/tests/src/test/java/org/jboss/forge/ui/MyFirstWizard.java index <HASH>..<HASH> 100644 --- a/ui/tests/src/test/java/org/jboss/forge/ui/MyFirstWizard.java +++ b/ui/tests/src/test/java/org/jboss/forge/ui/MyFirstWizard.java @@ -2,6 +2,7 @@ package org.jboss.forge.ui; import javax.inject.Inject; +import org.jboss.forge.ui.base.UICommandMetadataBase; import org.jboss.forge.ui.util.Categories; import org.jboss.forge.ui.wizard.UIWizard; @@ -38,26 +39,8 @@ public class MyFirstWizard implements UIWizard @Override public UICommandMetadata getMetadata() { - return new UICommandMetadata() - { - @Override - public String getName() - { - return MyFirstWizard.class.getName(); - } - - @Override - public String getDescription() - { - return "generic test wizard"; - } - - @Override - public UICategory getCategory() - { - return Categories.create("Example"); - } - }; + return new UICommandMetadataBase(MyFirstWizard.class.getName(), "generic test wizard", + Categories.create("Example")); } @Override
Added getDocLocation() for UICommandMetadata
forge_core
train
e5830dc7ac16d448cd861a3cb87ef587efa8b422
diff --git a/src/Cdb/EventLD.php b/src/Cdb/EventLD.php index <HASH>..<HASH> 100644 --- a/src/Cdb/EventLD.php +++ b/src/Cdb/EventLD.php @@ -15,7 +15,7 @@ class EventLD implements \JsonSerializable public function __construct(\CultureFeed_Cdb_Item_Event $event) { - + $this->event = $event; } /** @@ -24,8 +24,25 @@ class EventLD implements \JsonSerializable */ function jsonSerialize() { + // @todo Handle language dynamically, currently hardcoded to nl. + /** @var \CultureFeed_Cdb_Data_EventDetail $detail */ + $detail = $this->event->getDetails()->getDetailByLanguage('nl'); + $pictures = $detail->getMedia()->byMediaType( + \CultureFeed_Cdb_Data_File::MEDIA_TYPE_PHOTO + ); + $pictures->rewind(); + $picture = count($pictures) > 0 ? $pictures->current() : null; + return array( + // @todo provide Event-LD context here relative to the base URI '@context' => '/api/1.0/event.jsonld', + // @todo make id a dereferenceable URI (http://en.wikipedia.org/wiki/Dereferenceable_Uniform_Resource_Identifier) + '@id' => $this->event->getCdbId(), + 'name' => $detail->getTitle(), + 'shortDescription' => $detail->getShortDescription(), + 'calendarSummary' => $detail->getCalendarSummary(), + 'image' => $picture ? $picture->getHLink() : null, + 'location' => $this->event->getLocation()->getLabel(), ); } } diff --git a/src/DefaultSearchService.php b/src/DefaultSearchService.php index <HASH>..<HASH> 100644 --- a/src/DefaultSearchService.php +++ b/src/DefaultSearchService.php @@ -7,6 +7,7 @@ namespace CultuurNet\UDB3; use CultuurNet\Search\Parameter; use CultuurNet\Search\SearchResult; +use CultuurNet\UDB3\Cdb\EventLD; use CultuurNet\UDB3\SearchAPI2; /** @@ -54,24 +55,9 @@ class DefaultSearchService implements SearchServiceInterface foreach ($result->getItems() as $item) { /** @var \CultureFeed_Cdb_Item_Event $event */ - $event = $item->getEntity(); - // @todo Handle language dynamically, currently hardcoded to nl. - /** @var \CultureFeed_Cdb_Data_EventDetail $detail */ - $detail = $event->getDetails()->getDetailByLanguage('nl'); - $pictures = $detail->getMedia()->byMediaType(\CultureFeed_Cdb_Data_File::MEDIA_TYPE_PHOTO); - $pictures->rewind(); - $picture = count($pictures) > 0 ? $pictures->current() : NULL; - $return['member'][] = array( - // @todo provide Event-LD context here - // @todo make id a dereferenceable URI (http://en.wikipedia.org/wiki/Dereferenceable_Uniform_Resource_Identifier) - '@context' => '/api/1.0/event.jsonld', - '@id' => $item->getId(), - 'name' => $detail->getTitle(), - 'shortDescription' => $detail->getShortDescription(), - 'calendarSummary' => $detail->getCalendarSummary(), - 'image' => $picture ? $picture->getHLink() : NULL, - 'location' => $event->getLocation()->getLabel(), - ); + $cdbEvent = $item->getEntity(); + $event = new EventLD($cdbEvent); + $return['member'][] = $event; } return $return;
Move JSON-LD transformation to its dedicated class.
cultuurnet_udb3-php
train
05c484b54c40683f6ee250938226ae6fd9398b74
diff --git a/src/Models/BaseElement.php b/src/Models/BaseElement.php index <HASH>..<HASH> 100644 --- a/src/Models/BaseElement.php +++ b/src/Models/BaseElement.php @@ -525,8 +525,9 @@ class BaseElement extends DataObject return $this->cacheData['page']; } - $area = $this->Parent(); + $class = DataObject::getSchema()->hasOneComponent($this, 'Parent'); + $area = ($this->ParentID) ? DataObject::get_by_id($class, $this->ParentID) : null; if ($area instanceof ElementalArea && $area->exists()) { $this->cacheData['page'] = $area->getOwnerPage(); return $this->cacheData['page'];
Save repeated database queries for same ElementalArea (closes #<I>)
dnadesign_silverstripe-elemental
train
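The commit above avoids re-running the same ORM lookup by caching the resolved page on the element. The same idea sketched in Python with a per-instance cache; the Element class and the lookup callable are hypothetical stand-ins, not SilverStripe's API.

from functools import cached_property

class Element:
    def __init__(self, parent_id, lookup):
        self.parent_id = parent_id
        self._lookup = lookup          # callable: parent_id -> page (or None)

    @cached_property
    def page(self):
        # The potentially expensive lookup runs once per instance; later
        # accesses reuse the cached value instead of querying again.
        return self._lookup(self.parent_id) if self.parent_id else None

calls = []
el = Element(3, lambda pid: calls.append(pid) or f"page-{pid}")
print(el.page, el.page, len(calls))   # page-3 page-3 1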
ee397c466ac03a8ed20c4fd7e7e645790f180180
diff --git a/pytest_girder/pytest_girder/utils.py b/pytest_girder/pytest_girder/utils.py index <HASH>..<HASH> 100644 --- a/pytest_girder/pytest_girder/utils.py +++ b/pytest_girder/pytest_girder/utils.py @@ -182,7 +182,10 @@ def request(path='/', method='GET', params=None, user=None, body = body.encode('utf8') if params: - qs = urllib.parse.urlencode(params) + # Python2 can't urlencode unicode and this does no harm in Python3 + qs = urllib.parse.urlencode({ + k: v.encode('utf8') if isinstance(v, six.text_type) else v + for k, v in params.items()}) if params and body: # In this case, we are forced to send params in query string diff --git a/pytest_girder/setup.py b/pytest_girder/setup.py index <HASH>..<HASH> 100644 --- a/pytest_girder/setup.py +++ b/pytest_girder/setup.py @@ -2,7 +2,7 @@ from setuptools import find_packages, setup setup( name='pytest-girder', - version='0.1.0a2', + version='0.1.0a3', description='A set of pytest fixtures for testing Girder applications.', author='Kitware, Inc.', author_email='[email protected]',
Support passing unicode values to the pytest request method. This is necessary to pass unicode values to the request method when testing in Python <I>.
girder_girder
train
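A short usage sketch of the change above: pre-encoding text values keeps urllib's urlencode happy on Python 2 and is harmless on Python 3, where the bytes are simply percent-encoded. It assumes six is available, as in the diff.

import six
from six.moves.urllib.parse import urlencode

def encode_params(params):
    return urlencode({
        k: v.encode("utf8") if isinstance(v, six.text_type) else v
        for k, v in params.items()})

print(encode_params({"name": u"håkon", "limit": 10}))   # name=h%C3%A5kon&limit=10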
48e96822b3ec4f897438a2d1cdb735f51648cb48
diff --git a/kafka/coordinator/base.py b/kafka/coordinator/base.py index <HASH>..<HASH> 100644 --- a/kafka/coordinator/base.py +++ b/kafka/coordinator/base.py @@ -200,7 +200,10 @@ class BaseCoordinator(object): self._client.poll(future=future) if future.failed(): - if future.retriable(): + if isinstance(future.exception, + Errors.GroupCoordinatorNotAvailableError): + continue + elif future.retriable(): metadata_update = self._client.cluster.request_update() self._client.poll(future=metadata_update) else:
Don't need to refresh metadata on GroupCoordinatorNotAvailableErrors
dpkp_kafka-python
train
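The retry policy above in plain Python: a coordinator-not-yet-available failure is simply retried, other retriable failures trigger a metadata refresh first, and anything else propagates. The exception classes and helper callables are hypothetical, not kafka-python's.

class CoordinatorNotAvailable(Exception):
    pass

class RetriableError(Exception):
    pass

def ensure_coordinator(send_lookup, refresh_metadata):
    while True:
        try:
            return send_lookup()
        except CoordinatorNotAvailable:
            continue                   # coordinator not elected yet; just retry
        except RetriableError:
            refresh_metadata()         # stale cluster metadata may be the cause
        # non-retriable exceptions propagate to the caller

attempts = {"n": 0}
def lookup():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise CoordinatorNotAvailable()
    return "coordinator-42"

print(ensure_coordinator(lookup, refresh_metadata=lambda: None))   # coordinator-42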
7e625c1403ef8533ea3f029d30fc93dd6175abfb
diff --git a/liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java b/liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java index <HASH>..<HASH> 100644 --- a/liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java +++ b/liquibase-core/src/main/java/liquibase/database/core/OracleDatabase.java @@ -55,7 +55,7 @@ public class OracleDatabase extends AbstractJdbcDatabase { method.invoke(sqlConn, true); reservedWords.addAll(Arrays.asList(sqlConn.getMetaData().getSQLKeywords().toUpperCase().split(",\\s*"))); - reservedWords.addAll(Arrays.asList("USER", "SESSION","RESOURCE", "START", "SIZE")); //more reserved words not returned by driver + reservedWords.addAll(Arrays.asList("GROUP", "USER", "SESSION","RESOURCE", "START", "SIZE")); //more reserved words not returned by driver } catch (Exception e) { LogFactory.getLogger().info("Could not set remarks reporting on OracleDatabase: " + e.getMessage()); ; //cannot set it. That is OK
CORE-<I> "group" is not included as a reserved word
liquibase_liquibase
train
2df7ca0755e5c44613823395d05197484cc86c53
diff --git a/test/db/sqlite3/transaction_test.rb b/test/db/sqlite3/transaction_test.rb index <HASH>..<HASH> 100644 --- a/test/db/sqlite3/transaction_test.rb +++ b/test/db/sqlite3/transaction_test.rb @@ -21,7 +21,7 @@ class SQLite3TransactionTest < Test::Unit::TestCase assert_raise ActiveRecord::TransactionIsolationError do super end - end if Test::Unit::TestCase.ar_version('4.0') + end # @override def test_transaction_isolation_repeatable_read @@ -30,7 +30,7 @@ class SQLite3TransactionTest < Test::Unit::TestCase assert_raise ActiveRecord::TransactionIsolationError do super end - end if Test::Unit::TestCase.ar_version('4.0') + end def test_transaction_isolation_read_uncommitted Entry.transaction(:isolation => :read_uncommitted) do @@ -38,7 +38,7 @@ class SQLite3TransactionTest < Test::Unit::TestCase Entry.create # Entry2.create assert_equal 1, Entry.count end - end if Test::Unit::TestCase.ar_version('4.0') + end def test_supports_savepoints assert_true ActiveRecord::Base.connection.supports_savepoints? @@ -55,44 +55,20 @@ class SQLite3TransactionTest < Test::Unit::TestCase end MyUser.transaction(:requires_new => true) do - if ar_version('4.2') - assert_equal "active_record_1", MyUser.connection.current_savepoint_name - assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name - else # 3.2 - # on AR < 3.2 we do get 'active_record_1' with AR-JDBC which is not compatible - # with MRI but is actually more accurate - maybe 3.2 should be updated as well - assert_equal "active_record_2", MyUser.connection.current_savepoint_name - - assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION - #assert_equal "active_record_1", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION - end + assert_equal "active_record_1", MyUser.connection.current_savepoint_name + assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name MyUser.transaction(:requires_new => true) do - if ar_version('4.2') - assert_equal "active_record_2", MyUser.connection.current_savepoint_name - assert_equal "active_record_2", MyUser.connection.current_transaction.savepoint_name - - assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION - #assert_equal "active_record_2", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION - else # 3.2 - assert_equal "active_record_3", MyUser.connection.current_savepoint_name - - assert_equal "active_record_3", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION - #assert_equal "active_record_2", MyUser.connection.current_savepoint_name(false) if defined? JRUBY_VERSION - end - end - - if ar_version('4.2') - assert_equal "active_record_1", MyUser.connection.current_savepoint_name - assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name - else # 3.2 assert_equal "active_record_2", MyUser.connection.current_savepoint_name + assert_equal "active_record_2", MyUser.connection.current_transaction.savepoint_name - assert_equal "active_record_2", MyUser.connection.current_savepoint_name(true) if defined? JRUBY_VERSION - #assert_equal "active_record_1", MyUser.connection.current_savepoint_name(false) if defined? 
JRUBY_VERSION + assert_equal "active_record_2", MyUser.connection.current_savepoint_name end + + assert_equal "active_record_1", MyUser.connection.current_savepoint_name + assert_equal "active_record_1", MyUser.connection.current_transaction.savepoint_name end end - end if Test::Unit::TestCase.ar_version('3.2') + end end
Remove versioning from tests and fixup current_savepoint_name to not take name param since AR5 does not have that
jruby_activerecord-jdbc-adapter
train
0eac40cb2c1a1ba292def4924614f131819eb08c
diff --git a/lib/itcss_cli.rb b/lib/itcss_cli.rb index <HASH>..<HASH> 100644 --- a/lib/itcss_cli.rb +++ b/lib/itcss_cli.rb @@ -240,10 +240,7 @@ module ItcssCli puts "There's no #{@ITCSS_CONFIG_FILE} created yet. Run `itcss init` to create it.".red abort elsif @ITCSS_DIR.nil? || @ITCSS_BASE_FILE.nil? - puts "Something is wrong with your itcss.yml file. Please delete it and run `itcss init` again.".red - abort - elsif @ITCSS_DIR == 'path/to/itcss/root' || @ITCSS_BASE_FILE == 'yourapplication' - puts "You haven't done the itcss_cli's configuration. You must provide your directories settings in itcss.yml.".yellow + puts "Something is wrong with your itcss.yml file. Please run `itcss init` again to override it.".red abort end end
Remove legacy code from init via file
kandebonfim_itcsscli
train
65587b35488c255cfaa5a07a5340d30c13bcfeb8
diff --git a/src/Nether/Console/Client.php b/src/Nether/Console/Client.php index <HASH>..<HASH> 100644 --- a/src/Nether/Console/Client.php +++ b/src/Nether/Console/Client.php @@ -197,9 +197,18 @@ class Client { if(!array_key_exists($cmd,$this->Handlers)) throw new ClientHandlerException("no handler found {$cmd}"); - return call_user_func(function($cli,$func){ - return $func($cli); - },$this,$this->Handlers[$cmd]); + if($this->Handlers[$cmd] instanceof \Closure) { + $closure = $this->Handlers[$cmd]->BindTo($this); + $closure(); + unset($closure); + + // the php70 version of the above. + // $this->Handlers[$cmd]->Call($this); + } else { + return call_user_func(function($cli,$func){ + return $func($cli); + },$this,$this->Handlers[$cmd]); + } } protected function
If a handler was defined as a closure, instead of passing the cli object as an argument, bind it so you can use it from within.
netherphp_console
train
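The PHP change above binds a closure to the client so handlers can use $this instead of receiving the client as an argument. The closest Python analogue, shown with illustrative names, is binding a plain function to an instance with types.MethodType:

import types

class Client:
    def __init__(self):
        self.handlers = {}

    def register(self, name, func):
        # Bind the handler so that inside it, `self` is this Client instance.
        self.handlers[name] = types.MethodType(func, self)

    def run(self, name):
        return self.handlers[name]()

def greet(self):
    return f"running on {self.__class__.__name__}"

cli = Client()
cli.register("greet", greet)
print(cli.run("greet"))   # running on Client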
e3cd9219b87cacb946189c0c317e596fcc0fa37f
diff --git a/lib/fs/casefs.go b/lib/fs/casefs.go index <HASH>..<HASH> 100644 --- a/lib/fs/casefs.go +++ b/lib/fs/casefs.go @@ -25,11 +25,11 @@ const ( ) type ErrCaseConflict struct { - given, real string + Given, Real string } func (e *ErrCaseConflict) Error() string { - return fmt.Sprintf(`given name "%v" differs from name in filesystem "%v"`, e.given, e.real) + return fmt.Sprintf(`given name "%v" differs from name in filesystem "%v"`, e.Given, e.Real) } func IsErrCaseConflict(err error) bool { diff --git a/lib/model/folder_sendrecv.go b/lib/model/folder_sendrecv.go index <HASH>..<HASH> 100644 --- a/lib/model/folder_sendrecv.go +++ b/lib/model/folder_sendrecv.go @@ -1060,6 +1060,14 @@ func (f *sendReceiveFolder) handleFile(file protocol.FileInfo, snap *db.Snapshot // Check for an old temporary file which might have some blocks we could // reuse. tempBlocks, err := scanner.HashFile(f.ctx, f.fs, tempName, file.BlockSize(), nil, false) + if err != nil { + var caseErr *fs.ErrCaseConflict + if errors.As(err, &caseErr) { + if rerr := f.fs.Rename(caseErr.Real, tempName); rerr == nil { + tempBlocks, err = scanner.HashFile(f.ctx, f.fs, tempName, file.BlockSize(), nil, false) + } + } + } if err == nil { // Check for any reusable blocks in the temp file tempCopyBlocks, _ := blockDiff(tempBlocks, file.Blocks) diff --git a/lib/model/folder_sendrecv_test.go b/lib/model/folder_sendrecv_test.go index <HASH>..<HASH> 100644 --- a/lib/model/folder_sendrecv_test.go +++ b/lib/model/folder_sendrecv_test.go @@ -1216,6 +1216,32 @@ func testPullCaseOnlyDirOrSymlink(t *testing.T, dir bool) { } } +func TestPullTempFileCaseConflict(t *testing.T) { + m, f := setupSendReceiveFolder() + defer cleanupSRFolder(f, m) + + copyChan := make(chan copyBlocksState, 1) + + file := protocol.FileInfo{Name: "foo"} + confl := "Foo" + tempNameConfl := fs.TempName(confl) + if fd, err := f.fs.Create(tempNameConfl); err != nil { + t.Fatal(err) + } else { + if _, err := fd.Write([]byte("data")); err != nil { + t.Fatal(err) + } + fd.Close() + } + + f.handleFile(file, f.fset.Snapshot(), copyChan) + + cs := <-copyChan + if _, err := cs.tempFile(); err != nil { + t.Error(err) + } +} + func cleanupSharedPullerState(s *sharedPullerState) { s.mut.Lock() defer s.mut.Unlock()
lib/model: Don't fail over case-conflict on tempfile (fixes #<I>) (#<I>)
syncthing_syncthing
train
b36a7e441db6a4a89ab93127c195e0db7046b7ef
diff --git a/src/stratum/stratumConnection.js b/src/stratum/stratumConnection.js index <HASH>..<HASH> 100644 --- a/src/stratum/stratumConnection.js +++ b/src/stratum/stratumConnection.js @@ -1,4 +1,5 @@ // @flow +/* global WebSocket */ import { parse } from 'uri-js' @@ -96,65 +97,67 @@ export class StratumConnection { /** * Activates the underlying TCP connection. */ - open () { - logger.info(`${this.walletId} stratum.open: ${this.uri}`) - const parsed = parse(this.uri) - if ( - (parsed.scheme !== 'electrum' && - parsed.scheme !== 'electrums' && - parsed.scheme !== 'electrumwss' && - parsed.scheme !== 'electrumws') || - !parsed.host || - !parsed.port - ) { - logger.info(`Bad stratum URI: ${this.uri}`) - return Promise.resolve( - this.handleError(new TypeError(`Bad stratum URI: ${this.uri}`)) - ) - } + async open () { + const { uri, io } = this - if (parsed.scheme === 'electrum' || parsed.scheme === 'electrums') { - const protocol = parsed.scheme === 'electrums' ? 'tls' : 'tcp' - const io: PluginIo = this.io - io.makeSocket({ - host: parsed.host, - port: Number(parsed.port), - type: protocol - }) - .then(socket => { - socket.on('close', () => this.onSocketClose()) - socket.on('error', (e: Error) => { - this.error = e + try { + if (uri.indexOf('electrumws') === 0 || uri.indexOf('electrumwss') === 0) { + // It's a websocket! + const server = this.uri + .replace(/^electrumwss/, 'wss') + .replace(/^electrumws/, 'ws') + const socket = new WebSocket(server) + socket.onclose = event => { + this.onSocketClose() + } + socket.onerror = event => { + this.error = new Error(JSON.stringify(event)) + } + socket.onopen = event => { + this.onSocketConnect() + } + socket.onmessage = (event: Object) => { + this.onSocketData(event.data) + } + this.socket = socket + this.cancelConnect = false + } else if ( + uri.indexOf('electrum') === 0 || + uri.indexOf('electrums') === 0 + ) { + // It's a TCP! + const parsed = parse(uri) + if ( + (parsed.scheme !== 'electrum' && parsed.scheme !== 'electrums') || + !parsed.host || + !parsed.port + ) { + throw new Error('Bad URL') + } + + // Connect to the server: + await io + .makeSocket({ + host: parsed.host, + port: Number(parsed.port), + type: parsed.scheme === 'electrum' ? 'tcp' : 'tls' }) - socket.on('open', () => this.onSocketConnect()) - socket.on('message', (data: string) => this.onSocketData(data)) - this.socket = socket - this.cancelConnect = false - socket.connect() - }) - .catch(e => { - this.handleError(e) - }) - } else if (parsed.scheme === 'electrumwss') { - // Connect to the server: - const server = this.uri.replace('electrumwss', 'wss') - const socket = new WebSocket(server) - socket.onclose = event => { - this.onSocketClose() - } - socket.onerror = event => { - this.error = new Error(JSON.stringify(event)) - } - socket.onopen = event => { - this.onSocketConnect() - } - socket.onmessage = (event: Object) => { - this.onSocketData(event.data) + .then(socket => { + socket.on('close', () => this.onSocketClose()) + socket.on('error', (e: Error) => { + this.error = e + }) + socket.on('open', () => this.onSocketConnect()) + socket.on('message', (data: string) => this.onSocketData(data)) + this.socket = socket + this.cancelConnect = false + return socket.connect() + }) + } else { + throw new Error('Wrong URL prefix') } - this.socket = socket - this.cancelConnect = false - } else { - logger.info(`${this.walletId} stratum.open invalid scheme: ${this.uri}`) + } catch (e) { + this.handleError(e) } }
Add crash fixes from yesterday's meeting
EdgeApp_edge-currency-bitcoin
train
a8a53e83a80e3e4e0a34c22e3b872729a0e9a8e9
diff --git a/contrib/externs/angular-1.4.js b/contrib/externs/angular-1.4.js index <HASH>..<HASH> 100644 --- a/contrib/externs/angular-1.4.js +++ b/contrib/externs/angular-1.4.js @@ -1033,11 +1033,11 @@ angular.$animate.prototype.setClass = function( element, add, remove, opt_options) {}; /** + * @param {(boolean|JQLiteSelector)=} opt_elementOrValue * @param {boolean=} opt_value - * @param {JQLiteSelector=} opt_element * @return {boolean} */ -angular.$animate.prototype.enabled = function(opt_value, opt_element) {}; +angular.$animate.prototype.enabled = function(opt_elementOrValue, opt_value) {}; /** * @param {angular.$q.Promise} animationPromise
Flip arguments of $animate.enabled externs. Breaking change was introduced in Angular <I>+. To verify, see commit to Angular <I> which flips the arguments [0] and method implementation. Externs for <<I> remain unchanged. [0] <URL>
google_closure-compiler
train
27d60127e385d5b32eb6e9430641f110bcd24544
diff --git a/lib/jobba/status.rb b/lib/jobba/status.rb index <HASH>..<HASH> 100644 --- a/lib/jobba/status.rb +++ b/lib/jobba/status.rb @@ -36,7 +36,7 @@ module Jobba end def self.local_attrs - %w(id state progress errors data kill_requested_at job_name job_args attempt) + + %w(id state progress errors data kill_requested_at job_name job_args attempt prior_attempts) + State::ALL.collect(&:timestamp_name) end @@ -162,7 +162,7 @@ module Jobba end def prior_attempts - [*0..attempt-1].collect{|ii| self.class.find!("#{id}:#{ii}")} + @prior_attempts ||= [*0..attempt-1].collect{|ii| self.class.find!("#{id}:#{ii}")} end protected @@ -319,6 +319,7 @@ module Jobba end prior_attempts.each(&:delete!) + @prior_attempts = nil end def delete_locally! diff --git a/lib/jobba/statuses.rb b/lib/jobba/statuses.rb index <HASH>..<HASH> 100644 --- a/lib/jobba/statuses.rb +++ b/lib/jobba/statuses.rb @@ -33,6 +33,10 @@ class Jobba::Statuses def delete_all! load + + # preload prior attempts because loading them is not `multi`-friendly + @cache.each(&:prior_attempts) + redis.multi do @cache.each(&:delete!) end diff --git a/spec/status_spec.rb b/spec/status_spec.rb index <HASH>..<HASH> 100644 --- a/spec/status_spec.rb +++ b/spec/status_spec.rb @@ -369,8 +369,9 @@ describe Jobba::Status do it 'deletes prior attempts when current status deleted' do prior_0, prior_1 = @status.prior_attempts + prior_0_id = prior_0.id @status.delete! - expect(Jobba::Status.find(prior_0.id)).to be_nil + expect(Jobba::Status.find(prior_0_id)).to be_nil end end diff --git a/spec/statuses_spec.rb b/spec/statuses_spec.rb index <HASH>..<HASH> 100644 --- a/spec/statuses_spec.rb +++ b/spec/statuses_spec.rb @@ -82,6 +82,12 @@ describe Jobba::Statuses do expect(statuses.to_a).to eq [] expect(Jobba.redis.keys("*").count).to eq 0 end + + it 'can delete_all! when there are restarted jobs' do + failed.started! # restart + statuses.delete_all! + expect(Jobba.redis.keys("*").count).to eq 0 + end end it 'can bulk request kill' do
fix multi-related bug where delete_all cannot delete prior attempts
openstax_jobba
train
0536e3104c22062598ab81f8db8b53407a0fb968
diff --git a/peer.go b/peer.go index <HASH>..<HASH> 100644 --- a/peer.go +++ b/peer.go @@ -826,6 +826,16 @@ func (p *peer) writeMessage(msg btcwire.Message) { if atomic.LoadInt32(&p.disconnect) != 0 { return } + if !p.versionKnown { + switch msg.(type) { + case *btcwire.MsgVersion: + // This is OK. + default: + // We drop all messages other than version if we + // haven't done the handshake already. + return + } + } // Use closures to log expensive operations so they are only run when // the logging level requires it. @@ -1007,13 +1017,17 @@ out: p.writeMessage(msg) case iv := <-p.outputInvChan: - p.invSendQueue.PushBack(iv) + // No handshake? They'll find out soon enough. + if p.versionKnown { + p.invSendQueue.PushBack(iv) + } case <-trickleTicker.C: // Don't send anything if we're disconnecting or there // is no queued inventory. if atomic.LoadInt32(&p.disconnect) != 0 || - p.invSendQueue.Len() == 0 { + p.invSendQueue.Len() == 0 || + !p.versionKnown { continue }
Don't send messages before handshake. If we haven't handshaken with a peer don't send messages that are not the handshake. Additionally don't queue up invs for sending, they'll find out soon enough when they ask us what we know.
btcsuite_btcd
train
a70773099b22b042914e091a158daf9aca446ec6
diff --git a/lib/mapreduce/bloomSearch.js b/lib/mapreduce/bloomSearch.js index <HASH>..<HASH> 100644 --- a/lib/mapreduce/bloomSearch.js +++ b/lib/mapreduce/bloomSearch.js @@ -138,12 +138,12 @@ exports.search = function (reverseIndex, keySet, docFreqs, q, cleanQuery, filter } } -/* + console.log('idf: ' + JSON.stringify(idf)); console.log('otherKeys: ' + otherKeys); console.log('docFreqs: ' + JSON.stringify(docFreqs)); console.log('leastFrequentKey: ' + leastFrequentKey); -*/ + //if a key has a frequency of 0, since forage only supports ANDing, //the result set is empty @@ -151,6 +151,14 @@ exports.search = function (reverseIndex, keySet, docFreqs, q, cleanQuery, filter sendResultSet(); } + var seekLimit = -1; + debugger; + if (Object.keys(docFreqs).length == 1 && !q['facets']) { + seekLimit = q['pageSize'] + q['offset'] + totalHits = docFreqs[leastFrequentKey]; + } + + var vectorsRequested = 0; var vectorsProcessed = null; var reverseIndexFetchCursor = 0; @@ -158,6 +166,7 @@ exports.search = function (reverseIndex, keySet, docFreqs, q, cleanQuery, filter reverseIndex.createReadStream({ valueEncoding: 'json', reverse: true, + limit: seekLimit, start: 'REVERSEINDEX~' + leastFrequentKey + '~~', end: 'REVERSEINDEX~' + leastFrequentKey + '~'}) .on('data', function (data) {
added faster lookup for single term queries
fergiemcdowall_search-index
train
04830db859cb26127f8b14e67f9b430a9350a510
diff --git a/dna-jcr/src/main/java/org/jboss/dna/jcr/AbstractJcrNode.java b/dna-jcr/src/main/java/org/jboss/dna/jcr/AbstractJcrNode.java index <HASH>..<HASH> 100644 --- a/dna-jcr/src/main/java/org/jboss/dna/jcr/AbstractJcrNode.java +++ b/dna-jcr/src/main/java/org/jboss/dna/jcr/AbstractJcrNode.java @@ -126,8 +126,8 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { throw new RepositoryException(msg); } } - - final NodeEditor editorFor(Graph.Batch operations) throws RepositoryException { + + final NodeEditor editorFor( Graph.Batch operations ) throws RepositoryException { try { return cache.getEditorFor(nodeUuid, operations); } catch (ItemNotFoundException err) { @@ -137,7 +137,7 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { String msg = JcrI18n.nodeHasAlreadyBeenRemovedFromThisSession.text(nodeUuid, cache.workspaceName()); throw new RepositoryException(msg); } - } + } final JcrValue valueFrom( int propertyType, Object value ) { @@ -241,7 +241,6 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { /** * {@inheritDoc} * - * @throws UnsupportedOperationException always * @see javax.jcr.Node#getPrimaryNodeType() */ public JcrNodeType getPrimaryNodeType() throws RepositoryException { @@ -252,11 +251,10 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { Name getPrimaryTypeName() throws RepositoryException { return nodeInfo().getPrimaryTypeName(); } - + /** * {@inheritDoc} * - * @throws UnsupportedOperationException always * @see javax.jcr.Node#getMixinNodeTypes() */ public NodeType[] getMixinNodeTypes() throws RepositoryException { @@ -564,7 +562,6 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { /** * {@inheritDoc} * - * @throws UnsupportedOperationException always * @see javax.jcr.Node#getNodes(java.lang.String) */ public NodeIterator getNodes( String namePattern ) throws RepositoryException { @@ -944,17 +941,17 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { NodeInfo grandparentInfo; if (parentPath.size() > 1) { // Per the TCK, if relPath references a property, then we have to throw a ConstraintViolationException - // So, if we can't find the parent, try for the parent's parent and see if the last segment of the parent's + // So, if we can't find the parent, try for the parent's parent and see if the last segment of the + // parent's // path contains a property ... Path grandparentPath = parentPath.getParent(); assert grandparentPath != null; - + grandparentInfo = cache.findNodeInfo(nodeUuid, grandparentPath); // throws PathNotFoundException - } - else { + } else { grandparentInfo = this.nodeInfo(); } - + if (grandparentInfo.getProperty(parentPath.getLastSegment().getName()) != null) { // Need to throw a ConstraintViolationException since the request was to add a child to // a property ... 
@@ -999,8 +996,7 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { throw new UnsupportedOperationException(); } - protected final Property removeExistingValuedProperty( String name ) - throws ConstraintViolationException, RepositoryException { + protected final Property removeExistingValuedProperty( String name ) throws ConstraintViolationException, RepositoryException { PropertyId id = new PropertyId(nodeUuid, nameFrom(name)); AbstractJcrProperty property = cache.findJcrProperty(id); if (property != null) { @@ -1133,7 +1129,9 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { return removeExistingValuedProperty(name); } - return cache.findJcrProperty(editor().setProperty(nameFrom(name), valuesFrom(PropertyType.STRING, values), PropertyType.UNDEFINED)); + return cache.findJcrProperty(editor().setProperty(nameFrom(name), + valuesFrom(PropertyType.STRING, values), + PropertyType.UNDEFINED)); } /** @@ -1306,8 +1304,7 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { public final boolean isModified() { try { return nodeInfo().isModified(); - } - catch (RepositoryException re) { + } catch (RepositoryException re) { throw new IllegalStateException(re); } } @@ -1320,8 +1317,7 @@ abstract class AbstractJcrNode extends AbstractJcrItem implements Node { public final boolean isNew() { try { return nodeInfo().isNew(); - } - catch (RepositoryException re) { + } catch (RepositoryException re) { throw new IllegalStateException(re); } }
DNA-<I> AbstractJcrNode Javadoc Throws Clauses Are Out Of Sync. Removed the "@throws UnsupportedOperationException always" line from the JavaDoc of several methods. These lines were leftovers from when the methods were indeed not implemented. git-svn-id: <URL>
ModeShape_modeshape
train
e231417adc64e712d24efc1baa4599170fb1b277
diff --git a/ipydex/test/run_all.py b/ipydex/test/run_all.py index <HASH>..<HASH> 100644 --- a/ipydex/test/run_all.py +++ b/ipydex/test/run_all.py @@ -13,7 +13,10 @@ def run_all(): suite = loader.discover(current_path) runner = unittest.TextTestRunner() - runner.run(suite) + res = runner.run(suite) + + # cause CI to fail if tests have failed (otherwise this script returns 0 despite of failing tests) + assert res.wasSuccessful() if __name__ == '__main__': diff --git a/ipydex/test/test_displaytools.py b/ipydex/test/test_displaytools.py index <HASH>..<HASH> 100644 --- a/ipydex/test/test_displaytools.py +++ b/ipydex/test/test_displaytools.py @@ -5,8 +5,7 @@ from contextlib import contextmanager from io import StringIO from ipydex import displaytools as dt -from ipydex import IPS, activate_ips_on_exception -activate_ips_on_exception() +from ipydex import IPS def bool_sum(x): diff --git a/ipydex/test/test_embed.py b/ipydex/test/test_embed.py index <HASH>..<HASH> 100644 --- a/ipydex/test/test_embed.py +++ b/ipydex/test/test_embed.py @@ -13,8 +13,6 @@ import sys import unittest from IPython.utils.tempdir import NamedFileInTemporaryDirectory import pexpect -# from ipydex import IPS, activate_ips_on_exception -# activate_ips_on_exception() _exit = b"exit\r" _mu1 = b"__mu = 1; exit\n"
ensure nonzero return value of tests.run_all() in case of failing tests
cknoll_ipydex
train
8b47a5d6c4c3c48fa2c0273a32bcfc34f7db064a
diff --git a/src/Charcoal/Property/DateTimeProperty.php b/src/Charcoal/Property/DateTimeProperty.php index <HASH>..<HASH> 100644 --- a/src/Charcoal/Property/DateTimeProperty.php +++ b/src/Charcoal/Property/DateTimeProperty.php @@ -331,6 +331,11 @@ class DateTimeProperty extends AbstractProperty return null; } + if (is_int($val) && $this->isValidTimeStamp($val)) { + $dateTime = new DateTime(); + $val = $dateTime->setTimestamp($val); + } + if (is_string($val)) { $val = new DateTime($val); } @@ -343,4 +348,15 @@ class DateTimeProperty extends AbstractProperty return $val; } + + /** + * @param integer|string $timestamp Timestamp. + * @return boolean + */ + private function isValidTimeStamp($timestamp) + { + return (is_int($timestamp)) + && ($timestamp <= PHP_INT_MAX) + && ($timestamp >= ~PHP_INT_MAX); + } }
Improve DateTimeProperty::dateTimeVal to support timestamps
locomotivemtl_charcoal-property
train
00d3c6870b706eb45cfee2d9102f97ab47430639
diff --git a/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/Vocabulary.java b/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/Vocabulary.java index <HASH>..<HASH> 100644 --- a/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/Vocabulary.java +++ b/wdtk-rdf/src/main/java/org/wikidata/wdtk/rdf/Vocabulary.java @@ -535,6 +535,11 @@ public class Vocabulary { } public static String getReferenceUri(Reference reference) { + final String hash = reference.getHash(); + if (!hash.isEmpty()) { + return PREFIX_WIKIDATA_REFERENCE + hash; + } + md.reset(); ArrayList<Integer> hashes = new ArrayList<>(); for (SnakGroup snakgroup : reference.getSnakGroups()) {
rdf: prefer reference hash from dump if available
Wikidata_Wikidata-Toolkit
train
2c6dd79d419699e61970719dbb369aefe359ea6e
diff --git a/tests/test_db.py b/tests/test_db.py index <HASH>..<HASH> 100644 --- a/tests/test_db.py +++ b/tests/test_db.py @@ -42,8 +42,8 @@ def test_round_trip(tmp_path): def test_get_credentials_table(tmp_path): db.DB_FILE = str(tmp_path / 'db.json') with db.get_credentials_table() as table: - assert not table._storage._storage._handle.closed + assert not table._storage._handle.closed with db.get_credentials_table(table) as table2: assert table2 is table - assert not table._storage._storage._handle.closed - assert table._storage._storage._handle.closed + assert not table._storage._handle.closed + assert table._storage._handle.closed
Fix tests for updated TinyDB/Tinyrecord
ofek_pypinfo
train
3fcd240fe9c0eeb749c8feaaa2af28779ddca813
diff --git a/addon/hint/show-hint.js b/addon/hint/show-hint.js index <HASH>..<HASH> 100644 --- a/addon/hint/show-hint.js +++ b/addon/hint/show-hint.js @@ -167,6 +167,7 @@ // If we're at the edge of the screen, then we want the menu to appear on the left of the cursor. var winW = window.innerWidth || Math.max(document.body.offsetWidth, document.documentElement.offsetWidth); var winH = window.innerHeight || Math.max(document.body.offsetHeight, document.documentElement.offsetHeight); + (options.container || document.body).appendChild(hints); var box = hints.getBoundingClientRect(); var overlapX = box.right - winW, overlapY = box.bottom - winH; if (overlapX > 0) { @@ -187,7 +188,6 @@ } hints.style.top = (top = pos.bottom - overlapY) + "px"; } - (options.container || document.body).appendChild(hints); cm.addKeyMap(this.keyMap = buildKeyMap(options, { moveFocus: function(n) { widget.changeActive(widget.selectedHint + n); },
[show-hint addon] Fix broken inside-window-positioning code Closes #<I>
codemirror_CodeMirror
train
94b63b68825f8cdd077b81eac135d1b99e80a945
diff --git a/sundial/__init__.py b/sundial/__init__.py index <HASH>..<HASH> 100644 --- a/sundial/__init__.py +++ b/sundial/__init__.py @@ -5,6 +5,6 @@ from django.utils import version __all__ = ['VERSION', '__version__'] -VERSION = (1, 0, 5, 'final', 0) +VERSION = (1, 0, 6, 'alpha', 0) __version__ = version.get_version(VERSION)
Started <I> alpha development.
charettes_django-sundial
train
657a361e20015e3abffe3919534fa92b078635b5
diff --git a/docs/release.md b/docs/release.md index <HASH>..<HASH> 100644 --- a/docs/release.md +++ b/docs/release.md @@ -27,6 +27,7 @@ Current version is phy v2.0a1 (alpha 1). There may be further new features for v * Show histogram of amplitudes overlayed with the amplitudes * Support for multiple types of amplitudes (template waveform amplitude, raw waveform amplitude, feature amplitude) * Splitting is supported + * Shift+click in the amplitude view to center the trace view to a certain time * **Correlogram view**: * Show horizontal line for the baseline firing rate, and a customizable vertical line for the refractory period * **Waveform view**: diff --git a/phy/apps/base.py b/phy/apps/base.py index <HASH>..<HASH> 100644 --- a/phy/apps/base.py +++ b/phy/apps/base.py @@ -658,10 +658,15 @@ class TraceMixin(object): v.on_select() @connect + def on_amplitude_click(sender, time): + v.go_to(time) + + @connect def on_close_view(sender, view): if view == v: unconnect(on_spike_click) unconnect(on_color_mapping_changed) + unconnect(on_amplitude_click) return v diff --git a/phy/cluster/views/amplitude.py b/phy/cluster/views/amplitude.py index <HASH>..<HASH> 100644 --- a/phy/cluster/views/amplitude.py +++ b/phy/cluster/views/amplitude.py @@ -13,6 +13,7 @@ import numpy as np from phylib.utils.color import selected_cluster_color, add_alpha from phylib.utils._types import _as_array +from phylib.utils.event import emit from .base import ManualClusteringView, MarkerSizeMixin, LassoMixin from .histogram import _compute_histogram @@ -64,6 +65,7 @@ class AmplitudeView(MarkerSizeMixin, LassoMixin, ManualClusteringView): 'previous_amplitude_type': 'shift+a', 'select_x_dim': 'alt+left click', 'select_y_dim': 'alt+right click', + 'select_time': 'shift+click', } def __init__(self, amplitudes=None, amplitude_name=None, duration=None): @@ -204,3 +206,10 @@ class AmplitudeView(MarkerSizeMixin, LassoMixin, ManualClusteringView): def previous_amplitude_type(self): """Switch to the previous amplitude type.""" self._change_amplitude_type(-1) + + def on_mouse_click(self, e): + """Select a time from the amplitude view to display in the trace view.""" + if 'Shift' in e.modifiers: + mouse_pos = self.canvas.panzoom.window_to_ndc(e.pos) + time = Range(NDC, self.data_bounds).apply(mouse_pos)[0][0] + emit('amplitude_click', self, time=time)
Shift+click in amplitude view to center the trace view to a certain time
kwikteam_phy
train
4425fe980cb66e0324c599cac73187b2debddba5
diff --git a/src/com/google/javascript/jscomp/ConformanceRules.java b/src/com/google/javascript/jscomp/ConformanceRules.java index <HASH>..<HASH> 100644 --- a/src/com/google/javascript/jscomp/ConformanceRules.java +++ b/src/com/google/javascript/jscomp/ConformanceRules.java @@ -43,6 +43,7 @@ import com.google.javascript.rhino.JSDocInfo; import com.google.javascript.rhino.JSTypeExpression; import com.google.javascript.rhino.Node; import com.google.javascript.rhino.Token; +import com.google.javascript.rhino.jstype.EnumElementType; import com.google.javascript.rhino.jstype.FunctionType; import com.google.javascript.rhino.jstype.JSType; import com.google.javascript.rhino.jstype.JSTypeNative; @@ -2183,7 +2184,10 @@ public final class ConformanceRules { Node attr = node.getSecondChild(); String attrName = inferStringValue(traversal.getScope(), attr); if (attrName == null) { - return ConformanceResult.VIOLATION; + // xid() obfuscates attribute names, thus never clashing with security-sensitive attributes. + return isXid(attr.getJSType()) + ? ConformanceResult.CONFORMANCE + : ConformanceResult.VIOLATION; } attrName = attrName.toLowerCase(Locale.ROOT); @@ -2225,7 +2229,7 @@ public final class ConformanceRules { } } else if (hasElementType(node)) { // key is not a string literal. JSType keyType = key.getJSType(); - if (keyType == null) { + if (keyType == null || isXid(keyType)) { return ConformanceResult.CONFORMANCE; } @@ -2266,6 +2270,18 @@ public final class ConformanceRules { return true; } + private boolean isXid(JSType type) { + if (type == null) { + return false; + } + EnumElementType enumElTy = type.toMaybeEnumElementType(); + if (enumElTy != null + && enumElTy.getEnumType().getReferenceName().equals("enum{xid.String}")) { + return true; + } + return false; + } + @Nullable private String inferStringValue(Scope scope, Node node) { if (node == null) { diff --git a/test/com/google/javascript/jscomp/CheckConformanceTest.java b/test/com/google/javascript/jscomp/CheckConformanceTest.java index <HASH>..<HASH> 100644 --- a/test/com/google/javascript/jscomp/CheckConformanceTest.java +++ b/test/com/google/javascript/jscomp/CheckConformanceTest.java @@ -3355,6 +3355,22 @@ public final class CheckConformanceTest extends CompilerTestCase { "const attr = Attribute.SAFE;", "(new HTMLScriptElement).setAttribute(attr, 'xxx');") })); + + testNoWarning( + externs(externs), + srcs( + lines( + "goog.provide('xid');", + "goog.provide('xid.String');", + "/** @enum {string} */ xid.String = {DO_NOT_USE: ''};", + "/**", + " * @param {string} id", + " * @return {xid.String}", + " */", + "xid = function(id) {return /** @type {xid.String} */ (id);};", + "const attr = xid('src');", + "(new HTMLScriptElement).setAttribute(attr, 'xxx');", + "(new HTMLScriptElement)[attr] = 'xxx';"))); } @Test
Treat xid-processed attribute names as always secure in setAttribute. PiperOrigin-RevId: <I>
google_closure-compiler
train
3c4989327b72714fd7d51d53563132b60f7c91c9
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -88,6 +88,7 @@ MOCK_MODULES = [ 'tornado.httpserver', 'tornado.httputil', 'tornado.ioloop', + 'tornado.iostream', 'tornado.simple_httpclient', 'tornado.web', 'tornado.websocket',
added tornado.iostream to doc mock modules
saltstack_salt
train
9f4757b8c3002e963453c54b5983730a9fe43226
diff --git a/osmcha/changeset.py b/osmcha/changeset.py index <HASH>..<HASH> 100644 --- a/osmcha/changeset.py +++ b/osmcha/changeset.py @@ -20,6 +20,7 @@ def changeset_info(changeset): def get_bounds(changeset): + """Get the bounds of the changeset and return it as a MultiPoint object.""" return MultiPoint([ (float(changeset.get('min_lon')), float(changeset.get('min_lat'))), (float(changeset.get('max_lon')), float(changeset.get('max_lat'))) @@ -27,22 +28,28 @@ def get_bounds(changeset): def get_changeset(changeset): + """Get the changeset using OSM API and return the content as a XML + ElementTree. + """ url = 'http://www.openstreetmap.org/api/0.6/changeset/%s/download' % changeset return ET.fromstring(requests.get(url).content) def get_metadata(changeset): + """Get the metadata of the changeset using OSM API and return it as a XML + ElementTree. + """ url = 'http://www.openstreetmap.org/api/0.6/changeset/%s' % changeset return ET.fromstring(requests.get(url).content).getchildren()[0] class ChangesetList(object): - """Read replication changeset and return a list with information of all - changesets. + """Read replication changeset file and return a list with information about + all changesets. """ - def __init__(self, url, geojson=None): - self.read_file(url) + def __init__(self, changeset_file, geojson=None): + self.read_file(changeset_file) if geojson: self.get_area(geojson) self.filter() @@ -50,34 +57,44 @@ class ChangesetList(object): self.content = self.xml.getchildren() self.changesets = [changeset_info(ch) for ch in self.content] - def read_file(self, url): - """Download the replicate_changeset file or read it directly from the - filesystem (to test purposes).""" - if isfile(url): - self.filename = url + def read_file(self, changeset_file): + """Download the replication changeset file or read it directly from the + filesystem (to test purposes). + """ + if isfile(changeset_file): + self.filename = changeset_file else: self.path = mkdtemp() - self.filename = join(self.path, basename(url)) - download(url, self.path) + self.filename = join(self.path, basename(changeset_file)) + download(changeset_file, self.path) self.xml = ET.fromstring(gzip.open(self.filename).read()) def get_area(self, geojson): + """Read the first feature from the geojson and return it as a Polygon + object. + """ geojson = json.load(open(geojson, 'r')) self.area = Polygon(geojson['features'][0]['geometry']['coordinates'][0]) def filter(self): - self.content = [ch for ch in self.xml.getchildren() if get_bounds(ch).intersects(self.area)] + """Filter the changesets """ + self.content = [ + ch for ch in self.xml.getchildren() if get_bounds(ch).intersects(self.area) + ] class Analyse(object): - + """Analyse a changeset and define if it is suspect.""" def __init__(self, changeset): self.changeset = changeset self.reasons = [] self.verify_words() def verify_words(self): + """Verify the fields source and comment of the changeset for some + suspect words. + """ suspect_words = [ 'google', 'nokia', @@ -103,6 +120,9 @@ class Analyse(object): break def count(self): + """Count the number of elements created, modified and deleted by the + changeset. + """ xml = get_changeset(self.changeset.get('id')) actions = [action.tag for action in xml.getchildren()] return {
add docstrings to all functions and classes
willemarcel_osmcha
train
2a8a24b2bb73242f899ee21bbb13e8cfab16b4cd
diff --git a/library/Garp/Model/Db/Faker.php b/library/Garp/Model/Db/Faker.php index <HASH>..<HASH> 100644 --- a/library/Garp/Model/Db/Faker.php +++ b/library/Garp/Model/Db/Faker.php @@ -30,7 +30,8 @@ class Garp_Model_Db_Faker { */ public function createFakeRow(array $fieldConfiguration, array $defaultValues = array()) { // TODO For now, filter primary keys, assuming they will be auto-generated by the database. - $configWithoutPks = f\filter(not(array_get('primary')), $fieldConfiguration); + $exclude = f\either(f\prop('primary'), f\prop_equals('origin', 'relation')); + $configWithoutPks = f\filter(f\not($exclude), $fieldConfiguration); $self = $this; return f\reduce( function ($out, $field) use ($self) { @@ -146,3 +147,4 @@ class Garp_Model_Db_Faker { + diff --git a/tests/library/Garp/Model/Db/FakerTest.php b/tests/library/Garp/Model/Db/FakerTest.php index <HASH>..<HASH> 100644 --- a/tests/library/Garp/Model/Db/FakerTest.php +++ b/tests/library/Garp/Model/Db/FakerTest.php @@ -1,18 +1,34 @@ <?php +use Garp\Functional as f; + /** * @package Tests * @author Harmen Janssen <[email protected]> */ class Garp_Model_Db_FakerTest extends Garp_Test_PHPUnit_TestCase { + + /** @test */ + public function should_omit_primary_and_foreign_keys() { + $fieldConfig = $this->_getFieldConfig(); + + $faker = new Garp_Model_Db_Faker(); + $fakeRow = $faker->createFakeRow($fieldConfig, []); + + $this->assertFalse(array_key_exists('id', $fakeRow)); + $this->assertFalse(array_key_exists('user_id', $fakeRow)); + } + /** @test */ public function should_generate_a_random_row() { $fieldConfig = $this->_getFieldConfig(); - $expectedKeys = array_map(array_get('name'), $fieldConfig); + $isPrimaryOrForeign = f\either(f\equals('id'), f\equals('user_id')); + $expectedKeys = f\filter(f\not($isPrimaryOrForeign), f\map(f\prop('name'), $fieldConfig)); + $faker = new Garp_Model_Db_Faker(); $fakeRow = $faker->createFakeRow($fieldConfig); - $this->assertEquals( + $this->assertEqualsCanonicalized( $expectedKeys, array_keys($fakeRow) ); @@ -57,6 +73,7 @@ class Garp_Model_Db_FakerTest extends Garp_Test_PHPUnit_TestCase { protected function _getFieldConfig() { return array( + array('name' => 'id', 'type' => 'numeric', 'primary' => true), array('name' => 'name', 'type' => 'text'), array('name' => 'excerpt', 'type' => 'text', 'required' => false, 'maxLength' => 255), array('name' => 'body', 'type' => 'html'), @@ -68,6 +85,10 @@ class Garp_Model_Db_FakerTest extends Garp_Test_PHPUnit_TestCase { 'deleted', 'draft' ) + ), + array( + 'name' => 'user_id', 'type' => 'numeric', + 'origin' => 'relation', 'relationType' => 'hasOne' ) ); } @@ -81,3 +102,4 @@ class Garp_Model_Db_FakerTest extends Garp_Test_PHPUnit_TestCase { ); } } +
Make Faker ignore foreign keys in fake rows
grrr-amsterdam_garp3
train
f311af7fbd8a51e6376724d303f495b5accfd049
diff --git a/languagetool-core/src/main/java/org/languagetool/rules/ngrams/NgramProbabilityRule.java b/languagetool-core/src/main/java/org/languagetool/rules/ngrams/NgramProbabilityRule.java index <HASH>..<HASH> 100644 --- a/languagetool-core/src/main/java/org/languagetool/rules/ngrams/NgramProbabilityRule.java +++ b/languagetool-core/src/main/java/org/languagetool/rules/ngrams/NgramProbabilityRule.java @@ -190,6 +190,9 @@ public class NgramProbabilityRule extends Rule { Probability newProb = lm.getPseudoProbability(newNgram); if (newProb.getProb() * 1000000L > p.getProb()) { // TODO: this is a good factor - find the best one (3gram vs. 4gram) betterAlternatives.add(new Alternative(replacement, newProb)); + debug("More probable: %s\n", replacement); + } else { + debug("Less probable: %s\n", replacement); } alternativesConsidered = true; }
a bit more debugging output
languagetool-org_languagetool
train
6876d84f38efe529ab1f84918f6d48578eacd30e
diff --git a/src/sap.ui.core/src/jquery.sap.dom.js b/src/sap.ui.core/src/jquery.sap.dom.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.core/src/jquery.sap.dom.js +++ b/src/sap.ui.core/src/jquery.sap.dom.js @@ -701,7 +701,7 @@ sap.ui.define(['jquery.sap.global', 'sap/ui/Device'], if ( !element.href || !mapName || map.nodeName.toLowerCase() !== "map" ) { return false; } - img = jQuery( "img[usemap=#" + mapName + "]" )[0]; + img = jQuery( "img[usemap='#" + mapName + "']" )[0]; return !!img && visible( img ); } /*eslint-disable no-nested-ternary */
[INTERNAL][FIX] jquery.sap.dom: Fix for jQuery selector. For version <I> the selector logic was changed. Thus, when using the selectors with an ImageMap, jQuery reported a syntax error. This fix wraps the selector with single quotes, so jQuery can use it properly. Change-Id: I8f3f0b0deaab8c8f<I>c<I>f<I>ba<I>fd<I>
SAP_openui5
train
b08f1e316989ff7e84048c303afc977332788185
diff --git a/salmonella/widgets.py b/salmonella/widgets.py index <HASH>..<HASH> 100644 --- a/salmonella/widgets.py +++ b/salmonella/widgets.py @@ -4,12 +4,10 @@ from django.core.urlresolvers import reverse, NoReverseMatch from django.core.exceptions import ImproperlyConfigured from django.template.loader import render_to_string -import sys -version = sys.version_info[0] -if version >= 3: +try: from django.utils.encoding import force_text -else: - from django.utils.encoding import force_unicode +except ImportError: + from django.utils.encoding import force_unicode as force_text class SalmonellaImproperlyConfigured(ImproperlyConfigured): @@ -68,10 +66,7 @@ class SalmonellaMultiIdWidget(SalmonellaIdWidget): attrs = {} attrs['class'] = 'vManyToManyRawIdAdminField' if value: - if self.version >= 3: - value = ','.join([force_text(v) for v in value]) - else: - value = ','.join([force_unicode(v) for v in value]) + value = ','.join([force_text(v) for v in value]) else: value = '' return super(SalmonellaMultiIdWidget, self).render(name, value,
Python 3 support: Refactored code
lincolnloop_django-dynamic-raw-id
train
03ed67870d9e707fec319eb30c3cd732a846144d
diff --git a/src/Core.php b/src/Core.php index <HASH>..<HASH> 100644 --- a/src/Core.php +++ b/src/Core.php @@ -7,9 +7,9 @@ if(!defined('PEAK_VERSION')) define('PEAK_VERSION', '2.0.0'); /** - * relative_basepath() + * relativeBasepath() */ -if(!function_exists('relative_basepath')) { +if(!function_exists('relativeBasepath')) { /** * Get relativepath of specified dir from the server document root * @@ -25,6 +25,24 @@ if(!function_exists('relative_basepath')) { } /** + * relativePath() + */ +if(!function_exists('relativePath')) { + /** + * Get relative path of specified dir from the server document root + * + * @param string $dir + * @return string + */ + function relativePath($dir, $doc_root = null) { + if(!isset($doc_root)) { + $doc_root = (!isset($_SERVER['DOCUMENT_ROOT'])) ? '' : $_SERVER['DOCUMENT_ROOT']; + } + return substr(str_replace([$doc_root,$dir],'',str_replace('\\','/',$dir)), 0, -1); + } +} + +/** * __() */ if(!function_exists('__')) {
added relativePath and renamed relative_basepath to camelCase
peakphp_framework
train
58d85282b43f83974a2131563fafafaf99eed372
diff --git a/src/util/Util.js b/src/util/Util.js index <HASH>..<HASH> 100644 --- a/src/util/Util.js +++ b/src/util/Util.js @@ -125,7 +125,7 @@ class Util { if (!has(given, key) || given[key] === undefined) { given[key] = def[key]; } else if (given[key] === Object(given[key])) { - given[key] = this.mergeDefault(def[key], given[key]); + given[key] = Util.mergeDefault(def[key], given[key]); } } @@ -139,7 +139,7 @@ class Util { * @private */ static convertToBuffer(ab) { - if (typeof ab === 'string') ab = this.str2ab(ab); + if (typeof ab === 'string') ab = Util.str2ab(ab); return Buffer.from(ab); }
Fix Destructuring Errors in Util (#<I>) * Fix Destructuring * Fix another one
discordjs_discord.js
train
a86f586abb22923e750f7f9ba646802e86d1ae01
diff --git a/src/Symfony/Component/PropertyAccess/PropertyAccessor.php b/src/Symfony/Component/PropertyAccess/PropertyAccessor.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/PropertyAccess/PropertyAccessor.php +++ b/src/Symfony/Component/PropertyAccess/PropertyAccessor.php @@ -470,6 +470,10 @@ class PropertyAccessor implements PropertyAccessorInterface throw $e; } } elseif (PropertyReadInfo::TYPE_PROPERTY === $type) { + if (!method_exists($object, '__get') && !\array_key_exists($name, (array) $object)) { + throw new UninitializedPropertyException(sprintf('The property "%s::$%s" is not initialized.', $class, $name)); + } + $result[self::VALUE] = $object->$name; if (isset($zval[self::REF]) && $access->canBeReference()) { diff --git a/src/Symfony/Component/Serializer/Normalizer/AbstractObjectNormalizer.php b/src/Symfony/Component/Serializer/Normalizer/AbstractObjectNormalizer.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Serializer/Normalizer/AbstractObjectNormalizer.php +++ b/src/Symfony/Component/Serializer/Normalizer/AbstractObjectNormalizer.php @@ -123,10 +123,6 @@ abstract class AbstractObjectNormalizer extends AbstractNormalizer $this->defaultContext[self::EXCLUDE_FROM_CACHE_KEY] = array_merge($this->defaultContext[self::EXCLUDE_FROM_CACHE_KEY] ?? [], [self::CIRCULAR_REFERENCE_LIMIT_COUNTERS]); - if (\PHP_VERSION_ID >= 70400) { - $this->defaultContext[self::SKIP_UNINITIALIZED_VALUES] = true; - } - $this->propertyTypeExtractor = $propertyTypeExtractor; if (null === $classDiscriminatorResolver && null !== $classMetadataFactory) { @@ -194,12 +190,7 @@ abstract class AbstractObjectNormalizer extends AbstractNormalizer try { $attributeValue = $this->getAttributeValue($object, $attribute, $format, $attributeContext); } catch (UninitializedPropertyException $e) { - if ($this->shouldSkipUninitializedValues($context)) { - continue; - } - throw $e; - } catch (\Error $e) { - if ($this->shouldSkipUninitializedValues($context) && $this->isUninitializedValueError($e)) { + if ($context[self::SKIP_UNINITIALIZED_VALUES] ?? $this->defaultContext[self::SKIP_UNINITIALIZED_VALUES] ?? true) { continue; } throw $e; @@ -733,22 +724,4 @@ abstract class AbstractObjectNormalizer extends AbstractNormalizer return false; } } - - private function shouldSkipUninitializedValues(array $context): bool - { - return $context[self::SKIP_UNINITIALIZED_VALUES] - ?? $this->defaultContext[self::SKIP_UNINITIALIZED_VALUES] - ?? false; - } - - /** - * This error may occur when specific object normalizer implementation gets attribute value - * by accessing a public uninitialized property or by calling a method accessing such property. - */ - private function isUninitializedValueError(\Error $e): bool - { - return \PHP_VERSION_ID >= 70400 - && str_starts_with($e->getMessage(), 'Typed property') - && str_ends_with($e->getMessage(), 'must not be accessed before initialization'); - } } diff --git a/src/Symfony/Component/Serializer/Normalizer/PropertyNormalizer.php b/src/Symfony/Component/Serializer/Normalizer/PropertyNormalizer.php index <HASH>..<HASH> 100644 --- a/src/Symfony/Component/Serializer/Normalizer/PropertyNormalizer.php +++ b/src/Symfony/Component/Serializer/Normalizer/PropertyNormalizer.php @@ -11,6 +11,8 @@ namespace Symfony\Component\Serializer\Normalizer; +use Symfony\Component\PropertyAccess\Exception\UninitializedPropertyException; + /** * Converts between objects and arrays by mapping properties. 
* @@ -131,6 +133,17 @@ class PropertyNormalizer extends AbstractObjectNormalizer $reflectionProperty->setAccessible(true); } + if (!method_exists($object, '__get')) { + $propertyValues = (array) $object; + + if (($reflectionProperty->isPublic() && !\array_key_exists($reflectionProperty->name, $propertyValues)) + || ($reflectionProperty->isProtected() && !\array_key_exists("\0*\0{$reflectionProperty->name}", $propertyValues)) + || ($reflectionProperty->isPrivate() && !\array_key_exists("\0{$reflectionProperty->class}\0{$reflectionProperty->name}", $propertyValues)) + ) { + throw new UninitializedPropertyException(sprintf('The property "%s::$%s" is not initialized.', $reflectionProperty->class, $reflectionProperty->name)); + } + } + return $reflectionProperty->getValue($object); }
[Serializer] fix reading unset properties
symfony_symfony
train
c3a514fd8158adf4cc5e89d0eb7e2ffb20422755
diff --git a/phonopy/interface/vasp.py b/phonopy/interface/vasp.py index <HASH>..<HASH> 100644 --- a/phonopy/interface/vasp.py +++ b/phonopy/interface/vasp.py @@ -487,6 +487,14 @@ def symmetrize_borns(borns, sys.stderr.write( "Born effective charge symmetrization might go wrong.\n") + sys.stderr.write("Sum of Born charges:\n") + sys.stderr.write(str(borns_orig.sum(axis=0))) + sys.stderr.write("\n") + + # for b_o, b in zip(borns_orig, borns): + # sys.stderr.write(str(b - b_o)) + # sys.stderr.write("\n") + return borns def symmetrize_2nd_rank_tensor(tensor, symmetry_operations, lattice):
Show sum of Born charges if symmetrization didn't go well.
atztogo_phonopy
train
6847fa4e4e05bfd5b2cbd94d0aff21d7b0063291
diff --git a/lib/ronin/formatting/extensions/html/string.rb b/lib/ronin/formatting/extensions/html/string.rb index <HASH>..<HASH> 100644 --- a/lib/ronin/formatting/extensions/html/string.rb +++ b/lib/ronin/formatting/extensions/html/string.rb @@ -25,6 +25,7 @@ require 'ronin/formatting/extensions/text' require 'scanf' require 'cgi' +require 'hpricot' class String @@ -61,4 +62,14 @@ class String format_bytes(options) { |c| sprintf("&#%d;",c) } end + # + # Returns the inner text of the String. + # + # "This page is <b>restricted</b>.".strip_html + # # => "This page is restricted." + # + def strip_html + Hpricot(self).inner_text + end + end
* Added String#strip_html.
ronin-ruby_ronin
train
cc2f9ba57d338dd02a460ce5a0263e10773a7930
diff --git a/lib/eventStream.js b/lib/eventStream.js index <HASH>..<HASH> 100644 --- a/lib/eventStream.js +++ b/lib/eventStream.js @@ -44,8 +44,8 @@ function EventStream (eventstore, query, events) { throw new Error(errEvtsArrMsg); } - for (var e in events) { - var evt = events[e]; + for (var i = 0, len = events.length; i < len; i++) { + var evt = events[i]; if (evt.streamRevision === undefined || evt.streamRevision === null) { var errEvtMsg = 'The events passed should all have a streamRevision!'; debug(errEvtMsg);
Handled the exception described in issue #<I> where events is [] (#<I>)
adrai_node-eventstore
train
7fbb49b5982b6bfd1a30963202b5a571ed46e37e
diff --git a/core/src/main/java/com/capitalone/dashboard/event/BuildEventListener.java b/core/src/main/java/com/capitalone/dashboard/event/BuildEventListener.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/capitalone/dashboard/event/BuildEventListener.java +++ b/core/src/main/java/com/capitalone/dashboard/event/BuildEventListener.java @@ -30,8 +30,19 @@ public class BuildEventListener extends HygieiaMongoEventListener<Build> { if(build.getBuildStatus().equals(BuildStatus.Success)){ processBuild(event.getSource()); } + else{ + processFailedBuild(event.getSource()); + } } + private void processFailedBuild(Build failedBuild){ + List<Dashboard> teamDashboardsReferencingBuild = findAllDashboardsForBuild(failedBuild); + for(Dashboard teamDashboard : teamDashboardsReferencingBuild){ + Pipeline pipeline = getOrCreatePipeline(teamDashboard); + pipeline.addFailedBuild(failedBuild); + pipelineRepository.save(pipeline); + } + } private void processBuild(Build build){ List<Dashboard> teamDashboardsReferencingBuild = findAllDashboardsForBuild(build); @@ -44,10 +55,22 @@ public class BuildEventListener extends HygieiaMongoEventListener<Build> { commit.addNewPipelineProcessedTimestamp(PipelineStageType.Build.name(), build.getTimestamp()); pipeline.addCommit(PipelineStageType.Build.name(), commit); } + processPreviousFailedBuilds(build, pipeline); pipelineRepository.save(pipeline); } } + private void processPreviousFailedBuilds(Build successfulBuild, Pipeline pipeline){ + for(Build b : pipeline.getFailedBuilds()){ + if(b.getBuildUrl().equals(successfulBuild.getBuildUrl()) && b.getCollectorItemId().equals(successfulBuild.getCollectorItemId())){ + for(SCM scm : b.getSourceChangeSet()){ + PipelineCommit failedBuildCommit = new PipelineCommit(scm); + failedBuildCommit.addNewPipelineProcessedTimestamp(PipelineStageType.Build.name(), successfulBuild.getTimestamp()); + pipeline.addCommit(PipelineStageType.Build.name(), failedBuildCommit); + } + } + } + } /** * Finds all of the dashboards for a given build way of the build by: * 1. Get collector item id for the build diff --git a/core/src/main/java/com/capitalone/dashboard/model/Pipeline.java b/core/src/main/java/com/capitalone/dashboard/model/Pipeline.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/com/capitalone/dashboard/model/Pipeline.java +++ b/core/src/main/java/com/capitalone/dashboard/model/Pipeline.java @@ -4,7 +4,9 @@ import org.bson.types.ObjectId; import org.springframework.data.mongodb.core.mapping.Document; import java.util.HashMap; +import java.util.HashSet; import java.util.Map; +import java.util.Set; /** * Document containing the details of a Pipeline for a TeamDashboardCollectorItem @@ -17,6 +19,10 @@ public class Pipeline extends BaseModel{ /** Map of environment name and stage object*/ private Map<String, EnvironmentStage> stages = new HashMap<>(); + /**not including this in the map above because the enum allows us to + * use ordinals to iterate through pipeline progression*/ + private Set<Build> failedBuilds = new HashSet<>(); + public ObjectId getCollectorItemId() { return collectorItemId; } @@ -40,4 +46,15 @@ public class Pipeline extends BaseModel{ this.getStages().get(stage).getCommits().add(commit); } + public Set<Build> getFailedBuilds() { + return failedBuilds; + } + + public void setFailedBuilds(Set<Build> failedBuilds) { + this.failedBuilds = failedBuilds; + } + + public void addFailedBuild(Build failedBuild){ + this.getFailedBuilds().add(failedBuild); + } }
Failed build processing feature completed. It adds a bucket as a property on the pipeline; didn't want to modify the existing map on the pipeline or the enum to handle this special case...
Hygieia_Hygieia
train
5bd8356439c2da6c375024b9e6e59d375f068cb1
diff --git a/src/main/java/me/prettyprint/cassandra/model/Mutator.java b/src/main/java/me/prettyprint/cassandra/model/Mutator.java index <HASH>..<HASH> 100644 --- a/src/main/java/me/prettyprint/cassandra/model/Mutator.java +++ b/src/main/java/me/prettyprint/cassandra/model/Mutator.java @@ -88,10 +88,14 @@ public final class Mutator<K> { return this; } + /** + * Adds a Deletion to the underlying batch_mutate call. The columnName argument can be null + * in which case Deletion is created with only the Clock, resulting in the whole row being deleted + */ public <N> Mutator<K> addDeletion(K key, String cf, N columnName, Serializer<N> nameSerializer) { SlicePredicate sp = new SlicePredicate(); sp.addToColumn_names(nameSerializer.toBytes(columnName)); - Deletion d = new Deletion(ko.createClock()).setPredicate(sp); + Deletion d = columnName != null ? new Deletion(ko.createClock()).setPredicate(sp) : new Deletion(ko.createClock()); getPendingMutations().addDeletion(key, Arrays.asList(cf), d); return this; } diff --git a/src/test/java/me/prettyprint/cassandra/model/MutatorTest.java b/src/test/java/me/prettyprint/cassandra/model/MutatorTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/me/prettyprint/cassandra/model/MutatorTest.java +++ b/src/test/java/me/prettyprint/cassandra/model/MutatorTest.java @@ -96,6 +96,22 @@ public class MutatorTest extends BaseEmbededServerSetupTest { } m.execute(); } + + @Test + public void testRowDeletion() { + String cf = "Standard1"; + + Mutator<String> m = createMutator(keyspaceOperator, se); + for (int i = 0; i < 5; i++) { + m.addInsertion("k" + i, cf, createColumn("name", "value" + i, se, se)); + } + MutationResult r = m.execute(); + + for (int i = 0; i < 5; i++) { + m.addDeletion("k" + i, cf, null, se); + } + m.execute(); + } private void assertColumnExists(String keyspace, String cf, String key, String column) { ColumnPath cp = new ColumnPath(cf);
added null option on column name for easier row deletion in mutator
hector-client_hector
train
0ac3ef4cf3a2ff99c644b49c4528f93290d13aca
diff --git a/bin/gandalf.go b/bin/gandalf.go index <HASH>..<HASH> 100644 --- a/bin/gandalf.go +++ b/bin/gandalf.go @@ -167,16 +167,19 @@ func main() { var err error log, err = syslog.New(syslog.LOG_INFO, "gandalf-listener") if err != nil { + fmt.Fprintln(os.Stderr, err.Error()) panic(err.Error()) } err = config.ReadConfigFile("/etc/gandalf.conf") if err != nil { log.Err(err.Error()) + fmt.Fprintln(os.Stderr, err.Error()) return } err = validateCmd() if err != nil { log.Err(err.Error()) + fmt.Fprintln(os.Stderr, err.Error()) return } a := action()
bin: printing error messages in main to stderr
tsuru_gandalf
train
ca15aac9308fde3a62bc0312d1266c2ed4f149af
diff --git a/core/src/main/java/smile/classification/RandomForest.java b/core/src/main/java/smile/classification/RandomForest.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/smile/classification/RandomForest.java +++ b/core/src/main/java/smile/classification/RandomForest.java @@ -481,7 +481,7 @@ public class RandomForest implements SoftClassifier<double[]>, Serializable { * sampling without replacement. */ public RandomForest(Attribute[] attributes, double[][] x, int[] y, int ntrees, int maxNodes, int nodeSize, int mtry, double subsample) { - this(attributes, x, y, ntrees, 100, 5, mtry, subsample, DecisionTree.SplitRule.GINI); + this(attributes, x, y, ntrees, maxNodes, nodeSize, mtry, subsample, DecisionTree.SplitRule.GINI); } /**
fix the bug in the RandomForest constructor
haifengl_smile
train
0967f8c5a10b86893c80d4a23dd2dbade1ca0a7a
diff --git a/src/shims/form-number-date-ui.js b/src/shims/form-number-date-ui.js index <HASH>..<HASH> 100644 --- a/src/shims/form-number-date-ui.js +++ b/src/shims/form-number-date-ui.js @@ -402,6 +402,8 @@ webshims.register('form-number-date-ui', function($, webshims, window, document, fVal[0] = tmp; } val = this.date(fVal[0], o) +'T'+ this.time(fVal[1], o); + } else if (fVal.length == 3) { + val = this.date(fVal[0], o) +'T'+ this.time(fVal[1]+fVal[2], o); } return val; },
fix meridian parsing for datetime-local ( fixes issue #<I> | thx @terinjokes )
aFarkas_webshim
train
7429a159f456a229b6d76ca52ca850e399f90ec5
diff --git a/sysfs/digital_pin.go b/sysfs/digital_pin.go index <HASH>..<HASH> 100644 --- a/sysfs/digital_pin.go +++ b/sysfs/digital_pin.go @@ -22,21 +22,23 @@ const ( GPIOPATH = "/sys/class/gpio" ) -// DigitalPin is the interface for sysfs gpio interactions -type DigitalPin interface { - // Unexport unexports the pin and releases the pin from the operating system - Unexport() error +var errNotExported = errors.New("pin has not been exported") + +// DigitalPinner is the interface for sysfs gpio interactions +type DigitalPinner interface { // Export exports the pin for use by the operating system Export() error - // Read reads the current value of the pin - Read() (int, error) + // Unexport unexports the pin and releases the pin from the operating system + Unexport() error // Direction sets the direction for the pin Direction(string) error + // Read reads the current value of the pin + Read() (int, error) // Write writes to the pin Write(int) error } -type digitalPin struct { +type DigitalPin struct { pin string label string @@ -47,8 +49,8 @@ type digitalPin struct { // NewDigitalPin returns a DigitalPin given the pin number and an optional sysfs pin label. // If no label is supplied the default label will prepend "gpio" to the pin number, // eg. a pin number of 10 will have a label of "gpio10" -func NewDigitalPin(pin int, v ...string) DigitalPin { - d := &digitalPin{pin: strconv.Itoa(pin)} +func NewDigitalPin(pin int, v ...string) *DigitalPin { + d := &DigitalPin{pin: strconv.Itoa(pin)} if len(v) > 0 { d.label = v[0] } else { @@ -58,19 +60,17 @@ func NewDigitalPin(pin int, v ...string) DigitalPin { return d } -var errNotExported = errors.New("pin has not been exported") - -func (d *digitalPin) Direction(dir string) error { +func (d *DigitalPin) Direction(dir string) error { _, err := writeFile(d.direction, []byte(dir)) return err } -func (d *digitalPin) Write(b int) error { +func (d *DigitalPin) Write(b int) error { _, err := writeFile(d.value, []byte(strconv.Itoa(b))) return err } -func (d *digitalPin) Read() (n int, err error) { +func (d *DigitalPin) Read() (n int, err error) { buf, err := readFile(d.value) if err != nil { return 0, err @@ -78,7 +78,7 @@ func (d *digitalPin) Read() (n int, err error) { return strconv.Atoi(string(buf[0])) } -func (d *digitalPin) Export() error { +func (d *DigitalPin) Export() error { export, err := fs.OpenFile(GPIOPATH+"/export", os.O_WRONLY, 0644) if err != nil { return err @@ -127,7 +127,7 @@ func (d *digitalPin) Export() error { return err } -func (d *digitalPin) Unexport() error { +func (d *DigitalPin) Unexport() error { unexport, err := fs.OpenFile(GPIOPATH+"/unexport", os.O_WRONLY, 0644) if err != nil { return err diff --git a/sysfs/digital_pin_test.go b/sysfs/digital_pin_test.go index <HASH>..<HASH> 100644 --- a/sysfs/digital_pin_test.go +++ b/sysfs/digital_pin_test.go @@ -19,11 +19,11 @@ func TestDigitalPin(t *testing.T) { SetFilesystem(fs) - pin := NewDigitalPin(10, "custom").(*digitalPin) + pin := NewDigitalPin(10, "custom") gobottest.Assert(t, pin.pin, "10") gobottest.Assert(t, pin.label, "custom") - pin = NewDigitalPin(10).(*digitalPin) + pin = NewDigitalPin(10) gobottest.Assert(t, pin.pin, "10") gobottest.Assert(t, pin.label, "gpio10") gobottest.Assert(t, pin.value, nil) @@ -95,7 +95,7 @@ func TestDigitalPinExportError(t *testing.T) { SetFilesystem(fs) - pin := NewDigitalPin(10, "custom").(*digitalPin) + pin := NewDigitalPin(10, "custom") writeFile = func(File, []byte) (int, error) { return 0, &os.PathError{Err: syscall.EBUSY} } @@ -114,7 
+114,7 @@ func TestDigitalPinUnexportError(t *testing.T) { SetFilesystem(fs) - pin := NewDigitalPin(10, "custom").(*digitalPin) + pin := NewDigitalPin(10, "custom") writeFile = func(File, []byte) (int, error) { return 0, &os.PathError{Err: syscall.EBUSY} }
sysfs: export DigitalPin to make it more like PWMPin
hybridgroup_gobot
train
f99518dad2731d2f2a49208334a8e9085a484fcb
diff --git a/lib/phusion_passenger/config/reopen_logs_command.rb b/lib/phusion_passenger/config/reopen_logs_command.rb index <HASH>..<HASH> 100644 --- a/lib/phusion_passenger/config/reopen_logs_command.rb +++ b/lib/phusion_passenger/config/reopen_logs_command.rb @@ -1,5 +1,5 @@ # Phusion Passenger - https://www.phusionpassenger.com/ -# Copyright (c) 2014 Phusion +# Copyright (c) 2014-2015 Phusion # # "Phusion Passenger" is a trademark of Hongli Lai & Ninh Bui. # @@ -49,7 +49,7 @@ module PhusionPassenger opts.banner = "Usage: passenger-config reopen-logs [OPTIONS]\n" opts.separator "" opts.separator " Instruct #{PROGRAM_NAME} agent processes to reopen their log files. This" - opts.separator " should be involved after you've rotated logs. This command returns after" + opts.separator " should be invoked after you've rotated log files. This command returns after" opts.separator " the log files have been reopened." opts.separator ""
Fix a typo in passenger-config reopen-logs help message
phusion_passenger
train
0ff4fd87443678a5b121f250f7928017b4fd3c5e
diff --git a/platform/kubernetes/kubernetes.go b/platform/kubernetes/kubernetes.go index <HASH>..<HASH> 100644 --- a/platform/kubernetes/kubernetes.go +++ b/platform/kubernetes/kubernetes.go @@ -8,6 +8,7 @@ import ( "bytes" "errors" "fmt" + "net/http" "os" "os/exec" "strings" @@ -15,6 +16,7 @@ import ( "github.com/go-kit/kit/log" "k8s.io/kubernetes/pkg/api" + k8serrors "k8s.io/kubernetes/pkg/api/errors" "k8s.io/kubernetes/pkg/client/restclient" k8sclient "k8s.io/kubernetes/pkg/client/unversioned" @@ -68,6 +70,18 @@ func NewCluster(config *restclient.Config, kubectl string, logger log.Logger) (* }, nil } +// Service returns the platform.Service representation of the named service. +func (c *Cluster) Service(namespace, service string) (platform.Service, error) { + apiService, err := c.service(namespace, service) + if err != nil { + if statusErr, ok := err.(*k8serrors.StatusError); ok && statusErr.ErrStatus.Code == http.StatusNotFound { // le sigh + return platform.Service{}, ErrNoMatchingService + } + return platform.Service{}, err + } + return c.makePlatformService(apiService), nil +} + // Services returns the set of services currently active on the platform in the // given namespace. Maybe it makes sense to move the namespace to the // constructor? Depends on how it will be used. For now it is here. @@ -84,6 +98,17 @@ func (c *Cluster) Services(namespace string) ([]platform.Service, error) { return c.makePlatformServices(apiServices), nil } +func (c *Cluster) service(namespace, service string) (res api.Service, err error) { + defer func() { + c.logger.Log("method", "service", "namespace", namespace, "service", service, "err", err) + }() + apiService, err := c.client.Services(namespace).Get(service) + if err != nil { + return api.Service{}, err + } + return *apiService, nil +} + func (c *Cluster) services(namespace string) (res []api.Service, err error) { defer func() { c.logger.Log("method", "services", "namespace", namespace, "count", len(res), "err", err)
platform/kubernetes: add single Service lookup method
weaveworks_flux
train
c6f568f99bd2f4e753cb42199d2767722e9934d4
diff --git a/src/ol/map.js b/src/ol/map.js index <HASH>..<HASH> 100644 --- a/src/ol/map.js +++ b/src/ol/map.js @@ -1064,7 +1064,12 @@ ol.Map.prototype.handleTargetChanged_ = function() { // If it's not now an Element we remove the viewport from the DOM. // If it's an Element we append the viewport element to it. - var targetElement = this.getTargetElement(); + var targetElement; + if (this.getTarget()) { + targetElement = this.getTargetElement(); + goog.asserts.assert(targetElement !== null, + 'expects a non-null value for targetElement'); + } if (this.keyHandlerKeys_) { for (var i = 0, ii = this.keyHandlerKeys_.length; i < ii; ++i) {
add an assertion to check that there's a DOM node for the target element
openlayers_openlayers
train
bd27f3332a98c686bf022203e31c51de575beca5
diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -29,11 +29,11 @@ "babel-preset-react": "^6.11.1", "babel-preset-stage-0": "^6.5.0", "chai": "^3.5.0", - "eslint": "^3.9.1", - "eslint-config-airbnb": "^13.0.0", + "eslint": "^3.13.0", + "eslint-config-airbnb": "^14.0.0", "eslint-plugin-import": "^2.2.0", - "eslint-plugin-jsx-a11y": "^2.2.3", - "eslint-plugin-react": "^6.6.0", + "eslint-plugin-jsx-a11y": "^3.0.2", + "eslint-plugin-react": "^6.9.0", "gulp": "^3.9.1", "gulp-eslint": "^3.0.1", "gulp-header": "^1.8.8", diff --git a/src/js/Tree.js b/src/js/Tree.js index <HASH>..<HASH> 100644 --- a/src/js/Tree.js +++ b/src/js/Tree.js @@ -12,6 +12,9 @@ class Tree extends React.Component { }; static defaultProps = { + name: undefined, + nameAsArray: false, + nodes: [], checked: [], expanded: [], }; @@ -137,12 +140,13 @@ class Tree extends React.Component { renderTreeNodes(nodes) { const treeNodes = nodes.map((node, index) => { + const key = `${index}-${node.value}`; const checked = this.getCheckState(node); const children = this.renderChildNodes(node); return ( <TreeNode - key={index} + key={key} value={node.value} title={node.title} checked={checked} @@ -185,9 +189,10 @@ class Tree extends React.Component { renderArrayHiddenInput() { return this.state.checked.map((value, index) => { + const key = index; const name = `${this.props.name}[]`; - return <input key={index} name={name} type="hidden" value={value} />; + return <input key={key} name={name} type="hidden" value={value} />; }); } diff --git a/src/js/TreeNode.js b/src/js/TreeNode.js index <HASH>..<HASH> 100644 --- a/src/js/TreeNode.js +++ b/src/js/TreeNode.js @@ -2,14 +2,14 @@ import React from 'react'; class TreeNode extends React.Component { static propTypes = { - children: React.PropTypes.node, - checked: React.PropTypes.number, - expanded: React.PropTypes.bool, - rawChildren: React.PropTypes.any, - onCheck: React.PropTypes.func, - onExpand: React.PropTypes.func, - title: React.PropTypes.string, - value: React.PropTypes.string, + children: React.PropTypes.node.isRequired, + checked: React.PropTypes.number.isRequired, + expanded: React.PropTypes.bool.isRequired, + rawChildren: React.PropTypes.any.isRequired, + onCheck: React.PropTypes.func.isRequired, + onExpand: React.PropTypes.func.isRequired, + title: React.PropTypes.string.isRequired, + value: React.PropTypes.string.isRequired, }; constructor(props) {
Update and comply with latest linting rules
jakezatecky_react-checkbox-tree
train