hash: stringlengths 40 to 40
diff: stringlengths 131 to 114k
message: stringlengths 7 to 980
project: stringlengths 5 to 67
split: stringclasses, 1 value
262b641435fe3b93fc2c0aad5153951e084c2ce6
diff --git a/eZ/Publish/Core/REST/Server/index.php b/eZ/Publish/Core/REST/Server/index.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/Server/index.php +++ b/eZ/Publish/Core/REST/Server/index.php @@ -104,7 +104,8 @@ $parserTools = new Common\Input\ParserTools(); $fieldTypeParser = new Common\Input\FieldTypeParser( $repository->getContentService(), $repository->getContentTypeService(), - $repository->getFieldTypeService() + $repository->getFieldTypeService(), + new Common\FieldTypeProcessorRegistry() ); /* diff --git a/eZ/Publish/Core/REST/common.php b/eZ/Publish/Core/REST/common.php index <HASH>..<HASH> 100644 --- a/eZ/Publish/Core/REST/common.php +++ b/eZ/Publish/Core/REST/common.php @@ -129,7 +129,8 @@ $parserTools = new Common\Input\ParserTools(); $fieldTypeParser = new Common\Input\FieldTypeParser( $repository->getContentService(), $repository->getContentTypeService(), - $repository->getFieldTypeService() + $repository->getFieldTypeService(), + new Common\FieldTypeProcessorRegistry() ); // The parsing dispatcher configures which parsers are used for which
REST: Fix FieldTypeParser not receiving FieldTypeProcessorRegistry on instantiation
ezsystems_ezpublish-kernel
train
59ca7ad80af3faf4f87f4d82ff02f5d390c08ed6
diff --git a/internal/measurement/measurement.go b/internal/measurement/measurement.go index <HASH>..<HASH> 100644 --- a/internal/measurement/measurement.go +++ b/internal/measurement/measurement.go @@ -110,10 +110,15 @@ func compatibleValueTypes(v1, v2 *profile.ValueType) bool { return false } - return v1.Unit == v2.Unit || - (timeUnits.sniffUnit(v1.Unit) != nil && timeUnits.sniffUnit(v2.Unit) != nil) || - (memoryUnits.sniffUnit(v1.Unit) != nil && memoryUnits.sniffUnit(v2.Unit) != nil) || - (gcuUnits.sniffUnit(v1.Unit) != nil && gcuUnits.sniffUnit(v2.Unit) != nil) + if v1.Unit == v2.Unit { + return true + } + for _, ut := range unitTypes { + if ut.sniffUnit(v1.Unit) != nil && ut.sniffUnit(v2.Unit) != nil { + return true + } + } + return false } // Scale a measurement from an unit to a different unit and returns @@ -125,14 +130,10 @@ func Scale(value int64, fromUnit, toUnit string) (float64, string) { v, u := Scale(-value, fromUnit, toUnit) return -v, u } - if m, u, ok := memoryUnits.convertUnit(value, fromUnit, toUnit); ok { - return m, u - } - if t, u, ok := timeUnits.convertUnit(value, fromUnit, toUnit); ok { - return t, u - } - if g, u, ok := gcuUnits.convertUnit(value, fromUnit, toUnit); ok { - return g, u + for _, ut := range unitTypes { + if v, u, ok := ut.convertUnit(value, fromUnit, toUnit); ok { + return v, u + } } // Skip non-interesting units. switch toUnit { @@ -257,7 +258,7 @@ func (ut unitType) convertUnit(value int64, fromUnitStr, toUnitStr string) (floa return v / toUnit.factor, toUnit.canonicalName, true } -var memoryUnits = unitType{ +var unitTypes = []unitType{{ units: []unit{ {"B", []string{"b", "byte"}, 1}, {"kB", []string{"kb", "kbyte", "kilobyte"}, float64(1 << 10)}, @@ -267,9 +268,7 @@ var memoryUnits = unitType{ {"PB", []string{"pb", "pbyte", "petabyte"}, float64(1 << 50)}, }, defaultUnit: unit{"B", []string{"b", "byte"}, 1}, -} - -var timeUnits = unitType{ +}, { units: []unit{ {"ns", []string{"ns", "nanosecond"}, float64(time.Nanosecond)}, {"us", []string{"μs", "us", "microsecond"}, float64(time.Microsecond)}, @@ -278,9 +277,7 @@ var timeUnits = unitType{ {"hrs", []string{"hour", "hr"}, float64(time.Hour)}, }, defaultUnit: unit{"s", []string{}, float64(time.Second)}, -} - -var gcuUnits = unitType{ +}, { units: []unit{ {"n*GCU", []string{"nanogcu"}, 1e-9}, {"u*GCU", []string{"microgcu"}, 1e-6}, @@ -293,4 +290,4 @@ var gcuUnits = unitType{ {"P*GCU", []string{"petagcu"}, 1e15}, }, defaultUnit: unit{"GCU", []string{}, 1.0}, -} +}}
Generalize the unit support in pprof a bit further. (#<I>) Make unit types a list. When and if needed, the next step can be making it possible for select pprof clients (such as the internal Google one) to append additional types.
google_pprof
train
97c0a543540608222bb1886ff29a9bcd55ce4323
diff --git a/locale/he.js b/locale/he.js index <HASH>..<HASH> 100644 --- a/locale/he.js +++ b/locale/he.js @@ -1,13 +1,13 @@ $.fullCalendar.locale("he", { - defaultButtonText: { + buttonText: { month: "חודש", week: "שבוע", day: "יום", list: "סדר יום" }, - weekNumberTitle: "שבוע", allDayText: "כל היום", eventLimitText: "אחר", - noEventsMessage: "אין אירועים להצגה" + noEventsMessage: "אין אירועים להצגה", + weekNumberTitle: "שבוע" }); diff --git a/locale/ug-cn.js b/locale/ug-cn.js index <HASH>..<HASH> 100644 --- a/locale/ug-cn.js +++ b/locale/ug-cn.js @@ -1,6 +1,6 @@ $.fullCalendar.locale("ug-cn", { - defaultButtonText: { + buttonText: { month: "ئاي", week: "ھەپتە", day: "كۈن",
fixup ug-cn and he
fullcalendar_fullcalendar
train
b902324f42a5e984dce2995ce1219756ce7c84fc
diff --git a/share/views/public/js/_ajax_replace.js b/share/views/public/js/_ajax_replace.js index <HASH>..<HASH> 100644 --- a/share/views/public/js/_ajax_replace.js +++ b/share/views/public/js/_ajax_replace.js @@ -77,18 +77,28 @@ function capture_embedded_form(object){ var embedded = object; var params = ""; + console.log(params) - form.find('input').not('[type=submit]').not('[type=radio]').each(function(){ + form.find('input').not('[type=submit]').not('[type=radio]').not('[type=checkbox]').each(function(){ var input = $(this) if (params.length > 0){ params += '&'} params += input.attr('name') + "=" + input.val(); }) + console.log(params) + + form.find('input[type=checkbox]:checked').each(function(){ + var input = $(this) + if (params.length > 0){ params += '&'} + params += input.attr('name') + "=" + input.val(); + }) + console.log(params) form.find('input[type=radio]:checked').each(function(){ var input = $(this) if (params.length > 0){ params += '&'} params += input.attr('name') + "=" + input.val(); }) + console.log(params) form.find('select').not('[type=submit]').each(function(){ var select = $(this) @@ -96,17 +106,19 @@ function capture_embedded_form(object){ if (params.length > 0){ params += '&'} params += select.attr('name') + "=" + option.val(); }) + console.log(params) form.find('textarea').each(function(){ var input = $(this) if (params.length > 0){ params += '&'} params += input.attr('name') + "=" + escape(input.val()); }) + console.log(params) - params = params var url = embedded.attr('target-href'); + console.log(params) if (url.indexOf('?') == -1){ url = url + '?' + params; }else{
Fixed bug in processing checkboxes in embedded forms
mikisvaz_rbbt-rest
train
db66a331ffe9c36c65f241fc7591ce054d996388
diff --git a/lib/systemd/journal.rb b/lib/systemd/journal.rb index <HASH>..<HASH> 100644 --- a/lib/systemd/journal.rb +++ b/lib/systemd/journal.rb @@ -52,6 +52,7 @@ module Systemd raise JournalError, rc if rc < 0 @ptr = ptr.read_pointer + file_descriptor ObjectSpace.define_finalizer(self, self.class.finalize(@ptr)) end
Ensure `sd_journal_get_fd` is called directly after `sd_journal_open`. This ensures that inotify properly tracks journal rotation, fixing #<I>, and should also guard against the issues in <URL>
ledbettj_systemd-journal
train
b8d8a02156fd97fbfd6c341371aa08075493a3bc
diff --git a/lib/appsignal.rb b/lib/appsignal.rb index <HASH>..<HASH> 100644 --- a/lib/appsignal.rb +++ b/lib/appsignal.rb @@ -27,11 +27,11 @@ module Appsignal Appsignal.agent env = ENV.to_hash - Appsignal::Transaction.create(SecureRandom.uuid, env) - Appsignal::Transaction.current.add_exception( + transaction = Appsignal::Transaction.create(SecureRandom.uuid, env) + transaction.add_exception( Appsignal::ExceptionNotification.new(env, exception, false) ) - Appsignal::Transaction.current.complete! + transaction.complete! Appsignal.agent.send_queue end end diff --git a/spec/appsignal_spec.rb b/spec/appsignal_spec.rb index <HASH>..<HASH> 100644 --- a/spec/appsignal_spec.rb +++ b/spec/appsignal_spec.rb @@ -100,23 +100,17 @@ describe Appsignal do describe ".send_exception" do it "should raise exception" do agent = mock - Appsignal.should_receive(:agent).twice.and_return(agent) + Appsignal.should_receive(:agent).exactly(3).times.and_return(agent) agent.should_receive(:send_queue) + agent.should_receive(:enqueue).with(kind_of(Appsignal::Transaction)) - current = mock Appsignal::Transaction.should_receive(:create).and_call_original - Appsignal::Transaction.should_receive(:current).twice.and_return(current) - current.should_receive(:add_exception). - with(kind_of(Appsignal::ExceptionNotification)) - current.should_receive(:complete!) - expect { - begin - raise "I am an exception" - rescue Exception => e - Appsignal.send_exception(e) - end - }.to_not raise_error + begin + raise "I am an exception" + rescue Exception => e + Appsignal.send_exception(e) + end end end
Don't rely on Transaction.current; we already know the transaction being created
appsignal_appsignal-ruby
train
89cb4b1c9627778b41188bdb3fbcf29c904ab89f
diff --git a/gtk/gtk.go b/gtk/gtk.go index <HASH>..<HASH> 100644 --- a/gtk/gtk.go +++ b/gtk/gtk.go @@ -522,7 +522,7 @@ const ( // TODO: // GTK_INPUT_HINT_VERTICAL_WRITING Since 3.18 -// GTK_INPUT_HINT_EMOJI Since 3.22.20 +// GTK_INPUT_HINT_EMOJI Since 3.22.20 // GTK_INPUT_HINT_NO_EMOJI Since 3.22.20 func marshalInputHints(p uintptr) (interface{}, error) { @@ -9862,10 +9862,9 @@ type TreeIterCompareFunc func(model *TreeModel, a, b *TreeIter, userData interfa // GetSortColumnId() is a wrapper around gtk_tree_sortable_get_sort_column_id(). func (v *TreeSortable) GetSortColumnId() (int, SortType, bool) { - sort := C.toGtkTreeSortable(unsafe.Pointer(v.native())) var column C.gint var order C.GtkSortType - ok := gobool(C.gtk_tree_sortable_get_sort_column_id(sort, &column, &order)) + ok := gobool(C.gtk_tree_sortable_get_sort_column_id(v.native(), &column, &order)) return int(column), SortType(order), ok } @@ -9887,8 +9886,7 @@ var ( // SetSortColumnId() is a wrapper around gtk_tree_sortable_set_sort_column_id(). func (v *TreeSortable) SetSortColumnId(column int, order SortType) { - sort := C.toGtkTreeSortable(unsafe.Pointer(v.native())) - C.gtk_tree_sortable_set_sort_column_id(sort, C.gint(column), C.GtkSortType(order)) + C.gtk_tree_sortable_set_sort_column_id(v.native(), C.gint(column), C.GtkSortType(order)) } // SetSortFunc() is a wrapper around gtk_tree_sortable_set_sort_func().
Use native types directly. First part of #<I>
gotk3_gotk3
train
b0c095891f84f611c78c7ebab7225e96b8cce65e
diff --git a/odl/trafos/fourier.py b/odl/trafos/fourier.py index <HASH>..<HASH> 100644 --- a/odl/trafos/fourier.py +++ b/odl/trafos/fourier.py @@ -800,9 +800,6 @@ class PyfftwTransform(Operator): (2, 4) >>> fft.range.shape (2, 4) - >>> print(fft(fft.domain.one())) # FFT of an array of ones - [[(4+0j), 0j, 0j, 0j], - [(4+0j), 0j, 0j, 0j]] Real-to-complex transforms have a range grid with shape ``n // 2 + 1`` in the last tranform axis: @@ -978,9 +975,6 @@ class PyfftwTransformInverse(Operator): (2, 4) >>> ifft.range.shape (2, 4) - >>> print(ifft(ifft.domain.one())) # IFFT of an array of ones - [[(1+0j), 0j, 0j, 0j], - [(1+0j), 0j, 0j, 0j]] Complex-to-real transforms have a domain grid with shape ``n // 2 + 1`` in the last tranform axis:
BUG: remove FFT-computing doctests - won't work w/o pyfftw
odlgroup_odl
train
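The commit above removes the pyfftw-dependent doctests outright. A minimal sketch of the guarded alternative, assuming the standard unittest load_tests protocol and the module path from the diff (illustrative, not part of the commit):

import doctest

def load_tests(loader, tests, ignore):
    # Collect the FFT-computing doctests only when the optional
    # pyfftw backend is importable; otherwise leave them out.
    try:
        import pyfftw  # noqa: F401
    except ImportError:
        return tests
    import odl.trafos.fourier
    tests.addTests(doctest.DocTestSuite(odl.trafos.fourier))
    return tests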
7c85411507ebb36cacb1f3347449ef3b56c9563f
diff --git a/spec/mangopay/shared_resources.rb b/spec/mangopay/shared_resources.rb index <HASH>..<HASH> 100644 --- a/spec/mangopay/shared_resources.rb +++ b/spec/mangopay/shared_resources.rb @@ -307,7 +307,7 @@ shared_context 'transfers' do CreditedUserId: to_wallet['Owners'][0], CreditedWalletId: to_wallet['Id'], DebitedFunds: { Currency: 'EUR', Amount: amnt}, - Fees: { Currency: 'EUR', Amout: 0}, + Fees: { Currency: 'EUR', Amount: 0}, Tag: 'Test transfer' }) end
Fixed typo in the Fees' Amount property. In shared_resources, the create_new_transfer method contained a typo ('Amout' instead of 'Amount').
Mangopay_mangopay2-ruby-sdk
train
915407d1bd2e2e50131d08e1ade01f985c97e75f
diff --git a/jbpm-designer-client/src/main/java/org/jbpm/designer/client/DesignerPresenter.java b/jbpm-designer-client/src/main/java/org/jbpm/designer/client/DesignerPresenter.java index <HASH>..<HASH> 100644 --- a/jbpm-designer-client/src/main/java/org/jbpm/designer/client/DesignerPresenter.java +++ b/jbpm-designer-client/src/main/java/org/jbpm/designer/client/DesignerPresenter.java @@ -60,9 +60,6 @@ public class DesignerPresenter private PlaceManager placeManager; @Inject - private Event<NotificationEvent> notification; - - @Inject private Caller<VFSService> vfsServices; @Inject @@ -77,10 +74,6 @@ public class DesignerPresenter @Inject private Bpmn2Type resourceType; - @Inject - private DefaultFileNameValidator fileNameValidator; - - private ObservablePath.OnConcurrentUpdateEvent concurrentUpdateSessionInfo = null; private DesignerView view; @Inject
Bug <I> - Missing conflict dialog for jBPM Designer
kiegroup_jbpm-designer
train
9995a212a03d2d398bde62c3e0ab64da2c8b7594
diff --git a/gitenberg/clone.py b/gitenberg/clone.py index <HASH>..<HASH> 100644 --- a/gitenberg/clone.py +++ b/gitenberg/clone.py @@ -4,7 +4,7 @@ import os import git -from .library import GitbergLibraryManager +from . import config from .parameters import GITHUB_ORG from .util.catalog import get_repo_name @@ -30,10 +30,9 @@ class CloneVat(object): self.local_repo = None # create a local instance of the library manager with the provided # config if available - self.l_manager = GitbergLibraryManager() def library_book_dir(self): - return os.path.join(self.l_manager.library_base_path, self.book_repo_name) + return os.path.join(config.data['library_path'], self.book_repo_name) def path_exists(self): if os.path.exists(self.library_book_dir()): @@ -49,7 +48,6 @@ class CloneVat(object): assumes you are authenticated to git clone from repo? returns True/False, message """ - # FIXME: check if this works from a server install logging.debug("Attempting to clone {0}".format(self.book_repo_name)) if self.path_exists(): diff --git a/gitenberg/library.py b/gitenberg/library.py index <HASH>..<HASH> 100644 --- a/gitenberg/library.py +++ b/gitenberg/library.py @@ -14,14 +14,9 @@ class GitbergLibraryManager(object): # by default, loads the default config location self.config = config.ConfigFile() - @property - def library_base_path(self): - """ returns the path where library books are stored """ - return self.config.data['library_path'] - def book_directories(self): """ Returns a list of book directories in the library folder """ - return os.listdir(self.library_base_path) + return os.listdir(config.data['library_path']) def main(): diff --git a/gitenberg/tests/test_library.py b/gitenberg/tests/test_library.py index <HASH>..<HASH> 100644 --- a/gitenberg/tests/test_library.py +++ b/gitenberg/tests/test_library.py @@ -17,12 +17,6 @@ class TestLibraryManager(unittest.TestCase): 'library_path': self.library_path } - def test_library_base_path_from_config(self): - self.assertEqual( - self.glm.library_base_path, - self.library_path - ) - def test_book_directories(self): with patch('os.listdir', return_value=[1, 2, 3, 4]) as _mock: self.glm.book_directories()
refactor library_base_path
gitenberg-dev_gitberg
train
947c3403730a32b1d1dc66a7fff5d56d5dd44b8c
diff --git a/lib/html_mockup/cli.rb b/lib/html_mockup/cli.rb index <HASH>..<HASH> 100644 --- a/lib/html_mockup/cli.rb +++ b/lib/html_mockup/cli.rb @@ -167,7 +167,7 @@ module HtmlMockup search_files = %w{.html .htm}.map!{|p| path + "index#{p}" } # If it ends with a slash or does not contain a . and it's not a directory # try to add .html/.htm/.rhtml to see if that exists. - elsif (path =~ /\/$/) || (path =~ /^[^.]+$/) + elsif (path.to_s =~ /\/$/) || (path.to_s =~ /^[^.]+$/) search_files = [path.to_s + ".html", path.to_s + ".htm"].map!{|p| Pathname.new(p) } else search_files = [path]
Fix Pathname bug where it was used in a regex
DigitPaint_html_mockup
train
d32c48950acdb8d0fd61bf77f5778c0852e26e3a
diff --git a/thinc/tests/layers/test_mnist.py b/thinc/tests/layers/test_mnist.py index <HASH>..<HASH> 100644 --- a/thinc/tests/layers/test_mnist.py +++ b/thinc/tests/layers/test_mnist.py @@ -68,7 +68,7 @@ def create_model(request): @pytest.mark.slow [email protected](("width", "nb_epoch", "min_score"), [(32, 10, 0.2)]) [email protected](("width", "nb_epoch", "min_score"), [(32, 20, 0.8)]) def test_small_end_to_end(width, nb_epoch, min_score, create_model, mnist): batch_size = 128 dropout = 0.2 diff --git a/thinc/tests/strategies.py b/thinc/tests/strategies.py index <HASH>..<HASH> 100644 --- a/thinc/tests/strategies.py +++ b/thinc/tests/strategies.py @@ -46,7 +46,7 @@ def ndarrays(min_len=0, max_len=10, min_val=-10.0, max_val=10.0): def arrays_BI(min_B=1, max_B=10, min_I=1, max_I=100): - shapes = tuples(lengths(lo=min_B, hi=max_B), lengths(lo=min_B, hi=max_I)) + shapes = tuples(lengths(lo=min_B, hi=max_B), lengths(lo=min_I, hi=max_I)) return shapes.flatmap(ndarrays_of_shape)
requiring mnist to achieve at least <I>
explosion_thinc
train
ee86f7d93d11c3d15fc14889c953fc24ef0b1ad9
diff --git a/lib/dm-core/support/hook.rb b/lib/dm-core/support/hook.rb index <HASH>..<HASH> 100644 --- a/lib/dm-core/support/hook.rb +++ b/lib/dm-core/support/hook.rb @@ -30,10 +30,12 @@ module DataMapper class << self def method_added(name) process_method_added(name, :instance) + super end def singleton_method_added(name) process_method_added(name, :class) + super end end end
method_added and singleton_method_added should call super. Otherwise, any other included modules that rely on those methods won't work properly.
datamapper_dm-core
train
583c480079a4a6473d225b60da81683dd82fc9f6
diff --git a/CHANGELOG b/CHANGELOG index <HASH>..<HASH> 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,6 @@ +0.9.3 - Taxamatch::Normalizer substitutes multiplication sign to 'x' +(lowcase) instead of '?' + 0.9.2 - Taxamatch::Normalizer.normalize always returns only ASCII characters, all utf-8 characters unknown to normalizer are becoming '?' diff --git a/lib/taxamatch_rb/normalizer.rb b/lib/taxamatch_rb/normalizer.rb index <HASH>..<HASH> 100644 --- a/lib/taxamatch_rb/normalizer.rb +++ b/lib/taxamatch_rb/normalizer.rb @@ -4,7 +4,7 @@ module Taxamatch module Normalizer def self.normalize(string) - utf8_to_ascii(string.strip).upcase.gsub(/[^\x00-\x7F]/,'?') + utf8_to_ascii(string.strip.upcase).gsub(/[^\x00-\x7F]/,'?') end def self.normalize_word(word) @@ -25,37 +25,24 @@ module Taxamatch private def self.utf8_to_ascii(string) string = string.gsub(/\s{2,}/, ' ') - string = string.gsub(/[ÀÂÅÃÄÁẤẠÁ]/, "A") - string = string.gsub(/[ÉÈÊË]/, "E") - string = string.gsub(/[ÍÌÎÏ]/, "I") - string = string.gsub(/[ÓÒÔØÕÖỚỔ]/, "O") - string = string.gsub(/[ÚÙÛÜ]/, "U") - string = string.gsub(/[Ý]/, "Y") - string = string.gsub(/Æ/, "AE") - string = string.gsub(/[ČÇ]/, "C") - string = string.gsub(/[ŠŞ]/, "S") - string = string.gsub(/[Đ]/, "D") - string = string.gsub(/Ž/, "Z") - string = string.gsub(/Ñ/, "N") - string = string.gsub(/Œ/, "OE") + string = string.gsub("×", "x") + string = string.gsub(/[ÀÂÅÃÄÁẤẠÁáàâåãäăãắảạậầằá]/, "A") + string = string.gsub(/[ÉÈÊËéèêëĕěếệểễềẻ]/, "E") + string = string.gsub(/[ÍÌÎÏíìîïǐĭīĩỉï]/, "I") + string = string.gsub(/[ÓÒÔØÕÖỚỔóòôøõöŏỏỗộơọỡốơồờớổő]/, "O") + string = string.gsub(/[ÚÙÛÜúùûüůưừựủứụű]/, "U") + string = string.gsub(/[Ýýÿỹ]/, "Y") + string = string.gsub(/[Ææ]/, "AE") + string = string.gsub(/[ČÇčćç]/, "C") + string = string.gsub(/[ŠŞśšşſ]/, "S") + string = string.gsub(/[Đđð]/, "D") + string = string.gsub(/Žžź/, "Z") + string = string.gsub(/[Ññńň]/, "N") + string = string.gsub(/[Œœ]/, "OE") string = string.gsub(/ß/, "B") string = string.gsub(/Ķ/, "K") - string = string.gsub(/[áàâåãäăãắảạậầằá]/, "a") - string = string.gsub(/[éèêëĕěếệểễềẻ]/, "e") - string = string.gsub(/[íìîïǐĭīĩỉï]/, "i") - string = string.gsub(/[óòôøõöŏỏỗộơọỡốơồờớổő]/, "o") - string = string.gsub(/[úùûüůưừựủứụű]/, "u") - string = string.gsub(/[žź]/, "z") - string = string.gsub(/[ýÿỹ]/, "y") - string = string.gsub(/[đð]/, "d") - string = string.gsub(/æ/, "ae") - string = string.gsub(/[čćç]/, "c") - string = string.gsub(/[ñńň]/, "n") - string = string.gsub(/œ/, "oe") - string = string.gsub(/[śšş]/, "s") - string = string.gsub(/ř/, "r") - string = string.gsub(/ğ/, "g") - string = string.gsub(/Ř/, "R") + string = string.gsub(/ğ/, "G") + string = string.gsub(/[Řř]/, "R") end end diff --git a/spec/taxamatch_rb_spec.rb b/spec/taxamatch_rb_spec.rb index <HASH>..<HASH> 100644 --- a/spec/taxamatch_rb_spec.rb +++ b/spec/taxamatch_rb_spec.rb @@ -35,6 +35,7 @@ describe 'Taxamatch::Normalizer' do Taxamatch::Normalizer.normalize('Fallé€n').should == 'FALLE?N' Taxamatch::Normalizer.normalize('Fallén привет').should == 'FALLEN ??????' Taxamatch::Normalizer.normalize('Choriozopella trägårdhi').should == 'CHORIOZOPELLA TRAGARDHI' + Taxamatch::Normalizer.normalize('×Zygomena').should == 'xZYGOMENA' end it 'should normalize words' do
The hybrid sign (×) is translated to lowercase 'x' by the normalizer
GlobalNamesArchitecture_taxamatch_rb
train
592f2e915c2f1b65c961abd81971e6d08571304f
diff --git a/src/viz.js b/src/viz.js index <HASH>..<HASH> 100644 --- a/src/viz.js +++ b/src/viz.js @@ -6,6 +6,7 @@ d3plus.viz = function() { var vars = { "autodraw": false, + "filtered": false, "footer_text": function() { var text = vars.html.value || vars.tooltip.value.long ? "Click for More Info" : null return vars.text_format.value(text) @@ -37,6 +38,7 @@ d3plus.viz = function() { // and check text direction. //------------------------------------------------------------------- if (vars.container.changed) { + vars.parent = d3.select(vars.container.value) vars.parent @@ -47,6 +49,7 @@ d3plus.viz = function() { return remain ? current : "relative"; }) .html("") + } //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -64,7 +67,9 @@ d3plus.viz = function() { // Run setup function if app has it //------------------------------------------------------------------- if (d3plus.apps[vars.type.value].setup) { + if (vars.dev.value) d3plus.console.group("Running setup function for \""+vars.type.value+"\"") d3plus.apps[vars.type.value].setup(vars) + if (vars.dev.value) d3plus.console.groupEnd() } //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ @@ -180,7 +185,7 @@ d3plus.viz = function() { d3plus.info.timeline(vars); vars.app_height = vars.height.value - vars.margin.top - vars.margin.bottom; vars.graph.height = vars.app_height-vars.graph.margin.top-vars.graph.margin.bottom; - + //^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ // Update Elements //-------------------------------------------------------------------
created "filtered" variable to show if previously filtered by solo/mute
alexandersimoes_d3plus
train
fd818f2c37a91105cf1c30fb3a824625a1bceca7
diff --git a/ConsumptionSaving/NonDurables_During_Great_Recession.py b/ConsumptionSaving/NonDurables_During_Great_Recession.py index <HASH>..<HASH> 100644 --- a/ConsumptionSaving/NonDurables_During_Great_Recession.py +++ b/ConsumptionSaving/NonDurables_During_Great_Recession.py @@ -1,4 +1,3 @@ - """ At the onset of the Great Recession, there was a large drop (X%) in consumer spending on non-durables. Some economists have proffered that this could be attributed to precautionary @@ -31,10 +30,16 @@ sys.path.insert(0, os.path.abspath('../cstwMPC')) import cstwMPC import SetupParamsCSTW as cstwParams -#cstwParams.init_infite['Nagents'] = 5000 +# Note if we change Nagents, we need to change it here, before it is passed, because the constructor uses Nagents to construct p_init etc +#cstwParams.init_infinite['Nagents'] = 10000 + # Now, initialize a baseline consumer type, using the default parameters from the infinite horizon cstwMPC BaselineType = IndShockConsumerType(**cstwParams.init_infinite) -BaselineType.Nagents = 5000 +BaselineType.seed = 212 + +#assert False +#BaselineType.Nagents = 5000 +#BaselineType.sim_pop_size = 5001 # The cstwMPC parameters do not define a discount factor, since there is ex-ante heterogeneity # in the discount factor. To prepare to create this ex-ante heterogeneity, first create @@ -47,13 +52,16 @@ for nn in range(num_consumer_types): newType = deepcopy(BaselineType) ConsumerTypes.append(newType) -# Now, generate the desired ex-ante heterogeneity, by giving the different consumer types -# each with their own discount factor -bottomDiscFac = 0.9800 -topDiscFac = .9834 #0.9934 +## Now, generate the desired ex-ante heterogeneity, by giving the different consumer types +## each with their own discount factor +# First, decide the discount factors to assign +bottomDiscFac = 0.9800 +topDiscFac = 0.9934 #.9834 from HARKutilities import approxUniform DiscFac_list = approxUniform(N=num_consumer_types,bot=bottomDiscFac,top=topDiscFac)[1] + +# Now, assign the discount factors we want cstwMPC.assignBetaDistribution(ConsumerTypes,DiscFac_list) @@ -64,7 +72,6 @@ cstwMPC.assignBetaDistribution(ConsumerTypes,DiscFac_list) """ Now, solve and simulate the model for each consumer type """ -import numpy as np for ConsumerType in ConsumerTypes: @@ -74,7 +81,6 @@ for ConsumerType in ConsumerTypes: ### Now simulate many periods to get to the stationary distribution ConsumerType.sim_periods = 1000 - assert False ConsumerType.makeIncShkHist() ConsumerType.initializeSim() ConsumerType.simConsHistory() @@ -87,6 +93,7 @@ for ConsumerType in ConsumerTypes: """ Now, create functions to change household income uncertainty in various ways """ +import numpy as np def calcAvgC(Types): """ @@ -128,7 +135,7 @@ def cChangeAfterUncertaintyChange(consumerTypes,newVals,paramToChange): # Copy everything we have from the consumerTypes NewConsumerTypes = deepcopy(consumerTypes) - for NewConsumerType in NewConsumerTypes: + for index,NewConsumerType in enumerate(NewConsumerTypes): # Change what we want to change if paramToChange == "PermShkStd": NewConsumerType.PermShkStd = [newVal] @@ -143,24 +150,19 @@ def cChangeAfterUncertaintyChange(consumerTypes,newVals,paramToChange): NewConsumerType.solve() # Advance the simulation one period -# NewConsumerType.Shk_idx = ConsumerType.sim_periods - 1 -# NewConsumerType.advanceIncShks() -# NewConsumerType.advancecFunc() -# NewConsumerType.simOnePrd() - - assert False NewConsumerType.sim_periods = 1 NewConsumerType.makeIncShkHist() - NewConsumerType.initializeSim(a_init=ConsumerType.aHist[-1:,:],p_init=ConsumerType.pHist[-1,:]) + NewConsumerType.initializeSim(a_init=ConsumerTypes[index].aHist[-1:,:], + p_init=ConsumerTypes[index].pHist[-1,:]) NewConsumerType.simConsHistory() # Add the new period to the simulation history - NewConsumerType.cHist = np.append(ConsumerType.cHist, + NewConsumerType.cHist = np.append(ConsumerTypes[index].cHist, NewConsumerType.cNow, #cNow has shape (N,1) axis=0) - NewConsumerType.pHist = np.append(ConsumerType.pHist, + NewConsumerType.pHist = np.append(ConsumerTypes[index].pHist, NewConsumerType.pNow[np.newaxis,:], #pNow has shape (N,) axis=0) @@ -171,7 +173,7 @@ def cChangeAfterUncertaintyChange(consumerTypes,newVals,paramToChange): changeInConsumption = 100. * (newAvgC - oldAvgC) / oldAvgC changesInConsumption.append(changeInConsumption) - assert False + return changesInConsumption ## Define functions that calculate the change in average consumption after income process changes diff --git a/HARKparallel.py b/HARKparallel.py index <HASH>..<HASH> 100644 --- a/HARKparallel.py +++ b/HARKparallel.py @@ -5,7 +5,7 @@ a command prompt. ''' import multiprocessing from joblib import Parallel, delayed -import dill as pickle +#import dill as pickle import numpy as np from time import clock import csv
Fixed bug in NonDurables experiment
econ-ark_HARK
train
c110c8ac4ce274b667892372e02f7f7a974d5dca
diff --git a/pymzn/mzn/output.py b/pymzn/mzn/output.py index <HASH>..<HASH> 100644 --- a/pymzn/mzn/output.py +++ b/pymzn/mzn/output.py @@ -83,6 +83,7 @@ class Solutions: else: return str(self) + class SolutionParser: SOLN_SEP = '----------' @@ -115,16 +116,11 @@ class SolutionParser: def _parse(self, proc): parse_lines = self._parse_lines() parse_lines.send(None) - for line in proc.stdout.readlines(): + for line in proc.readlines(): soln = parse_lines.send(line) if soln is not None: yield soln - solver_parse_err = self.solver_parser - solver_parser_err.send(None) - for line in proc.stderr.readlines(): - solver_parser_err.send(line) - def _parse_lines(self): solver_parse_out = self.solver_parser.parse_out() split_solns = self._split_solns()
output: return to only stdout stream parsing
paolodragone_pymzn
train
5785105e7972a1baf84e95c189fcbb7118b8e093
diff --git a/src/Controller/Api/AppController.php b/src/Controller/Api/AppController.php index <HASH>..<HASH> 100644 --- a/src/Controller/Api/AppController.php +++ b/src/Controller/Api/AppController.php @@ -2,14 +2,25 @@ namespace CsvMigrations\Controller\Api; use Cake\Controller\Controller; +use Cake\Core\Configure; use Cake\Datasource\ResultSetDecorator; use Cake\Event\Event; +use Cake\ORM\Entity; use Crud\Controller\ControllerTrait; use CsvMigrations\FieldHandlers\RelatedFieldTrait; +use CsvMigrations\FieldHandlers\FieldHandlerFactory; +use CsvMigrations\CsvTrait; +use CsvMigrations\MigrationTrait; class AppController extends Controller { + /** + * Pretty format identifier + */ + const FORMAT_PRETTY = 'pretty'; + use ControllerTrait; + use MigrationTrait; use RelatedFieldTrait; public $components = [ @@ -56,6 +67,37 @@ class AppController extends Controller } }); + $this->Crud->on('afterFind', function(Event $event) { + if (static::FORMAT_PRETTY === $this->request->query('format')) { + $table = $event->subject()->repository->registryAlias(); + $fields = $this->getFieldsDefinitions($event->subject()->repository->alias()); + $fhf = new FieldHandlerFactory(); + $entity = $event->subject()->entity; + $event->subject()->entity = $this->_getPrettyValues($entity, $table, $fields, $fhf); + } + }); + + return $this->Crud->execute(); + } + + /** + * Index CRUD action events handling logic. + * + * @return \Cake\Network\Response + */ + public function index() + { + $this->Crud->on('afterPaginate', function(Event $event) { + if (static::FORMAT_PRETTY === $this->request->query('format')) { + $table = $event->subject()->query->repository()->registryAlias(); + $fields = $this->getFieldsDefinitions($event->subject()->query->repository()->alias()); + $fhf = new FieldHandlerFactory(); + foreach ($event->subject()->entities as $entity) { + $entity = $this->_getPrettyValues($entity, $table, $fields, $fhf); + } + } + }); + return $this->Crud->execute(); } @@ -136,4 +178,28 @@ class AppController extends Controller return $this->response; } } + + /** + * Method that renders Entity values through Field Handler Factory. + * + * @param Cake\ORM\Entity $entity Entity instance + * @param [type] $tableName Table name + * @param array $fields Migration fields + * @param FieldHandlerFactory $fhf Field Handler Factory instance + * @return Cake\ORM\Entity + */ + protected function _getPrettyValues(Entity $entity, $tableName, array $fields, FieldHandlerFactory $fhf) + { + foreach ($fields as $field => $definitions) { + $renderOptions = ['entity' => $entity]; + $entity->{$field} = $fhf->renderValue( + $tableName, + $field, + $entity->{$field}, + $renderOptions + ); + } + + return $entity; + } }
add support for pretty values on API index and view actions (task #<I>)
QoboLtd_cakephp-csv-migrations
train
3843ae3f723271e23e8124afd3b8a783acb56ccd
diff --git a/js/binance.js b/js/binance.js index <HASH>..<HASH> 100644 --- a/js/binance.js +++ b/js/binance.js @@ -764,13 +764,17 @@ module.exports = class binance extends Exchange { '-2015': AuthenticationError, // "Invalid API-key, IP, or permissions for action." '-2019': InsufficientFunds, // {"code":-2019,"msg":"Margin is insufficient."} '-3005': InsufficientFunds, // {"code":-3005,"msg":"Transferring out not allowed. Transfer out amount exceeds max amount."} + '-3006': InsufficientFunds, // {"code":-3006,"msg":"Your borrow amount has exceed maximum borrow amount."} '-3008': InsufficientFunds, // {"code":-3008,"msg":"Borrow not allowed. Your borrow amount has exceed maximum borrow amount."} '-3010': ExchangeError, // {"code":-3010,"msg":"Repay not allowed. Repay amount exceeds borrow amount."} + '-3015': ExchangeError, // {"code":-3015,"msg":"Repay amount exceeds borrow amount."} '-3022': AccountSuspended, // You account's trading is banned. '-4028': BadRequest, // {"code":-4028,"msg":"Leverage 100 is not valid"} '-3020': InsufficientFunds, // {"code":-3020,"msg":"Transfer out amount exceeds max amount."} '-3041': InsufficientFunds, // {"code":-3041,"msg":"Balance is not enough"} '-5013': InsufficientFunds, // Asset transfer failed: insufficient balance" + '-11008': InsufficientFunds, // {"code":-11008,"msg":"Exceeding the account's maximum borrowable limit."} + }, 'broad': { 'has no operation privilege': PermissionDenied,
binance: add error codes for borrowing
ccxt_ccxt
train
ae4dbf916d24cd273228d7eaeb6f6a680dc4f8a1
diff --git a/src/main/java/com/zaxxer/hikari/pool/HikariPool.java b/src/main/java/com/zaxxer/hikari/pool/HikariPool.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/zaxxer/hikari/pool/HikariPool.java +++ b/src/main/java/com/zaxxer/hikari/pool/HikariPool.java @@ -185,7 +185,7 @@ public class HikariPool implements HikariPoolMXBean, IBagStateListener } final long now = clockSource.currentTime(); - if (bagEntry.evicted || (clockSource.elapsedMillis(bagEntry.lastAccess, now) > ALIVE_BYPASS_WINDOW_MS && !poolElf.isConnectionAlive(bagEntry.connection, lastConnectionFailure))) { + if (bagEntry.evict || (clockSource.elapsedMillis(bagEntry.lastAccess, now) > ALIVE_BYPASS_WINDOW_MS && !poolElf.isConnectionAlive(bagEntry.connection, lastConnectionFailure))) { closeConnection(bagEntry, "(connection evicted or dead)"); // Throw away the dead connection and try again timeout = hardTimeout - clockSource.elapsedMillis(startTime, now); } @@ -222,7 +222,7 @@ public class HikariPool implements HikariPoolMXBean, IBagStateListener { metricsTracker.recordConnectionUsage(bagEntry); - if (bagEntry.evicted) { + if (bagEntry.evict) { closeConnection(bagEntry, "(connection broken or evicted)"); } else { @@ -413,7 +413,7 @@ public class HikariPool implements HikariPoolMXBean, IBagStateListener public void softEvictConnections() { for (PoolBagEntry bagEntry : connectionBag.values()) { - bagEntry.evicted = true; + bagEntry.evict = true; if (connectionBag.reserve(bagEntry)) { closeConnection(bagEntry, "(connection evicted by user)"); } @@ -540,7 +540,7 @@ public class HikariPool implements HikariPoolMXBean, IBagStateListener { for (PoolBagEntry bagEntry : connectionBag.values(STATE_IN_USE)) { try { - bagEntry.aborted = bagEntry.evicted = true; + bagEntry.evict = true; bagEntry.connection.abort(assassinExecutor); } catch (Throwable e) { @@ -567,7 +567,7 @@ public class HikariPool implements HikariPoolMXBean, IBagStateListener } ConnectionProxy connection = (ConnectionProxy) getConnection(); - connection.getPoolBagEntry().evicted = (config.getMinimumIdle() == 0); + connection.getPoolBagEntry().evict = (config.getMinimumIdle() == 0); connection.close(); } catch (Throwable e) { diff --git a/src/main/java/com/zaxxer/hikari/pool/PoolBagEntry.java b/src/main/java/com/zaxxer/hikari/pool/PoolBagEntry.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/zaxxer/hikari/pool/PoolBagEntry.java +++ b/src/main/java/com/zaxxer/hikari/pool/PoolBagEntry.java @@ -49,8 +49,7 @@ public final class PoolBagEntry implements IConcurrentBagEntry public long lastAccess; public volatile long lastOpenTime; - public volatile boolean evicted; - public volatile boolean aborted; + public volatile boolean evict; public boolean isAutoCommit; int networkTimeout; @@ -95,7 +94,7 @@ public final class PoolBagEntry implements IConcurrentBagEntry } else { // else the connection is "in-use" and we mark it for eviction by pool.releaseConnection() - PoolBagEntry.this.evicted = true; + PoolBagEntry.this.evict = true; } } }, lifetime, TimeUnit.MILLISECONDS); diff --git a/src/main/java/com/zaxxer/hikari/proxy/ConnectionProxy.java b/src/main/java/com/zaxxer/hikari/proxy/ConnectionProxy.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/zaxxer/hikari/proxy/ConnectionProxy.java +++ b/src/main/java/com/zaxxer/hikari/proxy/ConnectionProxy.java @@ -108,7 +108,7 @@ public abstract class ConnectionProxy implements IHikariConnectionProxy if (sqlState != null) { boolean isForceClose = sqlState.startsWith("08") || SQL_ERRORS.contains(sqlState); if (isForceClose) { - poolEntry.evicted = true; + poolEntry.evict = true; LOGGER.warn("{} - Connection {} marked as broken because of SQLSTATE({}), ErrorCode({})", poolEntry.parentPool, poolEntry, sqlState, sqle.getErrorCode(), sqle); } @@ -161,7 +161,7 @@ public abstract class ConnectionProxy implements IHikariConnectionProxy } catch (SQLException e) { checkException(e); - success = success ? !poolEntry.evicted : false; + success = success ? !poolEntry.evict : false; } } @@ -202,7 +202,7 @@ public abstract class ConnectionProxy implements IHikariConnectionProxy } catch (SQLException e) { // when connections are aborted, exceptions are often thrown that should not reach the application - if (!poolEntry.aborted) { + if (!poolEntry.evict) { throw checkException(e); } }
removed PoolBagEntry.aborted (it always had the same value as evicted); renamed PoolBagEntry.evicted to PoolBagEntry.evict
brettwooldridge_HikariCP
train
d824b5fbdc009f8af21bab84e24ef57a8758e073
diff --git a/src/main/java/com/plaid/client/PlaidClients.java b/src/main/java/com/plaid/client/PlaidClients.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/plaid/client/PlaidClients.java +++ b/src/main/java/com/plaid/client/PlaidClients.java @@ -1,5 +1,7 @@ package com.plaid.client; +import org.apache.http.impl.client.HttpClientBuilder; + import com.plaid.client.http.ApacheHttpClientHttpDelegate; import com.plaid.client.http.WireLogger; @@ -13,7 +15,7 @@ public class PlaidClients { } public static PlaidUserClient testUserClient(String clientId, String secret, WireLogger wireLogger) { - return new DefaultPlaidUserClient(createHttpDelegate(BASE_URI_TEST, wireLogger), clientId, secret); + return new DefaultPlaidUserClient(createHttpDelegate(BASE_URI_TEST, wireLogger, true), clientId, secret); } public static PlaidPublicClient productionPublicClient(WireLogger wireLogger) { @@ -21,7 +23,7 @@ public class PlaidClients { } public static PlaidPublicClient testPublicClient(WireLogger wireLogger) { - return new DefaultPlaidPublicClient(createHttpDelegate(BASE_URI_TEST, wireLogger)); + return new DefaultPlaidPublicClient(createHttpDelegate(BASE_URI_TEST, wireLogger, true)); } public static PlaidUserClient productionUserClient(String clientId, String secret) { @@ -41,7 +43,17 @@ public class PlaidClients { } private static ApacheHttpClientHttpDelegate createHttpDelegate(String uri, WireLogger wireLogger) { - ApacheHttpClientHttpDelegate httpDelegate = ApacheHttpClientHttpDelegate.createDefault(uri); + return createHttpDelegate(uri, wireLogger, false); + } + + private static ApacheHttpClientHttpDelegate createHttpDelegate(String uri, WireLogger wireLogger, boolean test) { + ApacheHttpClientHttpDelegate httpDelegate; + if (test) { + httpDelegate = new ApacheHttpClientHttpDelegate(uri, HttpClientBuilder.create().disableContentCompression().build()); + } + else { + httpDelegate = ApacheHttpClientHttpDelegate.createDefault(uri); + } if (wireLogger != null) { httpDelegate.setWireLogger(wireLogger); }
Adjustment so that test clients don't do content compression (simplifies debugging of responses)
plaid_plaid-java
train
fd0398d5e20baececc18a777350f9f3ac387c0b2
diff --git a/ghost/admin/app/controllers/react-editor.js b/ghost/admin/app/controllers/react-editor.js index <HASH>..<HASH> 100644 --- a/ghost/admin/app/controllers/react-editor.js +++ b/ghost/admin/app/controllers/react-editor.js @@ -532,10 +532,11 @@ export default class ReactEditorController extends Controller { *beforeSaveTask(options = {}) { // ensure we remove any blank cards when performing a full save if (!options.backgroundSave) { - if (this._koenig) { - this._koenig.cleanup(); - this.set('hasDirtyAttributes', true); - } + // TODO: not yet implemented in react editor + // if (this._koenig) { + // this._koenig.cleanup(); + // this.set('hasDirtyAttributes', true); + // } } // TODO: There's no need for (at least) most of these scratch values. @@ -821,9 +822,10 @@ export default class ReactEditorController extends Controller { // - blank cards could be left around due to autosave triggering whilst // a blank card is present then the user attempting to leave // - will mark the post as dirty so it gets saved when transitioning - if (this._koenig && post.isDraft) { - this._koenig.cleanup(); - } + // TODO: not yet implemented in react editor + // if (this._koenig && post.isDraft) { + // this._koenig.cleanup(); + // } let hasDirtyAttributes = this.hasDirtyAttributes; let state = post.getProperties('isDeleted', 'isSaving', 'hasDirtyAttributes', 'isNew');
Fixed unsupported `.cleanup()` calls on react editor no issue - `react-editor` controller has much of the same code as the normal editor controller but not all of the editor-instance methods are supported in the react editor yet - commented out the remaining `.cleanup()` calls that are intended to remove empty cards because the method does not exist and was throwing errors when leaving the react-editor route
TryGhost_Ghost
train
79bef46b5e44509e841465bc6e22e1206d44b10c
diff --git a/react/CozyDialogs/dialogPropTypes.js b/react/CozyDialogs/dialogPropTypes.js index <HASH>..<HASH> 100644 --- a/react/CozyDialogs/dialogPropTypes.js +++ b/react/CozyDialogs/dialogPropTypes.js @@ -2,7 +2,7 @@ import PropTypes from 'prop-types' export default { open: PropTypes.bool.isRequired, - onClose: PropTypes.func.isRequired, + onClose: PropTypes.func, title: PropTypes.node, content: PropTypes.node, actions: PropTypes.node,
fix: onClose is not required for CozyDialogs
cozy_cozy-ui
train
7ad0ba3c1a55c0ed708690fc02025164ea6b0283
diff --git a/src/common/errors.js b/src/common/errors.js index <HASH>..<HASH> 100644 --- a/src/common/errors.js +++ b/src/common/errors.js @@ -17,8 +17,10 @@ class ChainsqlError extends extendableBuiltin(Error) { super(message) this.name = browserHacks.getConstructorName(this) - this.message = message - this.data = data + this.message = message + if (data !== undefined) { + this.data = data + } if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor.name) } @@ -46,8 +48,10 @@ class ChainsqldError extends ChainsqlError { super(message) this.name = name; - this.message = message - this.data = data + this.message = message + if( data !== undefined) { + this.data = data + } if (Error.captureStackTrace) { Error.captureStackTrace(this, this.constructor.name) }
remove data field when it is undefined
ChainSQL_chainsql-lib
train
e77410bd87f763001714204eb58773fa3e563e64
diff --git a/atomic_reactor/dirs.py b/atomic_reactor/dirs.py index <HASH>..<HASH> 100644 --- a/atomic_reactor/dirs.py +++ b/atomic_reactor/dirs.py @@ -195,7 +195,10 @@ class RootBuildDir(object): """ if not self.has_sources: raise BuildDirIsNotInitialized() - platform: str = self.platforms[0] + return self.platform_dir(self.platforms[0]) + + def platform_dir(self, platform: str) -> BuildDir: + """Get the build directory for the specified platform.""" return BuildDir(self.path / platform, platform) def for_each_platform(self, action: Callable[[BuildDir], Any]) -> Dict[str, Any]: @@ -223,8 +226,7 @@ class RootBuildDir(object): raise BuildDirIsNotInitialized() results: Dict[str, Any] = {} for platform in self.platforms: - build_dir = BuildDir(self.path / platform, platform) - results[platform] = action(build_dir) + results[platform] = action(self.platform_dir(platform)) return results def for_all_platforms_copy(self, action: FileCreationFunc) -> List[Path]: @@ -246,22 +248,21 @@ class RootBuildDir(object): if not self.has_sources: raise BuildDirIsNotInitialized() - first_platform: str = self.platforms[0] - build_dir = self.path / first_platform - created_files = action(BuildDir(build_dir, first_platform)) + build_dir = self.platform_dir(self.platforms[0]) + created_files = action(build_dir) the_new_files: List[Path] = [] file_path: Path for file_path in created_files: if not file_path.is_absolute(): - file_path = build_dir / file_path + file_path = build_dir.path / file_path file_path = file_path.resolve() - if file_path == build_dir: + if file_path == build_dir.path: raise ValueError( f"{file_path} should not be added as a created directory." ) try: - file_path.relative_to(build_dir) + file_path.relative_to(build_dir.path) except ValueError as e: raise ValueError( f"File must be created inside the build directory. " @@ -275,7 +276,7 @@ class RootBuildDir(object): for platform in self.platforms[1:]: for src_file in the_new_files: - dest = self.path / platform / src_file.relative_to(build_dir) + dest = self.path / platform / src_file.relative_to(build_dir.path) if src_file.is_dir(): copytree(src_file, dest) else:
Add method to get platform-specific build dir. CLOUDBLD-<I>. The build tasks are per-platform; they need a way to access the BuildDir for their platform.
projectatomic_atomic-reactor
train
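In the diff above, platform_dir is a thin wrapper around BuildDir construction. A minimal sketch of the equivalence, with a hypothetical workdir path and platform name (BuildDir's constructor arguments are taken from how the diff calls it):

from pathlib import Path
from atomic_reactor.dirs import BuildDir

# Given a RootBuildDir rooted at /tmp/workdir, root.platform_dir("x86_64")
# returns the same BuildDir one would construct by hand:
build_dir = BuildDir(Path("/tmp/workdir") / "x86_64", "x86_64")
print(build_dir.path)  # /tmp/workdir/x86_64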
5c0ada1f456e84c179dd233013cf39921e527a45
diff --git a/tests/unit/test_config.py b/tests/unit/test_config.py index <HASH>..<HASH> 100644 --- a/tests/unit/test_config.py +++ b/tests/unit/test_config.py @@ -17,7 +17,6 @@ from salt.exceptions import ( SaltCloudConfigError, SaltConfigurationError, ) -from salt.ext import six from salt.syspaths import CONFIG_DIR from tests.support.helpers import patched_environ, slowTest, with_tempdir, with_tempfile from tests.support.mixins import AdaptedConfigurationTestCaseMixin @@ -881,7 +880,7 @@ class ConfigTestCase(TestCase, AdaptedConfigurationTestCaseMixin): tally = self._get_tally(salt.config.master_config) # pylint: enable=no-value-for-parameter non_unicode = tally.get("non_unicode", []) - self.assertEqual(len(non_unicode), 8 if six.PY2 else 0, non_unicode) + self.assertEqual(len(non_unicode), 0, non_unicode) self.assertTrue(tally["unicode"] > 0) def test_conf_file_strings_are_unicode_for_minion(self):
Drop Py2 and six on tests/unit/test_config.py
saltstack_salt
train
780532ba0960f0fffc9ad8a324aca80229f0736f
diff --git a/src/jQuery-image-upload.js b/src/jQuery-image-upload.js index <HASH>..<HASH> 100644 --- a/src/jQuery-image-upload.js +++ b/src/jQuery-image-upload.js @@ -60,6 +60,10 @@ return $self; } + // don't create the image upload if it was already created + // for this element + if ($self.data("imageUpload")) { return $self; } + // add class $self.addClass(settings.addClass);
Return if imageUpload was already created.
jillix_jQuery-image-upload
train
ad23cd90b97eec6142d76c6cf85f300a4a3be04e
diff --git a/lib/parole/comment.rb b/lib/parole/comment.rb index <HASH>..<HASH> 100644 --- a/lib/parole/comment.rb +++ b/lib/parole/comment.rb @@ -14,9 +14,9 @@ module Parole # Validations validate :ensure_valid_role_for_commentable validate :ensure_valid_commentable - validate :commenter, presence: true - validate :commentable, presence: true - validate :comment, presence: true + validates :commenter, presence: true + validates :commentable, presence: true + validates :comment, presence: true end protected
Fix method typo: validate -> validates
mirego_parole
train
1279c934b3c2d8f9a08566ac9d74a43dc3cf1178
diff --git a/inspire_json_merger/inspire_json_merger.py b/inspire_json_merger/inspire_json_merger.py index <HASH>..<HASH> 100644 --- a/inspire_json_merger/inspire_json_merger.py +++ b/inspire_json_merger/inspire_json_merger.py @@ -25,6 +25,7 @@ from __future__ import absolute_import, print_function +from inspire_utils.helpers import get_value from json_merger.merger import MergeError, Merger from inspire_json_merger.merger_config import ARXIV_TO_ARXIV, \ @@ -42,10 +43,7 @@ _MERGER_CONFIGS = { } -ARXIV_SOURCE = 'arxiv' - - -def inspire_json_merge(root, head, update): +def inspire_json_merge(root, head, update, head_source=None): """ This function instantiate a ``Merger`` object using a configuration in according to the ``source`` value of head and update params. @@ -55,14 +53,16 @@ def inspire_json_merge(root, head, update): root(dict): the last common parent json of head and update head(dict): the last version of a record in INSPIRE update(dict): the update coming from outside INSPIRE to merge + head_source(string): the source of the head record. If ``None``, + heuristics are used to derive it from the metadata. Return A tuple containing the resulted merged record in json format and a an object containing all generated conflicts. """ configuration = _get_configuration( - get_source(head), - get_source(update) + head or get_head_source(head), + get_acquisition_source(update) ) conflicts = None @@ -101,22 +101,38 @@ def _get_configuration(head_source, update_source): raise ValueError('Can\'t get any configuration:\n\tHEAD SOURCE: {0}' '\n\tUPDATE SOURCE: {1}' .format(head_source, update_source)) - if head_source.lower() == ARXIV_SOURCE: - if update_source.lower() == ARXIV_SOURCE: + if head_source.lower() == 'arxiv': + if update_source.lower() == 'arxiv': return _MERGER_CONFIGS.get(ARXIV_TO_ARXIV) else: return _MERGER_CONFIGS.get(PUBLISHER_TO_ARXIV) else: - if update_source.lower() == ARXIV_SOURCE: + if update_source.lower() == 'arxiv': raise Exception('Not yet implemented') # return _MERGER_CONFIGS.get(ARXIV_TO_PUBLISHER) else: return _MERGER_CONFIGS.get(PUBLISHER_TO_PUBLISHER) -def get_source(json_obj): +def get_head_source(json_obj): + def _has_non_arxiv_field(field_name): + source = '{}.source'.format(field_name) + return ( + json_obj.get(field_name) and not get_value(json_obj, source) + or any(source.lower() != 'arxiv' for source in get_value(json_obj, source)) + ) + + if _has_non_arxiv_field('publication_info') or _has_non_arxiv_field('dois'): + return 'publisher' + elif 'arxiv_eprints' in json_obj: + return 'arxiv' + else: + return 'publisher' + + +def get_acquisition_source(json_obj): # in case of missing acquisition source, it returns the ARXIV one try: return json_obj['acquisition_source']['source'] except KeyError: - return ARXIV_SOURCE + return 'arxiv'
add head_source argument to inspire_json_merge * INCOMPATIBLE: this changes the previous API. This argument allows specifying the source of the head record. If it is None, heuristics are used to determine the source.
inspirehep_inspire-json-merger
train
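A minimal call sketch for the new keyword argument, with placeholder records (the dicts are illustrative, not real INSPIRE metadata):

from inspire_json_merger.inspire_json_merger import inspire_json_merge

root = {}
head = {'arxiv_eprints': []}
update = {'acquisition_source': {'source': 'arXiv'}}

# Per the docstring, an explicit head_source skips the metadata heuristics.
merged, conflicts = inspire_json_merge(root, head, update, head_source='arxiv')

# With head_source omitted (None), the source is derived from the head record.
merged, conflicts = inspire_json_merge(root, head, update)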
2a6cfa8bf6c25ab573392865c0ab6282c10c5b3a
diff --git a/src/Facades/DataTables.php b/src/Facades/DataTables.php index <HASH>..<HASH> 100644 --- a/src/Facades/DataTables.php +++ b/src/Facades/DataTables.php @@ -3,12 +3,15 @@ namespace Yajra\DataTables\Facades; use Illuminate\Support\Facades\Facade; +use Yajra\DataTables\CollectionDataTable; +use Yajra\DataTables\EloquentDatatable; +use Yajra\DataTables\QueryDataTable; /** * @mixin \Yajra\DataTables\DataTables - * @method eloquent($builder) - * @method queryBuilder($builder) - * @method collection($collection) + * @method EloquentDatatable eloquent($builder) + * @method QueryDataTable queryBuilder($builder) + * @method CollectionDataTable collection($collection) */ class DataTables extends Facade {
Add return types to doc methods.
yajra_laravel-datatables
train
a00b5d58c770015d4637600ce5e43d6a83398215
diff --git a/lib/rpc-consumer-factory.js b/lib/rpc-consumer-factory.js index <HASH>..<HASH> 100644 --- a/lib/rpc-consumer-factory.js +++ b/lib/rpc-consumer-factory.js @@ -21,7 +21,7 @@ var processMessageDefault = function(msg) { var rpcConsumerProto = { uri: function () { - if(_.startsWith('amqp://') || _.startsWith('amqps://')) { + if(this.url.slice(0, 7) == 'amqp://' || this.url.slice(0, 8) == 'amqps://') { return this.url; } return ['amqp://', this.url].join(''); diff --git a/lib/rpc-publisher-factory.js b/lib/rpc-publisher-factory.js index <HASH>..<HASH> 100644 --- a/lib/rpc-publisher-factory.js +++ b/lib/rpc-publisher-factory.js @@ -16,7 +16,7 @@ var logErrorDefault = function(msg) { var rpcPublisherProto = { uri: function () { - if(_.startsWith('amqp://', this.url) || _.startsWith('amqps://', this.url)) { + if(this.url.slice(0, 7) == 'amqp://' || this.url.slice(0, 8) == 'amqps://') { return this.url; } return ['amqp://', this.url].join('');
correct url testing for amqp://
rudijs_amqp.node-rpc-factory
train
ef248302fa4f8c9e49f1777ac1bdac20d232ca51
diff --git a/tests/test_api.py b/tests/test_api.py index <HASH>..<HASH> 100644 --- a/tests/test_api.py +++ b/tests/test_api.py @@ -270,13 +270,3 @@ b-comment = "a is 3" encoder=toml.TomlPreserveCommentEncoder()) assert len(s) == len(test_str) and sorted(test_str) == sorted(s) - - [email protected]('test_val', [ - 'opt = "unterminated double\n', - "opt = 'unterminated single\n", - "opt = '''\nunterminated\nraw multiline\n", - 'opt = """\nunterminated\nmultiline\n']) -def test_unterminated_string_eof(test_val): - with pytest.raises(toml.TomlDecodeError): - toml.loads(test_val)
Removed test for unexpected EOF while parsing string. The test for an EOF before the end of a string is being moved to the toml test suite.
uiri_toml
train
b23d890f60d07c31f15f339ef3f10d82fd13d56b
diff --git a/datapackage_pipelines/specs/specs.py b/datapackage_pipelines/specs/specs.py index <HASH>..<HASH> 100644 --- a/datapackage_pipelines/specs/specs.py +++ b/datapackage_pipelines/specs/specs.py @@ -83,14 +83,11 @@ def pipelines(): deferred.append((e.spec, e.missing)) continue - calculate_dirty(spec) - - if not error_num and len(spec.errors): - status.register(spec.pipeline_id, - spec.cache_hash, - spec.pipeline_details, - spec.source_details, - spec.errors) + spec.dirty = status.register(spec.pipeline_id, + spec.cache_hash, + spec.pipeline_details, + spec.source_details, + spec.errors) yield spec
Synchronise dirtiness between specs and status
frictionlessdata_datapackage-pipelines
train
f47be14d35f7b1b9295114de7b6345f6c1f1bfe5
diff --git a/wicked-charts-wicket15/src/main/java/com/googlecode/wickedcharts/wicket15/JavaScriptResourceRegistry.java b/wicked-charts-wicket15/src/main/java/com/googlecode/wickedcharts/wicket15/JavaScriptResourceRegistry.java index <HASH>..<HASH> 100644 --- a/wicked-charts-wicket15/src/main/java/com/googlecode/wickedcharts/wicket15/JavaScriptResourceRegistry.java +++ b/wicked-charts-wicket15/src/main/java/com/googlecode/wickedcharts/wicket15/JavaScriptResourceRegistry.java @@ -49,7 +49,7 @@ public class JavaScriptResourceRegistry { response.renderJavaScriptReference(this.reference); } else { throw new IllegalStateException( - "A RegistryEntry must have at least a non-null url or a non-null reference!"); + "A RegistryEntry must have at least a non-null url or a non-null reference!"); } } @@ -59,7 +59,7 @@ public class JavaScriptResourceRegistry { public static final String DEFAULT_HIGHCHARTS_URL = "http://code.highcharts.com/2.3.5/highcharts.js"; - public static final String DEFAULT_HIGHCHARTS_MORE_URL = "http://code.highcharts.com/highcharts-more.js"; + public static final String DEFAULT_HIGHCHARTS_MORE_URL = "http://code.highcharts.com/2.3.5/highcharts-more.js"; public static final String DEFAULT_HIGHCHARTS_EXPORTING_URL = "http://code.highcharts.com/2.3.5/modules/exporting.js"; @@ -101,9 +101,9 @@ public class JavaScriptResourceRegistry { } /** - * Sets the {@link ResourceReference} to use to load the Highcharts - * exporting javascript library (exporting.js). Use this method if you want - * to include the javascript file in your web application. + * Sets the {@link ResourceReference} to use to load the Highcharts exporting + * javascript library (exporting.js). Use this method if you want to include + * the javascript file in your web application. */ public void setHighchartsExportingReference(final ResourceReference reference) { this.highchartsExportingEntry = new RegistryEntry(reference); } @@ -137,9 +137,9 @@ public class JavaScriptResourceRegistry { } /** - * Sets the {@link ResourceReference} to use to load the Highcharts - * javascript library (highcharts.js). Use this method if you want to - * include the javascript file in your web application. + * Sets the {@link ResourceReference} to use to load the Highcharts javascript + * library (highcharts.js). Use this method if you want to include the + * javascript file in your web application. */ public void setHighchartsReference(final ResourceReference reference) { this.highchartsEntry = new RegistryEntry(reference); } @@ -164,8 +164,8 @@ public class JavaScriptResourceRegistry { } /** - * Sets the URL to use to load JQuery (jquery.js). Use this method if you - * want to load the javascript file from an external URL. + * Sets the URL to use to load JQuery (jquery.js). Use this method if you want + * to load the javascript file from an external URL. */ public void setJQueryReference(final String url) { this.jqueryEntry = new RegistryEntry(url); diff --git a/wicked-charts-wicket6/src/main/java/com/googlecode/wickedcharts/wicket6/JavaScriptResourceRegistry.java b/wicked-charts-wicket6/src/main/java/com/googlecode/wickedcharts/wicket6/JavaScriptResourceRegistry.java index <HASH>..<HASH> 100644 --- a/wicked-charts-wicket6/src/main/java/com/googlecode/wickedcharts/wicket6/JavaScriptResourceRegistry.java +++ b/wicked-charts-wicket6/src/main/java/com/googlecode/wickedcharts/wicket6/JavaScriptResourceRegistry.java @@ -17,6 +17,7 @@ package com.googlecode.wickedcharts.wicket6; import org.apache.wicket.markup.head.IHeaderResponse; import org.apache.wicket.markup.head.JavaScriptReferenceHeaderItem; import org.apache.wicket.request.resource.ResourceReference; +import org.apache.wicket.resource.JQueryResourceReference; /** * A registry for the JavaScript dependencies used by wicked-charts. By default, @@ -55,11 +56,9 @@ public class JavaScriptResourceRegistry { } - public static final String DEFAULT_JQUERY_URL = "https://ajax.googleapis.com/ajax/libs/jquery/1.8.0/jquery.min.js"; - public static final String DEFAULT_HIGHCHARTS_URL = "http://code.highcharts.com/2.3.5/highcharts.js"; - public static final String DEFAULT_HIGHCHARTS_MORE_URL = "http://code.highcharts.com/highcharts-more.js"; + public static final String DEFAULT_HIGHCHARTS_MORE_URL = "http://code.highcharts.com/2.3.5/highcharts-more.js"; public static final String DEFAULT_HIGHCHARTS_EXPORTING_URL = "http://code.highcharts.com/2.3.5/modules/exporting.js"; @@ -70,7 +69,7 @@ public class JavaScriptResourceRegistry { return INSTANCE; } - private RegistryEntry jqueryEntry = new RegistryEntry(DEFAULT_JQUERY_URL); + private RegistryEntry jqueryEntry = new RegistryEntry(JQueryResourceReference.get()); private RegistryEntry highchartsEntry = new RegistryEntry(DEFAULT_HIGHCHARTS_URL);
issue <I>: included Wicket's default jQuery resource and pinned highcharts-more.js to a fixed version
adessoAG_wicked-charts
train
30f0e14b261ebfe2c83e75b151ff18ce088a6e48
diff --git a/lib/worker/subprocess.js b/lib/worker/subprocess.js index <HASH>..<HASH> 100644 --- a/lib/worker/subprocess.js +++ b/lib/worker/subprocess.js @@ -125,8 +125,7 @@ ipc.options.then(options => { const required = require(mod); try { - if (required[Symbol.for('esm:package')] || - required[Symbol.for('esm\u200D:package')]) { + if (Reflect.has(required, Symbol.for('esm:package'))) { require = required(module); // eslint-disable-line no-global-assign } } catch (_) {}
Remove support for outdated 'esm' versions
avajs_ava
train
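The ava fix above keys on the presence of the marker symbol rather than the truthiness of its value. A minimal Python sketch of the same distinction, with hasattr standing in for Reflect.has (the marker name is invented for illustration):

ESM_MARKER = "esm_package"  # stand-in for Symbol.for('esm:package')

def is_esm_loader(module_like):
    # Presence of the marker is what matters, not its value:
    # a falsy marker still identifies the loader.
    return hasattr(module_like, ESM_MARKER)

class FakeLoader:
    esm_package = None  # falsy, yet present

assert is_esm_loader(FakeLoader())
assert not is_esm_loader(object())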
d088184094f2c0b271e944ef4c699d824a724525
diff --git a/PyFunceble/__init__.py b/PyFunceble/__init__.py index <HASH>..<HASH> 100644 --- a/PyFunceble/__init__.py +++ b/PyFunceble/__init__.py @@ -96,7 +96,7 @@ from PyFunceble.whois_lookup import WhoisLookup # We set our project name. NAME = "PyFunceble" # We set out project version. -VERSION = "2.11.5.dev (Green Galago: Skitterbug)" +VERSION = "2.11.6.dev (Green Galago: Skitterbug)" # We set the list of windows "platforms" WINDOWS_PLATFORMS = ["windows", "cygwin", "cygwin_nt-10.0"] diff --git a/PyFunceble/preset.py b/PyFunceble/preset.py index <HASH>..<HASH> 100644 --- a/PyFunceble/preset.py +++ b/PyFunceble/preset.py @@ -334,7 +334,14 @@ class Preset: # pragma: no cover if cls.__are_we_allowed_to_overwrite("timeout") and ( not PyFunceble.CONFIGURATION.timeout or PyFunceble.CONFIGURATION.timeout < 3 ): - PyFunceble.CONFIGURATION.timeout = 3 + PyFunceble.CONFIGURATION.timeout = float(3) + + PyFunceble.Logger().debug( + f"CONFIGURATION.timeout switched to {PyFunceble.CONFIGURATION.timeout}" + ) + + if not isinstance(PyFunceble.CONFIGURATION.timeout, float): + PyFunceble.CONFIGURATION.timeout = float(PyFunceble.CONFIGURATION.timeout) PyFunceble.Logger().debug( f"CONFIGURATION.timeout switched to {PyFunceble.CONFIGURATION.timeout}" diff --git a/PyFunceble/whois_lookup.py b/PyFunceble/whois_lookup.py index <HASH>..<HASH> 100644 --- a/PyFunceble/whois_lookup.py +++ b/PyFunceble/whois_lookup.py @@ -130,16 +130,11 @@ class WhoisLookup: if timeout: # The timeout is given. - if isinstance(timeout, int): + if isinstance(timeout, (int, float)): # The timeout is an int # We share it. - self.timeout = timeout - elif timeout.isdigit(): - # The timeout is a str digit. - - # We convert it to int and share it. - self.timeout = int(timeout) + self.timeout = float(timeout) else: # The timeout is something we could not understand. @@ -159,11 +154,8 @@ class WhoisLookup: # We initiate a socket for the request. req = socket(AF_INET, SOCK_STREAM) - if self.timeout % 3 == 0: - # The timeout is a modulo of 3. - - # We set the timeout. - req.settimeout(self.timeout) + # We set the timeout. + req.settimeout(self.timeout) try: # We try to connect to the whois server at the port 43. diff --git a/version.yaml b/version.yaml index <HASH>..<HASH> 100644 --- a/version.yaml +++ b/version.yaml @@ -1,4 +1,4 @@ -current_version: '2.11.4.dev (Green Galago: Skitterbug)' +current_version: '2.11.6.dev (Green Galago: Skitterbug)' deprecated: [0.0.0, 0.0.1, 0.65.0, 0.67.1, 0.68.0, 0.69.3, 0.69.5, 0.70.4, 0.71.2, 0.72.7, 0.73.1, 0.74.5, 0.75.1, 0.76.2, 0.77.0, 0.78.0, 0.79.1, 0.80.9, 0.81.8, 0.82.4, 0.83.2, 0.84.5, 0.85.0, 0.86.0, 0.87.1, 0.88.3, 0.89.3, 0.90.2, 0.91.1,
Ensure that the timeout is always a float
funilrys_PyFunceble
train
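The PyFunceble change collapses the int/str handling into one float coercion with a lower bound. A standalone sketch of that normalization, assuming the same 3-second floor (the helper name is hypothetical, not PyFunceble's API):

def normalize_timeout(value, floor=3.0):
    """Coerce any numeric-ish timeout to float, never below the floor."""
    try:
        timeout = float(value)  # handles int, float and digit strings alike
    except (TypeError, ValueError):
        raise ValueError("timeout is not something we could understand: %r" % (value,))
    return timeout if timeout >= floor else float(floor)

assert normalize_timeout("10") == 10.0
assert normalize_timeout(1) == 3.0
assert isinstance(normalize_timeout(5), float)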
7323047b18c6ff3dd4c2d69736ce9e5779a1f1f2
diff --git a/skyfield/positionlib.py b/skyfield/positionlib.py index <HASH>..<HASH> 100644 --- a/skyfield/positionlib.py +++ b/skyfield/positionlib.py @@ -5,7 +5,7 @@ from numpy import array, arccos, clip, einsum, exp from .constants import RAD2DEG, tau from .data.spice import inertial_frames from .functions import dots, from_polar, length_of, to_polar, rot_z -from .earthlib import compute_limb_angle, refract +from .earthlib import compute_limb_angle, refract, reverse_terra from .relativity import add_aberration, add_deflection from .timelib import Time from .units import Distance, Velocity, Angle, _interpret_angle @@ -371,6 +371,24 @@ class Apparent(ICRF): class Geocentric(ICRF): """An (x,y,z) position measured from the geocenter.""" + def subpoint(self): + if self.center != 399: # TODO: should an __init__() check this? + raise ValueError("you can only ask for the geographic subpoint" + " of a position measured from Earth's center") + t = self.t + xyz_au = einsum('ij...,j...->i...', t.M, self.position.au) + lat, lon, elevation_m = reverse_terra(xyz_au, t.gast) + + # TODO. Move VectorFunction and Topos into this file, since the + # three kinds of class work together: Topos is-a VF; VF.at() can + # return a Geocentric position; and Geocentric.subpoint() should + # return a Topos. I'm deferring the refactoring for now, to get + # this new feature to users more quickly. + from .toposlib import Topos + return Topos(latitude=Angle(radians=lat), + longitude=Angle(radians=lon), + elevation_m=elevation_m) + def _to_altaz(position_au, observer_data, temperature_C, pressure_mbar): """Compute (alt, az, distance) relative to the observer's horizon. diff --git a/skyfield/tests/test_topos.py b/skyfield/tests/test_topos.py index <HASH>..<HASH> 100644 --- a/skyfield/tests/test_topos.py +++ b/skyfield/tests/test_topos.py @@ -14,7 +14,7 @@ def test_beneath(ts, angle): # than a very large elevation. top = Topos(latitude_degrees=angle, longitude_degrees=0, elevation_m=0) p = top.at(t) - b = Topos.subpoint(p) + b = p.subpoint() error_degrees = abs(b.latitude.degrees - angle) error_mas = 60.0 * 60.0 * 1000.0 * error_degrees #print(b.latitude.degrees, deg, error_mas) diff --git a/skyfield/toposlib.py b/skyfield/toposlib.py index <HASH>..<HASH> 100644 --- a/skyfield/toposlib.py +++ b/skyfield/toposlib.py @@ -1,7 +1,7 @@ from numpy import einsum from .constants import ASEC2RAD, tau -from .earthlib import terra, reverse_terra +from .earthlib import terra from .functions import rot_x, rot_y, rot_z from .units import Distance, Angle, _interpret_ltude from .vectorlib import VectorFunction @@ -57,18 +57,6 @@ class Topos(VectorFunction): self.target = object() # TODO: make this more interesting self.target_name = '{0} N {1} E'.format(self.latitude, self.longitude) - @classmethod - def subpoint(cls, position): - if position.center != 399: - raise ValueError("you can only ask for the geographic subpoint" - " of a position measured from Earth's center") - t = position.t - xyz_au = einsum('ij...,j...->i...', t.M, position.position.au) - lat, lon, elevation_m = reverse_terra(xyz_au, t.gast) - return cls(latitude=Angle(radians=lat), - longitude=Angle(radians=lon), - elevation_m=elevation_m) - def __str__(self): return 'Topos {0}'.format(self.target_name)
Add `subpoint()` method to `Geocentric` class

Fix #<I>, fix #<I>. Addresses #<I>, but may not quite do everything that issue wanted? This decides against the more awkward approach of a constructor method that would have then had to check its argument type.
skyfielders_python-skyfield
train
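The skyfield commit defers `from .toposlib import Topos` into the method body to dodge the circular dependency the TODO comment describes. The idiom in isolation, with a stdlib module standing in for the cyclic dependency:

def to_degrees(radians_value):
    # Deferred import: nothing is resolved until the function runs,
    # which is what breaks a load-time cycle between two modules.
    import math  # stands in for `from .toposlib import Topos`
    return math.degrees(radians_value)

assert round(to_degrees(3.141592653589793)) == 180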
4987df2528d4694309746d53c9c7f176f219b982
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -317,6 +317,11 @@ module.exports = { type: String, default: '', // http://openshiftvisualacceptance-ewhite.rhcloud.com/comment description: 'API to call to comment on pr' + }, { + name: 'branch', + type: String, + default: 'master', + description: 'branch to push to' }], run: function (options, rawArgs) { let requestOptions = { @@ -326,52 +331,31 @@ module.exports = { } } - function _getLastPrNumber () { - return exec('git log -10 --oneline').then((stdout) => { - // the --oneline format for `git log` puts each commit on a single line, with the hash and then - // the commit message, so we first split on \n to get an array of commits - const commits = stdout.split('\n') - - // The commit that represents the merging of the PR will include the text 'Merge pull request' so - // we find that one - const mergeCommit = __.find(commits, (commit) => { - return commit.indexOf('Merge pull request') !== -1 - }) - - // The format of the auto-generated commit line will look something like: - // 'edf85e0 Merge pull request #30 from job13er/remove-newline' - // so if we split on the space, and grab the 5th item, it's '#30' then strip the '#' to get '30' - const prNumber = mergeCommit.split(' ')[4].replace('#', '') - - return prNumber - }) - } - if (!process.env.RO_GH_TOKEN || !process.env.TRAVIS_REPO_SLUG) { console.log('No github token found or Travis found. Just running ember test') return runCommand('ember', ['test']) } var repoSlug = process.env.TRAVIS_REPO_SLUG - var prNumber = process.env.TRAVIS_PULL_REQUEST === false ? _getLastPrNumber() : process.env.TRAVIS_PULL_REQUEST + var prNumber = process.env.TRAVIS_PULL_REQUEST var url = 'https://api.github.com/repos/' + repoSlug + '/pulls/' + prNumber var res = request('GET', url, requestOptions) var travisMessage = res.body if (/\#new\-baseline\#/.exec(travisMessage)) { console.log('Creating new baseline') return runCommand('ember', ['new-baseline', '--image-directory=' + options.imageDirectory]).then(function (params) { - if (process.env.TRAVIS_PULL_REQUEST === false) { + if (prNumber === false) { console.log('Git add') return runCommand('git', ['add', options.imageDirectory + '/*']).then(function (params) { console.log('Git commit') return runCommand('git', ['commit', '-m', '"Adding new baseline images [ci skip]"']).then(function (params) { console.log('Git push') - return runCommand('git', ['push']) + return runCommand('git', ['push', 'origin', 'HEAD:' + options.branch]) }) }) } }) - } else if (process.env.TRAVIS_PULL_REQUEST !== false && process.env.TRAVIS_PULL_REQUEST !== 'false' && options.prApiUrl !== '') { + } else if (prNumber !== false && prNumber !== 'false' && options.prApiUrl !== '') { return runCommand('ember', ['br']).then(function (params) { return runCommand('phantomjs', ['vendor/html-to-image.js', 'visual-acceptance-report/report.html']).then(function (params) { console.log('Sending to github') @@ -387,14 +371,14 @@ module.exports = { console.log(response.getBody()) }) }) - } else if ((process.env.TRAVIS_PULL_REQUEST === false || process.env.TRAVIS_PULL_REQUEST === 'false') && options.prApiUrl !== '') { + } else if ((prNumber === false || prNumber === 'false') && options.prApiUrl !== '') { return runCommand('ember', ['test']).then(function (params) { console.log('Git add') return runCommand('git', ['add', options.imageDirectory + '/*']).then(function (params) { console.log('Git commit') return runCommand('git', ['commit', '-m', '"Adding new 
baseline images [ci skip]"']).then(function (params) { console.log('Git push') - return runCommand('git', ['push']) + return runCommand('git', ['push', 'origin', 'HEAD:' + options.branch]) }) }) })
Push to branch. Simplify the prNumber logic
ciena-blueplanet_ember-cli-visual-acceptance
train
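The `git push origin HEAD:branch` form used above publishes the current commit to a named remote branch, which keeps working on CI checkouts that sit on a detached HEAD. A thin Python wrapper, as a sketch only; running it needs a real repository and remote:

import subprocess

def push_current_head(branch, remote="origin"):
    # HEAD:<branch> maps the checked-out commit onto the remote branch,
    # so no local branch of that name has to exist.
    subprocess.run(["git", "push", remote, "HEAD:" + branch], check=True)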
e41df14bbb6efc587a416e58038ef0ae9ea68cac
diff --git a/test/loader/file_loader.test.js b/test/loader/file_loader.test.js index <HASH>..<HASH> 100644 --- a/test/loader/file_loader.test.js +++ b/test/loader/file_loader.test.js @@ -194,14 +194,14 @@ describe('test/file_loader.test.js', () => { assert.deepEqual(app.model.mod, { a: 1 }); }); - it.skip('should contain syntax error filepath', () => { + it('should contain syntax error filepath', () => { const app = { model: {} }; assert.throws(() => { new FileLoader({ directory: path.join(dirBase, 'syntax_error'), target: app.model, }).load(); - }, /Parse Error: Unexpected token/); + }, /Parse Error:/); }); it('should throw when directory contains dot', () => {
test: fix the test case that was skipped (#<I>)
eggjs_egg-core
train
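Loosening the expected message from /Parse Error: Unexpected token/ to /Parse Error:/ keeps the test green while engines reword their diagnostics. The same move with Python's unittest, matching only a stable prefix (CPython's exact wording varies by version, hence the loose pattern):

import unittest

class ParseErrorTest(unittest.TestCase):
    def test_reports_syntax_error(self):
        # Assert the stable part only; token-level wording shifts
        # between interpreter versions.
        with self.assertRaisesRegex(SyntaxError, "invalid syntax"):
            compile("?", "<test>", "eval")

if __name__ == "__main__":
    unittest.main()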
09fec430734c6433235f8dfb2e3fc3ca2ba601d5
diff --git a/src/label/base.js b/src/label/base.js index <HASH>..<HASH> 100644 --- a/src/label/base.js +++ b/src/label/base.js @@ -72,9 +72,13 @@ class Label extends Component { */ destroy() { const group = this.get('group'); + const container = this.get('container'); if (!group.destroy) { group.destroy(); } + if (container) { + container.innerHTML = ''; + } } /**
feat: empty container when label is destroyed
antvis_component
train
eb4f1f2d7e563a718eea82089e5537762860d6ed
diff --git a/src/test/java/com/marklogic/client/test/PojoFacadeTest.java b/src/test/java/com/marklogic/client/test/PojoFacadeTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/com/marklogic/client/test/PojoFacadeTest.java +++ b/src/test/java/com/marklogic/client/test/PojoFacadeTest.java @@ -90,8 +90,7 @@ public class PojoFacadeTest { } @Test - // most of the queries below currently don't work because of issues in the search:search layer - // but the numbers expected come from working queries at the cts:search layer + // the geo queries below currently don't work yet because underlying layers are not yet ready public void testC_QueryPojos() throws Exception { StringQueryDefinition stringQuery = Common.client.newQueryManager().newStringDefinition(); stringQuery.setCriteria("Tungi OR Dalatando OR Chittagong"); @@ -162,6 +161,7 @@ public class PojoFacadeTest { iterator = page.iterator(); numRead = 0; while ( iterator.hasNext() ) { + @SuppressWarnings("unused") City city = iterator.next(); numRead++; } @@ -178,6 +178,7 @@ public class PojoFacadeTest { iterator = page.iterator(); numRead = 0; while ( iterator.hasNext() ) { + @SuppressWarnings("unused") City city = iterator.next(); numRead++; } @@ -194,6 +195,7 @@ public class PojoFacadeTest { iterator = page.iterator(); numRead = 0; while ( iterator.hasNext() ) { + @SuppressWarnings("unused") City city = iterator.next(); numRead++; }
suppress some warnings about an unused variable
marklogic_java-client-api
train
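Java needs @SuppressWarnings for the throwaway `city` binding; Python's convention is the `_` name, which linters treat as intentionally unused. Counting without binding, using the city names from the diff as sample data:

def count_remaining(iterator):
    """Drain an iterator, counting items without keeping any of them."""
    return sum(1 for _ in iterator)  # `_` marks the deliberately unused slot

assert count_remaining(iter(["Tungi", "Dalatando", "Chittagong"])) == 3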
745051a6953d128163271f4aff290e71ebac041c
diff --git a/src/languages/Latvian.php b/src/languages/Latvian.php index <HASH>..<HASH> 100644 --- a/src/languages/Latvian.php +++ b/src/languages/Latvian.php @@ -22,17 +22,6 @@ final class Latvian extends Speller 8 => 'astoņi simti', 9 => 'deviņi simti', ]; - static $teens = [ - 11 => 'vienpadsmit', - 12 => 'divpadsmit', - 13 => 'trīspadsmit', - 14 => 'četrpadsmit', - 15 => 'piecpadsmit', - 16 => 'sešpadsmit', - 17 => 'septiņpadsmit', - 18 => 'astoņpadsmit', - 19 => 'deviņpadsmit', - ]; static $tens = [ 1 => 'desmit', 2 => 'divdesmit', @@ -44,6 +33,17 @@ final class Latvian extends Speller 8 => 'astoņdesmit', 9 => 'deviņdesmit', ]; + static $teens = [ + 11 => 'vienpadsmit', + 12 => 'divpadsmit', + 13 => 'trīspadsmit', + 14 => 'četrpadsmit', + 15 => 'piecpadsmit', + 16 => 'sešpadsmit', + 17 => 'septiņpadsmit', + 18 => 'astoņpadsmit', + 19 => 'deviņpadsmit', + ]; $text = ''; @@ -72,7 +72,7 @@ final class Latvian extends Speller { $text .= $tens[intval(substr($number, 0, 1))]; - if ($number % 10 > 0) // whole tens + if ($number % 10 > 0) { $text .= ' ' . $this->spellSingle($number % 10, $isDecimalPart, $currency); } diff --git a/src/languages/Lithuanian.php b/src/languages/Lithuanian.php index <HASH>..<HASH> 100644 --- a/src/languages/Lithuanian.php +++ b/src/languages/Lithuanian.php @@ -76,8 +76,7 @@ final class Lithuanian extends Speller { $text .= $singles[$number]; } - else if (($number > 10) - && ($number < 20)) + else if (($number > 10) && ($number < 20)) { $text .= $teens[$number]; }
Make the Latvian and Lithuanian classes as identical as possible. The languages are very similar, after all.
jurchiks_numbers2words
train
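Both spellers share one skeleton: singles, teens and tens tables plus a remainder step (`$number % 10`). An English-only sketch of that shape; this is not the library's API, and where the PHP recurses via spellSingle this looks the remainder up directly:

SINGLES = ["", "one", "two", "three", "four", "five",
           "six", "seven", "eight", "nine"]
TEENS = {11: "eleven", 12: "twelve", 13: "thirteen", 14: "fourteen",
         15: "fifteen", 16: "sixteen", 17: "seventeen",
         18: "eighteen", 19: "nineteen"}
TENS = {1: "ten", 2: "twenty", 3: "thirty", 4: "forty", 5: "fifty",
        6: "sixty", 7: "seventy", 8: "eighty", 9: "ninety"}

def spell_below_hundred(number):
    if number < 10:
        return SINGLES[number]
    if 10 < number < 20:
        return TEENS[number]
    text = TENS[number // 10]
    if number % 10:                      # same remainder check as the PHP
        text += " " + SINGLES[number % 10]
    return text

assert spell_below_hundred(84) == "eighty four"
assert spell_below_hundred(17) == "seventeen"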
cd1f840434f2e48e8646d34aee27c0bb07864c06
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -138,7 +138,7 @@ Vector.prototype.getTile = function(z, x, y, callback) { return callback(err); // For xray styles use srcdata tile format. - if (source._xray && vtile._srcdata) { + if (!callback.format && source._xray && vtile._srcdata) { var type = tiletype.type(vtile._srcdata); format = type === 'jpg' ? 'jpeg' : type === 'webp' ? 'webp' :
Allow format to be overridden in xray mode
mapbox_tilelive-vector
train
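The guard `!callback.format` makes an explicit format request win over sniffing the stored tile data. The precedence rule in miniature, with the jpg-to-jpeg rename from the diff (a sketch, not tilelive-vector's API):

def choose_format(requested, sniffed_type):
    """An explicit caller request always beats content sniffing."""
    if requested:
        return requested
    return {"jpg": "jpeg"}.get(sniffed_type, sniffed_type)

assert choose_format("png", "jpg") == "png"   # override respected
assert choose_format(None, "jpg") == "jpeg"   # fall back to sniffing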
5e5efd62df08e16c2172ed31cc67500a2885fd38
diff --git a/xml_test.go b/xml_test.go index <HASH>..<HASH> 100644 --- a/xml_test.go +++ b/xml_test.go @@ -45,8 +45,6 @@ var xmldirs = []string{ "testdata/libvirt/tests/domaincapsschemadata", "testdata/libvirt/tests/domainconfdata", "testdata/libvirt/tests/domainschemadata", - "testdata/libvirt/tests/domainsnapshotxml2xmlin", - "testdata/libvirt/tests/domainsnapshotxml2xmlout", "testdata/libvirt/tests/genericxml2xmlindata", "testdata/libvirt/tests/genericxml2xmloutdata", "testdata/libvirt/tests/interfaceschemadata", @@ -66,6 +64,8 @@ var xmldirs = []string{ "testdata/libvirt/tests/nwfilterxml2xmlout", "testdata/libvirt/tests/qemuagentdata", "testdata/libvirt/tests/qemucapabilitiesdata", + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlin", + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlout", "testdata/libvirt/tests/qemuhotplugtestcpus", "testdata/libvirt/tests/qemuhotplugtestdevices", "testdata/libvirt/tests/qemuhotplugtestdomains", @@ -191,27 +191,27 @@ var extraActualNodes = map[string][]string{ "testdata/libvirt/tests/interfaceschemadata/vlan.xml": []string{ "/interface[0]/vlan[0]/interface[0]/@type", }, - "testdata/libvirt/tests/domainsnapshotxml2xmlin/disk_driver_name_null.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlin/disk_driver_name_null.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[0]/@type", }, - "testdata/libvirt/tests/domainsnapshotxml2xmlin/disk_snapshot.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlin/disk_snapshot.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[0]/@type", "/domainsnapshot[0]/disks[0]/disk[1]/@type", "/domainsnapshot[0]/disks[0]/disk[2]/@type", "/domainsnapshot[0]/disks[0]/disk[3]/@type", "/domainsnapshot[0]/disks[0]/disk[4]/@type", }, - "testdata/libvirt/tests/domainsnapshotxml2xmlout/disk_snapshot.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlout/disk_snapshot.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[0]/@type", "/domainsnapshot[0]/disks[0]/disk[1]/@type", "/domainsnapshot[0]/disks[0]/disk[2]/@type", }, - "testdata/libvirt/tests/domainsnapshotxml2xmlout/disk_snapshot_redefine.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlout/disk_snapshot_redefine.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[0]/@type", "/domainsnapshot[0]/disks[0]/disk[1]/@type", "/domainsnapshot[0]/disks[0]/disk[2]/@type", }, - "testdata/libvirt/tests/domainsnapshotxml2xmlout/external_vm_redefine.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlout/external_vm_redefine.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[0]/@type", }, } @@ -272,7 +272,7 @@ var extraExpectNodes = map[string][]string{ "testdata/libvirt/tests/storagevolxml2xmlout/vol-qcow2-nocow.xml": []string{volsrc}, "testdata/libvirt/tests/storagevolxml2xmlout/vol-qcow2.xml": []string{volsrc}, "testdata/libvirt/tests/storagevolxml2xmlout/vol-sheepdog.xml": []string{volsrc}, - "testdata/libvirt/tests/domainsnapshotxml2xmlin/disk_snapshot.xml": []string{ + "testdata/libvirt/tests/qemudomainsnapshotxml2xmlin/disk_snapshot.xml": []string{ "/domainsnapshot[0]/disks[0]/disk[3]/source[0]", }, }
Adapt tests to renamed data directories
libvirt_libvirt-go-xml
train
cd52ab5da902457f63bbdd0d2dfbc65a94389ee8
diff --git a/src/Domain/CustomDomain.php b/src/Domain/CustomDomain.php index <HASH>..<HASH> 100644 --- a/src/Domain/CustomDomain.php +++ b/src/Domain/CustomDomain.php @@ -27,7 +27,7 @@ abstract class CustomDomain extends DomainBase { public function __construct($type, $value, array $optValues = []) { $this->type = $type; $this->value = $value; - $this->optValues = $optValues; + $this->optValues = array_merge(['value' => $value], $optValues); } /** diff --git a/tests/Target/Editor.php b/tests/Target/Editor.php index <HASH>..<HASH> 100644 --- a/tests/Target/Editor.php +++ b/tests/Target/Editor.php @@ -18,4 +18,8 @@ class Editor extends CustomDomain { public function getName() { return $this->getOptValue('name'); } + + public function name() { + return $this->getOptValue('name'); + } }
removed Iterator interface from CustomDomain class
ritalin_omelet
train
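The constructor change merges a default 'value' entry underneath the caller-supplied options, so explicit options win. Python's dict-merge order expresses the same precedence, since later keys override earlier ones:

def build_opt_values(value, opt_values=None):
    # Mirrors array_merge(['value' => $value], $optValues):
    # the caller's entries land last, so they take precedence.
    return {"value": value, **(opt_values or {})}

assert build_opt_values("x") == {"value": "x"}
assert build_opt_values("x", {"value": "y", "name": "n"}) == {"value": "y", "name": "n"}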
0080dec3cfb7bfb5aa6cd95093d0b9ed95e0abcb
diff --git a/CameraRegionController.js b/CameraRegionController.js index <HASH>..<HASH> 100644 --- a/CameraRegionController.js +++ b/CameraRegionController.js @@ -15,6 +15,7 @@ function Controller(opts) { }; function onPointerDown(x, y, id) { + if(activePointers.indexOf(id) !== -1) return; if(id === 0 && activePointers.length < 2) { mousePosition[0] = x; mousePosition[1] = y; @@ -93,16 +94,31 @@ function Controller(opts) { } - pointers.onPointerDownSignal.add(onPointerDown); - pointers.onPointerMoveSignal.add(onPointerMove); - pointers.onPointerDragSignal.add(onPointerDrag); - pointers.onPointerUpSignal.add(onPointerUp); + var active = false; + + function setState(state) { + if(state === active) return; + active = state; + if(state) { + pointers.onPointerDownSignal.add(onPointerDown); + pointers.onPointerMoveSignal.add(onPointerMove); + pointers.onPointerDragSignal.add(onPointerDrag); + pointers.onPointerUpSignal.add(onPointerUp); + onMouseWheelSignal.add(onMouseWheelZoom); + } else { + pointers.onPointerDownSignal.remove(onPointerDown); + pointers.onPointerMoveSignal.remove(onPointerMove); + pointers.onPointerDragSignal.remove(onPointerDrag); + pointers.onPointerUpSignal.remove(onPointerUp); + onMouseWheelSignal.remove(onMouseWheelZoom); + } + } - onMouseWheelSignal.add(onMouseWheelZoom); - this.panSignal = panSignal; this.zoomSignal = zoomSignal; this.panSignal = panSignal; this.zoomSignal = zoomSignal; + this.setState = setState; + this.onPointerDown = onPointerDown; } module.exports = Controller; \ No newline at end of file diff --git a/PanZoomRegion.js b/PanZoomRegion.js index <HASH>..<HASH> 100644 --- a/PanZoomRegion.js +++ b/PanZoomRegion.js @@ -1,11 +1,14 @@ -function PanZoomRegion(camera) { - +function PanZoomRegion(opts) { + + var camera = opts.camera; + this.zoomValue = 1; var fullWidth = 100, fullHeight = 100, width = 1, height = 1, + zoomMax = opts.zoomMax || 0.000001, aspect = 1, left = 0, right = 1, @@ -75,6 +78,7 @@ function PanZoomRegion(camera) { } function zoom(x, y, zoom) { + if(this.zoomValue <= zoomMax && zoom < 1) return; var ratioX = x / fullWidth; var ratioY = y / fullHeight; var focusX = left + ratioX * (right - left); @@ -98,10 +102,18 @@ function PanZoomRegion(camera) { setCamera(); } + function reset() { + left = 0; + right = 1; + top = 0; + bottom = 1; + } + this.pan = pan; this.zoom = zoom; this.precomposeViewport = precomposeViewport; this.setSize = setSize; + this.reset = reset; } module.exports = PanZoomRegion; \ No newline at end of file diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1,6 +1,6 @@ var CameraRegionController = require('./CameraRegionController'); var PanZoomRegion = require('./PanZoomRegion'); - +var gsap = require('gsap'); var INTERNAL = 0, EXTERNAL = 1; @@ -8,10 +8,14 @@ function Controller(opts) { var camera = opts.camera; var fovMin = opts.fovMin || 50; var fovMax = opts.fovMax || 60; - var panSpeed = panSpeed || .2; + var panSpeed = panSpeed || 0.2; + var zoomMax = zoomMax || 0.2; - var panZoomRegion = new PanZoomRegion(camera); + var panZoomRegion = new PanZoomRegion({ + camera: camera, + zoomMax: zoomMax + }); var regionController = new CameraRegionController({ @@ -86,10 +90,50 @@ function Controller(opts) { panZoomRegion.setSize(w, h); } + function setState(state) { + regionController.setState(state); + } + + this.animationValue = 0; + this.reset = function(animate) { + gsap.killTweensOf(camera); + gsap.killTweensOf(this); + if(animate) { + this.animationValue = 0; + gsap.to(this, 2, { 
+ fov: fovMax, + onUpdate: function() { + zoomFov( + fullWidth * .5, + fullHeight * .5, + 1 + ); + } + }) + gsap.to(this, 2, { + animationValue: 1, + onUpdate: function() { + zoomRegion( + fullWidth * .5, + fullHeight * .5, + 1 + (this.animationValue * (1-panZoomRegion.zoomValue)) + .00001 + ); + }, + onUpdateScope: this + }); + } else { + panZoomRegion.reset(animate); + camera.fov = fovMax; + camera.updateProjectionMatrix(); + } + } this.setSize = setSize; this.precomposeViewport = precomposeViewport; this.panSignal = regionController.panSignal; this.zoomSignal = regionController.zoomSignal; + this.setState = setState; + this.onPointerDown = regionController.onPointerDown; + } module.exports = Controller; \ No newline at end of file
Bug fix and a new feature to toggle the active status
bunnybones1_threejs-camera-controller-pan-zoom-unified-pointer
train
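setState flips the signal subscriptions exactly once per transition; the early return is what prevents double-add and stray-remove calls. A framework-neutral Python sketch with a minimal signal stand-in:

class Signal:
    """Tiny stand-in for the JS signal objects."""
    def __init__(self):
        self.listeners = []
    def add(self, fn):
        self.listeners.append(fn)
    def remove(self, fn):
        self.listeners.remove(fn)

class Controller:
    def __init__(self, pointer_down_signal):
        self.pointer_down_signal = pointer_down_signal
        self.active = False

    def set_state(self, state):
        if state == self.active:     # idempotent: repeated calls are no-ops
            return
        self.active = state
        if state:
            self.pointer_down_signal.add(self.on_pointer_down)
        else:
            self.pointer_down_signal.remove(self.on_pointer_down)

    def on_pointer_down(self, *args):
        pass

sig = Signal()
controller = Controller(sig)
controller.set_state(True)
controller.set_state(True)           # would double-subscribe without the guard
assert len(sig.listeners) == 1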
c5b2ab478635e69f9a7ca2f157b9f709b6ec7077
diff --git a/admin/settings/appearance.php b/admin/settings/appearance.php index <HASH>..<HASH> 100644 --- a/admin/settings/appearance.php +++ b/admin/settings/appearance.php @@ -76,6 +76,24 @@ preferences,moodle|/user/preferences.php|t/preferences', $ADMIN->add('appearance', $temp); + // Course colours section. + $temp = new admin_settingpage('coursecolors', new lang_string('coursecolorsettings', 'admin')); + $temp->add(new admin_setting_heading('coursecolorheading', '', + new lang_string('coursecolorheading_desc', 'admin'))); + + $basecolors = ['#81ecec', '#74b9ff', '#a29bfe', '#dfe6e9', '#00b894', + '#0984e3', '#b2bec3', '#fdcb6e', '#fd79a8', '#6c5ce7']; + + foreach ($basecolors as $key => $color) { + $number = $key + 1; + $name = 'core_admin/coursecolor' . $number; + $title = get_string('coursecolor', 'admin', $number); + $setting = new admin_setting_configcolourpicker($name, $title, '', $color); + $temp->add($setting); + } + + $ADMIN->add('appearance', $temp); + // Calendar settings. $temp = new admin_settingpage('calendar', new lang_string('calendarsettings','admin')); diff --git a/lang/en/admin.php b/lang/en/admin.php index <HASH>..<HASH> 100644 --- a/lang/en/admin.php +++ b/lang/en/admin.php @@ -390,6 +390,9 @@ $string['cookiehttponly'] = 'Only http cookies'; $string['cookiesecure'] = 'Secure cookies only'; $string['country'] = 'Default country'; $string['course_customfield'] = 'Course custom fields'; +$string['coursecolor'] = 'Colour {$a}'; +$string['coursecolorheading_desc'] = 'Any courses without a course image set in the course settings are displayed on the Dashboard with a patterned course card. The colours used in the pattern may be specified below.'; +$string['coursecolorsettings'] = 'Course card colours'; $string['coursecontact'] = 'Course contacts'; $string['coursecontact_desc'] = 'This setting allows you to control who appears on the course description. Users need to have at least one of these roles in a course to be shown on the course description for that course.'; $string['coursecontactduplicates'] = 'Display all course contact roles'; diff --git a/lib/db/upgrade.php b/lib/db/upgrade.php index <HASH>..<HASH> 100644 --- a/lib/db/upgrade.php +++ b/lib/db/upgrade.php @@ -3402,5 +3402,19 @@ function xmldb_main_upgrade($oldversion) { upgrade_main_savepoint(true, 2019062900.00); } + if ($oldversion < 2019070400.01) { + + $basecolors = ['#81ecec', '#74b9ff', '#a29bfe', '#dfe6e9', '#00b894', + '#0984e3', '#b2bec3', '#fdcb6e', '#fd79a8', '#6c5ce7']; + + $colornr = 1; + foreach ($basecolors as $color) { + set_config('coursecolor' . $colornr, $color, 'core_admin'); + $colornr++; + } + + upgrade_main_savepoint(true, 2019070400.01); + } + return true; } diff --git a/lib/outputrenderers.php b/lib/outputrenderers.php index <HASH>..<HASH> 100644 --- a/lib/outputrenderers.php +++ b/lib/outputrenderers.php @@ -1570,9 +1570,11 @@ class core_renderer extends renderer_base { * @return string hex color code. */ public function get_generated_color_for_id($id) { - // The colour palette is hardcoded for now. It would make sense to combine it with theme settings. - $basecolors = ['#81ecec', '#74b9ff', '#a29bfe', '#dfe6e9', '#00b894', - '#0984e3', '#b2bec3', '#fdcb6e', '#fd79a8', '#6c5ce7']; + $colornumbers = range(1, 10); + $basecolors = []; + foreach ($colornumbers as $number) { + $basecolors[] = get_config('core_admin', 'coursecolor' . 
$number); + } $color = $basecolors[$id % 10]; return $color; diff --git a/version.php b/version.php index <HASH>..<HASH> 100644 --- a/version.php +++ b/version.php @@ -29,7 +29,7 @@ defined('MOODLE_INTERNAL') || die(); -$version = 2019071200.00; // YYYYMMDD = weekly release date of this DEV branch. +$version = 2019071200.01; // YYYYMMDD = weekly release date of this DEV branch. // RR = release increments - 00 in DEV branches. // .XX = incremental changes.
MDL-<I> core_admin: make course card colours configurable
moodle_moodle
train
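get_generated_color_for_id keeps its deterministic id-to-colour mapping; only the palette's source moves from a hardcoded array into config, seeded by the upgrade step. The mapping itself is a modulo lookup. A sketch using the defaults from the diff as the assumed palette:

BASE_COLORS = ["#81ecec", "#74b9ff", "#a29bfe", "#dfe6e9", "#00b894",
               "#0984e3", "#b2bec3", "#fdcb6e", "#fd79a8", "#6c5ce7"]

def generated_color_for_id(item_id, palette=BASE_COLORS):
    """Same id always yields the same colour, wherever the palette lives."""
    return palette[item_id % len(palette)]

assert generated_color_for_id(13) == "#dfe6e9"   # 13 % 10 == 3
assert generated_color_for_id(3) == generated_color_for_id(13)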
faf5ad9e96e8198eacc6e182486588ec14b16ea8
diff --git a/cache/remotecache/registry/registry.go b/cache/remotecache/registry/registry.go index <HASH>..<HASH> 100644 --- a/cache/remotecache/registry/registry.go +++ b/cache/remotecache/registry/registry.go @@ -10,6 +10,7 @@ import ( "github.com/moby/buildkit/cache/remotecache" "github.com/moby/buildkit/session" "github.com/moby/buildkit/util/contentutil" + "github.com/moby/buildkit/util/push" "github.com/moby/buildkit/util/resolver" "github.com/moby/buildkit/util/resolver/limited" digest "github.com/opencontainers/go-digest" @@ -48,7 +49,7 @@ func ResolveCacheExporterFunc(sm *session.Manager, hosts docker.RegistryHosts) r ociMediatypes = b } remote := resolver.DefaultPool.GetResolver(hosts, ref, "push", sm, g) - pusher, err := remote.Pusher(ctx, ref) + pusher, err := push.Pusher(ctx, remote, ref) if err != nil { return nil, err } diff --git a/util/contentutil/refs.go b/util/contentutil/refs.go index <HASH>..<HASH> 100644 --- a/util/contentutil/refs.go +++ b/util/contentutil/refs.go @@ -37,17 +37,21 @@ func IngesterFromRef(ref string) (content.Ingester, error) { Client: http.DefaultClient, }) - pusher, err := remote.Pusher(context.TODO(), ref) + p, err := remote.Pusher(context.TODO(), ref) if err != nil { return nil, err } return &ingester{ locker: locker.New(), - pusher: pusher, + pusher: &pusher{p}, }, nil } +type pusher struct { + remotes.Pusher +} + type ingester struct { locker *locker.Locker pusher remotes.Pusher diff --git a/util/push/push.go b/util/push/push.go index <HASH>..<HASH> 100644 --- a/util/push/push.go +++ b/util/push/push.go @@ -11,6 +11,7 @@ import ( "github.com/containerd/containerd/content" "github.com/containerd/containerd/errdefs" "github.com/containerd/containerd/images" + "github.com/containerd/containerd/remotes" "github.com/containerd/containerd/remotes/docker" "github.com/docker/distribution/reference" "github.com/moby/buildkit/session" @@ -28,6 +29,21 @@ import ( "github.com/sirupsen/logrus" ) +type pusher struct { + remotes.Pusher +} + +// Pusher creates and new pusher instance for resolver +// containerd resolver.Pusher() method is broken and should not be called directly +// we need to wrap to mask interface detection +func Pusher(ctx context.Context, resolver remotes.Resolver, ref string) (remotes.Pusher, error) { + p, err := resolver.Pusher(ctx, ref) + if err != nil { + return nil, err + } + return &pusher{Pusher: p}, nil +} + func Push(ctx context.Context, sm *session.Manager, sid string, provider content.Provider, manager content.Manager, dgst digest.Digest, ref string, insecure bool, hosts docker.RegistryHosts, byDigest bool, annotations map[digest.Digest]map[string]string) error { desc := ocispecs.Descriptor{ Digest: dgst, @@ -66,7 +82,7 @@ func Push(ctx context.Context, sm *session.Manager, sid string, provider content resolver := resolver.DefaultPool.GetResolver(hosts, ref, scope, sm, session.NewGroup(sid)) - pusher, err := resolver.Pusher(ctx, ref) + pusher, err := Pusher(ctx, resolver, ref) if err != nil { return err }
push: work around deadlock in containerd pusher
moby_buildkit
train
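Wrapping the pusher in a private struct hides the concrete type, so interface detection elsewhere stops matching it; the Go comment calls this masking. The analogous move in Python is a delegating wrapper that defeats isinstance checks (the class names here are hypothetical):

class PusherWrapper:
    """Delegates everything, but is not an instance of the wrapped class."""
    def __init__(self, inner):
        self._inner = inner

    def __getattr__(self, name):
        return getattr(self._inner, name)

class RealPusher:
    def push(self, ref):
        return "pushed " + ref

pusher = PusherWrapper(RealPusher())
assert pusher.push("img:latest") == "pushed img:latest"
assert not isinstance(pusher, RealPusher)   # detection now fails, on purpose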
fdba0ffc2c7a2ef8437054d0524b58c6b1ab7f22
diff --git a/src/CastableDataObject.php b/src/CastableDataObject.php index <HASH>..<HASH> 100644 --- a/src/CastableDataObject.php +++ b/src/CastableDataObject.php @@ -14,6 +14,14 @@ use UnexpectedValueException; */ class CastableDataObject extends AbstractDataObject { + const SCALAR_CASTS = ['boolean', 'integer', 'float', 'string', 'array']; + + /** + * If true, returns an empty dataobject instance for unset or null values. + * + * @var bool + */ + protected $castUnsetObjects = false; /** * Returns cast types per attribute key. @@ -43,24 +51,37 @@ class CastableDataObject extends AbstractDataObject */ public function &getAttributeValue($key) { + $this->applyCast($key); + + return parent::getAttributeValue($key); + } + + /** + * Applies cast for a given attribute key. + * + * @param string $key + */ + protected function applyCast($key) + { $casts = $this->casts(); if ( ! count($casts) || ! array_key_exists($key, $casts)) { - return parent::getAttribute($key); + return; } - if ( ! isset($this->attributes[$key])) { + if ( ! isset($this->attributes[ $key ])) { $value = null; } else { - $value = $this->attributes[$key]; + $value = $this->attributes[ $key ]; } - if (in_array($casts[ $key ], ['boolean', 'integer', 'float', 'string', 'array'])) { - $value = call_user_func([$this, 'castValueAs' . ucfirst($casts[ $key ])], $value); - return $value; + // If the cast type is a simple scalar, apply it and return + if (in_array($casts[ $key ], static::SCALAR_CASTS)) { + $this->attributes[ $key ] = call_user_func([$this, 'castValueAs' . ucfirst($casts[ $key ])], $value); + return; } - // Fallback is to attempt a data object cast + // Otherwise attempt a data object cast $dataObjectClass = $casts[ $key ]; $dataObjectArray = false; @@ -72,19 +93,26 @@ class CastableDataObject extends AbstractDataObject if (null === $value) { if ($dataObjectArray) { - $value = []; - return $value; + $this->attributes[ $key ] = []; + return; } - return $value; + + if ($this->castUnsetObjects) { + $this->attributes[ $key ] = $this->makeNestedDataObject($dataObjectClass, [], $key); + } + return; } if ($dataObjectArray) { - if (is_array($this->attributes[$key])) { + if (is_array($this->attributes[ $key ])) { - foreach ($this->attributes[$key] as $index => &$item) { + foreach ($this->attributes[ $key ] as $index => &$item) { if (null === $item) { + if ($this->castUnsetObjects) { + $item = $this->makeNestedDataObject($dataObjectClass, [], $key . '.' . $index); + } continue; } @@ -96,18 +124,17 @@ class CastableDataObject extends AbstractDataObject unset($item); - } else { - - if ( ! ($this->attributes[ $key ] instanceof $dataObjectClass)) { - $this->attributes[ $key ] = $this->makeNestedDataObject( - $dataObjectClass, - $this->attributes[ $key ], - $key - ); - } + return; } - return $this->attributes[$key]; + // Single data object + if ( ! ($this->attributes[ $key ] instanceof $dataObjectClass)) { + $this->attributes[ $key ] = $this->makeNestedDataObject( + $dataObjectClass, + $this->attributes[ $key ], + $key + ); + } } /**
Castable data object can now cast unset values to empty objects
czim_laravel-dataobject
train
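applyCast dispatches on a per-key cast map: scalar cast names go through dedicated coercers, anything else is treated as a nested data-object class. A trimmed Python sketch of that dispatch, loosely following the PHP rather than reproducing its exact semantics:

SCALAR_CASTS = {"boolean": bool, "integer": int, "float": float, "string": str}

class CastableDataObject:
    def __init__(self, attributes, casts):
        self.attributes = attributes
        self.casts = casts

    def apply_cast(self, key):
        cast = self.casts.get(key)
        if cast is None:
            return
        value = self.attributes.get(key)
        if cast in SCALAR_CASTS:             # simple scalar: coerce in place
            self.attributes[key] = SCALAR_CASTS[cast](value)
        elif value is not None and not isinstance(value, cast):
            self.attributes[key] = cast(value, {})   # nested object cast

obj = CastableDataObject({"age": "41"}, {"age": "integer"})
obj.apply_cast("age")
assert obj.attributes["age"] == 41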
4989a87b53b409e3f930a04ee0a52c29802c89ba
diff --git a/grails-web-sitemesh/src/main/groovy/org/grails/web/sitemesh/GroovyPageLayoutFinder.java b/grails-web-sitemesh/src/main/groovy/org/grails/web/sitemesh/GroovyPageLayoutFinder.java index <HASH>..<HASH> 100644 --- a/grails-web-sitemesh/src/main/groovy/org/grails/web/sitemesh/GroovyPageLayoutFinder.java +++ b/grails-web-sitemesh/src/main/groovy/org/grails/web/sitemesh/GroovyPageLayoutFinder.java @@ -16,6 +16,7 @@ package org.grails.web.sitemesh; import grails.util.Environment; +import grails.util.GrailsNameUtils; import groovy.lang.GroovyObject; import java.util.Map; @@ -27,8 +28,10 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import grails.util.GrailsClassUtils; import grails.util.GrailsStringUtils; +import org.grails.core.artefact.ControllerArtefactHandler; import org.grails.io.support.GrailsResourceUtils; import org.codehaus.groovy.grails.web.metaclass.ControllerDynamicMethods; +import org.grails.web.servlet.mvc.GrailsWebRequest; import org.grails.web.util.GrailsApplicationAttributes; import org.grails.web.servlet.view.AbstractGrailsView; import org.grails.web.servlet.view.GrailsViewResolver; @@ -110,33 +113,40 @@ public class GroovyPageLayoutFinder implements ApplicationListener<ContextRefres if (GrailsStringUtils.isBlank(layoutName)) { GroovyObject controller = (GroovyObject)request.getAttribute(GrailsApplicationAttributes.CONTROLLER); - if (controller != null) { - String controllerName = (String)controller.getProperty(ControllerDynamicMethods.CONTROLLER_NAME_PROPERTY); - String actionUri = (String)controller.getProperty(ControllerDynamicMethods.ACTION_URI_PROPERTY); - - if (LOG.isDebugEnabled()) { - LOG.debug("Found controller in request, location layout for controller [" + controllerName - + "] and action [" + actionUri + "]"); + if (controller != null ) { + GrailsWebRequest webRequest = GrailsWebRequest.lookup(request); + String controllerName = webRequest.getControllerName(); + if(controllerName == null) { + controllerName = GrailsNameUtils.getLogicalPropertyName(controller.getClass().getName(), ControllerArtefactHandler.TYPE); } + String actionUri = webRequest.getAttributes().getControllerActionUri(request); - LayoutCacheKey cacheKey = null; - boolean cachedIsNull = false; + if(controllerName != null && actionUri != null) { - if (cacheEnabled) { - cacheKey = new LayoutCacheKey(controllerName, actionUri); - DecoratorCacheValue cacheValue = layoutDecoratorCache.get(cacheKey); - if (cacheValue != null && (!gspReloadEnabled || !cacheValue.isExpired())) { - d = cacheValue.getDecorator(); - if (d == null) { - cachedIsNull = true; - } + if (LOG.isDebugEnabled()) { + LOG.debug("Found controller in request, location layout for controller [" + controllerName + + "] and action [" + actionUri + "]"); } - } - if (d == null && !cachedIsNull) { - d = resolveDecorator(request, controller, controllerName, actionUri); + LayoutCacheKey cacheKey = null; + boolean cachedIsNull = false; + if (cacheEnabled) { - layoutDecoratorCache.put(cacheKey, new DecoratorCacheValue(d)); + cacheKey = new LayoutCacheKey(controllerName, actionUri); + DecoratorCacheValue cacheValue = layoutDecoratorCache.get(cacheKey); + if (cacheValue != null && (!gspReloadEnabled || !cacheValue.isExpired())) { + d = cacheValue.getDecorator(); + if (d == null) { + cachedIsNull = true; + } + } + } + + if (d == null && !cachedIsNull) { + d = resolveDecorator(request, controller, controllerName, actionUri); + if (cacheEnabled) { + layoutDecoratorCache.put(cacheKey, new 
DecoratorCacheValue(d)); + } } } }
fix NPE when applyLayout is used. Fixes #<I>
grails_grails-core
train
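The layout cache stores misses as well as hits; cachedIsNull distinguishes "resolved before, found nothing" from "never looked". Negative caching with a sentinel, as a sketch:

_MISS = object()          # sentinel: "resolved before, found nothing"
_cache = {}

def find_decorator(controller, action, resolve):
    key = (controller, action)
    cached = _cache.get(key)
    if cached is not None:
        return None if cached is _MISS else cached
    result = resolve(controller, action)
    _cache[key] = result if result is not None else _MISS
    return result

calls = []
def resolve(controller, action):
    calls.append((controller, action))
    return None           # no layout found

find_decorator("book", "show", resolve)
find_decorator("book", "show", resolve)
assert calls == [("book", "show")]    # the resolver ran only once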
9bbd9f0aa1fff3cb85fdb9b2cbc8c96539b695ca
diff --git a/percy/client.py b/percy/client.py index <HASH>..<HASH> 100644 --- a/percy/client.py +++ b/percy/client.py @@ -32,6 +32,7 @@ class Client(object): branch = kwargs.get('branch') or self.environment.branch pull_request_number = kwargs.get('pull_request_number') \ or self.environment.pull_request_number + resources = kwargs.get('resources') data = { 'data': { @@ -42,6 +43,14 @@ class Client(object): } } } + + if resources: + data['data']['relationships'] = { + 'resources': { + 'data': [r.serialize() for r in resources], + } + } + path = "{base_url}/repos/{repo}/builds/".format( base_url=self.config.api_url, repo=repo, diff --git a/tests/test_client.py b/tests/test_client.py index <HASH>..<HASH> 100644 --- a/tests/test_client.py +++ b/tests/test_client.py @@ -31,11 +31,15 @@ class TestPercyClient(unittest.TestCase): fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures', 'build_response.json') build_fixture = open(fixture_path).read() mock.post('https://percy.io/api/v1/repos/foo/bar/builds/', text=build_fixture) + resources = [ + percy.Resource(resource_url='/main.css', is_root=False, content='foo'), + ] build_data = self.percy_client.create_build( repo='foo/bar', branch='branch', pull_request_number=111, + resources=resources ) assert mock.request_history[0].json() == { 'data': { @@ -43,7 +47,23 @@ class TestPercyClient(unittest.TestCase): 'attributes': { 'branch': 'branch', 'pull-request-number': 111, + }, + 'relationships': { + 'resources': { + 'data': [ + { + 'type': 'resources', + 'id': resources[0].sha, + 'attributes': { + 'resource-url': resources[0].resource_url, + 'mimetype': resources[0].mimetype, + 'is-root': resources[0].is_root, + } + } + ], + } } + } } assert build_data == json.loads(build_fixture) @@ -136,4 +156,4 @@ class TestPercyClient(unittest.TestCase): } } - assert result == {'success': 'true'} \ No newline at end of file + assert result == {'success': 'true'}
Resources in create build (#8)

* ResourceLoader build_resources method
  - test data
  - method to build resources
  - tests for it
* Rename src_path to root_dir and make singular
* Add resources to create build
percy_python-percy-client
train
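create_build only attaches a relationships block when resources are supplied, and each resource serializes into the JSON:API shape the test asserts. Rebuilding that payload, with a Resource class that loosely follows the fixture (field names assumed from the test, not Percy's documented API):

class Resource:
    def __init__(self, resource_url, sha, is_root=False, mimetype=None):
        self.resource_url, self.sha = resource_url, sha
        self.is_root, self.mimetype = is_root, mimetype

    def serialize(self):
        return {"type": "resources", "id": self.sha,
                "attributes": {"resource-url": self.resource_url,
                               "mimetype": self.mimetype,
                               "is-root": self.is_root}}

def build_payload(branch, pull_request_number, resources=None):
    data = {"data": {"type": "builds",
                     "attributes": {"branch": branch,
                                    "pull-request-number": pull_request_number}}}
    if resources:   # omit the block entirely when there is nothing to send
        data["data"]["relationships"] = {
            "resources": {"data": [r.serialize() for r in resources]}}
    return data

assert "relationships" not in build_payload("main", 7)["data"]
payload = build_payload("main", 7, [Resource("/main.css", "abc123")])
assert payload["data"]["relationships"]["resources"]["data"][0]["id"] == "abc123"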
47c23bf5b736a0881fd2545420374e15ec0311e4
diff --git a/spi/src/main/java/org/jboss/arquillian/spi/ExceptionProxy.java b/spi/src/main/java/org/jboss/arquillian/spi/ExceptionProxy.java index <HASH>..<HASH> 100644 --- a/spi/src/main/java/org/jboss/arquillian/spi/ExceptionProxy.java +++ b/spi/src/main/java/org/jboss/arquillian/spi/ExceptionProxy.java @@ -1,3 +1,19 @@ +/* + * JBoss, Home of Professional Open Source + * Copyright 2009, Red Hat Middleware LLC, and individual contributors + * by the @authors tag. See the copyright.txt in the distribution for a + * full listing of individual contributors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ package org.jboss.arquillian.spi; import java.io.Serializable; @@ -24,7 +40,7 @@ import java.lang.reflect.Constructor; * </ul> * * - * @author Andy Gibson + * @author <a href="mailto:[email protected]">Andy Gibson</a> * */ public class ExceptionProxy implements Serializable {
ARQ-<I> Proxy server side exceptions and re-create them on the client if possible (added JBoss header)
arquillian_arquillian-core
train
cc4d64dfd7fa88a29ebe69ff9f0d5c15a8943058
diff --git a/src/AnimeDB/Bundle/CatalogBundle/Resources/views/Item/search.html.twig b/src/AnimeDB/Bundle/CatalogBundle/Resources/views/Item/search.html.twig index <HASH>..<HASH> 100644 --- a/src/AnimeDB/Bundle/CatalogBundle/Resources/views/Item/search.html.twig +++ b/src/AnimeDB/Bundle/CatalogBundle/Resources/views/Item/search.html.twig @@ -23,11 +23,11 @@ <ul> {% for item in list %} <li> - <a href="{{ path('filler_fill') }}?filler_get[url]={{ item.source|url_encode }}" class="image"> + <a href="{{ item.link }}" class="image"> <img src="{{ item.image|dummy('card') }}" alt="{{ item.name }}" /> </a> <div class="info"> - <a href="{{ path('filler_fill', {'filler_get': {'url': item.source, 'filler': plugin }}) }}">{{ item.name }}</a> + <a href="{{ item.link }}">{{ item.name }}</a> <div>{{ item.description|nl2br }}</div> </div> <br clear="both"> diff --git a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/CustomForm.php b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/CustomForm.php index <HASH>..<HASH> 100644 --- a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/CustomForm.php +++ b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/CustomForm.php @@ -30,6 +30,8 @@ interface CustomForm extends SearchInterface /** * Search source by form data * + * Use $url_bulder for build link to fill item from source or build their own links + * * Return structure * <code> * [ @@ -38,8 +40,9 @@ interface CustomForm extends SearchInterface * </code> * * @param array $data + * @param \Closure $url_bulder * * @return array */ - public function search(array $data); + public function search(array $data, \Closure $url_bulder); } \ No newline at end of file diff --git a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/Item.php b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/Item.php index <HASH>..<HASH> 100644 --- a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/Item.php +++ b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/Item.php @@ -26,11 +26,11 @@ class Item protected $name = ''; /** - * Source + * Link to fill item from source * * @var string */ - protected $source = ''; + protected $link = ''; /** * Image @@ -50,14 +50,14 @@ class Item * Construct * * @param string $name - * @param string $source + * @param string $link * @param string $image * @param string $description */ - public function __construct($name, $source, $image, $description) + public function __construct($name, $link, $image, $description) { $this->name = $name; - $this->source = $source; + $this->link = $link; $this->image = $image; $this->description = $description; } @@ -73,13 +73,13 @@ class Item } /** - * Get source + * Get link to fill item from source * * @return string */ - public function getSource() + public function getLink() { - return $this->source; + return $this->link; } /** diff --git a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/SearchInterface.php b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/SearchInterface.php index <HASH>..<HASH> 100644 --- a/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/SearchInterface.php +++ b/src/AnimeDB/Bundle/CatalogBundle/Service/Plugin/Search/SearchInterface.php @@ -23,6 +23,8 @@ interface SearchInterface extends PluginInterface /** * Search source by name * + * Use $url_bulder for build link to fill item from source or build their own links + * * Return structure * <code> * [ @@ -31,8 +33,9 @@ interface SearchInterface extends PluginInterface * </code> * * @param string $name + * @param \Closure $url_bulder * * 
@return array */ - public function search($name); + public function search($name, \Closure $url_bulder); } \ No newline at end of file
SearchItem should return a link to fill an item from its source
anime-db_anime-db
train
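Passing $url_builder into search() lets the host inject how fill links are built while plugins stay free to construct their own. Dependency injection of a callable, sketched in Python (the result data and the builder are invented for illustration):

def search(name, url_builder):
    """Return search hits whose fill links come from the injected builder."""
    raw_results = [{"name": name, "source": "http://example.org/anime/1"}]
    return [{"name": hit["name"], "link": url_builder(hit["source"])}
            for hit in raw_results]

def fill_url(source):
    return "/fill?url=" + source    # stand-in for the host's route builder

items = search("Bebop", fill_url)
assert items[0]["link"] == "/fill?url=http://example.org/anime/1"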
af7fb4fa0a64f338ed1e40ead5a002f091d638a0
diff --git a/src/Opulence/Routing/Dispatchers/RouteDispatcher.php b/src/Opulence/Routing/Dispatchers/RouteDispatcher.php index <HASH>..<HASH> 100644 --- a/src/Opulence/Routing/Dispatchers/RouteDispatcher.php +++ b/src/Opulence/Routing/Dispatchers/RouteDispatcher.php @@ -11,8 +11,6 @@ namespace Opulence\Routing\Dispatchers; use Closure; -use Exception; -use Opulence\Http\HttpException; use Opulence\Http\Requests\Request; use Opulence\Http\Responses\Response; use Opulence\Routing\Controller; @@ -73,62 +71,54 @@ class RouteDispatcher implements IRouteDispatcher * @param CompiledRoute $route The route being dispatched * @return Response Returns the value from the controller method * @throws RouteException Thrown if the method could not be called on the controller - * @throws HttpException Thrown if the controller threw an HttpException */ private function callController($controller, CompiledRoute $route) : Response { - try { - if (is_callable($controller)) { + if (is_callable($controller)) { + try { $reflection = new ReflectionFunction($controller); - $parameters = $this->resolveControllerParameters( - $reflection->getParameters(), - $route->getPathVars(), - $route, - true - ); + } catch (\ReflectionException $e) { + throw new RouteException("Function {$controller} does not exist"); + } + $parameters = $this->resolveControllerParameters( + $reflection->getParameters(), + $route->getPathVars(), + $route, + true + ); - $response = $controller(...$parameters); - } else { + $response = $controller(...$parameters); + } else { + try { $reflection = new ReflectionMethod($controller, $route->getControllerMethod()); - $parameters = $this->resolveControllerParameters( - $reflection->getParameters(), - $route->getPathVars(), - $route, - false - ); + } catch (\ReflectionException $e) { + throw new RouteException("Method {$route->getControllerMethod()} does not exist"); + } + $parameters = $this->resolveControllerParameters( + $reflection->getParameters(), + $route->getPathVars(), + $route, + false + ); - if ($reflection->isPrivate()) { - throw new RouteException("Method {$route->getControllerMethod()} is private"); - } - - if ($controller instanceof Controller) { - $response = $controller->callMethod( - $route->getControllerMethod(), $parameters - ); - } else { - $response = $controller->{$route->getControllerMethod()}(...$parameters); - } + if ($reflection->isPrivate()) { + throw new RouteException("Method {$route->getControllerMethod()} is private"); } - if (is_string($response)) { - $response = new Response($response); + if ($controller instanceof Controller) { + $response = $controller->callMethod( + $route->getControllerMethod(), $parameters + ); + } else { + $response = $controller->{$route->getControllerMethod()}(...$parameters); } + } - return $response; - } catch (HttpException $ex) { - // We don't want to catch these exceptions, but we want to catch all others - throw $ex; - } catch (Exception $ex) { - throw new RouteException( - sprintf( - 'Reflection failed for %s: %s', - $route->usesCallable() ? 'closure' : "{$route->getControllerName()}::{$route->getControllerMethod()}", - $ex - ), - 0, - $ex - ); + if (is_string($response)) { + $response = new Response($response); } + + return $response; } /**
Remove excessive exception handling (#<I>)

RouteDispatcher exception handling prevents handling of custom exceptions in Global Middleware.
opulencephp_Opulence
train
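Narrowing the try block to just the reflection lookup is what lets domain exceptions, including HTTP ones, escape to middleware untouched. The same shape in Python, where only the attribute lookup is guarded:

def call_controller(controller, method_name):
    try:
        method = getattr(controller, method_name)
    except AttributeError:
        # Translate only the reflection failure; anything the action
        # itself raises propagates to the caller unchanged.
        raise LookupError("Method %s does not exist" % method_name)
    return method()

class BookController:
    def show(self):
        return "ok"

assert call_controller(BookController(), "show") == "ok"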
b4bf3646577d42adb96580c8aa96cd4fc4eb8e2b
diff --git a/test/high_availability/ha_tools.py b/test/high_availability/ha_tools.py index <HASH>..<HASH> 100644 --- a/test/high_availability/ha_tools.py +++ b/test/high_availability/ha_tools.py @@ -35,9 +35,9 @@ import pymongo.errors from pymongo.read_preferences import ReadPreference home = os.environ.get('HOME') -default_dbpath = os.path.join(home, 'data', 'pymongo_high_availability') +default_dbpath = os.path.join(home, 'data', 'motor_ha') dbpath = os.environ.get('DBPATH', default_dbpath) -default_logpath = os.path.join(home, 'log', 'pymongo_high_availability') +default_logpath = os.path.join(home, 'log', 'motor_ha') logpath = os.environ.get('LOGPATH', default_logpath) hostname = os.environ.get('HOSTNAME', 'localhost') port = int(os.environ.get('DBPORT', 27017))
Move HA tests' dbpath to motor_ha/.
mongodb_motor
train
b8a5d7857ea1e7d52c3dc04b639238679d149ca2
diff --git a/src/mui/delete/Delete.js b/src/mui/delete/Delete.js index <HASH>..<HASH> 100644 --- a/src/mui/delete/Delete.js +++ b/src/mui/delete/Delete.js @@ -115,8 +115,8 @@ Delete.propTypes = { function mapStateToProps(state, props) { return { - id: props.match.params.id, - data: state.admin[props.resource].data[props.match.params.id], + id: decodeURIComponent(props.match.params.id), + data: state.admin[props.resource].data[decodeURIComponent(props.match.params.id)], isLoading: state.admin.loading > 0, }; } diff --git a/src/mui/detail/Edit.js b/src/mui/detail/Edit.js index <HASH>..<HASH> 100644 --- a/src/mui/detail/Edit.js +++ b/src/mui/detail/Edit.js @@ -141,8 +141,8 @@ Edit.propTypes = { function mapStateToProps(state, props) { return { - id: props.match.params.id, - data: state.admin[props.resource].data[props.match.params.id], + id: decodeURIComponent(props.match.params.id), + data: state.admin[props.resource].data[decodeURIComponent(props.match.params.id)], isLoading: state.admin.loading > 0, }; } diff --git a/src/mui/detail/Show.js b/src/mui/detail/Show.js index <HASH>..<HASH> 100644 --- a/src/mui/detail/Show.js +++ b/src/mui/detail/Show.js @@ -101,8 +101,8 @@ Show.propTypes = { function mapStateToProps(state, props) { return { - id: props.match.params.id, - data: state.admin[props.resource].data[props.match.params.id], + id: decodeURIComponent(props.match.params.id), + data: state.admin[props.resource].data[decodeURIComponent(props.match.params.id)], isLoading: state.admin.loading > 0, }; }
Decode the id URI component matched from react-router
marmelab_react-admin
train
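decodeURIComponent on the matched :id lets records whose ids contain encoded slashes or spaces resolve against the store. Python's round trip lives in urllib.parse:

from urllib.parse import quote, unquote

record_id = "posts/42 draft"
path_param = quote(record_id, safe="")         # what appears in the URL
assert path_param == "posts%2F42%20draft"
assert unquote(path_param) == record_id        # what the store is keyed by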
517731d841cc5206b7b476540fd3692802055d08
diff --git a/.travis.yml b/.travis.yml index <HASH>..<HASH> 100644 --- a/.travis.yml +++ b/.travis.yml @@ -17,10 +17,11 @@ before_install: - pip install grimoire-kidash - pip install grimoire-reports - pip install grimoire-reports + - pip install httpretty - pip install pandas==0.18.1 - - pip install sortinghat - pip install PyMySQL - pip install pyyaml + - pip install sortinghat # install: # - ./setup.py install diff --git a/tests/test_task.py b/tests/test_task.py index <HASH>..<HASH> 100644 --- a/tests/test_task.py +++ b/tests/test_task.py @@ -35,7 +35,7 @@ from mordred.task import Task CONF_FILE = 'test.cfg' BACKEND_NAME = 'stackexchange' -COLLECTION_URL = 'http://bitergia:[email protected]:9200' +COLLECTION_URL = 'http://localhost:9200' COLLECTION_URL_STACKEXCHANGE = 'http://127.0.0.1:9200' REPO_NAME = 'https://stackoverflow.com/questions/tagged/ovirt' diff --git a/tests/test_task_enrich.py b/tests/test_task_enrich.py index <HASH>..<HASH> 100644 --- a/tests/test_task_enrich.py +++ b/tests/test_task_enrich.py @@ -26,6 +26,7 @@ import unittest import requests +from sortinghat.db.database import Database # Hack to make sure that tests import the right packages # due to setuptools behaviour @@ -44,6 +45,21 @@ logging.basicConfig(level=logging.INFO) class TestTaskEnrich(unittest.TestCase): """Task tests""" + def setUp(self): + config = Config(CONF_FILE) + sh = config.get_conf()['sortinghat'] + + self.sh_kwargs = {'user': sh['user'], 'password': sh['password'], + 'database': sh['database'], 'host': sh['host'], + 'port': None} + + # Clean the database to start an empty state + Database.drop(**self.sh_kwargs) + + # Create command + Database.create(**self.sh_kwargs) + self.sh_db = Database(**self.sh_kwargs) + def test_initialization(self): """Test whether attributes are initializated"""
[tests] Drop and create the SortingHat database before enriching

Also install httpretty via pip.
chaoss_grimoirelab-sirmordred
train
6eea0b43f4f3356f0c5f772760cb42e65c0381dc
diff --git a/tests/integration/annotations/test_label.py b/tests/integration/annotations/test_label.py index <HASH>..<HASH> 100644 --- a/tests/integration/annotations/test_label.py +++ b/tests/integration/annotations/test_label.py @@ -38,10 +38,10 @@ def test_label(output_file_url, selenium, screenshot): border_line_color='black', border_line_width=2, border_line_dash='8 4', render_mode='css') - plot.add_label(label1) - plot.add_label(label2) - plot.add_label(label3) - plot.add_label(label4) + plot.add_layout(label1) + plot.add_layout(label2) + plot.add_layout(label3) + plot.add_layout(label4) # Save the plot and start the test save(plot)
Properly name add_layout! Fixes a typo from the previous commit
bokeh_bokeh
train
6b7a7ed5705ada7db84938de0d8166f3bc551d20
diff --git a/source/awesome_tool/statemachine/global_variable_manager.py b/source/awesome_tool/statemachine/global_variable_manager.py index <HASH>..<HASH> 100644 --- a/source/awesome_tool/statemachine/global_variable_manager.py +++ b/source/awesome_tool/statemachine/global_variable_manager.py @@ -34,9 +34,10 @@ class GlobalVariableManager(Observable): self.__variable_locks = {} self.__dictionary_lock = Lock() self.__access_keys = {} + self.__variable_references = {} @Observable.observed - def set_variable(self, key, value): + def set_variable(self, key, value, per_reference=False, access_key=None): """Sets a global variable :param key: the key of the global variable to be set @@ -44,16 +45,33 @@ class GlobalVariableManager(Observable): """ self.__dictionary_lock.acquire() - self.__variable_locks[key] = Lock() - access_key = self.lock_variable(key) + unlock = True + if self.variable_exist(key): + if self.is_locked(key) and self.__access_keys[key] != access_key: + raise RuntimeError("Wrong access key for accessing global variable") + elif self.is_locked(key): + unlock = False + else: + access_key = self.lock_variable(key) + else: + self.__variable_locks[key] = Lock() + access_key = self.lock_variable(key) + # --- variable locked - self.__global_variable_dictionary[key] = copy.deepcopy(value) + if per_reference: + self.__global_variable_dictionary[key] = value + self.__variable_references[key] = True + else: + self.__global_variable_dictionary[key] = copy.deepcopy(value) + self.__variable_references[key] = False # --- release variable - self.unlock_variable(key, access_key) + + if unlock: + self.unlock_variable(key, access_key) self.__dictionary_lock.release() logger.debug("Global variable %s was set to %s" % (key, str(value))) - def get_variable(self, key): + def get_variable(self, key, per_reference=False): """Fetches the value of a global variable :param key: the key of the global variable to be fetched @@ -61,12 +79,27 @@ class GlobalVariableManager(Observable): """ if key in self.__global_variable_dictionary: access_key = self.lock_variable(key) - return_value = copy.deepcopy(self.__global_variable_dictionary[key]) + if per_reference: + if self.variable_can_be_referenced(key): + return_value = self.__global_variable_dictionary[key] + else: + self.unlock_variable(key, access_key) + raise RuntimeError("Variable cannot be accessed by reference") + else: + return_value = copy.deepcopy(self.__global_variable_dictionary[key]) self.unlock_variable(key, access_key) return return_value else: raise AttributeError("Global variable %s does not exist!" % str(key)) + def variable_can_be_referenced(self, key): + """Checks whether the value of the variable can be returned by reference + + :param str key: Name of the variable + :return: True if value of variable can bbe returned by reference, False else + """ + return key in self.__variable_references and self.__variable_references[key] + @Observable.observed def delete_global_variable(self, key): """Deletes a global variable @@ -80,6 +113,7 @@ class GlobalVariableManager(Observable): del self.__global_variable_dictionary[key] self.unlock_variable(key, access_key) del self.__variable_locks[key] + del self.__variable_references[key] else: raise AttributeError("Global variable %s does not exist!" 
% str(key)) self.__dictionary_lock.release() @@ -125,6 +159,7 @@ class GlobalVariableManager(Observable): """ if self.__access_keys[key] is access_key: self.__global_variable_dictionary[key] = copy.deepcopy(value) + self.__variable_references[key] = False else: raise RuntimeError("Wrong access key for accessing global variable") @@ -156,6 +191,7 @@ class GlobalVariableManager(Observable): """ if key in self.__variable_locks: return self.__variable_locks[key].locked() + return False ######################################################################### # Properties for all class fields that must be observed by gtkmvc @@ -174,11 +210,4 @@ class GlobalVariableManager(Observable): :param str key: The name of the variable :return: True if locked, False else """ - if key in self.__variable_locks: - # Try to acquire non-blocking, returns True if successful - if self.__variable_locks[key].acquire(False): - self.__variable_locks[key].release() - return False - return True - return False - + return self.locked_status_for_variable(key)
Allow values to be stored by reference in GVM - values of variables can be stored by reference in the GVM - accordingly, values can be retrieved by reference - the GVM holds a list of variables that have been stored by reference - only these variables can be accessed by reference
DLR-RM_RAFCON
train
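The GVM change above distinguishes deep-copied storage from by-reference storage. A minimal sketch of that bookkeeping in plain Python — hypothetical class and method names; the real RAFCON class additionally manages per-variable locks and access keys:

import copy

class GlobalVariables:
    """Toy model: store values by copy (default) or by reference."""
    def __init__(self):
        self._values = {}
        self._by_reference = {}

    def set_variable(self, key, value, per_reference=False):
        # By-reference storage keeps the caller's object itself; the
        # default deep-copies so later mutations cannot leak into the store.
        self._values[key] = value if per_reference else copy.deepcopy(value)
        self._by_reference[key] = per_reference

    def get_variable(self, key, per_reference=False):
        if per_reference and not self._by_reference.get(key, False):
            raise RuntimeError("Variable cannot be accessed by reference")
        value = self._values[key]
        return value if per_reference else copy.deepcopy(value)

With per_reference=True, set and get hand around the same object, which is exactly why only variables stored by reference may be fetched by reference.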
081b1c4592d2cc180c0a3b7bf3aa4ac8a3bc3eb1
diff --git a/environments/common/src/main/java/org/jboss/weld/environment/deployment/WeldDeployment.java b/environments/common/src/main/java/org/jboss/weld/environment/deployment/WeldDeployment.java index <HASH>..<HASH> 100644 --- a/environments/common/src/main/java/org/jboss/weld/environment/deployment/WeldDeployment.java +++ b/environments/common/src/main/java/org/jboss/weld/environment/deployment/WeldDeployment.java @@ -18,9 +18,7 @@ package org.jboss.weld.environment.deployment; import java.util.Collection; import java.util.Collections; -import java.util.HashMap; import java.util.HashSet; -import java.util.Map; import java.util.Set; import javax.enterprise.inject.spi.Extension; @@ -37,14 +35,14 @@ import org.jboss.weld.resources.spi.ResourceLoader; */ public class WeldDeployment extends AbstractWeldDeployment { - public static final String ADDITIONAL_BDA_ID_SUFFIX = ".additional"; + public static final String ADDITIONAL_BDA_ID = WeldDeployment.class.getName() + ".additional"; private final Set<WeldBeanDeploymentArchive> beanDeploymentArchives; - private final Map<ClassLoader, WeldBeanDeploymentArchive> additionalBeanDeploymentArchives; - private final ResourceLoader resourceLoader; + private WeldBeanDeploymentArchive additionalBeanDeploymentArchive = null; + /** * * @param resourceLoader @@ -57,7 +55,6 @@ public class WeldDeployment extends AbstractWeldDeployment { super(bootstrap, extensions); this.resourceLoader = resourceLoader; this.beanDeploymentArchives = beanDeploymentArchives; - this.additionalBeanDeploymentArchives = new HashMap<ClassLoader, WeldBeanDeploymentArchive>(); for (BeanDeploymentArchive archive : beanDeploymentArchives) { archive.getServices().add(ResourceLoader.class, resourceLoader); } @@ -89,13 +86,12 @@ public class WeldDeployment extends AbstractWeldDeployment { } protected BeanDeploymentArchive createAdditionalBeanDeploymentArchiveIfNeeded(Class<?> beanClass) { - WeldBeanDeploymentArchive additionalBda = additionalBeanDeploymentArchives.get(beanClass.getClassLoader()); - if (additionalBda != null) { - additionalBda.addBeanClass(beanClass.getName()); + if (additionalBeanDeploymentArchive == null) { + additionalBeanDeploymentArchive = createAdditionalBeanDeploymentArchive(beanClass); } else { - additionalBda = createAdditionalBeanDeploymentArchive(beanClass); + additionalBeanDeploymentArchive.addBeanClass(beanClass.getName()); } - return additionalBda; + return additionalBeanDeploymentArchive; } /** @@ -109,8 +105,7 @@ public class WeldDeployment extends AbstractWeldDeployment { Set<String> beanClasses = new HashSet<String>(); beanClasses.add(beanClass.getName()); - WeldBeanDeploymentArchive additionalBda = new WeldBeanDeploymentArchive(beanClass.getClassLoader().getClass().getName() + "@" - + System.identityHashCode(beanClass.getClassLoader()) + ADDITIONAL_BDA_ID_SUFFIX, beanClasses, null); + WeldBeanDeploymentArchive additionalBda = new WeldBeanDeploymentArchive(ADDITIONAL_BDA_ID, beanClasses, null); additionalBda.getServices().add(ResourceLoader.class, resourceLoader); additionalBda.getServices().addAll(getServices().entrySet()); beanDeploymentArchives.add(additionalBda); @@ -119,8 +114,6 @@ public class WeldDeployment extends AbstractWeldDeployment { for (WeldBeanDeploymentArchive archive : beanDeploymentArchives) { archive.setAccessibleBeanDeploymentArchives(beanDeploymentArchives); } - - additionalBeanDeploymentArchives.put(beanClass.getClassLoader(), additionalBda); return additionalBda; } diff --git 
a/environments/servlet/tests/base/src/main/java/org/jboss/weld/environment/servlet/test/deployment/bda/additional/AdditionalBeanDeploymentArchiveTestBase.java b/environments/servlet/tests/base/src/main/java/org/jboss/weld/environment/servlet/test/deployment/bda/additional/AdditionalBeanDeploymentArchiveTestBase.java index <HASH>..<HASH> 100644 --- a/environments/servlet/tests/base/src/main/java/org/jboss/weld/environment/servlet/test/deployment/bda/additional/AdditionalBeanDeploymentArchiveTestBase.java +++ b/environments/servlet/tests/base/src/main/java/org/jboss/weld/environment/servlet/test/deployment/bda/additional/AdditionalBeanDeploymentArchiveTestBase.java @@ -58,7 +58,7 @@ public abstract class AdditionalBeanDeploymentArchiveTestBase { assertEquals(3, beanDeploymentArchivesMap.size()); boolean additionalBdaFound = false; for (BeanDeploymentArchive bda : beanDeploymentArchivesMap.keySet()) { - if (bda.getId().endsWith(WeldDeployment.ADDITIONAL_BDA_ID_SUFFIX)) { + if (bda.getId().equals(WeldDeployment.ADDITIONAL_BDA_ID)) { additionalBdaFound = true; break; }
WELD-<I> Switch to a simpler single-additional-BDA solution
weld_core
train
28c61fc80b7f4db5f58d54356bbbb98880a49bbb
diff --git a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/KeyringMonitor.java b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/KeyringMonitor.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/KeyringMonitor.java +++ b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/KeyringMonitor.java @@ -28,6 +28,17 @@ public interface KeyringMonitor { */ String MONITOR_IDENTIFICATION_CONFIG_NAME = "monitor.config.name"; + /** + * <h4>Service property</h4> + * + * The value should be a String, indicating the safLocation of + * keyring that should be monitored. + */ + String SAF_LOCATION = "saf.location"; + + /** + * Constant that is used for prefix + */ String SAF_PREFIX = "safkeyring://"; /** diff --git a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/KeystoreConfigurationFactory.java b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/KeystoreConfigurationFactory.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/KeystoreConfigurationFactory.java +++ b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/KeystoreConfigurationFactory.java @@ -110,7 +110,7 @@ public class KeystoreConfigurationFactory implements ManagedServiceFactory, File if (fileBased.booleanValue()) { createFileMonitor(svc.getKeyStore().getName(), svc.getKeyStore().getLocation(), trigger, svc.getKeyStore().getPollingRate()); } else if (svc.getKeyStore().getLocation().contains(KeyringMonitor.SAF_PREFIX)) { - createKeyringMonitor(svc.getKeyStore().getName(), trigger); + createKeyringMonitor(svc.getKeyStore().getName(), trigger, svc.getKeyStore().getLocation()); } } } else { @@ -316,12 +316,12 @@ public class KeystoreConfigurationFactory implements ManagedServiceFactory, File /** * Handles the creation of the SAF keyring monitor. */ - private void createKeyringMonitor(String name, String trigger) { + private void createKeyringMonitor(String name, String trigger, String keyStoreLocation) { if (TraceComponent.isAnyTracingEnabled() && tc.isEntryEnabled()) Tr.entry(tc, "createKeyringMonitor", new Object[] { name, trigger }); try { SafKeyringMonitor = new SAFKeyringMonitor(this); - setKeyringMonitorRegistration(SafKeyringMonitor.monitorKeyRings(name, trigger)); + setKeyringMonitorRegistration(SafKeyringMonitor.monitorKeyRings(name, trigger, keyStoreLocation)); } catch (Exception e) { if (TraceComponent.isAnyTracingEnabled() && tc.isDebugEnabled()) { Tr.debug(tc, "Exception creating the keyring monitor.", e); diff --git a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/SAFKeyringMonitor.java b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/SAFKeyringMonitor.java index <HASH>..<HASH> 100644 --- a/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/SAFKeyringMonitor.java +++ b/dev/com.ibm.ws.ssl/src/com/ibm/ws/ssl/internal/SAFKeyringMonitor.java @@ -43,13 +43,14 @@ public class SAFKeyringMonitor implements KeyringMonitor { * @param trigger what trigger the keyring update notification mbean * @return The <code>KeyringMonitor</code> service registration. 
*/ - public ServiceRegistration<KeyringMonitor> monitorKeyRings(String name, String trigger) { + public ServiceRegistration<KeyringMonitor> monitorKeyRings(String name, String trigger, String keyStoreLocation) { if (TraceComponent.isAnyTracingEnabled() && tc.isEventEnabled()) { Tr.event(this, tc, "monitorKeyRing registration for", name); } BundleContext bundleContext = actionable.getBundleContext(); final Hashtable<String, Object> keyRingMonitorProps = new Hashtable<String, Object>(); keyRingMonitorProps.put(KeyringMonitor.MONITOR_IDENTIFICATION_CONFIG_NAME, name); + keyRingMonitorProps.put(KeyringMonitor.SAF_LOCATION, keyStoreLocation); if (!(trigger.equalsIgnoreCase("disabled")) && trigger.equals("polled")) { Tr.warning(tc, "Cannot have polled trigger for keyRing name", name); } @@ -58,7 +59,7 @@ public class SAFKeyringMonitor implements KeyringMonitor { /** {@inheritDoc} */ @Override - public void refreshRequested(String name) { + public void refreshRequested(String keyStoreLocation) { actionable.performFileBasedAction(null); }
Added safLocation as a service property
OpenLiberty_open-liberty
train
336fc72ebb75ec8c8499154b0690d982fc4700dc
diff --git a/pytify/cli.py b/pytify/cli.py index <HASH>..<HASH> 100755 --- a/pytify/cli.py +++ b/pytify/cli.py @@ -1,7 +1,8 @@ #!/usr/bin/env python -import pytifylib -from strategy import get_pytify_class_by_platform -from menu import Menu +from __future__ import absolute_import +import pytify.pytifylib +from pytify.strategy import get_pytify_class_by_platform +from pytify.menu import Menu import argparse import sys import curses @@ -80,4 +81,4 @@ def main(): App() except KeyboardInterrupt: - print('\n Closing application...\n') \ No newline at end of file + print('\n Closing application...\n') diff --git a/pytify/darwin.py b/pytify/darwin.py index <HASH>..<HASH> 100644 --- a/pytify/darwin.py +++ b/pytify/darwin.py @@ -1,6 +1,7 @@ +from __future__ import absolute_import import sys import subprocess -from pytifylib import Pytifylib +from pytify.pytifylib import Pytifylib class Darwin(Pytifylib): diff --git a/pytify/linux.py b/pytify/linux.py index <HASH>..<HASH> 100644 --- a/pytify/linux.py +++ b/pytify/linux.py @@ -1,6 +1,7 @@ +from __future__ import absolute_import import sys import dbus -from pytifylib import Pytifylib +from pytify.pytifylib import Pytifylib class Linux(Pytifylib): diff --git a/pytify/menu.py b/pytify/menu.py index <HASH>..<HASH> 100644 --- a/pytify/menu.py +++ b/pytify/menu.py @@ -1,6 +1,7 @@ +from __future__ import absolute_import import curses from curses import panel -from strategy import get_pytify_class_by_platform +from pytify.strategy import get_pytify_class_by_platform """ diff --git a/pytify/strategy.py b/pytify/strategy.py index <HASH>..<HASH> 100644 --- a/pytify/strategy.py +++ b/pytify/strategy.py @@ -1,12 +1,14 @@ +from __future__ import absolute_import from sys import platform + def get_pytify_class_by_platform(): if 'linux' in platform: - from linux import Linux + from pytify.linux import Linux return Linux elif 'darwin' in platform: - from darwin import Darwin + from pytify.darwin import Darwin return Darwin else:
Added absolute imports for Python 2 and <I> compatibility.
bjarneo_Pytify
train
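For context on the Pytify commit: under Python 2 a bare `import pytifylib` inside the package resolves to the sibling module (an implicit relative import), while Python 3 only searches sys.path. The future import makes Python 2 behave like Python 3, so package-qualified paths work on both interpreters. A hedged mini-example with an invented package `mypkg`:

# mypkg/a.py
from __future__ import absolute_import  # Python 2: disable implicit relative imports

# A bare `import b` would now fail on both interpreters unless a
# top-level module named b exists. These work everywhere:
from mypkg import b   # absolute import via the package name
from . import b       # or an explicit relative import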
62cfa5111cb3d35daffa4437518d3b31e5483dda
diff --git a/angularjs-portal-frame/src/main/webapp/main.js b/angularjs-portal-frame/src/main/webapp/main.js index <HASH>..<HASH> 100644 --- a/angularjs-portal-frame/src/main/webapp/main.js +++ b/angularjs-portal-frame/src/main/webapp/main.js @@ -1,38 +1,38 @@ -require.config({ - - packages: [ - 'portal', - 'my-app' - ], - - paths: { - 'angular' : "bower_components/angular/angular", - 'app-config' : "js/app-config", - 'jquery' : "bower_components/jquery/dist/jquery.min", - 'jquery-ui' : "bower_components/jquery-ui/jquery-ui.min", - 'ngRoute' : "bower_components/angular-route/angular-route.min", - 'ngSanitize' : "bower_components/angular-sanitize/angular-sanitize.min", - 'ngStorage' : "bower_components/ngstorage/ngStorage.min", - 'sortable' : "js/sortable", - 'ui-bootstrap' : "bower_components/angular-bootstrap/ui-bootstrap.min", - 'ui-gravatar' : "bower_components/angular-gravatar/build/angular-gravatar", - // Use ui-bootstrap instead of bootstrap or uw-ui-toolkit. See https://angular-ui.github.io/bootstrap/ - //'uw-ui-toolkit' : "bower_components/uw-ui-toolkit/dist/js/uw-ui-toolkit.min" - }, - - shim: { - 'angular' : { deps: ['jquery'], exports: 'angular' }, - 'ngRoute' : { deps: ['angular'] }, - 'ngSanitize' : { deps: ['angular'] }, - 'ngStorage' : { deps: ['angular'] }, - 'ui-bootstrap' : { deps: ['angular'] }, - 'ui-gravatar' : { deps: ['angular'] }, - 'uw-ui-toolkit' : { deps: ['jquery'] } - } - -}); - -require(['angular', 'my-app'], function(angular) { - angular.bootstrap(document, ['my-app']); -}); - +require.config({ + + packages: [ + 'portal', + 'my-app' + ], + + paths: { + 'angular' : "bower_components/angular/angular", + 'app-config' : "js/app-config", + 'jquery' : "bower_components/jquery/dist/jquery.min", + 'jquery-ui' : "bower_components/jquery-ui/jquery-ui.min", + 'ngRoute' : "bower_components/angular-route/angular-route.min", + 'ngSanitize' : "bower_components/angular-sanitize/angular-sanitize.min", + 'ngStorage' : "bower_components/ngstorage/ngStorage.min", + 'sortable' : "js/sortable", + 'ui-bootstrap' : "bower_components/angular-bootstrap/ui-bootstrap-tpls.min", + 'ui-gravatar' : "bower_components/angular-gravatar/build/angular-gravatar", + // Use ui-bootstrap instead of bootstrap or uw-ui-toolkit. See https://angular-ui.github.io/bootstrap/ + //'uw-ui-toolkit' : "bower_components/uw-ui-toolkit/dist/js/uw-ui-toolkit.min" + }, + + shim: { + 'angular' : { deps: ['jquery'], exports: 'angular' }, + 'ngRoute' : { deps: ['angular'] }, + 'ngSanitize' : { deps: ['angular'] }, + 'ngStorage' : { deps: ['angular'] }, + 'ui-bootstrap' : { deps: ['angular'] }, + 'ui-gravatar' : { deps: ['angular'] }, + 'uw-ui-toolkit' : { deps: ['jquery'] } + } + +}); + +require(['angular', 'my-app'], function(angular) { + angular.bootstrap(document, ['my-app']); +}); +
Use correct artifact for ui-bootstrap (provides templates), and normalize line endings to LF for the file
uPortal-Project_uportal-home
train
2c28e32789442b58396e2501f483c72a349297a3
diff --git a/olctools/databasesetup/get_mlst.py b/olctools/databasesetup/get_mlst.py index <HASH>..<HASH> 100644 --- a/olctools/databasesetup/get_mlst.py +++ b/olctools/databasesetup/get_mlst.py @@ -4,6 +4,7 @@ from argparse import ArgumentParser from urllib.parse import urlparse import xml.dom.minidom as xml import urllib.request as url +import ssl import os """ Software Copyright License Agreement (BSD License) @@ -28,6 +29,7 @@ xml file, the script simply reports the possible matches so the user can try again. ''' +ssl._create_default_https_context = ssl._create_unverified_context def parse_args(): parser = ArgumentParser(description='Download MLST datasets by species' @@ -110,7 +112,7 @@ def getspeciesinfo(species_node, species, exact): for database_child_node in database_node.childNodes: if testelementtag(database_child_node, 'url'): info.database_url = gettext(database_child_node) - elif testelementtag(database_child_node, 'retrieved'): + elif testelementtag(database_child_node, 'retrieved') or testelementtag(database_child_node, b'retrieved'): info.retrieved = gettext(database_child_node) elif testelementtag(database_child_node, 'profiles'): for profile_count in database_child_node.getElementsByTagName('count'): @@ -141,13 +143,17 @@ def main(args): 'Listeria': 'Listeria monocytogenes', 'Bacillus': 'Bacillus cereus', 'Staphylococcus': "Staphylococcus aureus", - 'Salmonella': 'Salmonella enterica'} + 'Salmonella': 'Salmonella enterica', + 'Yersinia': 'Yersinia ruckeri'} # Set the appropriate profile based on the dictionary key:value pairs try: args.genus = organismdictionary[args.genus] except (KeyError, AttributeError): pass - with url.urlopen(args.repository_url) as docfile: + ctx = ssl.create_default_context() + ctx.check_hostname = False + ctx.verify_mode = ssl.CERT_NONE + with url.urlopen(args.repository_url, context=ctx) as docfile: doc = xml.parse(docfile) root = doc.childNodes[0] found_species = [] @@ -164,7 +170,6 @@ def main(args): print(info.name) return # exit(2) - # output information for the single matching species assert len(found_species) == 1 species_info = found_species[0] @@ -172,9 +177,9 @@ def main(args): species_name_underscores = species_name_underscores.replace('/', '_') species_all_fasta_filename = species_name_underscores + '.fasta' species_all_fasta_file = open('{}/{}'.format(args.path, species_all_fasta_filename), 'w') - log_filename = "mlst_data_download_{}_{}.log".format(species_name_underscores, species_info.retrieved) + log_filename = "mlst_data_download_{}.log".format(species_name_underscores) log_file = open('{}/{}'.format(args.path, log_filename), "w") - log_file.write(species_info.retrieved + '\n') + #log_file.write(species_info.retrieved + '\n') profile_path = urlparse(species_info.profiles_url).path profile_filename = profile_path.split('/')[-1] log_file.write("definitions: {}\n".format(profile_filename))
Added Yersinia to species list, added ssl.CERT_NONE, enabled bytes-encoded parsing
lowandrew_OLCTools
train
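The get_mlst change disables certificate verification in two ways: globally through ssl._create_default_https_context and per request through a context handed to urlopen. The per-request form in isolation (the URL is illustrative; dropping verification trades security for connectivity and is only defensible for known internal endpoints):

import ssl
import urllib.request as url

ctx = ssl.create_default_context()
ctx.check_hostname = False        # must be cleared before verify_mode
ctx.verify_mode = ssl.CERT_NONE   # accept any certificate

with url.urlopen("https://pubmlst.org/data/dbases.xml", context=ctx) as docfile:
    xml_bytes = docfile.read()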
42ff398f05d7ed4e656bbc8102ca9837d9082067
diff --git a/gns3server/modules/docker/__init__.py b/gns3server/modules/docker/__init__.py index <HASH>..<HASH> 100644 --- a/gns3server/modules/docker/__init__.py +++ b/gns3server/modules/docker/__init__.py @@ -49,8 +49,11 @@ class Docker(BaseManager): @asyncio.coroutine def connector(self): if not self._connected or self._connector.closed: + if not sys.platform.startswith("linux"): + raise DockerError("Docker is supported only on Linux") + try: - self._connector = aiohttp.connector.UnixConnector(self._server_url) + self._connector = aiohttp.connector.UnixConnector(self._server_url, conn_timeout=2) self._connected = True version = yield from self.query("GET", "version") except (aiohttp.errors.ClientOSError, FileNotFoundError):
Raise an error if you try to use Docker on a non-Linux host
GNS3_gns3-server
train
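The platform guard added to the GNS3 Docker manager fails fast instead of letting a missing Unix socket surface as an obscure connection error. Reduced to its essence (DockerError defined locally for the sketch):

import sys

class DockerError(Exception):
    """Docker cannot be used on this host."""

def ensure_linux():
    # The Docker daemon is reached over a Unix domain socket,
    # which the connector can only open on Linux hosts.
    if not sys.platform.startswith("linux"):
        raise DockerError("Docker is supported only on Linux")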
4cbe722fa592266d7da8173ca0a583eacf49472f
diff --git a/src/View/Helper/FormHelper.php b/src/View/Helper/FormHelper.php index <HASH>..<HASH> 100644 --- a/src/View/Helper/FormHelper.php +++ b/src/View/Helper/FormHelper.php @@ -2434,6 +2434,9 @@ class FormHelper extends Helper if (isset($options['empty'][$type])) { $options[$type]['empty'] = $options['empty'][$type]; } + if (isset($options['required']) && is_array($options[$type])) { + $options[$type]['required'] = $options['required']; + } } $hasYear = is_array($options['year']); diff --git a/tests/TestCase/View/Helper/FormHelperTest.php b/tests/TestCase/View/Helper/FormHelperTest.php index <HASH>..<HASH> 100644 --- a/tests/TestCase/View/Helper/FormHelperTest.php +++ b/tests/TestCase/View/Helper/FormHelperTest.php @@ -6987,6 +6987,23 @@ class FormHelperTest extends TestCase } /** + * test control() datetime & required attributes + * + * @return void + */ + public function testControlDatetimeRequired() + { + $result = $this->Form->control('birthday', [ + 'type' => 'date', + 'required' => true + ]); + $this->assertContains( + '<select name="birthday[year]" required="required"', + $result + ); + } + + /** * testYearAutoExpandRange method * * @return void
Pass required attribute into generated datetime options. The required option was not copied onto the attributes for each select element. This resulted in them not having the required attribute. Refs #<I>
cakephp_cakephp
train
70073fdc74fa8dda31512c01196aaa4e2f1266d4
diff --git a/version.php b/version.php index <HASH>..<HASH> 100644 --- a/version.php +++ b/version.php @@ -29,9 +29,9 @@ defined('MOODLE_INTERNAL') || die(); -$version = 2021100300.02; // YYYYMMDD = weekly release date of this DEV branch. +$version = 2021100600.00; // YYYYMMDD = weekly release date of this DEV branch. // RR = release increments - 00 in DEV branches. // .XX = incremental changes. -$release = '4.0dev (Build: 20211003)'; // Human-friendly version name +$release = '4.0dev+ (Build: 20211006)'; // Human-friendly version name $branch = '400'; // This version's branch. $maturity = MATURITY_ALPHA; // This version's maturity level.
on-demand release <I>dev+
moodle_moodle
train
12f53f2216e55aa8c89e940f46e5830139ee7ca1
diff --git a/SpiffWorkflow/bpmn/parser/task_parsers.py b/SpiffWorkflow/bpmn/parser/task_parsers.py index <HASH>..<HASH> 100644 --- a/SpiffWorkflow/bpmn/parser/task_parsers.py +++ b/SpiffWorkflow/bpmn/parser/task_parsers.py @@ -224,7 +224,7 @@ class SubWorkflowParser(CallActivityParser): parser = CamundaParser() parser.add_bpmn_xml(ET.fromstring(xml)) wf_spec = parser.get_spec(workflow_name) - wf_spec.xml = root + wf_spec.file = self.process_parser.filename return wf_spec diff --git a/SpiffWorkflow/bpmn/serializer/Packager.py b/SpiffWorkflow/bpmn/serializer/Packager.py index <HASH>..<HASH> 100644 --- a/SpiffWorkflow/bpmn/serializer/Packager.py +++ b/SpiffWorkflow/bpmn/serializer/Packager.py @@ -164,18 +164,8 @@ class Packager(object): for spec in self.wf_spec.get_specs_depth_first(): filename = spec.file if filename is None: - filename = spec.description - done_files.add(filename) - - self.write_to_package_zip( - "%s.bpmn" % spec.description, ET.tostring(spec.xml)) - - self.write_to_package_zip( - "src/"+filename+".bpmn", ET.tostring(spec.xml)) - - self._call_editor_hook('package_for_editor', spec, filename) - - elif filename not in done_files: + continue + if filename not in done_files: done_files.add(filename) bpmn = self.bpmn[os.path.abspath(filename)]
I had added changes that put the XML node in the wf_spec and then added that to the faux zip file. It turns out my main problem was not having a do_workflow_steps in the right place in my test, and simply skipping the add to the zip file is good enough. This is a lot cleaner and easier to follow
knipknap_SpiffWorkflow
train
5c48d415f1c07ce5835a78afa2538a70c1557401
diff --git a/etesync/api.py b/etesync/api.py index <HASH>..<HASH> 100644 --- a/etesync/api.py +++ b/etesync/api.py @@ -122,9 +122,10 @@ class EteSync: for entry in manager.list(crypto_manager, last_uid): entry.verify(prev) - syncEntry = SyncEntry.from_json(entry.getContent().decode()) + content = entry.getContent().decode() + syncEntry = SyncEntry.from_json(content) collection.apply_sync_entry(syncEntry) - cache.EntryEntity.create(uid=entry.uid, content=entry.getContent(), journal=journal) + cache.EntryEntity.create(uid=entry.uid, content=content, journal=journal) prev = entry @@ -154,14 +155,14 @@ class EteSync: action = 'CHANGE' sync_entry = SyncEntry(action, pim_entry.content) raw_entry = service.RawEntry(crypto_manager) - raw_entry.update(sync_entry.to_json(), prev) + raw_entry.update(sync_entry.to_json().encode(), prev) entries.append(raw_entry) manager.add(entries, last_uid) # Add entries to cache for entry in entries: - cache.EntryEntity.create(uid=entry.uid, content=entry.getContent(), journal=journal) + cache.EntryEntity.create(uid=entry.uid, content=entry.getContent().decode(), journal=journal) # Clear dirty flags and delete deleted content pim.Content.delete().where((pim.Content.journal == journal) & pim.Content.deleted).execute()
Api: journal entries expect bytes not strings. Pass bytes.
etesync_pyetesync
train
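The pyetesync fix follows the usual bytes/str discipline: decode once where bytes enter, encode once where bytes leave, and keep everything in between as str. A generic sketch of that boundary (stand-in functions, not the pyetesync API):

import json

def read_entry(raw: bytes) -> dict:
    # Decode exactly once at the edge; everything downstream is str.
    return json.loads(raw.decode("utf-8"))

def write_entry(obj: dict) -> bytes:
    # Encode exactly once when handing data back to a bytes-expecting API.
    return json.dumps(obj).encode("utf-8")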
5fa56f5ef00019736db093d2dfd33c8331564d28
diff --git a/bin/configo-build/main.go b/bin/configo-build/main.go index <HASH>..<HASH> 100644 --- a/bin/configo-build/main.go +++ b/bin/configo-build/main.go @@ -37,7 +37,7 @@ func main() { fmt.Println(err) return } - if data, err := configo.Patch(baseData, &config); err != nil { + if data, err := configo.Patch(baseData, config); err != nil { fmt.Println(err) return } else { diff --git a/configo.go b/configo.go index <HASH>..<HASH> 100644 --- a/configo.go +++ b/configo.go @@ -301,19 +301,25 @@ func Marshal(v interface{}) ([]byte, error) { //Patch the base using the value from v, the new bytes returned //combines the base's value and v's default value func Patch(base []byte, v interface{}) ([]byte, error) { + //Clone struct v, v shoud not be modified + rv := reflect.ValueOf(v) + for rv.Kind() == reflect.Ptr { + rv = rv.Elem() + } + pv := reflect.New(rv.Type()) + pv.Elem().Set(rv) + + nv := pv.Interface() + //unmarshal base table, err := toml.Parse(base) if err != nil { return nil, err } - if err := toml.UnmarshalTable(table, v); err != nil { - return nil, err - } - - if err := applyDefault(table, reflect.ValueOf(v), true); err != nil { + if err := toml.UnmarshalTable(table, nv); err != nil { return nil, err } - return Marshal(v) + return Marshal(nv) }
Remove the requirement that v be a pointer when patching
shafreeck_configo
train
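The configo commit clones the struct through reflection so Patch accepts plain values and never mutates the caller's copy. The equivalent contract in Python is usually a deep copy before the in-place merge; dicts stand in for the TOML table and target struct here:

import copy

def patch(base: dict, defaults: dict) -> dict:
    # Merge onto a private clone so the caller's object is never modified.
    merged = copy.deepcopy(defaults)
    merged.update(base)              # values present in `base` win
    return merged

defaults = {"port": 8080}
patched = patch({"host": "example.org"}, defaults)
assert defaults == {"port": 8080}    # caller's value is untouched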
eb9369542fb4ba55e152bea665b3066c27d862f7
diff --git a/Resources/Private/JavaScript/Host/Redux/UI/ContentView/index.js b/Resources/Private/JavaScript/Host/Redux/UI/ContentView/index.js index <HASH>..<HASH> 100644 --- a/Resources/Private/JavaScript/Host/Redux/UI/ContentView/index.js +++ b/Resources/Private/JavaScript/Host/Redux/UI/ContentView/index.js @@ -1,4 +1,5 @@ import {createAction} from 'redux-actions'; +import {Map} from 'immutable'; import {$set} from 'plow-js'; const SET_CONTEXT_PATH = '@packagefactory/guevara/UI/PageTree/SET_CONTEXT_PATH'; diff --git a/Resources/Private/JavaScript/Host/Redux/UI/FlashMessages/index.js b/Resources/Private/JavaScript/Host/Redux/UI/FlashMessages/index.js index <HASH>..<HASH> 100644 --- a/Resources/Private/JavaScript/Host/Redux/UI/FlashMessages/index.js +++ b/Resources/Private/JavaScript/Host/Redux/UI/FlashMessages/index.js @@ -38,9 +38,13 @@ export const actions = { }; // -// Export the initial state +// Export the initial state hydrator // -export const initialState = {}; +export const hydrate = () => new Map({ + ui: new Map({ + flashMessages: new Map() + }) +}); // // Export the reducer
TASK: Describe initial state of ui.flashMessages as a hydrator function
neos_neos-ui
train
d8ec697e98db3cef09b3cd8e562f6e59f902a6d3
diff --git a/src/Service/PagePurger.php b/src/Service/PagePurger.php index <HASH>..<HASH> 100644 --- a/src/Service/PagePurger.php +++ b/src/Service/PagePurger.php @@ -5,6 +5,7 @@ namespace Mediawiki\Api\Service; use Mediawiki\Api\Generator\ApiGenerator; use Mediawiki\Api\MediawikiApi; use Mediawiki\Api\SimpleRequest; +use Mediawiki\DataModel\Pages; use Mediawiki\DataModel\Page; /** @@ -43,6 +44,24 @@ class PagePurger { } /** + * @since 0.x + * + * @param Pages $pages + * + * @return bool + * + */ + public function purgePages( Pages $pages ) { + $pagesArray = $pages->toArray(); + + foreach ( $pagesArray as $page ) { + $this->purge( $page ); + } + + return true; + } + + /** * @since 0.6 * * @param ApiGenerator $generator
Add a method to PagePurger for purging multiple pages at once. The function iterates through each page and calls the purge method. Bug: T<I>
addwiki_mediawiki-api
train
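purgePages is a thin batch wrapper over the existing single-page purge. The same shape in Python (names mirror the PHP, but this is a sketch, not addwiki's API; `api` is any object with a purge endpoint):

class PagePurger:
    def __init__(self, api):
        self.api = api

    def purge(self, page) -> bool:
        self.api.post("purge", titles=page.title)
        return True

    def purge_pages(self, pages) -> bool:
        # Delegate page by page: one purge request per page, as in the commit.
        for page in pages:
            self.purge(page)
        return True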
e16fe071fb061f223a66f69763013441af9124fe
diff --git a/cdk-data/cdk-data-hcatalog/src/main/java/com/cloudera/cdk/data/hcatalog/HCatalog.java b/cdk-data/cdk-data-hcatalog/src/main/java/com/cloudera/cdk/data/hcatalog/HCatalog.java index <HASH>..<HASH> 100644 --- a/cdk-data/cdk-data-hcatalog/src/main/java/com/cloudera/cdk/data/hcatalog/HCatalog.java +++ b/cdk-data/cdk-data-hcatalog/src/main/java/com/cloudera/cdk/data/hcatalog/HCatalog.java @@ -16,20 +16,27 @@ package com.cloudera.cdk.data.hcatalog; import com.cloudera.cdk.data.MetadataProviderException; +import com.cloudera.cdk.data.hcatalog.impl.Loader; import java.util.List; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hive.conf.HiveConf; import org.apache.hadoop.hive.metastore.HiveMetaStoreClient; import org.apache.hadoop.hive.ql.metadata.Table; import org.apache.hcatalog.common.HCatUtil; -import org.apache.thrift.TException; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; final class HCatalog { + private static final Logger LOG = LoggerFactory.getLogger(HCatalog.class); + private HiveMetaStoreClient client; private HiveConf hiveConf; public HCatalog(Configuration conf) { + if (conf.get(Loader.HIVE_METASTORE_URI_PROP) == null) { + LOG.warn("Using a local Hive MetaStore (for testing only)"); + } try { hiveConf = new HiveConf(conf, HiveConf.class); client = HCatUtil.getHiveClient(hiveConf);
CDK-<I>: Add warning for local metastore. This adds a WARN message when a user connects to a local MetaStore because it is not recommended in practice. Hive <I> and later will connect to a local metastore by default if hive.metastore.uris is not set; hive.metastore.local has been removed.
kite-sdk_kite
train
ef8730b4c95a669c0ca8777c756ab3af0a947943
diff --git a/lambdas/es/indexer/document_queue.py b/lambdas/es/indexer/document_queue.py index <HASH>..<HASH> 100644 --- a/lambdas/es/indexer/document_queue.py +++ b/lambdas/es/indexer/document_queue.py @@ -6,7 +6,7 @@ import os from datetime import datetime from enum import Enum from math import floor -from typing import Dict, List +from typing import Dict import boto3 from aws_requests_auth.aws_auth import AWSRequestsAuth @@ -107,7 +107,6 @@ class DocumentQueue: # this could the hash OR tag; to be used in _id primary key package_hash: str = '', package_stats: Dict[str, int] = None, - tags: List[str] = (), text: str = '', version_id=None, *, @@ -183,7 +182,6 @@ class DocumentQueue: "hash": package_hash, "metadata": metadata, "pointer_file": pointer_file, - "tags": ",".join(tags) }) if package_stats: body.update({
Do not send the always-empty 'tags' field to Elasticsearch (#<I>)
quiltdata_quilt
train
d2c003c010def217a1a2f6f1c189c66e2e39a3a8
diff --git a/pushed/__init__.py b/pushed/__init__.py index <HASH>..<HASH> 100644 --- a/pushed/__init__.py +++ b/pushed/__init__.py @@ -1 +1 @@ -from pushed import Pushed, PushedAPIError +from .pushed import Pushed, PushedAPIError
Small Python 3 correction
digmore_pypushed
train
a2395b0541046128f75711a1ab0e702d5b399404
diff --git a/DrdPlus/Tables/Armaments/Armourer.php b/DrdPlus/Tables/Armaments/Armourer.php index <HASH>..<HASH> 100644 --- a/DrdPlus/Tables/Armaments/Armourer.php +++ b/DrdPlus/Tables/Armaments/Armourer.php @@ -198,6 +198,7 @@ class Armourer extends StrictObject { /** @noinspection ExceptionsAnnotatingAndHandlingInspection */ return + // shooting weapons are two-handed (except minicrossbow), projectiles are not $this->isTwoHanded($weaponToHoldByBothHands) // the weapon is explicitly two-handed // or it is melee weapon with length at least 1 (see PPH page 92 right column) || ($weaponToHoldByBothHands->isMeleeArmament()
Note about shooting weapons and their two-handed requirement
drdplusinfo_tables
train
ab53457267631f789754d6bb7c52bde3ca2f7317
diff --git a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectInheritanceTestSchemaFull.java b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectInheritanceTestSchemaFull.java index <HASH>..<HASH> 100644 --- a/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectInheritanceTestSchemaFull.java +++ b/tests/src/test/java/com/orientechnologies/orient/test/database/auto/CRUDObjectInheritanceTestSchemaFull.java @@ -16,6 +16,7 @@ package com.orientechnologies.orient.test.database.auto; import com.orientechnologies.orient.client.db.ODatabaseHelper; +import com.orientechnologies.orient.client.remote.ODatabaseImportRemote; import com.orientechnologies.orient.client.remote.OEngineRemote; import com.orientechnologies.orient.core.command.OCommandOutputListener; import com.orientechnologies.orient.core.db.document.ODatabaseDocumentTx; @@ -102,20 +103,39 @@ public class CRUDObjectInheritanceTestSchemaFull extends ObjectDBBaseTest { } else { importDatabase.open("admin", "admin"); } - ODatabaseImport impor = new ODatabaseImport(importDatabase, EXPORT_DIR, listener); - // UNREGISTER ALL THE HOOKS - for (ORecordHook hook : new ArrayList<ORecordHook>(importDatabase.getHooks().keySet())) { - importDatabase.unregisterHook(hook); - } + if (importDatabase.isRemote()) { + ODatabaseImportRemote impor = new ODatabaseImportRemote(importDatabase, EXPORT_DIR, listener); + + // UNREGISTER ALL THE HOOKS + for (ORecordHook hook : new ArrayList<ORecordHook>(importDatabase.getHooks().keySet())) { + importDatabase.unregisterHook(hook); + } + + impor.importDatabase(); + impor.close(); + + importDatabase.close(); + final File importDir = new File(EXPORT_DIR); + importDir.delete(); + } else { - impor.setDeleteRIDMapping(true); - impor.importDatabase(); - impor.close(); - importDatabase.close(); - final File importDir = new File(EXPORT_DIR); - importDir.delete(); + ODatabaseImport impor = new ODatabaseImport(importDatabase, EXPORT_DIR, listener); + + // UNREGISTER ALL THE HOOKS + for (ORecordHook hook : new ArrayList<ORecordHook>(importDatabase.getHooks().keySet())) { + importDatabase.unregisterHook(hook); + } + + impor.setDeleteRIDMapping(true); + impor.importDatabase(); + impor.close(); + + importDatabase.close(); + final File importDir = new File(EXPORT_DIR); + importDir.delete(); + } } catch (IOException e) { Assert.fail("Export import didn't go as expected", e); }
Fix failing test caused by embedded import when running remote
orientechnologies_orientdb
train
db552a204cd376b3b9f649ebc3bf7161b7af3165
diff --git a/lib/rubocop/cop/style/documentation.rb b/lib/rubocop/cop/style/documentation.rb index <HASH>..<HASH> 100644 --- a/lib/rubocop/cop/style/documentation.rb +++ b/lib/rubocop/cop/style/documentation.rb @@ -33,17 +33,13 @@ module RuboCop def_node_search :outer_module, '(const (const nil? _) _)' def on_class(node) - _, _, body = *node + return unless node.body - return unless body - - check(node, body, :class) + check(node, node.body, :class) end def on_module(node) - _, body = *node - - check(node, body, :module) + check(node, node.body, :module) end private
Fix InternalAffairs/NodeDestructuring offenses in Documentation
rubocop-hq_rubocop
train
c5c29f4abe2b34bb1fdde9720c911cbc621e03ea
diff --git a/app/adapters/application.js b/app/adapters/application.js index <HASH>..<HASH> 100644 --- a/app/adapters/application.js +++ b/app/adapters/application.js @@ -42,7 +42,7 @@ export default DS.RESTAdapter.extend({ var projection = data.projection; Ember.assert('projection should be defined', !!projection); - var url = this.buildURL(type.typeKey, data.id); + var url = this.buildURL(type.typeKey, data.id, snapshot, 'find'); var serializer = store.serializerFor(type); var query = ProjectionQuery.get(projection, serializer); return this.ajax(url, 'GET', { data: query }).then(function(data) { @@ -72,13 +72,37 @@ export default DS.RESTAdapter.extend({ }); }, - buildURL: function(type, id, record) { - var url = [], - host = Ember.get(this, 'host'), - prefix = this.urlPrefix(); + // TODO: override createRecord and deleteRecord for projections support. + updateRecord: function(store, type, snapshot) { + var hasProjection = IdProxy.idIsProxied(snapshot.id); + + // TODO: maybe move it into serializer (serialize or serializeIntoHash)? + SnapshotTransform.transformForSerialize(snapshot, hasProjection, hasProjection); - if (type) { - url.push(this.pathForType(type)); + if (!hasProjection) { + // Sends a PUT request. + return this._super.apply(this, arguments); + } + + var data = {}; + var serializer = store.serializerFor(type.typeKey); + serializer.serializeIntoHash(data, type, snapshot); + + var url = this.buildURL(type.typeKey, snapshot.id, snapshot, 'updateRecord'); + return this.ajax(url, 'PATCH', { data: data }); + }, + + _buildURL: function(modelName, id) { + var url = []; + var host = Ember.get(this, 'host'); + var prefix = this.urlPrefix(); + var path; + + if (modelName) { + path = this.pathForType(modelName); + if (path) { + url.push(path); + } } if (prefix) { @@ -86,45 +110,32 @@ export default DS.RESTAdapter.extend({ } url = url.join('/'); - if (!host && url) { + if (!host && url && url.charAt(0) !== '/') { url = '/' + url; } - //We might get passed in an array of ids from findMany - //in which case we don't want to modify the url, as the - //ids will be passed in through a query param - if (id && !Ember.isArray(id)) { - var encId = encodeURIComponent(id), - idType = Ember.get(this, 'idType'); - if (idType !== 'number') { - encId = "'" + encId + "'"; - } - - url += '(' + encId + ')'; + if (id) { + // Append id as `(id)` (OData specification) instead of `/id`. + url = this._appendIdToURL(id, url); } - // /Customers('ALFKI') - // /Employees(4) return url; }, - // TODO: override createRecord and deleteRecord for projections support. - updateRecord: function(store, type, snapshot) { - var hasProjection = IdProxy.idIsProxied(snapshot.id); - - // TODO: maybe move it into serializer (serialize or serializeIntoHash)? - SnapshotTransform.transformForSerialize(snapshot, hasProjection, hasProjection); - - if (!hasProjection) { - // Sends a PUT request. - return this._super.apply(this, arguments); + /** + * Appends id to URL according to the OData specification. + * @private + */ + _appendIdToURL: function(id, url) { + let encId = encodeURIComponent(id); + let idType = Ember.get(this, 'idType'); + if (idType !== 'number') { + encId = "'" + encId + "'"; } - var data = {}; - var serializer = store.serializerFor(type.typeKey); - serializer.serializeIntoHash(data, type, snapshot); - - var url = this.buildURL(type.typeKey, snapshot.id, snapshot); - return this.ajax(url, 'PATCH', { data: data }); + // /Customers('ALFKI') + // /Employees(4) + url += '(' + encId + ')'; + return url; } });
Support application adapter for ember-data beta<I>
Flexberry_ember-flexberry
train
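The ember-flexberry adapter's _appendIdToURL implements the OData key convention: ids go in parentheses after the resource path, and non-numeric keys are quoted. The same logic in a few lines of Python (idType switch kept from the original):

from urllib.parse import quote

def append_id_to_url(url: str, record_id, id_type: str = "string") -> str:
    enc = quote(str(record_id), safe="")
    if id_type != "number":
        enc = "'%s'" % enc            # string keys are quoted per OData
    return "%s(%s)" % (url, enc)

assert append_id_to_url("/Employees", 4, id_type="number") == "/Employees(4)"
assert append_id_to_url("/Customers", "ALFKI") == "/Customers('ALFKI')"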
e4d5cb43f983702f18c053a780ef14960a64ea0a
diff --git a/v2/server.go b/v2/server.go index <HASH>..<HASH> 100644 --- a/v2/server.go +++ b/v2/server.go @@ -12,6 +12,8 @@ import ( "strings" ) +var nilErrorValue = reflect.Zero(reflect.TypeOf((*error)(nil)).Elem()) + // ---------------------------------------------------------------------------- // Codec // ---------------------------------------------------------------------------- @@ -61,7 +63,7 @@ type Server struct { interceptFunc func(i *RequestInfo) *http.Request beforeFunc func(i *RequestInfo) afterFunc func(i *RequestInfo) - validateFunc func(i interface{}) error + validateFunc reflect.Value } // RegisterCodec adds a new codec to the server. @@ -98,7 +100,7 @@ func (s *Server) RegisterBeforeFunc(f func(i *RequestInfo)) { // won't be invoked and this error will be considered as the method result. // The argument of this function is the already-unmarshalled *args parameter of the method. func (s *Server) RegisterValidateRequestFunc(f func(i interface{}) error) { - s.validateFunc = f + s.validateFunc = reflect.ValueOf(f) } // RegisterAfterFunc registers the specified function as the function @@ -202,17 +204,15 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { // Prepare the reply, we need it even if validation fails reply := reflect.New(methodSpec.replyType) - var errValue []reflect.Value + errValue := []reflect.Value{nilErrorValue} // Call the registered Validator Function - if s.validateFunc != nil { - if validationErr := s.validateFunc(args.Interface()); validationErr != nil { - errValue = []reflect.Value{reflect.ValueOf(validationErr)} - } + if s.validateFunc.IsValid() { + errValue = s.validateFunc.Call([]reflect.Value{args}) } // If still no errors after validation, call the method - if len(errValue) == 0 { + if errValue[0].IsNil() { errValue = methodSpec.method.Func.Call([]reflect.Value{ serviceSpec.rcvr, reflect.ValueOf(r), @@ -221,12 +221,12 @@ func (s *Server) ServeHTTP(w http.ResponseWriter, r *http.Request) { }) } - // Cast the result to error if needed. + // Extract the result to error if needed. var errResult error - errInter := errValue[0].Interface() - if errInter != nil { - errResult = errInter.(error) + if !errValue[0].IsNil() { + errResult = errValue[0].Interface().(error) } + // Prevents Internet Explorer from MIME-sniffing a response away // from the declared content-type w.Header().Set("x-content-type-options", "nosniff")
Moved ValidationFunc into the reflected world, so we don't have to bring values out of the reflected world to call it and then reflect them again.
gorilla_rpc
train
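gorilla/rpc needs reflect.Value plumbing to keep the optional validator and the nil-error sentinel in one world; the dispatch order it implements — validate first, call the method only when validation returned no error — is easier to see in a dynamic sketch (hypothetical names):

class RpcServer:
    def __init__(self):
        self._validate = None      # mirrors RegisterValidateRequestFunc

    def register_validator(self, func):
        self._validate = func

    def invoke(self, method, args):
        if self._validate is not None:
            err = self._validate(args)
            if err is not None:
                return err         # the validation error becomes the result
        return method(args)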
7282ea5f3b9feb7b323f752b99b7fdc0b10aeba5
diff --git a/lib/build.js b/lib/build.js index <HASH>..<HASH> 100644 --- a/lib/build.js +++ b/lib/build.js @@ -62,15 +62,19 @@ async function compileOnWindows (targetArch) { return path.join(nodePath, 'Release/node.exe'); } +const unixThresholds = { + 'v8_base/deps/v8/src/date.o.d.raw': 50 +}; + async function compileOnUnix (targetArch) { const args = []; const cpu = { x86: 'ia32', x64: 'x64', armv6: 'arm', armv7: 'arm' }[targetArch]; args.push('--dest-cpu', cpu); - await spawn('./configure', args, - { stdio: 'inherit', cwd: nodePath }); - await spawn('make', [], - { stdio: 'inherit', cwd: nodePath }); + await spawn('./configure', args, { cwd: nodePath }); + const promise = spawn('make', [], { cwd: nodePath }); + progress(promise.child, unixThresholds); + await promise; return path.join(nodePath, 'out/Release/node'); }
unixThresholds [wip]
zeit_pkg-fetch
train
db4e5fab0dafb0186f9b16d742efaa6e4c7c98ae
diff --git a/Tone/core/Delay.js b/Tone/core/Delay.js index <HASH>..<HASH> 100644 --- a/Tone/core/Delay.js +++ b/Tone/core/Delay.js @@ -13,12 +13,13 @@ define(["Tone/core/Tone", "Tone/core/Param", "Tone/core/AudioNode"], function (T var options = Tone.defaults(arguments, ["delayTime", "maxDelay"], Tone.Delay); Tone.AudioNode.call(this); + var maxDelay = Math.max(this.toSeconds(options.maxDelay), this.toSeconds(options.delayTime)); /** * The native delay node * @type {DelayNode} * @private */ - this._delayNode = this.input = this.output = this.context.createDelay(this.toSeconds(options.maxDelay)); + this._delayNode = this.input = this.output = this.context.createDelay(maxDelay); /** * The amount of time the incoming signal is diff --git a/test/core/Delay.js b/test/core/Delay.js index <HASH>..<HASH> 100644 --- a/test/core/Delay.js +++ b/test/core/Delay.js @@ -32,6 +32,12 @@ define(["Test", "Tone/core/Tone", "Tone/core/Delay", "helper/PassAudio"], functi delay.dispose(); }); + it("if the constructor delay time is greater than maxDelay, use that as the maxDelay time", function(){ + var delay = new Delay(3); + expect(delay.delayTime.value).to.be.closeTo(3, 0.001); + delay.dispose(); + }); + it("can returns state from 'get' method", function(){ var delay = new Delay({ "delayTime" : 0.4,
If the constructor delayTime is greater than the maxDelay, use the delayTime as the maxDelay. Fixes #<I>
Tonejs_Tone.js
train
95e9e86aa3762c727087b30536b01606c398c969
diff --git a/publify_core/app/models/user.rb b/publify_core/app/models/user.rb index <HASH>..<HASH> 100644 --- a/publify_core/app/models/user.rb +++ b/publify_core/app/models/user.rb @@ -18,7 +18,6 @@ class User < ApplicationRecord before_validation :set_default_profile validates :login, uniqueness: true - validates :email, uniqueness: true, on: :create validates :email, :login, presence: true validates :login, length: { in: 3..40 } diff --git a/publify_core/spec/models/user_spec.rb b/publify_core/spec/models/user_spec.rb index <HASH>..<HASH> 100644 --- a/publify_core/spec/models/user_spec.rb +++ b/publify_core/spec/models/user_spec.rb @@ -79,6 +79,13 @@ describe User, type: :model do expect(bar).not_to allow_value("foo").for(:login) end + + it "does not allow duplicate emails when updating a user" do + create :user, email: "[email protected]" + bar = create :user, email: "[email protected]" + + expect(bar).not_to allow_value("[email protected]").for(:email) + end end describe "#initialize" do
Spec that email uniqueness is already guaranteed by Devise
publify_publify
train
ce2e55cc219716fff9bd98c6fc6858a68290b467
diff --git a/nodeconductor/structure/handlers.py b/nodeconductor/structure/handlers.py index <HASH>..<HASH> 100644 --- a/nodeconductor/structure/handlers.py +++ b/nodeconductor/structure/handlers.py @@ -410,8 +410,15 @@ def connect_service_to_all_projects_if_it_is_available_for_all(sender, instance, def delete_service_settings_on_service_delete(sender, instance, **kwargs): """ Delete not shared service settings without services """ service = instance - if not service.settings.shared: - service.settings.delete() + try: + service_settings = service.settings + except ServiceSettings.DoesNotExist: + # On service settings scope deletion Django collector goes wild and + # tries to delete service settings before services. To prevent this + # issue lets skip service that does not have settings. + return + if not service_settings.shared: + service_settings.delete() def init_resource_start_time(sender, instance, name, source, target, **kwargs):
Fix service settings deletion on scope deletion - wal-<I>
opennode_waldur-core
train
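The waldur fix is the standard guard for Django's deletion collector: during a cascade the related row may already be gone, so the reverse lookup has to tolerate DoesNotExist. Schematically (assumes Django; model names are placeholders):

from django.core.exceptions import ObjectDoesNotExist

def delete_settings_on_service_delete(sender, instance, **kwargs):
    try:
        settings = instance.settings
    except ObjectDoesNotExist:
        # The collector already deleted the settings row; nothing to do.
        return
    if not settings.shared:
        settings.delete()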
39d7c0d3616a802dc0214c8d20ce91994dc2309c
diff --git a/bcbio/variation/samtools.py b/bcbio/variation/samtools.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/samtools.py +++ b/bcbio/variation/samtools.py @@ -35,7 +35,7 @@ def shared_variantcall(call_fn, name, align_bams, ref_file, items, if ((variant_regions is not None and isinstance(target_regions, basestring) and not os.path.isfile(target_regions)) or not all(realign.has_aligned_reads(x, region) for x in align_bams)): - vcfutils.write_empty_vcf(out_file) + vcfutils.write_empty_vcf(out_file, config) else: with file_transaction(out_file) as tx_out_file: call_fn(align_bams, ref_file, items, target_regions, diff --git a/bcbio/variation/varscan.py b/bcbio/variation/varscan.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/varscan.py +++ b/bcbio/variation/varscan.py @@ -21,7 +21,6 @@ import pysam def run_varscan(align_bams, items, ref_file, assoc_files, region=None, out_file=None): - if is_paired_analysis(align_bams, items): call_file = samtools.shared_variantcall(_varscan_paired, "varscan", align_bams, ref_file, items, @@ -91,7 +90,7 @@ def _varscan_paired(align_bams, ref_file, items, target_regions, out_file): # just skip the rest of the analysis (VarScan will hang otherwise) if any(os.stat(filename).st_size == 0 for filename in cleanup_files): - write_empty_vcf(out_file) + write_empty_vcf(orig_out_file, config) return # First index is normal, second is tumor @@ -130,7 +129,7 @@ def _varscan_paired(align_bams, ref_file, items, target_regions, out_file): _fix_varscan_vcf(indel_file, paired.normal_name, paired.tumor_name) if not to_combine: - write_empty_vcf(out_file) + write_empty_vcf(orig_out_file, config) return out_file = combine_variant_files([snp_file, indel_file], @@ -283,9 +282,11 @@ def _create_sample_list(in_bams, vcf_file): def _varscan_work(align_bams, ref_file, items, target_regions, out_file): """Perform SNP and indel genotyping with VarScan. """ - config = items[0]["config"] + orig_out_file = out_file + out_file = orig_out_file.replace(".vcf.gz", ".vcf") + max_read_depth = "1000" version = programs.jar_versioner("varscan", "VarScan")(config) if version < "v2.3.6": @@ -325,6 +326,9 @@ def _varscan_work(align_bams, ref_file, items, target_regions, out_file): else: freebayes.clean_vcf_output(out_file, _clean_varscan_line) + if orig_out_file.endswith(".gz"): + vcfutils.bgzip_and_index(out_file, config) + def _clean_varscan_line(line): """Avoid lines with non-GATC bases, ambiguous output bases make GATK unhappy. """ diff --git a/bcbio/variation/vcfutils.py b/bcbio/variation/vcfutils.py index <HASH>..<HASH> 100644 --- a/bcbio/variation/vcfutils.py +++ b/bcbio/variation/vcfutils.py @@ -54,12 +54,19 @@ def get_paired_phenotype(data): # ## General utilities -def write_empty_vcf(out_file): +def write_empty_vcf(out_file, config=None): + needs_bgzip = False + if os.path.exists(".vcf.gz"): + needs_bgzip = True + out_file = out_file.replace("vcf.gz", ".vcf") with open(out_file, "w") as out_handle: out_handle.write("##fileformat=VCFv4.1\n" "## No variants; no reads aligned in region\n" "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n") - + if needs_bgzip: + return bgzip_and_index(out_file, config or {}) + else: + return out_file def split_snps_indels(orig_file, ref_file, config): """Split a variant call file into SNPs and INDELs for processing.
Support additional edge cases for Varscan calling with bgzipped outputs
bcbio_bcbio-nextgen
train
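One subtlety in the patched write_empty_vcf is detecting a compressed target and routing the plain file through bgzip afterwards. The committed line `os.path.exists(".vcf.gz")` looks like it was meant to be a suffix check on out_file; a sketch of the apparent intent, with a stub standing in for bcbio's bgzip_and_index helper:

def bgzip_and_index(vcf_file, config):
    # Stand-in for bcbio.variation.vcfutils.bgzip_and_index (bgzip + tabix).
    return vcf_file + ".gz"

def write_empty_vcf(out_file, config=None):
    needs_bgzip = out_file.endswith(".vcf.gz")
    if needs_bgzip:
        out_file = out_file[:-len(".gz")]       # write the plain .vcf first
    with open(out_file, "w") as out_handle:
        out_handle.write("##fileformat=VCFv4.1\n"
                         "## No variants; no reads aligned in region\n"
                         "#CHROM\tPOS\tID\tREF\tALT\tQUAL\tFILTER\tINFO\n")
    if needs_bgzip:
        return bgzip_and_index(out_file, config or {})
    return out_file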
36b48b31e1a8a34de0d92c0385b48735b3880190
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -1,5 +1,19 @@ module.exports = function(grunt) { var browsers = [{ + browserName: "internet explorer", + version: "8" + },{ + browserName: "internet explorer", + version: "9" + },{ + browserName: "internet explorer", + version: "10" + },{ + browserName:"android", + version: "4.0" + },{ + browserName:"iphone" + },{ browserName: "chrome", platform: "linux" },{ @@ -9,57 +23,23 @@ module.exports = function(grunt) { browserName: "googlechrome", platform: "Windows XP" },{ - browserName: "internet explorer", - platform: "Windows 8", - version: "10" - },{ browserName: "firefox", - platform: "Windows 8", version: "21" },{ browserName: "firefox", - platform: "Windows 8", version: "20" },{ browserName: "firefox", - platform: "Windows 8", version: "19" },{ browserName: "firefox", - platform: "Windows 8", version: "18" },{ - browserName: "firefox", - platform: "Windows 8", - version: "17" - },{ - browserName: "firefox", - platform: "Windows 8", - version: "16" - },{ - browserName:"iphone", - platform: "OS X 10.8", - version: "6" - },{ - browserName:"iphone", - platform: "OS X 10.8", - version: "5.1" - },{ - browserName:"iphone", - platform: "OS X 10.6", - version: "5.0" - },{ browserName:"safari", - platform: "OS X 10.8", version: "6" },{ browserName:"safari", - platform: "OS X 10.6", version: "5" - },{ - browserName:"android", - platform: "Linux", - version: "4.0" }]; grunt.initConfig({
Clean and re-order browsers
gre_deprecated-qajax
train
5eb63b294c38ddba01876c1b23b8fd971e315226
diff --git a/Godeps/Godeps.json b/Godeps/Godeps.json index <HASH>..<HASH> 100644 --- a/Godeps/Godeps.json +++ b/Godeps/Godeps.json @@ -40,7 +40,7 @@ }, { "ImportPath": "github.com/cloudfoundry-incubator/uaa-go-client", - "Rev": "141c25f7dd8b9d2336c59c35a84ef399dfab26f7" + "Rev": "a04a6ef497ee2890465ef6b1c1b823c6ac59a8ab" }, { "ImportPath": "github.com/cloudfoundry/dropsonde", diff --git a/Godeps/_workspace/src/github.com/cloudfoundry-incubator/uaa-go-client/client.go b/Godeps/_workspace/src/github.com/cloudfoundry-incubator/uaa-go-client/client.go index <HASH>..<HASH> 100644 --- a/Godeps/_workspace/src/github.com/cloudfoundry-incubator/uaa-go-client/client.go +++ b/Godeps/_workspace/src/github.com/cloudfoundry-incubator/uaa-go-client/client.go @@ -142,6 +142,7 @@ func (u *UaaClient) FetchToken(forceUpdate bool) (*schema.Token, error) { } func (u *UaaClient) doFetchToken() (*schema.Token, bool, error) { + logger := u.logger.Session("uaa-client") values := url.Values{} values.Add("grant_type", "client_credentials") requestBody := values.Encode() @@ -156,7 +157,7 @@ func (u *UaaClient) doFetchToken() (*schema.Token, bool, error) { request.Header.Add("Accept", "application/json; charset=utf-8") trace.DumpRequest(request) - u.logger.Info("fetch-token-from-uaa-start", lager.Data{"endpoint": request.URL}) + logger.Info("fetch-token-from-uaa-start", lager.Data{"endpoint": request.URL}) resp, err := u.client.Do(request) if err != nil { return nil, true, err @@ -164,7 +165,7 @@ func (u *UaaClient) doFetchToken() (*schema.Token, bool, error) { defer resp.Body.Close() trace.DumpResponse(resp) - u.logger.Info("fetch-token-from-uaa-end", lager.Data{"status-code": resp.StatusCode}) + logger.Info("fetch-token-from-uaa-end", lager.Data{"status-code": resp.StatusCode}) body, err := ioutil.ReadAll(resp.Body) if err != nil { diff --git a/main.go b/main.go index <HASH>..<HASH> 100644 --- a/main.go +++ b/main.go @@ -176,7 +176,7 @@ func setupRouteFetcher(logger lager.Logger, c *config.Config, registry rregistry uaaClient := newUaaClient(logger, clock, c) - _, err := uaaClient.FetchToken(false) + _, err := uaaClient.FetchToken(true) if err != nil { logger.Fatal("unable-to-fetch-token", err) }
Force UAA token fetch on startup * bump uaa-go-client [#<I>]
cloudfoundry_gorouter
train
b0cc88dae41f9e3ebed30d68a8c9ed40e10120cb
diff --git a/spec/public/model_spec.rb b/spec/public/model_spec.rb index <HASH>..<HASH> 100644 --- a/spec/public/model_spec.rb +++ b/spec/public/model_spec.rb @@ -57,8 +57,12 @@ describe DataMapper::Model do describe 'between different models' do before :all do + @other.destroy + @article.destroy + @original.destroy + # make sure the default repository is empty - @article_model.all(:repository => @repository).destroy! + @article_model.all(:repository => @repository).should be_empty # add an extra property to the alternate model DataMapper.repository(@alternate_adapter.name) do diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -12,8 +12,8 @@ $LOAD_PATH.unshift(SPEC_ROOT.parent + 'lib') require 'dm-core' -ENV['PLUGINS'].to_s.split(' ').each do |plugin| - require plugin.strip +ENV['PLUGINS'].to_s.strip.split(/\s+/).each do |plugin| + require plugin end Pathname.glob((SPEC_ROOT + '{lib,*/shared}/**/*.rb').to_s).each { |file| require file }
Remove dependent Resources first * Minor tweak to code that loads plugins when running specs
datamapper_dm-core
train
d3e0a5b171cc66e13b28fff9d6e72be51534cb4d
diff --git a/client.go b/client.go index <HASH>..<HASH> 100644 --- a/client.go +++ b/client.go @@ -58,6 +58,7 @@ type RPCClient interface { SendRPC(rpc hrpc.Call) (proto.Message, error) } +// Option is a function used to configure optional config items for a Client. type Option func(*client) // A Client provides access to an HBase cluster. diff --git a/rpc.go b/rpc.go index <HASH>..<HASH> 100644 --- a/rpc.go +++ b/rpc.go @@ -581,9 +581,8 @@ func sleepAndIncreaseBackoff(ctx context.Context, backoff time.Duration) (time.D // TODO: Revisit how we back off here. if backoff < 5000*time.Millisecond { return backoff * 2, nil - } else { - return backoff + 5000*time.Millisecond, nil } + return backoff + 5000*time.Millisecond, nil } func (c *client) establishRegionClient(reg hrpc.RegionInfo,
Fixed a couple linting issues.
tsuna_gohbase
train
8f99d47a5f2edce34b12ecf4928767de175018ba
diff --git a/tensor2tensor/layers/common_attention.py b/tensor2tensor/layers/common_attention.py index <HASH>..<HASH> 100644 --- a/tensor2tensor/layers/common_attention.py +++ b/tensor2tensor/layers/common_attention.py @@ -681,6 +681,70 @@ def local_attention_2d(q, return tf.reshape(output, v_shape) +def compute_qkv(query_antecedent, memory_antecedent, total_key_depth, + total_value_depth, q_filter_width=1, kv_filter_width=1, + q_padding="VALID", kv_padding="VALID"): + """Computes query, key and value. + + Args: + query_antecedent: a Tensor with shape [batch, length_q, channels] + memory_antecedent: a Tensor with shape [batch, length_m, channels] + total_key_depth: an integer + total_value_depth: an integer + q_filter_width: An integer specifying how wide you want the query to be. + kv_filter_width: An integer specifying how wide you want the keys and values + to be. + q_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. + kv_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. + + Returns: + q, k, v : [batch, length, depth] tensors + """ + if memory_antecedent is None and q_filter_width == kv_filter_width == 1: + # self attention with single position q, k, and v + combined = common_layers.conv1d( + query_antecedent, + total_key_depth * 2 + total_value_depth, + 1, + name="qkv_transform") + q, k, v = tf.split( + combined, [total_key_depth, total_key_depth, total_value_depth], + axis=2) + return q, k, v + + if memory_antecedent is None: + # self attention + q = common_layers.conv1d( + query_antecedent, + total_key_depth, + q_filter_width, + padding=q_padding, + name="q_transform") + kv_combined = common_layers.conv1d( + query_antecedent, + total_key_depth + total_value_depth, + kv_filter_width, + padding=kv_padding, + name="kv_transform") + k, v = tf.split(kv_combined, [total_key_depth, total_value_depth], + axis=2) + return q, k, v + + # encoder-decoder attention + q = common_layers.conv1d( + query_antecedent, total_key_depth, q_filter_width, padding=q_padding, + name="q_transform") + combined = common_layers.conv1d( + memory_antecedent, + total_key_depth + total_value_depth, + 1, + padding=kv_padding, + name="kv_transform") + k, v = tf.split(combined, [total_key_depth, total_value_depth], axis=2) + + return q, k, v + + def multihead_attention(query_antecedent, memory_antecedent, bias, @@ -693,6 +757,10 @@ def multihead_attention(query_antecedent, attention_type="dot_product", block_length=128, block_width=128, + q_filter_width=1, + kv_filter_width=1, + q_padding="VALID", + kv_padding="VALID", name=None): """Multihead scaled-dot-product attention with input/output transformations. @@ -711,6 +779,12 @@ def multihead_attention(query_antecedent, "local_unmasked" block_length: an integer - relevant for "local_mask_right" block_width: an integer - relevant for "local_unmasked" + q_filter_width: An integer specifying how wide you want the query to be. + kv_filter_width: An integer specifying how wide you want the keys and values + to be. + q_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. + kv_padding: One of "VALID", "SAME" or "LEFT". Default is VALID: No padding. + + name: an optional string Returns: @@ -726,30 +800,14 @@ def multihead_attention(query_antecedent, if total_value_depth % num_heads != 0: raise ValueError("Value depth (%d) must be divisible by the number of " "attention heads (%d)." % (total_value_depth, num_heads)) - with tf.variable_scope( name, default_name="multihead_attention", values=[query_antecedent, memory_antecedent]): - if memory_antecedent is None: - # self attention - combined = common_layers.conv1d( - query_antecedent, - total_key_depth * 2 + total_value_depth, - 1, - name="qkv_transform") - q, k, v = tf.split( - combined, [total_key_depth, total_key_depth, total_value_depth], - axis=2) - else: - q = common_layers.conv1d( - query_antecedent, total_key_depth, 1, name="q_transform") - combined = common_layers.conv1d( - memory_antecedent, - total_key_depth + total_value_depth, - 1, - name="kv_transform") - k, v = tf.split(combined, [total_key_depth, total_value_depth], axis=2) + q, k, v = compute_qkv(query_antecedent, memory_antecedent, total_key_depth, + total_value_depth, q_filter_width, kv_filter_width, + q_padding, kv_padding) + q = split_heads(q, num_heads) k = split_heads(k, num_heads) v = split_heads(v, num_heads)
The current attention computes compatibilities between single query, key, and value positions. This CL extends it to computing them between windows of queries, keys, and values. It's like a combination of convolution and attention. Does not change defaults. PiperOrigin-RevId: <I>
tensorflow_tensor2tensor
train
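The commit message above describes compute_qkv in words; its single-position path (both filter widths equal to 1) reduces to one combined projection followed by a channel-wise split. A NumPy sketch of that path only — shapes and depths are invented, and common_layers.conv1d with width 1 is modeled as a plain matmul:

import numpy as np

batch, length, channels = 2, 7, 16
key_depth, value_depth = 8, 12

x = np.random.randn(batch, length, channels)
w = np.random.randn(channels, 2 * key_depth + value_depth)  # "qkv_transform"
combined = x @ w                                            # conv1d of width 1

q = combined[..., :key_depth]
k = combined[..., key_depth:2 * key_depth]
v = combined[..., 2 * key_depth:]
assert q.shape == (batch, length, key_depth)
assert v.shape == (batch, length, value_depth)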
22446305bd65d271fd2f3847c3d37882ba1d67a1
diff --git a/src/main/java/com/github/ansell/restletutils/FixedRedirectCookieAuthenticator.java b/src/main/java/com/github/ansell/restletutils/FixedRedirectCookieAuthenticator.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/github/ansell/restletutils/FixedRedirectCookieAuthenticator.java +++ b/src/main/java/com/github/ansell/restletutils/FixedRedirectCookieAuthenticator.java @@ -697,7 +697,8 @@ public class FixedRedirectCookieAuthenticator extends ChallengeAuthenticator } catch(final Exception e) { - this.log.info("Unable to decrypt cookie credentials", e); + this.log.info("Unable to decrypt cookie credentials"); + this.log.trace("Unable to decrypt cookie credentials: Stacktrace:", e); return null; } }
only dump stacktrace to console at trace level
ansell_restlet-utils
train
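The same split-level logging idea, sketched with Python's stdlib logging rather than the Java logger above (Python has no TRACE level by default, so DEBUG stands in; _decrypt is a hypothetical stand-in for the real decryption call):

import logging

log = logging.getLogger("cookie_auth")

def _decrypt(cookie):
    raise ValueError("bad padding")  # stand-in for a real decryption failure

def decrypt_credentials(cookie):
    try:
        return _decrypt(cookie)
    except Exception:
        # One-line notice at INFO; the full traceback only when verbose.
        log.info("Unable to decrypt cookie credentials")
        log.debug("Unable to decrypt cookie credentials", exc_info=True)
        return None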
e529caa9a3a7db3b6649c6becec9371f9cb519a4
diff --git a/src/test/java/org/roaringbitmap/TestRoaringBitmap.java b/src/test/java/org/roaringbitmap/TestRoaringBitmap.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/roaringbitmap/TestRoaringBitmap.java +++ b/src/test/java/org/roaringbitmap/TestRoaringBitmap.java @@ -3151,6 +3151,20 @@ public class TestRoaringBitmap { } @Test + public void testUtilUnsignedIntersection() { + short data1[] = {-19, -17, -15, -13, -11, -9, -7, -5, -3, -1}; + short data2[] = {-18, -16, -14, -12, -10, -8, -1}; + Assert.assertTrue(Util.unsignedIntersects(data1, data1.length, data2, data2.length)); + short data3[] = {-19, -17, -15, -13, -11, -9, -7}; + short data4[] = {-18, -16, -14, -12, -10, -8, -6, -4, -2, 0}; + Assert.assertFalse(Util.unsignedIntersects(data3, data3.length, data4, data4.length)); + short data5[] = {}; + short data6[] = {}; + Assert.assertFalse(Util.unsignedIntersects(data5, data5.length, data6, data6.length)); + + } + + @Test public void testSetUtilIntersection() { short data1[] = {0, 2, 4, 6, 8, 10, 12, 14, 16, 18}; short data2[] = {0, 3, 6, 9, 12, 15, 18};
Add test case for Util.unsignedIntersects method
RoaringBitmap_RoaringBitmap
train
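The negative shorts in the test above are the point: Java shorts are signed, but the containers compare them as unsigned 16-bit values. A Python sketch of the semantics only — the real Util walks two sorted arrays rather than building sets:

def unsigned_intersects(a, b):
    # Mask to 16 bits to get the unsigned view of a Java short.
    ua = {x & 0xFFFF for x in a}
    ub = {x & 0xFFFF for x in b}
    return bool(ua & ub)  # empty inputs therefore never intersect

data1 = [-19, -17, -15, -13, -11, -9, -7, -5, -3, -1]
data2 = [-18, -16, -14, -12, -10, -8, -1]
assert unsigned_intersects(data1, data2)   # both contain -1, i.e. 0xFFFF
assert not unsigned_intersects([], [])     # matches the empty-array case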
a9383846078a0943d120b42667c07db8525a9fdf
diff --git a/py/test/rsession/testing/test_reporter.py b/py/test/rsession/testing/test_reporter.py index <HASH>..<HASH> 100644 --- a/py/test/rsession/testing/test_reporter.py +++ b/py/test/rsession/testing/test_reporter.py @@ -202,7 +202,7 @@ class TestRemoteReporter(AbstractTestReporter): self._test_still_to_go() def test_report_received_item_outcome(self): - #py.test.skip("XXX rewrite test to not rely on exact formatting") + py.test.skip("XXX rewrite test to not rely on exact formatting") val = self.report_received_item_outcome() expected = """ localhost: FAILED py.test.rsession.testing.test_slave.py funcpass localhost: SKIPPED py.test.rsession.testing.test_slave.py funcpass @@ -212,6 +212,7 @@ class TestRemoteReporter(AbstractTestReporter): assert val.find(expected) != -1 def test_module(self): + py.test.skip("XXX rewrite test to not rely on exact formatting") val = self._test_module() print val expected = """ localhost: FAILED py.test.rsession.testing.test_slave.py funcpass
[svn r<I>] these tests rely on exact formatting, so skipping them again (fail for me on pypy2) --HG-- branch : trunk
pytest-dev_pytest
train
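The TODO in the skip reason suggests the eventual fix: assert on parsed fields rather than the exact report string. A sketch of that style; the line format is inferred from the expected output in the diff, not from py.test internals:

def parse_report_line(line):
    host, outcome, path, item = line.split(None, 3)
    return {"host": host.rstrip(":"), "outcome": outcome,
            "path": path, "item": item}

line = "localhost: FAILED py.test.rsession.testing.test_slave.py funcpass"
parsed = parse_report_line(line)
assert parsed["outcome"] == "FAILED"   # survives spacing/format changes
assert parsed["item"] == "funcpass"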
eb8137798dfd110a22069ff8c727a21f6a5024ab
diff --git a/lib/flipper.rb b/lib/flipper.rb index <HASH>..<HASH> 100644 --- a/lib/flipper.rb +++ b/lib/flipper.rb @@ -30,6 +30,18 @@ module Flipper raise DuplicateGroup, %Q{Group #{name.inspect} has already been registered} end + # Public: Returns an array of registered Types::Group instances. + def self.groups + groups_registry.values + end + + # Public: Returns an array of symbols where each symbol is a registered + # group name. If you just want the names, this is more efficient than doing + # `Flipper.groups.map(&:name)`. + def self.group_names + groups_registry.keys + end + # Public: Clears the group registry. # # Returns nothing. @@ -44,7 +56,7 @@ module Flipper groups_registry.key?(name) end - # Internal: Fetches a group by name. + # Public: Fetches a group by name. # # name - The Symbol name of the group. # diff --git a/spec/flipper_spec.rb b/spec/flipper_spec.rb index <HASH>..<HASH> 100644 --- a/spec/flipper_spec.rb +++ b/spec/flipper_spec.rb @@ -87,4 +87,26 @@ describe Flipper do end end end + + describe ".groups" do + it "returns array of group instances" do + admins = Flipper.register(:admins) { |actor| actor.admin? } + preview_features = Flipper.register(:preview_features) { |actor| actor.preview_features? } + Flipper.groups.should eq([ + admins, + preview_features, + ]) + end + end + + describe ".group_names" do + it "returns array of group names" do + Flipper.register(:admins) { |actor| actor.admin? } + Flipper.register(:preview_features) { |actor| actor.preview_features? } + Flipper.group_names.should eq([ + :admins, + :preview_features, + ]) + end + end end
Add groups and group_names to Flipper
jnunemaker_flipper
train
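The efficiency note in the diff — group_names reads registry keys directly instead of materializing groups and mapping over them — in a minimal Python registry sketch (Flipper itself is Ruby; the names here are invented):

class Registry:
    def __init__(self):
        self._groups = {}  # insertion-ordered, like Flipper's registry

    def register(self, name, predicate):
        self._groups[name] = predicate

    def groups(self):
        return list(self._groups.values())

    def group_names(self):
        return list(self._groups.keys())  # no group objects touched

reg = Registry()
reg.register("admins", lambda actor: actor["admin"])
reg.register("preview_features", lambda actor: actor["preview"])
assert reg.group_names() == ["admins", "preview_features"]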
164a6d34e4aa7767afb23595ef0bb290e38bc41e
diff --git a/core-bundle/src/Resources/contao/library/Contao/System.php b/core-bundle/src/Resources/contao/library/Contao/System.php index <HASH>..<HASH> 100644 --- a/core-bundle/src/Resources/contao/library/Contao/System.php +++ b/core-bundle/src/Resources/contao/library/Contao/System.php @@ -177,7 +177,14 @@ abstract class System } else { - $this->arrObjects[$strKey] = new $strClass(); + try + { + $this->arrObjects[$strKey] = new $strClass(); + } + catch (\ArgumentCountError $e) + { + throw new \ArgumentCountError(sprintf('Cannot create instance of class %s, did you forget to make the service public?', $strClass), $e->getCode(), $e); + } } } } @@ -224,7 +231,14 @@ abstract class System } else { - static::$arrStaticObjects[$strKey] = new $strClass(); + try + { + static::$arrStaticObjects[$strKey] = new $strClass(); + } + catch (\ArgumentCountError $e) + { + throw new \ArgumentCountError(sprintf('Cannot create instance of class %s, did you forget to make the service public?', $strClass), $e->getCode(), $e); + } } }
Improve the error message when trying to get a non-public service (see #<I>) Description ----------- - Commits ------- c<I>d<I>cb Improved error reporting when trying to get a service with class name <I>de<I> CS 3f0b<I>d Fix the coding style
contao_contao
train
41efb7e19ce5ec0707e63c0b8bf52d828823a77d
diff --git a/src/main/java/org/dita/dost/util/URLUtils.java b/src/main/java/org/dita/dost/util/URLUtils.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/dita/dost/util/URLUtils.java +++ b/src/main/java/org/dita/dost/util/URLUtils.java @@ -313,7 +313,10 @@ public final class URLUtils { //'%', '"', '{', '}', //'?', - '|', '\\', '^', '~', '[', ']', '`', '\'', + '|', '\\', '^', + //'~', + '[', ']', '`', + //'\'', //'&' }; char ch; diff --git a/src/test/java/org/dita/dost/util/URLUtilsTest.java b/src/test/java/org/dita/dost/util/URLUtilsTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/org/dita/dost/util/URLUtilsTest.java +++ b/src/test/java/org/dita/dost/util/URLUtilsTest.java @@ -140,6 +140,7 @@ public class URLUtilsTest { assertEquals(new URI("foo%20bar.txt"), URLUtils.toURI("foo bar.txt")); assertEquals(new URI("foo/bar.txt"), URLUtils.toURI("foo" + Constants.WINDOWS_SEPARATOR + "bar.txt")); assertEquals(new URI("foo%20bar.txt"), URLUtils.toURI(" foo bar.txt ")); + assertEquals(new URI("user's%20manual.txt"), URLUtils.toURI("user's manual.txt")); assertEquals(new URI("http://www.example.com/"), URLUtils.toURI(" http://www.example.com/ ")); }
<I> Remove apos and tilde from the array of characters to be escaped.
dita-ot_dita-ot
train
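The new test expectation — the apostrophe survives while the space becomes %20 — can be cross-checked with Python's urllib (an analogue, not the Java implementation) by widening the safe set with the two characters the commit stops escaping:

from urllib.parse import quote

assert quote("user's manual.txt", safe="'~") == "user's%20manual.txt"
assert quote("foo bar.txt", safe="'~") == "foo%20bar.txt"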
ba79998819eb11eb42f0eaf856ba5540312447ce
diff --git a/lib/te3270/emulators/extra.rb b/lib/te3270/emulators/extra.rb index <HASH>..<HASH> 100644 --- a/lib/te3270/emulators/extra.rb +++ b/lib/te3270/emulators/extra.rb @@ -43,8 +43,8 @@ module TE3270 screen.SendKeys(keys) end - def wait_for_string(str) - screen.WaitForString(str) + def wait_for_string(str, row, column) + screen.WaitForString(str, row, column) end def wait_for_host(seconds) diff --git a/spec/lib/te3270/emulators/extra_spec.rb b/spec/lib/te3270/emulators/extra_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/te3270/emulators/extra_spec.rb +++ b/spec/lib/te3270/emulators/extra_spec.rb @@ -115,9 +115,9 @@ describe TE3270::Emulators::Extra do end it 'should wait for a string to appear' do - extra_screen.should_receive(:WaitForString).with('The String') + extra_screen.should_receive(:WaitForString).with('The String', 3, 10) extra.connect - extra.wait_for_string('The String') + extra.wait_for_string('The String', 3, 10) end it 'should wait for the host to be quiet' do
changed signature of wait_for_string
cheezy_te3270
train
076ef9984647bf4a9990f6d9d479a490f3419a79
diff --git a/src/effects/GlitchEffect.js b/src/effects/GlitchEffect.js index <HASH>..<HASH> 100644 --- a/src/effects/GlitchEffect.js +++ b/src/effects/GlitchEffect.js @@ -1,6 +1,5 @@ import { DataTexture, - FloatType, NearestFilter, RepeatWrapping, RGBFormat, @@ -20,7 +19,7 @@ import fragmentShader from "./glsl/glitch/shader.frag"; * @private */ -const generatedTexture = "Glitch.Generated"; +const tag = "Glitch.Generated"; /** * Returns a random float in the specified range. @@ -231,7 +230,7 @@ export class GlitchEffect extends Effect { setPerturbationMap(perturbationMap) { - if(this.perturbationMap !== null && this.perturbationMap.name === generatedTexture) { + if(this.perturbationMap !== null && this.perturbationMap.name === tag) { this.perturbationMap.dispose(); @@ -255,22 +254,22 @@ export class GlitchEffect extends Effect { generatePerturbationMap(size = 64) { const pixels = size * size; - const data = new Float32Array(pixels * 3); + const data = new Uint8Array(pixels * 3); - let i, x; + let i, l, x; - for(i = 0; i < pixels; ++i) { + for(i = 0, l = data.length; i < l; i += 3) { - x = Math.random(); + x = Math.random() * 255; - data[i * 3] = x; - data[i * 3 + 1] = x; - data[i * 3 + 2] = x; + data[i] = x; + data[i + 1] = x; + data[i + 2] = x; } - const map = new DataTexture(data, size, size, RGBFormat, FloatType); - map.name = generatedTexture; + const map = new DataTexture(data, size, size, RGBFormat); + map.name = tag; map.needsUpdate = true; return map;
Use UnsignedByteType instead of FloatType FloatType is not available on many mobile devices and doesn't yield any noticeable visual improvements in this case.
vanruesc_postprocessing
train
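The data-layout half of the change, sketched with NumPy: one random byte per pixel repeated across R, G and B — the same grayscale noise as before, just Uint8 instead of Float32. Sizes are illustrative:

import numpy as np

size = 64
gray = np.random.randint(0, 256, size * size, dtype=np.uint8)
data = np.repeat(gray, 3)  # [r0, g0, b0, r1, g1, b1, ...] with r == g == b

assert data.dtype == np.uint8 and data.size == size * size * 3
assert np.array_equal(data[0::3], data[1::3])  # channels agree per pixel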
21cf9bf1e9fc91caf94af0b0250013bd3ca2b40c
diff --git a/aws/resource_aws_inspector_assessment_target.go b/aws/resource_aws_inspector_assessment_target.go index <HASH>..<HASH> 100644 --- a/aws/resource_aws_inspector_assessment_target.go +++ b/aws/resource_aws_inspector_assessment_target.go @@ -104,11 +104,11 @@ func resourceAwsInspectorAssessmentTargetUpdate(d *schema.ResourceData, meta int func resourceAwsInspectorAssessmentTargetDelete(d *schema.ResourceData, meta interface{}) error { conn := meta.(*AWSClient).inspectorconn - - return resource.Retry(60*time.Minute, func() *resource.RetryError { - _, err := conn.DeleteAssessmentTarget(&inspector.DeleteAssessmentTargetInput{ - AssessmentTargetArn: aws.String(d.Id()), - }) + input := &inspector.DeleteAssessmentTargetInput{ + AssessmentTargetArn: aws.String(d.Id()), + } + err := resource.Retry(60*time.Minute, func() *resource.RetryError { + _, err := conn.DeleteAssessmentTarget(input) if isAWSErr(err, inspector.ErrCodeAssessmentRunInProgressException, "") { return resource.RetryableError(err) @@ -120,7 +120,13 @@ func resourceAwsInspectorAssessmentTargetDelete(d *schema.ResourceData, meta int return nil }) - + if isResourceTimeoutError(err) { + _, err = conn.DeleteAssessmentTarget(input) + } + if err != nil { + return fmt.Errorf("Error deleting Inspector Assessment Target: %s", err) + } + return nil } func describeInspectorAssessmentTarget(conn *inspector.Inspector, arn string) (*inspector.AssessmentTarget, error) {
Final retry after timeout deleting inspector assessment target
terraform-providers_terraform-provider-aws
train
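The shape this commit settles on — retry a transient error until a deadline, then make one last attempt so the timeout alone does not fail the delete — in a Python sketch. Timings are shrunk for illustration; the real code waits up to 60 minutes and re-checks with isResourceTimeoutError:

import time

class RunInProgress(Exception):
    pass  # stands in for ErrCodeAssessmentRunInProgressException

def delete_with_retry(delete, timeout_s=2.0, wait_s=0.1):
    deadline = time.monotonic() + timeout_s
    while time.monotonic() < deadline:
        try:
            return delete()
        except RunInProgress:
            time.sleep(wait_s)  # transient: an assessment run is in progress
    return delete()  # one final attempt after the retry window closes

attempts = {"n": 0}
def flaky_delete():
    attempts["n"] += 1
    if attempts["n"] < 3:
        raise RunInProgress()
    return "deleted"

print(delete_with_retry(flaky_delete))  # -> deleted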