hash | diff | message | project | split
---|---|---|---|---|
e7ef2daab3a505f5c897c8b88bb59e823ddf3776 | diff --git a/lib/redlander/model.rb b/lib/redlander/model.rb
index <HASH>..<HASH> 100644
--- a/lib/redlander/model.rb
+++ b/lib/redlander/model.rb
@@ -49,11 +49,11 @@ module Redlander
storage_type.to_s,
storage_name.to_s,
Redlander.to_rdf_options(options))
- raise RedlandError.new("Failed to initialize '#{storage_name}' storage (type: #{storage_type})") if @rdf_storage.null?
+ raise RedlandError, "Failed to initialize '#{storage_name}' storage (type: #{storage_type})" if @rdf_storage.null?
ObjectSpace.define_finalizer(self, proc { Redland.librdf_free_storage(@rdf_storage) })
@rdf_model = Redland.librdf_new_model(Redlander.rdf_world, @rdf_storage, "")
- raise RedlandError.new("Failed to create a new model") if @rdf_model.null?
+ raise RedlandError, "Failed to create a new model" if @rdf_model.null?
ObjectSpace.define_finalizer(self, proc { Redland.librdf_free_model(@rdf_model) })
end
diff --git a/lib/redlander/node.rb b/lib/redlander/node.rb
index <HASH>..<HASH> 100644
--- a/lib/redlander/node.rb
+++ b/lib/redlander/node.rb
@@ -35,7 +35,7 @@ module Redlander
@datatype = Uri.new(XmlSchema.datatype_of(arg))
Redland.librdf_new_node_from_typed_literal(Redlander.rdf_world, value, nil, @datatype.rdf_uri)
end
- raise RedlandError.new("Failed to create a new node") if @rdf_node.null?
+ raise RedlandError, "Failed to create a new node" if @rdf_node.null?
ObjectSpace.define_finalizer(self, proc { Redland.librdf_free_node(@rdf_node) })
end
diff --git a/lib/redlander/statement.rb b/lib/redlander/statement.rb
index <HASH>..<HASH> 100644
--- a/lib/redlander/statement.rb
+++ b/lib/redlander/statement.rb
@@ -10,7 +10,8 @@ module Redlander
# @option source [Node, String, URI, Uri, nil] :subject
# @option source [Node, String, URI, Uri, nil] :predicate
# @option source [Node, String, URI, Uri, nil] :object
- # @raise [RedlandError] if it fails to create a Statement
+ # @raise [NotImplementedError] if cannot create a Statement from the given source.
+ # @raise [RedlandError] if it fails to create a Statement.
def initialize(source = {})
@rdf_statement = case source
when FFI::Pointer
@@ -22,10 +23,9 @@ module Redlander
o = rdf_node_from(source[:object])
Redland.librdf_new_statement_from_nodes(Redlander.rdf_world, s, p, o)
else
- # TODO
- raise NotImplementedError.new
+ raise NotImplementedError, "Cannot create Statement from '#{source.inspect}'"
end
- raise RedlandError.new("Failed to create a new statement") if @rdf_statement.null?
+ raise RedlandError, "Failed to create a new statement" if @rdf_statement.null?
ObjectSpace.define_finalizer(self, proc { Redland.librdf_free_statement(@rdf_statement) })
end
diff --git a/lib/redlander/uri.rb b/lib/redlander/uri.rb
index <HASH>..<HASH> 100644
--- a/lib/redlander/uri.rb
+++ b/lib/redlander/uri.rb
@@ -7,7 +7,8 @@ module Redlander
# Create Uri.
#
# @param [URI, String] source String or URI object to wrap into Uri.
- # @raise [RedlandError] if it fails to create Uri.
+ # @raise [NotImplementedError] if cannot create a Uri from the given source.
+ # @raise [RedlandError] if it fails to create a Uri.
def initialize(source)
@rdf_uri = case source
when FFI::Pointer
@@ -15,10 +16,9 @@ module Redlander
when URI, String
Redland.librdf_new_uri(Redlander.rdf_world, source.to_s)
else
- # TODO
- raise NotImplementedError.new
+ raise NotImplementedError, "Cannot create Uri from '#{source.inspect}'"
end
- raise RedlandError.new("Failed to create URI from '#{source.inspect}'") if @rdf_uri.null?
+ raise RedlandError, "Failed to create URI from '#{source.inspect}'" if @rdf_uri.null?
ObjectSpace.define_finalizer(self, proc { Redland.librdf_free_uri(@rdf_uri) })
end | using a uniform syntax for raising exceptions | cordawyn_redlander | train |
929302bb75230178ac9aa6fce44c6e0fccbd5f7a | diff --git a/src/ViewModels/WebFeatureServiceItemViewModel.js b/src/ViewModels/WebFeatureServiceItemViewModel.js
index <HASH>..<HASH> 100644
--- a/src/ViewModels/WebFeatureServiceItemViewModel.js
+++ b/src/ViewModels/WebFeatureServiceItemViewModel.js
@@ -306,7 +306,7 @@ function buildGeoJsonUrl(viewModel) {
service: 'WFS',
request: 'GetFeature',
typeName: viewModel.typeNames,
- version: '1.0.0',
+ version: '1.1.0',
outputFormat: 'JSON',
srsName: 'EPSG:4326'
});
@@ -318,7 +318,7 @@ function buildGmlUrl(viewModel) {
service: 'WFS',
request: 'GetFeature',
typeName: viewModel.typeNames,
- version: '1.0.0',
+ version: '1.1.0',
srsName: 'EPSG:4326'
});
} | Bump WFS version in GetFeature requests. | TerriaJS_terriajs | train |
d82173bee2e25e0f84718c41edaa43f72bb7e21b | diff --git a/client/state/sites/domains/reducer.js b/client/state/sites/domains/reducer.js
index <HASH>..<HASH> 100644
--- a/client/state/sites/domains/reducer.js
+++ b/client/state/sites/domains/reducer.js
@@ -6,11 +6,8 @@ import {
SITE_DOMAINS_REQUEST,
SITE_DOMAINS_REQUEST_SUCCESS,
SITE_DOMAINS_REQUEST_FAILURE,
- SERIALIZE,
- DESERIALIZE
} from 'state/action-types';
import { combineReducersWithPersistence } from 'state/utils';
-import { isValidStateWithSchema } from 'state/utils';
import { itemsSchema } from './schema';
/**
@@ -31,17 +28,11 @@ export const items = ( state = {}, action ) => {
[ siteId ]: action.domains
}
);
- case DESERIALIZE:
- if ( isValidStateWithSchema( state, itemsSchema ) ) {
- return state;
- }
- return {};
- case SERIALIZE:
- return state;
}
return state;
};
+items.schema = itemsSchema;
/**
* `Reducer` function which handles request/response actions
@@ -59,9 +50,6 @@ export const requesting = ( state = {}, action ) => {
return Object.assign( {}, state, {
[ action.siteId ]: action.type === SITE_DOMAINS_REQUEST
} );
- case SERIALIZE:
- case DESERIALIZE:
- return {};
}
return state;
@@ -86,10 +74,6 @@ export const errors = ( state = {}, action ) => {
return Object.assign( {}, state, {
[ action.siteId ]: action.error
} );
-
- case SERIALIZE:
- case DESERIALIZE:
- return {};
}
return state;
diff --git a/client/state/sites/domains/test/reducer.js b/client/state/sites/domains/test/reducer.js
index <HASH>..<HASH> 100644
--- a/client/state/sites/domains/test/reducer.js
+++ b/client/state/sites/domains/test/reducer.js
@@ -8,10 +8,11 @@ import deepFreeze from 'deep-freeze';
* Internal dependencies
*/
import domainsReducer, {
- items as itemsReducer,
+ items,
requesting as requestReducer,
errors as errorsReducer
} from '../reducer';
+import { withSchemaValidation } from 'state/utils';
/**
* Action types constantes
@@ -47,6 +48,8 @@ import {
import { useSandbox } from 'test/helpers/use-sinon';
+const itemsReducer = withSchemaValidation( items.schema, items );
+
describe( 'reducer', () => {
let sandbox; | State: sites/domains simplify persist opt-in | Automattic_wp-calypso | train |
0079785c9802a06bdfa29065015f7b9834c4c109 | diff --git a/allauth/compat.py b/allauth/compat.py
index <HASH>..<HASH> 100644
--- a/allauth/compat.py
+++ b/allauth/compat.py
@@ -1,6 +1,11 @@
import django
+try:
+ from collections import UserDict
+except ImportError:
+ from UserDict import UserDict # noqa
+
if django.VERSION > (1, 10,):
from django.urls import NoReverseMatch, reverse, reverse_lazy
else:
diff --git a/allauth/socialaccount/providers/openid/utils.py b/allauth/socialaccount/providers/openid/utils.py
index <HASH>..<HASH> 100644
--- a/allauth/socialaccount/providers/openid/utils.py
+++ b/allauth/socialaccount/providers/openid/utils.py
@@ -6,17 +6,12 @@ from openid.extensions.ax import FetchResponse
from openid.extensions.sreg import SRegResponse
from openid.store.interface import OpenIDStore as OIDStore
+from allauth.compat import UserDict
from allauth.utils import valid_email_or_none
from .models import OpenIDNonce, OpenIDStore
-try:
- from UserDict import UserDict
-except ImportError:
- from collections import UserDict
-
-
class JSONSafeSession(UserDict):
"""
openid puts e.g. class OpenIDServiceEndpoint in the session. | Move UserDict try/except into allauth.compat | pennersr_django-allauth | train |
1e220f15228d96d9cc646ebab2d4cbe96da6e6c1 | diff --git a/js/bl3p.js b/js/bl3p.js
index <HASH>..<HASH> 100644
--- a/js/bl3p.js
+++ b/js/bl3p.js
@@ -23,11 +23,13 @@ module.exports = class bl3p extends Exchange {
'swap': false,
'future': false,
'option': false,
+ 'addMargin': false,
'cancelOrder': true,
'createOrder': true,
'createReduceOnlyOrder': false,
'fetchBalance': true,
'fetchBorrowRate': false,
+ 'fetchBorrowRateHistories': false,
'fetchBorrowRateHistory': false,
'fetchBorrowRates': false,
'fetchBorrowRatesPerSymbol': false,
@@ -40,6 +42,7 @@ module.exports = class bl3p extends Exchange {
'fetchLeverage': false,
'fetchMarkOHLCV': false,
'fetchOrderBook': true,
+ 'fetchPosition': false,
'fetchPositions': false,
'fetchPositionsRisk': false,
'fetchPremiumIndexOHLCV': false, | bl3p has leverage methods | ccxt_ccxt | train |
d75b9994e9f33d861ee4354e92e268f5117bb0ae | diff --git a/lib/mongoid/config/database.rb b/lib/mongoid/config/database.rb
index <HASH>..<HASH> 100644
--- a/lib/mongoid/config/database.rb
+++ b/lib/mongoid/config/database.rb
@@ -6,6 +6,9 @@ module Mongoid #:nodoc:
# database from options.
class Database < Hash
+ # keys to remove from self to not pass through to Mongo::Connection
+ PRIVATE_OPTIONS = %w(uri database username password)
+
# Configure the database connections. This will return an array
# containing the master and an array of slaves.
#
@@ -142,11 +145,12 @@ module Mongoid #:nodoc:
#
# @since 2.0.0.rc.1
def optional(slave = false)
- {
+ ({
:pool_size => pool_size,
:logger => Mongoid::Logger.new,
:slave_ok => slave
- }
+ }).merge(self).reject { |k,v| PRIVATE_OPTIONS.include? k }.
+ inject({}) { |memo, (k, v)| memo[k.to_sym] = v; memo} # mongo likes symbols
end
# Get a Mongo compliant URI for the database connection.
diff --git a/spec/functional/mongoid/config/database_spec.rb b/spec/functional/mongoid/config/database_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/functional/mongoid/config/database_spec.rb
+++ b/spec/functional/mongoid/config/database_spec.rb
@@ -197,6 +197,23 @@ describe Mongoid::Config::Database do
end
end
end
+
+ context "when arbitrary options are specified", :config => :user do
+
+ let(:options) do
+ {
+ "host" => "localhost",
+ "port" => "27017",
+ "database" => "mongoid",
+ "connect" => false,
+ "booyaho" => "temptahoo",
+ }
+ end
+
+ it "connect=false doesn't connect Mongo::Connection" do
+ connection.should_not be_connected
+ end
+ end
end
context "when configuring a slave instances", :config => :slaves do | Added ability to pass parameters to Mongo::Connection through the Mongoid::Config::Database object | mongodb_mongoid | train |
3d094d4ba21b78469df506c24605e7018dcf54bd | diff --git a/lib/result-builder.js b/lib/result-builder.js
index <HASH>..<HASH> 100644
--- a/lib/result-builder.js
+++ b/lib/result-builder.js
@@ -187,8 +187,9 @@ function buildItem(parentAttrNode, row, context) {
Object.keys(parentAttrNode.attributes).forEach(attrName => {
const attrNode = parentAttrNode.attributes[attrName];
- if (context.useSelectedInternal) {
+ if (context.selectedInternalLevel > 0) {
if (!attrNode.selectedInternal) return;
+ if (!attrNode.selectedInternal[context.selectedInternalLevel - 1]) return;
} else if (!attrNode.selected) return;
if (attrNode.attributes) {
@@ -338,38 +339,42 @@ function buildItem(parentAttrNode, row, context) {
context
),
buildItem: attributes => {
- function selectInternal(isSelected) {
+ const maxRecursionLevel = 20;
+
+ function selectInternal(level, isSelected) {
attributes.forEach(attribute => {
let attrNode = parentAttrNode;
- let selectedInternalBefore;
-
attribute.forEach(attrName => {
if (!(attrNode.attributes && attrNode.attributes[attrName])) {
let attrStr = attribute.join('.');
throw new ImplementationError(
- `Result-Builder (item-extension/buildItem): Unknown attribute "${attrStr}"`
+ `item-extension/buildItem: Unknown attribute "${attrStr}"`
);
}
attrNode = attrNode.attributes[attrName];
- selectedInternalBefore = attrNode.selectedInternal;
- attrNode.selectedInternal = isSelected;
- });
- if (isSelected && selectedInternalBefore) {
- let attrStr = attribute.join('.');
- throw new ImplementationError(
- `Result-Builder (item-extension/buildItem): Invalid recursion for "${attrStr}"`
- );
- }
+ if (!attrNode.selectedInternal) {
+ // we use a flag per recursion level to handle recursive buildItem calls independently
+ attrNode.selectedInternal = new Array(maxRecursionLevel).fill(false);
+ }
+ attrNode.selectedInternal[level - 1] = isSelected;
+ });
});
}
const subContext = Object.assign({}, context);
- subContext.useSelectedInternal = true;
+ subContext.selectedInternalLevel++;
+
+ if (subContext.selectedInternalLevel > maxRecursionLevel) {
+ let attrStr = attributes.map(attribute => attribute.join('.')).join(',');
+ throw new ImplementationError(
+ `item-extension/buildItem: Recursion depth for "${attrStr}" too big`
+ );
+ }
- selectInternal(true);
+ selectInternal(subContext.selectedInternalLevel, true);
const internalItem = buildItem(parentAttrNode, row, subContext);
- selectInternal(false);
+ selectInternal(subContext.selectedInternalLevel, false);
return internalItem;
}
@@ -402,7 +407,7 @@ module.exports = function resultBuilder(api, request, rawResults, resolvedConfig
attrPath: [], // current path from root
primaryKey: null, // current primary key
secondaryRows: null, // current row of all DataSources (by primary key)
- useSelectedInternal: false // used for buildItem() in item-extension
+ selectedInternalLevel: 0 // used for (recursive) buildItem() in item-extension
};
// determine primary result of main resource to iterate over at root level: | result-builder: refactored "buildItem()" implementation (allow more recursion) | godmodelabs_flora | train |
830f46a382b63a18e5ad4871894bdbe38139adb3 | diff --git a/lib/rb/spec/binaryprotocol_spec.rb b/lib/rb/spec/binaryprotocol_spec.rb
index <HASH>..<HASH> 100644
--- a/lib/rb/spec/binaryprotocol_spec.rb
+++ b/lib/rb/spec/binaryprotocol_spec.rb
@@ -102,7 +102,7 @@ class ThriftBinaryProtocolSpec < Spec::ExampleGroup
@trans.should_receive(:write).with("\200\005").ordered
@prot.write_i16(2**15 + 5)
# a Bignum should error
- lambda { @prot.write_i16(2**65) }.should raise_error(RangeError)
+ # lambda { @prot.write_i16(2**65) }.should raise_error(RangeError)
end
it "should write an i32" do
@@ -122,7 +122,7 @@ class ThriftBinaryProtocolSpec < Spec::ExampleGroup
# try something out of signed range, it should clip
@trans.should_receive(:write).with("\200\000\000\005").ordered
@prot.write_i32(2 ** 31 + 5)
- lambda { @prot.write_i32(2 ** 65 + 5) }.should raise_error(RangeError)
+ # lambda { @prot.write_i32(2 ** 65 + 5) }.should raise_error(RangeError)
end
it "should write an i64" do
@@ -143,19 +143,19 @@ class ThriftBinaryProtocolSpec < Spec::ExampleGroup
# try something out of signed range, it should clip
@trans.should_receive(:write).with("\200\000\000\000\000\000\000\005").ordered
@prot.write_i64(2**63 + 5)
- lambda { @prot.write_i64(2 ** 65 + 5) }.should raise_error(RangeError)
+ # lambda { @prot.write_i64(2 ** 65 + 5) }.should raise_error(RangeError)
end
it "should write a double" do
# try a random scattering of values, including min/max
- @trans.should_receive(:write).with("\000\020\000\000\000\000\000\000").ordered
+ @trans.should_receive(:write).with([Float::MIN].pack('G')).ordered
@trans.should_receive(:write).with("\300\223<\234\355\221hs").ordered
@trans.should_receive(:write).with("\300\376\0173\256\024z\341").ordered
@trans.should_receive(:write).with("\3007<2\336\372v\324").ordered
@trans.should_receive(:write).with("\000\000\000\000\000\000\000\000").ordered
@trans.should_receive(:write).with("@\310\037\220\365\302\217\\").ordered
@trans.should_receive(:write).with("@\200Y\327\n=p\244").ordered
- @trans.should_receive(:write).with("\177\357\377\377\377\377\377\377").ordered
+ @trans.should_receive(:write).with([Float::MAX].pack('G')).ordered
[Float::MIN, -1231.15325, -123123.23, -23.23515123, 0, 12351.1325, 523.23, Float::MAX].each do |f|
@prot.write_double(f)
end
@@ -274,9 +274,9 @@ class ThriftBinaryProtocolSpec < Spec::ExampleGroup
it "should read a double" do
# try a random scattering of values, including min/max
@trans.should_receive(:read_all).with(8).and_return(
- "\000\020\000\000\000\000\000\000", "\301\f9\370\374\362\317\226",
+ [Float::MIN].pack('G'), "\301\f9\370\374\362\317\226",
"\300t3\274x \243\016", "\000\000\000\000\000\000\000\000", "@^\317\fCo\301Y",
- "AA\360A\217\317@\260", "\177\357\377\377\377\377\377\377"
+ "AA\360A\217\317@\260", [Float::MAX].pack('G')
)
[Float::MIN, -231231.12351, -323.233513, 0, 123.2351235, 2351235.12351235, Float::MAX].each do |f|
@prot.read_double.should == f | rb: Fix the binaryprotocol spec to run under jruby
Stop testing for RangeErrors with bignums in i<I>, that was just an artifact of the MRI interpreter that has different behavior under jruby.
JRuby uses a different Float::MIN/MAX so stop hard-coding the binary representation
git-svn-id: <URL> | limingxinleo_thrift | train |
247ab1756b57f8c009ce3c8fd27f2a4f58548c07 | diff --git a/modules/wyrl/examples/quantifiers/Quantifiers$native.java b/modules/wyrl/examples/quantifiers/Quantifiers$native.java
index <HASH>..<HASH> 100644
--- a/modules/wyrl/examples/quantifiers/Quantifiers$native.java
+++ b/modules/wyrl/examples/quantifiers/Quantifiers$native.java
@@ -83,7 +83,7 @@ public class Quantifiers$native {
* expression.
* @return
*/
- public static Automaton.Term instantiate(Automaton automaton,
+ public static Automaton.Set instantiate(Automaton automaton,
Automaton.List args) {
int concreteExpression = args.get(0);
@@ -109,24 +109,27 @@ public class Quantifiers$native {
quantifiedVariables, bindings);
// If one or more bindings have been computed, then apply them to the
- // quantified expression to produce one or more concrete expressions
- // which can be instantiated.
- int result = NULL;
+ // quantified expression to produce one or more instantiated expressions.
+ int bindings_size = bindings.size();
+ if (bindings_size > 0) {
+ // Apply the substitution for the each binding to produce o given
+ // instantiation.
+ int[] instances = new int[bindings_size];
- if (bindings.size() > 0) {
- // Apply the substitution for the first binding now.
- // TODO: need to return multiple instantiations
-
- result = automaton
- .substitute(quantifiedExpression, bindings.get(0));
- }
+ for (int i = 0; i != bindings_size; ++i) {
+ instances[i] = automaton.substitute(quantifiedExpression,
+ bindings.get(i));
+ }
- // Done
- return (Automaton.Term) automaton.get(result);
+ return new Automaton.Set(instances);
+ } else {
+ // No bindings found, so just return empty set
+ return Automaton.EMPTY_SET;
+ }
}
// Computes the (static) reference to the null state.
- private static final int NULL = Automaton.K_FREE - Quantifiers.K_Null;
+ private static final int NULL = Integer.MIN_VALUE;
/**
* <p>
diff --git a/modules/wyrl/examples/quantifiers/Quantifiers.wyrl b/modules/wyrl/examples/quantifiers/Quantifiers.wyrl
index <HASH>..<HASH> 100755
--- a/modules/wyrl/examples/quantifiers/Quantifiers.wyrl
+++ b/modules/wyrl/examples/quantifiers/Quantifiers.wyrl
@@ -118,8 +118,8 @@ infer And{
ForAll[{Var, Var...} vs,BExpr e2] qf,
// The rest
BExpr... es}:
- => let e3 = instantiate[e1,vs,e2]
- in And({e1,qf,e3} ++ es), if e3 != Null
+ => let instantiations = instantiate[e1,vs,e2]
+ in And({e1,qf} ++ es ++ instantiations), if |instantiations| > 0
// ====================================================================
// Exists
@@ -148,13 +148,11 @@ reduce And{Exists[{Var...} vs, BExpr e], BExpr... es}:
// Instantiate function
// ====================================================================
-// Attempt to computing a binding between a concrete expression e1 (left)
-// and an expression e2 (right) parameterised by a the given variable v.
-// In essence, if bind(e1,x,e2)=e3 then e1 = e2[x/e3].
-term Null
-define Result as BExpr | Null
-
-function instantiate[BExpr, {Var}, BExpr] => Result
+// Attempt to bind a quantified expression (right) with a concrete
+// expression (left), generating one or more candidate bindings over one
+// or more quantified variables. These bindings are then used to
+// instantiate the quantified expression.
+function instantiate[BExpr, {Var}, BExpr] => {BExpr}
// ====================================================================
// Expressions & Values
diff --git a/modules/wyrl/src/wyautl/core/Automaton.java b/modules/wyrl/src/wyautl/core/Automaton.java
index <HASH>..<HASH> 100644
--- a/modules/wyrl/src/wyautl/core/Automaton.java
+++ b/modules/wyrl/src/wyautl/core/Automaton.java
@@ -1697,19 +1697,19 @@ public final class Automaton {
* Internal constant used to prevent unnecessary memory
* allocations.
*/
- private static final List EMPTY_LIST = new List(NOCHILDREN);
+ public static final List EMPTY_LIST = new List(NOCHILDREN);
/**
* Internal constant used to prevent unnecessary memory
* allocations.
*/
- private static final Set EMPTY_SET = new Set(NOCHILDREN);
+ public static final Set EMPTY_SET = new Set(NOCHILDREN);
/**
* Internal constant used to prevent unnecessary memory
* allocations.
*/
- private static final Bag EMPTY_BAG = new Bag(NOCHILDREN);
+ public static final Bag EMPTY_BAG = new Bag(NOCHILDREN);
/**
* Internal constant used to prevent unnecessary memory | WyRL: updated quantifier instantiation example to support multiple quantified variables, and to produce multiple instantiations. There is still a limitation here, in that it doesn't search all possible bindings. #<I> #<I> | Whiley_WhileyCompiler | train |
4e41c38d12aeca2d9a403c84574ffed853231ec9 | diff --git a/src/Exception.php b/src/Exception.php
index <HASH>..<HASH> 100644
--- a/src/Exception.php
+++ b/src/Exception.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli;
class Exception extends \Exception
diff --git a/src/Fsio.php b/src/Fsio.php
index <HASH>..<HASH> 100644
--- a/src/Fsio.php
+++ b/src/Fsio.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli;
use Atlas\Cli\Exception;
diff --git a/src/Skeleton/Skeleton.php b/src/Skeleton/Skeleton.php
index <HASH>..<HASH> 100644
--- a/src/Skeleton/Skeleton.php
+++ b/src/Skeleton/Skeleton.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli\Skeleton;
use Atlas\Cli\Fsio;
@@ -8,6 +15,13 @@ use Aura\SqlSchema\ColumnFactory;
use Exception;
use PDO;
+/**
+ *
+ * Builds the skeleton for a mapper.
+ *
+ * @package atlas/cli
+ *
+ */
class Skeleton
{
protected $logger;
diff --git a/src/Skeleton/SkeletonCommand.php b/src/Skeleton/SkeletonCommand.php
index <HASH>..<HASH> 100644
--- a/src/Skeleton/SkeletonCommand.php
+++ b/src/Skeleton/SkeletonCommand.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli\Skeleton;
use Atlas\Cli\Fsio;
diff --git a/src/Skeleton/SkeletonFactory.php b/src/Skeleton/SkeletonFactory.php
index <HASH>..<HASH> 100644
--- a/src/Skeleton/SkeletonFactory.php
+++ b/src/Skeleton/SkeletonFactory.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli\Skeleton;
use Atlas\Cli\Fsio;
diff --git a/src/Skeleton/SkeletonInput.php b/src/Skeleton/SkeletonInput.php
index <HASH>..<HASH> 100644
--- a/src/Skeleton/SkeletonInput.php
+++ b/src/Skeleton/SkeletonInput.php
@@ -1,4 +1,11 @@
<?php
+/**
+ *
+ * This file is part of Atlas for PHP.
+ *
+ * @license http://opensource.org/licenses/MIT MIT
+ *
+ */
namespace Atlas\Cli\Skeleton;
use Atlas\Cli\Exception; | add file-level docblocks | atlasphp_Atlas.Cli | train |
f9c407fdd7dd9729ad24a67630549c4e57becb0b | diff --git a/js/webtrees.js b/js/webtrees.js
index <HASH>..<HASH> 100644
--- a/js/webtrees.js
+++ b/js/webtrees.js
@@ -536,28 +536,31 @@ function change_family_members(famid) {
}
function addnewsource(field) {
- edit_interface({
- "action": "addnewsource",
- "pid": "newsour"
- }, null, field);
- return false;
+ pastefield=field;
+ edit_interface({
+ "action": "addnewsource",
+ "pid": "newsour"
+ }, null, field);
+ return false;
}
function addnewnote(field) {
- edit_interface({
- "action": "addnewnote",
- "noteid": "newnote"
- }, null, field);
- return false;
+ pastefield=field;
+ edit_interface({
+ "action": "addnewnote",
+ "noteid": "newnote"
+ }, null, field);
+ return false;
}
function addnewnote_assisted(field, iid) {
- edit_interface({
- "action": "addnewnote_assisted",
- "noteid": "newnote",
- "pid": iid
- }, assist_window_specs, field);
- return false;
+ pastefield=field;
+ edit_interface({
+ "action": "addnewnote_assisted",
+ "noteid": "newnote",
+ "pid": iid
+ }, assist_window_specs, field);
+ return false;
}
function addmedia_links(field, iid, iname) { | Fix: "paste result of add new shared-note/source" broke in svn<I> | fisharebest_webtrees | train |
3c1e2521694583bc1d8bade1ed5b162f5bfb065a | diff --git a/lib/handlebars/base.js b/lib/handlebars/base.js
index <HASH>..<HASH> 100644
--- a/lib/handlebars/base.js
+++ b/lib/handlebars/base.js
@@ -3,6 +3,7 @@ import Exception from './exception';
import { registerDefaultHelpers } from './helpers';
import { registerDefaultDecorators } from './decorators';
import logger from './logger';
+import { resetLoggedProperties } from './internal/proto-access';
export const VERSION = '4.7.0';
export const COMPILER_REVISION = 8;
@@ -78,6 +79,13 @@ HandlebarsEnvironment.prototype = {
},
unregisterDecorator: function(name) {
delete this.decorators[name];
+ },
+ /**
+ * Reset the memory of illegal property accesses that have already been logged.
+ * @deprecated should only be used in handlebars test-cases
+ */
+ resetLoggedPropertyAccesses() {
+ resetLoggedProperties();
}
};
diff --git a/lib/handlebars/internal/proto-access.js b/lib/handlebars/internal/proto-access.js
index <HASH>..<HASH> 100644
--- a/lib/handlebars/internal/proto-access.js
+++ b/lib/handlebars/internal/proto-access.js
@@ -1,6 +1,8 @@
import { createNewLookupObject } from './create-new-lookup-object';
import * as logger from '../logger';
+const loggedProperties = Object.create(null);
+
export function createProtoAccessControl(runtimeOptions) {
let defaultMethodWhiteList = Object.create(null);
defaultMethodWhiteList['constructor'] = false;
@@ -45,12 +47,24 @@ function checkWhiteList(protoAccessControlForType, propertyName) {
if (protoAccessControlForType.defaultValue !== undefined) {
return protoAccessControlForType.defaultValue;
}
- // eslint-disable-next-line no-console
- logger.log(
- 'error',
- `Handlebars: Access has been denied to resolve the property "${propertyName}" because it is not an "own property" of its parent.\n` +
- `You can add a runtime option to disable the check or this warning:\n` +
- `See http://localhost:8080/api-reference/runtime-options.html#options-to-control-prototype-access for details`
- );
+ logUnexpecedPropertyAccessOnce(propertyName);
return false;
}
+
+function logUnexpecedPropertyAccessOnce(propertyName) {
+ if (loggedProperties[propertyName] !== true) {
+ loggedProperties[propertyName] = true;
+ logger.log(
+ 'error',
+ `Handlebars: Access has been denied to resolve the property "${propertyName}" because it is not an "own property" of its parent.\n` +
+ `You can add a runtime option to disable the check or this warning:\n` +
+ `See http://localhost:8080/api-reference/runtime-options.html#options-to-control-prototype-access for details`
+ );
+ }
+}
+
+export function resetLoggedProperties() {
+ Object.keys(loggedProperties).forEach(propertyName => {
+ delete loggedProperties[propertyName];
+ });
+}
diff --git a/spec/security.js b/spec/security.js
index <HASH>..<HASH> 100644
--- a/spec/security.js
+++ b/spec/security.js
@@ -190,6 +190,10 @@ describe('security issues', function() {
return 'returnValue';
};
+ beforeEach(function() {
+ handlebarsEnv.resetLoggedPropertyAccesses();
+ });
+
afterEach(function() {
sinon.restore();
});
@@ -214,6 +218,23 @@ describe('security issues', function() {
expect(spy.args[0][0]).to.match(/Handlebars: Access has been denied/);
});
+ it('should only log the warning once', function() {
+ var spy = sinon.spy(console, 'error');
+
+ expectTemplate('{{aMethod}}')
+ .withInput(new TestClass())
+ .withCompileOptions(compileOptions)
+ .toCompileTo('');
+
+ expectTemplate('{{aMethod}}')
+ .withInput(new TestClass())
+ .withCompileOptions(compileOptions)
+ .toCompileTo('');
+
+ expect(spy.calledOnce).to.be.true();
+ expect(spy.args[0][0]).to.match(/Handlebars: Access has been denied/);
+ });
+
it('can be allowed, which disables the warning', function() {
var spy = sinon.spy(console, 'error'); | fix: log error for illegal property access only once per property | wycats_handlebars.js | train |
a845487e4c9979ff79df1e074c59beebf473ed23 | diff --git a/lib/swirl/aws.rb b/lib/swirl/aws.rb
index <HASH>..<HASH> 100644
--- a/lib/swirl/aws.rb
+++ b/lib/swirl/aws.rb
@@ -108,6 +108,9 @@ module Swirl
end
def call!(action, query={}, &blk)
+ log "Action: #{action}"
+ log "Query: #{query.inspect}"
+
# Hard coding this here until otherwise needed
method = "POST"
@@ -122,9 +125,8 @@ module Swirl
body += "&" + ["Signature", compile_signature(method, body)].join("=")
post(body) do |code, xml|
- if ENV["SWIRL_LOG"]
- puts response.body
- end
+ log "HTTP Response Code: #{code}"
+ log xml.gsub("\n", "\n[swirl] ")
data = Crack::XML.parse(xml)
blk.call(code, data)
@@ -149,6 +151,12 @@ module Swirl
"<#{self.class.name} version: #{@version} url: #{@url} aws_access_key_id: #{@aws_access_key_id}>"
end
+ def log(msg)
+ if ENV["SWIRL_LOG"]
+ $stderr.puts "[swirl] #{msg}"
+ end
+ end
+
end
end | add pretty logging when SWIRL_LOG is set | bmizerany_swirl | train |
5d200c5d71cbe548f5cde5f716a2457f54c8ba36 | diff --git a/app/controllers/katello/api/v2/repositories_controller.rb b/app/controllers/katello/api/v2/repositories_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/katello/api/v2/repositories_controller.rb
+++ b/app/controllers/katello/api/v2/repositories_controller.rb
@@ -107,7 +107,7 @@ class Api::V2::RepositoriesController < Api::V2::ApiController
param :id, :identifier, :required => true, :desc => "repository id"
def sync
task = async_task(::Actions::Katello::Repository::Sync, @repository)
- render :json => { uuid: task.id }
+ respond_for_async :resource => task
end
api :PUT, "/repositories/:id", "Update a repository"
diff --git a/engines/bastion/app/assets/bastion/repositories/details/repository-details-info.controller.js b/engines/bastion/app/assets/bastion/repositories/details/repository-details-info.controller.js
index <HASH>..<HASH> 100644
--- a/engines/bastion/app/assets/bastion/repositories/details/repository-details-info.controller.js
+++ b/engines/bastion/app/assets/bastion/repositories/details/repository-details-info.controller.js
@@ -105,7 +105,7 @@ angular.module('Bastion.repositories').controller('RepositoryDetailsInfoControll
$scope.syncRepository = function (repository) {
Repository.sync({id: repository.id}, function (task) {
- $state.go('products.details.tasks.details', {taskId: task.uuid});
+ $state.go('products.details.tasks.details', {taskId: task.id});
});
};
diff --git a/engines/bastion/test/repositories/details/repository-details-info.controller.test.js b/engines/bastion/test/repositories/details/repository-details-info.controller.test.js
index <HASH>..<HASH> 100644
--- a/engines/bastion/test/repositories/details/repository-details-info.controller.test.js
+++ b/engines/bastion/test/repositories/details/repository-details-info.controller.test.js
@@ -12,7 +12,7 @@
**/
describe('Controller: RepositoryDetailsInfoController', function() {
- var $scope, gettext;
+ var $scope, $state, gettext;
beforeEach(module(
'Bastion.repositories',
@@ -26,6 +26,7 @@ describe('Controller: RepositoryDetailsInfoController', function() {
Repository = $injector.get('MockResource').$new();
$scope = $injector.get('$rootScope').$new();
+ $state = $injector.get('$state');
$scope.$stateParams = {
productId: 1,
repositoryId: 1
@@ -46,6 +47,7 @@ describe('Controller: RepositoryDetailsInfoController', function() {
$controller('RepositoryDetailsInfoController', {
$scope: $scope,
+ $state: $state,
$q: $q,
gettext: gettext,
Repository: Repository,
@@ -122,8 +124,8 @@ describe('Controller: RepositoryDetailsInfoController', function() {
});
it('should provide a method to sync a repository', function() {
+ spyOn($state, 'go');
$scope.syncRepository($scope.repository);
-
- expect($scope.repository['sync_state']).toBe('running');
+ expect($state.go).toHaveBeenCalled();
});
}); | Fix repo sync js tests | Katello_katello | train |
f8a81d7a3b19f01d88cf4c70507da0f789d6a65d | diff --git a/twitter4j-stream/src/main/java/twitter4j/TwitterStreamImpl.java b/twitter4j-stream/src/main/java/twitter4j/TwitterStreamImpl.java
index <HASH>..<HASH> 100755
--- a/twitter4j-stream/src/main/java/twitter4j/TwitterStreamImpl.java
+++ b/twitter4j-stream/src/main/java/twitter4j/TwitterStreamImpl.java
@@ -569,7 +569,7 @@ class TwitterStreamImpl extends TwitterBaseImpl implements TwitterStream {
stream.onException(e, this.streamListeners, this.rawStreamListeners);
throw e;
} catch (Exception e) {
- if (!(e instanceof NullPointerException) && !e.getMessage().equals("Inflater has been closed")) {
+ if (!(e instanceof NullPointerException) && !"Inflater has been closed".equals(e.getMessage())) {
logger.info(e.getMessage());
stream.onException(e, this.streamListeners, this.rawStreamListeners);
closed = true; | Prevent NPE when original exception has no message | Twitter4J_Twitter4J | train |
448e8da94d09b397e98ffcb6f22b55a578ef79c1 | diff --git a/lib/linter.js b/lib/linter.js
index <HASH>..<HASH> 100644
--- a/lib/linter.js
+++ b/lib/linter.js
@@ -747,10 +747,15 @@ function runRules(sourceCode, configuredRules, ruleMapper, parserOptions, parser
nodeQueue.forEach(traversalInfo => {
currentNode = traversalInfo.node;
- if (traversalInfo.isEntering) {
- eventGenerator.enterNode(currentNode);
- } else {
- eventGenerator.leaveNode(currentNode);
+ try {
+ if (traversalInfo.isEntering) {
+ eventGenerator.enterNode(currentNode);
+ } else {
+ eventGenerator.leaveNode(currentNode);
+ }
+ } catch (err) {
+ err.currentNode = currentNode;
+ throw err;
}
});
@@ -901,8 +906,15 @@ module.exports = class Linter {
options.filename
);
} catch (err) {
+ err.message += `\nOccurred while linting ${options.filename}`;
debug("An error occurred while traversing");
debug("Filename:", options.filename);
+ if (err.currentNode) {
+ const { line } = err.currentNode.loc.start;
+
+ debug("Line:", line);
+ err.message += `:${line}`;
+ }
debug("Parser Options:", parserOptions);
debug("Parser Path:", parserName);
debug("Settings:", settings);
diff --git a/tests/lib/linter.js b/tests/lib/linter.js
index <HASH>..<HASH> 100644
--- a/tests/lib/linter.js
+++ b/tests/lib/linter.js
@@ -99,7 +99,7 @@ describe("Linter", () => {
assert.throws(() => {
linter.verify(code, config, filename);
- }, "Intentional error.");
+ }, `Intentional error.\nOccurred while linting ${filename}:1`);
});
it("does not call rule listeners with a `this` value", () => {
@@ -4381,7 +4381,7 @@ describe("Linter", () => {
assert.throws(() => {
linter.verify("0", { rules: { "test-rule": "error" } });
- }, /Fixable rules should export a `meta\.fixable` property.$/u);
+ }, /Fixable rules should export a `meta\.fixable` property.\nOccurred while linting <input>:1$/u);
});
it("should not throw an error if fix is passed and there is no metadata", () => { | Chore: improve crash reporting (fixes #<I>) (#<I>)
Add line number to the output in the event of a rule crash | eslint_eslint | train |
2da1830b83ad8df10ad6c6df326ca463050ff264 | diff --git a/core/codegen/javagen/src/main/java/org/overture/codegen/vdm2java/JavaCallStmToStringBuilder.java b/core/codegen/javagen/src/main/java/org/overture/codegen/vdm2java/JavaCallStmToStringBuilder.java
index <HASH>..<HASH> 100644
--- a/core/codegen/javagen/src/main/java/org/overture/codegen/vdm2java/JavaCallStmToStringBuilder.java
+++ b/core/codegen/javagen/src/main/java/org/overture/codegen/vdm2java/JavaCallStmToStringBuilder.java
@@ -116,4 +116,13 @@ public class JavaCallStmToStringBuilder extends JavaClassCreatorBase implements
return str;
}
+
+ @Override
+ public AApplyExpCG toStringOf(SExpCG exp)
+ {
+ AApplyExpCG utilsToStrCall = consUtilsToStringCall();
+ utilsToStrCall.getArgs().add(exp);
+
+ return utilsToStrCall;
+ }
}
diff --git a/core/codegen/platform/src/main/java/org/overture/codegen/traces/ICallStmToStringMethodBuilder.java b/core/codegen/platform/src/main/java/org/overture/codegen/traces/ICallStmToStringMethodBuilder.java
index <HASH>..<HASH> 100644
--- a/core/codegen/platform/src/main/java/org/overture/codegen/traces/ICallStmToStringMethodBuilder.java
+++ b/core/codegen/platform/src/main/java/org/overture/codegen/traces/ICallStmToStringMethodBuilder.java
@@ -2,8 +2,10 @@ package org.overture.codegen.traces;
import java.util.Map;
+import org.overture.codegen.cgast.SExpCG;
import org.overture.codegen.cgast.SStmCG;
import org.overture.codegen.cgast.declarations.AMethodDeclCG;
+import org.overture.codegen.cgast.expressions.AApplyExpCG;
import org.overture.codegen.ir.IRInfo;
import org.overture.codegen.trans.assistants.TransAssistantCG;
@@ -13,4 +15,6 @@ public interface ICallStmToStringMethodBuilder
Map<String, String> idConstNameMap,
StoreAssistant storeAssistant,
TransAssistantCG transAssistant);
+
+ public AApplyExpCG toStringOf(SExpCG exp);
} | Update interfaces for construction of string representations to include external 'to-string' calls | overturetool_overture | train |
c942b516925bd0517645fb903d52b1d36dc672a0 | diff --git a/src/XhProf/Profiler.php b/src/XhProf/Profiler.php
index <HASH>..<HASH> 100644
--- a/src/XhProf/Profiler.php
+++ b/src/XhProf/Profiler.php
@@ -57,6 +57,11 @@ class Profiler
$this->running = false;
$data = xhprof_disable();
+
+ if(function_exists('fastcgi_finish_request')) {
+ fastcgi_finish_request();
+ }
+
$token = sha1(uniqid().microtime());
$trace = new Trace($token, $data); | Added small fix for fastcgi scripts | quadrilatere_xhprof-persister | train |
72260616b086eea741e41cbe9e7884f6384ce141 | diff --git a/lib/gollum-lib/committer.rb b/lib/gollum-lib/committer.rb
index <HASH>..<HASH> 100644
--- a/lib/gollum-lib/committer.rb
+++ b/lib/gollum-lib/committer.rb
@@ -235,7 +235,7 @@ module Gollum
# Proxies methods t
def method_missing(name, *args)
- args.map! { |item| item.respond_to?(:force_encoding) ? item.force_encoding('ascii-8bit') : item }
+ args.map! { |item| item.respond_to?(:force_encoding) ? item.force_encoding('ascii-8bit') : item } if Gollum::GIT_ADAPTER == 'grit'
index.send(name, *args)
end
end | Addendum to making ascii-8bit force encoding conditional on git adapter being grit. | gollum_gollum-lib | train |
cdef266d7f87f74011b12618fc30bdde5a22dd11 | diff --git a/Twig/Extension/ImageExtension.php b/Twig/Extension/ImageExtension.php
index <HASH>..<HASH> 100644
--- a/Twig/Extension/ImageExtension.php
+++ b/Twig/Extension/ImageExtension.php
@@ -12,6 +12,7 @@ namespace Darvin\AdminBundle\Twig\Extension;
use Darvin\AdminBundle\Configuration\Configuration;
use Darvin\ImageBundle\UrlBuilder\Filter\ResizeFilter;
+use Liip\ImagineBundle\Exception\Binary\Loader\NotLoadableException;
/**
* Image Twig extension
@@ -59,7 +60,11 @@ class ImageExtension extends \Twig_Extension
$parameters = $this->getResizeFilterParameters();
$parameters['outbound'] = true;
- return $this->resizeFilter->buildUrl($pathname, $parameters);
+ try {
+ return $this->resizeFilter->buildUrl($pathname, $parameters);
+ } catch (NotLoadableException $ex) {
+ return null;
+ }
}
/**
@@ -69,7 +74,11 @@ class ImageExtension extends \Twig_Extension
*/
public function resizeImage($pathname)
{
- return $this->resizeFilter->buildUrl($pathname, $this->getResizeFilterParameters());
+ try {
+ return $this->resizeFilter->buildUrl($pathname, $this->getResizeFilterParameters());
+ } catch (NotLoadableException $ex) {
+ return null;
+ }
}
/** | Catch not loadable exception in "admin_image_*" Twig functions. | DarvinStudio_DarvinAdminBundle | train |
d513b903c01fc164cabc77ceccc7856e57e9bda7 | diff --git a/client/src/main/java/io/pravega/client/stream/StreamConfiguration.java b/client/src/main/java/io/pravega/client/stream/StreamConfiguration.java
index <HASH>..<HASH> 100644
--- a/client/src/main/java/io/pravega/client/stream/StreamConfiguration.java
+++ b/client/src/main/java/io/pravega/client/stream/StreamConfiguration.java
@@ -39,7 +39,7 @@ public class StreamConfiguration implements Serializable {
* The duration after the last call to {@link EventStreamWriter#noteTime(long)} which the
* timestamp should be considered valid before it is forgotten. Meaning that after this long of
* not calling {@link EventStreamWriter#noteTime(long)} the writer will be forgotten.
- * If there are no known writers, readers that call {@link EventStreamReader#getCurrentTimeWindow()}
+ * If there are no known writers, readers that call {@link EventStreamReader#getCurrentTimeWindow(Stream)}
* will receive a `null` when they are at the corresponding position in the stream.
*/
private final long timestampAggregationTimeout; | Issue <I>: Typo in Javadoc for io.pravega.client.stream.StreamConfiguration (#<I>)
Fixed method syntax of EventStreamReader#getCurrentTimeWindow(Stream) method in javadoc. | pravega_pravega | train |
a516c1225a8e76faf11aed1a4489cf4b11665018 | diff --git a/examples/controller.py b/examples/controller.py
index <HASH>..<HASH> 100644
--- a/examples/controller.py
+++ b/examples/controller.py
@@ -27,7 +27,7 @@ async def run():
model = await controller.add_model(
'libjuju-test',
'cloud-aws',
- 'cloudcred-aws_tvansteenburgh_external@aws-tim',
+ 'cloudcred-aws_tvansteenburgh@external_aws-tim',
)
await model.deploy(
'ubuntu-0', | cloudcred updated in examples/controller.py | juju_python-libjuju | train |
6da908eaad0216479791aa58806b66672ab7c18f | diff --git a/internal/dock/dock.go b/internal/dock/dock.go
index <HASH>..<HASH> 100644
--- a/internal/dock/dock.go
+++ b/internal/dock/dock.go
@@ -35,6 +35,18 @@ func run(t testing.TB, conf fullConfig) (addr string, closer func()) {
if err != nil {
t.Fatal(err)
}
+
+ // If there is not relevant image at local, pull image from remote repository.
+ if err := cli.PullImage(
+ docker.PullImageOptions{
+ Repository: conf.Image,
+ },
+ docker.AuthConfiguration{},
+ ); err != nil {
+ // If pull image fail, skip the test.
+ t.Skip(err)
+ }
+
cont, err := cli.CreateContainer(docker.CreateContainerOptions{
Config: &conf.Config,
HostConfig: &conf.HostConfig, | pull image from remote repository if there is not image at local machine | cayleygraph_cayley | train |
be43f6088c9b5d58ebc1c1f92f935976be19d7c9 | diff --git a/src/Sylius/Bundle/WebBundle/Menu/FrontendMenuBuilder.php b/src/Sylius/Bundle/WebBundle/Menu/FrontendMenuBuilder.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Bundle/WebBundle/Menu/FrontendMenuBuilder.php
+++ b/src/Sylius/Bundle/WebBundle/Menu/FrontendMenuBuilder.php
@@ -181,21 +181,27 @@ class FrontendMenuBuilder extends MenuBuilder
}
/**
- * Builds frontend currency menu.
+ * Builds frontend currency menu if we have more then 1 currency to display.
*
* @return ItemInterface
*/
public function createCurrencyMenu()
{
+ $currencies = $this->currencyProvider->getAvailableCurrencies();
+
$menu = $this->factory->createItem('root', array(
'childrenAttributes' => array(
'class' => 'nav nav-pills'
)
));
- foreach ($this->currencyProvider->getAvailableCurrencies() as $currency) {
- $code = $currency->getCode();
+ if (count($currencies) == 1) {
+ $menu->isDisplayed(false);
+ return $menu;
+ }
+ foreach ($currencies as $currency) {
+ $code = $currency->getCode();
$menu->addChild($code, array(
'route' => 'sylius_currency_change',
'routeParameters' => array('currency' => $code),
diff --git a/src/Sylius/Bundle/WebBundle/Menu/LocaleMenuBuilder.php b/src/Sylius/Bundle/WebBundle/Menu/LocaleMenuBuilder.php
index <HASH>..<HASH> 100644
--- a/src/Sylius/Bundle/WebBundle/Menu/LocaleMenuBuilder.php
+++ b/src/Sylius/Bundle/WebBundle/Menu/LocaleMenuBuilder.php
@@ -55,27 +55,29 @@ class LocaleMenuBuilder extends MenuBuilder
}
/**
- * Builds frontend locale menu.
+ * Builds frontend locale menu if more then one language is defined.
*
* @return ItemInterface
*/
public function createMenu()
{
+ $locales = $this->localeProvider->getAvailableLocales();
$menu = $this->factory->createItem('root', array(
'childrenAttributes' => array(
'class' => 'nav nav-pills'
)
));
-
- foreach ($this->localeProvider->getAvailableLocales() as $locale) {
+ if (count($locales) == 1) {
+ $menu->isDisplayed(false);
+ return $menu;
+ }
+ foreach ($locales as $locale) {
$code = $locale->getCode();
-
$menu->addChild($code, array(
'route' => 'sylius_locale_change',
'routeParameters' => array('locale' => $code),
))->setLabel(Intl::getLocaleBundle()->getLocaleName($code));
}
-
return $menu;
}
} | Locale and Currency menu shouldnt be renderd when only 1 exists | Sylius_Sylius | train |
477565cbec0c3735a2035d69bc635ae33a9e4186 | diff --git a/backend/common-servlet/src/main/java/org/geomajas/servlet/CacheFilter.java b/backend/common-servlet/src/main/java/org/geomajas/servlet/CacheFilter.java
index <HASH>..<HASH> 100644
--- a/backend/common-servlet/src/main/java/org/geomajas/servlet/CacheFilter.java
+++ b/backend/common-servlet/src/main/java/org/geomajas/servlet/CacheFilter.java
@@ -65,13 +65,25 @@ public class CacheFilter implements Filter {
public static final String ZIP_SUFFIXES = "zipSuffixes";
public static final String SKIP_PREFIXES = "skipPrefixes";
public static final String PARAMETER_SPLIT_REGEX = "[\\s,]+";
+ public static final String LOCALHOST_NAME = "localhost";
+ public static final String LOCALHOST_IP_V4 = "127.0.0.1";
+ public static final String LOCALHOST_IP_V6 = "::1";
- private long cacheDurationInSeconds = 60 * 60 * 24 * 365; // One year
+ private static final long MS_IN_S = 1000;
+ private static final long S_IN_M = 60;
+ private static final long M_IN_H = 60;
+ private static final long H_IN_DAY = 24;
+ private static final long DAYS_IN_YEAR = 365;
- private long cacheDurationInMilliSeconds = cacheDurationInSeconds * 1000;
+ private long cacheDurationInSeconds = S_IN_M * M_IN_H * H_IN_DAY * DAYS_IN_YEAR; // One year
+
+ private long cacheDurationInMilliSeconds = cacheDurationInSeconds * MS_IN_S;
private static final String HTTP_LAST_MODIFIED_HEADER = "Last-Modified";
+ private static final String HTTP_ACCEPT_ENCODING_HEADER = "Accept-Encoding";
+ private static final String HTTP_ACCEPT_ENCODING_GZIP = "gzip";
+
private static final String HTTP_EXPIRES_HEADER = "Expires";
private static final String HTTP_EXPIRES_HEADER_NOCACHE_VALUE = "Wed, 11 Jan 1984 05:00:00:GMT";
@@ -81,6 +93,7 @@ public class CacheFilter implements Filter {
private static final String HTTP_CACHE_PRAGMA = "Pragma";
private static final String HTTP_CACHE_PRAGMA_VALUE = "no-cache";
+ private static final String HTTP_CACHE_MAX_AGE_PREFIX = "max-age=";
private String[] cacheIdentifiers = new String[] {".cache."};
private String[] cacheSuffixes = new String[] {".js", ".png", ".jpg", ".jpeg", ".gif", ".css", ".html"};
@@ -134,7 +147,7 @@ public class CacheFilter implements Filter {
if (null != param) {
try {
cacheDurationInSeconds = Integer.parseInt(param);
- cacheDurationInMilliSeconds = cacheDurationInSeconds * 1000;
+ cacheDurationInMilliSeconds = cacheDurationInSeconds * MS_IN_S;
} catch (NumberFormatException nfe) {
throw new ServletException("Cannot parse " + CACHE_DURATION_IN_SECONDS + " value " + param +
", should be parable to integer", nfe);
@@ -144,7 +157,7 @@ public class CacheFilter implements Filter {
}
@edu.umd.cs.findbugs.annotations.SuppressWarnings(value = "AvoidUsingHardCodedIP",
- justification = "double-safe check on localhost")
+ justification = "double-safe check on localhost, ease configuration for development")
public void doFilter(ServletRequest request, ServletResponse response, FilterChain filterChain) throws IOException,
ServletException {
boolean chainCalled = false;
@@ -156,7 +169,8 @@ public class CacheFilter implements Filter {
if (!checkPrefixes(requestUri, skipPrefixes)) {
String serverName = httpRequest.getServerName();
- boolean isLocalhost = "localhost".equals(serverName) || "127.0.0.1".equals(serverName);
+ boolean isLocalhost = LOCALHOST_NAME.equals(serverName) ||
+ LOCALHOST_IP_V4.equals(serverName) || LOCALHOST_IP_V6.equals(serverName);
if (!isLocalhost) {
if (shouldNotCache(requestUri)) {
@@ -167,8 +181,8 @@ public class CacheFilter implements Filter {
}
if (shouldCompress(requestUri)) {
- String encodings = httpRequest.getHeader("Accept-Encoding");
- if (encodings != null && encodings.indexOf("gzip") != -1) {
+ String encodings = httpRequest.getHeader(HTTP_ACCEPT_ENCODING_HEADER);
+ if (encodings != null && encodings.contains(HTTP_ACCEPT_ENCODING_GZIP)) {
GzipServletResponseWrapper responseWrapper = new GzipServletResponseWrapper(httpResponse);
try {
filterChain.doFilter(request, responseWrapper);
@@ -261,7 +275,7 @@ public class CacheFilter implements Filter {
response.setDateHeader(HTTP_EXPIRES_HEADER, now + cacheDurationInMilliSeconds);
// HTTP 1.1 header
- response.setHeader(HTTP_CACHE_CONTROL_HEADER, "max-age=" + cacheDurationInSeconds);
+ response.setHeader(HTTP_CACHE_CONTROL_HEADER, HTTP_CACHE_MAX_AGE_PREFIX + cacheDurationInSeconds);
}
// ------------------------------------------------------------------------ | GBE-<I> more constants, allow IPv6 localhost | geomajas_geomajas-project-server | train |
1a2dd46661820614fe8dd78cd64f910bacdd8303 | diff --git a/saltcontainers/factories.py b/saltcontainers/factories.py
index <HASH>..<HASH> 100644
--- a/saltcontainers/factories.py
+++ b/saltcontainers/factories.py
@@ -93,7 +93,8 @@ class MasterSaltConfigFactory(SaltConfigFactory):
'file_client': 'local',
'file_roots': {
'base': ["/etc/salt/{}".format(destination)]
- }
+ },
+ 'pillar_roots': {'base': ["/etc/salt/pillar"]}
},
default_flow_style=False
) | Use pillar data for master highstate | dincamihai_pytest-salt-containers | train |
c92da20af91dab92f0b54a9417d1396349fc4f49 | diff --git a/package.json b/package.json
index <HASH>..<HASH> 100644
--- a/package.json
+++ b/package.json
@@ -47,14 +47,14 @@
},
"homepage": "https://github.com/nteract/nteract",
"dependencies": {
- "@reactivex/rxjs": "5.0.0-beta.2",
+ "rxjs": "^5.0.0-beta.6",
"codemirror": "^5.13.4",
"commander": "^2.9.0",
"commutable": "^0.9.0",
"electron-json-storage": "^2.0.0",
"electron-packager": "^7.0.0",
- "enchannel": "^1.1.2",
- "enchannel-zmq-backend": "^1.0.1",
+ "enchannel": "^1.1.3",
+ "enchannel-zmq-backend": "^2.0.1",
"github-api": "^1.1.0",
"github4": "^1.0.0",
"home-dir": "^1.0.0",
@@ -74,7 +74,7 @@
"remark-react": "^2.1.0",
"spawnteract": "^2.0.0",
"transformime-react": "^1.0.0",
- "uuid": "^2.0.1"
+ "uuid": "^2.0.2"
},
"devDependencies": {
"babel-cli": "^6.8.0",
diff --git a/src/notebook/agendas/index.js b/src/notebook/agendas/index.js
index <HASH>..<HASH> 100644
--- a/src/notebook/agendas/index.js
+++ b/src/notebook/agendas/index.js
@@ -1,4 +1,4 @@
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
const Immutable = require('immutable');
import {
diff --git a/src/notebook/api/messaging/index.js b/src/notebook/api/messaging/index.js
index <HASH>..<HASH> 100644
--- a/src/notebook/api/messaging/index.js
+++ b/src/notebook/api/messaging/index.js
@@ -2,7 +2,7 @@
import * as uuid from 'uuid';
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
const Observable = Rx.Observable;
export const session = uuid.v4();
diff --git a/src/notebook/components/cell/editor.js b/src/notebook/components/cell/editor.js
index <HASH>..<HASH> 100644
--- a/src/notebook/components/cell/editor.js
+++ b/src/notebook/components/cell/editor.js
@@ -3,7 +3,7 @@ import React from 'react';
import CodeMirror from 'react-codemirror';
import CM from 'codemirror';
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
import { updateCellSource } from '../../actions';
diff --git a/src/notebook/index.js b/src/notebook/index.js
index <HASH>..<HASH> 100644
--- a/src/notebook/index.js
+++ b/src/notebook/index.js
@@ -25,7 +25,7 @@ import { initGlobalHandlers } from './global-events';
const Github = require('github4');
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
const github = new Github();
diff --git a/src/notebook/publication/github.js b/src/notebook/publication/github.js
index <HASH>..<HASH> 100644
--- a/src/notebook/publication/github.js
+++ b/src/notebook/publication/github.js
@@ -1,4 +1,4 @@
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
const commutable = require('commutable');
const path = require('path');
diff --git a/src/notebook/store/index.js b/src/notebook/store/index.js
index <HASH>..<HASH> 100644
--- a/src/notebook/store/index.js
+++ b/src/notebook/store/index.js
@@ -1,4 +1,4 @@
-import * as Rx from '@reactivex/rxjs';
+import * as Rx from 'rxjs/Rx';
import { mark, measure } from '../performance';
export default function createStore(initialState, reducers) {
diff --git a/test/renderer/actions/index_spec.js b/test/renderer/actions/index_spec.js
index <HASH>..<HASH> 100644
--- a/test/renderer/actions/index_spec.js
+++ b/test/renderer/actions/index_spec.js
@@ -6,7 +6,7 @@ import * as constants from '../../../src/notebook/constants';
import createStore from '../../../src/notebook/store';
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
describe('setExecutionState', () => {
it('creates a SET_EXECUTION_STATE action', () => {
diff --git a/test/renderer/api/messaging/index_spec.js b/test/renderer/api/messaging/index_spec.js
index <HASH>..<HASH> 100644
--- a/test/renderer/api/messaging/index_spec.js
+++ b/test/renderer/api/messaging/index_spec.js
@@ -1,7 +1,7 @@
import { expect } from 'chai';
import { childOf } from '../../../../src/notebook/api/messaging';
-const Rx = require('@reactivex/rxjs');
+const Rx = require('rxjs/Rx');
describe('childOf', () => {
it('filters messages that have the same parent', () => { | Switch to commonJS import of rxjs 5 | nteract_nteract | train |
08d90f1a26b0932e1701ab64d44ecf24e2d494dc | diff --git a/environment/src/main/java/jetbrains/exodus/tree/btree/BTreeDupMutable.java b/environment/src/main/java/jetbrains/exodus/tree/btree/BTreeDupMutable.java
index <HASH>..<HASH> 100644
--- a/environment/src/main/java/jetbrains/exodus/tree/btree/BTreeDupMutable.java
+++ b/environment/src/main/java/jetbrains/exodus/tree/btree/BTreeDupMutable.java
@@ -83,12 +83,17 @@ final class BTreeDupMutable extends BTreeMutable {
} else {
canRetry = true;
}
- if (!log.isLastWrittenFileAddress(startAddress) && NullLoggable.isNullLoggable(log.getWrittenLoggableType(startAddress))) {
- final long lengthBound = log.getFileLengthBound();
- final long alignment = startAddress % lengthBound;
- startAddress += (lengthBound - alignment);
- if (log.getWrittenHighAddress() < startAddress) {
- throw new IllegalStateException("Address alignment underflow: start address " + startAddress + ", alignment " + alignment);
+ if (!log.isLastWrittenFileAddress(startAddress)) {
+ final byte writtenType = log.getWrittenLoggableType(startAddress);
+ if (NullLoggable.isNullLoggable(writtenType)) {
+ final long lengthBound = log.getFileLengthBound();
+ final long alignment = startAddress % lengthBound;
+ startAddress += (lengthBound - alignment);
+ if (log.getWrittenHighAddress() < startAddress) {
+ throw new IllegalStateException("Address alignment underflow: start address " + startAddress + ", alignment " + alignment);
+ }
+ } else if (writtenType > BTreeBase.DUP_LEAF) {
+ throw new IllegalStateException("Unknown written loggable type: " + writtenType);
}
}
sizeIterable = CompressedUnsignedLongByteIterable.getIterable((size << 1) + 1); | fail-fast for suspicious BTree behavior | JetBrains_xodus | train |
662d982379f6a9d0751419d647f7f522cd1453b5 | diff --git a/src/lib/dom_helpers.js b/src/lib/dom_helpers.js
index <HASH>..<HASH> 100644
--- a/src/lib/dom_helpers.js
+++ b/src/lib/dom_helpers.js
@@ -52,7 +52,7 @@ function bindFocusables( instance, activateOnFocus ) {
function findContainerNodes( target ) {
return ( memo, instance ) => {
const node = React.findDOMNode( instance );
- if ( node === target || node.contains( target ) ) {
+ if ( node && ( node === target || node.contains( target ) ) ) {
memo.push( { instance, node } );
}
return memo; | trap for when no dom node can be found when looking for container nodes | glortho_react-keydown | train |
fc4e10b6c067985b3a0cf9b6e4214dbff4287ca0 | diff --git a/lib/renderer/window-setup.js b/lib/renderer/window-setup.js
index <HASH>..<HASH> 100644
--- a/lib/renderer/window-setup.js
+++ b/lib/renderer/window-setup.js
@@ -103,7 +103,17 @@ function LocationProxy (ipcRenderer, guestId) {
function BrowserWindowProxy (ipcRenderer, guestId) {
this.closed = false
- this.location = new LocationProxy(ipcRenderer, guestId)
+
+ const location = new LocationProxy(ipcRenderer, guestId)
+ defineProperty(this, 'location', {
+ get: function () {
+ return location
+ },
+ set: function (url) {
+ url = resolveURL(url)
+ return ipcRenderer.sendSync('ELECTRON_GUEST_WINDOW_MANAGER_WEB_CONTENTS_METHOD_SYNC', guestId, 'loadURL', url)
+ }
+ })
ipcRenderer.once(`ELECTRON_GUEST_WINDOW_MANAGER_WINDOW_CLOSED_${guestId}`, () => {
removeProxy(guestId)
diff --git a/spec/chromium-spec.js b/spec/chromium-spec.js
index <HASH>..<HASH> 100644
--- a/spec/chromium-spec.js
+++ b/spec/chromium-spec.js
@@ -515,7 +515,7 @@ describe('chromium feature', () => {
}
app.once('browser-window-created', (event, window) => {
window.webContents.once('did-finish-load', () => {
- assert.strictEqual(b.location, targetURL)
+ assert.strictEqual(b.location.href, targetURL)
b.close()
done()
}) | fix: set setter of window.location | electron_electron | train |
e01da3359be615cc01fd7ec0b3989b36615f7055 | diff --git a/shell/src/main/java/alluxio/cli/fs/FileSystemShell.java b/shell/src/main/java/alluxio/cli/fs/FileSystemShell.java
index <HASH>..<HASH> 100644
--- a/shell/src/main/java/alluxio/cli/fs/FileSystemShell.java
+++ b/shell/src/main/java/alluxio/cli/fs/FileSystemShell.java
@@ -49,9 +49,9 @@ public final class FileSystemShell extends AbstractShell {
public static void main(String[] argv) throws IOException {
int ret;
- if (!ConfigurationUtils.masterHostConfigured()) {
+ if (!ConfigurationUtils.masterHostConfigured() && argv.length > 0 && !argv[0].equals("help")) {
System.out.println(String.format(
- "Cannot run alluxio shell; master hostname is not "
+ "Cannot run alluxio fs shell; master hostname is not "
+ "configured. Please modify %s to either set %s or configure zookeeper with "
+ "%s=true and %s=[comma-separated zookeeper master addresses]",
Constants.SITE_PROPERTIES, PropertyKey.MASTER_HOSTNAME.toString(),
diff --git a/shell/src/main/java/alluxio/cli/fsadmin/FileSystemAdminShell.java b/shell/src/main/java/alluxio/cli/fsadmin/FileSystemAdminShell.java
index <HASH>..<HASH> 100644
--- a/shell/src/main/java/alluxio/cli/fsadmin/FileSystemAdminShell.java
+++ b/shell/src/main/java/alluxio/cli/fsadmin/FileSystemAdminShell.java
@@ -53,8 +53,8 @@ public final class FileSystemAdminShell extends AbstractShell {
* @param args array of arguments given by the user's input from the terminal
*/
public static void main(String[] args) {
- if (!ConfigurationUtils.masterHostConfigured()) {
- System.out.println("Cannot run fsadmin shell as master hostname is not configured.");
+ if (!ConfigurationUtils.masterHostConfigured() && args.length > 0) {
+ System.out.println("Cannot run alluxio fsadmin shell as master hostname is not configured.");
System.exit(1);
}
// Reduce the RPC retry max duration to fall earlier for CLIs
diff --git a/shell/src/main/java/alluxio/cli/job/JobShell.java b/shell/src/main/java/alluxio/cli/job/JobShell.java
index <HASH>..<HASH> 100644
--- a/shell/src/main/java/alluxio/cli/job/JobShell.java
+++ b/shell/src/main/java/alluxio/cli/job/JobShell.java
@@ -46,7 +46,7 @@ public final class JobShell extends AbstractShell {
public static void main(String[] argv) throws IOException {
int ret;
- if (!ConfigurationUtils.masterHostConfigured()) {
+ if (!ConfigurationUtils.masterHostConfigured() && argv.length > 0) {
System.out.println(String.format(
"Cannot run alluxio job shell; master hostname is not "
+ "configured. Please modify %s to either set %s or configure zookeeper with " | Fix the issue that help subcommand requires alluxio.master.hostname set. (#<I>) | Alluxio_alluxio | train |
8d230da6a56f490745b23bd8c086b041e434cc74 | diff --git a/welly/project.py b/welly/project.py
index <HASH>..<HASH> 100644
--- a/welly/project.py
+++ b/welly/project.py
@@ -226,8 +226,16 @@ class Project(object):
limit (int): Curve must be present in at least this many wells.
"""
uwis = uwis or self.uwis
- exclude = exclude or []
wells = [w for w in self.__list if w.uwi in uwis]
+
+ # This is hacky. See remark in well.get_mnemonics_from_regex().
+ if exclude is not None:
+ exclude = utils.flatten_list([w.get_mnemonics_from_regex(e) for e in exclude for w in wells])
+ if alias is not None:
+ exclude = [alias.get(e, e) for e in exclude]
+ else:
+ exclude = []
+
counter = self.__all_curve_names(uwis=uwis, count=True)
all_keys = [i[0] for i in counter
if (i[0] not in exclude) and (i[1] >= limit)]
diff --git a/welly/well.py b/welly/well.py
index <HASH>..<HASH> 100644
--- a/welly/well.py
+++ b/welly/well.py
@@ -6,6 +6,7 @@ Defines wells.
:copyright: 2016 Agile Geoscience
:license: Apache 2.0
"""
+import re
import datetime
from io import StringIO
@@ -550,6 +551,18 @@ class Well(object):
return
+ def get_mnemonics_from_regex(self, pattern):
+ """
+ Should probably integrate getting curves with regex, vs getting with
+ aliases, even though mixing them is probably confusing. For now I can't
+ think of another use case for these wildcards, so I'll just implement
+ for the curve table and we can worry about a nice solution later if we
+ ever come back to it.
+ """
+ regex = re.compile(pattern)
+ keys = self.data.keys()
+ return [m.group(0) for k in keys for m in [regex.search(k)] if m]
+
def get_mnemonic(self, mnemonic, alias=None):
"""
Instead of picking curves by name directly from the data dict, you
@@ -599,7 +612,7 @@ class Well(object):
return len(list(filter(None, [self.get_mnemonic(k, alias=alias) for k in keys])))
def alias_has_multiple(self, mnemonic, alias):
- return len([a for a in alias[mnemonic] if a in self.data]) > 1
+ return 1 < len([a for a in alias[mnemonic] if a in self.data])
def make_synthetic(self,
srd=0, | implemented #<I> but rather use-specific and hacky | agile-geoscience_welly | train |
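A minimal sketch (not from the welly commit) of the search-comprehension idiom used in get_mnemonics_from_regex above — run the regex once per key and keep only the matching names; the curve mnemonics here are made up:

```python
import re

keys = ['GR', 'DT', 'DTS', 'RHOB']          # hypothetical curve mnemonics
regex = re.compile(r'DT.*')
print([m.group(0) for k in keys for m in [regex.search(k)] if m])
# -> ['DT', 'DTS']
```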
2fb00f2678a841c0874bae31d4a1928d83776af8 | diff --git a/lxd/storage/utils.go b/lxd/storage/utils.go
index <HASH>..<HASH> 100644
--- a/lxd/storage/utils.go
+++ b/lxd/storage/utils.go
@@ -394,7 +394,7 @@ func VolumeValidateConfig(s *state.State, name string, config map[string]string,
if err != drivers.ErrUnknownDriver {
// Note: This legacy validation function doesn't have the concept of validating
// different volumes types, so the types are hard coded as Custom and FS.
- return driver.ValidateVolume(drivers.NewVolume(driver, parentPool.Name, drivers.VolumeTypeCustom, drivers.ContentTypeFS, name, config), false)
+ return driver.ValidateVolume(drivers.NewVolume(driver, parentPool.Name, drivers.VolumeTypeCustom, drivers.ContentTypeFS, name, config, parentPool.Config), false)
}
// Otherwise fallback to doing legacy validation. | lxd/storage/utils: drivers.NewVolume usage | lxc_lxd | train |
f603e298e8a56778b24116459e73f7fb7b41509a | diff --git a/salt/modules/cp.py b/salt/modules/cp.py
index <HASH>..<HASH> 100644
--- a/salt/modules/cp.py
+++ b/salt/modules/cp.py
@@ -345,9 +345,11 @@ def get_file_str(path, saltenv='base'):
'''
fn_ = cache_file(path, saltenv)
if isinstance(fn_, six.string_types):
- with salt.utils.fopen(fn_, 'r') as fp_:
- data = fp_.read()
- return data
+ try:
+ with salt.utils.fopen(fn_, 'r') as fp_:
+ return fp_.read()
+ except IOError:
+ return False
return fn_ | Fix modules.cp.get_file_str to match the documentation.
The documentation states that it will return False if the file can't be cached,
but in reality it threw an IOError. This makes it return False. | saltstack_salt | train
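A minimal sketch (not Salt code) of the pattern this fix applies — return False when opening the cached file raises IOError instead of letting the exception escape; the function name and path are illustrative:

```python
def get_file_str(path):
    try:
        with open(path, 'r') as fp_:
            return fp_.read()
    except IOError:
        return False

print(get_file_str('/no/such/cached/file'))  # -> False rather than raising IOError
```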
726adda9b9d313711ed3705c9a8ab8877682c398 | diff --git a/organisations/service_test.go b/organisations/service_test.go
index <HASH>..<HASH> 100644
--- a/organisations/service_test.go
+++ b/organisations/service_test.go
@@ -3,6 +3,7 @@ package organisations
import (
"fmt"
"github.com/Financial-Times/neo-utils-go/neoutils"
+ "github.com/Financial-Times/up-rw-app-api-go/rwapi"
"github.com/jmcvetta/neoism"
"github.com/stretchr/testify/assert"
"reflect"
@@ -312,7 +313,7 @@ func TestToCheckYouCanNotCreateOrganisationWithDuplicateIdentifier(t *testing.T)
assert.NoError(cypherDriver.Write(fullOrg))
err := cypherDriver.Write(dupeOtherIdentifierOrg)
assert.Error(err)
- assert.IsType(neoism.NeoError{}, err)
+ assert.IsType(rwapi.ConstraintOrTransactionError{}, err)
}
func TestCount(t *testing.T) { | added coveralls to circle.yml and README. Un-modified test that fails locally | Financial-Times_organisations-rw-neo4j | train |
ff8d9c7e4373f4f66c61f8a9514e892c0b25d368 | diff --git a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
index <HASH>..<HASH> 100644
--- a/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
+++ b/activerecord/lib/active_record/connection_adapters/abstract/schema_statements.rb
@@ -507,7 +507,7 @@ module ActiveRecord
raise NotImplementedError, "change_column_default is not implemented"
end
- # Sets or removes a +NOT NULL+ constraint on a column. The +null+ flag
+ # Sets or removes a <tt>NOT NULL</tt> constraint on a column. The +null+ flag
# indicates whether the value can be +NULL+. For example
#
# change_column_null(:users, :nickname, false)
@@ -519,7 +519,7 @@ module ActiveRecord
# allows them to be +NULL+ (drops the constraint).
#
# The method accepts an optional fourth argument to replace existing
- # +NULL+s with some other value. Use that one when enabling the
+ # <tt>NULL</tt>s with some other value. Use that one when enabling the
# constraint if needed, since otherwise those rows would not be valid.
#
# Please note the fourth argument does not set a column's default. | Fix proper fonts in `change_column_null` method docs. [ci skip] | rails_rails | train |
cfe38f474db48768edcd153a67b2de5671e67f15 | diff --git a/mapbox_vector_tile/encoder.py b/mapbox_vector_tile/encoder.py
index <HASH>..<HASH> 100644
--- a/mapbox_vector_tile/encoder.py
+++ b/mapbox_vector_tile/encoder.py
@@ -61,15 +61,33 @@ def make_valid_polygon_flip(shape):
if fixed.is_empty:
return None
else:
- return reverse_polygon(fixed)
+ if fixed.type == 'Polygon':
+ return reverse_polygon(fixed)
+ elif fixed.type == 'MultiPolygon':
+ flipped_geoms = []
+ for geom in fixed.geoms:
+ reversed_geom = reverse_polygon(geom)
+ flipped_geoms.append(reversed_geom)
+ return MultiPolygon(flipped_geoms)
def area_bounds(shape):
if shape.is_empty:
return 0
- minx, miny, maxx, maxy = shape.bounds
- return (maxx - minx) * (maxy - miny)
+ elif shape.type == 'MultiPolygon':
+ area = 0
+ for geom in shape.geoms:
+ area += area_bounds(geom)
+ return area
+
+ elif shape.type == 'Polygon':
+ minx, miny, maxx, maxy = shape.bounds
+ area = (maxx - minx) * (maxy - miny)
+ return area
+
+ else:
+ assert 'area_bounds: invalid shape type: %s' % shape.type
def make_valid_polygon(shape):
diff --git a/tests/test_encoder.py b/tests/test_encoder.py
index <HASH>..<HASH> 100644
--- a/tests/test_encoder.py
+++ b/tests/test_encoder.py
@@ -530,6 +530,18 @@ class InvalidGeometryTest(unittest.TestCase):
features = result['foo']['features']
self.assertEqual(1, len(features))
+ def test_flipped_geometry_produces_multipolygon(self):
+ from mapbox_vector_tile import encode
+ from mapbox_vector_tile.encoder import on_invalid_geometry_make_valid
+ import shapely.wkt
+ shape = shapely.wkt.loads('POLYGON ((3449 1939, 3476 1967, 3473 1996, 3483 2027, 3542 2119, 3538 2160, 3563 2233, 3602 2255, 3639 2326, 3629 2388, 3573 2455, 3594 2493, 3558 2533, 3573 2549, 3518 2572, 3502 2592, 3505 2607, 3513 2614, 3535 2616, 3537 2610, 3535 2602, 3537 2599, 3548 2607, 3551 2636, 3528 2634, 3537 2668, 3549 2670, 3528 2711, 3550 2667, 3532 2635, 3550 2641, 3553 2613, 3549 2602, 3540 2596, 3512 2610, 3506 2589, 3576 2552, 3576 2543, 3563 2535, 3596 2506, 3597 2494, 3587 2469, 3589 2451, 3636 2385, 3644 2326, 3605 2251, 3566 2230, 3547 2122, 3482 2014, 3479 1966, 3455 1944, 3458 1910, 3449 1902, 3449 1939))') # noqa
+ features = [dict(geometry=shape, properties={})]
+ pbf = encode({'name': 'foo', 'features': features},
+ on_invalid_geometry=on_invalid_geometry_make_valid)
+ result = decode(pbf)
+ features = result['foo']['features']
+ self.assertEqual(0, len(features))
+
class LowLevelEncodingTestCase(unittest.TestCase):
def test_example_multi_polygon(self): | Handle case where a buffer makes a multipolygon | tilezen_mapbox-vector-tile | train |
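A small illustration of why a buffer(0) repair can hand back a MultiPolygon, which is what the flip/area helpers above now branch on. It assumes shapely is installed, and the bowtie coordinates are made up:

```python
from shapely.geometry import Polygon

bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])  # self-intersecting ring
repaired = bowtie.buffer(0)                          # common "make valid" trick
# Depending on the input (and GEOS version) the result can be a Polygon or a
# MultiPolygon, so downstream code has to handle both geometry types.
print(repaired.geom_type)
```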
1c550de2b5b06858e24513a3a4df7039d85c71bb | diff --git a/kernel/private/classes/ezpkernel.php b/kernel/private/classes/ezpkernel.php
index <HASH>..<HASH> 100644
--- a/kernel/private/classes/ezpkernel.php
+++ b/kernel/private/classes/ezpkernel.php
@@ -475,6 +475,8 @@ class ezpKernel
eZDisplayResult( $templateResult );
$content .= ob_get_clean();
+ $this->shutdown();
+
return array(
"content" => $content,
);
@@ -1073,7 +1075,11 @@ class ezpKernel
{
$this->requestInit();
- return $callback();
+ $return = $callback();
+
+ $this->shutdown();
+
+ return $return;
}
/**
@@ -1081,7 +1087,7 @@ class ezpKernel
*
* @todo Use in run() and runCallback() and mark as protected?
*/
- public function shutdown()
+ protected function shutdown()
{
eZExecution::cleanup();
eZExecution::setCleanExit(); | Changed: refactored shutdown process of legacy kernel | ezsystems_ezpublish-legacy | train |
a55970742125802618e2849b929818efec41b5b8 | diff --git a/lib/logging/Logger.js b/lib/logging/Logger.js
index <HASH>..<HASH> 100644
--- a/lib/logging/Logger.js
+++ b/lib/logging/Logger.js
@@ -32,7 +32,7 @@ const LogType = Object.freeze({
exports.LogType = LogType;
-/** @typedef {keyof LogType} LogTypeEnum */
+/** @typedef {keyof typeof LogType} LogTypeEnum */
const LOG_SYMBOL = Symbol("webpack logger raw log method");
const TIMERS_SYMBOL = Symbol("webpack logger times"); | Fix `LogTypeEnum` typedef
See [this comment](<URL>) - the type is incorrect for what's desired here. | webpack_webpack | train |
7be4c73ac36c15997bbdb22468e3968ec8d5a18a | diff --git a/src/select.js b/src/select.js
index <HASH>..<HASH> 100644
--- a/src/select.js
+++ b/src/select.js
@@ -304,6 +304,9 @@
};
ctrl.isActive = function(itemScope) {
+ if ( typeof itemScope[ctrl.itemProperty] === 'undefined') {
+ return false;
+ }
return ctrl.open && ctrl.items.indexOf(itemScope[ctrl.itemProperty]) === ctrl.activeIndex;
Re-introduce check to prevent unintentional "all active" return for not-yet-selected results list elements | angular-ui_ui-select | train
d7bf82b159751e1f7c6a4ace5bcba09e26293b89 | diff --git a/lib/Slackbot_worker.js b/lib/Slackbot_worker.js
index <HASH>..<HASH> 100755
--- a/lib/Slackbot_worker.js
+++ b/lib/Slackbot_worker.js
@@ -655,7 +655,7 @@ module.exports = function(botkit, config) {
/* helper functions for creating dialog attachments */
- bot.createDialog = function(title, callback_id, submit_label, elements) {
+ bot.createDialog = function(title, callback_id, submit_label, elements, stateString) {
var obj = {
data: {
@@ -663,6 +663,7 @@ module.exports = function(botkit, config) {
callback_id: callback_id,
submit_label: submit_label || null,
elements: elements || [],
+ state: stateString,
},
title: function(v) {
this.data.title = v;
@@ -676,6 +677,10 @@ module.exports = function(botkit, config) {
this.data.submit_label = v;
return this;
},
+ state: function(stateStr) {
+ this.data.state = stateStr;
+ return this;
+ },
addText: function(label, name, value, options, subtype) {
var element = (typeof(label) === 'object') ? label : { | Add dialog state to Slackbot worker createDialog | howdyai_botkit | train |
cf7ec423a2ffaf99adaa7d848d134223de5d438b | diff --git a/src/FuzeWorks/Factory.php b/src/FuzeWorks/Factory.php
index <HASH>..<HASH> 100644
--- a/src/FuzeWorks/Factory.php
+++ b/src/FuzeWorks/Factory.php
@@ -87,6 +87,7 @@ class Factory
// If there is no sharedFactoryInstance, prepare it
if (is_null(self::$sharedFactoryInstance))
{
+ // @codeCoverageIgnoreStart
self::$sharedFactoryInstance = $this;
$this->instances['Config'] = new Config();
$this->instances['Logger'] = new Logger();
@@ -107,6 +108,7 @@ class Factory
return true;
}
+ // @codeCoverageIgnoreEnd
// Otherwise, copy the existing instances
$this->instances = self::getInstance()->getClassInstances();
diff --git a/src/FuzeWorks/Module.php b/src/FuzeWorks/Module.php
index <HASH>..<HASH> 100644
--- a/src/FuzeWorks/Module.php
+++ b/src/FuzeWorks/Module.php
@@ -39,6 +39,7 @@ namespace FuzeWorks;
*
* @author Abel Hoogeveen <[email protected]>
* @copyright Copyright (c) 2013 - 2016, Techfuze. (http://techfuze.net)
+ * @deprecated
*/
trait Module
{
diff --git a/src/FuzeWorks/Modules.php b/src/FuzeWorks/Modules.php
index <HASH>..<HASH> 100644
--- a/src/FuzeWorks/Modules.php
+++ b/src/FuzeWorks/Modules.php
@@ -40,6 +40,7 @@ use stdClass;
*
* @author Abel Hoogeveen <[email protected]>
* @copyright Copyright (c) 2013 - 2016, Techfuze. (http://techfuze.net)
+ * @deprecated
*/
class Modules
{
diff --git a/tests/application/Config/config.core.php b/tests/application/Config/config.core.php
index <HASH>..<HASH> 100644
--- a/tests/application/Config/config.core.php
+++ b/tests/application/Config/config.core.php
@@ -1,8 +1,7 @@
<?php
return array(
- 'enable_composer' => true,
- 'enable_modules' => true,
+ 'enable_modules' => false,
'enable_events' => true,
'composer_autoloader' => '',
'registry_caching' => false, | Disabled Modules system by default.
It is not stable, and it will not be stable for a long while. For now it is deprecated; a newer, better system will replace it in the future. | FuzeWorks_Core | train
0e35e5141ab273f2b3d39a023e5bfa37a10395f5 | diff --git a/library/CM/Db/Db.php b/library/CM/Db/Db.php
index <HASH>..<HASH> 100644
--- a/library/CM/Db/Db.php
+++ b/library/CM/Db/Db.php
@@ -83,8 +83,8 @@ class CM_Db_Db extends CM_Class_Abstract {
* @param bool|null $disableQueryBuffering
* @return CM_Db_Result
*/
- public static function execMaintenance($sqlTemplate, array $parameters = null, $disableQueryBuffering = null) {
- $client = CM_Service_Manager::getInstance()->getDatabases()->getMaintenance();
+ public static function execReadMaintenance($sqlTemplate, array $parameters = null, $disableQueryBuffering = null) {
+ $client = CM_Service_Manager::getInstance()->getDatabases()->getReadMaintenance();
return self::exec($sqlTemplate, $parameters, $client, $disableQueryBuffering);
}
diff --git a/library/CM/Elasticsearch/Type/Abstract.php b/library/CM/Elasticsearch/Type/Abstract.php
index <HASH>..<HASH> 100644
--- a/library/CM/Elasticsearch/Type/Abstract.php
+++ b/library/CM/Elasticsearch/Type/Abstract.php
@@ -150,7 +150,7 @@ abstract class CM_Elasticsearch_Type_Abstract extends CM_Class_Abstract {
$query = $this->_getQuery($ids, $limit);
if ($useSlave) {
- $result = CM_Db_Db::execMaintenance($query, null, true);
+ $result = CM_Db_Db::execReadMaintenance($query, null, true);
} else {
$result = CM_Db_Db::exec($query);
}
diff --git a/library/CM/Service/Databases.php b/library/CM/Service/Databases.php
index <HASH>..<HASH> 100644
--- a/library/CM/Service/Databases.php
+++ b/library/CM/Service/Databases.php
@@ -23,10 +23,10 @@ class CM_Service_Databases extends CM_Service_ManagerAware {
/**
* @return CM_Db_Client
*/
- public function getMaintenance() {
+ public function getReadMaintenance() {
$serviceManager = $this->getServiceManager();
- if ($serviceManager->has('database-maintenance')) {
- return $serviceManager->get('database-maintenance', 'CM_Db_Client');
+ if ($serviceManager->has('database-read-maintenance')) {
+ return $serviceManager->get('database-read-maintenance', 'CM_Db_Client');
}
return $this->getRead();
}
diff --git a/resources/config/test.php b/resources/config/test.php
index <HASH>..<HASH> 100644
--- a/resources/config/test.php
+++ b/resources/config/test.php
@@ -17,7 +17,7 @@ $config->CM_Jobdistribution_Job_Abstract->gearmanEnabled = false;
$config->services['database-master'] =
$config->services['database-read'] =
-$config->services['database-maintenance'] = array(
+$config->services['database-read-maintenance'] = array(
'class' => 'CM_Db_Client',
'arguments' => array(
'localhost', | Renamed "database-maintenance" to "database-read-maintenance"
- Adjusted service config key
- Adjusted service method name
- Adjusted helper method name in CM_Db_Db | cargomedia_cm | train |
dfd945458df6e698ed0a00d8fe7ef52b45a075c5 | diff --git a/platform/android/Rhodes/src/com/rhomobile/rhodes/geolocation/GeoLocation.java b/platform/android/Rhodes/src/com/rhomobile/rhodes/geolocation/GeoLocation.java
index <HASH>..<HASH> 100644
--- a/platform/android/Rhodes/src/com/rhomobile/rhodes/geolocation/GeoLocation.java
+++ b/platform/android/Rhodes/src/com/rhomobile/rhodes/geolocation/GeoLocation.java
@@ -67,7 +67,9 @@ public class GeoLocation {
ourAltitude = loc.getAltitude();
ourAccuracy = loc.getAccuracy();
ourSpeed = loc.getSpeed();
- ourSatellities = getImpl().getSatellities();
+ //The way of getiing satellites is changed
+ //ourSatellities = getImpl().getSatellities();
+ ourSatellities = loc.getExtras().getInt("satellites");
ourIsKnownPosition = true;
}
else {
@@ -194,7 +196,7 @@ public class GeoLocation {
onUpdateLocation();
try {
checkState();
- Logger.T(TAG, "getSpeed");
+ Logger.T(TAG, "getSatellities");
return ourSatellities;
}
        catch (Exception e) { | The way to get the number of satellites was changed
The way to get satellites was changed because the previous approach always returned 0 | rhomobile_rhodes | train
0e5b7596c7564d961c225a495943edcb6de7df94 | diff --git a/lib/server.js b/lib/server.js
index <HASH>..<HASH> 100644
--- a/lib/server.js
+++ b/lib/server.js
@@ -441,7 +441,7 @@ function respond(params, requestContext, renderer, renderedView, include, includ
viewContext.include[params.route.controller] = renderView(params.route.controller, params.route.view, 'html', viewContext);
}
// If debugging is enabled, append the debug output to viewContext
- if ( CTZN.config.citizen.mode === 'debug' || ( CTZN.config.citizen.mode === 'development' && params.url.debug ) ) {
+ if ( CTZN.config.citizen.mode === 'debug' || ( CTZN.config.citizen.mode === 'development' && params.url.ctzn_debug ) ) {
viewContext.debugOutput = debug(requestContext, params);
}
break; | Fixed a bug in the debugger. Oh, the irony. | jaysylvester_citizen | train |
109f7a3b7f00eac113fac9db45e772a76fa60c20 | diff --git a/src/com/google/javascript/jscomp/transpile/CachingTranspiler.java b/src/com/google/javascript/jscomp/transpile/CachingTranspiler.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/transpile/CachingTranspiler.java
+++ b/src/com/google/javascript/jscomp/transpile/CachingTranspiler.java
@@ -23,6 +23,7 @@ import com.google.common.base.Suppliers;
import com.google.common.cache.CacheBuilder;
import com.google.common.cache.CacheLoader;
import com.google.common.cache.LoadingCache;
+import com.google.common.util.concurrent.UncheckedExecutionException;
import java.nio.file.Path;
import java.util.Objects;
@@ -55,7 +56,18 @@ public final class CachingTranspiler implements Transpiler {
@Override
public TranspileResult transpile(Path path, String code) {
- return cache.getUnchecked(new Key(path, code));
+ try {
+ return cache.getUnchecked(new Key(path, code));
+ } catch (UncheckedExecutionException e) {
+ if (e.getCause() instanceof IllegalStateException) {
+ // If transpilation fails due to a parse error we can get an UncheckedExecutionException.
+ // This is because BaseTranspiler wraps the parse error as an IllegalStateException.
+ // TODO(joeltine): This would probably better as its own checked exception.
+ throw new IllegalStateException(e);
+ } else {
+ throw e;
+ }
+ }
}
@Override | Catch UncheckedExecutionException from transpilation. These can occur with parse errors. We rethrow the Exception as an IllegalStateException to match expectations in the consumers of the transpiler. See bug for an example stack.
-------------
Created by MOE: <URL> | google_closure-compiler | train |
c39f30fdf3342e81a9825fe7c547f69a8e4e9699 | diff --git a/window.py b/window.py
index <HASH>..<HASH> 100644
--- a/window.py
+++ b/window.py
@@ -82,16 +82,22 @@ class HelperHud(Subscriber):
super(HelperHud, self).__init__(channel)
self.client = client
self.show_cell_masses = False
+ self.show_remerge_times = False
self.cell_masses_key = ord('h')
+ self.remerge_times_key = ord('h')
def on_key_pressed(self, val, char):
if val == self.cell_masses_key:
self.show_cell_masses = not self.show_cell_masses
+ if val == self.remerge_times_key:
+ self.show_remerge_times = not self.show_remerge_times
def on_draw(self, c, w):
p = self.client.player
if self.show_cell_masses:
self.draw_cell_masses(c, w, p)
+ if self.show_remerge_times:
+ self.draw_remerge_times(c, w, p)
def draw_cell_masses(self, c, w, p):
for cell in p.world.cells.values():
@@ -103,6 +109,19 @@ class HelperHud(Subscriber):
text = '%i mass' % cell.mass
draw_text_center(c, pos, text)
+ def draw_remerge_times(self, c, w, p):
+ if len(p.own_ids) <= 1:
+ return # dead or only one cell, no remerge time to display
+ now = time()
+ for cell in p.own_cells:
+ split_for = now - cell.split_time
+ # formula by DebugMonkey
+ ttr = (p.total_mass * 20 + 30000) / 1000 - split_for
+ if ttr < 0: continue
+ pos = w.world_to_screen_pos(cell.pos)
+ text = 'TTR %.1fs after %.1fs' % (ttr, split_for)
+ draw_text_center(c, Vec(0, -12).iadd(pos), text)
+
def format_log(lines, width, indent=' '):
width = int(width) | Show remerge times, formula by @RealDebugMonkey | Gjum_agarnet | train |
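A worked example of the remerge-time formula credited above (not part of the commit; the mass and split time are hypothetical):

```python
# ttr = (total_mass * 20 + 30000) / 1000 - split_for
total_mass = 500      # combined mass of all own cells (made up)
split_for = 12.5      # seconds since this cell split (made up)
ttr = (total_mass * 20 + 30000) / 1000 - split_for
print('TTR %.1fs after %.1fs' % (ttr, split_for))  # -> TTR 27.5s after 12.5s
```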
8e6a0817f7915ebb5cfc37bb4188e7c5d9ec14a2 | diff --git a/src/main/java/com/agapsys/security/web/WebSecurity.java b/src/main/java/com/agapsys/security/web/WebSecurity.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/agapsys/security/web/WebSecurity.java
+++ b/src/main/java/com/agapsys/security/web/WebSecurity.java
@@ -45,15 +45,29 @@ public class WebSecurity extends Security {
}
public static User getCurrentUser() {
- return ((WebSecurityManager)getSecurityManager()).getCurrentUser();
+ WebSecurityManager securityManager = getSecurityManager();
+ if (securityManager != null)
+ return securityManager.getCurrentUser();
+
+ return null;
}
public static void setCurrentUser(User user) {
- ((WebSecurityManager)getSecurityManager()).setCurrentUser(user);
+ WebSecurityManager securityManager = getSecurityManager();
+
+ if (securityManager == null)
+ throw new RuntimeException("There is no security manager");
+
+ securityManager.setCurrentUser(user);
}
public static void unregisterCurrentUser() {
- ((WebSecurityManager)getSecurityManager()).unregisterCurrentUser();
+ WebSecurityManager securityManager = getSecurityManager();
+
+ if (securityManager == null)
+ throw new RuntimeException("There is no security manager");
+
+ securityManager.unregisterCurrentUser();
}
// ========================================================================= | Added check for null security manager | agapsys_web-security-framework | train |
e344555d97be7f490852dc5957b79660049fb03b | diff --git a/__tests__/tooling/operation/get-parameters-as-json-schema.test.js b/__tests__/tooling/operation/get-parameters-as-json-schema.test.js
index <HASH>..<HASH> 100644
--- a/__tests__/tooling/operation/get-parameters-as-json-schema.test.js
+++ b/__tests__/tooling/operation/get-parameters-as-json-schema.test.js
@@ -1664,4 +1664,33 @@ describe('example support', () => {
},
});
});
+
+ it('should not bug out if `examples` is an empty object', () => {
+ const oas = new Oas({
+ paths: {
+ '/': {
+ post: {
+ requestBody: {
+ content: {
+ 'application/json': {
+ schema: {
+ type: 'object',
+ properties: {
+ limit: {
+ type: 'integer',
+ },
+ },
+ },
+ examples: {},
+ },
+ },
+ },
+ },
+ },
+ },
+ });
+
+ const schema = oas.operation('/', 'post').getParametersAsJsonSchema();
+ expect(schema[0].schema).toStrictEqual({ type: 'object', properties: { limit: { type: 'integer' } } });
+ });
});
diff --git a/tooling/operation/get-parameters-as-json-schema.js b/tooling/operation/get-parameters-as-json-schema.js
index <HASH>..<HASH> 100644
--- a/tooling/operation/get-parameters-as-json-schema.js
+++ b/tooling/operation/get-parameters-as-json-schema.js
@@ -1,3 +1,4 @@
+/* eslint-disable no-continue */
// This library is built to translate OpenAPI schemas into schemas compatible with `@readme/oas-form`, and should
// not at this time be used for general purpose consumption.
const jsonpointer = require('jsonpointer');
@@ -117,7 +118,20 @@ function searchForExampleByPointer(pointer, examples = []) {
if ('example' in schema) {
schema = schema.example;
} else {
- schema = schema.examples[Object.keys(schema.examples).shift()].value;
+ const keys = Object.keys(schema.examples);
+ if (!keys.length) {
+ continue;
+ }
+
+ // Prevent us from crashing if `examples` is a completely empty object.
+ const ex = schema.examples[keys.shift()];
+ if (typeof ex !== 'object' || Array.isArray(ex)) {
+ continue;
+ } else if (!('value' in ex)) {
+ continue;
+ }
+
+ schema = ex.value;
}
try { | fix: prevent us from crashing if `examples` is an empty object (#<I>) | readmeio_oas | train |
ac6046c2cfccb502e54b089eefb455b74d1b1393 | diff --git a/salt/client/mixins.py b/salt/client/mixins.py
index <HASH>..<HASH> 100644
--- a/salt/client/mixins.py
+++ b/salt/client/mixins.py
@@ -281,23 +281,23 @@ class SyncClientMixin(object):
func_globals['__jid_event__'].fire_event(data, 'new')
- # Inject some useful globals to *all* the funciton's global namespace
- # only once per module-- not per func
- completed_funcs = []
- for mod_name in six.iterkeys(self.functions):
- mod, _ = mod_name.split('.', 1)
- if mod in completed_funcs:
- continue
- completed_funcs.append(mod)
- for global_key, value in six.iteritems(func_globals):
- if six.PY3:
- self.functions[fun].__globals__[global_key] = value
- else:
- self.functions[fun].func_globals[global_key] = value # pylint: disable=incompatible-py3-code
-
try:
self._verify_fun(fun)
+ # Inject some useful globals to *all* the funciton's global namespace
+ # only once per module-- not per func
+ completed_funcs = []
+ for mod_name in six.iterkeys(self.functions):
+ mod, _ = mod_name.split('.', 1)
+ if mod in completed_funcs:
+ continue
+ completed_funcs.append(mod)
+ for global_key, value in six.iteritems(func_globals):
+ if six.PY3:
+ self.functions[fun].__globals__[global_key] = value
+ else:
+ self.functions[fun].func_globals[global_key] = value # pylint: disable=incompatible-py3-code
+
# There are some descrepencies of what a "low" structure is
# in the publisher world it is a dict including stuff such as jid,
# fun, arg (a list of args, with kwargs packed in). Historically | Inject globals after verifying the function-- because it might not exist! | saltstack_salt | train |
c3e92cdcb802cc5e7d91b148506cde620bf6a806 | diff --git a/src/Charcoal/Property/FileProperty.php b/src/Charcoal/Property/FileProperty.php
index <HASH>..<HASH> 100644
--- a/src/Charcoal/Property/FileProperty.php
+++ b/src/Charcoal/Property/FileProperty.php
@@ -448,7 +448,7 @@ class FileProperty extends AbstractProperty
return '';
} else {
if (class_exists('\Charcoal\App\App')) {
- $basePath = \Charcoal\App\App::instance()->config()->get('ROOT');
+ $basePath = \Charcoal\App\App::instance()->config()->get('base_path');
$target = str_replace($basePath, '', $target);
}
@@ -488,7 +488,7 @@ class FileProperty extends AbstractProperty
} else {
$this->logger->notice(sprintf('File %s uploaded succesfully', $target));
if (class_exists('\Charcoal\App\App')) {
- $basePath = \Charcoal\App\App::instance()->config()->get('ROOT');
+ $basePath = \Charcoal\App\App::instance()->config()->get('base_path');
$target = str_replace($basePath, '', $target);
}
@@ -504,7 +504,7 @@ class FileProperty extends AbstractProperty
public function uploadTarget($filename = null)
{
if (class_exists('\Charcoal\App\App')) {
- $basePath = \Charcoal\App\App::instance()->config()->get('ROOT');
+ $basePath = \Charcoal\App\App::instance()->config()->get('base_path');
} else {
$basePath = '';
} | Replaced 'ROOT' with 'base_path' | locomotivemtl_charcoal-property | train |
697873ac7729cdd7b3366853b2df9112395494d7 | diff --git a/pyinfra/facts/files.py b/pyinfra/facts/files.py
index <HASH>..<HASH> 100644
--- a/pyinfra/facts/files.py
+++ b/pyinfra/facts/files.py
@@ -2,6 +2,8 @@ from __future__ import unicode_literals
import re
+from six.moves import shlex_quote
+
from pyinfra.api.facts import FactBase
from .util.files import parse_ls_output
@@ -63,8 +65,10 @@ class FindInFile(FactBase):
def command(self, name, pattern):
self.name = name
+ pattern = shlex_quote(pattern)
+
return (
- 'grep "{0}" {1} 2> /dev/null || '
+ 'grep {0} {1} 2> /dev/null || '
'(find {1} -type f > /dev/null && echo "__pyinfra_exists_{1}")'
        ).format(pattern, name).strip() | Properly quote patterns before passing to grep in `find_in_file` fact. | Fizzadar_pyinfra | train
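An illustration (not pyinfra code, assumes the six package as in the diff) of what the quoting protects against: an unquoted pattern containing spaces would split into several grep arguments. The pattern and path are made up:

```python
from six.moves import shlex_quote

pattern = "PasswordAuthentication no"
print('grep {0} /etc/ssh/sshd_config'.format(pattern))
# -> grep PasswordAuthentication no /etc/ssh/sshd_config   (two arguments!)
print('grep {0} /etc/ssh/sshd_config'.format(shlex_quote(pattern)))
# -> grep 'PasswordAuthentication no' /etc/ssh/sshd_config  (one argument)
```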
5566727efb386059386c03659499f3e1985af38e | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -61,11 +61,11 @@ module.exports = function(file, options, cb) {
});
jobs = toShift.map(function(parts, i) {
+ var target = parts.join('.');
if (count !== null && toShift.length > count && i <= (toShift.length - count)) {
return remove(target);
}
- var target = parts.join('.');
// increment the log file index
for (var j = parts.length; j >= 0; j--) {
if ( isNaN(parts[j]) === false ) { parts[j] = +parts[j] + 1; break; }
diff --git a/test/rotate.js b/test/rotate.js
index <HASH>..<HASH> 100644
--- a/test/rotate.js
+++ b/test/rotate.js
@@ -26,15 +26,16 @@ test('moves rotated file to zero index', function(t) {
});
});
-test.only('increments previously rotated files', function(t) {
+test('increments previously rotated files', function(t) {
var file = make()
, count = 20
+ , keep = 18
, rotated = 0;
- t.plan(count);
+ t.plan(keep);
function done() {
var name = '', files = [];
- for(var i = 0, l = count; i < l; i++) {
+ for(var i = 0, l = keep; i < l; i++) {
files.push(name = file +'.'+ i);
t.assert(fs.existsSync(name), 'index '+ [i] +' should exist');
}
@@ -42,7 +43,7 @@ test.only('increments previously rotated files', function(t) {
}
(function next() {
- rotate(make(file), { count: 20 }, function(err, r) {
+ rotate(make(file), { count: keep }, function(err, r) {
if (err) throw err;
if (++rotated === count) return done();
next(); | fix removal of extraneous files on rotation | dstokes_log-rotate | train |
7e86cbabe881e7db4087625a7f6b1ec1543972ed | diff --git a/tweepy/models.py b/tweepy/models.py
index <HASH>..<HASH> 100644
--- a/tweepy/models.py
+++ b/tweepy/models.py
@@ -227,7 +227,7 @@ class User(Model):
def __hash__(self):
if hasattr(self, 'id'):
- return hash(self.id)
+ return self.id
else:
            raise TypeError('unhashable type: {} (no id attribute)'.format(type(self))) | use id as hash since it's an integer | tweepy_tweepy | train
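A small illustration (not tweepy code, class and ids are made up) of why returning the integer id directly is a valid __hash__: objects wrapping the same id collapse to a single entry in sets and dicts:

```python
class User(object):
    def __init__(self, id):
        self.id = id
    def __eq__(self, other):
        return isinstance(other, User) and self.id == other.id
    def __hash__(self):
        return self.id

print(len({User(12345), User(12345), User(67890)}))  # -> 2
```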
c7ab653b44e186d3306821083a8e42f8d7b5f4d5 | diff --git a/go/kbfs/libkbfs/folder_branch_ops.go b/go/kbfs/libkbfs/folder_branch_ops.go
index <HASH>..<HASH> 100644
--- a/go/kbfs/libkbfs/folder_branch_ops.go
+++ b/go/kbfs/libkbfs/folder_branch_ops.go
@@ -7956,6 +7956,8 @@ func (fbo *folderBranchOps) ClearPrivateFolderMD(ctx context.Context) {
fbo.cancelEdits = nil
}
fbo.editHistory = kbfsedits.NewTlfHistory()
+ // Allow the edit monitor to be re-launched later whenever the
+ // MD is set again.
fbo.launchEditMonitor = sync.Once{}
fbo.convLock.Lock()
defer fbo.convLock.Unlock() | folder_branch_ops: add comment when resetting edit monitor state
Suggested by jakob<I>.
Issue: #<I> | keybase_client | train |
0ae84cbb817fde80d371c454b69531226e5edc49 | diff --git a/src/com/esotericsoftware/yamlbeans/DeferredConstruction.java b/src/com/esotericsoftware/yamlbeans/DeferredConstruction.java
index <HASH>..<HASH> 100644
--- a/src/com/esotericsoftware/yamlbeans/DeferredConstruction.java
+++ b/src/com/esotericsoftware/yamlbeans/DeferredConstruction.java
@@ -31,18 +31,25 @@ import com.esotericsoftware.yamlbeans.Beans.Property;
class DeferredConstruction {
private final Constructor constructor;
private final String[] parameterNames;
- private final Object[] parameterValues;
+ private final ParameterValue[] parameterValues;
private final List<PropertyValue> propertyValues = new ArrayList(16);
public DeferredConstruction (Constructor constructor, String[] parameterNames) {
this.constructor = constructor;
this.parameterNames = parameterNames;
- parameterValues = new Object[parameterNames.length];
+ parameterValues = new ParameterValue[parameterNames.length];
}
public Object construct () throws InvocationTargetException {
try {
- Object object = constructor.newInstance(parameterValues);
+ Object[] parameters = new Object[parameterValues.length];
+ int i = 0;
+ for (ParameterValue parameter : parameterValues) {
+ if (parameter == null)
+ throw new InvocationTargetException(new YamlException("Missing constructor property: " + parameterNames[i]));
+ parameters[i++] = parameter.value;
+ }
+ Object object = constructor.newInstance(parameters);
for (PropertyValue propertyValue : propertyValues)
propertyValue.property.set(object, propertyValue.value);
return object;
@@ -56,7 +63,9 @@ class DeferredConstruction {
int index = 0;
for (String name : parameterNames) {
if (property.getName().equals(name)) {
- parameterValues[index] = value;
+ ParameterValue parameterValue = new ParameterValue();
+ parameterValue.value = value;
+ parameterValues[index] = parameterValue;
return;
}
index++;
@@ -78,4 +87,8 @@ class DeferredConstruction {
Property property;
Object value;
}
+
+ static class ParameterValue {
+ Object value;
+ }
} | Better exception for missing constructor properties. | EsotericSoftware_yamlbeans | train |
5460f198c232d1ab3fe3d417159a4fac6644910c | diff --git a/lib/el_finder_s3/connector.rb b/lib/el_finder_s3/connector.rb
index <HASH>..<HASH> 100755
--- a/lib/el_finder_s3/connector.rb
+++ b/lib/el_finder_s3/connector.rb
@@ -78,7 +78,7 @@ module ElFinderS3
def run(params)
@adapter = ElFinderS3::Adapter.new(@options[:server], @options[:cache_connector])
- @root = ElFinderS3::Pathname.new(adapter)
+ @root = ElFinderS3::Pathname.new(adapter, @options[:root]) #Change - Pass the root dir here
begin
@params = params.dup
diff --git a/lib/el_finder_s3/pathname.rb b/lib/el_finder_s3/pathname.rb
index <HASH>..<HASH> 100755
--- a/lib/el_finder_s3/pathname.rb
+++ b/lib/el_finder_s3/pathname.rb
@@ -44,7 +44,7 @@ module ElFinderS3
if other.is_a? ::ElFinderS3::Pathname
other = other.path
end
- self.class.new(@adapter, @path + other)
+ self.class.new(@adapter, (other.include?(@path.to_s) ? other : @path + other) ) #Change - last child is not showing up because of root dir added twice in the path
end
  # of + | Fixed root dir issue: the last child was not showing up because the root dir was added twice in the path | raulanatol_el_finder_s3 | train
82ab4dbf697e5113be056ab62c7a9ae77a597672 | diff --git a/src/sample/index.php b/src/sample/index.php
index <HASH>..<HASH> 100644
--- a/src/sample/index.php
+++ b/src/sample/index.php
@@ -1,3 +1,24 @@
+<?php
+
+ini_set('display_errors', 1);
+
+require_once(dirname(__FILE__) . '/../lib/YandexMoney.php');
+require_once(dirname(__FILE__) . '/consts.php');
+
+$code = $_GET['code'];
+if (!isset($code)) { // If we are just begginig OAuth
+ $scope = "account-info " .
+ "operation-history " .
+ "operation-details " .
+ "payment.to-account(\"410011161616877\",\"account\").limit(30,10) " .
+ "payment.to-pattern(\"337\").limit(30,10) " .
+ "money-source(\"wallet\",\"card\") ";
+ $authUri = YandexMoneyNew::authorizeUri(CLIENT_ID, REDIRECT_URI, $scope);
+ header('Location: ' . $authUri);
+
+} else { // when we recieved a temporary code on redirect
+ ?>
+
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
@@ -10,23 +31,6 @@
<?php
- ini_set('display_errors', 1);
-
- require_once(dirname(__FILE__) . '/../lib/YandexMoney.php');
- require_once(dirname(__FILE__) . '/consts.php');
-
- $code = $_GET['code'];
- if (!isset($code)) { // If we are just begginig OAuth
- $scope = "account-info " .
- "operation-history " .
- "operation-details " .
- "payment.to-account(\"410011161616877\",\"account\").limit(30,10) " .
- "payment.to-pattern(\"337\").limit(30,10) " .
- "money-source(\"wallet\",\"card\") ";
- $authUri = YandexMoneyNew::authorizeUri(CLIENT_ID, REDIRECT_URI, $scope);
- header('Location: ' . $authUri);
-
- } else { // when we recieved a temporary code on redirect
$ym = new YandexMoneyNew(CLIENT_ID);
        $receiveTokenResp = $ym->receiveOAuthToken($code, REDIRECT_URI, CLIENT_SECRET); | HTML output moved after the header() call | romkavt_yandex-money-sdk-php | train
04fa8b045844da6830ffcfc87f137eed3d25d465 | diff --git a/i3pystatus/load.py b/i3pystatus/load.py
index <HASH>..<HASH> 100644
--- a/i3pystatus/load.py
+++ b/i3pystatus/load.py
@@ -1,5 +1,8 @@
from i3pystatus import IntervalModule
-from os import cpu_count
+try:
+ from os import cpu_count
+except ImportError:
+ from multiprocessing import cpu_count
class Load(IntervalModule): | add fallback to multiprocessing.cpu_count because os.cpu_count is not available before <I> | enkore_i3pystatus | train |
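The same fallback pattern in isolation (illustrative only): prefer os.cpu_count, which exists on Python 3.4+, and fall back to multiprocessing.cpu_count on older interpreters:

```python
try:
    from os import cpu_count
except ImportError:
    from multiprocessing import cpu_count

print(cpu_count())  # number of CPUs, e.g. 8 on an eight-core machine
```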
76e8a067d17a2dc087e756047611f592f60caee8 | diff --git a/cmd/kubeadm/app/apis/kubeadm/v1beta2/doc.go b/cmd/kubeadm/app/apis/kubeadm/v1beta2/doc.go
index <HASH>..<HASH> 100644
--- a/cmd/kubeadm/app/apis/kubeadm/v1beta2/doc.go
+++ b/cmd/kubeadm/app/apis/kubeadm/v1beta2/doc.go
@@ -176,12 +176,12 @@ limitations under the License.
// effect: "NoSchedule"
// kubeletExtraArgs:
// v: 4
-// ignorePreflightErrors:
-// - IsPrivilegedUser
+// ignorePreflightErrors:
+// - IsPrivilegedUser
// localAPIEndpoint:
// advertiseAddress: "10.100.0.1"
// bindPort: 6443
-// certificateKey: "e6a2eb8581237ab72a4f494f30285ec12a9694d750b9785706a83bfcbbbd2204"
+// certificateKey: "e6a2eb8581237ab72a4f494f30285ec12a9694d750b9785706a83bfcbbbd2204"
// ---
// apiVersion: kubeadm.k8s.io/v1beta2
// kind: ClusterConfiguration
diff --git a/cmd/kubeadm/app/apis/kubeadm/v1beta3/doc.go b/cmd/kubeadm/app/apis/kubeadm/v1beta3/doc.go
index <HASH>..<HASH> 100644
--- a/cmd/kubeadm/app/apis/kubeadm/v1beta3/doc.go
+++ b/cmd/kubeadm/app/apis/kubeadm/v1beta3/doc.go
@@ -180,15 +180,15 @@ limitations under the License.
// effect: "NoSchedule"
// kubeletExtraArgs:
// v: 4
-// ignorePreflightErrors:
-// - IsPrivilegedUser
-// imagePullPolicy: "IfNotPresent"
+// ignorePreflightErrors:
+// - IsPrivilegedUser
+// imagePullPolicy: "IfNotPresent"
// localAPIEndpoint:
// advertiseAddress: "10.100.0.1"
// bindPort: 6443
-// certificateKey: "e6a2eb8581237ab72a4f494f30285ec12a9694d750b9785706a83bfcbbbd2204"
-// skipPhases:
-// - addon/kube-proxy
+// certificateKey: "e6a2eb8581237ab72a4f494f30285ec12a9694d750b9785706a83bfcbbbd2204"
+// skipPhases:
+// - addon/kube-proxy
// ---
// apiVersion: kubeadm.k8s.io/v1beta3
// kind: ClusterConfiguration | kubeadm: fix bad indentation in the API go docs
For the YAML examples, make the indentation consistent
by starting with a space and following with a TAB.
Also adjust the indentation of some fields to place them under
the right YAML field parent - e.g. ignorePreflightErrors
is under nodeRegistration. | kubernetes_kubernetes | train |
48783e5063ca945b2537a2d4ab6ec816686dad31 | diff --git a/packages/insomnia-app/app/ui/components/activity-toggle.js b/packages/insomnia-app/app/ui/components/activity-toggle.js
index <HASH>..<HASH> 100644
--- a/packages/insomnia-app/app/ui/components/activity-toggle.js
+++ b/packages/insomnia-app/app/ui/components/activity-toggle.js
@@ -25,6 +25,7 @@ export default function ActivityToggle({ activity, handleActivityChange, workspa
name="activity-toggle"
onChange={a => handleActivityChange(workspace._id, a)}
choices={choices}
+ selectedValue={activity}
/>
);
}
diff --git a/packages/insomnia-components/components/multi-switch.stories.js b/packages/insomnia-components/components/multi-switch.stories.js
index <HASH>..<HASH> 100644
--- a/packages/insomnia-components/components/multi-switch.stories.js
+++ b/packages/insomnia-components/components/multi-switch.stories.js
@@ -3,15 +3,24 @@ import MultiSwitch from './multi-switch';
export default { title: 'Navigation | Sliding Switch' };
-export const _default = () => (
- <MultiSwitch
- name="activity"
- defaultValue="debug"
- onChange={v => console.log(v)}
- choices={[
- { label: 'Design', value: 'design' },
- { label: 'Debug', value: 'debug' },
- { label: 'Test', value: 'test' },
- ]}
- />
-);
+export const _default = () => {
+ const defaultValue = 'debug';
+ const [selectedValue, setSelectedValue] = React.useState(defaultValue);
+ const onChangeHandler = v => {
+ console.log(v);
+ setSelectedValue(v);
+ };
+ return (
+ <MultiSwitch
+ name="activity"
+ defaultValue={defaultValue}
+ onChange={onChangeHandler}
+ choices={[
+ { label: 'Design', value: 'design' },
+ { label: 'Debug', value: 'debug' },
+ { label: 'Test', value: 'test' },
+ ]}
+ selectedValue={selectedValue}
+ />
+ );
+};
diff --git a/packages/insomnia-components/components/radio-button-group.js b/packages/insomnia-components/components/radio-button-group.js
index <HASH>..<HASH> 100644
--- a/packages/insomnia-components/components/radio-button-group.js
+++ b/packages/insomnia-components/components/radio-button-group.js
@@ -11,6 +11,7 @@ export type Props = {
value: string,
}>,
className?: string,
+ selectedValue: string,
};
const StyledRadioButtonGroup: React.ComponentType<{}> = styled.div`
@@ -53,6 +54,7 @@ export default function RadioButtonGroup({
defaultValue,
onChange,
className,
+ selectedValue,
}: Props) {
const handleChange = e => {
if (typeof onChange !== 'function') {
@@ -71,6 +73,7 @@ export default function RadioButtonGroup({
name={name}
value={value}
defaultChecked={defaultValue === value}
+ checked={selectedValue === value}
onChange={handleChange}
/>
<span>{label}</span>
diff --git a/packages/insomnia-components/components/radio-button-group.stories.js b/packages/insomnia-components/components/radio-button-group.stories.js
index <HASH>..<HASH> 100644
--- a/packages/insomnia-components/components/radio-button-group.stories.js
+++ b/packages/insomnia-components/components/radio-button-group.stories.js
@@ -5,16 +5,26 @@ import RadioButtonGroup from './radio-button-group';
export default { title: 'Navigation | Radio Button Group' };
-export const _default = () => (
- <RadioButtonGroup
- name="dummy"
- defaultValue="scratch"
- onChange={v => console.log(v)}
- choices={[
- { label: 'From Scratch', value: 'scratch' },
- { label: 'From Repository', value: 'repo' },
- { label: 'From Clipboard', value: 'clip' },
- { label: 'From Spec', value: 'spec' },
- ]}
- />
-);
+export const _default = () => {
+ const defaultValue = 'scratch';
+ const [selectedValue, setSelectedValue] = React.useState(defaultValue);
+ const onChangeHandler = v => {
+ console.log(v);
+ setSelectedValue(v);
+ };
+
+ return (
+ <RadioButtonGroup
+ name="dummy"
+ defaultValue={defaultValue}
+ onChange={onChangeHandler}
+ choices={[
+ { label: 'From Scratch', value: 'scratch' },
+ { label: 'From Repository', value: 'repo' },
+ { label: 'From Clipboard', value: 'clip' },
+ { label: 'From Spec', value: 'spec' },
+ ]}
+ selectedValue={selectedValue}
+ />
+ );
+}; | fix: made radio-button-group a controlled component (#<I>) | getinsomnia_insomnia | train |
c98b15506ca332932eaa51a06aeeefee46fa9e66 | diff --git a/orb/testing/mock_connection.py b/orb/testing/mock_connection.py
index <HASH>..<HASH> 100644
--- a/orb/testing/mock_connection.py
+++ b/orb/testing/mock_connection.py
@@ -3,6 +3,7 @@ Defines a mock backend database connection
"""
import orb
+import logging
from collections import defaultdict
@@ -14,6 +15,7 @@ class MockConnection(orb.Connection):
self.counter = defaultdict(lambda: 0)
self.responses = responses or {}
self.base_connection = base
+ self.log = logging.getLogger(__name__)
def onSync(self, event):
assert isinstance(event, orb.events.SyncEvent)
@@ -146,7 +148,7 @@ class MockConnection(orb.Connection):
:return <bool>
"""
# validate inputs
- assert isinstance(records, orb.Collection)
+ assert isinstance(records, (orb.Collection, list))
assert isinstance(context, orb.Context)
# return the desired response
@@ -179,8 +181,10 @@ class MockConnection(orb.Connection):
:return: <variant>
"""
- self.counter['all'] += 1
- self.counter[method] += 1
+ if self.log.propagate:
+ self.log.info('{0}{1}'.format(method, args))
+ self.counter['all'] += 1
+ self.counter[method] += 1
resp = self.responses.get(method)
@@ -265,7 +269,7 @@ class MockConnection(orb.Connection):
:return <bool>
"""
- assert isinstance(records, orb.Context)
+ assert isinstance(records, (orb.Collection, list))
assert isinstance(context, orb.Context)
return self.next_response('update', records, context) | * added logging propagation to mock connections | orb-framework_orb | train |
bf42d5715bf1adba9e4ebea35defbefacdff4f3d | diff --git a/autograd/numpy/numpy_extra.py b/autograd/numpy/numpy_extra.py
index <HASH>..<HASH> 100644
--- a/autograd/numpy/numpy_extra.py
+++ b/autograd/numpy/numpy_extra.py
@@ -119,7 +119,9 @@ else:
arraycast.defgrad(lambda ans, val: lambda g : g)
def _is_basic(idx):
- return isinstance(idx, (int, slice)) or idx in [np.newaxis, Ellipsis]
+ """Returns True iff idx is a single basic (i.e., not fancy) index (and
+ therefore doesn't have any repeated elements)."""
+ return isinstance(idx, (int, slice)) or idx is np.newaxis or idx is Ellipsis
@primitive
def primitive_sum_arrays(*arrays):
@@ -128,8 +130,10 @@ def primitive_sum_arrays(*arrays):
if isinstance(array, SparseArray):
if (_is_basic(array.idx) or
isinstance(array.idx, tuple) and all(_is_basic(i) for i in array.idx)):
+ # Faster than np.add.at
new_array[array.idx] += array.val
else:
+ # Safe even if array.idx has repeated elements
np.add.at(new_array, array.idx, array.val)
else:
new_array += array | fixed failing tests (sorry about that) | HIPS_autograd | train |
8264dc0a12a56a15c9ebb098bd35dcdd472bb829 | diff --git a/cmd/minikube/cmd/start.go b/cmd/minikube/cmd/start.go
index <HASH>..<HASH> 100644
--- a/cmd/minikube/cmd/start.go
+++ b/cmd/minikube/cmd/start.go
@@ -188,7 +188,9 @@ func runStart(cmd *cobra.Command, args []string) {
}
selectedKubernetesVersion := viper.GetString(kubernetesVersion)
-
+ if strings.Compare(selectedKubernetesVersion, "") == 0 {
+ selectedKubernetesVersion = constants.DefaultKubernetesVersion
+ }
// Load profile cluster config from file
cc, err := loadConfigFromFile(viper.GetString(cfg.MachineProfile))
if err != nil && !os.IsNotExist(err) { | fix issue where version was not being set | kubernetes_minikube | train |
07619517939f55d721a8835c8b43a6ff5b8a057b | diff --git a/src/StreamingServer.php b/src/StreamingServer.php
index <HASH>..<HASH> 100644
--- a/src/StreamingServer.php
+++ b/src/StreamingServer.php
@@ -325,6 +325,13 @@ final class StreamingServer extends EventEmitter
/** @internal */
public function handleResponse(ConnectionInterface $connection, ServerRequestInterface $request, ResponseInterface $response)
{
+ // return early and close response body if connection is already closed
+ $body = $response->getBody();
+ if (!$connection->isWritable()) {
+ $body->close();
+ return;
+ }
+
$response = $response->withProtocolVersion($request->getProtocolVersion());
// assign default "X-Powered-By" header as first for history reasons
@@ -348,8 +355,8 @@ final class StreamingServer extends EventEmitter
$response = $response->withoutHeader('Date');
}
- if (!$response->getBody() instanceof HttpBodyStream) {
- $response = $response->withHeader('Content-Length', (string)$response->getBody()->getSize());
+ if (!$body instanceof HttpBodyStream) {
+ $response = $response->withHeader('Content-Length', (string)$body->getSize());
} elseif (!$response->hasHeader('Content-Length') && $request->getProtocolVersion() === '1.1') {
// assign chunked transfer-encoding if no 'content-length' is given for HTTP/1.1 responses
$response = $response->withHeader('Transfer-Encoding', 'chunked');
@@ -381,7 +388,6 @@ final class StreamingServer extends EventEmitter
// 101 (Switching Protocols) response (for Upgrade request) forwards upgraded data through duplex stream
// 2xx (Successful) response to CONNECT forwards tunneled application data through duplex stream
- $body = $response->getBody();
if (($code === 101 || ($request->getMethod() === 'CONNECT' && $code >= 200 && $code < 300)) && $body instanceof HttpBodyStream && $body->input instanceof WritableStreamInterface) {
if ($request->getBody()->isReadable()) {
// request is still streaming => wait for request close before forwarding following data from connection
@@ -417,11 +423,6 @@ final class StreamingServer extends EventEmitter
return $connection->end();
}
- // close response stream if connection is already closed
- if (!$connection->isWritable()) {
- return $stream->close();
- }
-
$connection->write($headers . "\r\n");
if ($stream->isReadable()) { | Simplify closing response body when connection is already closed | reactphp_http | train |
f53b534479e467fd895975f2b15c63838aa36182 | diff --git a/fabric/utils.py b/fabric/utils.py
index <HASH>..<HASH> 100644
--- a/fabric/utils.py
+++ b/fabric/utils.py
@@ -47,7 +47,7 @@ def isatty(stream):
def get_pty_size():
"""
- Obtain (rows, cols) tuple for sizing a pty on the remote end.
+ Obtain (cols, rows) tuple for sizing a pty on the remote end.
Defaults to 80x24 but will try to detect local (stdout-based) terminal
window size on non-Windows platforms.
@@ -57,8 +57,8 @@ def get_pty_size():
import termios
import struct
- default_rows, default_cols = 24, 80
- rows, cols = default_rows, default_cols
+ default_cols, default_rows = 80, 24
+ cols, rows = default_cols, default_rows
if not win32 and isatty(sys.stdout):
# We want two short unsigned integers (rows, cols)
fmt = 'HH'
@@ -69,7 +69,8 @@ def get_pty_size():
try:
result = fcntl.ioctl(sys.stdout.fileno(), termios.TIOCGWINSZ,
buffer)
- # Unpack buffer back into Python data types
+ # Unpack buffer back into Python data types. (Note: WINSZ gives us
+ # rows-by-cols, instead of cols-by-rows.)
rows, cols = struct.unpack(fmt, result)
# Fall back to defaults if TIOCGWINSZ returns unreasonable values
if rows == 0:
@@ -80,4 +81,4 @@ def get_pty_size():
# Or termios not having a TIOCGWINSZ.
except AttributeError:
pass
- return rows, cols
+ return cols, rows | Switch get_pty_size to use X-by-Y because that's more intuitive.
Fab 1 implementation probably stemmed from the order WINSZ gives
you, but that's just silly; human computer users always think
in terms of X by Y. | fabric_fabric | train |
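A small illustration (not Fabric code) of the swap described above: TIOCGWINSZ reports the window size as (rows, cols), while callers here want the more intuitive (cols, rows). The 24x80 values are made up:

```python
import struct

winsz = struct.pack('HH', 24, 80)        # what the ioctl would hand back
rows, cols = struct.unpack('HH', winsz)  # rows-by-cols, as the kernel reports
print((cols, rows))                      # -> (80, 24), cols-by-rows for callers
```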
f10bd9d3a4c686507c4ae251a21800a0e7c575ed | diff --git a/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileInputFormat.java b/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileInputFormat.java
index <HASH>..<HASH> 100644
--- a/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileInputFormat.java
+++ b/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileInputFormat.java
@@ -17,7 +17,6 @@ package org.ojai.json.mapreduce;
import java.io.IOException;
-import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.mapreduce.InputSplit;
@@ -37,19 +36,12 @@ public class JSONFileInputFormat extends FileInputFormat<LongWritable, Document>
}
+ /**
+ * Input JSON files can not be split while processing in M/R jobs.
+ */
@Override
public boolean isSplitable(JobContext context, Path path) {
-
- /*
- * define a config parameter to determine if we want to make it not
- * splittable.
- */
- Configuration conf = context.getConfiguration();
- if (conf.get("jsonfileinputformat.nosplit") != null) {
- return false;
- }
-
- return true;
+ return false;
}
}
diff --git a/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileRecordReader.java b/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileRecordReader.java
index <HASH>..<HASH> 100644
--- a/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileRecordReader.java
+++ b/map-reduce/src/main/java/org/ojai/json/mapreduce/JSONFileRecordReader.java
@@ -16,7 +16,6 @@
package org.ojai.json.mapreduce;
import java.io.IOException;
-import java.io.InputStreamReader;
import java.util.Iterator;
import org.apache.hadoop.conf.Configuration;
@@ -43,7 +42,6 @@ public class JSONFileRecordReader extends RecordReader<LongWritable, Document> {
private long currentPos;
private long start;
private long end;
- private long blockLength;
@Override
public void close() throws IOException {
@@ -76,33 +74,6 @@ public class JSONFileRecordReader extends RecordReader<LongWritable, Document> {
return Math.min(1.0F, (float)(currentPos - start) / (float)(end - start));
}
- private long bytesToSkip(long start, long blockLength)
- throws IOException {
- long toSkip = 0;
- inputStream.seek(start - 1);
-
- //create InputStreamReader
- InputStreamReader in = new InputStreamReader(inputStream, "UTF-8");
- boolean gotStart = false;
- char curChar;
- while (toSkip <= blockLength) {
- curChar = (char)in.read();
- if (curChar == '}') {
- gotStart = true;
- }
- if (curChar == '{') {
- if (gotStart) {
- break;
- }
- }
- if (curChar == ',') {
- gotStart = false;
- }
- toSkip += 1;
- }
-
- return toSkip;
- }
@Override
public void initialize(InputSplit arg0, TaskAttemptContext taskContext)
@@ -126,23 +97,8 @@ public class JSONFileRecordReader extends RecordReader<LongWritable, Document> {
FileSystem fs = path.getFileSystem(job);
inputStream = fs.open(path);
-
- /*
- * if this block is not the first block check if it falls on document
- * boundary. If not, skip bytes to start to the next document boundary.
- */
start = split.getStart();
- blockLength = split.getLength();
- long skipBytes = 0;
-
- if (start != 0) {
- /*
- * not the first block check if it starts on a document boundary
- */
- skipBytes = bytesToSkip(start, blockLength);
- currentPos = start - 1 + skipBytes;
- inputStream.seek(currentPos);
- }
+ end = start + split.getLength();
/* Initialize a stream reader so that it can read multiple documents from */
/* the file */
@@ -156,16 +112,12 @@ public class JSONFileRecordReader extends RecordReader<LongWritable, Document> {
public boolean nextKeyValue() throws IOException, InterruptedException {
boolean hasNextKeyVal = false;
- long thisPos = documentStream.getInputStreamPosition();
- if (thisPos >= (start + blockLength)) {
- return false;
- }
-
if (it.hasNext()) {
key.set(documentCount);
document = it.next();
documentCount++;
hasNextKeyVal = true;
+ currentPos = documentStream.getInputStreamPosition();
}
return hasNextKeyVal; | MAPR-<I> : Disable splitting of JSON files in Map-Reduce framework. | ojai_ojai | train |
cace7062773a013bae0ee3fb283905eeabfe74c6 | diff --git a/website/package.json b/website/package.json
index <HASH>..<HASH> 100644
--- a/website/package.json
+++ b/website/package.json
@@ -4,11 +4,13 @@
"version": "1.0.2",
"description": "The documentation website for the Curi router",
"scripts": {
- "prebuild": "node ./scripts/buildSetup & webpack-cli --mode=production",
+ "prebuild": "npm run setupBuild && npm run versions && webpack-cli --mode=production",
"build": "npm run generate",
"dev": "webpack-cli --mode=development --watch & cross-env BABEL_ENV=serve node ./src/server",
"generate": "cross-env BABEL_ENV=serve node ./scripts/build",
- "prettier": "prettier --single-quote --write \"src/**/*.js\""
+ "prettier": "prettier --single-quote --write \"src/**/*.js\"",
+ "setupBuild": "node ./scripts/buildSetup",
+ "versions": "node ./scripts/updatePackageVersions"
},
"repository": {
"type": "git",
diff --git a/website/scripts/build.js b/website/scripts/build.js
index <HASH>..<HASH> 100644
--- a/website/scripts/build.js
+++ b/website/scripts/build.js
@@ -1,7 +1,6 @@
require("@babel/register");
const generateStaticFiles = require("./generateStaticFiles");
-const updatePackageVersions = require("./updatePackageVersions");
const createApp = require("../src/server/app");
const routes = require("../src/client/routes").default;
@@ -22,8 +21,6 @@ const exampleParams = Object.keys(categories)
}, []);
const tutorialNames = tutorials_api.all().map(t => ({ slug: t.slug }));
-updatePackageVersions();
-
let server;
const app = createApp();
server = app.listen("8000", () => {
diff --git a/website/scripts/updatePackageVersions.js b/website/scripts/updatePackageVersions.js
index <HASH>..<HASH> 100644
--- a/website/scripts/updatePackageVersions.js
+++ b/website/scripts/updatePackageVersions.js
@@ -35,7 +35,7 @@ function packageName(pkg) {
return pkg.name.slice(6);
}
-module.exports = function updatePackageVersions() {
+function updatePackageVersions() {
const mappedVersions = getPackagesFromDir(BASE_PATH);
fs.writeFile(
OUTPUT_FILE,
@@ -47,4 +47,6 @@ export default ${JSON.stringify(mappedVersions, null, 2)};\n`,
}
}
);
-};
+}
+
+updatePackageVersions(); | (website) Standalone version updates [ci skip] | pshrmn_curi | train |
68aed8910e14842eddaf069c3d812e0b22b3d21e | diff --git a/km3pipe/pumps/aanet.py b/km3pipe/pumps/aanet.py
index <HASH>..<HASH> 100644
--- a/km3pipe/pumps/aanet.py
+++ b/km3pipe/pumps/aanet.py
@@ -21,6 +21,8 @@ class AanetPump(Pump):
import aa
from ROOT import TFile, Evt
self.filename = self.get('filename')
+ if not self.filename:
+ raise ValueError("No filename defined")
self.index = 0
self.rootfile = TFile(self.filename)
self.evt = Evt() | Raises exception if no filename is given | tamasgal_km3pipe | train |
0b69a26b09394f27de4f45c64a5ec70829fbc121 | diff --git a/lib/duck_map/mapper.rb b/lib/duck_map/mapper.rb
index <HASH>..<HASH> 100644
--- a/lib/duck_map/mapper.rb
+++ b/lib/duck_map/mapper.rb
@@ -35,6 +35,10 @@ module DuckMap
@set.routes.last.sitemap_route_name = last_route_name
@set.routes.last.sitemap_raw_route_name = sitemap_raw_route_name
+ # this is how I am faking to always point to the SitemapController
+ # regardless of namespace
+ @set.routes.last.defaults[:controller] = "sitemap"
+
# determine if we added a duplicate route.
# The gem defines a default sitemap in config/routes.rb (inside the gem, not the app).
# So, it is very likely that most apps will be creating duplicates since most of the code is geared towards | tweaked mapper.rb to have route always point to SitemapController. | jduckett_duck_map | train |
8d3be7aa53de62965ee529f06d25d0d26a34ef64 | diff --git a/cltk/tests/test_cltk.py b/cltk/tests/test_cltk.py
index <HASH>..<HASH> 100644
--- a/cltk/tests/test_cltk.py
+++ b/cltk/tests/test_cltk.py
@@ -331,7 +331,6 @@ class TestSequenceFunctions(unittest.TestCase): # pylint: disable=R0904
def test_tlgu_convert(self):
"""Test TLGU convert.
Note: assertEquals fails on some accented characters ('ή', 'ί').
- TODO: Remove out_test file at end of function.
"""
in_test = os.path.abspath('cltk/tests/tlgu_test_text_beta_code.txt')
out_test = os.path.expanduser('~/cltk_data/tlgu_test_text_unicode.txt')
@@ -339,6 +338,7 @@ class TestSequenceFunctions(unittest.TestCase): # pylint: disable=R0904
t.convert(in_test, out_test)
with open(out_test) as out_file:
new_text = out_file.read()
+ os.remove(out_test)
target = """
βλλον δ' ἀλλλους χαλκρεσιν ἐγχεῃσιν.
""" | rm tlgu test file | cltk_cltk | train |
ef6a737aa960b65d7ee3357f3691aa8e41f47d69 | diff --git a/helpers/blog_helper.php b/helpers/blog_helper.php
index <HASH>..<HASH> 100644
--- a/helpers/blog_helper.php
+++ b/helpers/blog_helper.php
@@ -24,5 +24,33 @@ if ( ! function_exists( 'blog_setting' ) )
}
}
+
+// --------------------------------------------------------------------------
+
+
+/**
+ * Get latest blog posts
+ *
+ * @access public
+ * @param none
+ * @return void
+ */
+if ( ! function_exists( 'blog_latest_posts' ) )
+{
+ function blog_latest_posts( $limit = 9 )
+ {
+ // Load the model if it's not already loaded
+ if ( ! get_instance()->load->model_is_loaded( 'post' ) ) :
+
+ get_instance()->load->model( 'blog/blog_post_model', 'post' );
+
+ endif;
+
+ // --------------------------------------------------------------------------
+
+ return get_instance()->post->get_latest( $limit );
+ }
+}
+
/* End of file blog_helper.php */
/* Location: ./modules/blog/helpers/blog_helper.php */
\ No newline at end of file | added blog_get_latest() helper. | nails_common | train |
bdd68b1c35278f293e7752561a055eb24ad9fec2 | diff --git a/great_expectations/data_context/types/__init__.py b/great_expectations/data_context/types/__init__.py
index <HASH>..<HASH> 100644
--- a/great_expectations/data_context/types/__init__.py
+++ b/great_expectations/data_context/types/__init__.py
@@ -20,9 +20,9 @@ from .resource_identifiers import (
ValidationResultIdentifier,
)
-# TODO: Deprecate this in favor of DataAssetIdentifier
-NormalizedDataAssetName = namedtuple("NormalizedDataAssetName", [
- "datasource",
- "generator",
- "generator_asset"
-])
\ No newline at end of file
+# # TODO: Deprecate this in favor of DataAssetIdentifier
+# NormalizedDataAssetName = namedtuple("NormalizedDataAssetName", [
+# "datasource",
+# "generator",
+# "generator_asset"
+# ])
\ No newline at end of file | Dup definition most likely due to git merge | great-expectations_great_expectations | train |
b3e415a873f8d13a93a8ebda30262db2ada2c215 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -15,16 +15,6 @@ PKGDIR = os.path.join(BASEDIR, PKGNAME)
MODULES = [filename[:-4] for filename in os.listdir(PKGDIR)
if filename.endswith('.pyx')]
-ext_args = {}
-extra_setup_args = {}
-
-
-# support 'test' target if setuptools/distribute is available
-
-if 'setuptools' in sys.modules:
- extra_setup_args['test_suite'] = 'fastrlock.tests.suite'
- extra_setup_args["zip_safe"] = False
-
def has_option(name):
if name in sys.argv[1:]:
@@ -33,8 +23,11 @@ def has_option(name):
return False
+ext_args = {
+ 'define_macros': [('CYTHON_CLINE_IN_TRACEBACK', '1')],
+}
if has_option('--without-assert'):
- ext_args['define_macros'] = [('CYTHON_WITHOUT_ASSERTIONS', None)]
+ ext_args['define_macros'].append(('CYTHON_WITHOUT_ASSERTIONS', None))
use_cython = has_option('--with-cython')
@@ -86,18 +79,14 @@ long_description = '\n\n'.join([
for text_file in ['README.rst', 'CHANGES.rst']])
-if sys.version_info >= (2,6):
- extra_setup_args['license'] = 'MIT style'
-
-
setup(
name="fastrlock",
version=VERSION,
author="Stefan Behnel",
author_email="[email protected]",
url="https://github.com/scoder/fastrlock",
+ license='MIT style',
description="Fast, re-entrant optimistic lock implemented in Cython",
-
long_description=long_description,
classifiers=[
'Development Status :: 5 - Production/Stable',
@@ -112,7 +101,10 @@ setup(
],
packages=[PKGNAME],
- package_data = {PKGNAME: ['*.pxd', '*.pxi']},
- ext_modules = ext_modules,
- **extra_setup_args
+ package_data={PKGNAME: ['*.pxd', '*.pxi']},
+ ext_modules=ext_modules,
+ zip_safe=False,
+
+ # support 'test' target if setuptools/distribute is available
+ test_suite='fastrlock.tests.suite',
) | Remove dead code from setup.py and statically set CYTHON_CLINE_IN_TRACEBACK=1 to avoid runtime overhead. | scoder_fastrlock | train |
03d814a67e6535ee642477fdfe31afeca000b15e | diff --git a/src/text/BitmapText.js b/src/text/BitmapText.js
index <HASH>..<HASH> 100644
--- a/src/text/BitmapText.js
+++ b/src/text/BitmapText.js
@@ -37,20 +37,43 @@ function BitmapText(text, style) {
this.textHeight = 0;
/**
- * @member {Array}
+ * Private tracker for the letter sprite pool.
+ *
+ * @member {Sprite[]}
* @private
*/
this._pool = [];
- this.setText(text);
- this.setStyle(style);
- this.updateText();
+ /**
+ * Private tracker for the current style.
+ *
+ * @member {object}
+ * @private
+ */
+ this._style = {
+ tint: style.tint,
+ align: style.align,
+ fontName: null,
+ fontSize: 0
+ };
+ this.font = style.font; // run font setter
+
+ /**
+ * Private tracker for the current text.
+ *
+ * @member {string}
+ * @private
+ */
+ this._text = text;
/**
* The dirty state of this object.
+ *
* @member {boolean}
*/
this.dirty = false;
+
+ this.updateText();
}
// constructor
@@ -58,35 +81,79 @@ BitmapText.prototype = Object.create(core.DisplayObjectContainer.prototype);
BitmapText.prototype.constructor = BitmapText;
module.exports = BitmapText;
-/**
- * Set the text string to be rendered.
- *
- * @param text {string} The text that you would like displayed
- */
-BitmapText.prototype.setText = function (text) {
- this.text = text || ' ';
- this.dirty = true;
-};
+Object.defineProperties(BitmapText.prototype, {
+ /**
+ * The tint of the BitmapText object
+ *
+ * @member {number}
+ * @memberof BitmapText#
+ */
+ tint: {
+ get: function () {
+ return this._style.tint;
+ },
+ set: function (value) {
+ this._style.tint = value;
+
+ this.dirty = true;
+ }
+ },
-/**
- * Set the style of the text
- * style.font {string} The size (optional) and bitmap font id (required) eq 'Arial' or '20px Arial' (must have loaded previously)
- * [style.align='left'] {string} Alignment for multiline text ('left', 'center' or 'right'), does not affect single lines of text
- *
- * @param style {object} The style parameters, contained as properties of an object
- */
-BitmapText.prototype.setStyle = function (style) {
- style = style || {};
- style.align = style.align || 'left';
- this.style = style;
+ /**
+ * The tint of the BitmapText object
+ *
+ * @member {string}
+ * @default 'left'
+ * @memberof BitmapText#
+ */
+ align: {
+ get: function () {
+ return this._style.align;
+ },
+ set: function (value) {
+ this._style.align = value;
+
+ this.dirty = true;
+ }
+ },
- var font = style.font.split(' ');
- this.fontName = font[font.length - 1];
- this.fontSize = font.length >= 2 ? parseInt(font[font.length - 2], 10) : BitmapText.fonts[this.fontName].size;
+ /**
+ * The tint of the BitmapText object
+ *
+ * @member {Font}
+ * @memberof BitmapText#
+ */
+ font: {
+ get: function () {
+ return this._style.font;
+ },
+ set: function (value) {
+ value = value.split(' ');
- this.dirty = true;
- this.tint = style.tint;
-};
+ this._style.fontName = font[font.length - 1];
+ this._style.fontSize = font.length >= 2 ? parseInt(font[font.length - 2], 10) : BitmapText.fonts[this.fontName].size;
+
+ this.dirty = true;
+ }
+ },
+
+ /**
+ * The text of the BitmapText object
+ *
+ * @member {string}
+ * @memberof BitmapText#
+ */
+ text: {
+ get: function () {
+ return this._text;
+ },
+ set: function (value) {
+ this._text = value;
+
+ this.dirty = true;
+ }
+ }
+});
/**
* Renders text and updates it when needed | merge #<I>, and remove some setters | pixijs_pixi.js | train |
512df3c7eef98a361013123e96cf2942476dc707 | diff --git a/src/Commands/Other/DrushCommand.php b/src/Commands/Other/DrushCommand.php
index <HASH>..<HASH> 100644
--- a/src/Commands/Other/DrushCommand.php
+++ b/src/Commands/Other/DrushCommand.php
@@ -42,7 +42,7 @@ class DrushCommand extends BaseGenerator {
}
/**
- * Returns command file name.
+ * Returns default command file name.
*/
protected function defaultCommandFile($vars) {
// The suggestion depends on whether the command global or local.
@@ -52,7 +52,7 @@ class DrushCommand extends BaseGenerator {
}
/**
- * Returns command alias.
+ * Returns default command alias.
*/
protected function defaultAlias($vars) {
return substr($vars['command_name'], 0, 3); | Fixed function docs in drush command generator. | Chi-teck_drupal-code-generator | train |
3cd034f964de4fd5fee1c587096674f10ca959c3 | diff --git a/scout/adapter/mongo/query.py b/scout/adapter/mongo/query.py
index <HASH>..<HASH> 100644
--- a/scout/adapter/mongo/query.py
+++ b/scout/adapter/mongo/query.py
@@ -2,7 +2,7 @@ import logging
logger = logging.getLogger(__name__)
-from scout.constants import (SPIDEX_HUMAN)
+from scout.constants import (SPIDEX_HUMAN, CLINSIG_MAP)
class QueryHandler(object):
@@ -265,7 +265,11 @@ class QueryHandler(object):
# trust clnsig entries with trusted revstat levels.
if query.get('clinsig'):
- rank = [int(item) for item in query['clinsig']]
+ rank = []
+ for item in query['clinsig']:
+ rank.append(int(item))
+ # search for human readable clinsig values in newer cases
+ rank.append(CLINSIG_MAP[int(item)])
if query.get('clinsig_confident_always_returned') == True:
@@ -292,13 +296,13 @@ class QueryHandler(object):
if mongo_query_minor and mongo_query_major:
if gene_query:
- mongo_query['$and'] = [
- {'$or': gene_query},
+ mongo_query['$and'] = [
+ {'$or': gene_query},
{
'$or': [
{'$and': mongo_query_minor}, mongo_query_major
]
- }
+ }
]
else:
mongo_query['$or'] = [ {'$and': mongo_query_minor}, | modified mongo query to include human readable clinsigs | Clinical-Genomics_scout | train |
c11d94105acf12b36257fc996e55e14e04a68f62 | diff --git a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackages.java b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackages.java
index <HASH>..<HASH> 100644
--- a/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackages.java
+++ b/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/AutoConfigurationPackages.java
@@ -56,8 +56,7 @@ public abstract class AutoConfigurationPackages {
// Currently we only store a single base package, but we return a list to
// allow this to change in the future if needed
try {
- return Collections.singletonList(beanFactory.getBean(BEAN, BasePackage.class)
- .toString());
+ return beanFactory.getBean(BEAN, BasePackages.class).get();
}
catch (NoSuchBeanDefinitionException ex) {
throw new IllegalStateException(
@@ -67,7 +66,7 @@ public abstract class AutoConfigurationPackages {
static void set(BeanDefinitionRegistry registry, String packageName) {
GenericBeanDefinition beanDefinition = new GenericBeanDefinition();
- beanDefinition.setBeanClass(BasePackage.class);
+ beanDefinition.setBeanClass(BasePackages.class);
beanDefinition.getConstructorArgumentValues().addIndexedArgumentValue(0,
packageName);
beanDefinition.setRole(BeanDefinition.ROLE_INFRASTRUCTURE);
@@ -81,40 +80,50 @@ public abstract class AutoConfigurationPackages {
@Order(Ordered.HIGHEST_PRECEDENCE)
static class Registrar implements ImportBeanDefinitionRegistrar {
- private static final String NO_SUCH_PACKAGE = "not.scanning.root";
-
@Override
- public void registerBeanDefinitions(AnnotationMetadata importingClassMetadata,
+ public void registerBeanDefinitions(AnnotationMetadata metadata,
BeanDefinitionRegistry registry) {
- String packageName = ClassUtils.getPackageName(importingClassMetadata
- .getClassName());
- if (StringUtils.hasText(packageName)) {
- set(registry, packageName);
- logger.info("@EnableAutoConfiguration was declared on a class in the package '"
- + packageName + "'. Automatic @Repository scanning is enabled.");
- }
- else {
- set(registry, NO_SUCH_PACKAGE);
- logger.warn("@EnableAutoConfiguration was declared on a class in the default package. "
- + "Automatic @Repository scanning is not enabled.");
- }
+ set(registry, ClassUtils.getPackageName(metadata.getClassName()));
}
+
}
/**
- * Holder for the base package.
+ * Holder for the base package (name may be null to indicate no scanning).
*/
- final static class BasePackage {
+ final static class BasePackages {
- private final String name;
+ private final List<String> packages;
- public BasePackage(String name) {
- this.name = name;
+ private boolean loggedBasePackageInfo;
+
+ public BasePackages(String name) {
+ this.packages = (StringUtils.hasText(name) ? Collections.singletonList(name)
+ : Collections.<String> emptyList());
}
- @Override
- public String toString() {
- return this.name;
+ public List<String> get() {
+ if (!this.loggedBasePackageInfo) {
+ if (this.packages.isEmpty()) {
+ if (logger.isWarnEnabled()) {
+ logger.warn("@EnableAutoConfiguration was declared on a class "
+ + "in the default package. Automatic @Repository and "
+ + "@Entity scanning is not enabled.");
+ }
+ }
+ else {
+ if (logger.isDebugEnabled()) {
+ String packageNames = StringUtils
+ .collectionToCommaDelimitedString(this.packages);
+ logger.debug("@EnableAutoConfiguration was declared on a class "
+ + "in the package '" + packageNames
+ + "'. Automatic @Repository and @Entity scanning is "
+ + "enabled.");
+ }
+ }
+ this.loggedBasePackageInfo = true;
+ }
+ return this.packages;
}
} | Log AutoConfigurationPackages warnings just once
Update AutoConfigurationPackages to log warnings on the first access,
rather than during setup. This works around the fact that the CLI
currently adds multiple @EnableAutoConfiguration annotations.
Fixes gh-<I> | spring-projects_spring-boot | train |
6efe656badb7a32b14165f76c9078edfb9bc75b2 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ def read(fname):
REQUIRES = ["argparse"] if sys.version[:3] in ('2.6', '3.0', '3.1') else []
setup(
- version='0.2.0',
+ version='0.2.1.dev0',
zip_safe = True,
name = "seqfile",
author = "Utkarsh Upadhyay", | Back to development: <I> | musically-ut_seqfile | train |
1b8ad7a5d394b0e40987dd2842ea7c608ffe881c | diff --git a/internal/services/eventhub/eventhub_namespace_resource.go b/internal/services/eventhub/eventhub_namespace_resource.go
index <HASH>..<HASH> 100644
--- a/internal/services/eventhub/eventhub_namespace_resource.go
+++ b/internal/services/eventhub/eventhub_namespace_resource.go
@@ -110,7 +110,7 @@ func resourceEventHubNamespace() *pluginsdk.Resource {
Type: pluginsdk.TypeInt,
Optional: true,
Computed: true,
- ValidateFunc: validation.IntBetween(0, 20),
+ ValidateFunc: validation.IntBetween(0, 40),
},
"network_rulesets": {
diff --git a/internal/services/eventhub/eventhub_namespace_resource_test.go b/internal/services/eventhub/eventhub_namespace_resource_test.go
index <HASH>..<HASH> 100644
--- a/internal/services/eventhub/eventhub_namespace_resource_test.go
+++ b/internal/services/eventhub/eventhub_namespace_resource_test.go
@@ -434,7 +434,7 @@ func TestAccEventHubNamespace_maximumThroughputUnitsUpdate(t *testing.T) {
check.That(data.ResourceName).ExistsInAzure(r),
check.That(data.ResourceName).Key("sku").HasValue("Standard"),
check.That(data.ResourceName).Key("capacity").HasValue("2"),
- check.That(data.ResourceName).Key("maximum_throughput_units").HasValue("20"),
+ check.That(data.ResourceName).Key("maximum_throughput_units").HasValue("25"),
),
},
{
@@ -854,7 +854,7 @@ resource "azurerm_eventhub_namespace" "test" {
sku = "Standard"
capacity = "2"
auto_inflate_enabled = true
- maximum_throughput_units = 20
+ maximum_throughput_units = 25
}
`, data.RandomInteger, data.Locations.Primary, data.RandomInteger)
 } | azurerm_eventhub_namespace - support up to <I> for maximum_throughput_units (#<I>)
Azure recently increased the limit of maximum_throughput_units for autoscaling from <I> to <I>.
You would get an error right now if you choose anything above <I>.
Fixes #<I> | terraform-providers_terraform-provider-azurerm | train |
4098025c117225d0aa5092cb5146ce3cbf97b444 | diff --git a/cmd/object-api-putobject_test.go b/cmd/object-api-putobject_test.go
index <HASH>..<HASH> 100644
--- a/cmd/object-api-putobject_test.go
+++ b/cmd/object-api-putobject_test.go
@@ -23,6 +23,7 @@ import (
"io/ioutil"
"os"
"path"
+ "runtime"
"testing"
)
@@ -326,6 +327,9 @@ func testObjectAPIPutObjectStaleFiles(obj ObjectLayer, instanceType string, disk
// Wrapper for calling Multipart PutObject tests for both XL multiple disks and single node setup.
func TestObjectAPIMultipartPutObjectStaleFiles(t *testing.T) {
+ if runtime.GOOS == "windows" {
+ return
+ }
ExecObjectLayerStaleFilesTest(t, testObjectAPIMultipartPutObjectStaleFiles)
}
diff --git a/cmd/posix.go b/cmd/posix.go
index <HASH>..<HASH> 100644
--- a/cmd/posix.go
+++ b/cmd/posix.go
@@ -944,5 +944,11 @@ func (s *posix) RenameFile(srcVolume, srcPath, dstVolume, dstPath string) (err e
}
return err
}
+
+ // Remove parent dir of the source file if empty
+ if parentDir := slashpath.Dir(preparePath(srcFilePath)); isDirEmpty(parentDir) {
+ deleteFile(srcVolumeDir, parentDir)
+ }
+
return nil
}
diff --git a/cmd/xl-v1-multipart.go b/cmd/xl-v1-multipart.go
index <HASH>..<HASH> 100644
--- a/cmd/xl-v1-multipart.go
+++ b/cmd/xl-v1-multipart.go
@@ -394,7 +394,8 @@ func (xl xlObjects) PutObjectPart(bucket, object, uploadID string, partID int, s
partSuffix := fmt.Sprintf("part.%d", partID)
tmpSuffix := getUUID()
- tmpPartPath := tmpSuffix
+ tmpPart := tmpSuffix
+ tmpPartPath := path.Join(tmpSuffix, partSuffix)
// Initialize md5 writer.
md5Writer := md5.New()
@@ -424,7 +425,7 @@ func (xl xlObjects) PutObjectPart(bucket, object, uploadID string, partID int, s
teeReader := io.TeeReader(lreader, mw)
// Delete the temporary object part. If PutObjectPart succeeds there would be nothing to delete.
- defer xl.deleteObject(minioMetaTmpBucket, tmpPartPath)
+ defer xl.deleteObject(minioMetaTmpBucket, tmpPart)
if size > 0 {
for _, disk := range onlineDisks { | Remove XL multipart tmp files when the latter is canceled (#<I>)
XL multipart fails to remove tmp files when an error occurs during upload; this case covers the scenario where an upload is canceled manually by the client in the middle of the job.
fd21597d02452ba1176292b4f936d1198693c426 | diff --git a/Configuration.php b/Configuration.php
index <HASH>..<HASH> 100644
--- a/Configuration.php
+++ b/Configuration.php
@@ -2,6 +2,7 @@
namespace Innmind\Rest\Server;
+use Innmind\Rest\Server\Definition\Types;
use Symfony\Component\Config\Definition\ConfigurationInterface;
use Symfony\Component\Config\Definition\Builder\TreeBuilder;
@@ -53,8 +54,9 @@ class Configuration implements ConfigurationInterface
->requiresAtLeastOneElement()
->prototype('array')
->children()
- ->scalarNode('type')
+ ->enumNode('type')
->isRequired()
+ ->values(Types::keys())
->end()
->arrayNode('access')
->isRequired()
diff --git a/Definition/Types.php b/Definition/Types.php
index <HASH>..<HASH> 100644
--- a/Definition/Types.php
+++ b/Definition/Types.php
@@ -52,6 +52,20 @@ class Types
}
/**
+ * Return all the types supported
+ *
+ * @return array
+ */
+ public static function keys()
+ {
+ if (self::$types === null) {
+ self::addDefaults();
+ }
+
+ return array_keys(self::$types);
+ }
+
+ /**
* Initialize all types
*
* @return void
diff --git a/Tests/Definition/TypesTest.php b/Tests/Definition/TypesTest.php
index <HASH>..<HASH> 100644
--- a/Tests/Definition/TypesTest.php
+++ b/Tests/Definition/TypesTest.php
@@ -33,4 +33,12 @@ class TypesTest extends \PHPUnit_Framework_TestCase
Types::get('int')
);
}
+
+ public function testKeys()
+ {
+ $this->assertSame(
+ ['array', 'bool', 'date', 'float', 'int', 'string', 'resource'],
+ Types::keys()
+ );
+ }
} | enforce property type to the ones defined in Types | Innmind_rest-server | train |
a00d2ad0302638e3e2a6bd9f3435fab2cde66627 | diff --git a/light_test.go b/light_test.go
index <HASH>..<HASH> 100644
--- a/light_test.go
+++ b/light_test.go
@@ -3,6 +3,7 @@ package hue
import (
"testing"
"fmt"
+ "time"
)
func TestGetAllLights(t *testing.T) {
@@ -20,7 +21,14 @@ func TestSetLightState(t *testing.T) {
bridge := NewBridge("192.168.1.128", "319b36233bd2328f3e40731b23479207")
lights, _ := GetAllLights(bridge)
selectedLight := lights[0]
- newState := LightState{On: true,} //On: false, *XY: [2]float32{5.0, 5.0},
- //fmt.Println("\n\nSTATE: ", newState)
+
+ // Turn light on, off, on again
+ newState := LightState{On: true,}
+ SetLightState(bridge, selectedLight.Index, newState)
+ time.Sleep(time.Second)
+ newState = LightState{On: false,}
+ SetLightState(bridge, selectedLight.Index, newState)
+ time.Sleep(time.Second)
+ newState = LightState{On: true,}
SetLightState(bridge, selectedLight.Index, newState)
} | Improved visual testing for light state SetLightState. | Collinux_gohue | train |
c8df0349487aea66b1e1223b939821e216836ce3 | diff --git a/code/MemberProfileField.php b/code/MemberProfileField.php
index <HASH>..<HASH> 100644
--- a/code/MemberProfileField.php
+++ b/code/MemberProfileField.php
@@ -60,7 +60,7 @@ class MemberProfileField extends DataObject {
$fields->removeByName('ProfilePageID');
$fields->fieldByName('Root.Main')->getChildren()->changeFieldOrder(array(
- 'CustomTitle',
+ 'CustomTitle',$
'DefaultValue',
'Note',
'ProfileVisibility',
@@ -93,7 +93,7 @@ class MemberProfileField extends DataObject {
_t('MemberProfiles.DEFAULTVALUE', 'Default Value'),
$memberField->getSource()
));
- $default->setHasEmptyDefault(true);
+ $default->setEmptyString(' ');
} elseif($memberField instanceof TextField) {
$fields->replaceField('DefaultValue', new TextField(
'DefaultValue', _t('MemberProfiles.DEFAULTVALUE', 'Default Value') | Field default value dropdown set empty string
When a `MemberProfileField` is a `DropdownField` there is no way to select nothing as the default dropdown value.
`$default->setHasEmptyDefault(true);` doesn't seem to work. I have replaced this with `$default->setEmptyString(' ');`.
This allows the user to select the empty string as a default value. | symbiote_silverstripe-memberprofiles | train |
a5ba33d6a6268ff8b77600e3cfa8ac5005660288 | diff --git a/ezp/Content/Concrete.php b/ezp/Content/Concrete.php
index <HASH>..<HASH> 100644
--- a/ezp/Content/Concrete.php
+++ b/ezp/Content/Concrete.php
@@ -100,6 +100,7 @@ class Concrete extends Model implements Content
// in Content/Version/Concrete for the following properties:
'modified' => true,
'published' => true,
+ 'initialLanguageId' => false,
);
/**
@@ -118,8 +119,7 @@ class Concrete extends Model implements Content
'relations' => false,
'reverseRelations' => false,
'currentVersion' => false,
- 'initialLanguage' => true,
- 'initialLanguageId' => true,
+ 'initialLanguage' => false,
);
/**
@@ -699,15 +699,6 @@ class Concrete extends Model implements Content
}
/**
- * Gets the initial language Id
- * @return mixed
- */
- protected function getInitialLanguageId()
- {
- return $this->initialLanguage->id;
- }
-
- /**
* Sets the initial language
* @param \ezp\Content\Language
*/
diff --git a/ezp/Content/Version/Concrete.php b/ezp/Content/Version/Concrete.php
index <HASH>..<HASH> 100644
--- a/ezp/Content/Version/Concrete.php
+++ b/ezp/Content/Version/Concrete.php
@@ -113,6 +113,7 @@ class Concrete extends Model implements Version
array(
'contentId' => $content->id,
'status' => self::STATUS_DRAFT,
+ 'initialLanguageId' => $content->initialLanguageId
)
);
$this->content = $content; | Use from VO property instead of from dynamic property | ezsystems_ezpublish-kernel | train |
aefb45fb8c1c9e18154453805050a57b0ff2addd | diff --git a/java/src/com/swiftnav/sbp/SBPMessage.java b/java/src/com/swiftnav/sbp/SBPMessage.java
index <HASH>..<HASH> 100644
--- a/java/src/com/swiftnav/sbp/SBPMessage.java
+++ b/java/src/com/swiftnav/sbp/SBPMessage.java
@@ -18,6 +18,7 @@ import java.lang.reflect.Array;
import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
+import java.util.Arrays;
import java.util.LinkedList;
/** Superclass of all SBP messages. */
@@ -188,9 +189,7 @@ public class SBPMessage {
}
private byte[] getPayload() {
- byte[] payload = new byte[buf.position()];
- buf.get(payload, 0, buf.position());
- return payload;
+ return Arrays.copyOf(buf.array(), buf.position());
}
public void putU8(int x) { | java: Fix rebuilding payload from expanded message classes. | swift-nav_libsbp | train |
1363860d93d8b2b81b3e8afe6f648bfc50e741e9 | diff --git a/pyparsing.py b/pyparsing.py
index <HASH>..<HASH> 100644
--- a/pyparsing.py
+++ b/pyparsing.py
@@ -95,8 +95,8 @@ classes inherit from. Use the docstrings for examples of how to:
namespace class
"""
-__version__ = "2.4.1"
-__versionTime__ = "20 Jul 2019 18:17 UTC"
+__version__ = "2.5.0"
+__versionTime__ = "22 Jul 2019 10:25 UTC"
__author__ = "Paul McGuire <[email protected]>"
import string | Update version in prep for new <I>.x work | pyparsing_pyparsing | train |
75ea8d0d67a213a63cf825cacfc9f913eb023c16 | diff --git a/src/Symfony/Component/Mime/Address.php b/src/Symfony/Component/Mime/Address.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Mime/Address.php
+++ b/src/Symfony/Component/Mime/Address.php
@@ -23,6 +23,15 @@ use Symfony\Component\Mime\Exception\RfcComplianceException;
*/
final class Address
{
+ /**
+ * A regex that matches a structure like 'Name <[email protected]>'.
+ * It matches anything between the first < and last > as email address.
+ * This allows to use a single string to construct an Address, which can be convenient to use in
+ * config, and allows to have more readable config.
+ * This does not try to cover all edge cases for address.
+ */
+ private const FROM_STRING_PATTERN = '~(?<displayName>[^<]*)<(?<addrSpec>.*)>[^>]*~';
+
private static $validator;
private static $encoder;
@@ -100,4 +109,15 @@ final class Address
return $addrs;
}
+
+ public static function fromString(string $string): self
+ {
+ if (false === strpos($string, '<')) {
+ return new self($string, '');
+ }
+ if (!preg_match(self::FROM_STRING_PATTERN, $string, $matches)) {
+ throw new InvalidArgumentException(sprintf('Could not parse "%s" to a "%s" instance.', $string, static::class));
+ }
+ return new self($matches['addrSpec'], trim($matches['displayName'], ' \'"'));
+ }
}
diff --git a/src/Symfony/Component/Mime/CHANGELOG.md b/src/Symfony/Component/Mime/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Mime/CHANGELOG.md
+++ b/src/Symfony/Component/Mime/CHANGELOG.md
@@ -7,6 +7,7 @@ CHANGELOG
* [BC BREAK] Removed `NamedAddress` (`Address` now supports a name)
* Added PHPUnit constraints
* Added `AbstractPart::asDebugString()`
+ * Added `Address::fromString()`
4.3.3
-----
diff --git a/src/Symfony/Component/Mime/Tests/AddressTest.php b/src/Symfony/Component/Mime/Tests/AddressTest.php
index <HASH>..<HASH> 100644
--- a/src/Symfony/Component/Mime/Tests/AddressTest.php
+++ b/src/Symfony/Component/Mime/Tests/AddressTest.php
@@ -13,6 +13,7 @@ namespace Symfony\Component\Mime\Tests;
use PHPUnit\Framework\TestCase;
use Symfony\Component\Mime\Address;
+use Symfony\Component\Mime\Exception\InvalidArgumentException;
class AddressTest extends TestCase
{
@@ -77,4 +78,79 @@ class AddressTest extends TestCase
{
return [[''], [' '], [" \r\n "]];
}
+
+ /**
+ * @dataProvider fromStringProvider
+ */
+ public function testFromString($string, $displayName, $addrSpec)
+ {
+ $address = Address::fromString($string);
+ $this->assertEquals($displayName, $address->getName());
+ $this->assertEquals($addrSpec, $address->getAddress());
+ $fromToStringAddress = Address::fromString($address->toString());
+ $this->assertEquals($displayName, $fromToStringAddress->getName());
+ $this->assertEquals($addrSpec, $fromToStringAddress->getAddress());
+ }
+
+ public function testFromStringFailure()
+ {
+ $this->expectException(InvalidArgumentException::class);
+ Address::fromString('Jane Doe <[email protected]');
+ }
+
+ public function fromStringProvider()
+ {
+ return [
+ [
+ '[email protected]',
+ '',
+ '[email protected]',
+ ],
+ [
+ '<[email protected]>',
+ '',
+ '[email protected]',
+ ],
+ [
+ 'Jane Doe <[email protected]>',
+ 'Jane Doe',
+ '[email protected]',
+ ],
+ [
+ 'Jane Doe<[email protected]>',
+ 'Jane Doe',
+ '[email protected]',
+ ],
+ [
+ '\'Jane Doe\' <[email protected]>',
+ 'Jane Doe',
+ '[email protected]',
+ ],
+ [
+ '"Jane Doe" <[email protected]>',
+ 'Jane Doe',
+ '[email protected]',
+ ],
+ [
+ 'Jane Doe <"ex<ample"@example.com>',
+ 'Jane Doe',
+ '"ex<ample"@example.com',
+ ],
+ [
+ 'Jane Doe <"ex<amp>le"@example.com>',
+ 'Jane Doe',
+ '"ex<amp>le"@example.com',
+ ],
+ [
+ 'Jane Doe > <"ex<am p>le"@example.com>',
+ 'Jane Doe >',
+ '"ex<am p>le"@example.com',
+ ],
+ [
+ 'Jane Doe <[email protected]>discarded',
+ 'Jane Doe',
+ '[email protected]',
+ ],
+ ];
+ }
} | Add Address::fromString
This will allow creating an Address from a string such as 'Name <<EMAIL>>' | symfony_symfony | train
26e8f361c8bd0149235608ed14c09dd2cce3f6fb | diff --git a/src/Offer/Offer.php b/src/Offer/Offer.php
index <HASH>..<HASH> 100644
--- a/src/Offer/Offer.php
+++ b/src/Offer/Offer.php
@@ -335,6 +335,31 @@ abstract class Offer extends EventSourcedAggregateRoot
}
/**
+ * Publish the offer when it has workflowstatus draft.
+ */
+ public function publish()
+ {
+ $this->guardPublish() ?: $this->apply($this->createPublishedEvent());
+ }
+
+ /**
+ * @return bool
+ * @throws Exception
+ */
+ private function guardPublish()
+ {
+ if ($this->workflowStatus === WorkflowStatus::READY_FOR_VALIDATION()) {
+ return true; // nothing left to do if the offer has already been published
+ }
+
+ if ($this->workflowStatus !== WorkflowStatus::DRAFT()) {
+ throw new Exception('You can not publish an offer that is not draft');
+ }
+
+ return false;
+ }
+
+ /**
* Approve the offer when it's waiting for validation.
*/
public function approve()
@@ -404,6 +429,14 @@ abstract class Offer extends EventSourcedAggregateRoot
}
/**
+ * @param AbstractPublished $published
+ */
+ protected function applyPublished(AbstractPublished $published)
+ {
+ $this->workflowStatus = WorkflowStatus::READY_FOR_VALIDATION();
+ }
+
+ /**
* @param AbstractApproved $approved
*/
protected function applyApproved(AbstractApproved $approved)
diff --git a/src/Offer/OfferCommandHandler.php b/src/Offer/OfferCommandHandler.php
index <HASH>..<HASH> 100644
--- a/src/Offer/OfferCommandHandler.php
+++ b/src/Offer/OfferCommandHandler.php
@@ -416,6 +416,16 @@ abstract class OfferCommandHandler extends Udb3CommandHandler
}
/**
+ * @param AbstractPublish $publish
+ */
+ private function handlePublish(AbstractPublish $publish)
+ {
+ $offer = $this->load($publish->getItemId());
+ $offer->publish();
+ $this->offerRepository->save($offer);
+ }
+
+ /**
* @param AbstractApprove $approve
*/
private function handleApprove(AbstractApprove $approve)
diff --git a/test/Offer/Item/Item.php b/test/Offer/Item/Item.php
index <HASH>..<HASH> 100644
--- a/test/Offer/Item/Item.php
+++ b/test/Offer/Item/Item.php
@@ -49,6 +49,7 @@ class Item extends Offer
protected function applyItemCreated(ItemCreated $created)
{
$this->id = $created->getItemId();
+ $this->workflowStatus = $created->getWorkflowStatus();
}
/**
diff --git a/test/Offer/OfferTest.php b/test/Offer/OfferTest.php
index <HASH>..<HASH> 100644
--- a/test/Offer/OfferTest.php
+++ b/test/Offer/OfferTest.php
@@ -14,6 +14,7 @@ use CultuurNet\UDB3\Offer\Item\Events\MainImageSelected;
use CultuurNet\UDB3\Offer\Item\Events\Moderation\Approved;
use CultuurNet\UDB3\Offer\Item\Events\Moderation\FlaggedAsDuplicate;
use CultuurNet\UDB3\Offer\Item\Events\Moderation\FlaggedAsInappropriate;
+use CultuurNet\UDB3\Offer\Item\Events\Moderation\Published;
use CultuurNet\UDB3\Offer\Item\Events\Moderation\Rejected;
use CultuurNet\UDB3\Offer\Item\Item;
use Exception;
@@ -262,6 +263,67 @@ class OfferTest extends AggregateRootScenarioTestCase
/**
* @test
*/
+ public function it_publishes_an_offer_with_workflow_status_draft()
+ {
+ $itemId = 'itemId';
+
+ $this->scenario
+ ->given([
+ new ItemCreated($itemId, WorkflowStatus::DRAFT())
+ ])
+ ->when(function (Item $item) {
+ $item->publish();
+ })
+ ->then([
+ new Published($itemId)
+ ]);
+ }
+
+ /**
+ * @test
+ */
+ public function it_does_not_publish_an_offer_more_then_once()
+ {
+ $itemId = 'itemId';
+
+ $this->scenario
+ ->given([
+ new ItemCreated($itemId, WorkflowStatus::DRAFT()),
+ new Published($itemId)
+ ])
+ ->when(function (Item $item) {
+ $item->publish();
+ })
+ ->then([]);
+ }
+
+ /**
+ * @test
+ */
+ public function it_throws_when_trying_to_publish_a_non_draft_offer()
+ {
+ $this->setExpectedException(
+ Exception::class,
+ 'You can not publish an offer that is not draft'
+ );
+
+ $itemId = 'itemId';
+
+ $this->scenario
+ ->given([
+ new ItemCreated($itemId, WorkflowStatus::DRAFT()),
+ new Published($itemId),
+ new FlaggedAsDuplicate($itemId)
+ ])
+ ->when(function (Item $item) {
+ $item->publish();
+ })
+ ->then([]);
+ }
+
+ /**
+ * @test
+ */
public function it_should_approve_an_offer_that_is_ready_for_validation()
{
$itemId = UUID::generateAsString(); | III-<I> Apply the publish command to an offer. | cultuurnet_udb3-php | train |
fbe8243b96f9ad890ec03d0132b159ad9aab4afe | diff --git a/system_maintenance/models.py b/system_maintenance/models.py
index <HASH>..<HASH> 100644
--- a/system_maintenance/models.py
+++ b/system_maintenance/models.py
@@ -78,7 +78,7 @@ class Maintenance(models.Model):
software = models.ManyToManyField(
'Software',
blank=True,
- help_text='Select the software(s) involved in the system maintenance.',
+ help_text='Select the software involved in the system maintenance.',
)
description = MarkupField( | Simplify the help text for software involved in system maintenance | mfcovington_django-system-maintenance | train |
756eb01142cb6868e925bebb943faf5b9ccbad67 | diff --git a/src/Assetic/Filter/BaseCssFilter.php b/src/Assetic/Filter/BaseCssFilter.php
index <HASH>..<HASH> 100644
--- a/src/Assetic/Filter/BaseCssFilter.php
+++ b/src/Assetic/Filter/BaseCssFilter.php
@@ -84,6 +84,6 @@ abstract class BaseCssFilter implements FilterInterface
*/
protected function filterIEFilters($content, $callback, $limit = -1, &$count = 0)
{
- return preg_replace_callback('/src=(["\']?)(?<url>.*?)\\1/', $callback, $content, $limit, $count);
+ return preg_replace_callback('/src=(["\']?)(?P<url>.*?)\\1/', $callback, $content, $limit, $count);
}
} | Modify regex in filterIEFilters()
Changed (?<url>) in regex to (?P<url>) because the short version is not supported in PCRE lower than <I> such as in CentOS <I>. | kriswallsmith_assetic | train |
7ca49875991d808aa965af5840a7eabe5c03de79 | diff --git a/lib/graphql/object_type.rb b/lib/graphql/object_type.rb
index <HASH>..<HASH> 100644
--- a/lib/graphql/object_type.rb
+++ b/lib/graphql/object_type.rb
@@ -31,10 +31,11 @@ class GraphQL::ObjectType < GraphQL::BaseType
# Shovel this type into each interface's `possible_types` array.
#
- # (There's a bug here: if you define interfaces twice, it won't remove previous definitions.)
# @param new_interfaces [Array<GraphQL::Interface>] interfaces that this type implements
def interfaces=(new_interfaces)
- new_interfaces.each {|i| i.possible_types << self }
+ @interfaces ||= []
+ (@interfaces - new_interfaces).each { |i| i.possible_types.delete(self) }
+ (new_interfaces - @interfaces).each { |i| i.possible_types << self }
@interfaces = new_interfaces
end
diff --git a/spec/graphql/interface_type_spec.rb b/spec/graphql/interface_type_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/graphql/interface_type_spec.rb
+++ b/spec/graphql/interface_type_spec.rb
@@ -11,6 +11,24 @@ describe GraphQL::InterfaceType do
assert_equal(MilkType, interface.resolve_type(MILKS.values.first))
end
+ it 'handles when interfaces are re-assigned' do
+ iface = GraphQL::InterfaceType.define do
+ end
+ type = GraphQL::ObjectType.define do
+ interfaces [iface]
+ end
+ assert_equal([type], iface.possible_types)
+
+ type.interfaces = []
+ assert_equal([], iface.possible_types)
+
+ type.interfaces = [iface]
+ assert_equal([type], iface.possible_types)
+
+ type.interfaces = [iface]
+ assert_equal([type], iface.possible_types)
+ end
+
describe 'query evaluation' do
let(:result) { DummySchema.execute(query_string, context: {}, variables: {"cheeseId" => 2})}
let(:query_string) {%| | Correctly handle `interfaces` being re-assigned
Remove stale `possible_types` entries and add to only the ones that are new. | rmosolgo_graphql-ruby | train |
2593165b61af8b70ac154fbb987823dcd37d7081 | diff --git a/src/models/StyledComponent.js b/src/models/StyledComponent.js
index <HASH>..<HASH> 100644
--- a/src/models/StyledComponent.js
+++ b/src/models/StyledComponent.js
@@ -101,20 +101,20 @@ class StyledComponent extends Component<*> {
const isTargetTag = isTag(elementToBeCreated);
const propsForElement: Object = {};
- const nextProps: Object = { ...this.attrs, ...this.props };
+ const computedProps: Object = { ...this.attrs, ...this.props };
let key;
// eslint-disable-next-line guard-for-in
- for (key in nextProps) {
+ for (key in computedProps) {
if (process.env.NODE_ENV !== 'production' && key === 'innerRef') {
warnInnerRef();
}
if (key === 'forwardedClass' || key === 'as') continue;
- else if (key === 'forwardedRef') propsForElement.ref = nextProps[key];
+ else if (key === 'forwardedRef') propsForElement.ref = computedProps[key];
else if (!isTargetTag || validAttr(key)) {
// Don't pass through non HTML tags through to HTML elements
- propsForElement[key] = nextProps[key];
+ propsForElement[key] = computedProps[key];
}
} | Rename nextProps to computedProps | styled-components_styled-components | train |
efa86a952a9acafde745d229676386380c03ed66 | diff --git a/mockserver-netty/src/test/java/org/mockserver/lifecycle/StopIntegrationTest.java b/mockserver-netty/src/test/java/org/mockserver/lifecycle/StopIntegrationTest.java
index <HASH>..<HASH> 100644
--- a/mockserver-netty/src/test/java/org/mockserver/lifecycle/StopIntegrationTest.java
+++ b/mockserver-netty/src/test/java/org/mockserver/lifecycle/StopIntegrationTest.java
@@ -1,6 +1,7 @@
package org.mockserver.lifecycle;
import org.hamcrest.Matchers;
+import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.ExpectedException;
@@ -28,6 +29,7 @@ import static org.mockserver.model.HttpRequest.request;
/**
* @author jamesdbloom
*/
+@Ignore
public class StopIntegrationTest {
private static final int MOCK_SERVER_PORT = PortFactory.findFreePort(); | ignoring test that has suddenly for no reason become random on build server but never fails anywhere else | jamesdbloom_mockserver | train |
f50277f900ab4d7ae5987f597c23ff6a7bcd7dd8 | diff --git a/client/isolate/format.go b/client/isolate/format.go
index <HASH>..<HASH> 100644
--- a/client/isolate/format.go
+++ b/client/isolate/format.go
@@ -441,7 +441,7 @@ func (lhs *ConfigSettings) union(rhs *ConfigSettings) (*ConfigSettings, error) {
lFiles, rFiles = rhs.Files, lhs.Files
}
- rebasePath, err := filepath.Rel(rRelCwd, lRelCwd)
+ rebasePath, err := filepath.Rel(lRelCwd, rRelCwd)
if err != nil {
return nil, err
}
diff --git a/client/isolate/format_test.go b/client/isolate/format_test.go
index <HASH>..<HASH> 100644
--- a/client/isolate/format_test.go
+++ b/client/isolate/format_test.go
@@ -9,6 +9,7 @@ import (
"errors"
"io/ioutil"
"os"
+ "path"
"path/filepath"
"sort"
"strings"
@@ -381,8 +382,6 @@ func TestLoadIsolateForConfigMissingVars(t *testing.T) {
ut.AssertEqualf(t, true, strings.Contains(err.Error(), "OS"), "%s", err)
}
-// TODO(tandrii): make sure these tests pass on windows.
-
func TestLoadIsolateForConfig(t *testing.T) {
t.Parallel()
// Case linux64, matches first condition.
@@ -464,15 +463,55 @@ func TestLoadIsolateAsConfigWithIncludes(t *testing.T) {
ut.AssertEqual(t, NotSet, ro) // first condition has no read_only specified.
ut.AssertEqual(t, []string{"python", "64linuxOrWin"}, cmd)
ut.AssertEqual(t, []string{
- filepath.Join("..", "inc_file"),
"64linuxOrWin",
filepath.Join("<(DIR)", "inc_unittest"), // no rebasing for this.
filepath.Join("<(PRODUCT_DIR)", "unittest<(EXECUTABLE_SUFFIX)"),
+ filepath.Join("inc", "inc_file"),
}, deps)
}
+func TestConfigSettingsUnion(t *testing.T) {
+ left := &ConfigSettings{
+ Command: []string{"left takes precedence"},
+ Files: []string{"../../le/f/t"}, // Must be POSIX.
+ IsolateDir: absPath1,
+ }
+ right := &ConfigSettings{
+ Files: []string{"../ri/g/ht"},
+ IsolateDir: absPath2,
+ }
+
+ out, err := left.union(right)
+ ut.AssertEqual(t, nil, err)
+ ut.AssertEqual(t, left.Command, out.Command)
+ ut.AssertEqual(t, left.IsolateDir, out.IsolateDir)
+ ut.AssertEqual(t, append(getAbsoluteFilenames(left), getAbsoluteFilenames(right)...),
+ getAbsoluteFilenames(out))
+}
+
// Helper functions.
+var absPath1 string
+var absPath2 string
+
+func init() {
+ absPath1 = "/tmp/bar/"
+ absPath2 = "/var/lib/"
+ if common.IsWindows() {
+ absPath1 = "E:\\tmp\\bar\\"
+ absPath2 = "X:\\var\\lib\\"
+ }
+}
+
+func getAbsoluteFilenames(c *ConfigSettings) []string {
+ files := []string{}
+ for _, f := range c.Files {
+ files = append(files, path.Join(c.IsolateDir, f))
+ }
+ sort.Strings(files)
+ return files
+}
+
// makeVVs simplifies creating variableValue:
// "unbound" => unbound
// "123" => int(123) | Fix processing of file deps from relative includes.
R=<EMAIL>
BUG=<I>
Review URL: <URL> | luci_luci-go | train |
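An aside on the fix above: it turns on the argument order of Go's filepath.Rel(basepath, targpath), which answers "starting from basepath, how do I reach targpath?". A minimal sketch of why the swap matters — the directory names below are invented for illustration and are not taken from the isolate code:

package main

import (
	"fmt"
	"path/filepath"
)

func main() {
	// Rel(basepath, targpath) returns the path to prepend to entries that
	// are relative to targpath so they resolve correctly from basepath.
	left, _ := filepath.Rel("/work/left", "/work/right")  // "../right"
	right, _ := filepath.Rel("/work/right", "/work/left") // "../left"
	fmt.Println(left, right)
}

With the arguments reversed, the rebase path points the opposite way, which is what the one-line change above corrects.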
3e067c0c103c7633e7d71a7bd5b05a69928494de | diff --git a/UploadBehavior.php b/UploadBehavior.php
index <HASH>..<HASH> 100644
--- a/UploadBehavior.php
+++ b/UploadBehavior.php
@@ -112,10 +112,11 @@ class UploadBehavior extends \yii\base\Behavior
/**
* Save uploaded file into [[$uploadPath]]
+ * @param boolean $deleteOldFile If true and file exists, file will be deleted.
* @return boolean|null if success return true, fault return false.
* Return null mean no uploaded file.
*/
- public function saveUploadedFile()
+ public function saveUploadedFile($deleteOldFile = false)
{
/* @var $file UploadedFile */
$file = $this->{$this->attribute};
@@ -123,7 +124,11 @@ class UploadBehavior extends \yii\base\Behavior
$model = FileModel::saveAs($file, $this->uploadPath, $this->directoryLevel);
if ($model) {
if ($this->savedAttribute !== null) {
+ $oldId = $this->owner->{$this->savedAttribute};
$this->owner->{$this->savedAttribute} = $model->id;
+ if ($deleteOldFile && ($oldModel = FileModel::findOne($oldId)) !== null) {
+ $oldModel->delete();
+ }
}
return true;
} | Ability to delete old file when reupload file | mdmsoft_yii2-upload-file | train |
760caa10aabfd4540b53827584e35e630030c0d6 | diff --git a/src/canvas/view/CanvasView.js b/src/canvas/view/CanvasView.js
index <HASH>..<HASH> 100644
--- a/src/canvas/view/CanvasView.js
+++ b/src/canvas/view/CanvasView.js
@@ -238,6 +238,15 @@ module.exports = Backbone.View.extend({
cursor: -webkit-grabbing;
}
+ .${ppfx}is__grabbing {
+ overflow-x: hidden;
+ }
+
+ .${ppfx}is__grabbing,
+ .${ppfx}is__grabbing * {
+ cursor: grabbing !important;
+ }
+
${conf.canvasCss || ''}
${conf.protectedCss || ''}
`;
diff --git a/src/commands/view/ComponentDrag.js b/src/commands/view/ComponentDrag.js
index <HASH>..<HASH> 100644
--- a/src/commands/view/ComponentDrag.js
+++ b/src/commands/view/ComponentDrag.js
@@ -416,10 +416,10 @@ module.exports = {
toggleDrag(enable) {
const { ppfx, editor } = this;
const methodCls = enable ? 'add' : 'remove';
- const canvas = this.getCanvas();
const classes = [`${ppfx}is__grabbing`];
const { Canvas } = editor;
- classes.forEach(cls => canvas.classList[methodCls](cls));
+ const body = Canvas.getBody();
+ classes.forEach(cls => body.classList[methodCls](cls));
Canvas[enable ? 'startAutoscroll' : 'stopAutoscroll']();
}
};
diff --git a/src/styles/scss/_gjs_canvas.scss b/src/styles/scss/_gjs_canvas.scss
index <HASH>..<HASH> 100644
--- a/src/styles/scss/_gjs_canvas.scss
+++ b/src/styles/scss/_gjs_canvas.scss
@@ -122,7 +122,7 @@ $guide_pad: 5px;
&#{gjs-is(grab)},
&#{gjs-is(grabbing)} {
.#{$cv-prefix}canvas__frames {
- pointer-events: none;
+ pointer-events: none; // Need this in multi-frame mode
}
} | Append grabbing class in iframe | artf_grapesjs | train |
c20fff55a6dfe701f524dceeda93cac8c9eb1042 | diff --git a/fs.go b/fs.go
index <HASH>..<HASH> 100644
--- a/fs.go
+++ b/fs.go
@@ -7,6 +7,7 @@ import (
"html"
"io"
"mime"
+ "net/http"
"os"
"path/filepath"
"sort"
@@ -836,8 +837,17 @@ func (h *fsHandler) newFSFile(f *os.File, fileInfo os.FileInfo, compressed bool)
return nil, fmt.Errorf("too big file: %d bytes", n)
}
+ // detect content-type
ext := fileExtension(fileInfo.Name(), compressed)
contentType := mime.TypeByExtension(ext)
+ if len(contentType) == 0 {
+ data := make([]byte, 512)
+ n, err := f.ReadAt(data, 0)
+ if err != nil && err != io.EOF {
+ return nil, fmt.Errorf("cannot read header of the file %q: %s", f.Name(), err)
+ }
+ contentType = http.DetectContentType(data[:n])
+ }
lastModified := fileInfo.ModTime()
ff := &fsFile{ | FS: detect file content-type with net/http.DetectContentType if the type cannot be determined by file extension | valyala_fasthttp | train |
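The fallback described in the message above — sniff the first bytes when the extension lookup yields nothing — can be sketched with the standard library alone. A minimal, hedged example (the file name is a placeholder; fasthttp's real implementation lives inside the fsHandler shown in the hunk):

package main

import (
	"fmt"
	"io"
	"mime"
	"net/http"
	"os"
	"path/filepath"
)

// contentTypeFor tries the file extension first, then falls back to
// sniffing the first 512 bytes, the most http.DetectContentType inspects.
func contentTypeFor(f *os.File) (string, error) {
	if ct := mime.TypeByExtension(filepath.Ext(f.Name())); ct != "" {
		return ct, nil
	}
	buf := make([]byte, 512)
	n, err := f.ReadAt(buf, 0)
	if err != nil && err != io.EOF {
		return "", err
	}
	return http.DetectContentType(buf[:n]), nil
}

func main() {
	f, err := os.Open("somefile") // placeholder path
	if err != nil {
		fmt.Println(err)
		return
	}
	defer f.Close()

	ct, err := contentTypeFor(f)
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(ct)
}

http.DetectContentType never returns an empty string (it falls back to "application/octet-stream"), so callers get a usable value even for unknown content.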
64a2a0ca3b74c79de92e2566b0af6a4912c5cf54 | diff --git a/test/index_test.rb b/test/index_test.rb
index <HASH>..<HASH> 100644
--- a/test/index_test.rb
+++ b/test/index_test.rb
@@ -289,6 +289,31 @@ class IndexMergeFileTest < Rugged::TestCase
assert_equal merge_file_result[:path], "conflicts-one.txt"
assert_equal merge_file_result[:data], "<<<<<<< ours\nThis is most certainly a conflict!\n=======\nThis is a conflict!!!\n>>>>>>> theirs\n"
end
+
+ def test_merge_file_without_ancestor
+ # remove the stage 1 (ancestor), this is now an add/add conflict
+ @repo.index.remove("conflicts-one.txt", 1)
+ merge_file_result = @repo.index.merge_file("conflicts-one.txt", our_label: "ours", their_label: "theirs")
+ assert !merge_file_result[:automergeable]
+ assert_equal merge_file_result[:path], "conflicts-one.txt"
+ assert_equal merge_file_result[:data], "<<<<<<< ours\nThis is most certainly a conflict!\n=======\nThis is a conflict!!!\n>>>>>>> theirs\n"
+ end
+
+ def test_merge_file_without_ours
+ # turn this into a modify/delete conflict
+ @repo.index.remove("conflicts-one.txt", 2)
+ assert_raises RuntimeError do
+ @repo.index.merge_file("conflicts-one.txt", our_label: "ours", their_label: "theirs")
+ end
+ end
+
+ def test_merge_file_without_theirs
+ # turn this into a modify/delete conflict
+ @repo.index.remove("conflicts-one.txt", 3)
+ assert_raises RuntimeError do
+ @repo.index.merge_file("conflicts-one.txt", our_label: "ours", their_label: "theirs")
+ end
+ end
end
class IndexRepositoryTest < Rugged::TestCase | Test `Index.merge_file` with missing sides
`Index.merge_file` produces the conflict file for a given conflict.
This can be produced for add/add conflicts (that are missing ancestors)
but - by definition - cannot be produced for edit/delete conflicts.
(Git simply leaves the edited file in the working tree without conflict
markup.)
Validate that we handle this case correctly. | libgit2_rugged | train |
41755553ca6527889618bc9ac2f75bc575a004ef | diff --git a/lib/specinfra/command/ubuntu/base/ppa.rb b/lib/specinfra/command/ubuntu/base/ppa.rb
index <HASH>..<HASH> 100644
--- a/lib/specinfra/command/ubuntu/base/ppa.rb
+++ b/lib/specinfra/command/ubuntu/base/ppa.rb
@@ -1,11 +1,11 @@
class Specinfra::Command::Ubuntu::Base::Ppa < Specinfra::Command::Debian::Base::Ppa
class << self
def check_exists(package)
- %Q{find /etc/apt/ -name \*.list | xargs grep -o "deb http://ppa.launchpad.net/#{to_apt_line_uri(package)}"}
+ %Q{find /etc/apt/ -name \*.list | xargs grep -o "deb +http://ppa.launchpad.net/#{to_apt_line_uri(package)}"}
end
def check_is_enabled(package)
- %Q{find /etc/apt/ -name \*.list | xargs grep -o "^deb http://ppa.launchpad.net/#{to_apt_line_uri(package)}"}
+ %Q{find /etc/apt/ -name \*.list | xargs grep -o "^deb +http://ppa.launchpad.net/#{to_apt_line_uri(package)}"}
end
private | Wildcard whitespace in repository regex
The chef "apt" cookbook adds repositories with extra spaces. Since the size of the whitespace isn't important we need to wildcard it. | mizzy_specinfra | train |