hash | diff | message | project | split
---|---|---|---|---|
97e943ecf7cc05efd9104a1cd56b4edbf6805778 | diff --git a/src/Lodge/Postcode/Postcode.php b/src/Lodge/Postcode/Postcode.php
index <HASH>..<HASH> 100644
--- a/src/Lodge/Postcode/Postcode.php
+++ b/src/Lodge/Postcode/Postcode.php
@@ -1,9 +1,19 @@
<?php namespace Lodge\Postcode;
-
class Postcode {
+ protected $apiKey;
+ protected $postcode;
+ protected $country;
+
+ public function __constructor($apiKey = null) {
+ $this->apiKey = $apiKey;
+ }
public function getCoordinates($address)
{
+ if (!empty($this->country)) {
+ $address = trim($address) . ' ' . $this->country;
+ }
+
// Sanitize the address:
$search_code = urlencode($address);
@@ -11,14 +21,7 @@ class Postcode {
$url = 'https://maps.googleapis.com/maps/api/geocode/json?address=' . $search_code . '&sensor=false';
// If Google Maps API fails, catch it and throw a better error
- try
- {
- $json = json_decode(file_get_contents($url));
- }
- catch(\Exception $e)
- {
- throw new ServiceUnavailableException;
- }
+ $json = $this->callGoogleApi($url);
if(!empty($json->results))
{
@@ -35,6 +38,12 @@ class Postcode {
return array();
}
+ public function setCountry($country = null) {
+ $this->country = $country;
+
+ return $this;
+ }
+
public function mutatePostcode($postcode)
{
// Ensure the postcode is all upper case with no spaces
@@ -55,7 +64,8 @@ class Postcode {
// A second call will now retrieve the address
$address_url = 'https://maps.googleapis.com/maps/api/geocode/json?latlng=' . $coords['latitude'] . ',' . $coords['longitude'] . '&sensor=false';
- $address_json = json_decode(file_get_contents($address_url));
+
+ $address_json = $this->callGoogleApi($address_url);
// The correct result is not always the first one, so loop through results here
foreach($address_json->results as $current_address)
@@ -136,4 +146,45 @@ class Postcode {
return $array;
}
-}
+ public function getApiKey() {
+ return !empty($this->apiKey) ? $this->apiKey : null;
+ }
+
+ public function setApiKey($apiKey = null) {
+ $this->apiKey = $apiKey;
+
+ return $this;
+ }
+
+ private function callGoogleApi($url)
+ {
+ $url = $this->addApiKeyToUrl($url);
+
+ try
+ {
+ $json = json_decode(file_get_contents($url));
+ }
+ catch(\Exception $e)
+ {
+ throw new ServiceUnavailableException;
+ }
+
+ $this->checkApiError($json);
+
+ return $json;
+ }
+
+ private function addApiKeyToUrl($url)
+ {
+ return ($api_key = $this->getApiKey()) ?
+ $url . $api_key :
+ $url;
+ }
+
+ private function checkApiError($json)
+ {
+ if (property_exists($json, 'error_message'))
+ throw new ServiceUnavailableException($json->error_message);
+ }
+
+}
\ No newline at end of file | Added support for API key in constructor, and setting country | Daursu_postcode-lookup | train |
04f994fece02b1acc69685d70de2a415c6424621 | diff --git a/utils.py b/utils.py
index <HASH>..<HASH> 100644
--- a/utils.py
+++ b/utils.py
@@ -92,7 +92,7 @@ def _get_legacy_bibdocs(recid, filename=None):
(bibdoc.fullpath, bibdoc.is_restricted(current_user))
for bibdoc in BibRecDocs(recid).list_latest_files(list_hidden=False)
if not bibdoc.subformat and not filename or
- bibdoc.name + bibdoc.superformat == filename
+ bibdoc.get_full_name() == filename
]
@@ -122,7 +122,7 @@ def _parse_legacy_syntax(identifier):
`recid:{recid}-{filename}`.
"""
if '-' in identifier:
- record_id, filename = identifier.split('recid:')[1].split('-')
+ record_id, filename = identifier.split('recid:')[1].split('-', 1)
else:
record_id, filename = identifier.split('recid:')[1], None
return record_id, filename | documents: improvement of legacy filename handling
* Improves support in filenames with dashes `-` on legacy
`BibDocFile`. | inveniosoftware-attic_invenio-documents | train |
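The one-character change above (`split('-')` to `split('-', 1)`) is the whole fix: Python's `maxsplit` argument stops splitting after the first dash, so dashes inside the filename survive. A minimal standalone sketch of the same parsing rule, with hypothetical identifiers:

```python
def parse_legacy_identifier(identifier):
    """Parse 'recid:{recid}-{filename}' identifiers.

    maxsplit=1 splits on the first dash only, so any further
    dashes stay inside the filename (the bug this commit fixes).
    """
    rest = identifier.split('recid:')[1]
    if '-' in rest:
        record_id, filename = rest.split('-', 1)
    else:
        record_id, filename = rest, None
    return record_id, filename


assert parse_legacy_identifier('recid:12-my-file-v2.pdf') == ('12', 'my-file-v2.pdf')
assert parse_legacy_identifier('recid:12') == ('12', None)
```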
18f5cc80e6260e67be22dac472f335b2ec22b1c6 | diff --git a/testsuite/manualmode/src/test/java/org/jboss/as/test/manualmode/management/cli/CLIEmbedServerTestCase.java b/testsuite/manualmode/src/test/java/org/jboss/as/test/manualmode/management/cli/CLIEmbedServerTestCase.java
index <HASH>..<HASH> 100644
--- a/testsuite/manualmode/src/test/java/org/jboss/as/test/manualmode/management/cli/CLIEmbedServerTestCase.java
+++ b/testsuite/manualmode/src/test/java/org/jboss/as/test/manualmode/management/cli/CLIEmbedServerTestCase.java
@@ -117,6 +117,10 @@ public class CLIEmbedServerTestCase extends AbstractCliTestBase {
@BeforeClass
public static void beforeClass() throws Exception {
+ // Initialize the log manager before the STDIO context is initialized. This ensures that any capturing of the
+ // standard output streams in the log manager is done before they are replaced by the stdio context.
+ Class.forName("org.jboss.logmanager.LogManager", true, CLIEmbedServerTestCase.class.getClassLoader());
+
CLIEmbedUtil.copyConfig(ROOT, "standalone", "logging.properties", "logging.properties.backup", false);
// Set up ability to manipulate stdout | [WFCORE-<I>] Initialize the log manager early for the CLIEmbededServerTestCase to ensure early initialization doesn't break replacing stdout. | wildfly_wildfly-core | train |
6e5cdf8515fa6bb5331ffe712a857cb46b0c701e | diff --git a/src/util/application.rb b/src/util/application.rb
index <HASH>..<HASH> 100644
--- a/src/util/application.rb
+++ b/src/util/application.rb
@@ -3,8 +3,9 @@
require 'sbsm/drbserver'
require 'state/global'
-require 'util/transaction'
+require 'util/polling_manager'
require 'util/session'
+require 'util/transaction'
require 'util/validator'
require 'odba' | ChangeSet <I>: require statement added
src/util/application.rb: minor change | zdavatz_xmlconv | train |
3d1299c5d302b7242d70103c1fc015b2baaf5776 | diff --git a/guava/src/com/google/common/collect/MapMaker.java b/guava/src/com/google/common/collect/MapMaker.java
index <HASH>..<HASH> 100644
--- a/guava/src/com/google/common/collect/MapMaker.java
+++ b/guava/src/com/google/common/collect/MapMaker.java
@@ -79,12 +79,15 @@ import javax.annotation.Nullable;
* created, it is undefined which of these changes, if any, are reflected in that iterator. These
* iterators never throw {@link ConcurrentModificationException}.
*
- * <p>If soft or weak references were requested, it is possible for a key or value present in the
- * map to be reclaimed by the garbage collector. If this happens, the entry automatically
- * disappears from the map. A partially-reclaimed entry is never exposed to the user. Any {@link
- * java.util.Map.Entry} instance retrieved from the map's {@linkplain Map#entrySet entry set} is a
- * snapshot of that entry's state at the time of retrieval; such entries do, however, support {@link
- * java.util.Map.Entry#setValue}, which simply calls {@link Map#put} on the entry's key.
+ * <p>If {@link #weakKeys}, {@link #weakValues}, or {@link #softValues} are requested, it is
+ * possible for a key or value present in the map to be reclaimed by the garbage collector. Entries
+ * with reclaimed keys or values may be removed from the map on each map modification or on
+ * occasional map accesses; such entries may be counted by {@link Map#size}, but will never be
+ * visible to read or write operations. A partially-reclaimed entry is never exposed to the user.
+ * Any {@link java.util.Map.Entry} instance retrieved from the map's
+ * {@linkplain Map#entrySet entry set} is a snapshot of that entry's state at the time of
+ * retrieval; such entries do, however, support {@link java.util.Map.Entry#setValue}, which simply
+ * calls {@link Map#put} on the entry's key.
*
* <p>The maps produced by {@code MapMaker} are serializable, and the deserialized maps retain all
* the configuration properties of the original map. During deserialization, if the original map had | Clarify the behavior of Map.size() in a MapMaker-built Map with weak or soft references, borrowing phrasing from CacheBuilder.
Motivated by <URL> | google_guava | train |
5c4dfa80dcc13ced99dfda4b325e8ee5ae27c567 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -26,6 +26,7 @@ class build_ext_subclass(build_ext):
self.cfitsio_dir = 'cfitsio%s' % self.cfitsio_version
self.cfitsio_build_dir = os.path.join('build', self.cfitsio_dir)
self.cfitsio_zlib_dir = os.path.join(self.cfitsio_build_dir,'zlib')
+
build_ext.initialize_options(self)
self.link_objects = []
self.extra_link_args = []
@@ -34,8 +35,6 @@ class build_ext_subclass(build_ext):
build_ext.finalize_options(self)
- self.force = True
-
if self.use_system_fitsio:
# Include bz2 by default? Depends on how system cfitsio was built.
# FIXME: use pkg-config to tell if bz2 shall be included ?
@@ -71,6 +70,11 @@ class build_ext_subclass(build_ext):
self.compiler.set_link_objects(link_objects)
+ # Ultimate hack: append the .a files to the dependency list
+ # so they will be properly rebuild if cfitsio source is updated.
+ for ext in self.extensions:
+ ext.depends += link_objects
+
# call the original build_extensions
build_ext.build_extensions(self) | Properly handle the dependency on libfitsio.a, via a hack. | esheldon_fitsio | train |
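The "ultimate hack" works because distutils rebuilds an extension whenever any path listed in `ext.depends` is newer than the built module, so appending the static libraries makes an updated cfitsio force a relink. A minimal sketch of the pattern, with a hypothetical library path:

```python
from setuptools.command.build_ext import build_ext


class build_ext_with_static_libs(build_ext):
    def build_extensions(self):
        # Hypothetical vendored static library built by an earlier step.
        link_objects = ['build/cfitsio/libcfitsio.a']
        self.compiler.set_link_objects(link_objects)

        # distutils compares mtimes of ext.depends against the output,
        # so listing the .a files here triggers a rebuild when they change.
        for ext in self.extensions:
            ext.depends += link_objects

        build_ext.build_extensions(self)
```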
a3ef36bb72d8dc22ebf6faf1e1ef2c38509a0b15 | diff --git a/run_tests.py b/run_tests.py
index <HASH>..<HASH> 100755
--- a/run_tests.py
+++ b/run_tests.py
@@ -59,6 +59,7 @@ if not settings.configured:
'django.core.context_processors.request',
'timepiece.context_processors.quick_search',
'timepiece.context_processors.extra_nav',
+ 'timepiece.context_processors.active_entries',
),
TEMPLATE_DIRS=(
'%s/example_project/templates' % directory, | Added missing context processor to settings in run_tests.py | caktus_django-timepiece | train |
36641ce4131753f8e353dbb94705af8b19264f61 | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -1,5 +1,9 @@
-webforge-common
-===============
+# webforge-common
-Boilerplate for Webforge and Psc - CMS
+Boilerplate for Webforge and Psc - CMS
This small library should cover all common code between webforge and Psc - CMS, to have a small set from classes that are sufficient to start off a litte project.
+
+## todo
+
+- Put a lot of DateTime / Date / Time testdata into webforge-testdata and refactor the DateTime libs to 100% code-coverage
+- Find a better solution for the Common\Util debugging tool (like Doctrines dump, but in nicer)
\ No newline at end of file
diff --git a/lib/Webforge/Common/DateTime/TranslationEN.php b/lib/Webforge/Common/DateTime/TranslationEN.php
index <HASH>..<HASH> 100644
--- a/lib/Webforge/Common/DateTime/TranslationEN.php
+++ b/lib/Webforge/Common/DateTime/TranslationEN.php
@@ -38,7 +38,7 @@ class TranslationEN extends Translation {
4=>'Th',
5=>'Fr',
6=>'Sa',
- 0=>'So',
+ 0=>'Su',
);
public $monthsAbbrev = array(
diff --git a/tests/Webforge/Common/DateTime/DateTimeTest.php b/tests/Webforge/Common/DateTime/DateTimeTest.php
index <HASH>..<HASH> 100644
--- a/tests/Webforge/Common/DateTime/DateTimeTest.php
+++ b/tests/Webforge/Common/DateTime/DateTimeTest.php
@@ -2,7 +2,58 @@
namespace Webforge\Common\DateTime;
+/**
+ * put test data into testdata repository
+ */
class DateTimeTest extends \Webforge\Common\TestCase {
+
+ /**
+ * @dataProvider i18nFormats
+ */
+ public function testI18nFormat($expectedFormat, $date, $formatString, $lang = 'en') {
+ $this->assertEquals($expectedFormat, $date->i18n_format($formatString, $lang));
+ }
+
+ public function i18nFormats() {
+ $tests = array();
+
+ // in php this is the three chars abbrev!
+ // there is no abbrev for 2 digits
+ $tests[] = array('Mo', new DateTime('21.03.2011'), 'D');
+ $tests[] = array('Tu', new DateTime('22.03.2011'), 'D');
+ $tests[] = array('We', new DateTime('23.03.2011'), 'D');
+ $tests[] = array('Th', new DateTime('24.03.2011'), 'D');
+ $tests[] = array('Fr', new DateTime('25.03.2011'), 'D');
+ $tests[] = array('Sa', new DateTime('26.03.2011'), 'D');
+ $tests[] = array('Su', new DateTime('27.03.2011'), 'D');
+
+ $tests[] = array('Monday', new DateTime('21.03.2011'), 'l');
+ $tests[] = array('Tuesday', new DateTime('22.03.2011'), 'l');
+ $tests[] = array('Wednesday', new DateTime('23.03.2011'), 'l');
+ $tests[] = array('Thursday', new DateTime('24.03.2011'), 'l');
+ $tests[] = array('Friday', new DateTime('25.03.2011'), 'l');
+ $tests[] = array('Saturday', new DateTime('26.03.2011'), 'l');
+ $tests[] = array('Sunday', new DateTime('27.03.2011'), 'l');
+
+ $tests[] = array('Mo', new DateTime('21.03.2011'), 'D', 'de');
+ $tests[] = array('Di', new DateTime('22.03.2011'), 'D', 'de');
+ $tests[] = array('Mi', new DateTime('23.03.2011'), 'D', 'de');
+ $tests[] = array('Do', new DateTime('24.03.2011'), 'D', 'de');
+ $tests[] = array('Fr', new DateTime('25.03.2011'), 'D', 'de');
+ $tests[] = array('Sa', new DateTime('26.03.2011'), 'D', 'de');
+ $tests[] = array('So', new DateTime('27.03.2011'), 'D', 'de');
+
+ $tests[] = array('Montag', new DateTime('21.03.2011'), 'l', 'de');
+ $tests[] = array('Dienstag', new DateTime('22.03.2011'), 'l', 'de');
+ $tests[] = array('Mittwoch', new DateTime('23.03.2011'), 'l', 'de');
+ $tests[] = array('Donnerstag',new DateTime('24.03.2011'), 'l', 'de');
+ $tests[] = array('Freitag', new DateTime('25.03.2011'), 'l', 'de');
+ $tests[] = array('Samstag', new DateTime('26.03.2011'), 'l', 'de');
+ $tests[] = array('Sonntag', new DateTime('27.03.2011'), 'l', 'de');
+
+ return $tests;
+ }
+
public function testYesterday() {
$now = time();
diff --git a/tests/Webforge/Common/UtilTest.php b/tests/Webforge/Common/UtilTest.php
index <HASH>..<HASH> 100644
--- a/tests/Webforge/Common/UtilTest.php
+++ b/tests/Webforge/Common/UtilTest.php
@@ -40,6 +40,14 @@ class UtilTest extends TestCase {
);
$tests[] = array(
+ true
+ );
+
+ $tests[] = array(
+ false
+ );
+
+ $tests[] = array(
0.17
);
@@ -47,6 +55,7 @@ class UtilTest extends TestCase {
new TestValueObject('v1', 'v2')
);
+ // how can we create a resource type simple?
return $tests;
} | a better start for datetime tests | webforge-labs_webforge-common | train |
92a3d6c9c97bc6fdef7ae5c71dfd62e9ffd2ce0a | diff --git a/hydpy/core/variabletools.py b/hydpy/core/variabletools.py
index <HASH>..<HASH> 100644
--- a/hydpy/core/variabletools.py
+++ b/hydpy/core/variabletools.py
@@ -157,6 +157,8 @@ def _compare_variables_function_generator(
"""
def comparison_function(self, other):
"""Wrapper for comparison functions for class |Variable|."""
+ if self is other:
+ return method_string in ('__eq__', '__le__', '__ge__')
method = getattr(self.value, method_string)
try:
if isinstance(other, abctools.VariableABC): | Improve the performance of comparisons between identical `Variable` objects (module `variabletools`). | hydpy-dev_hydpy | train |
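The added guard relies on a reflexivity shortcut: any object compares equal to itself, so for identical operands `==`, `<=`, and `>=` are true and the rest false, and the value lookup can be skipped entirely. A stripped-down Python sketch of the generator:

```python
def _make_comparison(method_string):
    """Build a rich-comparison method with an identity fast path."""
    def comparison(self, other):
        if self is other:
            # x == x, x <= x, x >= x hold by reflexivity; <, >, != do not.
            return method_string in ('__eq__', '__le__', '__ge__')
        return getattr(self.value, method_string)(other)
    return comparison


class Variable:
    def __init__(self, value):
        self.value = value

    __eq__ = _make_comparison('__eq__')
    __lt__ = _make_comparison('__lt__')


v = Variable(3.0)
assert v == v            # identity path, self.value never touched
assert not (v < v)
assert Variable(1.0) < 2.0
```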
39e1422a38eb4d11157066d3e2b1c3b9503f984e | diff --git a/src/base/datastorage.js b/src/base/datastorage.js
index <HASH>..<HASH> 100644
--- a/src/base/datastorage.js
+++ b/src/base/datastorage.js
@@ -340,20 +340,26 @@ export class Storage {
}, []);
// get min/max for the filtered rows
- let min;
- let max;
+ let min = filtered[0];
+ let max = min;
+ let minAbsNear0 = Math.abs(filtered.find(v => v != 0));
const limits = {};
for (let i = 0; i < filtered.length; i += 1) {
const c = filtered[i];
- if (typeof min === "undefined" || c < min) {
+ if (c < min) {
min = c;
- }
- if (typeof max === "undefined" || c > max) {
+ } else if (c > max) {
max = c;
}
+ if (c > 0 && c < minAbsNear0) {
+ minAbsNear0 = c;
+ } else if (c < 0 && c > -minAbsNear0) {
+ minAbsNear0 = -c;
+ }
}
limits.min = min || 0;
limits.max = max || 100;
+ limits.minAbsNear0 = minAbsNear0 || (min === 0 ? max : min);
return limits;
}
@@ -425,7 +431,7 @@ export class Storage {
getFrames(dataId, framesArray, keys, conceptprops) {
const _this = this;
//if(dataId === false) return Promise.resolve([]);
-
+
const whatId = getCacheKey(dataId, framesArray, keys);
if (!this._collectionPromises[dataId][whatId]) {
this._collectionPromises[dataId][whatId] = {
@@ -451,7 +457,7 @@ export class Storage {
getFrame(dataId, framesArray, neededFrame, keys) {
const _this = this;
//if(dataId === false) return Promise.resolve([]);
-
+
const whatId = getCacheKey(dataId, framesArray, keys);
return new Promise((resolve, reject) => {
if (_this._collection[dataId]["frames"][whatId] && _this._collection[dataId]["frames"][whatId][neededFrame]) {
diff --git a/src/components/colorlegend/colorlegend.js b/src/components/colorlegend/colorlegend.js
index <HASH>..<HASH> 100644
--- a/src/components/colorlegend/colorlegend.js
+++ b/src/components/colorlegend/colorlegend.js
@@ -395,8 +395,8 @@ const ColorLegend = Component.extend({
const labelScaleType = (d3.min(domain) <= 0 && d3.max(domain) >= 0 && this.colorModel.scaleType === "log") ? "genericLog" : this.colorModel.scaleType;
- this.labelScale = d3[`scale${utils.capitalize(labelScaleType === "time" ? "linear" : labelScaleType)}`]()
- .domain(domain)
+ this.labelScale = cScale.copy()
+ .interpolate(d3.interpolate)
.range(range);
const marginLeft = parseInt(this.rainbowEl.style("left"), 10) || 0;
@@ -452,7 +452,7 @@ const ColorLegend = Component.extend({
if (this.rainbowLegendEl.style("display") !== "none") {
const edgeDomain = d3.extent(domain);
- this.domainScale = d3[`scale${utils.capitalize(labelScaleType === "time" ? "linear" : labelScaleType)}`]()
+ this.domainScale = this.labelScale.copy()
.domain(edgeDomain)
.range(edgeDomain);
diff --git a/src/helpers/d3.genericLogScale.js b/src/helpers/d3.genericLogScale.js
index <HASH>..<HASH> 100644
--- a/src/helpers/d3.genericLogScale.js
+++ b/src/helpers/d3.genericLogScale.js
@@ -16,7 +16,7 @@ export default function genericLog() {
return scaleDomain();
}
const min = d3.min(abs(domain).filter(val => !!val));
- if (min) scale.constant(Math.min(defaultConstant, min / 100));
+ //if (min) scale.constant(Math.min(defaultConstant, min / 100));
return scaleDomain(domain);
};
diff --git a/src/models/color.js b/src/models/color.js
index <HASH>..<HASH> 100644
--- a/src/models/color.js
+++ b/src/models/color.js
@@ -312,9 +312,9 @@ const ColorModel = Hook.extend({
} else if (!this.isDiscrete()) {
- let limits = this.getLimits(this.which);
+ const limitsObj = this.getLimits(this.which);
//default domain is based on limits
- limits = [limits.min, limits.max];
+ const limits = [limitsObj.min, limitsObj.max];
const singlePoint = (limits[1] - limits[0] == 0);
@@ -327,7 +327,8 @@ const ColorModel = Hook.extend({
if (scaleType === "log" || scaleType === "genericLog") {
const s = d3.scaleGenericlog()
.domain(limits)
- .range(limits);
+ .range(limits)
+ .constant(limitsObj.minAbsNear0);
domain = domain.map(d => s.invert(d));
}
@@ -336,6 +337,9 @@ const ColorModel = Hook.extend({
.range(range)
.interpolate(d3.interpolateRgb.gamma(2.2));
+ if (scale.constant) {
+ scale.constant(limitsObj.minAbsNear0);
+ }
} else {
range = range.map(m => utils.isArray(m) ? m[0] : m); | add minAbsNear0 to hook getLimits() result. Use it to get correct "symlog" linear part near 0 | vizabi_vizabi | train |
f0de349575ca7c30f05a6840f8bba18c6b95ca83 | diff --git a/db.go b/db.go
index <HASH>..<HASH> 100755
--- a/db.go
+++ b/db.go
@@ -216,7 +216,10 @@ func ListColumnFamilies(opts *Options, name string) ([]string, error) {
}
namesLen := int(cLen)
names := make([]string, namesLen)
- cNamesArr := (*[1 << 30]*C.char)(unsafe.Pointer(cNames))[:namesLen:namesLen]
+ // The maximum capacity of the following two slices is limited to (2^29)-1 to remain compatible
+ // with 32-bit platforms. The size of a `*C.char` (a pointer) is 4 Byte on a 32-bit system
+ // and (2^29)*4 == math.MaxInt32 + 1. -- See issue golang/go#13656
+ cNamesArr := (*[(1 << 29) - 1]*C.char)(unsafe.Pointer(cNames))[:namesLen:namesLen]
for i, n := range cNamesArr {
names[i] = C.GoString(n)
} | Workaround for Linux <I>-bit build | tecbot_gorocksdb | train |
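The cap in the Go workaround is plain address arithmetic: a Go array type's total byte size must fit in a signed 32-bit int on 32-bit platforms, and a `*C.char` pointer is 4 bytes there. Checking the numbers from the comment:

```python
MAX_INT32 = 2**31 - 1        # math.MaxInt32
POINTER_SIZE = 4             # bytes per *C.char on a 32-bit system

# The old [1 << 30]*C.char type would span 4 GiB, which overflows:
assert (1 << 30) * POINTER_SIZE > MAX_INT32
# (2**29) - 1 elements is the largest count that still fits:
assert ((1 << 29) - 1) * POINTER_SIZE <= MAX_INT32
assert (1 << 29) * POINTER_SIZE == MAX_INT32 + 1   # the boundary case
```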
a87abdfb883a49e815b000d2ddeb493e3deb0966 | diff --git a/rsocket-core/src/main/java/io/rsocket/core/LeaseSpec.java b/rsocket-core/src/main/java/io/rsocket/core/LeaseSpec.java
index <HASH>..<HASH> 100644
--- a/rsocket-core/src/main/java/io/rsocket/core/LeaseSpec.java
+++ b/rsocket-core/src/main/java/io/rsocket/core/LeaseSpec.java
@@ -38,7 +38,7 @@ public final class LeaseSpec {
* no leases is available
*/
public LeaseSpec maxPendingRequests(int maxPendingRequests) {
- this.maxPendingRequests = 0;
+ this.maxPendingRequests = maxPendingRequests;
return this;
}
} | fixes typo in LeaseSpec initialisation (#<I>) | rsocket_rsocket-java | train |
1e4cd047f483b2eb433b022278f1d5aad5edd1db | diff --git a/src/Models/Attributes/MbString.php b/src/Models/Attributes/MbString.php
index <HASH>..<HASH> 100644
--- a/src/Models/Attributes/MbString.php
+++ b/src/Models/Attributes/MbString.php
@@ -47,7 +47,7 @@ class MbString
*
* @return string[]
*/
- public static function str_split($value)
+ public static function split($value)
{
return preg_split('/(?<!^)(?!$)/u', $value);
}
diff --git a/src/Models/Attributes/TSProperty.php b/src/Models/Attributes/TSProperty.php
index <HASH>..<HASH> 100644
--- a/src/Models/Attributes/TSProperty.php
+++ b/src/Models/Attributes/TSProperty.php
@@ -283,7 +283,7 @@ class TSProperty
// For a string, we just split the chars. This seems
// to be the easiest way to handle UTF-8 characters
// instead of trying to work with their hex values.
- $chars = $string ? MbString::str_split($value) : array_reverse(str_split($this->dec2hex($value, 8), 2));
+ $chars = $string ? MbString::split($value) : array_reverse(str_split($this->dec2hex($value, 8), 2));
$encoded = ''; | Renamed str_split to strSplit to conform with code style. | Adldap2_Adldap2 | train |
7d11c9b3d832666d5abb87b34e535c3ac4938aa1 | diff --git a/resolwe/flow/views/collection.py b/resolwe/flow/views/collection.py
index <HASH>..<HASH> 100644
--- a/resolwe/flow/views/collection.py
+++ b/resolwe/flow/views/collection.py
@@ -41,7 +41,9 @@ class CollectionViewSet(ElasticSearchCombinedViewSet,
permission_classes = (get_permissions_class(),)
document_class = CollectionDocument
- filtering_fields = ('id', 'slug', 'name', 'created', 'modified', 'contributor', 'owners', 'text')
+ filtering_fields = (
+ 'id', 'slug', 'name', 'created', 'modified', 'contributor', 'owners', 'text', 'tags',
+ )
filtering_map = {
'name': 'name.ngrams',
'contributor': 'contributor_id',
diff --git a/resolwe/flow/views/entity.py b/resolwe/flow/views/entity.py
index <HASH>..<HASH> 100644
--- a/resolwe/flow/views/entity.py
+++ b/resolwe/flow/views/entity.py
@@ -32,7 +32,7 @@ class EntityViewSet(CollectionViewSet):
latest_date=Max('data__modified')
).order_by('-latest_date')
- filtering_fields = CollectionViewSet.filtering_fields + ('descriptor_completed', 'tags', 'collections')
+ filtering_fields = CollectionViewSet.filtering_fields + ('descriptor_completed', 'collections')
def custom_filter_tags(self, value, search):
"""Support tags query.""" | Add filter by tags to collection api endpoint | genialis_resolwe | train |
4e5c07522c3d9afc52b07389c933c5ff17c1d255 | diff --git a/num2words/lang_PT_BR.py b/num2words/lang_PT_BR.py
index <HASH>..<HASH> 100644
--- a/num2words/lang_PT_BR.py
+++ b/num2words/lang_PT_BR.py
@@ -30,9 +30,9 @@ class Num2Word_PT_BR(lang_EU.Num2Word_EU):
def setup(self):
self.negword = "menos "
- self.pointword = "ponto"
+ self.pointword = "vírgula"
self.errmsg_nornum = "Somente números podem ser convertidos para palavras"
- self.exclude_title = ["e", "ponto", "menos"]
+ self.exclude_title = ["e", "vírgula", "menos"]
self.mid_numwords = [
(1000, "mil"), (100, "cem"), (90, "noventa"),
diff --git a/tests/test_pt_BR.py b/tests/test_pt_BR.py
index <HASH>..<HASH> 100644
--- a/tests/test_pt_BR.py
+++ b/tests/test_pt_BR.py
@@ -81,18 +81,18 @@ class Num2WordsPTBRTest(TestCase):
def test_cardinal_float(self):
self.assertEquals(num2words(Decimal('1.00'), lang='pt_BR'), 'um')
- self.assertEquals(num2words(Decimal('1.01'), lang='pt_BR'), 'um ponto zero um')
- self.assertEquals(num2words(Decimal('1.035'), lang='pt_BR'), 'um ponto zero três')
- self.assertEquals(num2words(Decimal('1.35'), lang='pt_BR'), 'um ponto três cinco')
- self.assertEquals(num2words(Decimal('3.14159'), lang='pt_BR'), 'três ponto um quatro')
- self.assertEquals(num2words(Decimal('101.22'), lang='pt_BR'), 'cento e um ponto dois dois')
- self.assertEquals(num2words(Decimal('2345.75'), lang='pt_BR'), 'dois mil, trezentos e quarenta e cinco ponto sete cinco')
+ self.assertEquals(num2words(Decimal('1.01'), lang='pt_BR'), 'um vírgula zero um')
+ self.assertEquals(num2words(Decimal('1.035'), lang='pt_BR'), 'um vírgula zero três')
+ self.assertEquals(num2words(Decimal('1.35'), lang='pt_BR'), 'um vírgula três cinco')
+ self.assertEquals(num2words(Decimal('3.14159'), lang='pt_BR'), 'três vírgula um quatro')
+ self.assertEquals(num2words(Decimal('101.22'), lang='pt_BR'), 'cento e um vírgula dois dois')
+ self.assertEquals(num2words(Decimal('2345.75'), lang='pt_BR'), 'dois mil, trezentos e quarenta e cinco vírgula sete cinco')
def test_cardinal_float_negative(self):
- self.assertEquals(num2words(Decimal('-2.34'), lang='pt_BR'), 'menos dois ponto três quatro')
- self.assertEquals(num2words(Decimal('-9.99'), lang='pt_BR'), 'menos nove ponto nove nove')
- self.assertEquals(num2words(Decimal('-7.01'), lang='pt_BR'), 'menos sete ponto zero um')
- self.assertEquals(num2words(Decimal('-222.22'), lang='pt_BR'), 'menos duzentos e vinte e dois ponto dois dois')
+ self.assertEquals(num2words(Decimal('-2.34'), lang='pt_BR'), 'menos dois vírgula três quatro')
+ self.assertEquals(num2words(Decimal('-9.99'), lang='pt_BR'), 'menos nove vírgula nove nove')
+ self.assertEquals(num2words(Decimal('-7.01'), lang='pt_BR'), 'menos sete vírgula zero um')
+ self.assertEquals(num2words(Decimal('-222.22'), lang='pt_BR'), 'menos duzentos e vinte e dois vírgula dois dois')
def test_ordinal(self):
self.assertEquals(num2words(1, lang='pt_BR', ordinal=True), 'primeiro') | In Brazilian Portuguese the decimal sign is comma ',' instead of dot '.'. Ex.: we use 1,<I> instead of <I> | savoirfairelinux_num2words | train |
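With the separator fixed, the library's output matches Brazilian usage; a usage sketch taken directly from the updated tests (assuming a num2words version that includes this change):

```python
from decimal import Decimal
from num2words import num2words

# pt_BR reads the decimal separator as "vírgula" (comma), not "ponto".
assert num2words(Decimal('1.35'), lang='pt_BR') == 'um vírgula três cinco'
assert num2words(Decimal('-7.01'), lang='pt_BR') == 'menos sete vírgula zero um'
```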
96382d487f7d8cce7c94290af743f7ec1d974973 | diff --git a/trollimage/xrimage.py b/trollimage/xrimage.py
index <HASH>..<HASH> 100644
--- a/trollimage/xrimage.py
+++ b/trollimage/xrimage.py
@@ -179,20 +179,28 @@ class XRImage(object):
"""Mode of the image."""
return ''.join(self.data['bands'].values)
- def save(self, filename, fformat=None, fill_value=None, format_kw=None):
+ def save(self, filename, fformat=None, fill_value=None, compute=True,
+ format_kw=None):
"""Save the image to the given *filename*.
For some formats like jpg and png, the work is delegated to
:meth:`pil_save`, which doesn't support the *compression* option.
+
+ The `compute` keyword is only used when saving GeoTIFFS and is passed
+ directly to the `rio_save` method. See that documentation for more
+ details.
+
"""
fformat = fformat or os.path.splitext(filename)[1][1:4]
if fformat == 'tif' and rasterio:
- self.rio_save(filename, fformat, fill_value, format_kw)
+ return self.rio_save(filename, fformat=fformat,
+ fill_value=fill_value, compute=compute,
+ format_kw=format_kw)
else:
- self.pil_save(filename, fformat, fill_value, format_kw)
+ return self.pil_save(filename, fformat, fill_value, format_kw)
def rio_save(self, filename, fformat=None, fill_value=None,
- dtype=np.uint8, format_kw=None):
+ dtype=np.uint8, compute=True, format_kw=None):
"""Save the image using rasterio."""
fformat = fformat or os.path.splitext(filename)[1][1:4]
format_kw = format_kw or {}
@@ -231,7 +239,9 @@ class XRImage(object):
east, north,
width, height)
if "start_time" in data.attrs:
- new_tags = {'TIFFTAG_DATETIME': data.attrs["start_time"].strftime("%Y:%m:%d %H:%M:%S")}
+ stime = data.attrs['start_time']
+ stime_str = stime.strftime("%Y:%m:%d %H:%M:%S")
+ new_tags = {'TIFFTAG_DATETIME': stime_str}
except (KeyError, AttributeError):
logger.info("Couldn't create geotransform")
@@ -249,7 +259,7 @@ class XRImage(object):
r_file.colorinterp = color_interp(data)
r_file.rfile.update_tags(**new_tags)
- da.store(data.data, r_file, lock=True)
+ return da.store(data.data, r_file, lock=True, compute=compute)
def pil_save(self, filename, fformat=None, fill_value=None,
format_kw=None): | Add 'compute' keyword to `rio_save` for delayed dask storing | pytroll_trollimage | train |
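Threading `compute=False` through to `da.store` is what makes the save delayable: dask returns a `Delayed` object instead of writing immediately, so a caller can batch several writes into one scheduler pass. A minimal sketch with an in-memory target standing in for the rasterio wrapper:

```python
import dask.array as da
import numpy as np

data = da.zeros((4, 4), chunks=(2, 2))
target = np.empty((4, 4))   # stand-in for the rasterio file object

# compute=True (the default) writes now; compute=False defers the write.
delayed = da.store(data, target, lock=True, compute=False)
delayed.compute()           # the write happens here

assert (target == 0).all()
```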
81f599ff39c0967a304c84dbec3accb8a8e3b515 | diff --git a/tests/calc/disagg_test.py b/tests/calc/disagg_test.py
index <HASH>..<HASH> 100644
--- a/tests/calc/disagg_test.py
+++ b/tests/calc/disagg_test.py
@@ -64,6 +64,10 @@ class _BaseDisaggTestCase(unittest.TestCase):
assert tom.time_span == self.tom.time_span
return iter(self.ruptures)
+ class FailSource(FakeSource):
+ def iter_ruptures(self, tom):
+ raise ValueError('Something bad happened')
+
class FakeGSIM(object):
def __init__(self, iml, imt, truncation_level, n_epsilons,
disaggregated_poes):
@@ -315,6 +319,28 @@ class DisaggregateTestCase(_BaseDisaggTestCase):
self.assertEqual(matrix.sum(), 0)
+ def test_source_errors(self):
+ # exercise the case where an error occurs while computing on a given
+ # seismic source; in this case, we expect an error to be raised which
+ # signals the id of the source in question
+ fail_source = self.FailSource(self.source2.source_id,
+ self.source2.ruptures,
+ self.source2.tom,
+ self.source2.tectonic_region_type)
+ sources = iter([self.source1, fail_source])
+
+ with self.assertRaises(RuntimeError) as ae:
+ bin_edges, matrix = disagg.disaggregation_poissonian(
+ sources, self.site, self.imt, self.iml, self.gsims,
+ self.time_span, self.truncation_level, n_epsilons=3,
+ mag_bin_width=3, dist_bin_width=4, coord_bin_width=2.4
+ )
+ expected_error = (
+ 'An error occurred with source id=2. Error: Something bad happened'
+ )
+ self.assertEqual(expected_error, ae.exception.message)
+
+
def test_no_contributions_from_ruptures(self):
# Test that the `disaggregation` function returns `None, None` if no
# ruptures contribute to the hazard level. | tests/calcs/disagg_test:
Added `test_source_errors` to test for handling/source_id reporting in
the case of computation failures on a given source. | gem_oq-engine | train |
f434884e6aa6f5ec20fc12f763e5d68306f73cb1 | diff --git a/qtpy/QtGui.py b/qtpy/QtGui.py
index <HASH>..<HASH> 100644
--- a/qtpy/QtGui.py
+++ b/qtpy/QtGui.py
@@ -14,6 +14,7 @@ from . import PYQT6, PYQT5, PYSIDE2, PYSIDE6, PythonQtError
if PYQT6:
from PyQt6 import QtGui
from PyQt6.QtGui import *
+ QFontMetrics.width = QFontMetrics.horizontalAdvance
# Map missing/renamed methods
QDrag.exec_ = QDrag.exec | 'width' need for PyQt6
same as PySide6 | spyder-ide_qtpy | train |
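The qtpy fix is the usual compatibility shim: Qt renamed `QFontMetrics.width` to `horizontalAdvance`, so the old name is re-bound as a class-level alias and legacy callers keep working. A dependency-free Python sketch of the idea, with an illustrative class:

```python
class FontMetrics:
    """Stand-in for an API that only ships horizontalAdvance()."""

    def horizontalAdvance(self, text):
        return 7 * len(text)   # fake fixed-width advance


# Compatibility shim: expose the removed name as an alias.
FontMetrics.width = FontMetrics.horizontalAdvance

fm = FontMetrics()
assert fm.width("abc") == fm.horizontalAdvance("abc") == 21
```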
13ce4e0ea514627e1cbab17ce7fae4744f229fcf | diff --git a/manage.py b/manage.py
index <HASH>..<HASH> 100644
--- a/manage.py
+++ b/manage.py
@@ -34,19 +34,5 @@ def check():
os.system('pep257 examples/')
[email protected]
-def test():
- """Run unittests."""
- os.system('find ./objects -name "*.pyc" -exec rm -rf {} \\;')
- os.system('find ./tests -name "*.pyc" -exec rm -rf {} \\;')
-
- os.system('find ./objects -name "*.pyo" -exec rm -rf {} \\;')
- os.system('find ./tests -name "*.pyo" -exec rm -rf {} \\;')
-
- os.system('coverage run --rcfile=./.coveragerc `which unit2` discover')
- os.system('coverage html --rcfile=./.coveragerc')
- os.system('rm -f .coverage')
-
-
if __name__ == '__main__':
manager.main() | Remove test command from manage.py | ets-labs_python-dependency-injector | train |
5f39ac7fc908cc0120b5d3a5d5aa1434082a7d6c | diff --git a/django_mailbox/models.py b/django_mailbox/models.py
index <HASH>..<HASH> 100755
--- a/django_mailbox/models.py
+++ b/django_mailbox/models.py
@@ -110,7 +110,9 @@ class Mailbox(models.Model):
msg.mailbox = self
msg.subject = message['subject'][0:255]
msg.message_id = message['message-id'][0:255]
- msg.address = rfc822.parseaddr(message['from'])[1][0:255]
+ msg.from_header = message['from']
+ msg.to_header = message['to']
+ msg.outgoing = False
msg.body = message.as_string()
if message['in-reply-to']:
try:
@@ -174,7 +176,10 @@ class Message(models.Model):
blank=True,
null=True,
)
- address = models.CharField(max_length=255)
+ from_header = models.CharField(
+ max_length=255,
+ )
+ to_header = models.TextField()
outgoing = models.BooleanField(
default=False,
blank=True,
@@ -191,12 +196,35 @@ class Message(models.Model):
outgoing_messages = OutgoingMessageManager()
@property
+ def address(self):
+ """Property allowing one to get the relevant address(es).
+
+ In earlier versions of this library, the model had an `address` field
+ storing the e-mail address from which a message was received. During
+ later refactorings, it became clear that perhaps storing sent messages
+ would also be useful, so the address field was replaced with two
+ separate fields.
+
+ """
+ if self.outgoing:
+ return self.to_addresses()
+ else:
+ return self.from_addresses()
+
+ @property
def from_address(self):
- return self.get_email_object(self)['from']
+ return rfc822.parseaddr(self.from_header)[1]
@property
- def to_address(self):
- return self.get_email_object(self)['to']
+ def to_addresses(self):
+ addresses = []
+ for address in self.to_header.split(','):
+ addresses.append(
+ rfc822.parseaddr(
+ address
+ )[1]
+ )
+ return addresses
def get_email_object(self):
return email.message_from_string(self.body) | Separate address field into two distinct fields storing TO and FROM headers. | coddingtonbear_django-mailbox | train |
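Both new properties lean on the stdlib's RFC 822 address parsing; `rfc822.parseaddr` is the Python 2 spelling, and the same helpers live in `email.utils` today. A Python 3 sketch with hypothetical header values:

```python
from email.utils import getaddresses, parseaddr

from_header = 'Alice Example <alice@example.com>'
to_header = 'Bob <bob@example.com>, carol@example.com'

# parseaddr returns (display_name, address); [1] keeps the bare address.
assert parseaddr(from_header)[1] == 'alice@example.com'

# The model splits to_header on commas and parseaddr's each piece;
# getaddresses does the equivalent in a single call.
to_addresses = [addr for _name, addr in getaddresses([to_header])]
assert to_addresses == ['bob@example.com', 'carol@example.com']
```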
9867011cc24aa75bfd3ac1b294349c869ecb9bfe | diff --git a/uniqueid-core/src/main/java/org/lable/util/uniqueid/UniqueIDGenerator.java b/uniqueid-core/src/main/java/org/lable/util/uniqueid/UniqueIDGenerator.java
index <HASH>..<HASH> 100644
--- a/uniqueid-core/src/main/java/org/lable/util/uniqueid/UniqueIDGenerator.java
+++ b/uniqueid-core/src/main/java/org/lable/util/uniqueid/UniqueIDGenerator.java
@@ -3,9 +3,6 @@ package org.lable.util.uniqueid;
import java.nio.ByteBuffer;
import java.util.ArrayDeque;
import java.util.Deque;
-import java.util.Stack;
-import java.util.concurrent.ConcurrentHashMap;
-import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.TimeUnit;
/**
@@ -76,8 +73,6 @@ public abstract class UniqueIDGenerator {
* @throws GeneratorException Thrown when an ID could not be generated. In practice,
* this exception is usually only thrown by the more complex subclasses of
* {@link org.lable.util.uniqueid.UniqueIDGenerator}.
- * @throws java.lang.IllegalStateException Thrown when this method is called after {@link #close()} has been
- * called.
*/
public synchronized byte[] generate() throws GeneratorException {
if (closed) {
@@ -113,8 +108,6 @@ public abstract class UniqueIDGenerator {
* @throws GeneratorException Thrown when an ID could not be generated. In practice,
* this exception is usually only thrown by the more complex subclasses of
* {@link org.lable.util.uniqueid.UniqueIDGenerator}.
- * @throws java.lang.IllegalStateException Thrown when this method is called after {@link #close()} has been
- * called.
*/
public Deque<byte[]> batch(int size) throws GeneratorException {
Deque<byte[]> stack = new ArrayDeque<byte[]>(); | Remove unused imports and superfluous javadoc comments. | LableOrg_java-uniqueid | train |
2d16250cea311b4cb4bf2b3cbedd038a053b3e1b | diff --git a/src/authorize.js b/src/authorize.js
index <HASH>..<HASH> 100644
--- a/src/authorize.js
+++ b/src/authorize.js
@@ -7,6 +7,8 @@
hash;
if (hashPos === -1) { return; }
hash = location.href.substring(hashPos+1);
+ // if hash is not of the form #key=val&key=val, it's probably not for us
+ if (hash.indexOf('=') === -1) { return; }
return hash.split('&').reduce(function(m, kvs) {
var kv = kvs.split('=');
m[decodeURIComponent(kv[0])] = decodeURIComponent(kv[1]); | Fix params extraction from location hash
The current code broke applications that use hash navigation. For example, if I access my application with url <URL> | remotestorage_remotestorage.js | train |
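The guard bails out unless the fragment looks like `key=val&key=val`, leaving hash-based navigation fragments (e.g. `#/inbox`) untouched. The same guard-then-parse shape in Python, with a hypothetical URL:

```python
from urllib.parse import unquote


def parse_fragment(url):
    """Return {key: value} parsed from '#key=val&key=val', else None."""
    _, sep, fragment = url.partition('#')
    if not sep or '=' not in fragment:
        return None   # probably hash navigation, not for us
    return {
        unquote(k): unquote(v)
        for k, _, v in (kv.partition('=') for kv in fragment.split('&'))
    }


assert parse_fragment('https://app.example/#/inbox') is None
assert parse_fragment('https://app.example/#access_token=abc&state=x') == {
    'access_token': 'abc',
    'state': 'x',
}
```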
57e3afa7a3f20ce91de3989d67574df0f55b1b80 | diff --git a/MongodbManager.php b/MongodbManager.php
index <HASH>..<HASH> 100755
--- a/MongodbManager.php
+++ b/MongodbManager.php
@@ -17,6 +17,10 @@ use yii\db\Expression;
use yii\di\Instance;
use yii\base\InvalidCallException;
use yii\base\InvalidParamException;
+use yii\rbac\Assignment;
+use yii\rbac\Item;
+use yii\rbac\Permission;
+use yii\rbac\Role;
class MongodbManager extends BaseManager
{
@@ -155,7 +159,7 @@ class MongodbManager extends BaseManager
* @inheritdoc
*/
protected function removeItem($item) {
- $this->db->getCollection($this->itemChildTable)->remove(['or', 'parent=:name', 'child=:name'], [':name' => $item->name]);
+ $this->db->getCollection($this->itemChildTable)->remove(['or', ['parent' => $item->name], ['child' => $item->name]]);
$this->db->getCollection($this->assignmentTable)->remove(['item_name' => $item->name]);
$this->db->getCollection($this->itemTable)->remove(['name' => $item->name]);
return true; | Added namespaces.
Fixed set condition in method removeItem(), because method yii\mongodb\Collection::buildCondition() expects an array | letyii_yii2-rbac-mongodb | train |
5e1a2801cb3868e4055fb705d02fe106b90d3709 | diff --git a/lib/aposLocals.js b/lib/aposLocals.js
index <HASH>..<HASH> 100644
--- a/lib/aposLocals.js
+++ b/lib/aposLocals.js
@@ -141,6 +141,7 @@ module.exports = function(self) {
if (!options.controls) {
options.controls = self.defaultControls;
}
+ options.styles = options.styles || self.controlTypes.style.choices;
var area = options.area;
delete options.area;
diff --git a/lib/editor.js b/lib/editor.js
index <HASH>..<HASH> 100644
--- a/lib/editor.js
+++ b/lib/editor.js
@@ -491,6 +491,7 @@ module.exports = {
// slug and save attributes from appearing
options.edit = true;
options.virtual = true;
+ options.styles = options.styles || self.controlTypes.style.choices;
return self.render(res, 'area', { options: options, area: area, id: wid });
}
}); | Make the style choices available to the normal view and virtual view of areas for the benefit of always-on editors like AposEditor2 | apostrophecms_apostrophe | train |
d1794c171453919a157a19c9adb1784b8318138a | diff --git a/api/tasks.go b/api/tasks.go
index <HASH>..<HASH> 100644
--- a/api/tasks.go
+++ b/api/tasks.go
@@ -334,10 +334,13 @@ func (tmpl *Template) Canonicalize() {
if tmpl.ChangeMode == nil {
tmpl.ChangeMode = helper.StringToPtr("restart")
}
- if *tmpl.ChangeMode == "signal" && tmpl.ChangeSignal == nil {
- tmpl.ChangeSignal = helper.StringToPtr("SIGHUP")
- }
- if tmpl.ChangeSignal != nil {
+ if tmpl.ChangeSignal == nil {
+ if *tmpl.ChangeMode == "signal" {
+ tmpl.ChangeSignal = helper.StringToPtr("SIGHUP")
+ } else {
+ tmpl.ChangeSignal = helper.StringToPtr("")
+ }
+ } else {
sig := *tmpl.ChangeSignal
tmpl.ChangeSignal = helper.StringToPtr(strings.ToUpper(sig))
} | Missed initializing another field in Template! | hashicorp_nomad | train |
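The fix makes the defaulting total: `ChangeSignal` is filled in for every `ChangeMode` (SIGHUP only when the mode is "signal", empty otherwise) and user-supplied values are upper-cased. The same decision table as a Python sketch:

```python
def canonicalize_template(change_mode=None, change_signal=None):
    """Mirror of the corrected defaulting logic (field names from the diff)."""
    if change_mode is None:
        change_mode = 'restart'
    if change_signal is None:
        change_signal = 'SIGHUP' if change_mode == 'signal' else ''
    else:
        change_signal = change_signal.upper()
    return change_mode, change_signal


assert canonicalize_template() == ('restart', '')
assert canonicalize_template('signal') == ('signal', 'SIGHUP')
assert canonicalize_template('signal', 'sigusr1') == ('signal', 'SIGUSR1')
```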
30de0eb4e1bd1861a777e9d8e80d612680fac712 | diff --git a/test/tests/button/error.js b/test/tests/button/error.js
index <HASH>..<HASH> 100644
--- a/test/tests/button/error.js
+++ b/test/tests/button/error.js
@@ -19,6 +19,84 @@ for (let flow of [ 'popup', 'iframe' ]) {
window.paypal.Checkout.contexts.iframe = false;
});
+ it.only('should render button, render checkout, and throw an error in payment', (done) => {
+
+ return window.paypal.Button.render({
+
+ test: { flow, action: 'error' },
+
+ payment() : string | SyncPromise<string> {
+ throw new Error('error');
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ }, '#testContainer');
+ });
+
+ it('should render button, render checkout, and return a rejected promise in payment', (done) => {
+
+ return window.paypal.Button.render({
+
+ test: { flow, action: 'error' },
+
+ payment() : string | SyncPromise<string> {
+ return window.paypal.Promise.reject(new Error('error'));
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ }, '#testContainer');
+ });
+
+ it('should render button, render checkout, and call reject in payment', (done) => {
+
+ return window.paypal.Button.render({
+
+ test: { flow, action: 'error' },
+
+ payment(resolve, reject) {
+ reject(new Error('error'));
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ }, '#testContainer');
+ });
+
it('should render button, then fall back and complete the payment', (done) => {
return window.paypal.Button.render({
diff --git a/test/tests/checkout/error.js b/test/tests/checkout/error.js
index <HASH>..<HASH> 100644
--- a/test/tests/checkout/error.js
+++ b/test/tests/checkout/error.js
@@ -18,6 +18,102 @@ for (let flow of [ 'popup', 'iframe' ]) {
window.paypal.Checkout.contexts.iframe = false;
});
+ it('should render checkout and throw an error in payment', (done) => {
+
+ let testButton = createElement({ tag: 'button', id: 'testButton', container: 'testContainer' });
+
+ testButton.addEventListener('click', (event : Event) => {
+ return window.paypal.Checkout.render({
+
+ test: { action: 'error' },
+
+ payment() {
+ throw new Error('error');
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ });
+ });
+
+ testButton.click();
+ });
+
+ it('should render checkout and return a rejected promise in payment', (done) => {
+
+ let testButton = createElement({ tag: 'button', id: 'testButton', container: 'testContainer' });
+
+ testButton.addEventListener('click', (event : Event) => {
+ return window.paypal.Checkout.render({
+
+ test: { action: 'error' },
+
+ payment() : string | SyncPromise<string> {
+ return window.paypal.Promise.reject(new Error('error'));
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ });
+ });
+
+ testButton.click();
+ });
+
+ it('should render checkout and call reject in payment', (done) => {
+
+ let testButton = createElement({ tag: 'button', id: 'testButton', container: 'testContainer' });
+
+ testButton.addEventListener('click', (event : Event) => {
+ return window.paypal.Checkout.render({
+
+ test: { action: 'error' },
+
+ payment(resolve, reject) {
+ reject(new Error('error'));
+ },
+
+ onError(err) : void {
+ assert.isOk(err instanceof Error);
+ return done();
+ },
+
+ onAuthorize() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ },
+
+ onCancel() : void {
+ return done(new Error('Expected onCancel to not be called'));
+ }
+
+ });
+ });
+
+ testButton.click();
+ });
+
it('should render checkout, then fall back and complete the payment', (done) => {
let testButton = createElement({ tag: 'button', id: 'testButton', container: 'testContainer' }); | Add tests for error cases in payment method | paypal_paypal-checkout-components | train |
aa27643a02c07a21e16babb261d99c5528b64fe2 | diff --git a/Twig/AvatarExtension.php b/Twig/AvatarExtension.php
index <HASH>..<HASH> 100644
--- a/Twig/AvatarExtension.php
+++ b/Twig/AvatarExtension.php
@@ -52,7 +52,13 @@ class AvatarExtension extends \Twig_Extension
*/
public function getUserEmail()
{
- return $this->context->getToken()->getUser()->getEmail();
+ $token = $this->context->getToken();
+
+ if (!$token) {
+ return null;
+ }
+
+ return $token->getUser()->getEmail();
}
[Backoffice] Safe check before accessing user. | Clastic_BackofficeBundle | train |
f88499a77b33597afd5b24142db10e89a56643f7 | diff --git a/src/printer-php.js b/src/printer-php.js
index <HASH>..<HASH> 100644
--- a/src/printer-php.js
+++ b/src/printer-php.js
@@ -37,16 +37,14 @@ function handleLiteral(node) {
}
function handleArugments(args) {
- return group(
- join(", ",
- args.map((param) => {
- return group(concat([
- line,
- "$",
- param.name
- ]));
- }))
- );
+ return join(", ",
+ args.map((param) => {
+ return group(concat([
+ softline,
+ "$",
+ param.name
+ ]));
+ }));
}
function handleNode(node) {
@@ -205,8 +203,7 @@ function handleNode(node) {
"function ",
node.name,
"(",
- indent(handleArugments(node.arguments)),
- ") {",
+ group(concat([handleArugments(node.arguments), concat([softline, ") {"])])),
indent(concat([hardline, handleNode(node.body)])),
concat([hardline, "}"])
]); | glimmer of grouping happyness on long lines | prettier_plugin-php | train |
a2cb3478bfc476ad2a060f02403f6f4c02e02196 | diff --git a/src/main/java/com/datumbox/common/utilities/RandomGenerator.java b/src/main/java/com/datumbox/common/utilities/RandomGenerator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/datumbox/common/utilities/RandomGenerator.java
+++ b/src/main/java/com/datumbox/common/utilities/RandomGenerator.java
@@ -36,7 +36,7 @@ public class RandomGenerator {
*
* @return
*/
- public static Long getGlobalSeed() {
+ public static synchronized Long getGlobalSeed() {
return globalSeed;
}
@@ -49,7 +49,7 @@ public class RandomGenerator {
*
* @param globalSeed
*/
- public static void setGlobalSeed(Long globalSeed) {
+ public static synchronized void setGlobalSeed(Long globalSeed) {
RandomGenerator.globalSeed = globalSeed;
}
@@ -61,7 +61,7 @@ public class RandomGenerator {
*
* @return
*/
- public synchronized static Random getThreadLocalRandom() {
+ public static synchronized Random getThreadLocalRandom() {
if(threadLocalRandom == null) {
threadLocalRandom = new ThreadLocal<Random>() {
@Override | Making public methods of RandomGenerator synchronized | datumbox_datumbox-framework | train |
86f649ecf328be79b40ee0a999c984f3d6c2dd5b | diff --git a/spec/page_spec.rb b/spec/page_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/page_spec.rb
+++ b/spec/page_spec.rb
@@ -194,6 +194,16 @@ describe PDF::Reader::Page, "graphic_states()" do
end
end
+describe PDF::Reader::Page, "orientation()" do
+
+ it "should return the orientation of cairo-basic.pdf page 1 as 'portrait'" do
+ @browser = PDF::Reader.new(pdf_spec_file("cairo-basic"))
+ @page = @browser.page(1)
+ @page.orientation.should eql("portrait")
+ end
+
+end
+
describe PDF::Reader::Page, "patterns()" do
it "should return an empty hash from cairo-basic.pdf page 1" do | Add spec for new method orientation() | yob_pdf-reader | train |
6257eafcf360197c3a091810ef1d51c2180c4c66 | diff --git a/shardingsphere-db-protocol/shardingsphere-db-protocol-opengauss/src/main/java/org/apache/shardingsphere/db/protocol/opengauss/packet/command/query/extended/bind/OpenGaussComBatchBindPacket.java b/shardingsphere-db-protocol/shardingsphere-db-protocol-opengauss/src/main/java/org/apache/shardingsphere/db/protocol/opengauss/packet/command/query/extended/bind/OpenGaussComBatchBindPacket.java
index <HASH>..<HASH> 100644
--- a/shardingsphere-db-protocol/shardingsphere-db-protocol-opengauss/src/main/java/org/apache/shardingsphere/db/protocol/opengauss/packet/command/query/extended/bind/OpenGaussComBatchBindPacket.java
+++ b/shardingsphere-db-protocol/shardingsphere-db-protocol-opengauss/src/main/java/org/apache/shardingsphere/db/protocol/opengauss/packet/command/query/extended/bind/OpenGaussComBatchBindPacket.java
@@ -107,10 +107,9 @@ public final class OpenGaussComBatchBindPacket extends OpenGaussCommandPacket {
return parameterFormats.isEmpty() || 0 == parameterFormats.get(parameterIndex % parameterFormats.size());
}
- private Object getTextParameters(final PostgreSQLPacketPayload payload, final int parameterValueLength, final PostgreSQLColumnType columnType) {
- byte[] bytes = new byte[parameterValueLength];
- payload.getByteBuf().readBytes(bytes);
- return getTextParameters(new String(bytes), columnType);
+ private Object getTextParameters(final PostgreSQLPacketPayload payload, final int parameterValueLength, final PostgreSQLColumnType parameterType) {
+ String value = payload.getByteBuf().readCharSequence(parameterValueLength, payload.getCharset()).toString();
+ return getTextParameters(value, parameterType);
}
private Object getTextParameters(final String textValue, final PostgreSQLColumnType columnType) {
diff --git a/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/PostgreSQLComBindPacket.java b/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/PostgreSQLComBindPacket.java
index <HASH>..<HASH> 100644
--- a/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/PostgreSQLComBindPacket.java
+++ b/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/packet/command/query/extended/bind/PostgreSQLComBindPacket.java
@@ -94,9 +94,8 @@ public final class PostgreSQLComBindPacket extends PostgreSQLCommandPacket {
}
private Object getTextParameters(final PostgreSQLPacketPayload payload, final int parameterValueLength, final PostgreSQLColumnType parameterType) {
- byte[] bytes = new byte[parameterValueLength];
- payload.getByteBuf().readBytes(bytes);
- return getTextParameters(new String(bytes, payload.getCharset()), parameterType);
+ String value = payload.getByteBuf().readCharSequence(parameterValueLength, payload.getCharset()).toString();
+ return getTextParameters(value, parameterType);
}
private Object getTextParameters(final String textValue, final PostgreSQLColumnType parameterType) {
diff --git a/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/payload/PostgreSQLPacketPayload.java b/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/payload/PostgreSQLPacketPayload.java
index <HASH>..<HASH> 100644
--- a/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/payload/PostgreSQLPacketPayload.java
+++ b/shardingsphere-db-protocol/shardingsphere-db-protocol-postgresql/src/main/java/org/apache/shardingsphere/db/protocol/postgresql/payload/PostgreSQLPacketPayload.java
@@ -134,10 +134,9 @@ public final class PostgreSQLPacketPayload implements PacketPayload {
* @return null terminated string
*/
public String readStringNul() {
- byte[] result = new byte[byteBuf.bytesBefore((byte) 0)];
- byteBuf.readBytes(result);
+ String result = byteBuf.readCharSequence(byteBuf.bytesBefore((byte) 0), charset).toString();
byteBuf.skipBytes(1);
- return new String(result, charset);
+ return result;
}
/** | Replace ByteBuf#readBytes with readCharSequence (#<I>) | apache_incubator-shardingsphere | train |
0bfce1ee02b94d978e4654c4512e1e810b432618 | diff --git a/heroku/models.py b/heroku/models.py
index <HASH>..<HASH> 100644
--- a/heroku/models.py
+++ b/heroku/models.py
@@ -180,7 +180,7 @@ class App(BaseResource):
def __repr__(self):
return "<app '{0}'>".format(self.name)
- def new(self, name=None, stack='cedar'):
+ def new(self, name=None, stack='cedar', region=None):
"""Creates a new app."""
payload = {}
@@ -191,6 +191,9 @@ class App(BaseResource):
if stack:
payload['app[stack]'] = stack
+ if region:
+ payload['app[region]'] = region
+
r = self._h._http_resource(
method='POST',
resource=('apps',), | allowing client to provide region for new app | heroku_heroku.py | train |
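The new keyword follows the existing pattern: a form key is added to the payload only when the caller supplied a value, so the server-side default region still applies otherwise. A standalone sketch of the payload builder, parameter names as in the diff:

```python
def build_app_payload(name=None, stack='cedar', region=None):
    """Build the form payload, omitting parameters left unset."""
    payload = {}
    if name:
        payload['app[name]'] = name
    if stack:
        payload['app[stack]'] = stack
    if region:
        payload['app[region]'] = region
    return payload


assert build_app_payload(name='demo') == {
    'app[name]': 'demo',
    'app[stack]': 'cedar',
}
assert build_app_payload(name='demo', region='eu')['app[region]'] == 'eu'
```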
91a3178c7bb436d2b671a5efee3149ee0ab8a6cd | diff --git a/Demo_Compare_Files.py b/Demo_Compare_Files.py
index <HASH>..<HASH> 100644
--- a/Demo_Compare_Files.py
+++ b/Demo_Compare_Files.py
@@ -5,16 +5,18 @@ sg.SetOptions(button_color=sg.COLOR_SYSTEM_DEFAULT)
def GetFilesToCompare():
with sg.FlexForm('File Compare') as form:
form_rows = [[sg.Text('Enter 2 files to comare')],
- [sg.Text('File 1', size=(15, 1)), sg.InputText(), sg.FileBrowse()],
- [sg.Text('File 2', size=(15, 1)), sg.InputText(), sg.FileBrowse()],
+ [sg.Text('File 1', size=(15, 1)), sg.InputText(key='file1'), sg.FileBrowse()],
+ [sg.Text('File 2', size=(15, 1)), sg.InputText(key='file2'), sg.FileBrowse(target='file2')],
[sg.Submit(), sg.Cancel()]]
button, values = form.LayoutAndRead(form_rows)
return button, values
def main():
- button, (f1, f2) = GetFilesToCompare()
+ button, values = GetFilesToCompare()
+ f1 = values['file1']
+ f2 = values['file2']
if any((button != 'Submit', f1 =='', f2 == '')):
- sg.MsgBoxError('Operation cancelled')
+ sg.PopupError('Operation cancelled')
exit(69)
with open(f1, 'rb') as file1:
@@ -24,11 +26,11 @@ def main():
for i, x in enumerate(a):
if x != b[i]:
- sg.MsgBox('Compare results for files', f1, f2, '**** Mismatch at offset {} ****'.format(i))
+ sg.Popup('Compare results for files', f1, f2, '**** Mismatch at offset {} ****'.format(i))
break
else:
if len(a) == len(b):
- sg.MsgBox('**** The files are IDENTICAL ****')
+ sg.Popup('**** The files are IDENTICAL ****')
if __name__ == '__main__':
diff --git a/PySimpleGUI.py b/PySimpleGUI.py
index <HASH>..<HASH> 100644
--- a/PySimpleGUI.py
+++ b/PySimpleGUI.py
@@ -969,7 +969,7 @@ class Button(Element):
if target == (None, None):
strvar = self.TKStringVar
else:
- if len(target) == 2:
+ if not isinstance(target, str):
if target[0] < 0:
target = [self.Position[0] + target[0], target[1]]
target_element = self.ParentForm._GetElementAtLocation(target)
@@ -2179,6 +2179,8 @@ def ReadFormButton(button_text, image_filename=None, image_size=(None, None),ima
return Button(BUTTON_TYPE_READ_FORM, image_filename=image_filename, image_size=image_size, image_subsample=image_subsample, border_width=border_width, button_text=button_text, scale=scale, size=size, auto_size_button=auto_size_button, button_color=button_color, font=font, bind_return_key=bind_return_key, focus=focus, pad=pad, key=key)
ReadButton = ReadFormButton
+RButton = ReadFormButton
+RFButton = ReadFormButton
# ------------------------- Realtime BUTTON Element lazy function ------------------------- #
@@ -2552,7 +2554,7 @@ def PackFormIntoFrame(form, containing_frame, toplevel_form):
tktext_label.configure(background=element.BackgroundColor)
if element.TextColor != COLOR_SYSTEM_DEFAULT and element.TextColor is not None:
tktext_label.configure(fg=element.TextColor)
- tktext_label.pack(side=tk.LEFT,padx=element.Pad[0], pady=element.Pad[1], fill='both', expand=True)
+ tktext_label.pack(side=tk.LEFT,padx=element.Pad[0], pady=element.Pad[1], expand=True)
element.TKText = tktext_label
if element.ClickSubmits:
tktext_label.bind('<Button-1>', element.TextClickedHandler) | Changed MsgBox to Popup, fix for when button target is key | PySimpleGUI_PySimpleGUI | train |
b45fbdac7a7eb10f0202939cf64df577103af3c3 | diff --git a/lib/review/book/index.rb b/lib/review/book/index.rb
index <HASH>..<HASH> 100644
--- a/lib/review/book/index.rb
+++ b/lib/review/book/index.rb
@@ -1,4 +1,4 @@
-# Copyright (c) 2008-2018 Minero Aoki, Kenshi Muto
+# Copyright (c) 2008-2019 Minero Aoki, Kenshi Muto
# 2002-2007 Minero Aoki
#
# This program is free software.
@@ -316,6 +316,7 @@ module ReVIEW
if m.nil? || m[1].size > 10 # Ignore too deep index
next
end
+
index = m[1].size - 2
# column
@@ -343,9 +344,15 @@ module ReVIEW
indexs[i] ||= 0
end
end
- indexs[index] += 1
- headlines[index] = m[3].present? ? m[3].strip : m[4].strip
- items.push Item.new(headlines.join('|'), indexs.dup, m[4].strip)
+
+ if %w(notoc nodisp).include?(m[2])
+ headlines[index] = m[3].present? ? m[3].strip : m[4].strip
+ items.push Item.new(headlines.join('|'), nil, m[4].strip)
+ else
+ indexs[index] += 1
+ headlines[index] = m[3].present? ? m[3].strip : m[4].strip
+ items.push Item.new(headlines.join('|'), indexs.dup, m[4].strip)
+ end
end
new(items, chap)
end
@@ -364,6 +371,10 @@ module ReVIEW
end
def number(id)
+ unless self[id].number
+ # when notoc
+ return ''
+ end
n = @chap.number
# XXX: remove magic number (move to lib/review/book/chapter.rb)
if @chap.on_appendix? && @chap.number > 0 && @chap.number < 28
diff --git a/lib/review/htmlbuilder.rb b/lib/review/htmlbuilder.rb
index <HASH>..<HASH> 100644
--- a/lib/review/htmlbuilder.rb
+++ b/lib/review/htmlbuilder.rb
@@ -1002,7 +1002,7 @@ module ReVIEW
def inline_hd_chap(chap, id)
n = chap.headline_index.number(id)
- if chap.number and @book.config['secnolevel'] >= n.split('.').size
+ if chap.number && @book.config['secnolevel'] >= n.split('.').size && n.present?
str = I18n.t('hd_quote', [n, compile_inline(chap.headline(id).caption)])
else
str = I18n.t('hd_quote_without_number', compile_inline(chap.headline(id).caption))
diff --git a/lib/review/idgxmlbuilder.rb b/lib/review/idgxmlbuilder.rb
index <HASH>..<HASH> 100644
--- a/lib/review/idgxmlbuilder.rb
+++ b/lib/review/idgxmlbuilder.rb
@@ -1148,7 +1148,7 @@ module ReVIEW
def inline_hd_chap(chap, id)
if chap.number
n = chap.headline_index.number(id)
- if @book.config['secnolevel'] >= n.split('.').size
+ if @book.config['secnolevel'] >= n.split('.').size && n.present?
return I18n.t('hd_quote', [n, compile_inline(chap.headline(id).caption)])
end
end
diff --git a/lib/review/latexbuilder.rb b/lib/review/latexbuilder.rb
index <HASH>..<HASH> 100644
--- a/lib/review/latexbuilder.rb
+++ b/lib/review/latexbuilder.rb
@@ -1103,7 +1103,7 @@ module ReVIEW
def inline_hd_chap(chap, id)
n = chap.headline_index.number(id)
- if chap.number and @book.config['secnolevel'] >= n.split('.').size
+ if chap.number and @book.config['secnolevel'] >= n.split('.').size && n.present?
str = I18n.t('hd_quote', [chap.headline_index.number(id), compile_inline(chap.headline(id).caption)])
else
str = I18n.t('hd_quote_without_number', compile_inline(chap.headline(id).caption))
diff --git a/lib/review/plaintextbuilder.rb b/lib/review/plaintextbuilder.rb
index <HASH>..<HASH> 100644
--- a/lib/review/plaintextbuilder.rb
+++ b/lib/review/plaintextbuilder.rb
@@ -1,4 +1,4 @@
-# Copyright (c) 2018 Kenshi Muto
+# Copyright (c) 2018-2019 Kenshi Muto
#
# This program is free software.
# You can distribute or modify this program under the terms of
@@ -366,7 +366,7 @@ module ReVIEW
def inline_hd_chap(chap, id)
if chap.number
n = chap.headline_index.number(id)
- if @book.config['secnolevel'] >= n.split('.').size
+ if @book.config['secnolevel'] >= n.split('.').size && n.present?
return I18n.t('hd_quote', [n, compile_inline(chap.headline(id).caption)])
end
end | Skip notoc headlines from counting. Closes: #<I> | kmuto_review | train
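Note: a minimal Python sketch of the numbering rule introduced above (conceptual only; the real model is the Ruby Item class in the diff):

def number_headlines(headlines):
    # 'notoc'/'nodisp' headlines keep their caption but get no number
    # and must not advance the section counter.
    counter, items = 0, []
    for caption, tag in headlines:
        if tag in ('notoc', 'nodisp'):
            items.append((caption, None))   # unnumbered entry
        else:
            counter += 1
            items.append((caption, counter))
    return items

assert number_headlines([('Intro', ''), ('Aside', 'notoc'), ('Body', '')]) == \
    [('Intro', 1), ('Aside', None), ('Body', 2)]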
732c137cf4ed27bf3289981721da318dce436ce0 | diff --git a/lib/kafka/prometheus.rb b/lib/kafka/prometheus.rb
index <HASH>..<HASH> 100644
--- a/lib/kafka/prometheus.rb
+++ b/lib/kafka/prometheus.rb
@@ -74,8 +74,7 @@ module Kafka
@process_message_latency =
Prometheus.registry.histogram(:consumer_process_message_latency, 'Latency', {}, LATENCY_BUCKETS)
@offset_lag = Prometheus.registry.gauge(:consumer_offset_lag, 'Offset lag')
- @time_lag_now = Prometheus.registry.gauge(:consumer_time_lag_now, 'Time lag of message')
- @time_lag = Prometheus.registry.histogram(:consumer_time_lag, 'Time lag of message', {}, DELAY_BUCKETS)
+ @time_lag = Prometheus.registry.gauge(:consumer_time_lag, 'Time lag of message')
@process_batch_errors = Prometheus.registry.counter(:consumer_process_batch_errors, 'Total errors in batch')
@process_batch_latency =
Prometheus.registry.histogram(:consumer_process_batch_latency, 'Latency in batch', {}, LATENCY_BUCKETS)
@@ -114,8 +113,7 @@ module Kafka
# Not all messages have timestamps.
return unless time_lag
- @time_lag_now.set(key, time_lag)
- @time_lag.observe(key, time_lag)
+ @time_lag.set(key, time_lag)
end
def process_batch(event)
diff --git a/spec/prometheus_spec.rb b/spec/prometheus_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/prometheus_spec.rb
+++ b/spec/prometheus_spec.rb
@@ -89,17 +89,10 @@ describe Kafka::Prometheus do
expect(metric).not_to be_nil
end
- it 'emits metrics to consumer_time_lag_now' do
- metric = @registry.get(:consumer_time_lag_now)
- expect(metric).not_to be_nil
- expect(metric.get(key)).to be_within(1).of(5000)
- end
-
it 'emits metrics to consumer_time_lag' do
metric = @registry.get(:consumer_time_lag)
expect(metric).not_to be_nil
- expect(metric.get(key)).to eq({ 1 => 0.0, 3 => 0.0, 10 => 0.0, 30 => 0.0, 100 => 0.0, 300 => 0.0,
- 1000 => 0.0, 3000 => 0.0, 10000 => 1.0, 30000 => 1.0 })
+ expect(metric.get(key)).to eq 5000
end
context 'with expection' do | Removed consumer_time_lag_now to be more similar to the statsd plugin | zendesk_ruby-kafka | train
59ec0dd16cc4fe18dd0998b0be096c822b4d2710 | diff --git a/static/js/deposit/form.js b/static/js/deposit/form.js
index <HASH>..<HASH> 100644
--- a/static/js/deposit/form.js
+++ b/static/js/deposit/form.js
@@ -110,7 +110,8 @@ define(function(require, exports, module) {
}
var fields = $(selector).serializeArray(),
uploader = this.select('uploaderSelector'),
- $checkboxes = $('input[type=checkbox]:not(:checked)');
+ $checkboxes = $('input[type=checkbox]:not(:checked)'),
+ $bootstrap_multiselect = $("[multiple=multiple]");
if (uploader.length) {
fields.push({
@@ -119,6 +120,15 @@ define(function(require, exports, module) {
});
}
+ if ($bootstrap_multiselect.length && !$bootstrap_multiselect.val()) {
+ fields = fields.concat(
+ $bootstrap_multiselect.map(
+ function() {
+ return {name: this.name, value: $(this).val()}
+ }).get()
+ );
+ }
+
if ($checkboxes.length) {
fields = fields.concat(
$checkboxes.map(
@@ -598,7 +608,7 @@ define(function(require, exports, module) {
}
this.onCheckboxChanged = function (event) {
- if(event.target.name.indexOf('__input__') == -1){
+ if(event.target.name.indexOf('__input__') == -1 && event.target.name ){
if ($(event.target).prop("checked")) {
save_field(this.attr.save_url, event.target.name, event.target.value);
} else { | deposit: Bootstrap multiselect fix
* Adds special support for Bootstrap multiselect as serializeArray()
omits the field.
* Amends onCheckboxChanged to not send a request when a
Bootstrap multiselect checkbox is changed. | inveniosoftware_invenio-deposit | train |
9132ef29cdbbc0fcc7670bf8dc03ec55f76bb6e6 | diff --git a/src/main/java/com/bitso/Bitso.java b/src/main/java/com/bitso/Bitso.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/bitso/Bitso.java
+++ b/src/main/java/com/bitso/Bitso.java
@@ -274,7 +274,14 @@ public class Bitso {
System.err.println("Unable to request quote: " + ret);
return null;
}
- return new BitsoTransferQuote(o);
+ BitsoTransferQuote btq = null;
+ try {
+ btq = new BitsoTransferQuote(o);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ System.err.println(o);
+ }
+ return btq;
}
public BitsoTransfer createTransfer(BigDecimal btcAmount, BigDecimal amount, String currency,
@@ -300,7 +307,14 @@ public class Bitso {
System.err.println("Unable to request quote: " + ret);
return null;
}
- return new BitsoTransfer(o);
+ BitsoTransfer bt = null;
+ try {
+ bt = new BitsoTransfer(o);
+ } catch (JSONException e) {
+ e.printStackTrace();
+ System.err.println(o);
+ }
+ return bt;
}
public BitsoTransfer getTransferStatus(String transferId) { | If there's a JSONException, print the stack trace and the returned JSON | bitsoex_bitso-java | train
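Note: the same log-the-payload pattern in a minimal Python sketch (parse_transfer is illustrative, not part of the Bitso client):

import json
import traceback

def parse_transfer(payload):
    # On a parse failure, print the stack trace *and* the raw payload,
    # so the offending server response is visible in the logs.
    try:
        return json.loads(payload)
    except json.JSONDecodeError:
        traceback.print_exc()
        print(payload)
        return None

assert parse_transfer('{"status": "ok"}') == {'status': 'ok'}
assert parse_transfer('not json') is None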
f1df4878ba6a6ebed8b787143e8ada232256127d | diff --git a/XBRL-Instance.php b/XBRL-Instance.php
index <HASH>..<HASH> 100644
--- a/XBRL-Instance.php
+++ b/XBRL-Instance.php
@@ -226,7 +226,7 @@ class XBRL_Instance
/**
* Creates an instance object from a JSON string, perhaps in a zip file
- * @param string $output_path
+ * @param string $cache_path
* @param string $cache_basename
* @param string $taxonomyNamespace
* @param string $compiledTaxonomyFile
@@ -1259,6 +1259,8 @@ class XBRL_Instance
*/
private function normalizePrefix( $localPrefix )
{
+ if ( $localPrefix == 'xml' )
+ return $localPrefix;
$namespace = $this->getInstanceNamespaces()[ $localPrefix ];
$taxonomy = $this->getInstanceTaxonomy()->getTaxonomyForNamespace( $namespace );
return $taxonomy
@@ -1616,6 +1618,10 @@ class XBRL_Instance
return $component;
}
+ /**
+ * A list of duplicate facts. Duplicate facts require a validate warning.
+ * @var array
+ */
private $duplicateFacts = null;
/**
@@ -6236,7 +6242,7 @@ class ContextsFilter
/**
* Get a specific context by reference
- * @param unknown $ref
+ * @param string $ref
* @return boolean|Array
*/
public function getContext( $ref )
@@ -6278,6 +6284,7 @@ class ContextsFilter
/**
* Return a list of the context with start or end date in $year
* @param int|string $year
+ * @param bool $matchEndDate True if the year should match only the end date. Otherwise the start date is compared as well.
* @return ContextsFilter
*/
public function ContextsForYear( $year, $matchEndDate = true ) | DocBlock updates and fix to normalizePrefix() to accommodate the
implicit 'xml' prefix. | bseddon_XBRL | train |
29d3f5ae70828394e629c91bbfd08e02ca3726da | diff --git a/faker/tests/__init__.py b/faker/tests/__init__.py
index <HASH>..<HASH> 100644
--- a/faker/tests/__init__.py
+++ b/faker/tests/__init__.py
@@ -17,12 +17,12 @@ import logging
try:
from mock import patch
-except ImportError:
+except ImportError: # pragma: no cover
from unittest.mock import patch
try:
from StringIO import StringIO
-except ImportError:
+except ImportError: # pragma: no cover
from io import StringIO
from faker import Generator, Factory
@@ -31,7 +31,7 @@ from faker.utils import text, decorators
try:
string_types = (basestring,)
-except NameError:
+except NameError: # pragma: no cover
string_types = (str,)
@@ -682,4 +682,4 @@ class GeneratorTestCase(unittest.TestCase):
if __name__ == '__main__':
- unittest.main()
+ unittest.main() # pragma: no cover | add # pragma: no cover comments | joke2k_faker | train |
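Note: a minimal sketch of where # pragma: no cover earns its keep: under any single interpreter only one branch of a version-dependent import runs, so the dead branch would otherwise depress coverage.

try:
    from unittest.mock import patch   # Python 3
except ImportError:                   # pragma: no cover
    from mock import patch            # Python 2 fallback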
bfb1247dea9143ded5738f2dca9be3707b3cc3a9 | diff --git a/treeherder/services/elasticsearch/connection.py b/treeherder/services/elasticsearch/connection.py
index <HASH>..<HASH> 100644
--- a/treeherder/services/elasticsearch/connection.py
+++ b/treeherder/services/elasticsearch/connection.py
@@ -1,4 +1,5 @@
from django.conf import settings
from elasticsearch import Elasticsearch
-es_conn = Elasticsearch(settings.ELASTICSEARCH_URL)
+url = settings.ELASTICSEARCH_URL
+es_conn = Elasticsearch(url) if url else None | Don't break Django startup
The Elasticsearch connection is constructed at import time, which
happens during Django's bootstrapping. When no Elasticsearch URL exists
we return None instead. | mozilla_treeherder | train |
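Note: a minimal sketch of the guarded import-time connection (mirrors the diff; index_document and the settings stand-in are hypothetical, and elasticsearch-py is assumed installed):

from elasticsearch import Elasticsearch

url = ''  # imagine settings.ELASTICSEARCH_URL being unset
es_conn = Elasticsearch(url) if url else None

def index_document(doc):
    # Every caller must now tolerate the connection being absent.
    if es_conn is None:
        return None
    return es_conn.index(index='jobs', body=doc)

assert index_document({'id': 1}) is None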
39348b47ce04c243e8fbf696005c270c88969258 | diff --git a/src/main/java/org/efaps/eql/JSONCI.java b/src/main/java/org/efaps/eql/JSONCI.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/efaps/eql/JSONCI.java
+++ b/src/main/java/org/efaps/eql/JSONCI.java
@@ -20,8 +20,10 @@ import java.util.UUID;
import org.efaps.admin.datamodel.Attribute;
import org.efaps.admin.datamodel.Type;
+import org.efaps.api.ci.DMAttributeType;
import org.efaps.eql.stmt.ICIPrintStmt;
import org.efaps.json.ci.AbstractCI;
+import org.efaps.json.ci.AttributeType;
import org.efaps.util.UUIDUtil;
import org.efaps.util.cache.CacheReloadException;
@@ -52,7 +54,7 @@ public final class JSONCI
AbstractCI<?> ret = null;
switch (_stmt.getCINature()) {
case TYPE:
- Type type;
+ final Type type;
if (UUIDUtil.isUUID(_stmt.getCI())) {
type = Type.get(UUID.fromString(_stmt.getCI()));
} else {
@@ -64,8 +66,22 @@ public final class JSONCI
.setUUID(type.getUUID())
.setId(type.getId());
for (final Attribute attr : type.getAttributes().values()) {
- jsonType.addAttribute(new org.efaps.json.ci.Attribute()
- .setName(attr.getName()));
+ final AttributeType attrType = new AttributeType()
+ .setName(attr.getAttributeType().getName());
+ switch (DMAttributeType.fromValue(attr.getAttributeType().getName())) {
+ case LINK:
+ case LINK_WITH_RANGES:
+ case STATUS:
+ if (attr.hasLink()) {
+ attrType.setInfo(attr.getLink().getName() + ", " + attr.getLink().getUUID());
+ }
+ break;
+ default:
+ break;
+ }
+ jsonType.addAttribute(new org.efaps.json.ci.Attribute()
+ .setName(attr.getName())
+ .setType(attrType));
}
ret = jsonType;
} | - Issue #<I>: Add the AttributeType to the JSON reply for CIType
closes #<I> | eFaps_eFaps-Kernel | train |
8e0f41e489d7e60a5733a880860a847857688a99 | diff --git a/arctic/chunkstore/chunkstore.py b/arctic/chunkstore/chunkstore.py
index <HASH>..<HASH> 100644
--- a/arctic/chunkstore/chunkstore.py
+++ b/arctic/chunkstore/chunkstore.py
@@ -94,7 +94,7 @@ class ChunkStore(object):
chunk_range: range object
a date range to delete
"""
- if chunk_range:
+ if chunk_range is not None:
# read out chunks that fall within the range and filter out
# data within the range
df = self.read(symbol, chunk_range=chunk_range, filter_data=False)
@@ -166,7 +166,8 @@ class ChunkStore(object):
the symbol to retrieve
chunk_range: object
corresponding range object for the specified chunker (for
- DateChunker it is a DateRange object)
+ DateChunker it is a DateRange object or a DatetimeIndex,
+ as returned by pandas.date_range
columns: list of str
subset of columns to read back (index will always be included, if
one exists)
@@ -186,7 +187,7 @@ class ChunkStore(object):
spec = {SYMBOL: symbol,
}
- if chunk_range:
+ if chunk_range is not None:
spec.update(self.chunker.to_mongo(chunk_range))
segments = []
@@ -284,7 +285,7 @@ class ChunkStore(object):
if sym[TYPE] == 'dataframe' and not isinstance(item, DataFrame):
raise Exception("Cannot combine DataFrame and Series")
- if chunk_range:
+ if chunk_range is not None:
self.delete(symbol, chunk_range)
sym = self._get_symbol_info(symbol)
@@ -365,7 +366,7 @@ class ChunkStore(object):
original data.
"""
- if chunk_range:
+ if chunk_range is not None:
if self.chunker.filter(item, chunk_range).empty:
raise Exception('Range must be inclusive of data')
self.__update(symbol, item, combine_method=self.__concat, chunk_range=chunk_range)
diff --git a/arctic/chunkstore/date_chunker.py b/arctic/chunkstore/date_chunker.py
index <HASH>..<HASH> 100644
--- a/arctic/chunkstore/date_chunker.py
+++ b/arctic/chunkstore/date_chunker.py
@@ -61,6 +61,8 @@ class DateChunker(Chunker):
-------
string
"""
+ if isinstance(range_obj, pd.DatetimeIndex):
+ range_obj = DateRange(range_obj.min(), range_obj.max())
if range_obj.start and range_obj.end:
return {'$and': [{START: {'$lte': range_obj.end}}, {END: {'$gte': range_obj.start}}]}
elif range_obj.start:
@@ -83,6 +85,8 @@ class DateChunker(Chunker):
-------
data, filtered by range_obj
"""
+ if isinstance(range_obj, pd.DatetimeIndex):
+ range_obj = DateRange(range_obj.min(), range_obj.max())
if 'date' in data.index.names:
return data[range_obj.start:range_obj.end]
elif 'date' in data.columns:
@@ -98,6 +102,8 @@ class DateChunker(Chunker):
-------
data, filtered by range_obj
"""
+ if isinstance(range_obj, pd.DatetimeIndex):
+ range_obj = DateRange(range_obj.min(), range_obj.max())
if 'date' in data.index.names:
return data[(data.index.get_level_values('date') < range_obj.start) | (data.index.get_level_values('date') > range_obj.end)]
elif 'date' in data.columns:
diff --git a/tests/integration/chunkstore/test_chunkstore.py b/tests/integration/chunkstore/test_chunkstore.py
index <HASH>..<HASH> 100644
--- a/tests/integration/chunkstore/test_chunkstore.py
+++ b/tests/integration/chunkstore/test_chunkstore.py
@@ -120,6 +120,8 @@ def test_write_read_with_daterange(chunkstore_lib):
read_df = chunkstore_lib.read('test_df', chunk_range=DateRange(dt(2016, 1, 1), dt(2016, 1, 2)))
assert_frame_equal(read_df, dg)
+ read_with_dr = chunkstore_lib.read('test_df', chunk_range=pd.date_range(dt(2016, 1, 1), dt(2016, 1, 2)))
+ assert_frame_equal(read_df, dg)
def test_write_read_with_daterange_noindex(chunkstore_lib):
diff --git a/tests/unit/chunkstore/test_date_chunker.py b/tests/unit/chunkstore/test_date_chunker.py
index <HASH>..<HASH> 100644
--- a/tests/unit/chunkstore/test_date_chunker.py
+++ b/tests/unit/chunkstore/test_date_chunker.py
@@ -3,6 +3,7 @@ from pandas import DataFrame, MultiIndex
from datetime import datetime as dt
from arctic.date import DateRange
from pandas.util.testing import assert_frame_equal
+import pandas as pd
def test_date_filter():
@@ -28,3 +29,16 @@ def test_date_filter():
assert_frame_equal(c.filter(df, DateRange(None, dt(2020, 1, 1))), df)
# CLOSED - CLOSED (after range)
assert(c.filter(df, DateRange(dt(2017, 1, 1), dt(2018, 1, 1))).empty)
+
+
+def test_date_filter_with_pd_date_range():
+ c = DateChunker()
+ df = DataFrame(data={'data': [1, 2, 3]},
+ index=MultiIndex.from_tuples([(dt(2016, 1, 1), 1),
+ (dt(2016, 1, 2), 1),
+ (dt(2016, 1, 3), 1)],
+ names=['date', 'id'])
+ )
+
+ assert(c.filter(df, pd.date_range(dt(2017, 1, 1), dt(2018, 1, 1))).empty)
+ assert_frame_equal(c.filter(df, pd.date_range(dt(2016, 1, 1), dt(2017, 1, 1))), df) | Date Chunker support for DatetimeIndexes | manahl_arctic | train |
310236eeadaca05fcff22a0865d37443fdbb4537 | diff --git a/lib/rack/traffic_logger/formatter/json.rb b/lib/rack/traffic_logger/formatter/json.rb
index <HASH>..<HASH> 100644
--- a/lib/rack/traffic_logger/formatter/json.rb
+++ b/lib/rack/traffic_logger/formatter/json.rb
@@ -5,8 +5,11 @@ module Rack
class Formatter
class JSON < self
- def format(hash)
- ::JSON.generate hash
+ def initialize(pretty_print: false)
+ formatter = pretty_print ?
+ -> hash { ::JSON.pretty_generate(hash) << "\n" } :
+ -> hash { ::JSON.generate(hash) << "\n" }
+ define_singleton_method :format, formatter
end
end | added pretty printing to JSON formatter | hx_rack-traffic-logger | train |
64176f8d63a87dd50b865a9298543bc892d33950 | diff --git a/java/src/main/java/gherkin/Main.java b/java/src/main/java/gherkin/Main.java
index <HASH>..<HASH> 100644
--- a/java/src/main/java/gherkin/Main.java
+++ b/java/src/main/java/gherkin/Main.java
@@ -20,7 +20,7 @@ public class Main {
}
};
- private Lexer lexer;
+ private Parser parser;
private final Writer out;
public Main(final Writer out, boolean prettyOrNull) {
@@ -44,8 +44,7 @@ public class Main {
}
}
};
- Parser parser = new Parser(formatter);
- lexer = new I18nLexer(parser);
+ parser = new Parser(formatter);
}
private void scanAll(File file) throws IOException {
@@ -67,9 +66,9 @@ public class Main {
private void parse(File file) {
try {
String input = FixJava.readReader(new FileReader(file));
- lexer.scan(input);
+ parser.parse(input, file.getPath(), 0);
} catch (Exception e) {
- System.err.println(e.getMessage());
+ e.printStackTrace(System.err);
System.exit(1);
}
}
diff --git a/java/src/main/java/gherkin/parser/Parser.java b/java/src/main/java/gherkin/parser/Parser.java
index <HASH>..<HASH> 100644
--- a/java/src/main/java/gherkin/parser/Parser.java
+++ b/java/src/main/java/gherkin/parser/Parser.java
@@ -49,6 +49,7 @@ public class Parser implements Listener {
pushMachine(machineName);
listener.location(featureURI);
lexer.scan(gherkin);
+ popMachine();
}
public I18n getI18nLanguage() {
@@ -127,8 +128,6 @@ public class Parser implements Listener {
if (event("eof", 1)) {
listener.eof();
}
- popMachine();
- pushMachine(machineName);
}
public void syntaxError(String state, String event, List<String> legalEvents, String uri, int line) { | Fixing bug in Main. Release <I> | cucumber-attic_gherkin2 | train |
d03b711047ca5dd092f7a0a2b7055b1174dae4f2 | diff --git a/tests/test_core.py b/tests/test_core.py
index <HASH>..<HASH> 100755
--- a/tests/test_core.py
+++ b/tests/test_core.py
@@ -97,12 +97,6 @@ class CoreTest(GorillaTestCase):
settings_2.some_value = 123
self.assertEqual(settings_1, settings_2)
- self.assertEqual(str(gorilla.Settings()), "Settings(allow_hit=False, store_hit=True)")
- self.assertEqual(str(gorilla.Settings(allow_hit=True)), "Settings(allow_hit=True, store_hit=True)")
- self.assertEqual(str(gorilla.Settings(store_hit=False)), "Settings(allow_hit=False, store_hit=False)")
- self.assertEqual(str(gorilla.Settings(some_value=123)), "Settings(allow_hit=False, some_value=123, store_hit=True)")
- self.assertEqual(str(gorilla.Settings(string='abc')), "Settings(allow_hit=False, store_hit=True, string='abc')")
-
def test_patch(self):
patch_1 = gorilla.Patch(_tomodule, 'dummy', _frommodule.function)
patch_2 = gorilla.Patch(_tomodule, 'dummy', _frommodule.function, settings=None)
@@ -121,12 +115,6 @@ class CoreTest(GorillaTestCase):
patch_2.some_value = 123
self.assertEqual(patch_1, patch_2)
- patch = gorilla.Patch(_tomodule, 'dummy', _frommodule.function)
- self.assertEqual(str(patch), "Patch(destination=%r, name='dummy', obj=%r, settings=None)" % (_tomodule, _frommodule.function))
-
- patch.some_value = 123
- self.assertEqual(str(patch), "Patch(destination=%r, name='dummy', obj=%r, settings=None)" % (_tomodule, _frommodule.function))
-
def test_apply_patch_no_hit(self):
name = 'dummy'
settings = gorilla.Settings() | Give up on testing the representation output
Python 2/3 compatibility makes it too annoying to test. | christophercrouzet_gorilla | train |
6efd95cfb04a3c25bd8433dcf4223507f9c5864c | diff --git a/store.go b/store.go
index <HASH>..<HASH> 100644
--- a/store.go
+++ b/store.go
@@ -139,7 +139,8 @@ func (c *controller) getNetworksFromStore() ([]*network, error) {
ec := &endpointCnt{n: n}
err = store.GetObject(datastore.Key(ec.Key()...), ec)
if err != nil {
- return nil, fmt.Errorf("could not find endpoint count key %s for network %s while listing: %v", datastore.Key(ec.Key()...), n.Name(), err)
+ log.Warnf("could not find endpoint count key %s for network %s while listing: %v", datastore.Key(ec.Key()...), n.Name(), err)
+ continue
}
n.Lock() | getNetworksFromStore should not fail on inconsistent network state | docker_libnetwork | train |
6f2a55557bad5d352e7d2097153e98841117e34f | diff --git a/docs/Hash.md b/docs/Hash.md
index <HASH>..<HASH> 100644
--- a/docs/Hash.md
+++ b/docs/Hash.md
@@ -5,7 +5,7 @@ The examplified methods are already ported. The non documented methods must be i
## Hash (ported or to port methods)
- clear
-- collect
+- collect - Use map instead.
- **compact** (not in ruby Hash)
Removes null and empty
@@ -60,6 +60,18 @@ $hash->compact()->toArray(); // array('foo' => 'bar')
- lazy
- length
- map
+
+Maps modified elements into a new hash
+
+```php
+$hash = new Hash(array('a' => 'b', 'c' => 'd'));
+
+$mapped = $hash->map(function($value, $key) {
+ return $key . $value;
+})->toArray();
+
+// array('ab', 'cd');
+```
- max
- max_by
- member?
diff --git a/lib/Hash.php b/lib/Hash.php
index <HASH>..<HASH> 100644
--- a/lib/Hash.php
+++ b/lib/Hash.php
@@ -166,4 +166,21 @@ class Hash extends Object implements ArrayAccess, Iterator
return new Hash;
}
+ /**
+ * Maps elements into a new Hash
+ *
+ * @param function $callback
+ * @return Hash
+ */
+ public function map($callback)
+ {
+ $hash = $this->create();
+
+ foreach ($this as $key => $value) {
+ $hash[] = $callback($value, $key);
+ }
+
+ return $hash;
+ }
+
}
diff --git a/tests/HashSugarTest.php b/tests/HashSugarTest.php
index <HASH>..<HASH> 100644
--- a/tests/HashSugarTest.php
+++ b/tests/HashSugarTest.php
@@ -76,4 +76,20 @@ class HashSugarTest extends HashTest
$this->assertHash($filtered);
}
+ /**
+ * @covers Hash::map()
+ */
+ public function testItCanMapElements()
+ {
+ $hash = new Hash(array('a' => 'b', 'c' => 'd'));
+
+ $mapped = $hash->map(function($value, $key) {
+ return $key . $value;
+ });
+
+ $expectation = array('ab', 'cd');
+
+ $this->assertEquals($expectation, $mapped->toArray());
+ }
+
} | Implemented Hash::map() | mjacobus_php-objects | train |
8a296b92b447f86825d26efe0ecebeed8bfa9c17 | diff --git a/algorithm/cga.go b/algorithm/cga.go
index <HASH>..<HASH> 100644
--- a/algorithm/cga.go
+++ b/algorithm/cga.go
@@ -8,6 +8,7 @@ import (
"github.com/twpayne/go-geom/algorithm/internal/ray_crossing"
"github.com/twpayne/go-geom/algorithm/location"
"github.com/twpayne/go-geom/algorithm/orientation"
+ "math"
)
// Returns the index of the direction of the point <code>q</code> relative to
@@ -158,3 +159,49 @@ func IsRingCounterClockwise(ring []geom.Coord) bool {
}
return isCCW
}
+
+// Computes the distance from a point p to a line segment startLine/endLine
+//
+// Note: NON-ROBUST!
+//
+// Return the distance from p to line segment AB
+func DistanceFromPointToLine(p, startLine, endLine geom.Coord) float64 {
+ // if start = end, then just compute distance to one of the endpoints
+ if startLine[0] == endLine[0] && startLine[1] == endLine[1] {
+ return p.Distance2D(startLine)
+ }
+
+ // otherwise use comp.graphics.algorithms Frequently Asked Questions method
+
+ // (1) r = AC dot AB
+ // ---------
+ // ||AB||^2
+ //
+ // r has the following meaning:
+ // r=0 P = A
+ // r=1 P = B
+ // r<0 P is on the backward extension of AB
+ // r>1 P is on the forward extension of AB
+ // 0<r<1 P is interior to AB
+
+ len2 := (endLine[0]-startLine[0])*(endLine[0]-startLine[0]) + (endLine[1]-startLine[1])*(endLine[1]-startLine[1])
+ r := ((p[0]-startLine[0])*(endLine[0]-startLine[0]) + (p[1]-startLine[1])*(endLine[1]-startLine[1])) / len2
+
+ if r <= 0.0 {
+ return p.Distance2D(startLine)
+ }
+ if r >= 1.0 {
+ return p.Distance2D(endLine)
+ }
+
+ // (2) s = (Ay-Cy)(Bx-Ax)-(Ax-Cx)(By-Ay)
+ // -----------------------------
+ // L^2
+ //
+ // Then the distance from C to P = |s|*L.
+ //
+ // This is the same calculation as {@link #distancePointLinePerpendicular}.
+ // Unrolled here for performance.
+ s := ((startLine[1]-p[1])*(endLine[0]-startLine[0]) - (startLine[0]-p[0])*(endLine[1]-startLine[1])) / len2
+ return math.Abs(s) * math.Sqrt(len2)
+}
diff --git a/algorithm/cga_test.go b/algorithm/cga_test.go
index <HASH>..<HASH> 100644
--- a/algorithm/cga_test.go
+++ b/algorithm/cga_test.go
@@ -143,3 +143,58 @@ func TestIsRingCounterClockwise(t *testing.T) {
}
}
}
+
+func TestDistanceFromPointToLine(t *testing.T) {
+ for i, tc := range []struct {
+ p geom.Coord
+ startLine, endLine geom.Coord
+ distance float64
+ }{
+ {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{1, 0},
+ endLine: geom.Coord{1, 1},
+ distance: 1,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{1, 1},
+ endLine: geom.Coord{1, -1},
+ distance: 1,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{0, 1},
+ endLine: geom.Coord{0, -1},
+ distance: 0,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{1, 0},
+ endLine: geom.Coord{2, 0},
+ distance: 1,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{2, 0},
+ endLine: geom.Coord{1, 0},
+ distance: 1,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{2, 0},
+ endLine: geom.Coord{0, 0},
+ distance: 0,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{0, 0},
+ endLine: geom.Coord{0, 0},
+ distance: 0,
+ }, {
+ p: geom.Coord{0, 0},
+ startLine: geom.Coord{1, 0},
+ endLine: geom.Coord{1, 0},
+ distance: 1,
+ },
+ } {
+ calculatedDistance := algorithm.DistanceFromPointToLine(tc.p, tc.startLine, tc.endLine)
+ if tc.distance != calculatedDistance {
+ t.Errorf("Test '%v' failed: expected \n%v but was \n%v", i+1, tc.distance, calculatedDistance)
+ }
+ }
+} | Add DistanceFromPointToLine to algorithms | twpayne_go-geom | train |
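Note: a minimal Python version of the projection logic in the new Go function (2-D only and, like the original, non-robust):

import math

def distance_point_to_segment(p, a, b):
    if a == b:                          # degenerate segment
        return math.hypot(p[0] - a[0], p[1] - a[1])
    len2 = (b[0] - a[0]) ** 2 + (b[1] - a[1]) ** 2
    # r <= 0: nearest to a; r >= 1: nearest to b; otherwise interior.
    r = ((p[0] - a[0]) * (b[0] - a[0]) + (p[1] - a[1]) * (b[1] - a[1])) / len2
    if r <= 0.0:
        return math.hypot(p[0] - a[0], p[1] - a[1])
    if r >= 1.0:
        return math.hypot(p[0] - b[0], p[1] - b[1])
    # Perpendicular distance from the signed parallelogram area.
    s = ((a[1] - p[1]) * (b[0] - a[0]) - (a[0] - p[0]) * (b[1] - a[1])) / len2
    return abs(s) * math.sqrt(len2)

assert distance_point_to_segment((0, 0), (1, -1), (1, 1)) == 1.0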
ec20211fbcc9299bf0fff597a079d4611876caef | diff --git a/lib/memcached.js b/lib/memcached.js
index <HASH>..<HASH> 100644
--- a/lib/memcached.js
+++ b/lib/memcached.js
@@ -442,7 +442,7 @@ Client.config = {
}
, 'SERVER_ERROR': function servererror(tokens, dataSet, err, queue, S, memcached) {
- (memcached || this.memcached).connectionIssue(tokens.splice(1).join(' '), S);
+ (memcached || this.memcached).connectionIssue(tokens.splice(1).join(' '), this);
return [CONTINUE, false];
} | pass socket into connectionIssue on SERVER_ERROR
fixes 3rd-Eden/node-memcached#<I> | 3rd-Eden_memcached | train |
aefeb4b7f372196cf26404303e06fc071f21ede8 | diff --git a/src/main/java/no/priv/garshol/duke/Processor.java b/src/main/java/no/priv/garshol/duke/Processor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/no/priv/garshol/duke/Processor.java
+++ b/src/main/java/no/priv/garshol/duke/Processor.java
@@ -7,6 +7,7 @@ import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
+import java.util.concurrent.CopyOnWriteArrayList;
import no.priv.garshol.duke.matchers.MatchListener;
import no.priv.garshol.duke.matchers.PrintMatchListener;
@@ -55,7 +56,7 @@ public class Processor {
public Processor(ConfigurationInterface config, Database database) {
this.config = config;
this.database = database;
- this.listeners = new ArrayList<MatchListener>();
+ this.listeners = new CopyOnWriteArrayList<MatchListener>();
this.logger = new DummyLogger();
this.threads = 1;
@@ -103,6 +104,17 @@ public class Processor {
public void addMatchListener(MatchListener listener) {
listeners.add(listener);
}
+
+
+ /**
+ * Removes a listener from being notified of the processing events.
+ */
+ public boolean removeMatchListener(MatchListener listener) {
+ if(listener != null) {
+ return listeners.remove(listener);
+ }
+ return true;
+ }
/**
* Returns all registered listeners. | Issue <I> - Switched the listener collection from an ArrayList to CopyOnWriteArrayList and exposed a new method for removing a listener. | larsga_Duke | train
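Note: a minimal Python analogue of the hazard CopyOnWriteArrayList removes: mutating a collection while iterating it. Iterating a snapshot (which CopyOnWriteArrayList gives Java per-iteration) makes removal during notification safe.

listeners = ['a', 'b', 'c']

for listener in list(listeners):   # iterate over a snapshot
    if listener == 'b':
        listeners.remove('b')      # safe: the live list is not being iterated

assert listeners == ['a', 'c']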
4e5a7a1b1acde3d53002465016e0c3f596eb45b9 | diff --git a/presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaConnectorFactory.java b/presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaConnectorFactory.java
index <HASH>..<HASH> 100644
--- a/presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaConnectorFactory.java
+++ b/presto-kafka/src/main/java/com/facebook/presto/kafka/KafkaConnectorFactory.java
@@ -19,9 +19,7 @@ import com.facebook.presto.spi.NodeManager;
import com.facebook.presto.spi.SchemaTableName;
import com.facebook.presto.spi.type.TypeManager;
import com.google.common.base.Throwables;
-import com.google.inject.Binder;
import com.google.inject.Injector;
-import com.google.inject.Module;
import com.google.inject.Scopes;
import com.google.inject.TypeLiteral;
import io.airlift.bootstrap.Bootstrap;
@@ -62,7 +60,7 @@ public class KafkaConnectorFactory
}
@Override
- public Connector create(final String connectorId, Map<String, String> config)
+ public Connector create(String connectorId, Map<String, String> config)
{
checkNotNull(connectorId, "connectorId is null");
checkNotNull(config, "config is null");
@@ -71,21 +69,16 @@ public class KafkaConnectorFactory
Bootstrap app = new Bootstrap(
new JsonModule(),
new KafkaConnectorModule(),
- new Module()
- {
- @Override
- public void configure(Binder binder)
- {
- binder.bind(KafkaConnectorId.class).toInstance(new KafkaConnectorId(connectorId));
- binder.bind(TypeManager.class).toInstance(typeManager);
- binder.bind(NodeManager.class).toInstance(nodeManager);
+ binder -> {
+ binder.bind(KafkaConnectorId.class).toInstance(new KafkaConnectorId(connectorId));
+ binder.bind(TypeManager.class).toInstance(typeManager);
+ binder.bind(NodeManager.class).toInstance(nodeManager);
- if (tableDescriptionSupplier.isPresent()) {
- binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, KafkaTopicDescription>>>() {}).toInstance(tableDescriptionSupplier.get());
- }
- else {
- binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, KafkaTopicDescription>>>() {}).to(KafkaTableDescriptionSupplier.class).in(Scopes.SINGLETON);
- }
+ if (tableDescriptionSupplier.isPresent()) {
+ binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, KafkaTopicDescription>>>() {}).toInstance(tableDescriptionSupplier.get());
+ }
+ else {
+ binder.bind(new TypeLiteral<Supplier<Map<SchemaTableName, KafkaTopicDescription>>>() {}).to(KafkaTableDescriptionSupplier.class).in(Scopes.SINGLETON);
}
}
); | Cleanup warnings in KafkaConnectorFactory | prestodb_presto | train |
d3868f4bbb7dc4908ac2be04ca40252cb12302f4 | diff --git a/fbchat_archive_parser/parser.py b/fbchat_archive_parser/parser.py
index <HASH>..<HASH> 100644
--- a/fbchat_archive_parser/parser.py
+++ b/fbchat_archive_parser/parser.py
@@ -7,7 +7,6 @@ import hashlib
from io import open
from datetime import datetime, timedelta
-from threading import Thread
from sortedcontainers import SortedList
from colorama import Fore, Back, Style
@@ -43,8 +42,7 @@ class FacebookChatHistory:
__DATE_FORMAT = "%A, %B %d, %Y at %I:%M%p"
- def __init__(self, stream, callback=None, progress_output=False,
- filter=None):
+ def __init__(self, stream, progress_output=False, filter=None):
self.chat_threads = dict()
self.message_cache = None
@@ -63,13 +61,7 @@ class FacebookChatHistory:
self.wait_for_next_thread = False
self.thread_signatures = set()
- if callback:
- if not callable(callback):
- raise Exception("Callback must be callable")
- thread = Thread(target=self.__parse_content)
- thread.start()
- else:
- self.__parse_content()
+ self.__parse_content()
def __parse_content(self): | Removed the threading/callback mechanism from the parser | ownaginatious_fbchat-archive-parser | train
00a8e302a1cf7239e735a9f3e5b6575da15f9648 | diff --git a/lib/endpoints/class-wp-rest-terms-controller.php b/lib/endpoints/class-wp-rest-terms-controller.php
index <HASH>..<HASH> 100755
--- a/lib/endpoints/class-wp-rest-terms-controller.php
+++ b/lib/endpoints/class-wp-rest-terms-controller.php
@@ -113,8 +113,13 @@ class WP_REST_Terms_Controller extends WP_REST_Controller {
}
$response = rest_ensure_response( $response );
+
+ // Store pagation values for headers then unset for count query.
+ $per_page = (int) $prepared_args['number'];
+ $page = ceil( ( ( (int) $prepared_args['offset'] ) / $per_page ) + 1 );
unset( $prepared_args['number'] );
unset( $prepared_args['offset'] );
+
$total_terms = wp_count_terms( $this->taxonomy, $prepared_args );
// wp_count_terms can return a falsy value when the term has no children
@@ -123,20 +128,20 @@ class WP_REST_Terms_Controller extends WP_REST_Controller {
}
$response->header( 'X-WP-Total', (int) $total_terms );
- $max_pages = ceil( $total_terms / $request['per_page'] );
+ $max_pages = ceil( $total_terms / $per_page );
$response->header( 'X-WP-TotalPages', (int) $max_pages );
$base = add_query_arg( $request->get_query_params(), rest_url( '/wp/v2/' . $this->get_taxonomy_base( $this->taxonomy ) ) );
- if ( $request['page'] > 1 ) {
- $prev_page = $request['page'] - 1;
+ if ( $page > 1 ) {
+ $prev_page = $page - 1;
if ( $prev_page > $max_pages ) {
$prev_page = $max_pages;
}
$prev_link = add_query_arg( 'page', $prev_page, $base );
$response->link_header( 'prev', $prev_link );
}
- if ( $max_pages > $request['page'] ) {
- $next_page = $request['page'] + 1;
+ if ( $max_pages > $page ) {
+ $next_page = $page + 1;
$next_link = add_query_arg( 'page', $next_page, $base );
$response->link_header( 'next', $next_link );
} | Use the `prepared_args` that are passed to `get_terms` to determine pagination headers | WP-API_WP-API | train |
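Note: a quick Python check of the page-from-offset arithmetic used for the headers (mirrors the PHP ceil() expression):

import math

def page_from_offset(offset, per_page):
    # offset 0..per_page-1 -> page 1, per_page..2*per_page-1 -> page 2, ...
    return math.ceil(offset / per_page) + 1

assert page_from_offset(0, 10) == 1
assert page_from_offset(10, 10) == 2
assert page_from_offset(25, 10) == 4   # mid-page offsets round up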
034d6514e0b094b2ea249d0e5fed0e249d09fa6f | diff --git a/activesupport/test/core_ext/time_with_zone_test.rb b/activesupport/test/core_ext/time_with_zone_test.rb
index <HASH>..<HASH> 100644
--- a/activesupport/test/core_ext/time_with_zone_test.rb
+++ b/activesupport/test/core_ext/time_with_zone_test.rb
@@ -677,7 +677,7 @@ class TimeWithZoneTest < ActiveSupport::TestCase
def test_ruby_19_weekday_name_query_methods
%w(sunday? monday? tuesday? wednesday? thursday? friday? saturday?).each do |name|
assert_respond_to @twz, name
- assert_equal @twz.send(name), @twz.method(name).call
+ assert_equal @twz.public_send(name), @twz.method(name).call
end
end | TimeWithZone#sunday?, monday?, tuesday?, ... are public methods | rails_rails | train |
5f317e778f548fa696bfd57e314306d87a593fdb | diff --git a/packet/bgp.go b/packet/bgp.go
index <HASH>..<HASH> 100644
--- a/packet/bgp.go
+++ b/packet/bgp.go
@@ -2294,25 +2294,26 @@ func flowSpecFragmentParser(rf RouteFamily, args []string) (FlowSpecComponentInt
if len(args) < 2 {
return nil, fmt.Errorf("invalid flowspec fragment specifier")
}
- value := 0
+ items := make([]*FlowSpecComponentItem, 0)
for _, a := range args[1:] {
+ value := 0
switch a {
case "dont-fragment":
if afi, _ := RouteFamilyToAfiSafi(rf); afi == AFI_IP6 {
return nil, fmt.Errorf("can't specify dont-fragment for ipv6")
}
- value |= 0x1
+ value = 0x1
case "is-fragment":
- value |= 0x2
+ value = 0x2
case "first-fragment":
- value |= 0x4
+ value = 0x4
case "last-fragment":
- value |= 0x8
+ value = 0x8
default:
return nil, fmt.Errorf("invalid flowspec fragment specifier")
}
+ items = append(items, NewFlowSpecComponentItem(0, value))
}
- items := []*FlowSpecComponentItem{NewFlowSpecComponentItem(0, value)}
return NewFlowSpecComponent(FlowSpecValueMap[args[0]], items), nil
}
@@ -2541,7 +2542,7 @@ func (v *FlowSpecComponentItem) Serialize() ([]byte, error) {
}
buf := make([]byte, 1+(1<<order))
- buf[0] = byte(uint32(v.Op) | order << 4)
+ buf[0] = byte(uint32(v.Op) | order<<4)
switch order {
case 0:
buf[1] = byte(v.Value) | packet: make multiple flow spec fragmentation flags ORed match instead of ANDed | osrg_gobgp | train |
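Note: a minimal Python illustration of the matching difference (conceptual, not the BGP wire format): OR-ing the flag bits into one component item requires all flags at once, while one item per flag matches any of them.

IS_FRAGMENT, FIRST_FRAGMENT = 0x2, 0x4

def matches_anded(packet_flags):
    combined = IS_FRAGMENT | FIRST_FRAGMENT        # old: single item 0x6
    return packet_flags & combined == combined

def matches_ored(packet_flags):
    items = [IS_FRAGMENT, FIRST_FRAGMENT]          # new: one item per flag
    return any(packet_flags & item for item in items)

assert not matches_anded(IS_FRAGMENT)   # old behaviour: both bits required
assert matches_ored(IS_FRAGMENT)        # new behaviour: either flag matches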
ff0e428891545c6640d9a8adf90b791b5976ab03 | diff --git a/app/models/manager_refresh/inventory_collection.rb b/app/models/manager_refresh/inventory_collection.rb
index <HASH>..<HASH> 100644
--- a/app/models/manager_refresh/inventory_collection.rb
+++ b/app/models/manager_refresh/inventory_collection.rb
@@ -657,7 +657,7 @@ module ManagerRefresh
@unique_indexes_cache = model_class.connection.indexes(model_class.table_name).select(&:unique)
if @unique_indexes_cache.blank?
- raise "#{inventory_collection} and its table #{model_class.table_name} must have a unique index defined, to"\
+ raise "#{self} and its table #{model_class.table_name} must have a unique index defined, to"\
" be able to use saver_strategy :concurrent_safe or :concurrent_safe_batch."
end
@@ -677,7 +677,7 @@ module ManagerRefresh
uniq_key_candidates = unique_indexes.each_with_object([]) { |i, obj| obj << i if (keys - i.columns.map(&:to_sym)).empty? }
if @unique_indexes_cache.blank?
- raise "#{inventory_collection} and its table #{model_class.table_name} must have a unique index defined "\
+ raise "#{self} and its table #{model_class.table_name} must have a unique index defined "\
"covering columns #{keys} to be able to use saver_strategy :concurrent_safe or :concurrent_safe_batch."
end | Inventory collection logging is bad
(transferred from ManageIQ/manageiq@2ad<I>aadc1b<I>eaabe<I>f<I>df4f<I>ea) | ManageIQ_inventory_refresh | train |
e6fb78344eba7a7a3dfc77bd18d2fb4eb4e682b7 | diff --git a/src/selectize.js b/src/selectize.js
index <HASH>..<HASH> 100644
--- a/src/selectize.js
+++ b/src/selectize.js
@@ -115,6 +115,7 @@ $.extend(Selectize.prototype, {
var timeout_focus;
var classes;
var classes_plugins;
+ var inputId;
inputMode = self.settings.mode;
classes = $input.attr('class') || '';
@@ -126,6 +127,11 @@ $.extend(Selectize.prototype, {
$dropdown = $('<div>').addClass(settings.dropdownClass).addClass(inputMode).hide().appendTo($dropdown_parent);
$dropdown_content = $('<div>').addClass(settings.dropdownContentClass).appendTo($dropdown);
+ if(inputId = $input.attr('id')) {
+ $control_input.attr('id', inputId + '_selectized');
+ $('label[for='+inputId+']').attr('for', inputId + '_selectized');
+ }
+
if(self.settings.copyClassesToDropdown) {
$dropdown.addClass(classes);
}
diff --git a/test/interaction.js b/test/interaction.js
index <HASH>..<HASH> 100644
--- a/test/interaction.js
+++ b/test/interaction.js
@@ -100,6 +100,45 @@
});
+ describe('clicking label', function() {
+
+ it('should give it focus to select', function(done) {
+ var inputId = "labeledSelect";
+ $('#fixture').append('<label for="'+inputId+'">select</label>');
+ var label = $('label[for="'+inputId+'"]');
+
+ var test = setup_test('<select id="'+inputId+'">' +
+ '<option value="a">A</option>' +
+ '<option value="b">B</option>' +
+ '</select>', {});
+
+ Syn
+ .click(label)
+ .delay(0, function() {
+ label.remove();
+ expect(test.selectize.isFocused).to.be.equal(true);
+ done();
+ });
+ });
+
+ it('should give it focus to input', function(done) {
+ var inputId = "labeledInput";
+ $('#fixture').append('<label for="'+inputId+'">input</label>');
+ var label = $('label[for="'+inputId+'"]');
+
+ var test = setup_test('<input id="'+inputId+'" type="text" value="a,b,c,d">', {});
+
+ Syn
+ .click(label)
+ .delay(0, function() {
+ label.remove();
+ expect(test.selectize.isFocused).to.be.equal(true);
+ done();
+ });
+ });
+
+ });
+
describe('clicking option', function() {
it('should select it', function(done) { | update existing labels pointing to the original input/select to point to the selectize input/select #<I> | selectize_selectize.js | train |
a38aa54e3ac68bd6a166a459ed3e5a11e952e5dc | diff --git a/src/Rector/AbstractRector.php b/src/Rector/AbstractRector.php
index <HASH>..<HASH> 100644
--- a/src/Rector/AbstractRector.php
+++ b/src/Rector/AbstractRector.php
@@ -137,6 +137,8 @@ abstract class AbstractRector extends NodeVisitorAbstract implements RectorInter
}
/**
+ * @todo maybe use leave node instead where is used array_splice() method?
+ *
* Adds new nodes before or after particular Expression nodes.
*
* @param Node[] $nodes
@@ -145,10 +147,14 @@ abstract class AbstractRector extends NodeVisitorAbstract implements RectorInter
private function prependExpressionNodes(array $nodes): array
{
foreach ($nodes as $i => $node) {
- if ($node instanceof Expression) {
- $nodes = $this->prependNodesAfterAndBeforeExpression($nodes, $node, $i);
- } elseif (isset($node->stmts)) {
+ if (isset($node->stmts)) {
$node->stmts = $this->prependExpressionNodes($node->stmts);
+ if ($node instanceof Node\Stmt\If_) {
+ $node->else->stmts = $this->prependExpressionNodes($node->else->stmts);
+ }
+
+ } elseif ($node instanceof Expression) {
+ $nodes = $this->prependNodesAfterAndBeforeExpression($nodes, $node, $i);
}
}
@@ -168,7 +174,7 @@ abstract class AbstractRector extends NodeVisitorAbstract implements RectorInter
}
if (isset($this->expressionsToPrependAfter[$node])) {
- array_splice($nodes, $i + 1, 0, $this->expressionsToPrependAfter[$node]);
+ array_splice($nodes, $i + 1, 1, $this->expressionsToPrependAfter[$node]);
unset($this->expressionsToPrependAfter[$node]);
}
@@ -197,9 +203,10 @@ abstract class AbstractRector extends NodeVisitorAbstract implements RectorInter
$expressionToBeAddedInString = $this->betterStandardPrinter->prettyPrint([$expressionToBeAdded]);
throw new ShouldNotHappenException(sprintf(
- '"%s" expression was not added %s "%s" in "%s" class',
+ '"%s" expression was not added %s%s"%s" in "%s" class',
$expressionToBeAddedInString,
$type,
+ PHP_EOL,
$targetExpressionInString,
self::class
)); | AbstractRector: fix node prepending for If_ statement | rectorphp_rector | train |
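Note: a minimal Python sketch of the traversal gap being closed (node shapes are illustrative, not php-parser's): recursing into a node's stmts alone never reaches statements stored on the else branch.

def walk(node, visit):
    visit(node)
    for child in node.get('stmts', []):
        walk(child, visit)
    # The fix: an if-node keeps its else-branch statements in a
    # separate field, which must be traversed as well.
    if node.get('type') == 'if' and node.get('else'):
        for child in node['else']['stmts']:
            walk(child, visit)

seen = []
tree = {'type': 'if', 'stmts': [{'type': 'expr'}],
        'else': {'stmts': [{'type': 'expr2'}]}}
walk(tree, lambda n: seen.append(n.get('type')))
assert seen == ['if', 'expr', 'expr2']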
a34d955e9677950e06f4aa2bf9504e0499ea156e | diff --git a/lib/hive/diagnostic.rb b/lib/hive/diagnostic.rb
index <HASH>..<HASH> 100644
--- a/lib/hive/diagnostic.rb
+++ b/lib/hive/diagnostic.rb
@@ -1,53 +1,55 @@
require 'hive'
-require 'device_api/android'
+# require 'device_api/android'
require 'hive/results'
-
+
module Hive
- class Diagnostic
-
- class InvalidParameterError < StandardError
- end
+ class Diagnostic
+
+ class InvalidParameterError < StandardError
+ end
- attr_accessor :config, :last_run, :device_api
+ attr_accessor :config, :last_run, :device_api
- def initialize(config, options)
- @options = options
- @config = config
- @serial = @options['serial']
- @device_api = @options['device_api']
- end
+ def initialize(config, options)
+ @options = options
+ @config = config
+ @serial = @options['serial']
+ @device_api = @options['device_api']
+ end
- def should_run?
- return true if @last_run == nil
- time_now = Time.new.getutc
- last_run_time = @last_run.timestamp
- diff = ((time_now - last_run_time)/5.minutes).round
- if (diff > 2 && @last_run.passed?) || diff > 1
- true
- else
- false
- end
- end
+ def should_run?
+ return true if @last_run == nil
+ time_now = Time.new.getutc
+ last_run_time = @last_run.timestamp
+ diff = ((time_now - last_run_time)/300).round
+ if (diff > 2 && @last_run.passed?) || diff > 1
+ true
+ else
+ false
+ end
+ end
- def run
- Hive.logger.info("Trying to run diagnostic '#{self.class}'")
- if should_run?
- result = diagnose
- result = repair(result) if result.failed?
- @last_run = result
- end
- @last_run
- end
+ def run
+ Hive.logger.info("Trying to run diagnostic '#{self.class}'")
+ if should_run?
+ result = diagnose
+ result = repair(result) if result.failed?
+ @last_run = result
+ else
+ Hive.logger.info("Diagnostic '#{self.class}' last ran less than five minutes before")
+ end
+ @last_run
+ end
- def pass(message= {}, data = {})
- Hive.logger.info(message)
- Hive::Results.new("pass", message, data )
- end
+ def pass(message= {}, data = {})
+ Hive.logger.info(message)
+ Hive::Results.new("pass", message, data )
+ end
- def fail(message ={}, data = {})
- Hive.logger.info(message)
- Hive::Results.new("fail", message, data)
- end
- end
+ def fail(message ={}, data = {})
+ Hive.logger.info(message)
+ Hive::Results.new("fail", message, data)
+ end
+ end
end | Removed Rails reference and fixed indentation | bbc_hive-runner | train
370459095efad7c24a9e83565ca12e928f71e592 | diff --git a/src/com/google/javascript/jscomp/Compiler.java b/src/com/google/javascript/jscomp/Compiler.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/Compiler.java
+++ b/src/com/google/javascript/jscomp/Compiler.java
@@ -1164,6 +1164,11 @@ public class Compiler extends AbstractCompiler implements ErrorHandler, SourceFi
}
final String getCurrentJsSource() {
+ SourceMap sourceMap = getSourceMap();
+ if (sourceMap != null) {
+ sourceMap.reset();
+ }
+
List<String> fileNameRegexList = options.filesToPrintAfterEachPassRegexList;
List<String> moduleNameRegexList = options.modulesToPrintAfterEachPassRegexList;
StringBuilder builder = new StringBuilder(); | Call SourceMap.reset() before generating code for the --print_source_after_each_pass flag and similar flags.
Otherwise (if source maps are enabled) we get a Preconditions failure when trying to generate code the second time.
-------------
Created by MOE: <URL> | google_closure-compiler | train |
78519dca00c4dcce5646fbd787db4f01947eb664 | diff --git a/lib/sprockets/rails.rb b/lib/sprockets/rails.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/rails.rb
+++ b/lib/sprockets/rails.rb
@@ -2,73 +2,91 @@ require 'sprockets'
require 'action_controller/railtie'
require 'sprockets/rails/helper'
+module Rails
+ class Application < Engine
+ # Returns Sprockets::Environment for app config.
+ def assets
+ return unless config.assets.compile
+
+ return @assets if defined? @assets
+
+ @assets = Sprockets::Environment.new(root.to_s) do |env|
+ env.version = ::Rails.env + "-#{config.assets.version}"
+
+ if config.assets.cache_store != false
+ path = "#{config.root}/tmp/cache/assets/#{::Rails.env}"
+ env.cache = ActiveSupport::Cache.lookup_store([:file_store, path])
+ end
+
+ config.assets.paths.each do |path|
+ env.append_path(path)
+ end
+
+ env.js_compressor = config.assets.js_compressor
+ env.css_compressor = config.assets.css_compressor
+
+ app = self
+ env.context_class.class_eval do
+ include ::Sprockets::Rails::Helper
+ define_method(:_rails_app) { app }
+ end
+ end
+ end
+
+ def assets_manifest
+ return @assets_manifest if defined? @assets_manifest
+ path = File.join(::Rails.public_path, config.assets.prefix)
+ @assets_manifest = Sprockets::Manifest.new(assets, path)
+ end
+ end
+end
+
module Sprockets
module Rails
- class Railtie < ::Rails::Railtie
- rake_tasks do |app|
- require 'sprockets/rails/task'
- Task.new(app)
+ module Config
+ def debug_assets?
+ _rails_app.config.assets.debug || super
end
- initializer "sprockets.environment" do |app|
- config = app.config
-
- config_helpers = Module.new do
- define_method :debug_assets? do
- config.assets.debug || super()
- end
- define_method :digest_assets? do
- config.assets.digest
- end
- define_method :assets_prefix do
- config.assets.prefix
- end
- define_method :assets_manifest do
- config.assets.manifest
- end
- define_method :assets_environment do
- app.assets
- end
- end
+ def digest_assets?
+ _rails_app.config.assets.digest
+ end
- if config.assets.compile
- app.assets = Sprockets::Environment.new(app.root.to_s) do |env|
- env.version = ::Rails.env + "-#{config.assets.version}"
+ def assets_prefix
+ _rails_app.config.assets.prefix
+ end
- if config.assets.cache_store != false
- env.cache = ActiveSupport::Cache.lookup_store([:file_store, "#{config.root}/tmp/cache/assets/#{::Rails.env}"])
- end
+ def assets_manifest
+ _rails_app.assets_manifest
+ end
- env.context_class.class_eval do
- include ::Sprockets::Rails::Helper
- include config_helpers
- end
- end
- end
+ def assets_environment
+ _rails_app.assets
+ end
+ end
- manifest_path = File.join(::Rails.public_path, config.assets.prefix)
- config.assets.manifest = Manifest.new(app.assets, manifest_path)
+ class Railtie < ::Rails::Railtie
+ rake_tasks do |app|
+ require 'sprockets/rails/task'
+ Task.new(app)
+ end
+ initializer "sprockets.environment" do |app|
ActiveSupport.on_load(:action_view) do
include ::Sprockets::Rails::Helper
- include config_helpers
+ include Config
+ define_method(:_rails_app) { app }
end
end
config.after_initialize do |app|
return unless app.assets
- config = app.config
- config.assets.paths.each { |path| app.assets.append_path(path) }
-
- app.assets.js_compressor = config.assets.js_compressor
- app.assets.css_compressor = config.assets.css_compressor
-
app.routes.prepend do
- mount app.assets => config.assets.prefix
+ mount app.assets => app.config.assets.prefix
end
- if config.assets.digest
+ if app.config.assets.digest
app.assets = app.assets.index
end
end
diff --git a/lib/sprockets/rails/task.rb b/lib/sprockets/rails/task.rb
index <HASH>..<HASH> 100644
--- a/lib/sprockets/rails/task.rb
+++ b/lib/sprockets/rails/task.rb
@@ -19,7 +19,7 @@ module Sprockets
end
def manifest
- @app.config.assets.manifest
+ @app.assets_manifest
end
def assets
@@ -27,7 +27,7 @@ module Sprockets
end
def output
- File.join(::Rails.public_path, app.config.assets.prefix)
+ File.join(::Rails.public_path, @app.config.assets.prefix)
end
def define | Shuffle asset environment setup around to get rake task ordering right | rails_sprockets-rails | train
4868055d024f99a57206e3d0d0f36da1223118ff | diff --git a/packages/reporters/dev-server/src/Server.js b/packages/reporters/dev-server/src/Server.js
index <HASH>..<HASH> 100644
--- a/packages/reporters/dev-server/src/Server.js
+++ b/packages/reporters/dev-server/src/Server.js
@@ -204,7 +204,10 @@ export default class Server extends EventEmitter {
setHeaders(res);
res.setHeader('Content-Length', '' + stat.size);
- res.setHeader('Content-Type', mime.getType(filePath));
+ let mimeType = mime.getType(filePath);
+ if (mimeType != null) {
+ res.setHeader('Content-Type', mimeType);
+ }
if (req.method === 'HEAD') {
res.end();
return; | Fix devserver flow violations with mime | parcel-bundler_parcel | train |
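Note: the same None-guard in a minimal Python sketch (mimetypes.guess_type, like mime.getType, returns None for unknown extensions, and a None header value fails downstream):

import mimetypes

def content_type_headers(path):
    headers = {}
    mime_type, _ = mimetypes.guess_type(path)
    if mime_type is not None:
        headers['Content-Type'] = mime_type
    return headers

assert 'Content-Type' in content_type_headers('photo.png')
assert content_type_headers('file.unknownext') == {}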
6c524d42302bcdc5e41734367ac6dd72e783c27f | diff --git a/src/flake8_aaa/line_markers.py b/src/flake8_aaa/line_markers.py
index <HASH>..<HASH> 100644
--- a/src/flake8_aaa/line_markers.py
+++ b/src/flake8_aaa/line_markers.py
@@ -94,14 +94,35 @@ class LineMarkers(list):
numbered_lines = list(enumerate(self))
return next(filter(lambda l: l[1] is block_type, numbered_lines))[0] + self.fn_offset
- def check_block_spacing(self, first_block_type: LineType, second_block_type: LineType, error_message: str) -> None:
+ def check_block_spacing(
+ self,
+ first_block_type: LineType,
+ second_block_type: LineType,
+ error_message: str,
+ ) -> None:
+ """
+ Checks there is a clear single line between ``first_block_type`` and
+ ``second_block_type``.
+
+ Note:
+ Is tested via ``check_arrange_act_spacing()`` and
+ ``check_act_assert_spacing()``.
+ """
numbered_lines = list(enumerate(self))
+ first_block_lines = filter(lambda l: l[1] is first_block_type, numbered_lines)
try:
- first_block_lineno = list(filter(lambda l: l[1] is first_block_type, numbered_lines))[-1][0]
+ first_block_lineno = list(first_block_lines)[-1][0]
except IndexError:
# First block has no lines
return
- second_block_lineno = next(filter(lambda l: l[1] is second_block_type, numbered_lines))[0]
+
+ second_block_lines = filter(lambda l: l[1] is second_block_type, numbered_lines)
+ try:
+ second_block_lineno = next(second_block_lines)[0]
+ except StopIteration:
+ # Second block has no lines
+ return
+
blank_lines = [
bl for bl in numbered_lines[first_block_lineno + 1:second_block_lineno] if bl[1] is LineType.blank_line
]
diff --git a/tests/line_markers/test_check_act_assert_spacing.py b/tests/line_markers/test_check_act_assert_spacing.py
index <HASH>..<HASH> 100644
--- a/tests/line_markers/test_check_act_assert_spacing.py
+++ b/tests/line_markers/test_check_act_assert_spacing.py
@@ -24,6 +24,16 @@ def test_comment_before_assert():
assert result is None
+def test_none():
+ line_markers = LineMarkers(2, 0)
+ line_markers[0] = LineType.func_def
+ line_markers[1] = LineType.act_block # do_thing() # act
+
+ result = line_markers.check_act_assert_spacing()
+
+ assert result is None
+
+
# --- FAILURES --- | Handle case where second block does not exist when checking spacing | jamescooke_flake8-aaa | train |
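Note: a minimal demonstration of the failure mode handled above: next() on an exhausted filter raises StopIteration, so an absent second block needs an explicit escape hatch.

def first_index(lines, wanted):
    numbered = list(enumerate(lines))
    try:
        return next(filter(lambda l: l[1] is wanted, numbered))[0]
    except StopIteration:
        return None   # block has no lines; treat as nothing to check

act, blank = 'act', 'blank'
assert first_index([blank, act], act) == 1
assert first_index([blank, blank], act) is None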
5a57ab6005d15d77b5e81ec4943719556a381e6b | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -685,12 +685,19 @@ end
<td>Class method to make the current table searchable.</td>
</tr>
<tr>
- <td>refreshable</td>
- <td>Class method to make the current table refreshable.<p>You must also specify the following block in your <code>will_appear</code> method:</p>
- <pre><code>on_refresh do
+ <td><pre><code>refreshable(
+ callback: :on_refresh,
+ pull_message: "Pull to refresh",
+ refreshing: "Refreshing data…",
+ updated_format: "Last updated at %s",
+ updated_time_format: "%l:%M %p"
+)</code></pre></td>
+ <td>Class method to make the current table refreshable.
+ <p>All parameters are optional. If you do not specify a a callback, it will assume you've implemented an <code>on_refresh</code> method in your tableview.</p>
+ <pre><code>def on_refresh
# Code to start the refresh
end</code></pre>
- <p>And after you're done refreshing everything, call <code>end_refreshing</code> and your tableview will refresh the data automatically.</p></td>
+ <p>And after you're done with your asyncronous process, call <code>end_refreshing</code> to collapse the refresh view and update the last refreshed time and then <code>update_table_data</code>.</p></td>
</tr>
<tr>
<td colspan="2">
diff --git a/lib/ProMotion/screen_helpers/_tables/_refreshable_table.rb b/lib/ProMotion/screen_helpers/_tables/_refreshable_table.rb
index <HASH>..<HASH> 100644
--- a/lib/ProMotion/screen_helpers/_tables/_refreshable_table.rb
+++ b/lib/ProMotion/screen_helpers/_tables/_refreshable_table.rb
@@ -1,8 +1,14 @@
module ProMotion::MotionTable
module RefreshableTable
- def make_refreshable
+ def make_refreshable(params={})
+ pull_message = params[:pull_message] || "Pull to refresh"
+ @refreshing = params[:refreshing] || "Refreshing data..."
+ @updated_format = params[:updated_format] || "Last updated at %s"
+ @updated_time_format = params[:updated_time_format] || "%l:%M %p"
+ @refreshable_callback = params[:callback]
+
@refresh = UIRefreshControl.alloc.init
- @refresh.attributedTitle = NSAttributedString.alloc.initWithString("Pull to Refresh")
+ @refresh.attributedTitle = NSAttributedString.alloc.initWithString(pull_message)
@refresh.addTarget(self, action:'refreshView:', forControlEvents:UIControlEventValueChanged)
self.refreshControl = @refresh
end
@@ -12,20 +18,21 @@ module ProMotion::MotionTable
# UIRefreshControl Delegates
def refreshView(refresh)
- refresh.attributedTitle = NSAttributedString.alloc.initWithString("Refreshing data...")
- @on_refresh.call if @on_refresh
+ refresh.attributedTitle = NSAttributedString.alloc.initWithString(@refreshing)
+ self.send(@refreshable_callback) if @refreshable_callback
end
- def on_refresh(&block)
- @on_refresh = block
+ def start_refreshing
+ return unless @refresh
+
+ @refresh.beginRefreshing
end
def end_refreshing
return unless @refresh
- @refresh.attributedTitle = NSAttributedString.alloc.initWithString("Last updated on #{Time.now.strftime("%H:%M:%S")}")
+ @refresh.attributedTitle = NSAttributedString.alloc.initWithString(sprintf(@updated_format, Time.now.strftime(@updated_time_format)))
@refresh.endRefreshing
- self.update_table_data
end
end
end
\ No newline at end of file
diff --git a/lib/ProMotion/screen_helpers/_tables/_sectioned_table.rb b/lib/ProMotion/screen_helpers/_tables/_sectioned_table.rb
index <HASH>..<HASH> 100644
--- a/lib/ProMotion/screen_helpers/_tables/_sectioned_table.rb
+++ b/lib/ProMotion/screen_helpers/_tables/_sectioned_table.rb
@@ -8,7 +8,7 @@ module ProMotion::MotionTable
self.make_searchable(content_controller: self, search_bar: self.class.get_searchable_params)
end
if ios_version_greater_eq?("6.0") && self.class.respond_to?(:get_refreshable) && self.class.get_refreshable
- self.make_refreshable
+ self.make_refreshable(self.class.get_refreshable_params)
end
end
diff --git a/lib/ProMotion/screens/_table_screen_module.rb b/lib/ProMotion/screens/_table_screen_module.rb
index <HASH>..<HASH> 100644
--- a/lib/ProMotion/screens/_table_screen_module.rb
+++ b/lib/ProMotion/screens/_table_screen_module.rb
@@ -25,8 +25,10 @@ module ProMotion
end
# Refreshable
- def refreshable(&block)
- @refreshable_block = block
+ def refreshable(params = {})
+ params[:callback] = :on_refresh unless params[:callback]
+
+ @refreshable_params = params
@refreshable = true
end
@@ -34,6 +36,10 @@ module ProMotion
@refreshable ||= false
end
+ def get_refreshable_params
+ @refreshable_params ||= nil
+ end
+
end
def self.included(base)
base.extend(ClassMethods) | Made callback pattern more consistent. Made all strings configurable. Updated documentation. | infinitered_ProMotion | train |
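The commit above swaps a stored block for a callback dispatched by name (`self.send(@refreshable_callback)`) and makes every display string configurable with defaults. A minimal Python sketch of the same pattern; the class and method names here are illustrative, not ProMotion's actual API:

```python
class RefreshableTable:
    def make_refreshable(self, params=None):
        params = params or {}
        # Configurable strings, with the same defaults the commit uses.
        self.pull_message = params.get("pull_message", "Pull to refresh")
        self.refreshing = params.get("refreshing", "Refreshing data...")
        # Store the callback *name* rather than a closure.
        self.refreshable_callback = params.get("callback", "on_refresh")

    def refresh_view(self):
        # Dispatch by name, like Ruby's `self.send(@refreshable_callback)`.
        callback = getattr(self, self.refreshable_callback, None)
        if callback:
            callback()
```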
e8bb8b81a48c24761374c78ff86acf211c533d94 | diff --git a/topydo/lib/EditCommand.py b/topydo/lib/EditCommand.py
index <HASH>..<HASH> 100644
--- a/topydo/lib/EditCommand.py
+++ b/topydo/lib/EditCommand.py
@@ -32,12 +32,15 @@ class EditCommand(MultiCommand, ListCommand):
p_error, p_input)
self.is_expression = False
+ self.edit_archive = False
def _process_flags(self):
- opts, args = self.getopt('xe')
+ opts, args = self.getopt('xed')
for opt, value in opts:
- if opt == '-x':
+ if opt == '-d':
+ self.edit_archive = True
+ elif opt == '-x':
self.show_all = True
elif opt == '-e':
self.is_expression = True
@@ -101,6 +104,11 @@ class EditCommand(MultiCommand, ListCommand):
else:
self._process_flags()
+ if self.edit_archive:
+ archive = config().archive()
+
+ return call([editor, archive]) == 0
+
if self.is_expression:
self.todos = self._view()._viewdata
else:
@@ -136,7 +144,8 @@ class EditCommand(MultiCommand, ListCommand):
return """Synopsis:
edit
edit <NUMBER1> [<NUMBER2> ...]
- edit -e [-x] [expression]"""
+ edit -e [-x] [expression]
+ edit -d"""
def help(self):
return """\
@@ -145,7 +154,7 @@ Launches a text editor to edit todos.
Without any arguments it will just open the todo.txt file. Alternatively it can
edit todo item(s) with the given number(s) or edit relevant todos matching
the given expression. See `topydo help ls` for more information on relevant
-todo items.
+todo items. It is also possible to open the archive file.
By default it will use $EDITOR in your environment, otherwise it will fall back
to 'vi'.
@@ -153,4 +162,5 @@ to 'vi'.
-e : Treat the subsequent arguments as an expression.
-x : Edit *all* todos matching the expression (i.e. do not filter on
dependencies or relevance).
+-d : Open the archive file.
""" | Edit the archive file with '-d' option.
This implements archive option from #2. | bram85_topydo | train |
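A hedged sketch of the `-d` flow the commit adds: parse the flag and, when it is set, hand the archive file straight to the editor instead of the todo list. The archive path and default editor below are stand-ins, not topydo's configuration logic:

```python
import getopt
import os
import subprocess

def edit_command(args, archive_path="done.txt"):
    opts, _ = getopt.getopt(args, "xed")
    edit_archive = any(opt == "-d" for opt, _ in opts)
    editor = os.environ.get("EDITOR", "vi")
    if edit_archive:
        # Mirrors `return call([editor, archive]) == 0` in the commit.
        return subprocess.call([editor, archive_path]) == 0
    # ... otherwise fall through to the normal todo editing path ...
```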
9c1ba6b6dda4c0d573adb0e0bf6c116eaa6542b7 | diff --git a/morango/syncsession.py b/morango/syncsession.py
index <HASH>..<HASH> 100644
--- a/morango/syncsession.py
+++ b/morango/syncsession.py
@@ -85,9 +85,9 @@ class SyncClient(object):
with connection.cursor() as cursor:
queue_buffer = """INSERT INTO {outgoing_buffer}
(model_uuid, serialized, deleted, last_saved_instance, last_saved_counter,
- model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id)
+ model_name, profile, partition, source_id, conflicting_serialized_data, transfer_session_id, _self_ref_fk)
SELECT id, serialized, deleted, last_saved_instance, last_saved_counter,
- model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}'
+ model_name, profile, partition, source_id, conflicting_serialized_data, '{transfer_session_id}', _self_ref_fk
FROM {store}
WHERE {condition}""".format(outgoing_buffer=Buffer._meta.db_table,
transfer_session_id=self.transfer_session_id,
@@ -179,10 +179,10 @@ class SyncClient(object):
def _dequeuing_merge_conflict_buffer(self, cursor, current_id):
# transfer buffer serialized into conflicting store
merge_conflict_store = """REPLACE INTO {store} (id, serialized, deleted, last_saved_instance, last_saved_counter, model_name,
- profile, partition, source_id, conflicting_serialized_data, dirty_bit)
+ profile, partition, source_id, conflicting_serialized_data, dirty_bit, _self_ref_fk)
SELECT store.id, store.serialized, store.deleted OR buffer.deleted, '{current_instance_id}',
{current_instance_counter}, store.model_name, store.profile, store.partition, store.source_id,
- buffer.serialized || '\n' || store.conflicting_serialized_data, 1
+ buffer.serialized || '\n' || store.conflicting_serialized_data, 1, store._self_ref_fk
FROM {buffer} AS buffer, {store} AS store
/*Scope to a single record.*/
WHERE store.id = buffer.model_uuid
@@ -269,9 +269,10 @@ class SyncClient(object):
def _dequeuing_insert_remaining_buffer(self, cursor):
# insert remaining records into store
insert_remaining_buffer = """REPLACE INTO {store} (id, serialized, deleted, last_saved_instance, last_saved_counter,
- model_name, profile, partition, source_id, conflicting_serialized_data, dirty_bit)
+ model_name, profile, partition, source_id, conflicting_serialized_data, dirty_bit, _self_ref_fk)
SELECT buffer.model_uuid, buffer.serialized, buffer.deleted, buffer.last_saved_instance, buffer.last_saved_counter,
- buffer.model_name, buffer.profile, buffer.partition, buffer.source_id, buffer.conflicting_serialized_data, 1
+ buffer.model_name, buffer.profile, buffer.partition, buffer.source_id, buffer.conflicting_serialized_data, 1,
+ buffer._self_ref_fk
FROM {buffer} AS buffer
WHERE buffer.transfer_session_id = '{transfer_session_id}'
""".format(buffer=Buffer._meta.db_table, | Update sql queries to include new field. | learningequality_morango | train |
e64ac20a3b168a5950c8900cf66d8e39e9747ff3 | diff --git a/v2/handler.js b/v2/handler.js
index <HASH>..<HASH> 100644
--- a/v2/handler.js
+++ b/v2/handler.js
@@ -133,7 +133,8 @@ TChannelV2Handler.prototype.writeCopy = function writeCopy(buffer) {
TChannelV2Handler.prototype.pushFrame = function pushFrame(frame) {
var self = this;
- var writeBuffer = self.writeBuffer || new Buffer(v2.Frame.MaxSize);
+ var isShared = !!self.writeBuffer;
+ var writeBuffer = isShared || new Buffer(v2.Frame.MaxSize);
var res = v2.Frame.RW.writeInto(frame, writeBuffer, 0);
var err = res.err;
@@ -145,7 +146,11 @@ TChannelV2Handler.prototype.pushFrame = function pushFrame(frame) {
}
var buf = writeBuffer.slice(0, res.offset);
- self.writeCopy(buf);
+ if (isShared) {
+ self.writeCopy(buf);
+ } else {
+ self.write(buf);
+ }
};
TChannelV2Handler.prototype.nextFrameId = function nextFrameId() { | TChannelV2Handler: don't make a gratuitous copy of a non-shared buffer | uber_tchannel-node | train |
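The point of the commit: bytes written into a shared (reused) buffer must be copied before being handed off, while a freshly allocated private buffer can be handed off as is. A rough Python sketch of that decision; `frame.write_into` is a hypothetical serializer:

```python
def push_frame(frame, write, shared_buffer=None):
    is_shared = shared_buffer is not None
    buf = shared_buffer if is_shared else bytearray(64 * 1024)
    n = frame.write_into(buf)            # hypothetical: returns bytes written
    view = memoryview(buf)[:n]
    if is_shared:
        write(bytes(view))               # copy, since the buffer is reused
    else:
        write(view)                      # no copy needed for a private buffer
```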
3ebf2977f1b0cec4d91c51a82fff9158e0e12c4e | diff --git a/bzr_exporter.py b/bzr_exporter.py
index <HASH>..<HASH> 100755
--- a/bzr_exporter.py
+++ b/bzr_exporter.py
@@ -42,37 +42,6 @@ from bzrlib import (
from bzrlib.plugins.fastimport import commands, helpers, marks_file
-# This is adapted from _linear_view_verisons in log.py in bzr 1.12.
-def _iter_linear_revisions(branch, start_rev_id, end_rev_id):
- """Calculate a sequence of revisions, newest to oldest.
-
- :param start_rev_id: the lower revision-id
- :param end_rev_id: the upper revision-id
- :return: An iterator of revision_ids
- :raises ValueError: if a start_rev_id is specified but
- is not found walking the left-hand history
- """
- br_revno, br_rev_id = branch.last_revision_info()
- repo = branch.repository
- if start_rev_id is None and end_rev_id is None:
- for revision_id in repo.iter_reverse_revision_history(br_rev_id):
- yield revision_id
- else:
- if end_rev_id is None:
- end_rev_id = br_rev_id
- found_start = start_rev_id is None
- for revision_id in repo.iter_reverse_revision_history(end_rev_id):
- if not found_start and revision_id == start_rev_id:
- yield revision_id
- found_start = True
- break
- else:
- yield revision_id
- else:
- if not found_start:
- raise ValueError()
-
-
class BzrFastExporter(object):
def __init__(self, source, destination, git_branch=None, checkpoint=-1,
@@ -123,18 +92,16 @@ class BzrFastExporter(object):
start_rev_id = None
end_rev_id = None
self.note("Calculating the revisions to include ...")
- view_revisions = reversed(list(_iter_linear_revisions(self.branch,
- start_rev_id, end_rev_id)))
+ view_revisions = reversed([rev_id for rev_id, _, _, _ in
+ self.branch.iter_merge_sorted_revisions(end_rev_id, start_rev_id)])
# If a starting point was given, we need to later check that we don't
# start emitting revisions from before that point. Collect the
# revisions to exclude now ...
if start_rev_id is not None:
- # The result is inclusive so skip the first (the oldest) one
self.note("Calculating the revisions to exclude ...")
- uninteresting = list(_iter_linear_revisions(self.branch, None,
- start_rev_id))[1:]
- self.excluded_revisions = set(uninteresting)
- return list(view_revisions)
+ self.excluded_revisions = set([rev_id for rev_id, _, _, _ in
+ self.branch.iter_merge_sorted_revisions(start_rev_id)])
+ return view_revisions
def run(self):
# Open the source
@@ -143,7 +110,9 @@ class BzrFastExporter(object):
# Export the data
self.branch.repository.lock_read()
try:
- for revid in self.interesting_history():
+ interesting = self.interesting_history()
+ self.note("Starting export ...")
+ for revid in interesting:
self.emit_commit(revid, self.git_branch)
if self.branch.supports_tags():
self.emit_tags()
@@ -200,7 +169,7 @@ class BzrFastExporter(object):
def is_empty_dir(self, tree, path):
path_id = tree.path2id(path)
- if path_id == None:
+ if path_id is None:
self.warning("Skipping empty_dir detection - no file_id for %s" %
(path,))
return False
@@ -228,14 +197,13 @@ class BzrFastExporter(object):
self.revid_to_mark[revid] = -1
return
- # Emit parents
- nparents = len(revobj.parent_ids)
- if nparents:
- for parent in revobj.parent_ids:
- self.emit_commit(parent, git_branch)
-
# Get the primary parent
+ # TODO: Consider the excluded revisions when deciding the parents.
+ # Currently, a commit with parents that are excluded ought to be
+ # triggering the git_branch calculation below (and it is not).
+ # IGC 20090824
ncommits = len(self.revid_to_mark)
+ nparents = len(revobj.parent_ids)
if nparents == 0:
if ncommits:
# This is a parentless commit but it's not the first one | Stop fast-export from exceeding the maximum recursion depth | jelmer_python-fastimport | train |
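The rewrite replaces a hand-rolled left-hand-history walk with `iter_merge_sorted_revisions` and derives the exclusion set from the same iterator. The shape of that logic in plain Python over a simple revision list, without bzrlib:

```python
def interesting_history(revs_newest_first, start_rev_id=None):
    view = list(reversed(revs_newest_first))   # oldest -> newest
    excluded = set()
    if start_rev_id is not None:
        # Like the original, an unknown start point is an error
        # (list.index raises ValueError here).
        idx = view.index(start_rev_id)
        excluded = set(view[: idx + 1])        # emitted later as skips
    return view, excluded
```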
d299cc359aec623af7839fb57bfb07d1c54a9038 | diff --git a/src/Defender/Middlewares/NeedsPermissionMiddleware.php b/src/Defender/Middlewares/NeedsPermissionMiddleware.php
index <HASH>..<HASH> 100644
--- a/src/Defender/Middlewares/NeedsPermissionMiddleware.php
+++ b/src/Defender/Middlewares/NeedsPermissionMiddleware.php
@@ -1,7 +1,7 @@
<?php namespace Artesaos\Defender\Middlewares;
use Closure;
-use Illuminate\Contracts\Auth\Authenticatable;
+use Illuminate\Contracts\Auth\Guard;
/**
* Class DefenderHasPermissionMiddleware
@@ -17,11 +17,11 @@ class NeedsPermissionMiddleware extends AbstractDefenderMiddleware {
protected $user;
/**
- * @param Authenticatable $user
+ * @param Guard $auth
*/
- public function __construct(Authenticatable $user)
+ public function __construct(Guard $auth)
{
- $this->user = $user;
+ $this->user = $auth->user();
}
/**
@@ -34,6 +34,11 @@ class NeedsPermissionMiddleware extends AbstractDefenderMiddleware {
$permissions = $this->getPermissions($request);
$anyPermission = $this->getAny($request);
+ if (is_null($this->user))
+ {
+ return response('Forbidden', 403); // TODO: Exception?
+ }
+
if (is_array($permissions) and count($permissions) > 0)
{
$canResult = true;
diff --git a/src/Defender/Middlewares/NeedsRoleMiddleware.php b/src/Defender/Middlewares/NeedsRoleMiddleware.php
index <HASH>..<HASH> 100644
--- a/src/Defender/Middlewares/NeedsRoleMiddleware.php
+++ b/src/Defender/Middlewares/NeedsRoleMiddleware.php
@@ -1,7 +1,7 @@
<?php namespace Artesaos\Defender\Middlewares;
use Closure;
-use Illuminate\Contracts\Auth\Authenticatable;
+use Illuminate\Contracts\Auth\Guard;
/**
* Class DefenderHasPermissionMiddleware
@@ -15,11 +15,11 @@ class NeedsRoleMiddleware extends AbstractDefenderMiddleware {
protected $user;
/**
- * @param Authenticatable $user
+ * @param Guard $auth
*/
- public function __construct(Authenticatable $user)
+ public function __construct(Guard $auth)
{
- $this->user = $user;
+ $this->user = $auth->user();
}
/**
@@ -32,6 +32,11 @@ class NeedsRoleMiddleware extends AbstractDefenderMiddleware {
$roles = $this->getRoles($request);
$anyRole = $this->getAny($request);
+ if (is_null($this->user))
+ {
+ return response('Forbidden', 403); // TODO: Exception?
+ }
+
if (is_array($roles) and count($roles) > 0)
{
$hasResult = true; | [Fix] Fix the exception when the application does not have a user logged in. | artesaos_defender | train |
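The fix resolves the user through the auth guard once, at construction, and returns 403 up front when nobody is logged in. A framework-agnostic Python sketch of that middleware shape, with illustrative names:

```python
class NeedsPermissionMiddleware:
    def __init__(self, guard):
        # Resolve via the guard instead of injecting a user directly;
        # guard.user() may legitimately return None.
        self.user = guard.user()

    def handle(self, request, next_handler):
        if self.user is None:
            return (403, "Forbidden")    # fail fast when unauthenticated
        return next_handler(request)
```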
b99ff5edcc567f34c275c0dc2646e72417af8282 | diff --git a/__tests__/stringify.js b/__tests__/stringify.js
index <HASH>..<HASH> 100644
--- a/__tests__/stringify.js
+++ b/__tests__/stringify.js
@@ -105,3 +105,11 @@ test('Map with non-Pair item', () => {
doc.contents.items.push('TEST')
expect(() => String(doc)).toThrow(/^Map items must all be pairs.*TEST/)
})
+
+test('eemeli/yaml#43: Quoting colons', () => {
+ const doc = new YAML.Document()
+ doc.contents = YAML.createNode({ key: ':' })
+ const str = String(doc)
+ expect(() => YAML.parse(str)).not.toThrow()
+ expect(str).toBe('key: ":"\n')
+})
diff --git a/src/schema/_string.js b/src/schema/_string.js
index <HASH>..<HASH> 100644
--- a/src/schema/_string.js
+++ b/src/schema/_string.js
@@ -220,7 +220,7 @@ function plainString(item, ctx, onComment) {
}
if (
!value ||
- /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t ]$/.test(
+ /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(
value
)
) {
@@ -229,7 +229,7 @@ function plainString(item, ctx, onComment) {
// - start with an indicator character (except [?:-]) or /[?-] /
// - '\n ', ': ' or ' \n' anywhere
// - '#' not preceded by a non-space char
- // - end with ' '
+ // - end with ' ' or ':'
return implicitKey || inFlow || value.indexOf('\n') === -1
? value.indexOf('"') !== -1 && value.indexOf("'") === -1
? singleQuotedString(value, ctx) | Quote plain strings ending with a ":" (Fixes #<I>) | eemeli_yaml | train |
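The regex gains `:` in the set of characters that may not end a plain scalar, since an unquoted trailing colon is ambiguous to a YAML parser. A quick Python check in the same spirit (simplified from the full rule in the commit):

```python
import re

# Plain style is unsafe if the value is empty, starts with an
# indicator character, or ends with a space, tab, newline or ':'.
UNSAFE = re.compile(r"^[\n\t ,\[\]{}#&*!|>'\"%@`]|[\n\t :]$")

def needs_quoting(value: str) -> bool:
    return not value or bool(UNSAFE.search(value))

assert needs_quoting(":")          # now serialized as key: ":"
assert not needs_quoting("abc")
```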
42b22f1acc8d51850cd05e10628c73d51bead493 | diff --git a/src/java/com/threerings/presents/dobj/DObject.java b/src/java/com/threerings/presents/dobj/DObject.java
index <HASH>..<HASH> 100644
--- a/src/java/com/threerings/presents/dobj/DObject.java
+++ b/src/java/com/threerings/presents/dobj/DObject.java
@@ -1,5 +1,5 @@
//
-// $Id: DObject.java,v 1.49 2002/09/19 16:38:03 mdb Exp $
+// $Id: DObject.java,v 1.50 2002/09/25 03:02:00 mdb Exp $
package com.threerings.presents.dobj;
@@ -514,9 +514,9 @@ public class DObject implements Streamable
public String toString ()
{
StringBuffer buf = new StringBuffer();
- StringUtil.fieldsToString(buf, this);
+ StringUtil.fieldsToString(buf, this, "\n");
if (buf.length() > 0) {
- buf.insert(0, ", ");
+ buf.insert(0, "\n");
}
buf.insert(0, _oid);
buf.insert(0, "[oid="); | Experimental alternate toString() formatting style for DObject. Hopefully
it will be well liked.
git-svn-id: svn+ssh://src.earth.threerings.net/narya/trunk@<I> <I>f4-<I>e9-<I>-aa3c-eee0fc<I>fb1 | threerings_narya | train |
a45b1f9987effb444ff9fc33a499aa3aaab8e168 | diff --git a/lib/strong_migrations/checker.rb b/lib/strong_migrations/checker.rb
index <HASH>..<HASH> 100644
--- a/lib/strong_migrations/checker.rb
+++ b/lib/strong_migrations/checker.rb
@@ -250,6 +250,7 @@ Then add the foreign key in separate migrations."
result = yield
+ # outdated statistics + a new index can hurt performance of existing queries
if StrongMigrations.auto_analyze && direction == :up && method == :add_index
if postgresql?
# TODO remove verbose in 0.7.0 | Added comment about analyze [skip ci] | ankane_strong_migrations | train |
0b4da8c71bc4e46b3b7f244aa54ffe61656844b6 | diff --git a/README.rdoc b/README.rdoc
index <HASH>..<HASH> 100644
--- a/README.rdoc
+++ b/README.rdoc
@@ -77,7 +77,7 @@ See RestClient::Resource docs for details.
== Exceptions (see http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html)
-* for results code between 200 and 206 a RestClient::Response will be returned
+* for results code between 200 and 207 a RestClient::Response will be returned
* for results code 301 and 302 the redirection will be followed if the request is a get or a head
* for result code 303 the redirection will be followed and the request transformed into a get
* for other cases a RestClient::Exception holding the Response will be raised, a specific exception class will be thrown for know error codes
diff --git a/lib/restclient/abstract_response.rb b/lib/restclient/abstract_response.rb
index <HASH>..<HASH> 100644
--- a/lib/restclient/abstract_response.rb
+++ b/lib/restclient/abstract_response.rb
@@ -37,7 +37,7 @@ module RestClient
# Return the default behavior corresponding to the response code:
# the response itself for code in 200..206, redirection for 301 and 302 in get and head cases, redirection for 303 and an exception in other cases
def return! request = nil, &block
- if (200..206).include? code
+ if (200..207).include? code
self
elsif [301, 302].include? code
unless [:get, :head].include? args[:method]
diff --git a/lib/restclient/exceptions.rb b/lib/restclient/exceptions.rb
index <HASH>..<HASH> 100644
--- a/lib/restclient/exceptions.rb
+++ b/lib/restclient/exceptions.rb
@@ -9,6 +9,7 @@ module RestClient
204 => 'No Content',
205 => 'Reset Content',
206 => 'Partial Content',
+ 207 => 'Multi-Status',
300 => 'Multiple Choices',
301 => 'Moved Permanently',
302 => 'Found',
diff --git a/spec/request_spec.rb b/spec/request_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/request_spec.rb
+++ b/spec/request_spec.rb
@@ -59,7 +59,7 @@ describe RestClient::Request do
end
it "doesn't classify successful requests as failed" do
- 203.upto(206) do |code|
+ 203.upto(207) do |code|
res = mock("result")
res.stub!(:code).and_return(code.to_s)
res.stub!(:body).and_return("")
diff --git a/spec/response_spec.rb b/spec/response_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/response_spec.rb
+++ b/spec/response_spec.rb
@@ -66,7 +66,7 @@ describe RestClient::Response do
it "should throw an exception for other codes" do
RestClient::Exceptions::EXCEPTIONS_MAP.each_key do |code|
- unless (200..206).include? code
+ unless (200..207).include? code
net_http_res = mock('net http response', :code => code.to_i)
response = RestClient::Response.create('abc', net_http_res, {})
lambda { response.return!}.should raise_error | added <I> Multi-Status to the list of legit codes | rest-client_rest-client | train |
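The success window widens from 200..206 to 200..207 so WebDAV's 207 Multi-Status is returned instead of raised. The same guard, sketched in Python:

```python
SUCCESS = range(200, 208)    # 200..207 inclusive, now covers Multi-Status

def handle_response(code, body):
    if code in SUCCESS:
        return body
    if code in (301, 302, 303):
        raise NotImplementedError("redirect handling elided in this sketch")
    raise RuntimeError(f"HTTP {code}")

assert handle_response(207, "<multistatus/>") == "<multistatus/>"
```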
dc1d8c3781a9fef1209faa599287259b7be40e7a | diff --git a/ide/webstorm/webstorm.js b/ide/webstorm/webstorm.js
index <HASH>..<HASH> 100644
--- a/ide/webstorm/webstorm.js
+++ b/ide/webstorm/webstorm.js
@@ -23,8 +23,11 @@ function WebStorm() {
this.executable = 'wstorm';
} else {
// todo in windows check if this webstorm.exe exists otherwise prompt for a path and save it to disk}
- this.executable = '"C:/Program Files (x86)/JetBrains/WebStorm 8.0.1/bin/WebStorm.exe"';
- if (!fs.existsSync(this.executable))
+ var webstormPath = path.resolve('/', 'Program Files (x86)/JetBrains/WebStorm 8.0.1/bin/WebStorm.exe');
+
+ this.executable = '"' + webstormPath + '"';
+
+ if (!this.templateUtil.fileExists(this.executable))
console.error('Error the WebStorm.exe is not present at', this.executable);
}
}
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -103,4 +103,13 @@ ideTemplate.isDirectory = function (source)
return fs.statSync(source).isDirectory();
};
+ideTemplate.fileExists = function (filePath){
+ try{
+ fs.statSync(filePath);
+ }catch(err){
+ if(err.code == 'ENOENT') return false;
+ }
+ return true;
+};
+
module.exports = ideTemplate;
\ No newline at end of file | fix(windows): Fixed the check for the existence of the WebStorm executable file on Windows. | impaler_ide-template | train
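The commit's `fileExists` helper treats only `ENOENT` as "missing". The equivalent stat-based probe in Python; unlike the JavaScript version, this sketch re-raises other errors rather than treating them as "exists":

```python
import errno
import os

def file_exists(path: str) -> bool:
    try:
        os.stat(path)
    except OSError as err:
        if err.errno == errno.ENOENT:    # only "no such file" means missing
            return False
        raise                            # permission problems etc. propagate
    return True
```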
6e00c5432b1dcbd207b25906b57e6c1e8469bb91 | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -22,7 +22,7 @@
* [chain.unsuppress](#chainunsuppress)
* [chain.bind](#chainbind)
* [chain.name](#chainname)
- * [promix.handler](#promixhandler)
+ * [promix.handle](#promixhandle)
* [promix.promise](#promixpromise)
4. [Examples](#examples)
* [In the browser](#in-the-browser)
@@ -770,17 +770,17 @@ console.log(promise);
`````
<br />
-###promix.handler()
+###promix.handle()
Set the global error handler for uncaught promise/chain errors.
Usage:
-> **promix.handler( function )**
+> **promix.handle( function )**
If a promise is rejected with an error and has no error handler of its own to receive it, Promix will pass that error into the global handler specified with `.handler()`, if it exists. This will keep the error from being thrown:
`````javascript
var promise = promix.promise();
-promix.handler(function ( error ) {
+promix.handle(function ( error ) {
console.log(error);
//Error: An arbitrary error
@@ -796,7 +796,7 @@ promise.reject(new Error('An arbitrary error'));
Any uncaught errors within chains created with `promix.when()` will pass to the global handler, as well:
`````javascript
-promix.handler(function ( error ) {
+promix.handle(function ( error ) {
console.log(error);
//Error: This function throws errors (foo)
diff --git a/lib/Chain.js b/lib/Chain.js
index <HASH>..<HASH> 100644
--- a/lib/Chain.js
+++ b/lib/Chain.js
@@ -211,6 +211,10 @@ Chain.prototype = {
}
if ( this.suppressed ) {
console.log('working here');
+ if ( this.rejected || this.suppressed ) {
+ if ( Handler.handle ) {
+ Handler.handle(error);
+ }
return;
}
length = this.steps.length;
@@ -223,8 +227,8 @@ Chain.prototype = {
offset ++;
}
if ( this.silenced === false ) {
- if ( Handler.handler ) {
- Handler.handler(error);
+ if ( Handler.handle ) {
+ return void Handler.handle(error);
}
else {
throw error;
diff --git a/lib/Handler.js b/lib/Handler.js
index <HASH>..<HASH> 100644
--- a/lib/Handler.js
+++ b/lib/Handler.js
@@ -1,8 +1,8 @@
var Handler = {
- set : function ( handler ) {
- this.handler = handler;
+ set : function set ( handle ) {
+ this.handle = handle;
},
- handler : null
+ handle : null
};
module.exports = Handler;
diff --git a/lib/Promise.js b/lib/Promise.js
index <HASH>..<HASH> 100644
--- a/lib/Promise.js
+++ b/lib/Promise.js
@@ -39,8 +39,8 @@ function Promise ( base ) {
result = true;
}
if ( ! result ) {
- if ( Handler.handler ) {
- Handler.handler(error);
+ if ( Handler.handle ) {
+ Handler.handle(error);
}
}
} | Changed promix.handler() to promix.handle() throughout the module. | pachet_promix | train |
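After the rename, consumers register a single global fallback that receives any error no promise handled itself. A compact Python version of that pattern:

```python
class Handler:
    handle = None                # the global fallback, set via Handler.set

    @classmethod
    def set(cls, fn):
        cls.handle = fn

def reject(error):
    if Handler.handle:
        Handler.handle(error)    # routed to the global handler, not thrown
    else:
        raise error

Handler.set(lambda e: print("caught:", e))
reject(ValueError("An arbitrary error"))
```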
ed0cf2a7366e9ae3929635d74154f208b71e2e6f | diff --git a/lib/active_enum/extensions.rb b/lib/active_enum/extensions.rb
index <HASH>..<HASH> 100644
--- a/lib/active_enum/extensions.rb
+++ b/lib/active_enum/extensions.rb
@@ -63,24 +63,26 @@ module ActiveEnum
# user.sex(:meta_key)
#
def define_active_enum_read_method(attribute)
- define_method("#{attribute}") do |*arg|
- arg = arg.first
- value = super()
- enum = self.class.active_enum_for(attribute)
+ class_eval <<-DEF
+ def #{attribute}(arg=nil)
+ value = super()
+ return if value.nil? && arg.nil?
- case arg
- when :id
- value if enum[value]
- when :name
- enum[value]
- when :enum
- enum
- when Symbol
- (enum.meta(value) || {})[arg]
- else
- ActiveEnum.use_name_as_value ? enum[value] : value
+ enum = self.class.active_enum_for(:#{attribute})
+ case arg
+ when :id
+ value if enum[value]
+ when :name
+ enum[value]
+ when :enum
+ enum
+ when Symbol
+ (enum.meta(value) || {})[arg]
+ else
+ #{ActiveEnum.use_name_as_value ? 'enum[value]' : 'value' }
+ end
end
- end
+ DEF
end
# Define write method to also handle enum value
@@ -89,15 +91,16 @@ module ActiveEnum
# user.sex = :male
#
def define_active_enum_write_method(attribute)
- define_method("#{attribute}=") do |arg|
- enum = self.class.active_enum_for(attribute)
- if arg.is_a?(Symbol)
- value = enum[arg]
- super(value)
- else
- super(arg)
+ class_eval <<-DEF
+ def #{attribute}=(arg)
+ if arg.is_a?(Symbol)
+ value = self.class.active_enum_for(:#{attribute})[arg]
+ super(value)
+ else
+ super(arg)
+ end
end
- end
+ DEF
end
# Define question method to check enum value against attribute value
diff --git a/spec/active_enum/extensions_spec.rb b/spec/active_enum/extensions_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/active_enum/extensions_spec.rb
+++ b/spec/active_enum/extensions_spec.rb
@@ -198,6 +198,13 @@ describe ActiveEnum::Extensions do
ActiveEnum.use_name_as_value = true
end
+ before do
+ reset_class Person do
+ enumerate :sex, :with => Sex
+ end
+ @person = Person.new(:sex =>1)
+ end
+
it 'should return text name value for attribute' do
@person.sex.should == 'Male'
end | refactor extension method definitions
AR method overrides with class_eval to remove some awkward code and
evaluate the read method return value once instead of per attribute
method call | adzap_active_enum | train |
c2f152ac216f3c8863c20abb96e918b0432ade21 | diff --git a/lang/fr/moodle.php b/lang/fr/moodle.php
index <HASH>..<HASH> 100644
--- a/lang/fr/moodle.php
+++ b/lang/fr/moodle.php
@@ -288,6 +288,8 @@ $string['deletingexistingcoursedata'] = 'Suppression des données existantes du cours';
$string['deletingolddata'] = 'Suppression des anciennes données';
$string['department'] = 'Département';
$string['description'] = 'Description';
+$string['detailedmore'] = 'Plus de détails';
+$string['detailedless'] = 'Moins de détails';
$string['disable'] = 'Désactiver';
$string['displayingfirst'] = 'Seules les $a->count premier(ère)s $a->things sont affiché(e)s';
$string['displayingrecords'] = '$a enregistrements affichés';
@@ -975,6 +977,7 @@ $string['userdata'] = 'Données utilisateur';
$string['userdeleted'] = 'Le compte de cet utilisateur a été supprimé';
$string['userdescription'] = 'Description';
$string['userfiles'] = 'Fichiers utilisateur';
+$string['userlist'] = 'Liste des utilisateurs';
$string['username'] = 'Nom d\'utilisateur';
$string['usernameexists'] = 'Ce nom d\'utilisateur existe déjà, veuillez en choisir un autre';
$string['usernotconfirmed'] = 'L\'utilisateur $a n\'a pas pu être confirmé'; | New strings for the user list type selection feature. | moodle_moodle | train
c07a6c505a35b18b648f7a574450da0c0547348e | diff --git a/hebel/layers/logistic_layer.py b/hebel/layers/logistic_layer.py
index <HASH>..<HASH> 100644
--- a/hebel/layers/logistic_layer.py
+++ b/hebel/layers/logistic_layer.py
@@ -295,7 +295,7 @@ class LogisticLayer(TopLayer):
loss = cross_entropy(activations, targets)
- if average: loss = loss.mean()
+ if average: loss /= targets.shape[0]
return loss
train_error = cross_entropy_error
@@ -314,7 +314,7 @@ class LogisticLayer(TopLayer):
targets = targets.get().argmax(1)
class_error = np.sum(activations.get().argmax(1) != targets)
- if average: class_error = class_error.mean()
+ if average: class_error = float(class_error) / targets.shape[0]
return class_error
def kl_error(self, input_data, targets, average=True, | Fix average test error on LogisticLayer | hannes-brt_hebel | train |
f7757a58fc2c62a8583fd897eac114a19956555b | diff --git a/src/utils/utils.js b/src/utils/utils.js
index <HASH>..<HASH> 100644
--- a/src/utils/utils.js
+++ b/src/utils/utils.js
@@ -454,7 +454,7 @@ Utils.multiCentroid = function (polygons) {
return centroid;
};
-Utils.polygonArea = function (polygon) {
+Utils.signedPolygonAreaSum = function (polygon) {
let area = 0;
let n = polygon.length;
@@ -466,8 +466,12 @@ Utils.polygonArea = function (polygon) {
}
area += polygon[n - 1][0] * polygon[0][1] - polygon[0][0] * polygon[n - 1][1];
+ return area;
+};
- return Math.abs(area) / 2;
+// TODO: subtract inner ring areas
+Utils.polygonArea = function (polygon) {
+ return Math.abs(Utils.signedPolygonAreaSum(polygon)) / 2;
};
Utils.multiPolygonArea = function (polygons) {
@@ -481,6 +485,10 @@ Utils.multiPolygonArea = function (polygons) {
return area;
};
+Utils.ringWinding = function (ring) {
+ return Utils.signedPolygonAreaSum(ring) > 0 ? 'CW' : 'CCW';
+};
+
Utils.toPixelSize = function (size, kind) {
if (kind === "px") {
return size; | add util to detect polygon ring winding order | tangrams_tangram | train |
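Keeping the shoelace sum signed lets one helper serve two purposes: half its absolute value is the area, and its sign gives the ring's winding. The same trio in Python (the CW/CCW labels follow the commit's screen-space convention):

```python
def signed_area_sum(ring):
    n = len(ring)
    return sum(ring[i][0] * ring[(i + 1) % n][1] -
               ring[(i + 1) % n][0] * ring[i][1] for i in range(n))

def polygon_area(ring):
    return abs(signed_area_sum(ring)) / 2

def ring_winding(ring):
    return "CW" if signed_area_sum(ring) > 0 else "CCW"

square = [(0, 0), (1, 0), (1, 1), (0, 1)]
assert polygon_area(square) == 1.0
```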
a8953a2a5da530657a19d8d0231739347021aa27 | diff --git a/aws-java-sdk-core/src/main/java/com/amazonaws/ClientConfiguration.java b/aws-java-sdk-core/src/main/java/com/amazonaws/ClientConfiguration.java
index <HASH>..<HASH> 100644
--- a/aws-java-sdk-core/src/main/java/com/amazonaws/ClientConfiguration.java
+++ b/aws-java-sdk-core/src/main/java/com/amazonaws/ClientConfiguration.java
@@ -1078,24 +1078,35 @@ public class ClientConfiguration {
/**
* Returns the Java system property for nonProxyHosts. We still honor this property even
* {@link #getProtocol()} is https, see http://docs.oracle.com/javase/7/docs/api/java/net/doc-files/net-properties.html.
+ * This method expects the property to be set as pipe separated list.
*/
private String getNonProxyHostsProperty() {
return getSystemProperty("http.nonProxyHosts");
}
/**
- * Returns the value of the environment variable NO_PROXY/no_proxy.
+ * Returns the value of the environment variable NO_PROXY/no_proxy. This method expects
+ * the environment variable to be set as a comma separated list, so this method
+ * converts the comma separated list to pipe separated list to be used internally.
*/
private String getNonProxyHostsEnvironment() {
- return getEnvironmentVariableCaseInsensitive("NO_PROXY");
+ String nonProxyHosts = getEnvironmentVariableCaseInsensitive("NO_PROXY");
+ if (nonProxyHosts != null) {
+ nonProxyHosts = nonProxyHosts.replace(",", "|");
+ }
+
+ return nonProxyHosts;
}
/**
* Returns the optional hosts the client will access without going
* through the proxy. Returns either the nonProxyHosts set on this
* object, or if not provided, returns the value of the Java system property
- * http.nonProxyHosts. If neither are set, returns the value of the
- * environment variable NO_PROXY/no_proxy.
+ * http.nonProxyHosts. We still honor this property even
+ * {@link #getProtocol()} is https, see http://docs.oracle.com/javase/7/docs/api/java/net/doc-files/net-properties.html.
+ * This property is expected to be set as a pipe separated list. If neither are set,
+ * returns the value of the environment variable NO_PROXY/no_proxy. This environment
+ * variable is expected to be set as a comma separated list.
*
* @return The hosts the client will connect through bypassing the proxy.
*/
diff --git a/aws-java-sdk-core/src/test/java/com/amazonaws/ClientConfigurationTest.java b/aws-java-sdk-core/src/test/java/com/amazonaws/ClientConfigurationTest.java
index <HASH>..<HASH> 100644
--- a/aws-java-sdk-core/src/test/java/com/amazonaws/ClientConfigurationTest.java
+++ b/aws-java-sdk-core/src/test/java/com/amazonaws/ClientConfigurationTest.java
@@ -408,7 +408,16 @@ public class ClientConfigurationTest {
assertEquals("test3.com", config.getNonProxyHosts());
config.setNonProxyHosts("test4.com");
assertEquals("test4.com", config.getNonProxyHosts());
- System.clearProperty("https.nonProxyHosts");
+ System.clearProperty("http.nonProxyHosts");
+ environmentVariableHelper.reset();
+
+ config = new ClientConfiguration();
+ assertNull(config.getNonProxyHosts());
+ config.setProtocol(Protocol.HTTP);
+ environmentVariableHelper.set("no_proxy", "test1.com,test2.com,test3.com");
+ assertEquals("test1.com|test2.com|test3.com", config.getNonProxyHosts());
+ environmentVariableHelper.set("no_proxy", "test1.com|test2.com|test3.com");
+ assertEquals("test1.com|test2.com|test3.com", config.getNonProxyHosts());
environmentVariableHelper.reset();
} | Change environment non-proxy hosts to expect a comma-separated list | aws_aws-sdk-java | train
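The Java convention for `http.nonProxyHosts` is pipe-separated while `NO_PROXY` is conventionally comma-separated, so the getter normalizes one into the other. The conversion, sketched in Python:

```python
import os

def non_proxy_hosts():
    no_proxy = os.environ.get("NO_PROXY") or os.environ.get("no_proxy")
    # Convert the comma convention to the pipe convention used internally.
    return no_proxy.replace(",", "|") if no_proxy else None

os.environ["NO_PROXY"] = "test1.com,test2.com,test3.com"
assert non_proxy_hosts() == "test1.com|test2.com|test3.com"
```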
048cee32b8b247c0a6d71144e038309a69205168 | diff --git a/golfilesystem/copy.go b/golfilesystem/copy.go
index <HASH>..<HASH> 100644
--- a/golfilesystem/copy.go
+++ b/golfilesystem/copy.go
@@ -59,7 +59,7 @@ func CopyFile(src, dst string) error {
if err = MkDir(path.Dir(dst)); err != nil {
return err
}
- err = copyFileContents(src, dst)
+ err = copyFileContents(src, dst, sfi.Mode())
return err
}
@@ -69,13 +69,14 @@ by dst. The file will be created if it does not already exist. If the
destination file exists, all it's contents will be replaced by the contents
of the source file.
*/
-func copyFileContents(src, dst string) (err error) {
+func copyFileContents(src string, dst string, srcMode os.FileMode) (err error) {
in, err := os.Open(src)
if err != nil {
return
}
defer in.Close()
out, err := os.Create(dst)
+ out.Chmod(srcMode)
if err != nil {
return
} | [golfilesystem] make CopyFile carry the source FileMode over to the destination | abhishekkr_gol | train
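The Go change passes the source's `FileMode` into the copy so the destination ends up with the same permissions. Python's standard library splits that into a content copy plus a mode copy:

```python
import os
import shutil

def copy_file(src: str, dst: str) -> None:
    os.makedirs(os.path.dirname(dst) or ".", exist_ok=True)
    shutil.copyfile(src, dst)    # contents only
    shutil.copymode(src, dst)    # then carry the permission bits over

# e.g. a script stays executable after copying:
# copy_file("bin/tool.sh", "dist/tool.sh")
```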
c86486a62fb3f9663de913c7034ea13f47af4239 | diff --git a/lib/query_string_search/comparator.rb b/lib/query_string_search/comparator.rb
index <HASH>..<HASH> 100644
--- a/lib/query_string_search/comparator.rb
+++ b/lib/query_string_search/comparator.rb
@@ -43,13 +43,15 @@ module QueryStringSearch
module Comparator
class ComparisonFactory
def self.build(config)
- if config.subject.respond_to?(:each)
- SetComparison.new(config.subject, config.other)
- elsif [:<, :>, :<=, :>=].include?(config.operator.to_sym)
- InequalityComparison.new(config.subject, config.other, config.operator)
- else
- EqualityComparison.new(config.subject, config.other)
- end
+ comparison = if config.subject.respond_to?(:each)
+ SetComparison
+ elsif [:<, :>, :<=, :>=].include?(config.operator.to_sym)
+ InequalityComparison
+ else
+ EqualityComparison
+ end
+
+ comparison.new(config.subject, config.other, config.operator.to_sym)
end
end | Consolidate comparison instantiation
Rubocop likes this way better | umn-asr_query_string_search | train |
1e0fbb068366b7b5d01cd90771e5467a6971cd6a | diff --git a/kite/main.go b/kite/main.go
index <HASH>..<HASH> 100644
--- a/kite/main.go
+++ b/kite/main.go
@@ -233,7 +233,7 @@ func (k *Kite) createMethodMap(rcvr interface{}, methods map[string]string) {
for alternativeName, method := range methods {
m, ok := kiteStruct.MethodByName(method)
if !ok {
- slog.Printf("warning: no method with name: %s\n", method)
+ panic(fmt.Sprintf("addmethods err: no method with name: %s\n", method))
continue
}
diff --git a/kontrol/main.go b/kontrol/main.go
index <HASH>..<HASH> 100644
--- a/kontrol/main.go
+++ b/kontrol/main.go
@@ -374,61 +374,78 @@ func (k *Kontrol) Publish(filter string, msg []byte) {
// creates a new struct, stores it and returns it.
func (k *Kontrol) RegisterKite(req *protocol.Request) (*models.Kite, error) {
kite := storage.Get(req.Uuid)
- if kite == nil {
- // in the future we'll check other things too, for now just make sure that
- // the variables are not empty
- if req.Kitename == "" && req.Version == "" && req.Addr == "" {
- return nil, fmt.Errorf("kite fields are not initialized correctly")
- }
+ if kite != nil {
+ return kite, nil
+ }
- kite = &models.Kite{
- Base: protocol.Base{
- Username: req.Username,
- Kitename: req.Kitename,
- Version: req.Version,
- PublicKey: req.PublicKey,
- Uuid: req.Uuid,
- Hostname: req.Hostname,
- Addr: req.Addr,
- LocalIP: req.LocalIP,
- PublicIP: req.PublicIP,
- Port: req.Port,
- },
- }
+ return createAndAddKite(req)
+}
- kodingKey, err := modelhelper.GetKodingKeysByKey(kite.PublicKey)
- if err != nil {
- return nil, fmt.Errorf("register kodingkey err %s", err)
- }
+func createAndAddKite(req *protocol.Request) (*models.Kite, error) {
+ // in the future we'll check other things too, for now just make sure that
+ // the variables are not empty
+ if req.Kitename == "" && req.Version == "" && req.Addr == "" {
+ return nil, fmt.Errorf("kite fields are not initialized correctly")
+ }
+
+ kite, err := createKiteModel(req)
+ if err != nil {
+ return nil, err
+ }
+
+ username, err := usernameFromKey(kite.PublicKey)
+ if err != nil {
+ return nil, err
+ }
+
+ kite.Username = username
+ storage.Add(kite)
+
+ slog.Printf("[%s (%s)] belong to '%s'. ready to go..\n", kite.Kitename, kite.Version, username)
- account, err := modelhelper.GetAccountById(kodingKey.Owner)
+ if req.Kind == "vm" {
+ err := addToVM(username)
if err != nil {
- return nil, fmt.Errorf("register get user err %s", err)
+ fmt.Println("register get user id err")
}
+ }
- startLog := fmt.Sprintf("[%s (%s)] belong to '%s'. ready to go..",
- kite.Kitename,
- kite.Version,
- account.Profile.Nickname,
- )
- slog.Println(startLog)
+ return kite, nil
+}
- if account.Profile.Nickname == "" {
- return nil, errors.New("nickname is empty, could not register kite")
- }
+func createKiteModel(req *protocol.Request) (*models.Kite, error) {
+ return &models.Kite{
+ Base: protocol.Base{
+ Username: req.Username,
+ Kitename: req.Kitename,
+ Version: req.Version,
+ PublicKey: req.PublicKey,
+ Uuid: req.Uuid,
+ Hostname: req.Hostname,
+ Addr: req.Addr,
+ LocalIP: req.LocalIP,
+ PublicIP: req.PublicIP,
+ Port: req.Port,
+ },
+ }, nil
+}
- kite.Username = account.Profile.Nickname
- storage.Add(kite)
+func usernameFromKey(key string) (string, error) {
+ kodingKey, err := modelhelper.GetKodingKeysByKey(key)
+ if err != nil {
+ return "", fmt.Errorf("register kodingkey err %s", err)
+ }
- if req.Kind == "vm" {
- err := addToVM(account.Profile.Nickname)
- if err != nil {
- fmt.Println("register get user id err")
- }
- }
+ account, err := modelhelper.GetAccountById(kodingKey.Owner)
+ if err != nil {
+ return "", fmt.Errorf("register get user err %s", err)
+ }
+ if account.Profile.Nickname == "" {
+ return "", errors.New("nickname is empty, could not register kite")
}
- return kite, nil
+
+ return account.Profile.Nickname, nil
}
func addToVM(username string) error { | kite: refactor and small cosmetic fixes | koding_kite | train |
8a1e611a03da8374567c9654f8baf29b66c83c6e | diff --git a/persist/sqldb/migrate.go b/persist/sqldb/migrate.go
index <HASH>..<HASH> 100644
--- a/persist/sqldb/migrate.go
+++ b/persist/sqldb/migrate.go
@@ -148,7 +148,7 @@ func (m migrate) Exec(ctx context.Context) error {
ansiSQLChange(`alter table argo_archived_workflows add primary key(clustername,uid)`),
ansiSQLChange(`create index argo_archived_workflows_i1 on argo_archived_workflows (clustername,namespace)`),
// argo_archived_workflows now looks like:
- // clustername(not null) uid(not null) | phase(not null) | namespace(not null) | workflow(not null) | startedat(not null) | finishedat(not null)
+ // clustername(not null) | uid(not null) | | name (null) | phase(not null) | namespace(not null) | workflow(not null) | startedat(not null) | finishedat(not null)
// remove unused columns
ansiSQLChange(`alter table ` + m.tableName + ` drop column phase`),
ansiSQLChange(`alter table ` + m.tableName + ` drop column startedat`),
@@ -192,6 +192,15 @@ func (m migrate) Exec(ctx context.Context) error {
ansiSQLChange(`create index ` + m.tableName + `_i1 on ` + m.tableName + ` (clustername,namespace)`),
// argo_workflows now looks like:
// clustername(not null) | uid(not null) | namespace(not null) | version(not null) | nodes(not null) | updatedat(not null)
+ ternary(dbType == "mysql",
+ ansiSQLChange(`alter table argo_archived_workflows modify column workflow json not null`),
+ ansiSQLChange(`alter table argo_archived_workflows alter column workflow type json using workflow::json`),
+ ),
+ ternary(dbType == "mysql",
+ ansiSQLChange(`alter table argo_archived_workflows modify column name varchar(256) not null`),
+ ansiSQLChange(`alter table argo_archived_workflows alter column name set not null`),
+ ),
+ // clustername(not null) | uid(not null) | | name (not null) | phase(not null) | namespace(not null) | workflow(not null) | startedat(not null) | finishedat(not null)
} {
err := m.applyChange(ctx, changeSchemaVersion, change)
if err != nil { | feat: Update archived workflow column to be JSON. Closes #<I> (#<I>) | argoproj_argo | train
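Each migration step selects dialect-specific SQL with a ternary because MySQL and PostgreSQL spell the column change differently. A sketch of that dispatch for the workflow column:

```python
def alter_workflow_to_json(table: str, db_type: str) -> str:
    # Same shape as the Go `ternary(dbType == "mysql", ..., ...)` helper.
    if db_type == "mysql":
        return f"alter table {table} modify column workflow json not null"
    return (f"alter table {table} "
            "alter column workflow type json using workflow::json")

print(alter_workflow_to_json("argo_archived_workflows", "mysql"))
print(alter_workflow_to_json("argo_archived_workflows", "postgres"))
```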
e6f04890312dccafb295fb0a38f441c81c9f137f | diff --git a/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java b/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
+++ b/src/test/java/org/junit/tests/internal/runners/statements/FailOnTimeoutTest.java
@@ -4,15 +4,16 @@ import static java.lang.Long.MAX_VALUE;
import static java.lang.Math.atan;
import static java.lang.System.currentTimeMillis;
import static java.lang.Thread.sleep;
+import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static org.hamcrest.core.Is.is;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
+import static org.junit.internal.runners.statements.FailOnTimeout.builder;
import java.util.concurrent.TimeUnit;
-import org.junit.Ignore;
import org.junit.Rule;
import org.junit.Test;
import org.junit.internal.runners.statements.FailOnTimeout;
@@ -24,16 +25,15 @@ import org.junit.runners.model.TestTimedOutException;
* @author Asaf Ary, Stefan Birkner
*/
public class FailOnTimeoutTest {
- private static final int TIMEOUT = 100;
- private static final int DURATION_THAT_EXCEEDS_TIMEOUT = 60 * 60 * 1000; //1 hour
+ private static final long TIMEOUT = 100;
+ private static final long DURATION_THAT_EXCEEDS_TIMEOUT = TimeUnit.MILLISECONDS.convert(1, TimeUnit.HOURS);
@Rule
public final ExpectedException thrown = ExpectedException.none();
private final TestStatement statement = new TestStatement();
- private final FailOnTimeout failOnTimeout = new FailOnTimeout(statement,
- TIMEOUT);
+ private final FailOnTimeout failOnTimeout = builder().withTimeout(TIMEOUT, MILLISECONDS).build(statement);
@Test
public void throwsTestTimedOutException() throws Throwable {
@@ -91,14 +91,14 @@ public class FailOnTimeoutTest {
failOnTimeout.evaluate();
}
- private void evaluateWithWaitDuration(int waitDuration) throws Throwable {
+ private void evaluateWithWaitDuration(long waitDuration) throws Throwable {
statement.nextException = null;
statement.waitDuration = waitDuration;
failOnTimeout.evaluate();
}
private static final class TestStatement extends Statement {
- int waitDuration;
+ long waitDuration;
Exception nextException;
@@ -114,8 +114,7 @@ public class FailOnTimeoutTest {
@Test
public void stopEndlessStatement() throws Throwable {
InfiniteLoopStatement infiniteLoop = new InfiniteLoopStatement();
- FailOnTimeout infiniteLoopTimeout = new FailOnTimeout(infiniteLoop,
- TIMEOUT);
+ FailOnTimeout infiniteLoopTimeout = builder().withTimeout(TIMEOUT, MILLISECONDS).build(infiniteLoop);
try {
infiniteLoopTimeout.evaluate();
} catch (Exception timeoutException) {
@@ -142,7 +141,7 @@ public class FailOnTimeoutTest {
@Test
public void stackTraceContainsRealCauseOfTimeout() throws Throwable {
StuckStatement stuck = new StuckStatement();
- FailOnTimeout stuckTimeout = new FailOnTimeout(stuck, TIMEOUT);
+ FailOnTimeout stuckTimeout = builder().withTimeout(TIMEOUT, MILLISECONDS).build(stuck);
try {
stuckTimeout.evaluate();
// We must not get here, we expect a timeout exception | Use builder instead of deprecated constructor | junit-team_junit4 | train |
e662071f75408ef199d7f08a4897f26d15b52f89 | diff --git a/htmlparsing.py b/htmlparsing.py
index <HASH>..<HASH> 100644
--- a/htmlparsing.py
+++ b/htmlparsing.py
@@ -32,7 +32,7 @@ GITHUB_FILE_TAG = 'blob-wrapper data type-text'
# ---------------------------
def get_github_text(raw_html):
- html = BeautifulSoup(raw_html)
+ html = BeautifulSoup(raw_html, "html.parser")
gist_description = html.body.find('div', attrs={'class': GITHUB_CONTENT_TAG})
@@ -63,7 +63,7 @@ def get_search_text(service, raw_html):
if service == 'facebook':
raw_html = raw_html.replace('<!--', '').replace('-->', '')
- html_soup = BeautifulSoup(raw_html)
+ html_soup = BeautifulSoup(raw_html, "html.parser")
if service in SITES:
query_data = SITES[service]['html_query'] | get rid of BeautifulSoup warning | blockstack_blockstack-proofs-py | train
6628efcc6e78490740cff5a30a087112273c56dc | diff --git a/datajoint/base_relation.py b/datajoint/base_relation.py
index <HASH>..<HASH> 100644
--- a/datajoint/base_relation.py
+++ b/datajoint/base_relation.py
@@ -261,7 +261,7 @@ class BaseRelation(RelationalOperand, metaclass=abc.ABCMeta):
sql = 'INSERT IGNORE'
else:
sql = 'INSERT'
- attributes = (a for a in attributes if a[0]) # omit dropped attributes
+ attributes = (a for a in attributes if a[0] is not None) # omit dropped attributes
names, placeholders, values = tuple(zip(*attributes))
sql += " INTO %s (`%s`) VALUES (%s)" % (
self.from_clause, '`,`'.join(names), ','.join(placeholders))
diff --git a/datajoint/connection.py b/datajoint/connection.py
index <HASH>..<HASH> 100644
--- a/datajoint/connection.py
+++ b/datajoint/connection.py
@@ -59,7 +59,7 @@ class Connection:
else:
port = config['database.port']
self.conn_info = dict(host=host, port=port, user=user, passwd=passwd,
- max_allowed_packet=1024 ** 3) # 1073741824
+ max_allowed_packet=1024 ** 3) # this is a hack to fix problems with pymysql (remove later)
self._conn = client.connect(init_command=init_fun, **self.conn_info)
if self.is_connected:
logger.info("Connected {user}@{host}:{port}".format(**self.conn_info)) | be more verbose on changes | datajoint_datajoint-python | train |
4fc410545637e22f1b4131f4393c0af2afc1d3b6 | diff --git a/lxd/storage_volumes.go b/lxd/storage_volumes.go
index <HASH>..<HASH> 100644
--- a/lxd/storage_volumes.go
+++ b/lxd/storage_volumes.go
@@ -570,7 +570,7 @@ func doVolumeCreateOrCopy(d *Daemon, r *http.Request, requestProjectName string,
return pool.CreateCustomVolume(projectName, req.Name, req.Description, req.Config, contentType, op)
}
- return pool.CreateCustomVolumeFromCopy(projectName, req.Name, req.Description, req.Config, req.Source.Pool, req.Source.Name, req.Source.VolumeOnly, op)
+ return pool.CreateCustomVolumeFromCopy(projectName, req.Source.Project, req.Name, req.Description, req.Config, req.Source.Pool, req.Source.Name, req.Source.VolumeOnly, op)
}
// If no source name supplied then this a volume create operation.
@@ -1056,7 +1056,7 @@ func storagePoolVolumeTypePostMove(d *Daemon, r *http.Request, poolName string,
// Provide empty description and nil config to instruct CreateCustomVolumeFromCopy to copy it
// from source volume.
- err = newPool.CreateCustomVolumeFromCopy(projectName, newVol.Name, "", nil, pool.Name(), vol.Name, false, op)
+ err = newPool.CreateCustomVolumeFromCopy(projectName, projectName, newVol.Name, "", nil, pool.Name(), vol.Name, false, op)
if err != nil {
return err
} | lxd: Support for copy/move custom storage volume between projects | lxc_lxd | train |
323128105f335bed6ef5ab50b639c64a4828ac14 | diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -46,15 +46,9 @@ test('basic', async t => {
t.ok(bundleJsZip);
});
-test('roundtrip', async t => {
- const out = randomPath();
- const outSrc = join(out, 'src');
- const outDst = join(out, 'dst');
-
- await runWithOptions({ path: outSrc, filename: 'bundle.js' });
-
+async function unzip(zipFilePath, outDirPath) {
const zipFile = await new Promise((resolve, reject) => {
- yauzl.open(join(outSrc, 'bundle.js.zip'), { lazyEntries: true }, (err, zipFile) => {
+ yauzl.open(zipFilePath, { lazyEntries: true }, (err, zipFile) => {
err ? reject(err) : resolve(zipFile);
});
});
@@ -64,8 +58,8 @@ test('roundtrip', async t => {
zipFile.on('entry', entry => {
zipFile.openReadStream(entry, (err, readStream) => {
if (err) throw err;
- mkdirp.sync(join(outDst, dirname(entry.fileName)));
- const writeStream = createWriteStream(join(outDst, entry.fileName));
+ mkdirp.sync(join(outDirPath, dirname(entry.fileName)));
+ const writeStream = createWriteStream(join(outDirPath, entry.fileName));
readStream.pipe(writeStream);
writeStream.on('close', () => zipFile.readEntry());
});
@@ -75,6 +69,16 @@ test('roundtrip', async t => {
zipFile.on('close', resolve);
zipFile.on('error', reject);
});
+}
+
+test('roundtrip', async t => {
+ const out = randomPath();
+ const outSrc = join(out, 'src');
+ const outDst = join(out, 'dst');
+
+ await runWithOptions({ path: outSrc, filename: 'bundle.js' });
+
+ await unzip(join(outSrc, 'bundle.js.zip'), outDst);
t.is(Buffer.compare(
readFileSync(join(outSrc, 'subdir', 'bye.jpg')), | extract unzipping to a function | erikdesjardins_zip-webpack-plugin | train |
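The test pulls its unzip loop into a reusable helper; Python's `zipfile` makes the equivalent helper a few lines (paths below are illustrative):

```python
import zipfile

def unzip(zip_path: str, out_dir: str) -> None:
    with zipfile.ZipFile(zip_path) as zf:
        zf.extractall(out_dir)    # creates intermediate directories itself

# unzip("out/src/bundle.js.zip", "out/dst")
```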
a7b1d1631e2fe7c7014ac7d732a53b8891089f2c | diff --git a/bundles/org.eclipse.orion.client.editor/web/orion/editor/edit.js b/bundles/org.eclipse.orion.client.editor/web/orion/editor/edit.js
index <HASH>..<HASH> 100644
--- a/bundles/org.eclipse.orion.client.editor/web/orion/editor/edit.js
+++ b/bundles/org.eclipse.orion.client.editor/web/orion/editor/edit.js
@@ -105,16 +105,18 @@ define('orion/editor/edit', [
});
};
- var contentAssist;
- var contentAssistFactory = {
- createContentAssistMode: function(editor) {
- contentAssist = new mContentAssist.ContentAssist(editor.getTextView());
- var contentAssistWidget = new mContentAssist.ContentAssistWidget(contentAssist);
- return new mContentAssist.ContentAssistMode(contentAssist, contentAssistWidget);
- }
- };
- var cssContentAssistProvider = new mCSSContentAssist.CssContentAssistProvider();
- var jsTemplateContentAssistProvider = new mJSContentAssist.JSTemplateContentAssistProvider();
+ var contentAssist, contentAssistFactory;
+ if (!options.readonly) {
+ contentAssistFactory = {
+ createContentAssistMode: function(editor) {
+ contentAssist = new mContentAssist.ContentAssist(editor.getTextView());
+ var contentAssistWidget = new mContentAssist.ContentAssistWidget(contentAssist);
+ return new mContentAssist.ContentAssistMode(contentAssist, contentAssistWidget);
+ }
+ };
+ var cssContentAssistProvider = new mCSSContentAssist.CssContentAssistProvider();
+ var jsTemplateContentAssistProvider = new mJSContentAssist.JSTemplateContentAssistProvider();
+ }
// Canned highlighters for js, java, and css. Grammar-based highlighter for html
var syntaxHighlighter = {
@@ -173,13 +175,15 @@ define('orion/editor/edit', [
editor.installTextView();
editor.setInput(options.title, null, contents);
syntaxHighlighter.highlight(options.lang, editor);
- contentAssist.addEventListener("Activating", function() {
- if (/css$/.test(options.lang)) {
- contentAssist.setProviders([cssContentAssistProvider]);
- } else if (/js$/.test(options.lang)) {
- contentAssist.setProviders([jsTemplateContentAssistProvider]);
- }
- });
+ if (contentAssist) {
+ contentAssist.addEventListener("Activating", function() {
+ if (/css$/.test(options.lang)) {
+ contentAssist.setProviders([cssContentAssistProvider]);
+ } else if (/js$/.test(options.lang)) {
+ contentAssist.setProviders([jsTemplateContentAssistProvider]);
+ }
+ });
+ }
return editor;
} | content assist should not activate when readonly | eclipse_orion.client | train |
ca49cd476b0e7b56fec35ac24d7b861687685287 | diff --git a/xibless/base.py b/xibless/base.py
index <HASH>..<HASH> 100644
--- a/xibless/base.py
+++ b/xibless/base.py
@@ -54,9 +54,8 @@ class GeneratedItem(object):
PROPERTIES = []
def __init__(self):
- self.creationOrder = globalvars.globalGenerationCounter.creationToken()
- # In case we are never assigned to a top level variable and thus never given a varname
- self.varname = "_tmp%d" % self.creationOrder
+ globalvars.globalGenerationCounter.register(self)
+ self._varname = None
# properties to be set at generation time. For example, if "editable" is set to False,
# a "[$varname$ setEditable:NO];" statement will be generated.
self.properties = {}
@@ -111,6 +110,16 @@ class GeneratedItem(object):
def generated(self):
return globalvars.globalGenerationCounter.isGenerated(self)
+ @property
+ def varname(self):
+ if not self._varname:
+ self._varname = "_tmp%d" % globalvars.globalGenerationCounter.varnameToken()
+ return self._varname
+
+ @varname.setter
+ def varname(self, value):
+ self._varname = value
+
def bind(self, name, target, keyPath, valueTransformer=None):
options = {}
if valueTransformer:
@@ -168,12 +177,17 @@ class GeneratedItem(object):
class GenerationCounter(object):
def __init__(self):
- self.reset()
+ self.varnameTokenCounter = 0
+ self.createdItems = []
+ self.generatedItems = set()
+
+ def register(self, item):
+ self.createdItems.append(item)
- def creationToken(self):
- count = self.creationCount
- self.creationCount += 1
- return count
+ def varnameToken(self):
+ result = self.varnameTokenCounter
+ self.varnameTokenCounter += 1
+ return result
def addGenerated(self, item):
self.generatedItems.add(item)
@@ -182,7 +196,10 @@ class GenerationCounter(object):
return item in self.generatedItems
def reset(self):
- self.creationCount = 0
+ for item in set(self.createdItems) | self.generatedItems:
+ item.varname = None
+ self.varnameTokenCounter = 0
+ self.createdItems = []
self.generatedItems = set()
diff --git a/xibless/gen.py b/xibless/gen.py
index <HASH>..<HASH> 100644
--- a/xibless/gen.py
+++ b/xibless/gen.py
@@ -114,13 +114,10 @@ def generate(modulePath, dest, runmode=False, localizationTable=None, args=None)
else:
tmpl.mainimport = "#import \"XiblessSupport.h\""
tmpl.ownerimport = ownerimport
- toGenerate = []
for key, value in module_locals.items():
- if not isinstance(value, GeneratedItem):
- continue
- value.varname = key
- toGenerate.append(value)
- toGenerate.sort(key=lambda x: x.creationOrder)
+ if isinstance(value, GeneratedItem) and value.varname.startswith('_tmp'):
+ value.varname = key
+ toGenerate = globalvars.globalGenerationCounter.createdItems
codePieces = []
for item in toGenerate:
if item.generated: | Make sure we generate all generated items (not just those in the module namespace) and fix a bug where we would sometimes end up with two generated items with the same varname. | hsoft_xibless | train
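`varname` becomes a lazy property: a temporary name is minted from a counter only the first time it is read, and it can be nulled out again on reset. The same mechanics in Python:

```python
import itertools

class GeneratedItem:
    _counter = itertools.count()     # shared, like the generation counter

    def __init__(self):
        self._varname = None

    @property
    def varname(self):
        if self._varname is None:    # mint lazily, exactly once
            self._varname = "_tmp%d" % next(self._counter)
        return self._varname

    @varname.setter
    def varname(self, value):
        self._varname = value

item = GeneratedItem()
print(item.varname)       # _tmp0
item.varname = "window"   # a real name overrides the temporary one
```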
24963ce53cae8b3b40252db749922f6e26848de7 | diff --git a/parse.go b/parse.go
index <HASH>..<HASH> 100644
--- a/parse.go
+++ b/parse.go
@@ -324,13 +324,13 @@ func (p *parser) posErr(pos position, format string, v ...interface{}) {
}
func (p *parser) curErr(format string, v ...interface{}) {
+ if p.tok == EOF {
+ p.pos = p.npos
+ }
p.posErr(p.pos, format, v...)
}
func (p *parser) errWantedStr(s string) {
- if p.tok == EOF {
- p.pos = p.npos
- }
p.curErr("unexpected token %s - wanted %s", p.tok, s)
} | Move EOF position erroring to correct func | mvdan_sh | train |
4858537930fe5ffb8b9b54d93b64c421b9952233 | diff --git a/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumer.java b/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumer.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumer.java
+++ b/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumer.java
@@ -140,12 +140,13 @@ public class SQSMessageConsumer implements MessageConsumer, QueueReceiver {
* paused.
*
* @return the next message produced for this message consumer, or null if
- * this message consumer is closed
+ * this message consumer is closed during the receive call
* @throws JMSException
* On internal error
*/
@Override
public Message receive() throws JMSException {
+ checkClosed();
return sqsMessageConsumerPrefetch.receive();
}
@@ -157,12 +158,13 @@ public class SQSMessageConsumer implements MessageConsumer, QueueReceiver {
* @param timeout
* the timeout value (in milliseconds)
* @return the next message produced for this message consumer, or null if
- * the timeout expires or this message consumer is closed
+ * the timeout expires or this message consumer is closed during the receive call
* @throws JMSException
* On internal error
*/
@Override
public Message receive(long timeout) throws JMSException {
+ checkClosed();
return sqsMessageConsumerPrefetch.receive(timeout);
}
@@ -176,6 +178,7 @@ public class SQSMessageConsumer implements MessageConsumer, QueueReceiver {
*/
@Override
public Message receiveNoWait() throws JMSException {
+ checkClosed();
return sqsMessageConsumerPrefetch.receiveNoWait();
}
diff --git a/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumerPrefetch.java b/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumerPrefetch.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumerPrefetch.java
+++ b/src/main/java/com/amazon/sqs/javamessaging/SQSMessageConsumerPrefetch.java
@@ -590,11 +590,11 @@ public class SQSMessageConsumerPrefetch implements Runnable, PrefetchManager {
private boolean cannotDeliver() throws JMSException {
if (!running) {
- return true;
+ return true;
}
if (isClosed()) {
- throw new JMSException("Cannot receive messages when the consumer is closed");
+ throw new JMSException("Cannot receive messages when the consumer is closed");
}
if (messageListener != null) {
diff --git a/src/test/java/com/amazon/sqs/javamessaging/SQSMessageConsumerTest.java b/src/test/java/com/amazon/sqs/javamessaging/SQSMessageConsumerTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/amazon/sqs/javamessaging/SQSMessageConsumerTest.java
+++ b/src/test/java/com/amazon/sqs/javamessaging/SQSMessageConsumerTest.java
@@ -348,6 +348,88 @@ public class SQSMessageConsumerTest {
}
/**
+ * Test receive fails when consumer is already closed
+ */
+ @Test
+ public void testReceiveAlreadyClosed() throws InterruptedException, JMSException {
+
+ /*
+ * Set up consumer
+ */
+ consumer = spy(new SQSMessageConsumer(sqsConnection, sqsSession, sqsSessionRunnable,
+ destination, acknowledger, negativeAcknowledger, threadFactory, sqsMessageConsumerPrefetch));
+
+ consumer.close();
+
+ /*
+ * Call receive
+ */
+ try {
+ consumer.receive();
+ fail();
+ } catch (JMSException ex) {
+ assertEquals("Consumer is closed", ex.getMessage());
+ }
+
+
+ }
+
+ /**
+ * Test set message listener fails when consumer is already closed
+ */
+ @Test
+ public void testReceiveWithTimeoutAlreadyClosed() throws InterruptedException, JMSException {
+
+ /*
+ * Set up consumer
+ */
+ consumer = spy(new SQSMessageConsumer(sqsConnection, sqsSession, sqsSessionRunnable,
+ destination, acknowledger, negativeAcknowledger, threadFactory, sqsMessageConsumerPrefetch));
+
+ consumer.close();
+
+ long timeout = 10;
+
+ /*
+ * Call receive with timeout
+ */
+ try {
+ consumer.receive(timeout);
+ fail();
+ } catch (JMSException ex) {
+ assertEquals("Consumer is closed", ex.getMessage());
+ }
+
+
+ }
+
+ /**
+ * Test receive no wait fails when consumer is already closed
+ */
+ @Test
+ public void testReceiveNoWaitAlreadyClosed() throws InterruptedException, JMSException {
+
+ /*
+ * Set up consumer
+ */
+ consumer = spy(new SQSMessageConsumer(sqsConnection, sqsSession, sqsSessionRunnable,
+ destination, acknowledger, negativeAcknowledger, threadFactory, sqsMessageConsumerPrefetch));
+
+ consumer.close();
+
+ /*
+ * Call receive no wait
+ */
+ try {
+
+ consumer.receiveNoWait();
+ fail();
+ } catch (JMSException ex) {
+ assertEquals("Consumer is closed", ex.getMessage());
+ }
+ }
+
+ /**
* Test set message listener
*/
@Test | Add checkClosed() to all three receive*() methods
As per <URL> | awslabs_amazon-sqs-java-messaging-lib | train |
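
The guard added in the diff above is a generic fail-fast pattern: every public entry point checks the closed flag itself instead of relying on the prefetcher to surface the error later. A minimal sketch of the same idea, written in Go rather than the library's Java; consumer, Receive, and checkClosed here are illustrative names, not the library's API.

package main

import (
	"errors"
	"fmt"
	"sync"
)

var errClosed = errors.New("consumer is closed")

// consumer mirrors the shape of the patched class: a closed flag
// consulted at every public entry point before delegating.
type consumer struct {
	mu     sync.Mutex
	closed bool
}

func (c *consumer) checkClosed() error {
	c.mu.Lock()
	defer c.mu.Unlock()
	if c.closed {
		return errClosed
	}
	return nil
}

// Receive fails fast when the consumer is already closed, so callers get
// a consistent error instead of one raised deep inside the prefetcher.
func (c *consumer) Receive() (string, error) {
	if err := c.checkClosed(); err != nil {
		return "", err
	}
	// the real class would delegate to the prefetch machinery here
	return "message", nil
}

func (c *consumer) Close() {
	c.mu.Lock()
	c.closed = true
	c.mu.Unlock()
}

func main() {
	c := &consumer{}
	c.Close()
	if _, err := c.Receive(); err != nil {
		fmt.Println("receive after close:", err) // fails fast, like the patched Java
	}
}
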
cca736173428a0c33fb42d887601aab71608ab89 | diff --git a/db/common/src/main/java/org/commonjava/indy/db/common/AbstractStoreDataManager.java b/db/common/src/main/java/org/commonjava/indy/db/common/AbstractStoreDataManager.java
index <HASH>..<HASH> 100644
--- a/db/common/src/main/java/org/commonjava/indy/db/common/AbstractStoreDataManager.java
+++ b/db/common/src/main/java/org/commonjava/indy/db/common/AbstractStoreDataManager.java
@@ -44,6 +44,7 @@ import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
+import java.util.Objects;
import java.util.Set;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
@@ -54,6 +55,7 @@ import java.util.stream.Stream;
import static org.commonjava.indy.db.common.StoreUpdateAction.DELETE;
import static org.commonjava.indy.db.common.StoreUpdateAction.STORE;
+import static org.commonjava.indy.model.core.StoreType.group;
import static org.commonjava.indy.model.core.StoreType.hosted;
public abstract class AbstractStoreDataManager
@@ -318,15 +320,22 @@ public abstract class AbstractStoreDataManager
logger.warn("Storing {} using operation lock: {}", store, opLocks);
-
- if (internalFeatureConfig != null && internalFeatureConfig.getStoreValidation() && store.getType() != StoreType.group) {
- ArtifactStoreValidateData validateData = storeValidator.validate(store);
- if (!validateData.isValid()) {
- logger.warn("=> [AbstractStoreDataManager] Adding Validation Metadata to Remote Store: " + store.getKey() + " - not Valid! ");
- if(store.getMetadata() != null)
- store.getMetadata().putAll(validateData.getErrors());
+ if ( internalFeatureConfig != null && internalFeatureConfig.getStoreValidation() && store.getType() != group )
+ {
+ ArtifactStoreValidateData validateData = storeValidator.validate( store );
+ if ( !validateData.isValid() )
+ {
+ logger.warn(
+ "=> [AbstractStoreDataManager] Adding Validation Metadata to Remote Store: " + store.getKey()
+ + " - not Valid! " );
+ if ( store.getMetadata() != null )
+ {
+ store.getMetadata().putAll( validateData.getErrors() );
+ }
else
- store.setMetadata(validateData.getErrors());
+ {
+ store.setMetadata( validateData.getErrors() );
+ }
}
}
@@ -450,7 +459,6 @@ public abstract class AbstractStoreDataManager
}
protected Set<Group> affectedByFromStores( final Collection<StoreKey> keys )
- throws IndyDataException
{
Logger logger = LoggerFactory.getLogger( getClass() );
logger.debug( "Getting groups affected by: {}", keys );
@@ -466,19 +474,11 @@ public abstract class AbstractStoreDataManager
Set<StoreKey> processed = new HashSet<>();
final String packageType = toProcess.get( 0 ).getPackageType();
- Set<ArtifactStore> all = this.getAllArtifactStores().stream().filter( st -> {
- if ( packageType != null && !st.getPackageType().equals( packageType ) )
- {
- return false;
- }
-
- if ( st.getType() != StoreType.group )
- {
- return false;
- }
-
- return true;
- } ).collect( Collectors.toSet() );
+ Set<ArtifactStore> all = this.getStoreKeysByPkgAndType( packageType, group )
+ .stream()
+ .map( this::getArtifactStoreInternal )
+ .filter( Objects::nonNull )
+ .collect( Collectors.toSet() );
while ( !toProcess.isEmpty() )
{
diff --git a/db/common/src/main/java/org/commonjava/indy/db/common/DefaultArtifactStoreQuery.java b/db/common/src/main/java/org/commonjava/indy/db/common/DefaultArtifactStoreQuery.java
index <HASH>..<HASH> 100644
--- a/db/common/src/main/java/org/commonjava/indy/db/common/DefaultArtifactStoreQuery.java
+++ b/db/common/src/main/java/org/commonjava/indy/db/common/DefaultArtifactStoreQuery.java
@@ -429,12 +429,7 @@ public class DefaultArtifactStoreQuery<T extends ArtifactStore>
return false;
}
- if ( filterPredicate != null && !filterPredicate.test( key ) )
- {
- return false;
- }
-
- return true;
+ return filterPredicate == null || filterPredicate.test(key);
});
} | Simplify some code
Conflicts:
db/common/src/main/java/org/commonjava/indy/db/common/AbstractStoreDataManager.java | Commonjava_indy | train |
eed2235859c1cb0090ce689261ce7fc58a206a76 | diff --git a/types/types.go b/types/types.go
index <HASH>..<HASH> 100644
--- a/types/types.go
+++ b/types/types.go
@@ -132,6 +132,7 @@ type Version struct {
Os string
Arch string
KernelVersion string `json:",omitempty"`
+ Experimental bool
}
// GET "/info" | fix experimental version and release script
add experimental field to api version | docker_engine-api | train
9b7060393ec0fd7e103b039a5507843270e5908f | diff --git a/br/pkg/stream/stream_mgr.go b/br/pkg/stream/stream_mgr.go
index <HASH>..<HASH> 100644
--- a/br/pkg/stream/stream_mgr.go
+++ b/br/pkg/stream/stream_mgr.go
@@ -179,7 +179,7 @@ func FastUnmarshalMetaData(
m := &backuppb.Metadata{}
err = m.Unmarshal(b)
if err != nil {
- if !strings.HasSuffix(path, ".meta") {
+ if !strings.HasSuffix(readPath, ".meta") {
return nil
} else {
return err
diff --git a/br/pkg/task/stream.go b/br/pkg/task/stream.go
index <HASH>..<HASH> 100644
--- a/br/pkg/task/stream.go
+++ b/br/pkg/task/stream.go
@@ -878,12 +878,7 @@ func RunStreamTruncate(c context.Context, g glue.Glue, cmdName string, cfg *Stre
return nil
}
}
- if cfg.Until > sp && !cfg.DryRun {
- if err := restore.SetTSToFile(
- ctx, storage, cfg.Until, restore.TruncateSafePointFileName); err != nil {
- return err
- }
- }
+
readMetaDone := console.ShowTask("Reading Metadata... ", glue.WithTimeCost())
metas := restore.StreamMetadataSet{
BeforeDoWriteBack: func(path string, last, current *backuppb.Metadata) (skip bool) {
@@ -919,16 +914,14 @@ func RunStreamTruncate(c context.Context, g glue.Glue, cmdName string, cfg *Stre
return nil
}
- removed := metas.RemoveDataBefore(shiftUntilTS)
-
- // remove metadata
- removeMetaDone := console.ShowTask("Removing metadata... ", glue.WithTimeCost())
- if !cfg.DryRun {
- if err := metas.DoWriteBack(ctx, storage); err != nil {
+ if cfg.Until > sp && !cfg.DryRun {
+ if err := restore.SetTSToFile(
+ ctx, storage, cfg.Until, restore.TruncateSafePointFileName); err != nil {
return err
}
}
- removeMetaDone()
+
+ removed := metas.RemoveDataBefore(shiftUntilTS)
// remove log
clearDataFileDone := console.ShowTask(
@@ -941,17 +934,27 @@ func RunStreamTruncate(c context.Context, g glue.Glue, cmdName string, cfg *Stre
for _, f := range removed {
if !cfg.DryRun {
wg.Add(1)
+ finalFile := f
worker.Apply(func() {
defer wg.Done()
- if err := storage.DeleteFile(ctx, f.Path); err != nil {
- log.Warn("File not deleted.", zap.String("path", f.Path), logutil.ShortError(err))
- console.Print("\n"+em(f.Path), "not deleted, you may clear it manually:", warn(err))
+ if err := storage.DeleteFile(ctx, finalFile.Path); err != nil {
+ log.Warn("File not deleted.", zap.String("path", finalFile.Path), logutil.ShortError(err))
+ console.Print("\n"+em(finalFile.Path), "not deleted, you may clear it manually:", warn(err))
}
})
}
}
wg.Wait()
clearDataFileDone()
+
+ // remove metadata
+ removeMetaDone := console.ShowTask("Removing metadata... ", glue.WithTimeCost())
+ if !cfg.DryRun {
+ if err := metas.DoWriteBack(ctx, storage); err != nil {
+ return err
+ }
+ }
+ removeMetaDone()
return nil
} | log-backup: fix bugs where log truncate left some files behind and mishandled the log-min-date update (#<I>)
close pingcap/tidb#<I>, ref pingcap/tidb#<I>, ref pingcap/tidb#<I> | pingcap_tidb | train |
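
Two details in the diff above are worth noting. First, moving the metadata write-back after the file deletions apparently ensures an interrupted truncation still leaves metadata pointing at whatever data survives. Second, `finalFile := f` pins the range-loop variable before the `worker.Apply` closure captures it: before Go 1.22, all closures spawned in a loop share one iteration variable, so every deletion goroutine could otherwise observe the final file. A self-contained reproduction of that pitfall, with hypothetical file names:

package main

import (
	"fmt"
	"sync"
)

func main() {
	files := []string{"v1/a.log", "v1/b.log", "v1/c.log"}

	var wg sync.WaitGroup
	var mu sync.Mutex
	deleted := map[string]bool{}

	for _, f := range files {
		f := f // pin the loop variable, as `finalFile := f` does in the patch
		wg.Add(1)
		go func() {
			defer wg.Done()
			mu.Lock()
			deleted[f] = true // without the pin (pre-Go 1.22), f may be "v1/c.log" for every goroutine
			mu.Unlock()
		}()
	}
	wg.Wait()
	fmt.Println(len(deleted), "files deleted") // expect 3; the buggy form could print 1
}

Dropping the `f := f` line on a pre-1.22 toolchain can collapse the map to a single entry, which is exactly the class of bug the patch guards against.
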
3a672bac7c30be8598bf901e2ea23ed2003ceffa | diff --git a/rows/plugins/plugin_pdf.py b/rows/plugins/plugin_pdf.py
index <HASH>..<HASH> 100644
--- a/rows/plugins/plugin_pdf.py
+++ b/rows/plugins/plugin_pdf.py
@@ -352,9 +352,8 @@ class HeaderPositionAlgorithm(YGroupsAlgorithm):
line.append(y_objs)
lines.append(line)
- for obj in line_objs:
- if obj not in used:
- raise RuntimeError('Object not selected: {}'.format(obj))
+ # TODO: may check if one of objects in line_objs is not in used and
+ # raise an exception
return lines | Do not raise exception if some object is not selected | turicas_rows | train |
e9af894a31ffc50b53ae44c41ed8405c3b444507 | diff --git a/tests/Silex/Tests/Provider/SessionServiceProviderTest.php b/tests/Silex/Tests/Provider/SessionServiceProviderTest.php
index <HASH>..<HASH> 100644
--- a/tests/Silex/Tests/Provider/SessionServiceProviderTest.php
+++ b/tests/Silex/Tests/Provider/SessionServiceProviderTest.php
@@ -30,6 +30,11 @@ class SessionServiceProviderTest extends \PHPUnit_Framework_TestCase
$app->register(new SessionServiceProvider());
+ /**
+ * Smoke test
+ */
+ $defaultStorage = $app['session.storage'];
+
$app['session.storage'] = $app->share(function () use ($app) {
return new MockArraySessionStorage();
}); | Added simple smoke test for default session provider setup | silexphp_Silex | train |
0d00ed8368ed6c7356d3203e3762fe3b002694cc | diff --git a/rtcpeerconnection.js b/rtcpeerconnection.js
index <HASH>..<HASH> 100644
--- a/rtcpeerconnection.js
+++ b/rtcpeerconnection.js
@@ -1012,6 +1012,19 @@ module.exports = function(window, edgeVersion) {
transceiver.rtpSender.setTransport(transceiver.dtlsTransport);
}
+ // If the offer contained RTX but the answer did not,
+ // remove RTX from sendEncodingParameters.
+ var commonCapabilities = getCommonCapabilities(
+ transceiver.localCapabilities,
+ transceiver.remoteCapabilities);
+
+ var hasRtx = commonCapabilities.codecs.filter(function(c) {
+ return c.name.toLowerCase() === 'rtx';
+ }).length;
+ if (!hasRtx && transceiver.sendEncodingParameters[0].rtx) {
+ delete transceiver.sendEncodingParameters[0].rtx;
+ }
+
pc._transceive(transceiver,
direction === 'sendrecv' || direction === 'recvonly',
direction === 'sendrecv' || direction === 'sendonly');
diff --git a/test/rtcpeerconnection.js b/test/rtcpeerconnection.js
index <HASH>..<HASH> 100644
--- a/test/rtcpeerconnection.js
+++ b/test/rtcpeerconnection.js
@@ -3849,6 +3849,58 @@ describe('Edge shim', () => {
});
});
+ describe('non-rtx answer to rtx', () => {
+ let pc;
+ beforeEach(() => {
+ pc = new RTCPeerConnection();
+ });
+ afterEach(() => {
+ pc.close();
+ });
+ it('does not call send() with RTX', () => {
+ let sender;
+ return navigator.mediaDevices.getUserMedia({video: true})
+ .then((stream) => {
+ sender = pc.addTrack(stream.getTracks()[0], stream);
+ sender.send = sinon.stub();
+ })
+ .then(() => pc.createOffer())
+ .then((offer) => pc.setLocalDescription(offer))
+ .then(() => {
+ const localMid = SDPUtils.getMid(
+ SDPUtils.splitSections(pc.localDescription.sdp)[1]);
+ const candidateString = 'a=candidate:702786350 1 udp 41819902 ' +
+ '8.8.8.8 60769 typ host';
+ const sdp = 'v=0\r\n' +
+ 'o=- 0 0 IN IP4 127.0.0.1\r\n' +
+ 's=nortxanswer\r\n' +
+ 't=0 0\r\n' +
+ 'm=video 1 UDP/TLS/RTP/SAVPF 100\r\n' +
+ 'c=IN IP4 0.0.0.0\r\n' +
+ 'a=rtpmap:100 VP8/90000\r\n' +
+ 'a=rtcp:1 IN IP4 0.0.0.0\r\n' +
+ 'a=rtcp-fb:100 nack\r\n' +
+ 'a=rtcp-fb:100 nack pli\r\n' +
+ 'a=rtcp-fb:100 goog-remb\r\n' +
+ 'a=extmap:1 http://www.webrtc.org/experiments/rtp-hdrext/abs-send-time\r\n' +
+ 'a=setup:active\r\n' +
+ 'a=mid:' + localMid + '\r\n' +
+ 'a=recvonly\r\n' +
+ 'a=ice-ufrag:S5Zq\r\n' +
+ 'a=ice-pwd:6E1muhzVwnphsbN6uokNU/\r\n' +
+ 'a=fingerprint:sha-256 ' + FINGERPRINT_SHA256 + '\r\n' +
+ candidateString + '\r\n' +
+ 'a=end-of-candidates\r\n' +
+ 'a=rtcp-mux\r\n';
+ return pc.setRemoteDescription({type: 'answer', sdp});
+ })
+ .then(() => {
+ expect(sender.send).to.have.been.calledWith(
+ sinon.match.has('encodings', [{ssrc: 1001}]));
+ });
+ });
+ });
+
describe('edge clonestream issue', () => {
let pc;
beforeEach(() => { | fix rtx issue
When called with a non-RTX answer, RTX was still sent.
Fixes <URL> | otalk_rtcpeerconnection-shim | train |
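
The fix above reduces to: intersect local and remote codec capabilities, and if RTX survived on neither side, strip the rtx member from the send encoding before transceiving. A rough sketch of that negotiation step, with simplified types and written in Go rather than the shim's JavaScript:

package main

import (
	"fmt"
	"strings"
)

type codec struct{ name string }

type rtxParams struct{ ssrc uint32 }

type encoding struct {
	ssrc uint32
	rtx  *rtxParams // nil once RTX fails negotiation
}

// commonCodecs keeps codecs present on both sides, case-insensitively,
// which is the role getCommonCapabilities plays in the shim.
func commonCodecs(local, remote []codec) []codec {
	seen := map[string]bool{}
	for _, c := range remote {
		seen[strings.ToLower(c.name)] = true
	}
	var out []codec
	for _, c := range local {
		if seen[strings.ToLower(c.name)] {
			out = append(out, c)
		}
	}
	return out
}

func main() {
	local := []codec{{"VP8"}, {"rtx"}}
	remote := []codec{{"VP8"}} // the answer offered no RTX
	enc := encoding{ssrc: 1001, rtx: &rtxParams{ssrc: 1002}}

	hasRTX := false
	for _, c := range commonCodecs(local, remote) {
		if strings.EqualFold(c.name, "rtx") {
			hasRTX = true
		}
	}
	if !hasRTX {
		enc.rtx = nil // equivalent of `delete sendEncodingParameters[0].rtx`
	}
	fmt.Println("send with rtx:", enc.rtx != nil) // prints false for a non-RTX answer
}
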
7c9c593bfca9d261b1473881f2e77fce2cb39694 | diff --git a/Task/Generate.php b/Task/Generate.php
index <HASH>..<HASH> 100644
--- a/Task/Generate.php
+++ b/Task/Generate.php
@@ -36,20 +36,22 @@ class Generate extends Base {
*
* @param $environment
* The current environment handler.
- * @param $component_name
- * A component name. Currently supports 'module' and 'theme'.
+ * @param $component_type
+ * A component type. Currently supports 'module' and 'theme'.
* (We need this early on so we can use it to determine our sanity level.)
*/
- public function __construct($environment, $component_name) {
+ public function __construct($environment, $component_type) {
$this->environment = $environment;
$this->initGenerators();
// Fake the component data for now, as it's expected by the constructor.
$component_data = array();
+ // The component name is just the same as the type for the base generator.
+ $component_name = $component_type;
- $this->base = $component_name;
- $this->base_generator = $this->getGenerator($component_name, $component_data);
+ $this->base = $component_type;
+ $this->base_generator = $this->getGenerator($component_type, $component_name, $component_data);
}
/**
@@ -118,13 +120,28 @@ class Generate extends Base {
}
/**
- * Generator factory. WIP!
+ * Generator factory.
*
- * TODO: switch over to using this everywhere.
+ * @param $component_type
+ * The type of the component. This is used to build the class name: see
+ * getGeneratorClass().
+ * @param $component_name
+ * The identifier for the component. This is often the same as the type
+ * (e.g., 'module', 'hooks') but in the case of types used multiple times
+ * this will be a unique identifier.
+ * @param $component_data
+ * An associative array of input data for the component, as received by
+ * Generate::generateComponent(). For example, for modules this will
+ * be the module name, hooks required, and so on. See each component for
+ * documentation on what this should contain.
+ *
+ * @return
+ * A generator object, with the component name and data set on it, as well
+ * as a reference to this task handler.
*/
- public function getGenerator($component, $component_data) {
- $class = module_builder_get_class($component);
- $generator = new $class($component, $component_data);
+ public function getGenerator($component_type, $component_name, $component_data) {
+ $class = $this->getGeneratorClass($component_type);
+ $generator = new $class($component_name, $component_data);
// Each generator needs a link back to the factory to be able to make more
// generators, and also so it can access the environment. | Fixed confusion between component name and type; renamed the Generate task constructor parameter for accuracy; added a parameter to getGenerator(). | drupal-code-builder_drupal-code-builder | train
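
The core of this refactor is that a component's type (which resolves the generator class) is distinct from its name (a unique identifier, since one type can be instantiated several times, as the new docblock explains). A registry-style factory makes that split explicit; the sketch below uses invented names and is not drawn from the module_builder codebase.

package main

import "fmt"

type generator interface{ Name() string }

type moduleGen struct{ name string }

func (g moduleGen) Name() string { return g.name }

// constructors maps a component *type* to a factory function; the *name*
// travels separately, so the same type can be instantiated more than once.
var constructors = map[string]func(name string, data map[string]string) generator{
	"module": func(name string, data map[string]string) generator {
		return moduleGen{name: name}
	},
}

func getGenerator(componentType, componentName string, data map[string]string) (generator, error) {
	ctor, ok := constructors[componentType]
	if !ok {
		return nil, fmt.Errorf("unknown component type %q", componentType)
	}
	return ctor(componentName, data), nil
}

func main() {
	// For the base generator, the name is simply the same as the type,
	// matching the comment added in the constructor above.
	g, err := getGenerator("module", "module", nil)
	if err != nil {
		panic(err)
	}
	fmt.Println("built generator:", g.Name())
}
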