hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
---|---|---|---|---|
0d357913e2d03597103d87d131bffc6710582690 | diff --git a/app/models/socializer/activity.rb b/app/models/socializer/activity.rb
index <HASH>..<HASH> 100644
--- a/app/models/socializer/activity.rb
+++ b/app/models/socializer/activity.rb
@@ -36,7 +36,7 @@ module Socializer
@target ||= activitable_target.activitable
end
- # Selects the activites that either the person made, that is public from a person in
+ # Selects the activities that either the person made, that is public from a person in
# one of his circle, or that is shared to one of the circles he is part of.
#
# * <tt>options[:provider]</tt> - <tt>nil</tt>, <tt>activities</tt>, <tt>people</tt>, <tt>circles</tt>, <tt>groups</tt>
diff --git a/app/models/socializer/activity_object.rb b/app/models/socializer/activity_object.rb
index <HASH>..<HASH> 100644
--- a/app/models/socializer/activity_object.rb
+++ b/app/models/socializer/activity_object.rb
@@ -23,7 +23,7 @@ module Socializer
has_many :ties, foreign_key: 'contact_id'
has_many :memberships, -> { where active: true }, foreign_key: 'member_id'
- # define a class macro for setting comparaison with activitable_type
+ # define a class macro for setting comparison with activitable_type
def self.attribute_type_of(*args)
args.each do |type|
define_method("#{type}?") { activitable_type == "Socializer::#{type.capitalize}" } | a couple of spelling corrections in comments | socializer_socializer | train |
8aaee67a354e5e9ac642e6537b869f8ee14e0e6c | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -34,7 +34,8 @@ tests_require = [
"remotecv",
"hiredis",
"scikit-image>=0.12.3",
- "celery"
+ "celery",
+ "cairosvg",
]
diff --git a/tests/handlers/test_base_handler.py b/tests/handlers/test_base_handler.py
index <HASH>..<HASH> 100644
--- a/tests/handlers/test_base_handler.py
+++ b/tests/handlers/test_base_handler.py
@@ -876,20 +876,25 @@ class ImageOperationsWithJpegtranTestCase(BaseImagingTestCase):
f.write(response.body)
f.close()
+ exiftool = which('exiftool')
+ if not exiftool:
+ raise AssertionError('exiftool was not found. Please install it to run thumbor\'s tests.')
+
command = [
- which('exiftool'),
+ exiftool,
tmp_file_path,
'-DeviceModel',
'-EncodingProcess'
]
- with open(os.devnull) as null:
- output = subprocess.check_output(command, stdin=null)
-
- expect(response.code).to_equal(200)
- expect(output).to_equal('Encoding Process : Progressive DCT, Huffman coding\n')
+ try:
+ with open(os.devnull) as null:
+ output = subprocess.check_output(command, stdin=null)
- os.remove(tmp_file_path)
+ expect(response.code).to_equal(200)
+ expect(output).to_equal('Encoding Process : Progressive DCT, Huffman coding\n')
+ finally:
+ os.remove(tmp_file_path)
def test_with_meta(self):
response = self.fetch('/unsafe/meta/800x400/image.jpg') | Improved tests and added missing dev requirement | thumbor_thumbor | train |
67eafc6717fe46f1a4f76b670a096aefa0467970 | diff --git a/src/main/java/org/hyperledger/fabric/sdk/security/CryptoPrimitives.java b/src/main/java/org/hyperledger/fabric/sdk/security/CryptoPrimitives.java
index <HASH>..<HASH> 100755
--- a/src/main/java/org/hyperledger/fabric/sdk/security/CryptoPrimitives.java
+++ b/src/main/java/org/hyperledger/fabric/sdk/security/CryptoPrimitives.java
@@ -61,16 +61,17 @@ import javax.xml.bind.DatatypeConverter;
import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
+import org.bouncycastle.asn1.ASN1Encodable;
+import org.bouncycastle.asn1.ASN1InputStream;
import org.bouncycastle.asn1.ASN1Integer;
+import org.bouncycastle.asn1.ASN1Primitive;
+import org.bouncycastle.asn1.ASN1Sequence;
import org.bouncycastle.asn1.DERSequenceGenerator;
import org.bouncycastle.asn1.x9.ECNamedCurveTable;
import org.bouncycastle.asn1.x9.X9ECParameters;
import org.bouncycastle.crypto.Digest;
import org.bouncycastle.crypto.digests.SHA256Digest;
import org.bouncycastle.crypto.digests.SHA3Digest;
-import org.bouncycastle.crypto.params.ECDomainParameters;
-import org.bouncycastle.crypto.params.ECPrivateKeyParameters;
-import org.bouncycastle.crypto.signers.ECDSASigner;
import org.bouncycastle.jce.provider.BouncyCastleProvider;
import org.bouncycastle.openssl.jcajce.JcaPEMWriter;
import org.bouncycastle.operator.ContentSigner;
@@ -599,6 +600,42 @@ public class CryptoPrimitives implements CryptoSuite {
}
}
+ /**
+ * Decodes an ECDSA signature and returns a two element BigInteger array.
+ *
+ * @param signature ECDSA signature bytes.
+ * @return BigInteger array for the signature's r and s values
+ * @throws Exception
+ */
+ private static BigInteger[] decodeECDSASignature(byte[] signature) throws Exception {
+ ByteArrayInputStream inStream = new ByteArrayInputStream(signature);
+ ASN1InputStream asnInputStream = new ASN1InputStream(inStream);
+ ASN1Primitive asn1 = asnInputStream.readObject();
+
+ BigInteger[] sigs = new BigInteger[2];
+ int count = 0;
+ if (asn1 instanceof ASN1Sequence) {
+ ASN1Sequence asn1Sequence = (ASN1Sequence) asn1;
+ ASN1Encodable[] asn1Encodables = asn1Sequence.toArray();
+ for (ASN1Encodable asn1Encodable : asn1Encodables) {
+ ASN1Primitive asn1Primitive = asn1Encodable.toASN1Primitive();
+ if (asn1Primitive instanceof ASN1Integer) {
+ ASN1Integer asn1Integer = (ASN1Integer) asn1Primitive;
+ BigInteger integer = asn1Integer.getValue();
+ if (count < 2) {
+ sigs[count] = integer;
+ }
+ count++;
+ }
+ }
+ }
+ if (count != 2) {
+ throw new CryptoException(format("Invalid ECDSA signature. Expected count of 2 but got: %d. Signature is: %s", count,
+ DatatypeConverter.printHexBinary(signature)));
+ }
+ return sigs;
+ }
+
/**
* Sign data with the specified elliptic curve private key.
@@ -610,19 +647,16 @@ public class CryptoPrimitives implements CryptoSuite {
*/
private byte[] ecdsaSignToBytes(ECPrivateKey privateKey, byte[] data) throws CryptoException {
try {
- final byte[] encoded = hash(data);
-
X9ECParameters params = ECNamedCurveTable.getByName(curveName);
BigInteger curveN = params.getN();
- ECDomainParameters ecParams = new ECDomainParameters(params.getCurve(), params.getG(), curveN,
- params.getH());
-
- ECDSASigner signer = new ECDSASigner();
+ Signature sig = SECURITY_PROVIDER == null ? Signature.getInstance(DEFAULT_SIGNATURE_ALGORITHM) :
+ Signature.getInstance(DEFAULT_SIGNATURE_ALGORITHM, SECURITY_PROVIDER);
+ sig.initSign(privateKey);
+ sig.update(data);
+ byte[] signature = sig.sign();
- ECPrivateKeyParameters privKey = new ECPrivateKeyParameters(privateKey.getS(), ecParams);
- signer.init(true, privKey);
- BigInteger[] sigs = signer.generateSignature(encoded);
+ BigInteger[] sigs = decodeECDSASignature(signature);
sigs = preventMalleability(sigs, curveN); | [FAB-<I>] Java SDK Signatures with Standard JCA/JCE
Improvement [FAB-<I>] changing CryptoPrimitives to use the standard
JCA/JCE Signature class to perform the ECDSA signatures.
The signature is then decoded in order to call preventMalleability.
Change-Id: Ia<I>dade<I>b5a1c<I>b3cd<I>fc0c<I>fb9f3a<I> | hyperledger_fabric-sdk-java | train |
2f68e760c1bf0606ad5f61427af7d245830a5b51 | diff --git a/portfolio/type/boxnet/lib.php b/portfolio/type/boxnet/lib.php
index <HASH>..<HASH> 100644
--- a/portfolio/type/boxnet/lib.php
+++ b/portfolio/type/boxnet/lib.php
@@ -199,4 +199,8 @@ class portfolio_plugin_boxnet extends portfolio_plugin_base {
}
//@TODO see if we can verify the api key without actually getting an authentication token
}
+
+ public static function allows_multiple() {
+ return false;
+ }
} | MDL-<I>: boxnet portfolio plugin should only be allowed one instance | moodle_moodle | train |
6ab1d9ac8166e2a3ff6f876a0fbe78d05b9cf827 | diff --git a/spec/cli/parser/string_spec.rb b/spec/cli/parser/string_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/cli/parser/string_spec.rb
+++ b/spec/cli/parser/string_spec.rb
@@ -100,6 +100,11 @@ describe "Strings" do
parsed("?&").should == [:str, "&"]
end
+ it "parses escape sequences" do
+ parsed("?\\n").should == [:str, "\n"]
+ parsed("?\\t").should == [:str, "\t"]
+ end
+
it "parses a string sexp as a command arg" do
parsed("foo ?a").should == [:call, nil, :foo, [:arglist, [:str, "a"]]]
end | Add spec for previous character string fix | opal_opal | train |
1f63069e22105e5cb0108cd717a78ed1f85b6a52 | diff --git a/inputs/__init__.py b/inputs/__init__.py
index <HASH>..<HASH> 100644
--- a/inputs/__init__.py
+++ b/inputs/__init__.py
@@ -13,26 +13,6 @@ EV_KEY = 0x01
# pylint: disable=too-few-public-methods
-def get_input():
- """Get a single event from any input device."""
- pass
-
-
-def get_key():
- """Get a single keypress from a keyboard."""
- pass
-
-
-def get_mouse():
- """Get a single movement or click from a mouse."""
- pass
-
-
-def get_gamepad():
- """Get a single action from a gamepad."""
- pass
-
-
class InputEvent(object):
"""A user event."""
def __init__(self,
@@ -86,9 +66,7 @@ class InputDevice(object):
event = self._character_device.read(EVENT_SIZE)
(tv_sec, tv_usec, ev_type, code, value) = struct.unpack(
EVENT_FORMAT, event)
- print((tv_sec, tv_usec, ev_type, code, value))
if ev_type == EV_KEY:
- print(tv_sec + (tv_usec / 1000000))
yield InputEvent(self,
tv_sec + (tv_usec / 1000000),
code,
@@ -173,13 +151,36 @@ class DeviceManager(object):
except IndexError:
raise IndexError("list index out of range")
-devices = DeviceManager()
+devices = DeviceManager() # pylint: disable=invalid-name
+
+
+def get_input():
+ """Get a single event from any input device."""
+ pass
+
+
+def get_key():
+ """Get a single keypress from a keyboard."""
+ try:
+ keyboard = devices.keyboards[0]
+ except IndexError:
+ raise RuntimeError("No keyboard found.")
+ return keyboard.read()
-def main():
- """Simple example."""
- print("Hello")
+def get_mouse():
+ """Get a single movement or click from a mouse."""
+ try:
+ mouse = devices.mice[0]
+ except IndexError:
+ raise RuntimeError("No mice found.")
+ return mouse.read()
-if __name__ == '__main__':
- main()
+def get_gamepad():
+ """Get a single action from a gamepad."""
+ try:
+ gamepad = devices.gamepads[0]
+ except IndexError:
+ raise RuntimeError("No gamepad found.")
+ return gamepad.read() | Enable the simple, pre-setup functions. | zeth_inputs | train |
b4b9d576104b37dadc39cdbe374b52c79d986d6b | diff --git a/src/clockpicker.js b/src/clockpicker.js
index <HASH>..<HASH> 100644
--- a/src/clockpicker.js
+++ b/src/clockpicker.js
@@ -125,7 +125,7 @@
this.spanMinutes.click($.proxy(this.toggleView, this, 'minutes'));
// Show or toggle
- input.on('focus.clockpicker', $.proxy(this.show, this));
+ input.on('focus.clockpicker click.clockpicker', $.proxy(this.show, this));
addon.on('click.clockpicker', $.proxy(this.toggle, this));
// Build ticks
@@ -571,7 +571,7 @@
// Remove clockpicker from input
ClockPicker.prototype.remove = function() {
this.element.removeData('clockpicker');
- this.input.off('focus.clockpicker');
+ this.input.off('focus.clockpicker click.clockpicker');
this.addon.off('click.clockpicker');
if (this.isShown) {
this.hide(); | Show clock on click in input as well as on focus.
Just adds click listener as well as focus listener to input, same event handler (this.show). Gives similar behaviour to that of jquery.ui.datepicker. See also 3DFace issue <URL> | weareoutman_clockpicker | train |
de817cfe46b56424afa0d80155bb5be4c47d5fa0 | diff --git a/drivers/openstack/client.go b/drivers/openstack/client.go
index <HASH>..<HASH> 100644
--- a/drivers/openstack/client.go
+++ b/drivers/openstack/client.go
@@ -4,8 +4,10 @@ import (
"crypto/tls"
"fmt"
"net/http"
+ "time"
"github.com/docker/machine/log"
+ "github.com/docker/machine/utils"
"github.com/docker/machine/version"
"github.com/rackspace/gophercloud"
"github.com/rackspace/gophercloud/openstack"
@@ -31,7 +33,7 @@ type Client interface {
StopInstance(d *Driver) error
RestartInstance(d *Driver) error
DeleteInstance(d *Driver) error
- WaitForInstanceStatus(d *Driver, status string, timeout int) error
+ WaitForInstanceStatus(d *Driver, status string) error
GetInstanceIpAddresses(d *Driver) ([]IpAddress, error)
CreateKeyPair(d *Driver, name string, publicKey string) error
DeleteKeyPair(d *Driver, name string) error
@@ -132,11 +134,23 @@ func (c *GenericClient) DeleteInstance(d *Driver) error {
return nil
}
-func (c *GenericClient) WaitForInstanceStatus(d *Driver, status string, timeout int) error {
- if err := servers.WaitForStatus(c.Compute, d.MachineId, status, timeout); err != nil {
- return err
- }
- return nil
+func (c *GenericClient) WaitForInstanceStatus(d *Driver, status string) error {
+ return utils.WaitForSpecificOrError(func() (bool, error) {
+ current, err := servers.Get(c.Compute, d.MachineId).Extract()
+ if err != nil {
+ return true, err
+ }
+
+ if current.Status == "ERROR" {
+ return true, fmt.Errorf("Instance creation failed. Instance is in ERROR state")
+ }
+
+ if current.Status == status {
+ return true, nil
+ }
+
+ return false, nil
+ }, 50, 4*time.Second)
}
func (c *GenericClient) GetInstanceIpAddresses(d *Driver) ([]IpAddress, error) {
diff --git a/drivers/openstack/openstack.go b/drivers/openstack/openstack.go
index <HASH>..<HASH> 100644
--- a/drivers/openstack/openstack.go
+++ b/drivers/openstack/openstack.go
@@ -664,7 +664,7 @@ func (d *Driver) assignFloatingIp() error {
func (d *Driver) waitForInstanceActive() error {
log.WithField("MachineId", d.MachineId).Debug("Waiting for the OpenStack instance to be ACTIVE...")
- if err := d.client.WaitForInstanceStatus(d, "ACTIVE", 200); err != nil {
+ if err := d.client.WaitForInstanceStatus(d, "ACTIVE"); err != nil {
return err
}
return nil | Stop polling the instance when status is ERROR
Fix #<I> | docker_machine | train |
a50b0e2cb2536c0c7a1c0087c2a62249d30632cb | diff --git a/Kwf/Controller/Action/Auto/Grid.php b/Kwf/Controller/Action/Auto/Grid.php
index <HASH>..<HASH> 100644
--- a/Kwf/Controller/Action/Auto/Grid.php
+++ b/Kwf/Controller/Action/Auto/Grid.php
@@ -987,7 +987,7 @@ abstract class Kwf_Controller_Action_Auto_Grid extends Kwf_Controller_Action_Aut
$text = $helperDateTime->dateTime($text);
}
$sheet->setCellValueExplicit($cell, $text, $cellType);
- if ($renderer[$col] == 'clickableLink') {
+ if ($renderer[$col] == 'clickableLink' && $text) {
$sheet->getCell($cell)->getHyperlink()->setUrl($text);
}
} | Require url to render clickable link in AutoGrid excel export
Sometimes the url is not set and causes an exception, which breaks the export. | koala-framework_koala-framework | train |
941184a95454df6259d45485ee49db70dd33b65f | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -23,7 +23,7 @@ function _generateFrom (file, fname) {
Object.assign(acc, info)
}
return acc
- }, {[fname]: Object.assign({isDir: true}, stat)})
+ }, {})
})
} else if (!stat.isFile()) {
// ignored. We don't do things like symlinks rn
@@ -46,7 +46,6 @@ function check (dirname, dirIntegrity) {
dirname = path.resolve(dirname)
return pEvery(Object.keys(dirIntegrity), f => {
const fstat = dirIntegrity[f]
- if (fstat.isDir) return true
const filename = path.join(dirname, f)
if (fstat.size > MAX_BULK_SIZE) { | feat: don't return directories
BREAKING CHANGE:
Directories are not returned in the index object | zkochan_dint | train |
6af786477a799f07ea52a17a3f016aaab9402053 | diff --git a/lib/ronin/cacheable/cached_file.rb b/lib/ronin/cacheable/cached_file.rb
index <HASH>..<HASH> 100644
--- a/lib/ronin/cacheable/cached_file.rb
+++ b/lib/ronin/cacheable/cached_file.rb
@@ -49,6 +49,19 @@ module Ronin
end
#
+ # Finds all cached files that were cached from a given directory.
+ #
+ # @param [String] directory
+ # The directory search for.
+ #
+ # @return [DataMapper::Collection<CachedFile>]
+ # The cached files that were cached from the given directory.
+ #
+ def CachedFile.from(directory)
+ all(:path.like => File.join(File.expand_path(directory),'%'))
+ end
+
+ #
# Creates a new CacheFile object with a given path, and caches it.
#
# @return [CacheFile, nil]
diff --git a/lib/ronin/platform/object_cache.rb b/lib/ronin/platform/object_cache.rb
index <HASH>..<HASH> 100644
--- a/lib/ronin/platform/object_cache.rb
+++ b/lib/ronin/platform/object_cache.rb
@@ -52,13 +52,13 @@ module Ronin
# The cached file.
#
def ObjectCache.each(directory=nil,&block)
- attributes = {}
-
+ files = Cacheable::CachedFile.all
+
if directory
- attributes.merge!(:path.like => File.join(directory,'%'))
+ files = files.from(directory)
end
-
- Cacheable::CachedFile.all(attributes).each(&block)
+
+ files.each(&block)
return true
end | Added Cacheable::CachedFile.from. | ronin-ruby_ronin | train |
296eb95d70a86c2151971d29aa719e64b3e53066 | diff --git a/oct2py/tests/test_oct2py.py b/oct2py/tests/test_oct2py.py
index <HASH>..<HASH> 100644
--- a/oct2py/tests/test_oct2py.py
+++ b/oct2py/tests/test_oct2py.py
@@ -790,6 +790,20 @@ def test_interact():
assert output.buf == expected
+def test_func_without_docstring():
+ oc = Oct2Py()
+ pwd = oc._session.get_pwd()
+ fname = '%s/temp_oct2py_func.m' % pwd
+ msg = 'function [outp] = temp_oct2py_func(inp)\noutp = inp;\nend\n'
+ with open(fname, 'wb') as fid:
+ fid.write(msg.encode('utf-8'))
+ out = oc.temp_oct2py_func(5)
+ assert out == 5
+ assert 'user-defined function' in oc.temp_oct2py_func.__doc__
+ assert pwd in oc.temp_oct2py_func.__doc__
+ os.remove(fname)
+
+
if __name__ == '__main__': # pragma: no cover
print('oct2py test')
print('*' * 20) | Add test for functions without docstrings | blink1073_oct2py | train |
809a3826bce0c2c96fee5b692e0c2aeb411b38bd | diff --git a/externs/es5.js b/externs/es5.js
index <HASH>..<HASH> 100644
--- a/externs/es5.js
+++ b/externs/es5.js
@@ -243,6 +243,13 @@ Object.prototype.toJSON = function(opt_key) {};
/**
+ * @see https://developer.mozilla.org/en/JavaScript/Reference/Global_Objects/Date/toISOString
+ * @return {string}
+ */
+Date.prototype.toISOString = function() {};
+
+
+/**
* @param {*=} opt_ignoredKey
* @return {string}
* @override | Add toISOString to externs
R=johnlenz
DELTA=7 (7 added, 0 deleted, 0 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL> | google_closure-compiler | train |
5ad1c046da45a69817b643eb0c6602ddab1b112c | diff --git a/src/main/java/io/reactivex/Maybe.java b/src/main/java/io/reactivex/Maybe.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/reactivex/Maybe.java
+++ b/src/main/java/io/reactivex/Maybe.java
@@ -1967,13 +1967,12 @@ public abstract class Maybe<T> implements MaybeSource<T> {
* </dl>
*
* @param <R> the value type of the Maybe returned by the transformer function
- * @param transformer
- * implements the function that transforms the source Maybe
+ * @param transformer the transformer function, not null
* @return a Maybe, transformed by the transformer function
* @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a>
*/
@SchedulerSupport(SchedulerSupport.NONE)
- public final <R> Maybe<R> compose(Function<? super Maybe<T>, ? extends MaybeSource<R>> transformer) {
+ public final <R> Maybe<R> compose(MaybeTransformer<T, R> transformer) {
return wrap(to(transformer));
}
diff --git a/src/main/java/io/reactivex/Single.java b/src/main/java/io/reactivex/Single.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/reactivex/Single.java
+++ b/src/main/java/io/reactivex/Single.java
@@ -1467,13 +1467,12 @@ public abstract class Single<T> implements SingleSource<T> {
* </dl>
*
* @param <R> the value type of the single returned by the transformer function
- * @param transformer
- * implements the function that transforms the source Single
+ * @param transformer the transformer function, not null
* @return the source Single, transformed by the transformer function
* @see <a href="https://github.com/ReactiveX/RxJava/wiki/Implementing-Your-Own-Operators">RxJava wiki: Implementing Your Own Operators</a>
*/
@SchedulerSupport(SchedulerSupport.NONE)
- public final <R> Single<R> compose(Function<? super Single<T>, ? extends SingleSource<R>> transformer) {
+ public final <R> Single<R> compose(SingleTransformer<T, R> transformer) {
return wrap(to(transformer));
}
diff --git a/src/test/java/io/reactivex/internal/operators/single/SingleMiscTest.java b/src/test/java/io/reactivex/internal/operators/single/SingleMiscTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/reactivex/internal/operators/single/SingleMiscTest.java
+++ b/src/test/java/io/reactivex/internal/operators/single/SingleMiscTest.java
@@ -13,18 +13,23 @@
package io.reactivex.internal.operators.single;
-import static org.junit.Assert.*;
-
-import java.util.concurrent.*;
-import java.util.concurrent.atomic.AtomicBoolean;
-
-import org.junit.Test;
-
-import io.reactivex.*;
+import io.reactivex.Single;
+import io.reactivex.SingleObserver;
+import io.reactivex.SingleSource;
+import io.reactivex.SingleTransformer;
import io.reactivex.disposables.Disposables;
import io.reactivex.exceptions.TestException;
import io.reactivex.functions.*;
import io.reactivex.schedulers.Schedulers;
+import org.junit.Test;
+
+import java.util.concurrent.Callable;
+import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import static org.junit.Assert.assertNotSame;
+import static org.junit.Assert.assertSame;
public class SingleMiscTest {
@Test
@@ -78,7 +83,7 @@ public class SingleMiscTest {
public void compose() {
Single.just(1)
- .compose(new Function<Single<Integer>, SingleSource<Object>>() {
+ .compose(new SingleTransformer<Integer, Object>() {
@Override
public SingleSource<Object> apply(Single<Integer> f) throws Exception {
return f.map(new Function<Integer, Object>() {
diff --git a/src/test/java/io/reactivex/maybe/MaybeTest.java b/src/test/java/io/reactivex/maybe/MaybeTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/io/reactivex/maybe/MaybeTest.java
+++ b/src/test/java/io/reactivex/maybe/MaybeTest.java
@@ -388,7 +388,7 @@ public class MaybeTest {
@Test
public void compose() {
- Maybe.just(1).compose(new Function<Maybe<Integer>, MaybeSource<Integer>>() {
+ Maybe.just(1).compose(new MaybeTransformer<Integer, Integer>() {
@Override
public MaybeSource<Integer> apply(Maybe<Integer> m) throws Exception {
return m.map(new Function<Integer, Integer>() { | Switch Maybe and Single to use their Transformers in compose() (#<I>)
* Switch Maybe and Single to use their Transformers in compose()
Resolves #<I>
* Update compose() tests | ReactiveX_RxJava | train |
0b495892fcb26c3d911bb2edf7d90c350ab64006 | diff --git a/form/radio_button.go b/form/radio_button.go
index <HASH>..<HASH> 100644
--- a/form/radio_button.go
+++ b/form/radio_button.go
@@ -16,14 +16,23 @@ func (f Form) RadioButton(opts tags.Options) *tags.Tag {
label = fmt.Sprint(opts["label"])
delete(opts, "label")
}
+ var ID string
+ if opts["id"] != nil {
+ ID = fmt.Sprint(opts["id"])
+ }
value := opts["value"]
checked := opts["checked"]
delete(opts, "checked")
ct := f.InputTag(opts)
ct.Checked = template.HTMLEscaper(value) == template.HTMLEscaper(checked)
- tag := tags.New("label", tags.Options{
+ labelOptions := tags.Options{
"body": strings.Join([]string{ct.String(), label}, " "),
- })
+ }
+ // If the ID is provided, give it to the label's for attribute
+ if ID != "" {
+ labelOptions["for"] = ID
+ }
+ tag := tags.New("label", labelOptions)
return tag
}
diff --git a/form/radio_button_test.go b/form/radio_button_test.go
index <HASH>..<HASH> 100644
--- a/form/radio_button_test.go
+++ b/form/radio_button_test.go
@@ -43,3 +43,14 @@ func Test_Form_RadioButton_WithLabel(t *testing.T) {
})
r.Equal(`<label><input type="radio" value="1" /> check me</label>`, ct.String())
}
+
+func Test_Form_RadioButton_WithId(t *testing.T) {
+ r := require.New(t)
+ f := form.New(tags.Options{})
+ ct := f.RadioButton(tags.Options{
+ "label": "Water",
+ "id": "radio-drink-water",
+ "value": "water",
+ })
+ r.Equal(`<label for="radio-drink-water"><input id="radio-drink-water" type="radio" value="water" /> Water</label>`, ct.String())
+} | Auto set the for attribute on label when the ID is provided (RadioButton) | gobuffalo_tags | train |
23c2a56c1ee80e6eda1602c53ff5120b51333b9d | diff --git a/ambry/library/search_backends/postgres_backend.py b/ambry/library/search_backends/postgres_backend.py
index <HASH>..<HASH> 100644
--- a/ambry/library/search_backends/postgres_backend.py
+++ b/ambry/library/search_backends/postgres_backend.py
@@ -76,7 +76,6 @@ class PostgreSQLSearchBackend(BaseSearchBackend):
class DatasetPostgreSQLIndex(BaseDatasetIndex):
def __init__(self, backend=None):
- from sqlalchemy.exc import ProgrammingError
assert backend is not None, 'backend argument can not be None.'
super(self.__class__, self).__init__(backend=backend)
@@ -97,7 +96,7 @@ class DatasetPostgreSQLIndex(BaseDatasetIndex):
results = self.backend.library.database.connection.execute(query, **query_params)
datasets = {}
- def make_result(vid = None, b_score = 0, p_score = 0):
+ def make_result(vid=None, b_score=0, p_score=0):
res = DatasetSearchResult()
res.b_score = b_score
res.p_score = p_score
@@ -108,13 +107,13 @@ class DatasetPostgreSQLIndex(BaseDatasetIndex):
for result in results:
vid, dataset_score = result
- datasets[vid] = make_result(vid, b_score = dataset_score)
+ datasets[vid] = make_result(vid, b_score=dataset_score)
logger.debug('Extending datasets with partitions.')
for partition in self.backend.partition_index.search(search_phrase):
- if not partition.dataset_vid in datasets:
+ if partition.dataset_vid not in datasets:
datasets[partition.dataset_vid] = make_result(partition.dataset_vid)
datasets[partition.dataset_vid].p_score += partition.score
diff --git a/test/unit/test_library/test_search_backends/test_postgresql_backend.py b/test/unit/test_library/test_search_backends/test_postgresql_backend.py
index <HASH>..<HASH> 100644
--- a/test/unit/test_library/test_search_backends/test_postgresql_backend.py
+++ b/test/unit/test_library/test_search_backends/test_postgresql_backend.py
@@ -1,7 +1,7 @@
# -*- coding: utf-8 -*-
from ambry.library.search_backends.postgres_backend import PostgreSQLSearchBackend
-from ambry.library import new_library
+from ambry.orm.database import POSTGRES_SCHEMA_NAME
from ambry.util import AttrDict
from test.test_base import PostgreSQLTestBase
@@ -47,8 +47,10 @@ class PostgreSQLSearchBackendTest(PostgreSQLBackendBaseTest):
class DatasetPostgreSQLIndexTest(PostgreSQLBackendBaseTest):
def test_creates_dataset_index(self):
+ # backend __init__ created the index. Just check here.
with self._my_library.database._engine.connect() as conn:
- result = conn.execute('SELECT * FROM dataset_index;').fetchall()
+ query = 'SELECT * FROM {}.dataset_index;'.format(POSTGRES_SCHEMA_NAME)
+ result = conn.execute(query).fetchall()
self.assertEqual(result, [])
# search() tests
@@ -133,7 +135,8 @@ class IdentifierPostgreSQLIndexTest(PostgreSQLBackendBaseTest):
def test_creates_identifier_index(self):
with self._my_library.database._engine.connect() as conn:
- result = conn.execute('SELECT * FROM identifier_index;').fetchall()
+ query = 'SELECT * FROM {}.identifier_index;'.format(POSTGRES_SCHEMA_NAME)
+ result = conn.execute(query).fetchall()
self.assertEqual(result, [])
# search() tests
@@ -223,7 +226,8 @@ class PartitionPostgreSQLIndexTest(PostgreSQLBackendBaseTest):
def test_creates_partition_index(self):
with self._my_library.database._engine.connect() as conn:
- result = conn.execute('SELECT * from partition_index;').fetchall()
+ query = 'SELECT * FROM {}.partition_index;'.format(POSTGRES_SCHEMA_NAME)
+ result = conn.execute(query).fetchall()
self.assertEqual(result, [])
# search() tests | Postgres search broken tests fixed. #<I>. | CivicSpleen_ambry | train |
cc8710556f5575b096e533d9855f8ae9c8d741e2 | diff --git a/lib/solargraph/diagnostics/update_errors.rb b/lib/solargraph/diagnostics/update_errors.rb
index <HASH>..<HASH> 100644
--- a/lib/solargraph/diagnostics/update_errors.rb
+++ b/lib/solargraph/diagnostics/update_errors.rb
@@ -3,7 +3,7 @@ module Solargraph
class UpdateErrors < Base
def diagnose source, api_map
result = []
- source.error_ranges.each do |range|
+ combine_ranges(source.code, source.error_ranges).each do |range|
result.push(
range: range.to_hash,
severity: Diagnostics::Severities::ERROR,
@@ -13,6 +13,26 @@ module Solargraph
end
result
end
+
+ private
+
+ # Combine an array of ranges by their starting lines.
+ #
+ # @param ranges [Array<Range>]
+ # @return [Array<Range>]
+ def combine_ranges code, ranges
+ result = []
+ lines = []
+ ranges.sort{|a, b| a.start.line <=> b.start.line}.each do |rng|
+ next if rng.nil? || lines.include?(rng.start.line)
+ lines.push rng.start.line
+ next if rng.start.line >= code.lines.length
+ scol = code.lines[rng.start.line].index(/[^\s]/) || 0
+ ecol = code.lines[rng.start.line].length
+ result.push Range.from_to(rng.start.line, scol, rng.start.line, ecol)
+ end
+ result
+ end
end
end
end
diff --git a/lib/solargraph/source.rb b/lib/solargraph/source.rb
index <HASH>..<HASH> 100644
--- a/lib/solargraph/source.rb
+++ b/lib/solargraph/source.rb
@@ -108,7 +108,7 @@ module Solargraph
end
incr_code = updater.repair(@repaired)
synced = Source.new(incr_code, filename)
- synced.error_ranges.concat combine_errors(error_ranges + updater.changes.map(&:range))
+ synced.error_ranges.concat (error_ranges + updater.changes.map(&:range))
synced.code = real_code
synced.version = updater.version
synced
@@ -221,22 +221,6 @@ module Solargraph
result
end
- # @param ranges [Array<Range>]
- # @return [Array<Range>]
- def combine_errors ranges
- result = []
- lines = []
- ranges.sort{|a, b| a.start.line <=> b.start.line}.each do |rng|
- next if rng.nil? || lines.include?(rng.start.line)
- lines.push rng.start.line
- next if comment_at?(rng.start) || rng.start.line >= code.lines.length
- fcol = code.lines[rng.start.line].index(/[^\s]/) || 0
- ecol = code.lines[rng.start.line].length
- result.push Range.from_to(rng.start.line, fcol, rng.start.line, ecol)
- end
- result
- end
-
protected
# @return [Integer] | Source tracks original error ranges. UpdateErrors combines them into lines. | castwide_solargraph | train |
3035dc89c3200f33c6d470762d17b94e64faf179 | diff --git a/rollup.config.js b/rollup.config.js
index <HASH>..<HASH> 100644
--- a/rollup.config.js
+++ b/rollup.config.js
@@ -15,7 +15,10 @@ const config = [{
},
plugins: [
pluginJson
- ]
+ ],
+ watch: {
+ include: 'src/**'
+ }
}, {
input: './src/apivis.js',
output: {
@@ -26,7 +29,10 @@ const config = [{
},
plugins: [
pluginJson
- ]
+ ],
+ watch: {
+ include: 'src/**'
+ }
}, {
input: './src/apivis.es.js',
output: {
@@ -36,7 +42,10 @@ const config = [{
},
plugins: [
pluginJson
- ]
+ ],
+ watch: {
+ include: 'src/**'
+ }
}];
export default config; | Limit watch to src folder | rpeev_apivis | train |
4caed18da11be25798f32a5bff7a0cbbab3a0e97 | diff --git a/tests/Rct567/DomQuery/Tests/DomQueryTest.php b/tests/Rct567/DomQuery/Tests/DomQueryTest.php
index <HASH>..<HASH> 100644
--- a/tests/Rct567/DomQuery/Tests/DomQueryTest.php
+++ b/tests/Rct567/DomQuery/Tests/DomQueryTest.php
@@ -163,11 +163,17 @@ class DomQueryTest extends \PHPUnit\Framework\TestCase
{
$dom = new DomQuery;
$this->assertEquals(0, count($dom));
+ $this->assertNull($dom[0]);
$this->assertFalse(isset($dom[0]));
$this->assertEquals(0, count($dom->find('*')));
$this->assertEquals(0, count($dom->children()));
$this->assertNull($dom->get(0));
+ $this->assertNull($dom->getDocument());
+ $this->assertNull($dom->getXpathQuery());
+ $this->assertNull($dom->getCssQuery());
+ $this->assertNull($dom->getCssQuery());
+
$num = 0;
foreach ($dom as $node) {
$num++; | add more assertion for test empty domquery instance | Rct567_DomQuery | train |
a8c4608fe93af50b9c40414b14629c8aed3486a5 | diff --git a/climateConops.py b/climateConops.py
index <HASH>..<HASH> 100755
--- a/climateConops.py
+++ b/climateConops.py
@@ -1,5 +1,5 @@
-# -*- coding: utf-8 -*-
#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
""" Michael Hirsch
crude thermal budget for outdoor enclosure
we consider arbitrarily two worst case dates: | ordering of utf-8 prefix | scivision_histutils | train |
79ae68bc3bda3c1230bea42f2158f6781a0a1d03 | diff --git a/openquake/engine/calculators/hazard/event_based/core.py b/openquake/engine/calculators/hazard/event_based/core.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/calculators/hazard/event_based/core.py
+++ b/openquake/engine/calculators/hazard/event_based/core.py
@@ -122,7 +122,7 @@ def compute_ruptures(
hc = models.HazardCalculation.objects.get(oqjob=job_id)
all_ses = range(1, hc.ses_per_logic_tree_path + 1)
- imts = sorted(map(from_string, hc.intensity_measure_types))
+ imts = map(from_string, hc.intensity_measure_types)
params = dict(
correl_model=general.get_correl_model(hc),
truncation_level=hc.truncation_level, | Removed sorting of the IMTs breaking qatest_1 | gem_oq-engine | train |
ce45015c1673c3b66f857cf37658f45335c1f770 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -117,10 +117,6 @@ class F1React extends React.Component {
// we'll initialize f1 with the current state passed in
componentDidMount() {
this.state.ui.init(this.props.state);
-
- if(this.props.onF1) {
- this.props.onF1(this.state.ui);
- }
}
// we'll need to update f1 based on the state we should go to | Removed onF1 as it will produce components have related dependencies and are not isolatd | Jam3_react-f1 | train |
5ef1b7a00479004d264f1ff6c050eb21847945f3 | diff --git a/code/actions/AssignUsersToWorkflowAction.php b/code/actions/AssignUsersToWorkflowAction.php
index <HASH>..<HASH> 100644
--- a/code/actions/AssignUsersToWorkflowAction.php
+++ b/code/actions/AssignUsersToWorkflowAction.php
@@ -12,7 +12,7 @@ class AssignUsersToWorkflowAction extends WorkflowAction {
public static $db = array(
'AssignInitiator' => 'Boolean',
);
-
+
public static $many_many = array(
'Users' => 'Member',
'Groups' => 'Group'
@@ -44,8 +44,8 @@ class AssignUsersToWorkflowAction extends WorkflowAction {
return $fields;
}
- public function fieldLabels() {
- return array_merge(parent::fieldLabels(), array(
+ public function fieldLabels($relations = true) {
+ return array_merge(parent::fieldLabels($relations), array(
'AssignUsers' => _t('AssignUsersToWorkflowAction.ASSIGNUSERS', 'Assign Users'),
'Users' => _t('AssignUsersToWorkflowAction.USERS', 'Users'),
'Groups' => _t('AssignUsersToWorkflowAction.GROUPS', 'Groups'),
diff --git a/code/actions/NotifyUsersWorkflowAction.php b/code/actions/NotifyUsersWorkflowAction.php
index <HASH>..<HASH> 100644
--- a/code/actions/NotifyUsersWorkflowAction.php
+++ b/code/actions/NotifyUsersWorkflowAction.php
@@ -50,8 +50,8 @@ class NotifyUsersWorkflowAction extends WorkflowAction {
return $fields;
}
- public function fieldLabels() {
- return array_merge(parent::fieldLabels(), array(
+ public function fieldLabels($relations = true) {
+ return array_merge(parent::fieldLabels($relations), array(
'NotificationEmail' => _t('NotifyUsersWorkflowAction.NOTIFICATIONEMAIL', 'Notification Email'),
'NotificationNote' => _t('NotifyUsersWorkflowAction.NOTIFICATIONNOTE',
'All users attached to the workflow will be sent an email when this action is run.'),
diff --git a/code/actions/SimpleApprovalWorkflowAction.php b/code/actions/SimpleApprovalWorkflowAction.php
index <HASH>..<HASH> 100644
--- a/code/actions/SimpleApprovalWorkflowAction.php
+++ b/code/actions/SimpleApprovalWorkflowAction.php
@@ -15,7 +15,7 @@ class SimpleApprovalWorkflowAction extends WorkflowAction {
public static $icon = 'advancedworkflow/images/approval.png';
- public function execute() {
+ public function execute(WorkflowInstance $workflow) {
// we don't need to do anything for this execution,
// as we're relying on the fact that there's at least 2 outbound transitions
// which will cause the workflow to block and wait.
diff --git a/code/extensions/AdvancedWorkflowExtension.php b/code/extensions/AdvancedWorkflowExtension.php
index <HASH>..<HASH> 100644
--- a/code/extensions/AdvancedWorkflowExtension.php
+++ b/code/extensions/AdvancedWorkflowExtension.php
@@ -7,7 +7,7 @@
* @license BSD License (http://silverstripe.org/bsd-license/)
* @package advancedworkflow
*/
-class AdvancedWorkflowExtension extends LeftAndMainDecorator {
+class AdvancedWorkflowExtension extends LeftAndMainExtension {
public function startworkflow($data, $form, $request) {
$item = $form->getRecord();
diff --git a/code/extensions/WorkflowApplicable.php b/code/extensions/WorkflowApplicable.php
index <HASH>..<HASH> 100644
--- a/code/extensions/WorkflowApplicable.php
+++ b/code/extensions/WorkflowApplicable.php
@@ -8,8 +8,8 @@
* @license BSD License (http://silverstripe.org/bsd-license/)
* @package advancedworkflow
*/
-class WorkflowApplicable extends DataObjectDecorator {
-
+class WorkflowApplicable extends DataExtension {
+
/**
*
* A cache var for the current workflow instance
@@ -17,8 +17,8 @@ class WorkflowApplicable extends DataObjectDecorator {
* @var WorkflowInstance
*/
protected $currentInstance;
-
- public function extraStatics() {
+
+ public function extraStatics($class = null, $extension = null) {
return array(
'has_one' => array(
'WorkflowDefinition' => 'WorkflowDefinition',
diff --git a/code/extensions/WorkflowEmbargoExpiryExtension.php b/code/extensions/WorkflowEmbargoExpiryExtension.php
index <HASH>..<HASH> 100644
--- a/code/extensions/WorkflowEmbargoExpiryExtension.php
+++ b/code/extensions/WorkflowEmbargoExpiryExtension.php
@@ -6,8 +6,8 @@
* @author [email protected]
* @license BSD License http://silverstripe.org/bsd-license/
*/
-class WorkflowEmbargoExpiryExtension extends DataObjectDecorator {
-
+class WorkflowEmbargoExpiryExtension extends DataExtension {
+
public function extraStatics() {
return array(
'db' => array(
diff --git a/code/services/WorkflowService.php b/code/services/WorkflowService.php
index <HASH>..<HASH> 100644
--- a/code/services/WorkflowService.php
+++ b/code/services/WorkflowService.php
@@ -66,7 +66,7 @@ class WorkflowService implements PermissionProvider {
* @return DataObjectSet
*/
public function getDefinitions() {
- return DataObject::get('WorkflowDefinition');
+ return DataList::create('WorkflowDefinition');
}
/** | Fixed strict errors and removed deprecated usage. | symbiote_silverstripe-advancedworkflow | train |
0c4142c927cddf3c28f16eb5f03c6c797a160779 | diff --git a/tests/Unit/InstantiableTypesTest/InstantiableTypesTest.php b/tests/Unit/InstantiableTypesTest/InstantiableTypesTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/InstantiableTypesTest/InstantiableTypesTest.php
+++ b/tests/Unit/InstantiableTypesTest/InstantiableTypesTest.php
@@ -9,8 +9,10 @@ use Rebing\GraphQL\Tests\TestCase;
class InstantiableTypesTest extends TestCase
{
- public function testSomething(): void
+ public function testDateFunctions(): void
{
+ Carbon::setTestNow('2020-06-05 12:34:56');
+
$query = <<<'GRAQPHQL'
{
user {
@@ -24,8 +26,8 @@ GRAQPHQL;
$result = $this->graphql($query);
- $dateOfBirth = Carbon::now()->addMonth()->startOfDay();
- $createdAt = Carbon::now()->startOfDay();
+ $dateOfBirth = Carbon::today()->addMonth();
+ $createdAt = Carbon::today();
$expectedResult = [
'data' => [ | Freeze the time to trigger date test | rebing_graphql-laravel | train |
dfa0619b9e55058b9ca1e1e45432d8cfea8401be | diff --git a/tests/integration/files/file/base/tojson/init.sls b/tests/integration/files/file/base/tojson/init.sls
index <HASH>..<HASH> 100644
--- a/tests/integration/files/file/base/tojson/init.sls
+++ b/tests/integration/files/file/base/tojson/init.sls
@@ -1,4 +1,4 @@
-{%- set data = '{"Der Zucker": "süß", "Die Webseite": "https://saltstack.com"}'|load_json -%}
+{%- set data = '{"Der Zucker": "süß", "Die Webseite": "https://saltproject.io"}'|load_json -%}
{{ pillar['tojson-file'] }}:
file.managed:
- source: salt://tojson/template.jinja
diff --git a/tests/integration/states/test_file.py b/tests/integration/states/test_file.py
index <HASH>..<HASH> 100644
--- a/tests/integration/states/test_file.py
+++ b/tests/integration/states/test_file.py
@@ -869,7 +869,7 @@ class FileTest(ModuleCase, SaltReturnAssertsMixin):
managed = salt.utils.stringutils.to_unicode(test_file.read_bytes())
expected = dedent(
"""\
- Die Webseite ist https://saltstack.com.
+ Die Webseite ist https://saltproject.io.
Der Zucker ist süß.
"""
diff --git a/tests/support/pytest/helpers.py b/tests/support/pytest/helpers.py
index <HASH>..<HASH> 100644
--- a/tests/support/pytest/helpers.py
+++ b/tests/support/pytest/helpers.py
@@ -582,7 +582,7 @@ class FakeSaltExtension:
author = Pedro
author_email = [email protected]
keywords = salt-extension
- url = http://saltstack.com
+ url = http://saltproject.io
license = Apache Software License 2.0
classifiers =
Programming Language :: Python
diff --git a/tests/unit/utils/test_jinja.py b/tests/unit/utils/test_jinja.py
index <HASH>..<HASH> 100644
--- a/tests/unit/utils/test_jinja.py
+++ b/tests/unit/utils/test_jinja.py
@@ -1394,8 +1394,7 @@ class TestCustomExtensions(TestCase):
"""
urls = (
# These cannot be HTTPS urls since urllib2 chokes on those
- "http://saltstack.com",
- "http://community.saltstack.com",
+ "http://saltproject.io",
"http://google.com",
"http://duckduckgo.com",
) | Replace `saltstack.com` with `saltproject.io` on URLs being tested | saltstack_salt | train |
17d4a7d05babe9407aa40970c3892b304e585149 | diff --git a/views/datagrid.blade.php b/views/datagrid.blade.php
index <HASH>..<HASH> 100644
--- a/views/datagrid.blade.php
+++ b/views/datagrid.blade.php
@@ -42,7 +42,5 @@
@if ($dg->havePagination())
- <div class="pagination">
{!! $dg->links() !!}
- </div>
@endif | default pagination presenter already has pagination class | zofe_rapyd-laravel | train |
e224fa2c2b76fc7eb7356056ea1de6cc6ca84a5d | diff --git a/src/wcmf/application/views/plugins/block.assetic.php b/src/wcmf/application/views/plugins/block.assetic.php
index <HASH>..<HASH> 100644
--- a/src/wcmf/application/views/plugins/block.assetic.php
+++ b/src/wcmf/application/views/plugins/block.assetic.php
@@ -21,6 +21,7 @@ use Assetic\Asset\FileAsset;
use Assetic\Asset\StringAsset;
use Assetic\AssetWriter;
use Assetic\Cache\FilesystemCache;
+use Assetic\Filter\CssRewriteFilter;
use Minifier\MinFilter;
if (!class_exists('Assetic\Asset\AssetCollection')) {
@@ -63,7 +64,7 @@ function smarty_block_assetic($params, $content, Smarty_Internal_Template $templ
else {
$result = '';
- // parse urls and group resource by extension and minified state
+ // parse urls and group resources by extension and minified state
$resources = array();
$urls = StringUtil::getUrls($content);
foreach ($urls as $url) {
@@ -73,41 +74,57 @@ function smarty_block_assetic($params, $content, Smarty_Internal_Template $templ
if (!isset($resources[$extension])) {
$resources[$extension] = array('min' => array(), 'src' => array());
}
- $resources[$extension][$min ? 'min' : 'src'][] = new FileAsset($url);
+ $resources[$extension][$min ? 'min' : 'src'][] = $url;
}
// setup assetic
$config = ObjectFactory::getInstance('configuration');
- $cacheDir = WCMF_BASE.$config->getValue('cacheDir', 'View').'cache';
+ $basePath = dirname(FileUtil::realpath($_SERVER['SCRIPT_FILENAME'])).'/';
+ $cacheRootAbs = WCMF_BASE.$config->getValue('cacheDir', 'View').'cache';
+ $cacheRootRel = URIUtil::makeRelative($cacheRootAbs, $basePath);
// process resources
foreach ($resources as $type => $files) {
- $filesystem = new FilesystemCache($cacheDir);
- $writer = new AssetWriter($cacheDir);
- $hasSrcFiles = sizeof($files['src']) > 0;
- $hasMinFiles = sizeof($files['min']) > 0;
+ $filesystem = new FilesystemCache($cacheRootAbs);
+ $writer = new AssetWriter($cacheRootAbs);
- $minAssets = $hasMinFiles ? $files['min'] : array();
- if ($hasSrcFiles) {
- $srcCollection = new AssetCollection($files['src'], array(new MinFilter($type)));
- $minAssets[] = new StringAsset($srcCollection->dump());
+ $cacheFile = (isset($params['name']) ? $params['name'] : uniqid()).'.min.'.$type;
+ $cachePathRel = $cacheRootRel.'/'.$cacheFile;
+
+ // create filters
+ $filters = array();
+ if ($type == 'css') {
+ $filters[] = new CssRewriteFilter();
}
- $minCollection = new AssetCollection($minAssets);
- $filename = (isset($params['name']) ? $params['name'] : uniqid()).'.min.'.$type;
+ $minFilters = array_merge($filters, array(new MinFilter($type)));
+ // create string assets from files (sourcePath and targetPath must be
+ // set correctly in order to make CssRewriteFilter work)
+ $minAssets = array();
+ foreach ($files['min'] as $file) {
+ $asset = new FileAsset($file, $filters, '', $file);
+ $asset->setTargetPath($cachePathRel);
+ $minAssets[] = new StringAsset($asset->dump());
+ }
+ foreach ($files['src'] as $file) {
+ $asset = new FileAsset($file, $minFilters, '', $file);
+ $asset->setTargetPath($cachePathRel);
+ $minAssets[] = new StringAsset($asset->dump());
+ }
+
+ // write collected assets into cached file
+ $minCollection = new AssetCollection($minAssets);
$cache = new AssetCache($minCollection, $filesystem);
- $cache->setTargetPath($filename);
+ $cache->setTargetPath($cacheFile);
$writer->writeAsset($cache);
- $url = URIUtil::makeRelative($cacheDir.'/'.$filename,
- dirname(FileUtil::realpath($_SERVER['SCRIPT_FILENAME'])).'/');
-
+ // create html tag
switch ($type) {
case 'js':
- $tag = '<script src="'.$url.'"></script>';
+ $tag = '<script src="'.$cachePathRel.'"></script>';
break;
case 'css':
- $tag = '<link rel="stylesheet" href="'.$url.'">';
+ $tag = '<link rel="stylesheet" href="'.$cachePathRel.'">';
break;
}
$result .= $tag; | use CssRewriteFilter for assetic | iherwig_wcmf | train |
746ed70b2b40ec76c90a6db58f1f97ea87ebd3e7 | diff --git a/demo/index.php b/demo/index.php
index <HASH>..<HASH> 100644
--- a/demo/index.php
+++ b/demo/index.php
@@ -15,7 +15,7 @@ $feed = new SimplePie();
if (isset($_GET['feed']) && $_GET['feed'] !== '')
{
// Strip slashes if magic quotes is enabled (which automatically escapes certain characters)
- if (get_magic_quotes_gpc())
+ if (function_exists('get_magic_quotes_gpc') && get_magic_quotes_gpc())
{
$_GET['feed'] = stripslashes($_GET['feed']);
} | PHP6 doesn't have get_magic_quotes_gpc() | simplepie_simplepie | train |
afc6defc2392c797042cb6bc73188de1ab147a12 | diff --git a/packages/veritone-react-common/src/components/FilePicker/FileList/FileListItem.js b/packages/veritone-react-common/src/components/FilePicker/FileList/FileListItem.js
index <HASH>..<HASH> 100644
--- a/packages/veritone-react-common/src/components/FilePicker/FileList/FileListItem.js
+++ b/packages/veritone-react-common/src/components/FilePicker/FileList/FileListItem.js
@@ -43,6 +43,10 @@ class FileListItem extends Component {
}
if (/^image\//gi.test(mime.lookup(file.name))) {
fileReader.readAsDataURL(file);
+ } else {
+ this.setState({
+ dataUrl: ""
+ });
}
} | Fixed a bug where file images were retaining the old image. | veritone_veritone-sdk | train |
4436cd0387d6239cf17f57d2ab90443b6be609b7 | diff --git a/test/lib/viewports/web-mercator-viewport.spec.js b/test/lib/viewports/web-mercator-viewport.spec.js
index <HASH>..<HASH> 100644
--- a/test/lib/viewports/web-mercator-viewport.spec.js
+++ b/test/lib/viewports/web-mercator-viewport.spec.js
@@ -18,7 +18,6 @@
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
-import {WebMercatorViewport} from 'deck.gl/lib/viewports';
import test from 'tape-catch';
import {vec2, vec3} from 'gl-matrix';
import {WebMercatorViewport} from 'deck.gl'; | Remove a leftover line from rebase in previous commit | uber_deck.gl | train |
b387dcacc26f7427152d9de087093d668f2071d7 | diff --git a/qtpylib/broker.py b/qtpylib/broker.py
index <HASH>..<HASH> 100644
--- a/qtpylib/broker.py
+++ b/qtpylib/broker.py
@@ -213,14 +213,14 @@ class Broker():
# find specific name
if self.blotter_name is not None: # and self.blotter_name != 'auto-detect':
- args_cache_file = tempfile.gettempdir()+"/"+self.blotter_name.lower()+".ezq"
+ args_cache_file = tempfile.gettempdir()+"/"+self.blotter_name.lower()+".qtpylib"
if not os.path.exists(args_cache_file):
print("[ERROR] Cannot connect to running Blotter [%s]" % (self.blotter_name))
sys.exit(0)
# no name provided - connect to last running
else:
- blotter_files = sorted(glob.glob(tempfile.gettempdir()+"/*.ezq"), key=os.path.getmtime)
+ blotter_files = sorted(glob.glob(tempfile.gettempdir()+"/*.qtpylib"), key=os.path.getmtime)
if len(blotter_files) == 0:
print("[ERROR] Cannot connect to running Blotter [%s]" % (self.blotter_name))
sys.exit(0) | tmp file extension changed to..qtpylib | ranaroussi_qtpylib | train |
f6c904bb81541cb17a4bee8647432293991ba550 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,13 +13,13 @@ setup(
long_description_content_type='text/markdown',
include_package_data=True,
install_requires=[
- 'django>=2.0,<2.1a1',
'beautifulsoup4>=4.6.0<5.0.0',
'directory-constants>=17.0.0,<18.0.0',
'jsonschema==3.0.1'
],
extras_require={
'test': [
+ 'django>=2.0,<2.1a1',
'ansicolors==1.1.8',
'codecov==2.0.9',
'flake8==3.0.4',
@@ -34,6 +34,7 @@ setup(
'setuptools>=38.6.0,<39.0.0'
],
'demo': [
+ 'django>=2.0,<2.1a1',
'django-environ==0.4.5',
'gunicorn==19.5.0',
'whitenoise==3.3.1', | Remove django version pin from install_requires, as this version should be compatible with other apps using django <I> as well as <I> | uktrade_directory-components | train |
5c573ee75f2292a4634d4e67be78dd8726e51523 | diff --git a/Readme.md b/Readme.md
index <HASH>..<HASH> 100644
--- a/Readme.md
+++ b/Readme.md
@@ -51,6 +51,7 @@ The `options` argument allows you to customize the client with the following pro
- request: to override the [request](https://github.com/request/request) module.
- httpClient: to provide your own http client that implements `request(rurl, data, callback, exheaders, exoptions)`.
- forceSoap12Headers: to set proper headers for SOAP v1.2
+- envelopeKey: to set specific key instead of <pre><<b>soap</b>:Body></<b>soap</b>:Body></pre>
- wsdl_options: custom options for the request module on WSDL requests.
- wsdl_headers: custom HTTP headers to be sent on WSDL requests.
diff --git a/lib/client.js b/lib/client.js
index <HASH>..<HASH> 100644
--- a/lib/client.js
+++ b/lib/client.js
@@ -120,6 +120,7 @@ Client.prototype._initializeServices = function(endpoint) {
Client.prototype._initializeOptions = function(options) {
this.wsdl.options.attributesKey = options.attributesKey || 'attributes';
+ this.wsdl.options.envelopeKey = options.envelopeKey || 'soap';
this.wsdl.options.forceSoap12Headers = !!options.forceSoap12Headers;
};
@@ -164,6 +165,7 @@ Client.prototype._invoke = function(method, args, location, callback, options, e
output = method.output,
style = method.style,
defs = this.wsdl.definitions,
+ envelopeKey = this.wsdl.options.envelopeKey,
ns = defs.$targetNamespace,
encoding = '',
message = '',
@@ -174,11 +176,11 @@ Client.prototype._invoke = function(method, args, location, callback, options, e
headers = {
"Content-Type": "text/xml; charset=utf-8"
},
- xmlnsSoap = "xmlns:soap=\"http://schemas.xmlsoap.org/soap/envelope/\"";
+ xmlnsSoap = "xmlns:" + envelopeKey + "=\"http://schemas.xmlsoap.org/soap/envelope/\"";
if (this.wsdl.options.forceSoap12Headers) {
headers["Content-Type"] = "application/soap+xml; charset=utf-8";
- xmlnsSoap = "xmlns:soap=\"http://www.w3.org/2003/05/soap-envelope\"";
+ xmlnsSoap = "xmlns:" + envelopeKey + "=\"http://www.w3.org/2003/05/soap-envelope\"";
}
if (this.SOAPAction) {
@@ -217,28 +219,28 @@ Client.prototype._invoke = function(method, args, location, callback, options, e
message = self.wsdl.objectToDocumentXML(input.$name, args, input.targetNSAlias, input.targetNamespace, (input.$type || input.$lookupType));
}
xml = "<?xml version=\"1.0\" encoding=\"utf-8\"?>" +
- "<soap:Envelope " +
+ "<" + envelopeKey + ":Envelope " +
xmlnsSoap + " " +
"xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\" " +
encoding +
this.wsdl.xmlnsInEnvelope + '>' +
((self.soapHeaders || self.security) ?
(
- "<soap:Header>" +
+ "<" + envelopeKey + ":Header>" +
(self.soapHeaders ? self.soapHeaders.join("\n") : "") +
(self.security && !self.security.postProcess ? self.security.toXML() : "") +
- "</soap:Header>"
+ "</" + envelopeKey + ":Header>"
)
:
''
) +
- "<soap:Body" +
+ "<" + envelopeKey + ":Body" +
(self.bodyAttributes ? self.bodyAttributes.join(' ') : '') +
(self.security && self.security.postProcess ? " Id='_0'" : '') +
">" +
message +
- "</soap:Body>" +
- "</soap:Envelope>";
+ "</" + envelopeKey + ":Body>" +
+ "</" + envelopeKey + ":Envelope>";
if(self.security && self.security.postProcess){
xml = self.security.postProcess(xml);
diff --git a/test/client-test.js b/test/client-test.js
index <HASH>..<HASH> 100644
--- a/test/client-test.js
+++ b/test/client-test.js
@@ -74,6 +74,18 @@ describe('SOAP Client', function() {
});
});
+ it('should allow customization of envelope', function(done) {
+ soap.createClient(__dirname+'/wsdl/default_namespace.wsdl', {envelopeKey: 'soapenv'}, function(err, client) {
+ assert.ok(client);
+ assert.ok(!err);
+
+ client.MyOperation({}, function(err, result) {
+ assert.notEqual(client.lastRequest.indexOf('xmlns:soapenv='), -1);
+ done();
+ });
+ });
+ });
+
it('should set binding style to "document" by default if not explicitly set in WSDL, per SOAP spec', function (done) {
soap.createClient(__dirname+'/wsdl/binding_document.wsdl', function(err, client) {
assert.ok(client);
@@ -272,7 +284,7 @@ describe('SOAP Client', function() {
done();
});
});
-
+
it('should add http headers', function(done) {
soap.createClient(__dirname+'/wsdl/default_namespace.wsdl', function(err, client) {
assert.ok(client); | Allow to set a custom envelop | vpulim_node-soap | train |
79b535f920f4e49a9ec5d5adf8a142414f423e6f | diff --git a/lib/mwlib/src/MW/Unittest/Testcase.php b/lib/mwlib/src/MW/Unittest/Testcase.php
index <HASH>..<HASH> 100644
--- a/lib/mwlib/src/MW/Unittest/Testcase.php
+++ b/lib/mwlib/src/MW/Unittest/Testcase.php
@@ -110,12 +110,4 @@ class MW_Unittest_Testcase extends PHPUnit_Framework_TestCase
parent::assertThat($actual, parent::isEmpty(), $message);
}
}
-
-
- /**
- * Empty test for compatibility with phpunit 3.4.
- */
- public function testEmpty()
- {
- }
} | Removes workaround for phpunit <I> as it's no longer supported and creates a lot of additional, empty tests | Arcavias_arcavias-core | train |
9d6d0e03c2feb5da752cc826ea552e9da22f061a | diff --git a/src/objectliterals.jsdoc b/src/objectliterals.jsdoc
index <HASH>..<HASH> 100644
--- a/src/objectliterals.jsdoc
+++ b/src/objectliterals.jsdoc
@@ -363,7 +363,7 @@
/**
* @typedef {Object} olx.interaction.DrawOptions
- * @property {ol.layer.Vector|undefined} layer Destination layer for the features.
+ * @property {ol.source.Vector|undefined} source Destination source for the features.
* @property {number|undefined} snapTolerance Pixel distance for snapping to the
* drawing finish (default is 12).
* @property {ol.geom.GeometryType} type Drawing type ('Point', 'LineString',
diff --git a/src/ol/interaction/drawinteraction.js b/src/ol/interaction/drawinteraction.js
index <HASH>..<HASH> 100644
--- a/src/ol/interaction/drawinteraction.js
+++ b/src/ol/interaction/drawinteraction.js
@@ -75,11 +75,11 @@ ol.interaction.Draw = function(opt_options) {
goog.base(this);
/**
- * Target layer for drawn features.
- * @type {ol.layer.Vector}
+ * Target source for drawn features.
+ * @type {ol.source.Vector}
* @private
*/
- this.layer_ = goog.isDef(opt_options.layer) ? opt_options.layer : null;
+ this.source_ = goog.isDef(opt_options.source) ? opt_options.source : null;
/**
* Pixel distance for snapping.
@@ -467,10 +467,8 @@ ol.interaction.Draw.prototype.finishDrawing_ = function(event) {
sketchFeature.setGeometry(new ol.geom.MultiPolygon([coordinates]));
}
- if (!goog.isNull(this.layer_)) {
- var vectorSource = this.layer_.getSource();
- goog.asserts.assertInstanceof(vectorSource, ol.source.Vector);
- vectorSource.addFeature(sketchFeature);
+ if (!goog.isNull(this.source_)) {
+ this.source_.addFeature(sketchFeature);
}
this.dispatchEvent(new ol.DrawEvent(ol.DrawEventType.DRAWEND,
this.sketchFeature_));
diff --git a/test/spec/ol/interaction/drawinteraction.test.js b/test/spec/ol/interaction/drawinteraction.test.js
index <HASH>..<HASH> 100644
--- a/test/spec/ol/interaction/drawinteraction.test.js
+++ b/test/spec/ol/interaction/drawinteraction.test.js
@@ -1,7 +1,7 @@
goog.provide('ol.test.interaction.Draw');
describe('ol.interaction.Draw', function() {
- var target, map, source, layer;
+ var target, map, source;
var width = 360;
var height = 180;
@@ -16,7 +16,7 @@ describe('ol.interaction.Draw', function() {
style.height = height + 'px';
document.body.appendChild(target);
source = new ol.source.Vector();
- layer = new ol.layer.Vector({source: source});
+ var layer = new ol.layer.Vector({source: source});
map = new ol.Map({
target: target,
renderer: ol.RendererHint.CANVAS,
@@ -57,7 +57,7 @@ describe('ol.interaction.Draw', function() {
it('creates a new interaction', function() {
var draw = new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.POINT
});
expect(draw).to.be.a(ol.interaction.Draw);
@@ -71,7 +71,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
draw = new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.POINT
});
map.addInteraction(draw);
@@ -119,7 +119,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
map.addInteraction(new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.MULTI_POINT
}));
});
@@ -143,7 +143,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
draw = new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.LINE_STRING
});
map.addInteraction(draw);
@@ -241,7 +241,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
map.addInteraction(new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.MULTI_LINE_STRING
}));
});
@@ -278,7 +278,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
draw = new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.POLYGON
});
map.addInteraction(draw);
@@ -360,7 +360,7 @@ describe('ol.interaction.Draw', function() {
beforeEach(function() {
map.addInteraction(new ol.interaction.Draw({
- layer: layer,
+ source: source,
type: ol.geom.GeometryType.MULTI_POLYGON
}));
}); | Configure draw interaction with a vector source | openlayers_openlayers | train |
e63cb983f71699ac4749215d5884527d9dc9280a | diff --git a/lib/driver/mysql.js b/lib/driver/mysql.js
index <HASH>..<HASH> 100644
--- a/lib/driver/mysql.js
+++ b/lib/driver/mysql.js
@@ -113,6 +113,14 @@ var MysqlDriver = Base.extend({
constraint.push('UNIQUE');
}
+ if (spec.engine && typeof(spec.engine) === 'string') {
+ constraint.push('ENGINE=\'' + spec.engine + '\'')
+ }
+
+ if (spec.rowFormat && typeof(spec.rowFormat) === 'string') {
+ constraint.push('ROW_FORMAT=\'' + spec.rowFormat + '\'')
+ }
+
if (spec.null || spec.notNull === false) {
constraint.push('NULL');
} | add engine and row format to mysql | db-migrate_node-db-migrate | train |
ecbcff551585d02ae095c6d28ef2ea9cec49bdee | diff --git a/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/aggregator.go b/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/aggregator.go
index <HASH>..<HASH> 100644
--- a/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/aggregator.go
+++ b/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/aggregator.go
@@ -104,6 +104,8 @@ func createAggregatorConfig(
EnableAggregatedDiscoveryTimeout: utilfeature.DefaultFeatureGate.Enabled(kubefeatures.EnableAggregatedDiscoveryTimeout),
},
}
+ // we need to clear the poststarthooks so we don't add them multiple times to all the servers (that fails)
+ aggregatorConfig.GenericConfig.PostStartHooks = map[string]genericapiserver.PostStartHookConfigEntry{}
return aggregatorConfig, nil
}
diff --git a/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/apiextensions.go b/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/apiextensions.go
index <HASH>..<HASH> 100644
--- a/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/apiextensions.go
+++ b/vendor/k8s.io/kubernetes/cmd/kube-apiserver/app/apiextensions.go
@@ -88,6 +88,9 @@ func createAPIExtensionsConfig(
},
}
+ // we need to clear the poststarthooks so we don't add them multiple times to all the servers (that fails)
+ apiextensionsConfig.GenericConfig.PostStartHooks = map[string]genericapiserver.PostStartHookConfigEntry{}
+
return apiextensionsConfig, nil
} | UPSTREAM: <drop>: don't add the same poststarthook in kube-apiserver | openshift_origin | train |
58f96c42b5cb6c814ca2f7b546973fb50f261397 | diff --git a/src/walker/reassemble_stree.js b/src/walker/reassemble_stree.js
index <HASH>..<HASH> 100644
--- a/src/walker/reassemble_stree.js
+++ b/src/walker/reassemble_stree.js
@@ -28,6 +28,7 @@ goog.require('sre.Semantic');
// Note that reassemble tree will not give you exactly the original tree, as the
// mathml nodes and mathml tree components can not be reconstructed.
+//TODO: This is probably not a class but just a utility procedure.
/**
* @constructor
* @param {!Node} mathml The enriched MathML node.
@@ -45,7 +46,6 @@ sre.ReassembleStree = function(mathml) {
var dp = new sre.SystemExternal.xmldom.DOMParser();
var xml = dp.parseFromString('<stree></stree>', 'text/xml');
- //xml.childNodes[0].appendChild();
return this.stree;
};
//goog.addSingletonGetter(sre.ReassembleStree);
@@ -61,7 +61,6 @@ sre.ReassembleStree.prototype.makeTree = function() {
sre.ReassembleStree.prototype.assembleTree = function(node) {
- console.log(node.toString());
// if (this.frontier.length === 0) return;
// var current = this.frontier.shift();
var snode = sre.ReassembleStree.makeNode(node);
@@ -69,21 +68,66 @@ sre.ReassembleStree.prototype.assembleTree = function(node) {
sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.CHILDREN));
var content = sre.WalkerUtil.splitAttribute(
sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.CONTENT));
- snode.content = content.map(goog.bind(this.assembleTree, this));
- snode.children = children.map(goog.bind(this.assembleTree, this));
+ if (content.length === 0 && children.length === 0) {
+ snode.textContent = node.textContent;
+ return snode;
+ }
+ if (content.length > 0) {
+ var fcontent = sre.WalkerUtil.getBySemanticId(node, content[0]);
+ if (fcontent) {
+ var operator = sre.WalkerUtil.splitAttribute(
+ sre.WalkerUtil.getAttribute(
+ fcontent, sre.EnrichMathml.Attribute.OPERATOR));
+ if (operator.length > 1) {
+ snode.textContent = operator[1];
+ }
+ }
+ }
+ var setParent = function(n) {
+ var mml = sre.WalkerUtil.getBySemanticId(node, n);
+ var sn = this.assembleTree(mml);
+ sn.parent = snode;
+ return sn;
+ };
+ snode.contentNodes = content.map(goog.bind(setParent, this));
+ snode.childNodes = children.map(goog.bind(setParent, this));
return snode;
};
sre.ReassembleStree.makeNode = function(node) {
- console.log(sre.EnrichMathml.Attribute.TYPE);
var type = sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.TYPE);
var role = sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.ROLE);
var font = sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.FONT);
var id = sre.WalkerUtil.getAttribute(node, sre.EnrichMathml.Attribute.ID);
+ var embellished = sre.WalkerUtil.getAttribute(
+ node, sre.EnrichMathml.Attribute.EMBELLISHED);
+ var fencepointer = sre.WalkerUtil.getAttribute(
+ node, sre.EnrichMathml.Attribute.FENCEPOINTER);
var snode = new sre.SemanticTree.Node(parseInt(id, 10));
snode.type = /** @type {sre.SemanticAttr.Type} */(type);
snode.role = /** @type {sre.SemanticAttr.Role} */(role);
- snode.font = /** @type {sre.SemanticAttr.Font} */(font);
+ snode.font = font ? /** @type {sre.SemanticAttr.Font} */(font) :
+ sre.SemanticAttr.Font.UNKNOWN;
+ if (fencepointer) {
+ snode.fencePointer = fencepointer;
+ }
+ if (embellished) {
+ snode.embellished = /** @type {sre.SemanticAttr.Type} */(embellished);
+ }
return snode;
};
+
+
+sre.ReassembleStree.experiment__ = function(expr) {
+ var mml = sre.DomUtil.parseInput('<math>' + expr + '</math>');
+ var stree = new sre.SemanticTree(mml);
+ var emml = sre.EnrichMathml.enrich(mml, stree);
+ var reass = new sre.ReassembleStree(emml);
+
+ var str1 = stree.toString();
+ var str2 = reass.toString();
+ console.log(str1);
+ console.log(str2);
+ return str1 === str2;
+}; | First working version of semantic tree reconstruction. Still without treatment of collapse. | zorkow_speech-rule-engine | train |
fb06d0b20d9a39acf3c5da0863b283bf9ba2fee4 | diff --git a/.babelrc.js b/.babelrc.js
index <HASH>..<HASH> 100644
--- a/.babelrc.js
+++ b/.babelrc.js
@@ -6,6 +6,7 @@ module.exports = {
['@babel/plugin-transform-classes', { loose: true }],
['@babel/plugin-transform-destructuring', { loose: true }],
['@babel/plugin-transform-spread', { loose: true }],
+ ['@babel/plugin-transform-for-of', { assumeArray: true }],
],
env: {
cjs: { | Switch all 'for..of' loops to assume they iterating only arrays (#<I>) | graphql_graphql-js | train |
dbc640a153cc53ca99d4ec36efbb0c39e16b62bb | diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -5,6 +5,7 @@ Yii Framework 2 apidoc extension Change Log
-----------------------
- Bug #149: Fixed crash on wrongly formatted API links (cebe, santosh-1265)
+- Bug: Usage of deprecated `yii\base\Object` changed to `yii\base\BaseObject` allowing compatibility with PHP 7.2 (klimov-paul)
- Enh #38: Fixed display of default values given as octal or hex notation (hiqsol)
- Enh: Display TOC only if there is more than one headline (cebe)
- Enh: Extracted markdown code highlighting to a trait `MarkdownHighlightTrait` (cebe)
diff --git a/composer.json b/composer.json
index <HASH>..<HASH> 100644
--- a/composer.json
+++ b/composer.json
@@ -20,7 +20,7 @@
"minimum-stability": "dev",
"require": {
"php": ">=5.4",
- "yiisoft/yii2": "~2.0.4",
+ "yiisoft/yii2": "~2.0.13",
"yiisoft/yii2-bootstrap": "~2.0.0",
"phpdocumentor/reflection": "^3.0.1",
"phpdocumentor/reflection-docblock": "^2.0.4",
diff --git a/models/BaseDoc.php b/models/BaseDoc.php
index <HASH>..<HASH> 100644
--- a/models/BaseDoc.php
+++ b/models/BaseDoc.php
@@ -9,7 +9,7 @@ namespace yii\apidoc\models;
use phpDocumentor\Reflection\DocBlock\Tag\DeprecatedTag;
use phpDocumentor\Reflection\DocBlock\Tag\SinceTag;
-use yii\base\Object;
+use yii\base\BaseObject;
use yii\helpers\StringHelper;
/**
@@ -18,7 +18,7 @@ use yii\helpers\StringHelper;
* @author Carsten Brandt <[email protected]>
* @since 2.0
*/
-class BaseDoc extends Object
+class BaseDoc extends BaseObject
{
/**
* @var \phpDocumentor\Reflection\DocBlock\Context
diff --git a/models/Context.php b/models/Context.php
index <HASH>..<HASH> 100644
--- a/models/Context.php
+++ b/models/Context.php
@@ -362,12 +362,12 @@ class Context extends Component
}
/**
- * Add properties for getters and setters if class is subclass of [[\yii\base\Object]].
+ * Add properties for getters and setters if class is subclass of [[\yii\base\BaseObject]].
* @param ClassDoc $class
*/
protected function handlePropertyFeature($class)
{
- if (!$this->isSubclassOf($class, 'yii\base\Object')) {
+ if (!$this->isSubclassOf($class, 'yii\base\BaseObject')) {
return;
}
foreach ($class->getPublicMethods() as $name => $method) {
diff --git a/models/ParamDoc.php b/models/ParamDoc.php
index <HASH>..<HASH> 100644
--- a/models/ParamDoc.php
+++ b/models/ParamDoc.php
@@ -8,7 +8,7 @@
namespace yii\apidoc\models;
use yii\apidoc\helpers\PrettyPrinter;
-use yii\base\Object;
+use yii\base\BaseObject;
/**
* Represents API documentation information for a [[FunctionDoc|function]] or [[MethodDoc|method]] `param`.
@@ -16,7 +16,7 @@ use yii\base\Object;
* @author Carsten Brandt <[email protected]>
* @since 2.0
*/
-class ParamDoc extends Object
+class ParamDoc extends BaseObject
{
public $name;
public $typeHint; | Usage of deprecated `yii\base\Object` changed to `yii\base\BaseObject` allowing compatibility with PHP <I> | yiisoft_yii2-apidoc | train |
c28da5ef184b002b826c31f71142af7e413848e6 | diff --git a/src/Client/Server/Magento.php b/src/Client/Server/Magento.php
index <HASH>..<HASH> 100644
--- a/src/Client/Server/Magento.php
+++ b/src/Client/Server/Magento.php
@@ -97,10 +97,7 @@ class Magento extends Server
*/
public function userDetails($data, TokenCredentials $tokenCredentials)
{
- // Check if Magento returns authenticated user only.
- // It's done in case if '/api/rest/customers' returned all users in older
- // Magento versions.
- if (count($data) !== 1) {
+ if (!is_array($data) || !count($data)) {
throw new \Exception('Not possible to get user info');
} | No need to check data as it always has current user only.
All magento versions (starting from <I>) are returning
current ApiUser customer only.
See app/code/core/Mage/Customer/Model/Api2/Customer/Rest/Customer/V1.php#L<I> | thephpleague_oauth1-client | train |
d138faa1384e67c67833e64779af48118b7dd744 | diff --git a/testsuite/integration/src/test/java/org/jboss/as/test/integration/ee/appclient/util/AppClientWrapper.java b/testsuite/integration/src/test/java/org/jboss/as/test/integration/ee/appclient/util/AppClientWrapper.java
index <HASH>..<HASH> 100644
--- a/testsuite/integration/src/test/java/org/jboss/as/test/integration/ee/appclient/util/AppClientWrapper.java
+++ b/testsuite/integration/src/test/java/org/jboss/as/test/integration/ee/appclient/util/AppClientWrapper.java
@@ -25,6 +25,8 @@ import java.io.BufferedReader;
import java.io.File;
import java.io.InputStreamReader;
import java.io.PrintWriter;
+import java.util.Enumeration;
+import java.util.Properties;
import java.util.Vector;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.LinkedBlockingQueue;
@@ -172,28 +174,37 @@ public class AppClientWrapper implements Runnable {
} else {
appClientArg = archiveOnDisk.getAbsolutePath() + "#" + clientArchiveName;
}
- //TODO: this seems dodgy
- final File targetDir = new File("../../build/target");
- if ((!targetDir.exists()) || (!targetDir.isDirectory())) throw new Exception("Missing AS target directory.");
- final String[] children = targetDir.list();
- String asDir = null;
- for (final String child : children) {
- if (child.startsWith("jboss-as")) {
- asDir = child;
- break;
- }
+
+ // TODO: Move to a self-test.
+ System.out.println("*** System properties: ***");
+ Properties props = System.getProperties();
+ //props.list( System.out );
+ Enumeration en = props.propertyNames();
+ while( en.hasMoreElements() ){
+ String name = (String) en.nextElement();
+ System.out.println( "\t" + name + " = " + System.getProperty(name) );
}
- if (asDir == null) throw new Exception("Missing AS target directory.");
-
- appClientCommand = "java " +
- "-Djboss.modules.dir=../../build/target/" + asDir + "/modules " +
- "-Djline.WindowsTerminal.directConsole=false " +
- "-jar ./target/jbossas/jboss-modules.jar " +
- "-mp ../../build/target/" + asDir + "/modules " +
- "-logmodule org.jboss.logmanager org.jboss.as.appclient " +
- "-Djboss.server.base.dir=target/jbossas/appclient " +
- "-Djboss.home.dir=target/jbossas " +
- appClientArg + " " + args;
+
+
+ // TODO: Move to a shared testsuite lib.
+ String asDist = System.getProperty("jboss.dist");
+ if( asDist == null ) throw new Exception("'jboss.dist' property is not set.");
+ if( ! new File(asDist).exists() ) throw new Exception("AS dir from 'jboss.dist' doesn't exist: " + asDist + " user.dir: " + System.getProperty("user.dir"));
+
+ // TODO: Move to a shared testsuite lib.
+ String asInst = System.getProperty("jboss.inst");
+ if( asInst == null ) throw new Exception("'jboss.inst' property is not set. Perhaps this test is in a multi-node tests group but runs outside container?");
+ if( ! new File(asInst).exists() ) throw new Exception("AS dir from 'jboss.inst' doesn't exist: " + asInst + " user.dir: " + System.getProperty("user.dir"));
+
+ appClientCommand = "java" +
+ " -Djboss.modules.dir="+ asDist + "/modules" +
+ " -Djline.WindowsTerminal.directConsole=false" +
+ " -jar "+ asDist + "/jboss-modules.jar" +
+ " -mp "+ asDist + "/modules" +
+ " -logmodule org.jboss.logmanager org.jboss.as.appclient" +
+ " -Djboss.server.base.dir="+ asInst + "/appclient" +
+ " -Djboss.home.dir="+ asInst +
+ " " +appClientArg + " " + args;
return appClientCommand;
} | Make AppClientWrapper use ${jboss.inst} instead of relying on relative path. | wildfly_wildfly | train |
9b7a60c3f59853827083063a2f5c1304445f6c1c | diff --git a/packages/styled-components/src/hooks/useTheme.js b/packages/styled-components/src/hooks/useTheme.js
index <HASH>..<HASH> 100644
--- a/packages/styled-components/src/hooks/useTheme.js
+++ b/packages/styled-components/src/hooks/useTheme.js
@@ -2,9 +2,6 @@
import { useContext } from 'react';
import { ThemeContext } from '../models/ThemeProvider';
-const useTheme = () => {
- const outerTheme = useContext(ThemeContext);
- return outerTheme;
-}
+const useTheme = () => useContext(ThemeContext);
export default useTheme; | Make useTheme cleaner (#<I>) | styled-components_styled-components | train |
bbb87829e532d50e31f13ce4adbb8198202ed191 | diff --git a/tests/test_poscar.py b/tests/test_poscar.py
index <HASH>..<HASH> 100644
--- a/tests/test_poscar.py
+++ b/tests/test_poscar.py
@@ -1,17 +1,44 @@
import unittest
from unittest.mock import Mock, patch, mock_open
+from io import StringIO
-import vasppy.poscar
+from vasppy.poscar import Poscar
+from vasppy.cell import Cell
import numpy as np
from collections import Counter
class PoscarTestCase( unittest.TestCase ):
+ def setUp( self ):
+ self.poscar = Poscar()
+ self.poscar.title = "Title"
+ self.poscar.scaling = 1.0
+ self.poscar.cell = Mock( spec=Cell )
+ self.poscar.cell.matrix = np.identity( 3 )
+ self.poscar.atoms = [ 'A' ]
+ self.poscar.atom_numbers = [ 1 ]
+ self.poscar.coordinate_type = 'Direct'
+ self.poscar.coordinates = np.array( [ [ 0.0, 0.0, 0.0 ] ] )
+ self.poscar.selective_dynamics = False
+
def test_stoichiometry( self ):
- poscar = vasppy.poscar.Poscar()
+ poscar = Poscar()
poscar.atoms = [ 'A', 'B', 'C' ]
poscar.atom_numbers = [ 1, 2, 3 ]
self.assertEqual( poscar.stoichiometry, Counter( { 'A': 1, 'B': 2, 'C': 3 } ) )
+ @patch('sys.stdout', new_callable=StringIO)
+ def test_output_header( self, mock_stdout ):
+ self.poscar.output_header()
+ expected_header_string = ("Title\n"
+ "1.0\n"
+ " 1.0000000000 0.0000000000 0.0000000000\n"
+ " 0.0000000000 1.0000000000 0.0000000000\n"
+ " 0.0000000000 0.0000000000 1.0000000000\n"
+ "A\n"
+ "1\n"
+ "Direct\n")
+ self.assertEqual( mock_stdout.getvalue(), expected_header_string )
+
if __name__ == '__main__':
unittest.main()
diff --git a/vasppy/poscar.py b/vasppy/poscar.py
index <HASH>..<HASH> 100644
--- a/vasppy/poscar.py
+++ b/vasppy/poscar.py
@@ -142,22 +142,28 @@ class Poscar:
if opts is None:
opts = {}
if not opts.get( 'coordinates_only' ):
- print( self.title )
- print( self.scaling )
- [ print( ''.join( [' {: .10f}'.format( element ) for element in row ] ) ) for row in self.cell.matrix ]
- print( ' '.join( self.atoms ) )
- print( ' '.join( [ str(n) for n in self.atom_numbers ] ) )
- if opts.get('selective'):
- print( 'Selective Dynamics' )
- print( coordinate_type )
+ self.output_header( self )
self.output_coordinates_only( coordinate_type=coordinate_type, opts=opts )
+ def output_header( self, coordinate_type='Direct', opts=None ):
+ if opts is None:
+ opts = {}
+ print( self.title )
+ print( self.scaling )
+ [ print( ''.join( [' {: .10f}'.format( element ) for element in row ] ) ) for row in self.cell.matrix ]
+ print( ' '.join( self.atoms ) )
+ print( ' '.join( [ str(n) for n in self.atom_numbers ] ) )
+ if opts.get('selective'):
+ print( 'Selective Dynamics' )
+ print( coordinate_type )
+
def write_to( self, filename, coordinate_type='Direct', opts=None ):
if opts is None:
opts = {}
with open( filename, 'w' ) as sys.stdout:
self.output( coordinate_type=coordinate_type, opts=opts )
sys.stdout = sys.__stdout__ # make sure sys.stdout is reset
+
def output_as_xtl( self ):
print( self.title )
print( "CELL" ) | Refactoring of Poscar output methods | bjmorgan_vasppy | train |
775979f655a74857b85e50a1f70477083ee84598 | diff --git a/src/com/google/javascript/jscomp/AbstractCommandLineRunner.java b/src/com/google/javascript/jscomp/AbstractCommandLineRunner.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/AbstractCommandLineRunner.java
+++ b/src/com/google/javascript/jscomp/AbstractCommandLineRunner.java
@@ -318,20 +318,14 @@ public abstract class AbstractCommandLineRunner<A extends Compiler,
protected abstract void addWhitelistWarningsGuard(
CompilerOptions options, File whitelistFile);
- /**
- * Sets options based on the configurations set flags API.
- * Called during the run() run() method.
- * If you want to ignore the flags API, or interpret flags your own way,
- * then you should override this method.
- */
- protected void setRunOptions(CompilerOptions options) throws IOException {
- DiagnosticGroups diagnosticGroups = getDiagnosticGroups();
-
- if (config.warningGuards != null) {
- for (FlagEntry<CheckLevel> entry : config.warningGuards) {
+ protected static void setWarningGuardOptions(
+ CompilerOptions options,
+ ArrayList<FlagEntry<CheckLevel>> warningGuards,
+ DiagnosticGroups diagnosticGroups) {
+ if (warningGuards != null) {
+ for (FlagEntry<CheckLevel> entry : warningGuards) {
if ("*".equals(entry.value)) {
- Set<String> groupNames =
- diagnosticGroups.getRegisteredGroups().keySet();
+ Set<String> groupNames = diagnosticGroups.getRegisteredGroups().keySet();
for (String groupName : groupNames) {
if (!DiagnosticGroups.wildcardExcludedGroups.contains(groupName)) {
diagnosticGroups.setWarningLevel(options, groupName, entry.flag);
@@ -342,6 +336,18 @@ public abstract class AbstractCommandLineRunner<A extends Compiler,
}
}
}
+ }
+
+ /**
+ * Sets options based on the configurations set flags API.
+ * Called during the run() run() method.
+ * If you want to ignore the flags API, or interpret flags your own way,
+ * then you should override this method.
+ */
+ protected void setRunOptions(CompilerOptions options) throws IOException {
+ DiagnosticGroups diagnosticGroups = getDiagnosticGroups();
+
+ setWarningGuardOptions(options, config.warningGuards, diagnosticGroups);
if (!config.warningsWhitelistFile.isEmpty()) {
addWhitelistWarningsGuard(options, new File(config.warningsWhitelistFile)); | Small refactoring in AbstractCommandLineRunner
-------------
Created by MOE: <URL> | google_closure-compiler | train |
40f6736fe567e5cd5f4f1bd32b7774731f60ddc4 | diff --git a/src/test/java/org/kaazing/gateway/transport/ssl/SslAcceptorTest.java b/src/test/java/org/kaazing/gateway/transport/ssl/SslAcceptorTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/org/kaazing/gateway/transport/ssl/SslAcceptorTest.java
+++ b/src/test/java/org/kaazing/gateway/transport/ssl/SslAcceptorTest.java
@@ -233,6 +233,76 @@ public class SslAcceptorTest {
}
}
+ @Test
+ public void shouldNotBindUsingUnknownHostName() throws Exception {
+
+ VirtualHostKeySelectorTest.assumeDNSNameAccessible("one.example.test");
+ thrown.expect(RuntimeException.class);
+ thrown.expectMessage("Keystore does not have a certificate entry for otherhost");
+
+ keyStorePassword = getPassword("keystore.pw");
+ keyStore = getKeyStore("keystore.db");
+ trustStore = getTrustStore("truststore-JCEKS.db");
+
+ TestSecurityContext securityContext = getSecurityContext();
+
+ sslAcceptor = (SslAcceptor)transportFactory.getTransport("ssl").getAcceptor();
+
+ tcpAcceptor = (NioSocketAcceptor)transportFactory.getTransport("tcp").getAcceptor();
+
+ schedulerProvider = new SchedulerProvider();
+
+ sslAcceptor.setBridgeServiceFactory(bridgeServiceFactory);
+ sslAcceptor.setResourceAddressFactory(resourceAddressFactory);
+ sslAcceptor.setSecurityContext(securityContext);
+
+ tcpAcceptor.setSchedulerProvider(schedulerProvider);
+
+ final IoHandlerAdapter<IoSession> acceptHandler = new IoHandlerAdapter<IoSession>() {
+ @Override
+ protected void doSessionOpened(final IoSession session)
+ throws Exception {
+ }
+
+ @Override
+ protected void doMessageReceived(IoSession session,
+ Object message)
+ throws Exception {
+ }
+ };
+
+ Map<String, Object> opts = buildSslOptionsMap();
+
+ URI firstURI = URI.create("ssl://one.example.test:443");
+ ResourceAddress firstAccept =
+ resourceAddressFactory.newResourceAddress(firstURI, opts);
+ sslAcceptor.bind(firstAccept, acceptHandler, null);
+
+ // The opts are mutated on first bind, so build them again
+ opts = buildSslOptionsMap();
+
+ try {
+ opts.put(ResourceAddress.TRANSPORT_URI.name(), "tcp://127.0.0.1:443");
+ URI secondURI = URI.create("ssl://otherhost:443");
+ ResourceAddress secondAccept =
+ resourceAddressFactory.newResourceAddress(secondURI, opts);
+ sslAcceptor.bind(secondAccept, acceptHandler, null);
+
+ } finally {
+ UnbindFuture unbindFuture = sslAcceptor.unbind(firstAccept);
+ unbindFuture.addListener(new IoFutureListener<UnbindFuture>() {
+ @Override
+ public void operationComplete(UnbindFuture future) {
+ schedulerProvider.shutdownNow();
+ }
+ });
+ unbindFuture.awaitUninterruptibly(5, TimeUnit.SECONDS);
+ if (!unbindFuture.isUnbound()) {
+ throw new Exception(String.format("Failed to unbind from %s within 5 seconds", firstAccept));
+ }
+ }
+ }
+
private Map<String, Object> buildSslOptionsMap() {
Map<String, Object> opts = new HashMap<String, Object>();
opts.put(SSL_WANT_CLIENT_AUTH, Boolean.FALSE); | KG-<I>: Add a new test that validates the correct error is received when binding to a host for which there is no certificate. | kaazing_gateway | train |
b9fddb231fba81f6f5b89e80873f50a77f931d7f | diff --git a/cmd/cloud.go b/cmd/cloud.go
index <HASH>..<HASH> 100644
--- a/cmd/cloud.go
+++ b/cmd/cloud.go
@@ -39,6 +39,10 @@ import (
log "github.com/sirupsen/logrus"
)
+var (
+ exitOnRunning = os.Getenv("K6_EXIT_ON_RUNNING") != ""
+)
+
var cloudCmd = &cobra.Command{
Use: "cloud",
Short: "Run a test on the cloud",
@@ -164,7 +168,7 @@ This will execute the test on the Load Impact cloud service. Use "k6 login cloud
case <-ticker.C:
testProgress, progressErr = client.GetTestProgress(refID)
if progressErr == nil {
- if testProgress.RunStatus > 2 {
+ if (testProgress.RunStatus > 2) || (exitOnRunning && testProgress.RunStatus == 2) {
shouldExitLoop = true
}
progress.Progress = testProgress.Progress
@@ -199,4 +203,5 @@ func init() {
cloudCmd.Flags().SortFlags = false
cloudCmd.Flags().AddFlagSet(optionFlagSet())
cloudCmd.Flags().AddFlagSet(runtimeOptionFlagSet(false))
+ cloudCmd.Flags().BoolVar(&exitOnRunning, "exit-on-running", exitOnRunning, "exists when test reaches the running status")
} | Adding exit-on-running option | loadimpact_k6 | train |
87809dbda2807516d0b1b21190199b24a67d0468 | diff --git a/graylog2-plugin-interfaces/src/main/java/org/graylog2/plugin/streams/Stream.java b/graylog2-plugin-interfaces/src/main/java/org/graylog2/plugin/streams/Stream.java
index <HASH>..<HASH> 100644
--- a/graylog2-plugin-interfaces/src/main/java/org/graylog2/plugin/streams/Stream.java
+++ b/graylog2-plugin-interfaces/src/main/java/org/graylog2/plugin/streams/Stream.java
@@ -31,7 +31,13 @@ import java.util.Set;
public interface Stream extends Persisted {
enum MatchingType {
AND,
- OR
+ OR;
+
+ public static final MatchingType DEFAULT = AND;
+
+ public static MatchingType valueOfOrDefault(String name) {
+ return (name == null ? DEFAULT : valueOf(name));
+ }
}
String getId();
diff --git a/graylog2-server/src/main/java/org/graylog2/streams/StreamServiceImpl.java b/graylog2-server/src/main/java/org/graylog2/streams/StreamServiceImpl.java
index <HASH>..<HASH> 100644
--- a/graylog2-server/src/main/java/org/graylog2/streams/StreamServiceImpl.java
+++ b/graylog2-server/src/main/java/org/graylog2/streams/StreamServiceImpl.java
@@ -95,7 +95,7 @@ public class StreamServiceImpl extends PersistedServiceImpl implements StreamSer
streamData.put(StreamImpl.FIELD_CREATOR_USER_ID, userId);
streamData.put(StreamImpl.FIELD_CREATED_AT, Tools.iso8601());
streamData.put(StreamImpl.FIELD_CONTENT_PACK, cr.contentPack());
- streamData.put(StreamImpl.FIELD_MATCHING_TYPE, Stream.MatchingType.valueOf(cr.matchingType()));
+ streamData.put(StreamImpl.FIELD_MATCHING_TYPE, Stream.MatchingType.valueOfOrDefault(cr.matchingType()));
return create(streamData);
} | Adding helper method for matching type which returns DEFAULT if null.
Unbreaks creating a stream with an unspecified matching type. | Graylog2_graylog2-server | train |
e4a4fe97bc404b0a6c12f43f6a3788e34f378279 | diff --git a/grimoire_elk/elk/discourse.py b/grimoire_elk/elk/discourse.py
index <HASH>..<HASH> 100644
--- a/grimoire_elk/elk/discourse.py
+++ b/grimoire_elk/elk/discourse.py
@@ -206,9 +206,10 @@ class DiscourseEnrich(Enrich):
# The first post is the first published, and it is the question
first_post = topic['post_stream']['posts'][0]
eitem['category_id'] = topic['category_id']
- eitem['category_name'] = self.categories[topic['category_id']]
eitem['categories'] = self.__related_categories(topic['category_id'])
- eitem['categories'] += [eitem['category_name']]
+ if topic['category_id'] in self.categories:
+ eitem['category_name'] = self.categories[topic['category_id']]
+ eitem['categories'] += [eitem['category_name']]
eitem['url'] = eitem['origin'] + "/t/" + first_post['topic_slug']
eitem['url'] += "/" + str(first_post['topic_id']) + "/" + str(first_post['post_number'])
eitem['display_username'] = first_post['display_username'] | [enrich][discourse] Support that some categories ids don't appear in categories
names list. | chaoss_grimoirelab-elk | train |
8c1104a709dc9b0cc79c7c756b3d4e266b17546c | diff --git a/src/controllers/TicketController.php b/src/controllers/TicketController.php
index <HASH>..<HASH> 100644
--- a/src/controllers/TicketController.php
+++ b/src/controllers/TicketController.php
@@ -97,10 +97,10 @@ class TicketController extends \hipanel\base\CrudController
$ticket = $action->model;
$attributes = [
- 'id' => $ticket->recipient_id,
- 'login' => $ticket->recipient,
- 'seller' => $ticket->recipient_seller,
- 'seller_id' => $ticket->recipient_seller_id,
+ 'id' => $ticket->recipient_id ?? $ticket->author_id,
+ 'login' => $ticket->recipient ?? $ticket->author,
+ 'seller' => $ticket->recipient_seller ?? $ticket->author_seller,
+ 'seller_id' => $ticket->recipient_seller_id ?? $ticket->author_seller_id,
];
$isAnonymTicket = $ticket->recipient === 'anonym';
@@ -115,7 +115,7 @@ class TicketController extends \hipanel\base\CrudController
$client->name = $ticket->anonym_name;
}
- return array_merge(compact('client'), $this->prepareRefs());
+ return array_merge(['client' => $client], $this->prepareRefs());
},
],
'validate-form' => [ | Fix client info loading. there are tickets for which the recipient is not filled in, for example #<I> | hiqdev_hipanel-module-ticket | train |
b6a07b47673c6271a7b39ce5f229e6eeae88507d | diff --git a/scripts/setup.js b/scripts/setup.js
index <HASH>..<HASH> 100644
--- a/scripts/setup.js
+++ b/scripts/setup.js
@@ -40,7 +40,7 @@ const CONFIG = new Proxy(
write() {
fs.writeFileSync("./.perspectiverc", this.config.join("\n"));
if (process.env.PSP_BUILD_IMMEDIATELY) {
- execute`"node scripts/build.js"`;
+ execute`node scripts/build.js`;
}
}
})(), | fix bug in setup.js where extra set of quotes prevents initial build | finos_perspective | train |
52613468fc58737baf1d45b3f6a8f7978876a59d | diff --git a/src/renderable/container.js b/src/renderable/container.js
index <HASH>..<HASH> 100644
--- a/src/renderable/container.js
+++ b/src/renderable/container.js
@@ -800,31 +800,34 @@
renderer.translate(this.pos.x, this.pos.y);
for (var i = this.children.length, obj; i--, (obj = this.children[i]);) {
- isFloating = obj.floating === true;
+ if (obj.isRenderable) {
+
+ isFloating = obj.floating === true;
- if ((obj.inViewport || isFloating) && obj.isRenderable) {
+ if ((obj.inViewport || isFloating)) {
- if (isFloating) {
- // translate to screen coordinates
- renderer.save();
- renderer.resetTransform();
- }
+ if (isFloating) {
+ // translate to screen coordinates
+ renderer.save();
+ renderer.resetTransform();
+ }
- // predraw (apply transforms)
- obj.preDraw(renderer);
+ // predraw (apply transforms)
+ obj.preDraw(renderer);
- // draw the object
- obj.draw(renderer, rect);
+ // draw the object
+ obj.draw(renderer, rect);
- // postdraw (clean-up);
- obj.postDraw(renderer);
+ // postdraw (clean-up);
+ obj.postDraw(renderer);
- // restore the previous "state"
- if (isFloating) {
- renderer.restore();
- }
+ // restore the previous "state"
+ if (isFloating) {
+ renderer.restore();
+ }
- this.drawCount++;
+ this.drawCount++;
+ }
}
}
small optimization to filter renderable objects first in the draw loop,
as opposed to before, where it was checking if they were floating and in the viewport (unnecessary checks for non-renderable objects) | melonjs_melonJS | train
a62391a5fc444ab423ebcd302040931a1e81eda2 | diff --git a/system/HTTP/IncomingRequest.php b/system/HTTP/IncomingRequest.php
index <HASH>..<HASH> 100755
--- a/system/HTTP/IncomingRequest.php
+++ b/system/HTTP/IncomingRequest.php
@@ -587,7 +587,10 @@ class IncomingRequest extends Request
}
else
{
- throw FrameworkException::forEmptyBaseURL();
+ if(! is_cli())
+ {
+ throw FrameworkException::forEmptyBaseURL();
+ }
}
} | Don't check base path on cli | codeigniter4_CodeIgniter4 | train |
d1baf9f86a49818bb7a7c60affd5733fea05dfcc | diff --git a/is_core/templatetags/forms.py b/is_core/templatetags/forms.py
index <HASH>..<HASH> 100644
--- a/is_core/templatetags/forms.py
+++ b/is_core/templatetags/forms.py
@@ -1,12 +1,13 @@
from django import template
from django.template.loader import render_to_string
from django.template.base import TemplateSyntaxError, token_kwargs
-from django.db.models.fields import FieldDoesNotExist
+from django.db.models.fields import FieldDoesNotExist, DateTimeField
from django.db.models.fields.related import ForeignKey
from django.contrib.admin.util import display_for_value
from django.utils.html import linebreaks
from django.utils.safestring import mark_safe
from django.utils.encoding import force_text
+from django.contrib.humanize.templatetags import humanize
from block_snippets.templatetags import SnippetsIncludeNode
@@ -108,6 +109,8 @@ def get_model_field_value_and_label(field_name, instance):
label = field.verbose_name
if isinstance(field, ForeignKey) and hasattr(getattr(callable_value, 'get_absolute_url', None), '__call__'):
value = '<a href="%s">%s</a>' % (callable_value.get_absolute_url(), force_text(value))
+ if isinstance(field, DateTimeField):
+ value = '<span title="%s">%s</span>' % (force_text(value), humanize.naturaltime(callable_value))
else:
label = callable_value.short_description | Humanize readonly DateTimeField | matllubos_django-is-core | train |
63aa1fb9ddddfff7a0bcd01cbdfb6507c1723afa | diff --git a/lib/tools/system-calls.js b/lib/tools/system-calls.js
index <HASH>..<HASH> 100644
--- a/lib/tools/system-calls.js
+++ b/lib/tools/system-calls.js
@@ -151,6 +151,10 @@ systemCallMethods.adbExec = async function (cmd, opts = {}) {
if (!cmd) {
throw new Error("You need to pass in a command to adbExec()");
}
+ // setting default timeout for each command to prevent infinite wait.
+ if (!opts.timeout) {
+ opts.timeout = 20000;
+ }
let execFunc = async () => {
try {
if (!(cmd instanceof Array)) {
diff --git a/test/functional/adb-commands-e2e-specs.js b/test/functional/adb-commands-e2e-specs.js
index <HASH>..<HASH> 100644
--- a/test/functional/adb-commands-e2e-specs.js
+++ b/test/functional/adb-commands-e2e-specs.js
@@ -15,8 +15,9 @@ const apiLevel = '21',
pkgName = 'com.example.android.contactmanager',
actName = 'ContactManager';
-describe('adb commands', () => {
+describe('adb commands', function () {
let adb = new ADB();
+ this.timeout(60000);
before(async () => {
await adb.createADB();
});
diff --git a/test/functional/apk-utils-e2e-specs.js b/test/functional/apk-utils-e2e-specs.js
index <HASH>..<HASH> 100644
--- a/test/functional/apk-utils-e2e-specs.js
+++ b/test/functional/apk-utils-e2e-specs.js
@@ -7,7 +7,7 @@ import * as utils from '../../lib/utils.js';
chai.use(chaiAsPromised);
-describe('apk utils', () => {
+describe('apk utils', function () {
let adb = new ADB();
const contactManagerPath = path.resolve(utils.rootDir, 'test',
'fixtures', 'ContactManager.apk');
@@ -17,6 +17,7 @@ describe('apk utils', () => {
appPackage.should.equal('com.example.android.contactmanager');
appActivity.should.equal('.ContactManager');
};
+ this.timeout(60000);
before(async () => {
await adb.createADB();
});
diff --git a/test/functional/syscalls-e2e-specs.js b/test/functional/syscalls-e2e-specs.js
index <HASH>..<HASH> 100644
--- a/test/functional/syscalls-e2e-specs.js
+++ b/test/functional/syscalls-e2e-specs.js
@@ -5,12 +5,12 @@ import ADB from '../../lib/adb.js';
chai.use(chaiAsPromised);
-describe('System calls', () => {
+describe('System calls', function () {
let adb = new ADB();
const apiLevel = '21',
// TODO change according to avdName on test machine
avdName = 'finaltest21';
-
+ this.timeout(50000);
before(async () => {
await adb.createADB();
}); | Adding default timeout to exec command, adding timeout to tests | appium_appium-adb | train |
64aafd36bb1309eb309cca11378b3ca17bb77f77 | diff --git a/cake/libs/cake_request.php b/cake/libs/cake_request.php
index <HASH>..<HASH> 100644
--- a/cake/libs/cake_request.php
+++ b/cake/libs/cake_request.php
@@ -26,7 +26,10 @@ class CakeRequest {
public $params = array();
/**
- * Array of POST data
+ * Array of POST data. Will contain form data as well as uploaded files.
+ * Will only contain data from inputs that start with 'data'. So
+ * `<input name="some_input" />` will not end up in data. However,
+ * `<input name="data[something]" />`
*
* @var array
*/
diff --git a/cake/tests/cases/libs/cake_request.test.php b/cake/tests/cases/libs/cake_request.test.php
index <HASH>..<HASH> 100644
--- a/cake/tests/cases/libs/cake_request.test.php
+++ b/cake/tests/cases/libs/cake_request.test.php
@@ -303,8 +303,20 @@ class CakeRequestTestCase extends CakeTestCase {
'size' => 123
)
);
-
$this->assertEqual($request->data, $expected);
+
+ $_FILES = array(
+ 'something' => array(
+ 'name' => 'something.txt',
+ 'type' => 'text/plain',
+ 'tmp_name' => '/some/file',
+ 'error' => 0,
+ 'size' => 123
+ )
+ );
+ $request = new CakeRequest();
+ $this->assertEqual($request->params['form'], $_FILES);
+
}
/** | Expanding coverage for CakeRequest. Expanding doc blocks in CakeRequest. | cakephp_cakephp | train |
0feaa158f0f01957da6be02ac6fa940a5c645093 | diff --git a/tests/Datagrid/ListMapperTest.php b/tests/Datagrid/ListMapperTest.php
index <HASH>..<HASH> 100644
--- a/tests/Datagrid/ListMapperTest.php
+++ b/tests/Datagrid/ListMapperTest.php
@@ -208,8 +208,11 @@ class ListMapperTest extends TestCase
$this->listMapper->add('_'.$type, $type);
}
- foreach ($this->fieldDescriptionCollection as $field) {
- $this->assertTrue($field->isVirtual(), 'Failed asserting that FieldDescription with type "'.$field->getType().'" is tagged with virtual flag.');
+ foreach ($this->fieldDescriptionCollection->getElements() as $field) {
+ $this->assertTrue(
+ $field->isVirtual(),
+ 'Failed asserting that FieldDescription with type "'.$field->getType().'" is tagged with virtual flag.'
+ );
}
} | Actually traverse the array
Traversing that object creates no iteration. This was not tested. | sonata-project_SonataAdminBundle | train |
5666aa201b1fd67e367874fffbb92d72e9133710 | diff --git a/pkg/backup/backup.go b/pkg/backup/backup.go
index <HASH>..<HASH> 100644
--- a/pkg/backup/backup.go
+++ b/pkg/backup/backup.go
@@ -32,6 +32,7 @@ func Run(client controller.Client, out io.Writer, progress ProgressBar) error {
"PGPASSWORD": pgRelease.Env["PGPASSWORD"],
},
DisableLog: true,
+ Partition: ct.PartitionTypeBackground,
}
if err := tw.WriteCommandOutput(client, "postgres.sql.gz", "postgres", pgJob); err != nil {
return fmt.Errorf("error dumping postgres database: %s", err)
@@ -51,6 +52,7 @@ func Run(client controller.Client, out io.Writer, progress ProgressBar) error {
"MYSQL_PWD": mysqlRelease.Env["MYSQL_PWD"],
},
DisableLog: true,
+ Partition: ct.PartitionTypeBackground,
}
if err := tw.WriteCommandOutput(client, "mysql.sql.gz", "mariadb", mysqlJob); err != nil {
return fmt.Errorf("error dumping mariadb database: %s", err)
@@ -71,6 +73,7 @@ func Run(client controller.Client, out io.Writer, progress ProgressBar) error {
"MONGO_PWD": mongodbRelease.Env["MONGO_PWD"],
},
DisableLog: true,
+ Partition: ct.PartitionTypeBackground,
}
if err := tw.WriteCommandOutput(client, "mongodb.archive.gz", "mongodb", mongodbJob); err != nil {
return fmt.Errorf("error dumping mongodb database: %s", err) | pkg/backup: Run database dumps in background partition | flynn_flynn | train |
b3bc7e166df57b05e0b4764dee218e84dbf51ca9 | diff --git a/tests/test_finance.py b/tests/test_finance.py
index <HASH>..<HASH> 100644
--- a/tests/test_finance.py
+++ b/tests/test_finance.py
@@ -307,11 +307,9 @@ class FinanceTestCase(TestCase):
cumulative_pos = tracker.cumulative_performance.positions[sid]
self.assertEqual(total_volume, cumulative_pos.amount)
- # the open orders should now be empty
+ # the open orders should not contain sid.
oo = blotter.open_orders
- self.assertTrue(sid in oo)
- order_list = oo[sid]
- self.assertEqual(0, len(order_list))
+ self.assertNotIn(sid, oo, "Entry is removed when no open orders")
def test_blotter_processes_splits(self):
sim_params = factory.create_simulation_parameters()
diff --git a/zipline/finance/blotter.py b/zipline/finance/blotter.py
index <HASH>..<HASH> 100644
--- a/zipline/finance/blotter.py
+++ b/zipline/finance/blotter.py
@@ -210,10 +210,15 @@ class Blotter(object):
yield txn, order
# update the open orders for the trade_event's sid
- self.open_orders[trade_event.sid] = \
+ updated_orders = \
[order for order
- in self.open_orders[trade_event.sid]
- if order.open]
+ in self.open_orders[trade_event.sid]
+ if order.open]
+
+ if updated_orders:
+ self.open_orders[trade_event.sid] = updated_orders
+ else:
+ del self.open_orders[trade_event.sid]
def process_transactions(self, trade_event, current_orders):
for order, txn in self.transact(trade_event, current_orders): | BUG: Remove sid entry from open_orders when there are none.
TST: updated tests for new open_orders behavior | quantopian_zipline | train |
5bd7dff2bcd66db97bbc6b36a20a9ddb4f411b23 | diff --git a/src/lolex-src.js b/src/lolex-src.js
index <HASH>..<HASH> 100644
--- a/src/lolex-src.js
+++ b/src/lolex-src.js
@@ -610,8 +610,13 @@ function createClock(now, loopLimit) {
exports.createClock = createClock;
exports.install = function install(target, now, toFake, loopLimit) {
- var i,
- l;
+ var i, l;
+
+ if (target instanceof Date) {
+ toFake = now;
+ now = target.getTime();
+ target = null;
+ }
if (typeof target === "number") {
toFake = now;
diff --git a/test/lolex-test.js b/test/lolex-test.js
index <HASH>..<HASH> 100644
--- a/test/lolex-test.js
+++ b/test/lolex-test.js
@@ -41,6 +41,14 @@ describe("issue #59", function () {
});
});
+describe('issue #73', function() {
+ it('should install with date object', function () {
+ var date = new Date('2015-09-25');
+ var clock = lolex.install(date);
+ assert.same(date.clock, clock);
+ });
+});
+
describe("lolex", function () {
describe("setTimeout", function () { | Allow install to be called with date object | sinonjs_lolex | train |
a4f70a115168188e22d57535500dd3a13ef9c82f | diff --git a/multiqc/modules/flash/flash.py b/multiqc/modules/flash/flash.py
index <HASH>..<HASH> 100644
--- a/multiqc/modules/flash/flash.py
+++ b/multiqc/modules/flash/flash.py
@@ -122,7 +122,7 @@ class MultiqcModule(BaseMultiqcModule):
data[s_name] = sample
except Exception as err:
- log.warning("Error parsing record in %s. %s", logf['fn'], err)
+ log.warning("Error parsing record in {}. {}".format(logf['fn'], err))
log.debug(traceback.format_exc())
continue
return data
@@ -171,7 +171,8 @@ class MultiqcModule(BaseMultiqcModule):
}
splotconfig = {'id': 'flash_combo_stats_plot',
'title': 'FLASh: Read combination statistics',
- 'ylab': 'Samples'}
+ 'ylab': 'Number of read pairs',
+ 'hide_zero_cats': False }
return bargraph.plot(data, cats, splotconfig)
@staticmethod | FLASh: Minor consistency / axis label tweaks | ewels_MultiQC | train |
b93a2d1664e1f21a7502ab14fe94b80afb216737 | diff --git a/src/ValuSo/Proxy/ServiceProxyGenerator.php b/src/ValuSo/Proxy/ServiceProxyGenerator.php
index <HASH>..<HASH> 100644
--- a/src/ValuSo/Proxy/ServiceProxyGenerator.php
+++ b/src/ValuSo/Proxy/ServiceProxyGenerator.php
@@ -107,7 +107,7 @@ class ServiceProxyGenerator
$source = "<?php\n" . $class->generate();
$fileName = $this->getProxyFileName($className);
- $parentDirectory = dirname($fileName);
+ $parentDirectory = $this->proxyDirectory;
if ( ! is_dir($parentDirectory) && (false === @mkdir($parentDirectory, 0775, true))) {
throw Exception\RuntimeException('Proxy directory '.$parentDirectory.' not found');
@@ -632,4 +632,4 @@ class ServiceProxyGenerator
return '';
}
}
-}
\ No newline at end of file
+} | Ensures that proxy directory is always created if it doesn't exist | valu-digital_valuso | train |
b621fa268ae6e61e6185cc16c1d9ea916432053a | diff --git a/molecule/driver/dockerdriver.py b/molecule/driver/dockerdriver.py
index <HASH>..<HASH> 100644
--- a/molecule/driver/dockerdriver.py
+++ b/molecule/driver/dockerdriver.py
@@ -218,8 +218,8 @@ class DockerDriver(basedriver.BaseDriver):
def _build_ansible_compatible_image(self):
available_images = [
tag.encode('utf-8')
- for image in self._docker.images()
- for tag in image.get('RepoTags', [])
+ for image in self._docker.images() if image.get('RepoTags') is not None
+ for tag in image.get('RepoTags')
]
for container in self.instances: | Corrected bug where RepoTags is None (#<I>)
Fixes: #<I> | ansible_molecule | train |
7a96f85e5cff947cee0d11f2fb8e00fed09d0e99 | diff --git a/lib/algebrick.rb b/lib/algebrick.rb
index <HASH>..<HASH> 100644
--- a/lib/algebrick.rb
+++ b/lib/algebrick.rb
@@ -338,7 +338,9 @@ module Algebrick
if keys
@field_names = keys
keys.all? { |k| is_kind_of! k, Symbol }
- dict = @field_indexes = keys.each_with_index.inject({}) { |h, (k, i)| h.update k => i }
+ dict = @field_indexes =
+ Hash.new { |h, k| raise ArgumentError, "uknown field #{k.inspect}" }.
+ update keys.each_with_index.inject({}) { |h, (k, i)| h.update k => i }
define_method(:[]) { |key| @fields[dict[key]] }
end | Raise when reading unknown field on a product type | pitr-ch_algebrick | train |
08575b70c8a663aefb97cfa8b356a7813b36f4c0 | diff --git a/app/Push/Modes/InServiceUpgrade/InServicePushMode.php b/app/Push/Modes/InServiceUpgrade/InServicePushMode.php
index <HASH>..<HASH> 100644
--- a/app/Push/Modes/InServiceUpgrade/InServicePushMode.php
+++ b/app/Push/Modes/InServiceUpgrade/InServicePushMode.php
@@ -78,7 +78,7 @@ class InServicePushMode implements PushMode {
$inServiceUpgradeEvent = new PushCommandInServiceUpgradeEvent();
$inServiceUpgradeEvent->setServiceNames( $serviceNames );
$inServiceUpgradeEvent->setConfiguration( $config );
- $inServiceUpgradeEvent->setForceUpgrade( false );
+ $inServiceUpgradeEvent->setForceUpgrade(true);
return $inServiceUpgradeEvent;
}
diff --git a/app/RancherAccess/RancherService.php b/app/RancherAccess/RancherService.php
index <HASH>..<HASH> 100644
--- a/app/RancherAccess/RancherService.php
+++ b/app/RancherAccess/RancherService.php
@@ -221,9 +221,31 @@ class RancherService {
$url = $this->getUrl();
$account = $this->account;
if ($this->cliMode && $account instanceof RancherCliAccount)
- $command = [ $account->getCliVersion(), 'up', "-f", "$directory/docker-compose.yml", '--rancher-file', "$directory/rancher-compose.yml", '-s', $stackName, '-d' ];
+ $command = [
+ $account->getCliVersion(),
+ 'up',
+ "-f",
+ "$directory/docker-compose.yml",
+ '--rancher-file',
+ "$directory/rancher-compose.yml",
+ '-s',
+ $stackName,
+ '-d',
+ '-p'
+ ];
else
- $command = [ $account->getRancherCompose(), "-f", "$directory/docker-compose.yml", '-r', "$directory/rancher-compose.yml", '-p', $stackName, 'up', '-d' ];
+ $command = [
+ $account->getRancherCompose(),
+ "-f",
+ "$directory/docker-compose.yml",
+ '-r',
+ "$directory/rancher-compose.yml",
+ '-p',
+ $stackName,
+ 'up',
+ '-d',
+ '-p'
+ ];
if($upgrade)
$command = array_merge($command, ['--upgrade']); | force upgrade by default, pull images by default | ipunkt_rancherize | train |
37269e8a70e7e5bbe48d6fe0507bebb34ad8eaab | diff --git a/javascript/firefox-driver/js/webLoadingListener.js b/javascript/firefox-driver/js/webLoadingListener.js
index <HASH>..<HASH> 100644
--- a/javascript/firefox-driver/js/webLoadingListener.js
+++ b/javascript/firefox-driver/js/webLoadingListener.js
@@ -186,8 +186,8 @@ WebLoadingListener = function(browser, toCall, timeout, opt_window) {
}
loadingListenerTimer.setTimeout(function() {
- func(true);
- WebLoadingListener.removeListener(browser, handler);
+ WebLoadingListener.removeListener(browser, handler);
+ func(true);
}, timeout);
}; | Removing web loading listener before calling a timeout callback. Fixes issue <I> | SeleniumHQ_selenium | train |
0ec411e2697fc81e5a5c1b0461224a0b69a557f2 | diff --git a/library/src/android/support/v4/app/FragmentActivity.java b/library/src/android/support/v4/app/FragmentActivity.java
index <HASH>..<HASH> 100644
--- a/library/src/android/support/v4/app/FragmentActivity.java
+++ b/library/src/android/support/v4/app/FragmentActivity.java
@@ -239,6 +239,24 @@ public class FragmentActivity extends Activity {
}
}
+ @Override
+ public void setTitle(CharSequence title) {
+ if ((mActionBar != null) && !IS_HONEYCOMB) {
+ mActionBar.setTitle(title);
+ } else {
+ super.setTitle(title);
+ }
+ }
+
+ @Override
+ public void setTitle(int titleId) {
+ if ((mActionBar != null) && !IS_HONEYCOMB) {
+ mActionBar.setTitle(titleId);
+ } else {
+ super.setTitle(titleId);
+ }
+ }
+
/**
* Hook into the superclass's setContentView implementation.
* | add setTitle()
+ setTitle(int)
+ setTitle(ChareSequence)
Both implementations will forward the call to ActionBar on pre honeycomb devices. | JakeWharton_ActionBarSherlock | train |
a7c63f91527886feb13793c4c7be7468eb352876 | diff --git a/src/layers/core/reflection-layer/reflection-layer.js b/src/layers/core/reflection-layer/reflection-layer.js
index <HASH>..<HASH> 100644
--- a/src/layers/core/reflection-layer/reflection-layer.js
+++ b/src/layers/core/reflection-layer/reflection-layer.js
@@ -20,7 +20,7 @@
import {Layer} from '../../../lib';
import {assembleShaders} from '../../../shader-utils';
-import {GL, Model, Geometry} from 'luma.gl';
+import {GL, Model, Geometry, Framebuffer, Texture2D} from 'luma.gl';
const glslify = require('glslify');
@@ -74,9 +74,17 @@ export default class ReflectionLayer extends Layer {
const IndexType = gl.getExtension('OES_element_index_uint') ?
Uint32Array : Uint16Array;
+ const framebuffer = new Framebuffer(gl, {
+ width: this.context.viewport.width,
+ height: this.context.viewport.height,
+ minFilter: gl.LINEAR,
+ magFilter: gl.LINEAR
+ });
+
this.setState({
model: this.getModel(gl),
numInstances: 0,
+ framebuffer: framebuffer,
IndexType
});
}
@@ -91,6 +99,14 @@ export default class ReflectionLayer extends Layer {
if (oldProps.opacity !== props.opacity) {
this.setUniforms({opacity: props.opacity});
}
+
+ if (this.context.viewport.width != this.state.framebuffer.width || this.context.viewport.height != this.state.framebuffer.height) {
+ const {gl} = this.context;
+ this.state.framebuffer.resize({
+ width: this.context.viewport.width,
+ height: this.context.viewport.height,
+ });
+ }
}
draw({uniforms}) { | added a framebuffer to the reflection layer | uber_deck.gl | train |
6f913779649aa23243ff4f8e879236c8932083cc | diff --git a/pyOCD/flash/flash_lpc1768.py b/pyOCD/flash/flash_lpc1768.py
index <HASH>..<HASH> 100644
--- a/pyOCD/flash/flash_lpc1768.py
+++ b/pyOCD/flash/flash_lpc1768.py
@@ -15,7 +15,13 @@
limitations under the License.
"""
-from flash import Flash
+from flash import Flash, PageInfo, DEFAULT_PAGE_PROGRAM_WEIGHT, DEFAULT_PAGE_ERASE_WEIGHT
+
+LARGE_PAGE_START_ADDR = 0x10000
+SMALL_PAGE_SIZE = 0x1000
+LARGE_PAGE_SIZE = 0x8000
+LARGE_TO_SMALL_RATIO = LARGE_PAGE_SIZE / SMALL_PAGE_SIZE
+WRITE_SIZE = 1024
flash_algo = { 'load_address' : 0x10000000,
'instructions' : [
@@ -53,15 +59,28 @@ class Flash_lpc1768(Flash):
super(Flash_lpc1768, self).__init__(target, flash_algo)
def erasePage(self, flashPtr):
- if flashPtr < 0x10000:
- erase_size = 0x1000
- else:
- erase_size = 0x8000
- for i in range(0, 0x8000 / erase_size):
- Flash.erasePage(self, flashPtr + i * erase_size)
+ Flash.erasePage(self, flashPtr)
def programPage(self, flashPtr, bytes):
- write_size = 1024
- for i in range(0, 32):
- data = bytes[i * write_size : (i + 1) * write_size]
- Flash.programPage(self, flashPtr + i * write_size, data)
+ if flashPtr < LARGE_PAGE_START_ADDR:
+ assert len(bytes) <= SMALL_PAGE_SIZE
+ else:
+ assert len(bytes) <= LARGE_PAGE_SIZE
+
+ pages = (len(bytes) + WRITE_SIZE - 1) // WRITE_SIZE
+
+ for i in range(0, pages):
+ data = bytes[i * WRITE_SIZE : (i + 1) * WRITE_SIZE]
+ Flash.programPage(self, flashPtr + i * WRITE_SIZE, data)
+
+ def getPageInfo(self, addr):
+ info = PageInfo()
+ if addr < LARGE_PAGE_START_ADDR:
+ info.erase_weight = DEFAULT_PAGE_ERASE_WEIGHT
+ info.program_weight = DEFAULT_PAGE_PROGRAM_WEIGHT
+ info.size = SMALL_PAGE_SIZE
+ else:
+ info.erase_weight = DEFAULT_PAGE_ERASE_WEIGHT * LARGE_TO_SMALL_RATIO
+ info.program_weight = DEFAULT_PAGE_PROGRAM_WEIGHT * LARGE_TO_SMALL_RATIO
+ info.size = LARGE_PAGE_SIZE
+ return info | Allow differing page sizes on LPC<I>
Allow 4KB and <I>KB pages on the LPC<I>. This greatly increases flash
performance when only programming the first few sectors. | mbedmicro_pyOCD | train |
9aa2366c4e69a1a18d43ad0b5ded4180d042c3fb | diff --git a/options.go b/options.go
index <HASH>..<HASH> 100644
--- a/options.go
+++ b/options.go
@@ -201,14 +201,38 @@ func coerce(v interface{}, opt interface{}, arg string) (interface{}, error) {
return nil, err
}
return int(i), nil
+ case int16:
+ i, err := coerceInt64(v)
+ if err != nil {
+ return nil, err
+ }
+ return int16(i), nil
+ case uint16:
+ i, err := coerceInt64(v)
+ if err != nil {
+ return nil, err
+ }
+ return uint16(i), nil
case int32:
i, err := coerceInt64(v)
if err != nil {
return nil, err
}
return int32(i), nil
+ case uint32:
+ i, err := coerceInt64(v)
+ if err != nil {
+ return nil, err
+ }
+ return uint32(i), nil
case int64:
return coerceInt64(v)
+ case uint64:
+ i, err := coerceInt64(v)
+ if err != nil {
+ return nil, err
+ }
+ return uint64(i), nil
case string:
return coerceString(v)
case time.Duration: | support the rest of the int types | mreiferson_go-options | train |
768b005296bfba855ccb29b948b57fa194f756af | diff --git a/bin/release.py b/bin/release.py
index <HASH>..<HASH> 100755
--- a/bin/release.py
+++ b/bin/release.py
@@ -281,16 +281,16 @@ def release():
# Step 4: Check in to Maven2 repo
print "Step 4: Checking in to Maven2 Repo (this can take a while, go get coffee)"
- do_task(checkInMaven2Repo, (version, workingDir), async_processes)
+ do_task(checkInMaven2Repo, [version, workingDir], async_processes)
print "Step 4: Complete"
# Step 5: Upload javadocs to FTP
print "Step 5: Uploading Javadocs"
- do_task(uploadJavadocs, (base_dir, workingDir, version), async_processes)
+ do_task(uploadJavadocs, [base_dir, workingDir, version], async_processes)
print "Step 5: Complete"
print "Step 6: Uploading to Sourceforge"
- do_task(uploadArtifactsToSourceforge, (version), async_processes)
+ do_task(uploadArtifactsToSourceforge, [version], async_processes)
print "Step 6: Complete"
## Wait for processes to finish | do_work() needs to take it params as a list, not a tuple | infinispan_infinispan | train |
a310dbd613ba69f309e9cca59aa2757a14e34888 | diff --git a/src/test/java/com/authy/api/TokensTest.java b/src/test/java/com/authy/api/TokensTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/authy/api/TokensTest.java
+++ b/src/test/java/com/authy/api/TokensTest.java
@@ -1,9 +1,13 @@
package com.authy.api;
import static com.github.tomakehurst.wiremock.client.WireMock.aResponse;
+import static com.github.tomakehurst.wiremock.client.WireMock.equalTo;
import static com.github.tomakehurst.wiremock.client.WireMock.get;
+import static com.github.tomakehurst.wiremock.client.WireMock.getRequestedFor;
import static com.github.tomakehurst.wiremock.client.WireMock.stubFor;
+import static com.github.tomakehurst.wiremock.client.WireMock.urlPathEqualTo;
import static com.github.tomakehurst.wiremock.client.WireMock.urlPathMatching;
+import static com.github.tomakehurst.wiremock.client.WireMock.verify;
import static junit.framework.TestCase.fail;
@@ -167,4 +171,24 @@ public class TokensTest extends TestApiBase {
}
}
+ @Test
+ public void testRequestParameters() {
+ stubFor(get(urlPathMatching("/protected/xml/verify/.*"))
+ .willReturn(aResponse()
+ .withStatus(200)
+ .withHeader("Content-Type", "application/xml")
+ .withBody(validTokenResponse)));
+
+
+ try {
+ Token token = tokens.verify(testUserId, testToken);
+
+ verify(getRequestedFor(urlPathEqualTo("/protected/xml/verify/" + testToken + "/" + testUserId))
+ .withHeader("X-Authy-API-Key", equalTo(testApiKey)));
+ Assert.assertNull("Token must not have an error", token.getError());
+ Assert.assertTrue("Token verification must be successful", token.isOk());
+ } catch (AuthyException e) {
+ fail("Verification should be successful");
+ }
+ }
}
\ No newline at end of file | Added test to verify api request creation | twilio_authy-java | train |
4b22d3b1a1f7b9d6818444ddc4e8cda84b93e696 | diff --git a/lxd/db/networks.go b/lxd/db/networks.go
index <HASH>..<HASH> 100644
--- a/lxd/db/networks.go
+++ b/lxd/db/networks.go
@@ -455,8 +455,8 @@ func (c *Cluster) getNetworkConfig(id int64) (map[string]string, error) {
return config, nil
}
-// NetworkCreate creates a new network.
-func (c *Cluster) NetworkCreate(name, description string, config map[string]string) (int64, error) {
+// CreateNetwork creates a new network.
+func (c *Cluster) CreateNetwork(name, description string, config map[string]string) (int64, error) {
var id int64
err := c.Transaction(func(tx *ClusterTx) error {
result, err := tx.tx.Exec("INSERT INTO networks (name, description, state) VALUES (?, ?, ?)", name, description, networkCreated)
diff --git a/lxd/db/networks_test.go b/lxd/db/networks_test.go
index <HASH>..<HASH> 100644
--- a/lxd/db/networks_test.go
+++ b/lxd/db/networks_test.go
@@ -15,7 +15,7 @@ func TestGetNetworksLocalConfigs(t *testing.T) {
cluster, cleanup := db.NewTestCluster(t)
defer cleanup()
- _, err := cluster.NetworkCreate("lxdbr0", "", map[string]string{
+ _, err := cluster.CreateNetwork("lxdbr0", "", map[string]string{
"dns.mode": "none",
"bridge.external_interfaces": "vlan0",
})
diff --git a/lxd/networks.go b/lxd/networks.go
index <HASH>..<HASH> 100644
--- a/lxd/networks.go
+++ b/lxd/networks.go
@@ -195,7 +195,7 @@ func networksPost(d *Daemon, r *http.Request) response.Response {
}
// Create the database entry
- _, err = d.cluster.NetworkCreate(req.Name, req.Description, req.Config)
+ _, err = d.cluster.CreateNetwork(req.Name, req.Description, req.Config)
if err != nil {
return response.SmartError(fmt.Errorf("Error inserting %s into database: %s", req.Name, err))
} | lxd/db: Rename NetworkCreate to CreateNetwork | lxc_lxd | train |
e3af917b1b6fae8132bdcef80fd9b9b614703559 | diff --git a/GPy/kern/parts/rbf_inv.py b/GPy/kern/parts/rbf_inv.py
index <HASH>..<HASH> 100644
--- a/GPy/kern/parts/rbf_inv.py
+++ b/GPy/kern/parts/rbf_inv.py
@@ -159,20 +159,21 @@ class RBFInv(RBF):
def dpsi1_dtheta(self, dL_dpsi1, Z, mu, S, target):
self._psi_computations(Z, mu, S)
- tmp = 1 + S[:,None,:]*self.inv_lengthscale2
- d_inv_length_old = -self._psi1[:,:,None] * ((self._psi1_dist_sq - 1.)/(self.lengthscale*self._psi1_denom) + self.inv_lengthscale)/self.inv_lengthscale2
- d_inv_length = -self._psi1[:,:,None] * ((self._psi1_dist_sq - 1.)/self._psi1_denom + self.lengthscale)
+ tmp = 1 + S[:, None, :] * self.inv_lengthscale2
+ # d_inv_length_old = -self._psi1[:, :, None] * ((self._psi1_dist_sq - 1.) / (self.lengthscale * self._psi1_denom) + self.inv_lengthscale) / self.inv_lengthscale2
+ d_length = -(self._psi1[:, :, None] * ((np.square(self._psi1_dist) * self.inv_lengthscale) / (tmp ** 2) + (S[:, None, :] * self.inv_lengthscale) / (tmp)))
+ # d_inv_length = -self._psi1[:, :, None] * ((self._psi1_dist_sq - 1.) / self._psi1_denom + self.lengthscale)
target[0] += np.sum(dL_dpsi1 * self._psi1 / self.variance)
- dpsi1_dlength = d_inv_length * dL_dpsi1[:, :, None]
+ dpsi1_dlength = d_length * dL_dpsi1[:, :, None]
if not self.ARD:
- target[1] += dpsi1_dlength.sum()#*(-self.lengthscale2)
+ target[1] += dpsi1_dlength.sum() # *(-self.lengthscale2)
else:
- target[1:] += dpsi1_dlength.sum(0).sum(0)#*(-self.lengthscale2)
- #target[1:] = target[1:]*(-self.lengthscale2)
+ target[1:] += dpsi1_dlength.sum(0).sum(0) # *(-self.lengthscale2)
+ # target[1:] = target[1:]*(-self.lengthscale2)
def dpsi1_dZ(self, dL_dpsi1, Z, mu, S, target):
self._psi_computations(Z, mu, S)
- dpsi1_dZ = -self._psi1[:, :, None] * ((self.inv_lengthscale2*self._psi1_dist)/self._psi1_denom)
+ dpsi1_dZ = -self._psi1[:, :, None] * ((self.inv_lengthscale2 * self._psi1_dist) / self._psi1_denom)
target += np.sum(dL_dpsi1[:, :, None] * dpsi1_dZ, 0)
def dpsi1_dmuS(self, dL_dpsi1, Z, mu, S, target_mu, target_S):
@@ -185,15 +186,15 @@ class RBFInv(RBF):
"""Shape N,num_inducing,num_inducing,Ntheta"""
self._psi_computations(Z, mu, S)
d_var = 2.*self._psi2 / self.variance
- #d_length = 2.*self._psi2[:, :, :, None] * (self._psi2_Zdist_sq * self._psi2_denom + self._psi2_mudist_sq + S[:, None, None, :] / self.lengthscale2) / (self.lengthscale * self._psi2_denom)
+ # d_length = 2.*self._psi2[:, :, :, None] * (self._psi2_Zdist_sq * self._psi2_denom + self._psi2_mudist_sq + S[:, None, None, :] / self.lengthscale2) / (self.lengthscale * self._psi2_denom)
d_length = -2.*self._psi2[:, :, :, None] * (self._psi2_Zdist_sq * self._psi2_denom + self._psi2_mudist_sq + S[:, None, None, :] * self.inv_lengthscale2) / (self.inv_lengthscale * self._psi2_denom)
target[0] += np.sum(dL_dpsi2 * d_var)
dpsi2_dlength = d_length * dL_dpsi2[:, :, :, None]
if not self.ARD:
- target[1] += dpsi2_dlength.sum()#*(-self.lengthscale2)
+ target[1] += dpsi2_dlength.sum() # *(-self.lengthscale2)
else:
- target[1:] += dpsi2_dlength.sum(0).sum(0).sum(0)#*(-self.lengthscale2)
- #target[1:] = target[1:]*(-self.lengthscale2)
+ target[1:] += dpsi2_dlength.sum(0).sum(0).sum(0) # *(-self.lengthscale2)
+ # target[1:] = target[1:]*(-self.lengthscale2)
def dpsi2_dZ(self, dL_dpsi2, Z, mu, S, target):
self._psi_computations(Z, mu, S) | Merge of rbf_inv failed, corrected with AD | SheffieldML_GPy | train |
7376bdef29ec9035cde4a4a6db6f7f76cf1eb40b | diff --git a/devices.js b/devices.js
index <HASH>..<HASH> 100644
--- a/devices.js
+++ b/devices.js
@@ -1393,6 +1393,13 @@ const devices = [
extend: generic.light_onoff_brightness_colortemp,
},
{
+ zigbeeModel: ['LIGHTIFY BR RGBW'],
+ model: '73739',
+ vendor: 'Sylvania',
+ description: 'LIGHTIFY LED RGBW BR30',
+ extend: generic.light_onoff_brightness_colortemp_colorxy,
+ },
+ {
zigbeeModel: ['LIGHTIFY A19 RGBW'],
model: '73693',
vendor: 'Sylvania', | Add Support for Osram BR<I> RGBW (#<I>)
* Add support for Sengled E<I>-N<I> (BR<I>) Light
* Add Osram BR<I> RGBW LED to HA
* Update devices.js | Koenkk_zigbee-shepherd-converters | train |
f0a95c087b4da4da3d8a1e3ea71560016c8f2bdb | diff --git a/samples/package.json b/samples/package.json
index <HASH>..<HASH> 100644
--- a/samples/package.json
+++ b/samples/package.json
@@ -9,16 +9,13 @@
"node": ">=8"
},
"scripts": {
- "ava": "ava -T 20s --verbose system-test/*.test.js",
- "test": "npm run ava"
+ "test": "mocha system-test/*.js --timeout 600000"
},
"dependencies": {
"@google-cloud/firestore": "^0.18.0"
},
"devDependencies": {
"@google-cloud/nodejs-repo-tools": "^2.3.0",
- "ava": "^0.25.0",
- "proxyquire": "^2.0.1",
- "sinon": "^7.0.0"
+ "mocha": "^5.0.0"
}
}
diff --git a/samples/quickstart.js b/samples/quickstart.js
index <HASH>..<HASH> 100644
--- a/samples/quickstart.js
+++ b/samples/quickstart.js
@@ -17,36 +17,37 @@
// [START firestore_quickstart]
const Firestore = require('@google-cloud/firestore');
-
-const firestore = new Firestore({
- projectId: 'YOUR_PROJECT_ID',
- keyFilename: '/path/to/keyfile.json',
-});
-
-const document = firestore.doc('posts/intro-to-firestore');
-
-// Enter new data into the document.
-document.set({
- title: 'Welcome to Firestore',
- body: 'Hello World',
-}).then(() => {
- // Document created successfully.
-});
-
-// Update an existing document.
-document.update({
- body: 'My first Firestore app',
-}).then(() => {
- // Document updated successfully.
-});
-
-// Read the document.
-document.get().then(doc => {
- // Document read successfully.
-});
-
-// Delete the document.
-document.delete().then(() => {
- // Document deleted successfully.
-});
+async function main() {
+ const firestore = new Firestore({
+ projectId: process.env.GCLOUD_PROJECT,
+ keyFilename: process.env.GOOGLE_APPLICATION_CREDENTIALS,
+ });
+
+ const document = firestore.doc('posts/intro-to-firestore');
+ console.log('Document created');
+
+ // Enter new data into the document.
+ await document.set({
+ title: 'Welcome to Firestore',
+ body: 'Hello World',
+ });
+ console.log('Entered new data into the document');
+
+ // Update an existing document.
+ await document.update({
+ body: 'My first Firestore app',
+ });
+ console.log('Updated an existing document');
+
+ // Read the document.
+ let doc = await document.get();
+ console.log('Read the document');
+
+ // Delete the document.
+ await document.delete();
+ console.log('Deleted the document');
+
+};
+
+main().catch(console.error);
// [END firestore_quickstart]
\ No newline at end of file
diff --git a/samples/system-test/quickstart.test.js b/samples/system-test/quickstart.test.js
index <HASH>..<HASH> 100644
--- a/samples/system-test/quickstart.test.js
+++ b/samples/system-test/quickstart.test.js
@@ -16,30 +16,24 @@
'use strict';
const path = require(`path`);
-const proxyquire = require(`proxyquire`).noPreserveCache();
-const sinon = require(`sinon`);
-const test = require(`ava`);
+const tools = require('@google-cloud/nodejs-repo-tools');
+const assert = require('assert');
const cmd = `node quickstart.js`;
const cwd = path.join(__dirname, `..`);
-test(`should make some API calls`, (t) => {
- const docMock = {
- set: sinon.stub().returns(Promise.resolve()),
- update: sinon.stub().returns(Promise.resolve()),
- get: sinon.stub().returns(Promise.resolve()),
- delete: sinon.stub().returns(Promise.resolve()),
- };
-
- function FirestoreMock() {}
- FirestoreMock.prototype.doc = sinon.stub().returns(docMock);
-
- proxyquire(`../quickstart`, {
- '@google-cloud/firestore': FirestoreMock,
- });
-
- t.is(docMock.set.callCount, 1);
- t.is(docMock.update.callCount, 1);
- t.is(docMock.get.callCount, 1);
- t.is(docMock.delete.callCount, 1);
-});
+
+describe('should make some API calls',() =>{
+
+ it('firestore_inspect_string', async () => {
+ const output = await tools.runAsync(cmd,cwd);
+
+ assert.strictEqual(output.includes('Document created'), true);
+ assert.strictEqual(output.includes('Entered new data into the document'), true);
+ assert.strictEqual(output.includes('Updated an existing document'), true);
+ assert.strictEqual(output.includes('Read the document'), true);
+ assert.strictEqual(output.includes('Deleted the document'), true);
+
+ });
+
+});
\ No newline at end of file | docs(samples): convert samples to async/await (#<I>) | googleapis_nodejs-firestore | train |
b4fd7d6b4cc115988324b6b40d2189ac5238bc79 | diff --git a/Swat/SwatTableView.php b/Swat/SwatTableView.php
index <HASH>..<HASH> 100644
--- a/Swat/SwatTableView.php
+++ b/Swat/SwatTableView.php
@@ -830,6 +830,9 @@ class SwatTableView extends SwatControl implements SwatUIParent
foreach ($this->columns as $column)
$set->addEntrySet($column->getHtmlHeadEntrySet());
+ foreach ($this->row_columns as $column)
+ $set->addEntrySet($column->getHtmlHeadEntrySet());
+
foreach ($this->extra_rows as $row)
$set->addEntrySet($row->getHtmlHeadEntrySet()); | get style sheets and javascript from row columns
svn commit r<I> | silverorange_swat | train |
92eb0f004a865e20bc0476968f2134414f374ae2 | diff --git a/pom.xml b/pom.xml
index <HASH>..<HASH> 100644
--- a/pom.xml
+++ b/pom.xml
@@ -34,6 +34,11 @@
<dependencies>
<dependency>
+ <groupId>com.tomgibara.fundament</groupId>
+ <artifactId>fundament</artifactId>
+ <version>1.0.0-SNAPSHOT</version>
+ </dependency>
+ <dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
<version>4.11</version>
diff --git a/src/main/java/com/tomgibara/streams/ReadStream.java b/src/main/java/com/tomgibara/streams/ReadStream.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/tomgibara/streams/ReadStream.java
+++ b/src/main/java/com/tomgibara/streams/ReadStream.java
@@ -16,6 +16,8 @@
*/
package com.tomgibara.streams;
+import com.tomgibara.fundament.Producer;
+
/**
* <p>
* An abstraction for reading basic Java types into a byte based stream.
@@ -253,6 +255,12 @@ public interface ReadStream extends CloseableStream {
return new String(cs);
}
+ // convenience methods
+
+ default <T> Producer<T> readWith(StreamDeserializer<T> deserializer) {
+ return () -> deserializer.deserialize(this);
+ }
+
// closeable
default void close() {
diff --git a/src/main/java/com/tomgibara/streams/WriteStream.java b/src/main/java/com/tomgibara/streams/WriteStream.java
index <HASH>..<HASH> 100755
--- a/src/main/java/com/tomgibara/streams/WriteStream.java
+++ b/src/main/java/com/tomgibara/streams/WriteStream.java
@@ -16,6 +16,8 @@
*/
package com.tomgibara.streams;
+import com.tomgibara.fundament.Consumer;
+
/**
* <p>
* An abstraction for writing basic Java types into a byte based stream.
@@ -256,6 +258,12 @@ public interface WriteStream extends CloseableStream {
}
}
+ // convenience methods
+
+ default <T> Consumer<T> writeWith(StreamSerializer<T> serializer) {
+ return v -> serializer.serialize(v, this);
+ }
+
// closeable
@Override | Adds convenient default de/serialization methods to streams. | tomgibara_streams | train |
e275779500541a842a4a99005da228c8eeebef5e | diff --git a/src/Analyser/NodeScopeResolver.php b/src/Analyser/NodeScopeResolver.php
index <HASH>..<HASH> 100644
--- a/src/Analyser/NodeScopeResolver.php
+++ b/src/Analyser/NodeScopeResolver.php
@@ -943,6 +943,8 @@ class NodeScopeResolver
$scope = $processVarAnnotation($matches[1], $matches[2]);
} elseif (preg_match('#@var\s+\$([a-zA-Z0-9_]+)\s+' . FileTypeMapper::TYPE_PATTERN . '#', $comment, $matches)) {
$scope = $processVarAnnotation($matches[2], $matches[1]);
+ } elseif (preg_match('#@var\s+' . FileTypeMapper::TYPE_PATTERN . '(?!\s+\$[a-zA-Z0-9_]+)#', $comment, $matches)) {
+ $scope = $processVarAnnotation($matches[1], $variableName);
}
}
}
diff --git a/tests/PHPStan/Analyser/data/var-annotations-alt.php b/tests/PHPStan/Analyser/data/var-annotations-alt.php
index <HASH>..<HASH> 100644
--- a/tests/PHPStan/Analyser/data/var-annotations-alt.php
+++ b/tests/PHPStan/Analyser/data/var-annotations-alt.php
@@ -93,4 +93,48 @@ class Foo
die;
}
+ public function doFooBar()
+ {
+ /* @var int */
+ $integer = getFoo();
+
+ /* @var bool */
+ $boolean = getFoo();
+
+ /* @var string */
+ $string = getFoo();
+
+ /* @var float */
+ $float = getFoo();
+
+ /* @var Lorem */
+ $loremObject = getFoo();
+
+ /* @var \AnotherNamespace\Bar */
+ $barObject = getFoo();
+
+ /* @var */
+ $mixed = getFoo();
+
+ /* @var array */
+ $array = getFoo();
+
+ /* @var bool|null */
+ $isNullable = getFoo();
+
+ /* @var callable */
+ $callable = getFoo();
+
+ /* @var self */
+ $self = getFoo();
+
+ /* @var float */
+ $invalidInteger = 1.0;
+
+ /* @var static */
+ $static = getFoo();
+
+ die;
+ }
+
}
diff --git a/tests/PHPStan/Analyser/data/var-annotations.php b/tests/PHPStan/Analyser/data/var-annotations.php
index <HASH>..<HASH> 100644
--- a/tests/PHPStan/Analyser/data/var-annotations.php
+++ b/tests/PHPStan/Analyser/data/var-annotations.php
@@ -93,4 +93,48 @@ class Foo
die;
}
+ public function doFooBar()
+ {
+ /** @var int */
+ $integer = getFoo();
+
+ /** @var bool */
+ $boolean = getFoo();
+
+ /** @var string */
+ $string = getFoo();
+
+ /** @var float */
+ $float = getFoo();
+
+ /** @var Lorem */
+ $loremObject = getFoo();
+
+ /** @var \AnotherNamespace\Bar */
+ $barObject = getFoo();
+
+ /** @var */
+ $mixed = getFoo();
+
+ /** @var array */
+ $array = getFoo();
+
+ /** @var bool|null */
+ $isNullable = getFoo();
+
+ /** @var callable */
+ $callable = getFoo();
+
+ /** @var self */
+ $self = getFoo();
+
+ /** @var float */
+ $invalidInteger = 1.0;
+
+ /** @var static */
+ $static = getFoo();
+
+ die;
+ }
+
}
diff --git a/tests/PHPStan/Type/FileTypeMapperTest.php b/tests/PHPStan/Type/FileTypeMapperTest.php
index <HASH>..<HASH> 100644
--- a/tests/PHPStan/Type/FileTypeMapperTest.php
+++ b/tests/PHPStan/Type/FileTypeMapperTest.php
@@ -28,6 +28,7 @@ class FileTypeMapperTest extends \PHPStan\TestCase
'float' => 'float',
'string | null' => 'string|null',
'stdClass | null' => 'stdClass|null',
+ 'Bar' => 'Bar',
];
$this->assertEquals(array_keys($expected), array_keys($typeMap));
diff --git a/tests/PHPStan/Type/data/annotations.php b/tests/PHPStan/Type/data/annotations.php
index <HASH>..<HASH> 100644
--- a/tests/PHPStan/Type/data/annotations.php
+++ b/tests/PHPStan/Type/data/annotations.php
@@ -14,4 +14,11 @@
class Foo
{
+
+ public function doSomething()
+ {
+ /** @var Bar */
+ $number = $this;
+ }
+
} | Implemented parsing type of variable from var annotation with type only | phpstan_phpstan | train |
3a74c469e5fb53aee66caa1aa3638dbe278a21f1 | diff --git a/builder/amazon/common/run_config.go b/builder/amazon/common/run_config.go
index <HASH>..<HASH> 100644
--- a/builder/amazon/common/run_config.go
+++ b/builder/amazon/common/run_config.go
@@ -91,6 +91,7 @@ func (c *RunConfig) Prepare(t *packer.ConfigTemplate) []error {
templates := map[string]*string{
"iam_instance_profile": &c.IamInstanceProfile,
"instance_type": &c.InstanceType,
+ "spot_price": &c.SpotPrice,
"ssh_timeout": &c.RawSSHTimeout,
"ssh_username": &c.SSHUsername,
"ssh_private_key_file": &c.SSHPrivateKeyFile, | Add "spot_price" param into template processing | hashicorp_packer | train |
320d1e225a8299b4d4553d00645b7b46029a6534 | diff --git a/director/db/migrations/20110209010747_initial.rb b/director/db/migrations/20110209010747_initial.rb
index <HASH>..<HASH> 100644
--- a/director/db/migrations/20110209010747_initial.rb
+++ b/director/db/migrations/20110209010747_initial.rb
@@ -120,4 +120,4 @@ Sequel.migration do
Integer :size, :null => false
end
end
-end
\ No newline at end of file
+end
diff --git a/director/db/migrations/20110518225809_remove_cid_constrain.rb b/director/db/migrations/20110518225809_remove_cid_constrain.rb
index <HASH>..<HASH> 100644
--- a/director/db/migrations/20110518225809_remove_cid_constrain.rb
+++ b/director/db/migrations/20110518225809_remove_cid_constrain.rb
@@ -6,8 +6,6 @@ Sequel.migration do
end
down do
- alter_table(:vms) do
- set_column_allow_null :cid, false
- end
+ raise Sequel::Error, "Irreversible migration, vms:cid might contain nulls so we cannot enforce 'not null' constraint"
end
end
diff --git a/director/lib/director/config.rb b/director/lib/director/config.rb
index <HASH>..<HASH> 100644
--- a/director/lib/director/config.rb
+++ b/director/lib/director/config.rb
@@ -5,16 +5,37 @@ module Bosh::Director
class << self
- attr_accessor :base_dir
- attr_accessor :logger
- attr_accessor :uuid
- attr_accessor :process_uuid
- attr_accessor :db
- attr_accessor :name
-
- attr_reader :redis_options
- attr_reader :cloud_options
- attr_reader :revision
+ CONFIG_OPTIONS = \
+ [
+ :base_dir,
+ :logger,
+ :uuid,
+ :process_uuid,
+ :db,
+ :name,
+ :redis_options,
+ :cloud_options,
+ :revision
+ ]
+
+ CONFIG_OPTIONS.each do |option|
+ attr_accessor option
+ end
+
+ def clear
+ CONFIG_OPTIONS.each do |option|
+ self.instance_variable_set("@#{option}".to_sym, nil)
+ end
+
+ Thread.list.each do |thr|
+ thr[:bosh] = nil
+ end
+
+ @blobstore = nil
+ @nats = nil
+ @nats_rpc = nil
+ @cloud = nil
+ end
def configure(config)
@base_dir = config["dir"]
diff --git a/director/spec/Rakefile b/director/spec/Rakefile
index <HASH>..<HASH> 100644
--- a/director/spec/Rakefile
+++ b/director/spec/Rakefile
@@ -73,7 +73,7 @@ namespace "spec" do
RSpec::Core::RakeTask.new("run_unit") do |t|
t.gemfile = gemfile
t.rspec_opts = spec_opts
- t.pattern = "unit/*_spec.rb"
+ t.pattern = "unit/**/*_spec.rb"
end
RSpec::Core::RakeTask.new("run_functional") do |t|
diff --git a/director/spec/spec_helper.rb b/director/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/director/spec/spec_helper.rb
+++ b/director/spec/spec_helper.rb
@@ -152,11 +152,15 @@ end
Rspec.configure do |rspec_config|
rspec_config.before(:each) do
- # hack to clean-up the db for the spec tests.
- Bosh::Director::Models::Instance.each { |c| c.destroy }
- Bosh::Director::Models::Vm.each { |c| c.destroy }
+ Bosh::Director::Config.clear
+
+ db = Sequel::Model.db
+ db.execute("PRAGMA foreign_keys = OFF")
+ db.tables.each do |table|
+ db.drop_table(table)
+ end
+ db.execute("PRAGMA foreign_keys = ON")
- Sequel::Migrator.apply(db, migrate_dir, 0)
Sequel::Migrator.apply(db, migrate_dir, nil)
FileUtils.mkdir_p(bosh_dir)
Bosh::Director::Config.logger = logger
diff --git a/director/spec/unit/jobs/delete_release_spec.rb b/director/spec/unit/jobs/delete_release_spec.rb
index <HASH>..<HASH> 100644
--- a/director/spec/unit/jobs/delete_release_spec.rb
+++ b/director/spec/unit/jobs/delete_release_spec.rb
@@ -241,7 +241,7 @@ describe Bosh::Director::Jobs::DeleteRelease do
lock = stub("lock")
Bosh::Director::Lock.stub!(:new).with("lock:release:test_release", :timeout => 10).
- and_return(lock)
+ and_return(lock)
lock.should_receive(:lock).exactly(2).times.and_yield
job1.perform | Clear director configuration in between tests.
Clear director DB in between tests by dropping tables
(as we might have irreversible migrations).
Change-Id: Ica8b4d2cd<I>d5d<I>c<I>bb<I>d<I>b<I> | cloudfoundry_bosh | train |
b5e9faa67e71d5757c0b39af0ca839e68c6ddd16 | diff --git a/message/lib.php b/message/lib.php
index <HASH>..<HASH> 100644
--- a/message/lib.php
+++ b/message/lib.php
@@ -708,32 +708,41 @@ function message_get_recent_conversations($user, $limitfrom=0, $limitto=100) {
// There is a separate query for read and unread messages as they are stored
// in different tables. They were originally retrieved in one query but it
// was so large that it was difficult to be confident in its correctness.
- $sql = "SELECT $userfields,
+ $uniquefield = $DB->sql_concat_join("'-'", array('message.useridfrom', 'message.useridto'));
+ $sql = "SELECT $uniquefield, $userfields,
message.id as mid, message.notification, message.smallmessage, message.fullmessage,
message.fullmessagehtml, message.fullmessageformat, message.timecreated,
contact.id as contactlistid, contact.blocked
-
FROM {message_read} message
- JOIN {user} otheruser ON otheruser.id = CASE
- WHEN message.useridto = :userid1 THEN message.useridfrom
- ELSE message.useridto END
- LEFT JOIN {message_contacts} contact ON contact.userid = :userid2 AND contact.contactid = otheruser.id
-
- WHERE otheruser.deleted = 0
- AND (message.useridto = :userid3 OR message.useridfrom = :userid4)
- AND message.notification = 0
- AND NOT EXISTS (
- SELECT 1
- FROM {message_read} othermessage
- WHERE ((othermessage.useridto = :userid5 AND othermessage.useridfrom = otheruser.id) OR
- (othermessage.useridfrom = :userid6 AND othermessage.useridto = otheruser.id))
- AND (othermessage.timecreated > message.timecreated OR (
- othermessage.timecreated = message.timecreated AND othermessage.id > message.id))
- )
-
+ JOIN (
+ SELECT MAX(id) AS messageid,
+ matchedmessage.useridto,
+ matchedmessage.useridfrom
+ FROM {message_read} matchedmessage
+ INNER JOIN (
+ SELECT MAX(recentmessages.timecreated) timecreated,
+ recentmessages.useridfrom,
+ recentmessages.useridto
+ FROM {message_read} recentmessages
+ WHERE (recentmessages.useridfrom = :userid1 OR recentmessages.useridto = :userid2)
+ GROUP BY recentmessages.useridfrom, recentmessages.useridto
+ ) recent ON matchedmessage.useridto = recent.useridto
+ AND matchedmessage.useridfrom = recent.useridfrom
+ AND matchedmessage.timecreated = recent.timecreated
+ GROUP BY matchedmessage.useridto, matchedmessage.useridfrom
+ ) messagesubset ON messagesubset.messageid = message.id
+ JOIN {user} otheruser ON (message.useridfrom = :userid4 AND message.useridto = otheruser.id)
+ OR (message.useridto = :userid5 AND message.useridfrom = otheruser.id)
+ LEFT JOIN {message_contacts} contact ON contact.userid = :userid3 AND contact.userid = otheruser.id
+ WHERE otheruser.deleted = 0 AND message.notification = 0
ORDER BY message.timecreated DESC";
- $params = array('userid1' => $user->id, 'userid2' => $user->id, 'userid3' => $user->id,
- 'userid4' => $user->id, 'userid5' => $user->id, 'userid6' => $user->id);
+ $params = array(
+ 'userid1' => $user->id,
+ 'userid2' => $user->id,
+ 'userid3' => $user->id,
+ 'userid4' => $user->id,
+ 'userid5' => $user->id,
+ );
$read = $DB->get_records_sql($sql, $params, $limitfrom, $limitto);
// We want to get the messages that have not been read. These are stored in the 'message' table. It is the
@@ -742,16 +751,23 @@ function message_get_recent_conversations($user, $limitfrom=0, $limitto=100) {
$sql = str_replace('{message_read}', '{message}', $sql);
$unread = $DB->get_records_sql($sql, $params, $limitfrom, $limitto);
- $conversations = array();
-
// Union the 2 result sets together looking for the message with the most
// recent timecreated for each other user.
// $conversation->id (the array key) is the other user's ID.
$conversation_arrays = array($unread, $read);
foreach ($conversation_arrays as $conversation_array) {
foreach ($conversation_array as $conversation) {
- if (empty($conversations[$conversation->id]) || $conversations[$conversation->id]->timecreated < $conversation->timecreated ) {
+ if (!isset($conversations[$conversation->id])) {
$conversations[$conversation->id] = $conversation;
+ } else {
+ $current = $conversations[$conversation->id];
+ if ($current->timecreated < $conversation->timecreated) {
+ $conversations[$conversation->id] = $conversation;
+ } else if ($current->timecreated == $conversation->timecreated) {
+ if ($current->mid < $conversation->mid) {
+ $conversations[$conversation->id] = $conversation;
+ }
+ }
}
}
} | MDL-<I> message: Rewrite recent conversations | moodle_moodle | train |
c0673a2b9484bd060c313ff90544447c276e64c3 | diff --git a/flink-runtime/src/main/java/org/apache/flink/runtime/state/StateBackendLoader.java b/flink-runtime/src/main/java/org/apache/flink/runtime/state/StateBackendLoader.java
index <HASH>..<HASH> 100644
--- a/flink-runtime/src/main/java/org/apache/flink/runtime/state/StateBackendLoader.java
+++ b/flink-runtime/src/main/java/org/apache/flink/runtime/state/StateBackendLoader.java
@@ -197,15 +197,11 @@ public class StateBackendLoader {
// (1) the application defined state backend has precedence
if (fromApplication != null) {
- if (logger != null) {
- logger.info("Using application-defined state backend: {}", fromApplication);
- }
-
// see if this is supposed to pick up additional configuration parameters
if (fromApplication instanceof ConfigurableStateBackend) {
// needs to pick up configuration
if (logger != null) {
- logger.info("Configuring application-defined state backend with job/cluster config");
+ logger.info("Using job/cluster config to configure application-defined state backend: {}", fromApplication);
}
backend = ((ConfigurableStateBackend) fromApplication).configure(config, classLoader);
@@ -214,6 +210,10 @@ public class StateBackendLoader {
// keep as is!
backend = fromApplication;
}
+
+ if (logger != null) {
+ logger.info("Using application-defined state backend: {}", backend);
+ }
}
else {
// (2) check if the config defines a state backend | [FLINK-<I>][statebackend] Log application-defined state backends after configuration | apache_flink | train |
9f56056c8a85e9b2c54a450b1e6248837a470255 | diff --git a/blkn/src/constants.js b/blkn/src/constants.js
index <HASH>..<HASH> 100644
--- a/blkn/src/constants.js
+++ b/blkn/src/constants.js
@@ -1,10 +1,32 @@
+// eslint-disable import/prefer-default-export
// @flow
-// eslint-disable-next-line import/prefer-default-export
+// colors from styleguide https://marvelapp.com/79c0g9e/screen/36355252
+export const colors = {
+ white: "#fff",
+ alabaster: "#f5f7f9",
+ silver: "#e8edf1",
+ casper: "#bac7d5",
+ polo: "#7f91a8",
+ shuttle: "#46515e",
+ smoke: "#171b1e",
+ green: "#127f22",
+ harp: "#e7f3e8",
+ orange: "#e73c04",
+ sand: "#fcf1cd",
+ red: "#d21c1c",
+ linen: "#fae8e8",
+ azure: "#10709f",
+ sky: "#e0f6ff",
+ teal: "#00a991",
+ niagara: "#009c85",
+ gold: "#ffd700",
+};
+
export const fontColors = {
- primary: "#46515e",
- secondary: "#7f91a8",
- attention: "#171B1E",
- error: "#D02228",
- input: "#bac7d5",
+ primary: colors.shuttle,
+ secondary: colors.polo,
+ attention: colors.smoke,
+ error: colors.red,
+ input: colors.casper,
}; | BLKN: add colors to constants | kiwicom_orbit-components | train |
c36a30371f2a0684e7a73b199ccd448e5521c1ec | diff --git a/src/Macros/Sixth.php b/src/Macros/Sixth.php
index <HASH>..<HASH> 100644
--- a/src/Macros/Sixth.php
+++ b/src/Macros/Sixth.php
@@ -7,7 +7,7 @@ class Sixth
public function __invoke()
{
return function () {
- return $this->get(5);
+ return $this->skip(5)->first();
};
}
} | Update Sixth.php (#<I>)
* Update Sixth.php
* Update Sixth.php | spatie_laravel-collection-macros | train |
0e231cfd19257fbabc778b8401d04c90407f2bb9 | diff --git a/README.markdown b/README.markdown
index <HASH>..<HASH> 100644
--- a/README.markdown
+++ b/README.markdown
@@ -11,15 +11,12 @@ compatibility and standards compliance][what_is].
Requirements
------------
-* PHP 5.2.0 or newer
+* PHP 5.3+
* libxml2 (certain 2.7.x releases are too buggy for words, and will crash)
* Either the iconv or mbstring extension
* cURL or fsockopen()
* PCRE support
-If you're looking for PHP 4.x support, pull the "one-dot-two" branch, as that's
-the last version to support PHP 4.x.
-
What comes in the package?
--------------------------
diff --git a/compatibility_test/sp_compatibility_test.php b/compatibility_test/sp_compatibility_test.php
index <HASH>..<HASH> 100644
--- a/compatibility_test/sp_compatibility_test.php
+++ b/compatibility_test/sp_compatibility_test.php
@@ -14,7 +14,7 @@ else if (isset($_GET['background']))
exit;
}
-$php_ok = (function_exists('version_compare') && version_compare(phpversion(), '5.2.0', '>='));
+$php_ok = (function_exists('version_compare') && version_compare(phpversion(), '5.3.0', '>='));
$pcre_ok = extension_loaded('pcre');
$curl_ok = function_exists('curl_exec');
$zlib_ok = extension_loaded('zlib');
@@ -215,7 +215,7 @@ function fnLoadPngs() {
<tbody>
<tr class="<?php echo ($php_ok) ? 'enabled' : 'disabled'; ?>">
<td>PHP</td>
- <td>5.2.0 or higher</td>
+ <td>5.3.0 or higher</td>
<td><?php echo phpversion(); ?></td>
</tr>
<tr class="<?php echo ($xml_ok) ? 'enabled, and sane' : 'disabled, or broken'; ?>">
diff --git a/library/SimplePie.php b/library/SimplePie.php
index <HASH>..<HASH> 100755
--- a/library/SimplePie.php
+++ b/library/SimplePie.php
@@ -659,9 +659,9 @@ class SimplePie
*/
public function __construct()
{
- if (version_compare(PHP_VERSION, '5.2', '<'))
+ if (version_compare(PHP_VERSION, '5.3', '<'))
{
- trigger_error('PHP 4.x, 5.0 and 5.1 are no longer supported. Please upgrade to PHP 5.2 or newer.');
+ trigger_error('Please upgrade to PHP 5.3 or newer.');
die();
}
diff --git a/tests/EncodingTest.php b/tests/EncodingTest.php
index <HASH>..<HASH> 100644
--- a/tests/EncodingTest.php
+++ b/tests/EncodingTest.php
@@ -130,14 +130,7 @@ class EncodingTest extends PHPUnit_Framework_TestCase
public function test_convert_UTF8_mbstring($input, $expected, $encoding)
{
$encoding = SimplePie_Misc::encoding($encoding);
- if (version_compare(phpversion(), '5.3', '<'))
- {
- $this->assertEquals($expected, Mock_Misc::__callStatic('change_encoding_mbstring', array($input, $encoding, 'UTF-8')));
- }
- else
- {
- $this->assertEquals($expected, Mock_Misc::change_encoding_mbstring($input, $encoding, 'UTF-8'));
- }
+ $this->assertEquals($expected, Mock_Misc::change_encoding_mbstring($input, $encoding, 'UTF-8'));
}
/**
@@ -150,13 +143,7 @@ class EncodingTest extends PHPUnit_Framework_TestCase
public function test_convert_UTF8_iconv($input, $expected, $encoding)
{
$encoding = SimplePie_Misc::encoding($encoding);
- if (version_compare(phpversion(), '5.3', '<'))
- {
- $this->assertEquals($expected, Mock_Misc::__callStatic('change_encoding_iconv', array($input, $encoding, 'UTF-8')));
- }
- else {
- $this->assertEquals($expected, Mock_Misc::change_encoding_iconv($input, $encoding, 'UTF-8'));
- }
+ $this->assertEquals($expected, Mock_Misc::change_encoding_iconv($input, $encoding, 'UTF-8'));
}
/**#@-*/ | PHP <I> is not supported anymore
Patches such as <URL> state=1 in
./simplepie/library/SimplePie/ on line <I>
Parse error: syntax error, unexpected T_STRING in
./simplepie/library/SimplePie/Parser.php on line <I>
```
The Web site should be updated as well
<URL> | simplepie_simplepie | train |
eee260ccd89830077fcd573407f745956d4f2e7e | diff --git a/.eslintrc b/.eslintrc
index <HASH>..<HASH> 100644
--- a/.eslintrc
+++ b/.eslintrc
@@ -7,7 +7,7 @@
"complexity": [2, 15],
"eqeqeq": [2, "allow-null"],
"max-depth": [1, 4],
- "max-statements": [2, 25],
+ "max-statements": [2, 26],
"no-extra-parens": [1],
"no-magic-numbers": [0],
"no-restricted-syntax": [2, "BreakStatement", "ContinueStatement", "DebuggerStatement", "LabeledStatement", "WithStatement"]
diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -44,7 +44,8 @@ module.exports = function extend() {
target = arguments[1] || {};
// skip the boolean and the target
i = 2;
- } else if ((typeof target !== 'object' && typeof target !== 'function') || target == null) {
+ }
+ if (target == null || (typeof target !== 'object' && typeof target !== 'function')) {
target = {};
}
diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -619,3 +619,10 @@ test('works without Array.isArray', function (t) {
Array.isArray = savedIsArray;
t.end();
});
+
+test('non-object target', function (t) {
+ t.deepEqual(extend(3.14, { a: 'b' }), { a: 'b' });
+ t.deepEqual(extend(true, 3.14, { a: 'b' }), { a: 'b' });
+
+ t.end();
+}); | [Fix] deep extending should work with a non-object.
Fixes #<I>. | justmoon_node-extend | train |
b16a213e0633bce055ee833e67da13014a9b7a6b | diff --git a/nodes/time/time.js b/nodes/time/time.js
index <HASH>..<HASH> 100644
--- a/nodes/time/time.js
+++ b/nodes/time/time.js
@@ -85,6 +85,7 @@ module.exports = function (RED) {
// Doesn't match time format 00:00:00
if (digits === null) {
if (!isValidDate(dateString)) {
+ this.debugToClient(`Invalid date`);
throw new Error(RED._('ha-time.errors.invalid_date'));
}
crontab = new Date(dateString);
@@ -95,6 +96,7 @@ module.exports = function (RED) {
crontab.setSeconds(digits.seconds);
}
} catch (e) {
+ this.debugToClient(e.message);
this.setStatusFailed(e.message);
return;
}
@@ -112,6 +114,7 @@ module.exports = function (RED) {
if (this.nodeConfig.repeatDaily) {
crontab = `${crontab.getSeconds()} ${crontab.getMinutes()} ${crontab.getHours()} * * *`;
} else if (crontab.getTime() < Date.now()) {
+ this.debugToClient(`date in the past`);
this.setStatusFailed(RED._('ha-time.status.in_the_past'));
return;
}
diff --git a/nodes/time/ui-time.html b/nodes/time/ui-time.html
index <HASH>..<HASH> 100644
--- a/nodes/time/ui-time.html
+++ b/nodes/time/ui-time.html
@@ -67,9 +67,8 @@
<div class="form-row checkbox-option">
<label>
- <input type="checkbox" id="node-input-randomOffset" /><span
- data-i18n="ha-time.label.randomize_offset"
- ></span>
+ <input type="checkbox" id="node-input-randomOffset" />
+ <span data-i18n="ha-time.label.randomize_offset"></span>
</label>
</div>
@@ -86,8 +85,12 @@
<div class="form-row checkbox-option">
<label>
- <input type="checkbox" id="node-input-repeatDaily" /><span
- data-i18n="ha-time.label.repeat_daily"
- ></span>
+ <input type="checkbox" id="node-input-repeatDaily" />
+ <span data-i18n="ha-time.label.repeat_daily"></span>
</label>
</div>
+
+<div class="form-row checkbox-option">
+ <input type="checkbox" id="node-input-debugenabled" />
+ <label for="node-input-debugenabled"> Show Debug Information </label>
+</div>
diff --git a/nodes/time/ui-time.js b/nodes/time/ui-time.js
index <HASH>..<HASH> 100644
--- a/nodes/time/ui-time.js
+++ b/nodes/time/ui-time.js
@@ -32,6 +32,7 @@ RED.nodes.registerType('ha-time', {
repeatDaily: { value: false },
payload: { value: '$entity().state' },
payloadType: { value: 'jsonata' },
+ debugenabled: { value: true },
},
oneditprepare: function () {
const node = this; | chore(time): Add more debug logs | zachowj_node-red-contrib-home-assistant-websocket | train |
aca70c6234f31e0123f4dc62058777b7c9c80cd0 | diff --git a/pypsa/linopt.py b/pypsa/linopt.py
index <HASH>..<HASH> 100644
--- a/pypsa/linopt.py
+++ b/pypsa/linopt.py
@@ -288,6 +288,13 @@ def get_con(n, c, attr, pop=False):
def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile,
solver_options, keep_files, warmstart=None,
store_basis=True):
+ """
+ Solving function. Reads the linear problem file and passes it to the cbc
+ solver. If the solution is sucessful it returns variable solutions and
+ constraint dual values.
+
+ For more information on the solver options, run 'cbc' in your shell
+ """
#printingOptions is about what goes in solution file
command = f"cbc -printingOptions all -import {problem_fn} "
if warmstart:
@@ -338,7 +345,14 @@ def run_and_read_cbc(n, problem_fn, solution_fn, solver_logfile,
def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile,
solver_options, keep_files, warmstart=None,
store_basis=True):
- # for solver_options lookup https://kam.mff.cuni.cz/~elias/glpk.pdf
+ """
+ Solving function. Reads the linear problem file and passes it to the glpk
+ solver. If the solution is sucessful it returns variable solutions and
+ constraint dual values.
+
+ For more information on the glpk solver options:
+ https://kam.mff.cuni.cz/~elias/glpk.pdf
+ """
command = (f"glpsol --lp {problem_fn} --output {solution_fn}")
if solver_logfile is not None:
command += f' --log {solver_logfile}'
@@ -386,9 +400,15 @@ def run_and_read_glpk(n, problem_fn, solution_fn, solver_logfile,
def run_and_read_gurobi(n, problem_fn, solution_fn, solver_logfile,
solver_options, keep_files, warmstart=None,
store_basis=True):
+ """
+ Solving function. Reads the linear problem file and passes it to the gurobi
+ solver. If the solution is sucessful it returns variable solutions and
+ constraint dual values. Gurobipy must be installed for using this function
+
+ For more information on solver options:
+ https://www.gurobi.com/documentation/{gurobi_verion}/refman/parameter_descriptions.html
+ """
import gurobipy
- # for solver options see
- # https://www.gurobi.com/documentation/8.1/refman/parameter_descriptions.html
if (solver_logfile is not None) and (solver_options is not None):
solver_options["logfile"] = solver_logfile | linopt: add docstrings for run_and_read_ | PyPSA_PyPSA | train |
e58c22dbd0e1bd7f0de095429f2cf464d9173e14 | diff --git a/karma.conf.js b/karma.conf.js
index <HASH>..<HASH> 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -26,6 +26,6 @@ module.exports = function(config) {
logLevel: config.LOG_INFO,
autoWatch: false,
browsers: ['PhantomJS'],
- singleRun: false
+ singleRun: true
});
};
diff --git a/karma.saucelabs.conf.js b/karma.saucelabs.conf.js
index <HASH>..<HASH> 100644
--- a/karma.saucelabs.conf.js
+++ b/karma.saucelabs.conf.js
@@ -6,6 +6,8 @@
*/
module.exports = function(config) {
+ require('./karma.conf')(config);
+
var customLaunchers = {
sl_chrome: {
base: 'SauceLabs',
@@ -37,37 +39,15 @@ module.exports = function(config) {
};
config.set({
+ reporters: ['saucelabs'],
- basePath: '',
-
- files: [
- 'test/vendor/es5-shim.js',
- 'test/vendor/jquery-2.1.1.js',
- 'test/client/**/*.test.js'
- ],
-
- frameworks: ['mocha', 'browserify'],
-
- browserify: {
- debug: true
- },
-
- preprocessors: {
- 'test/client/**/*.test.js': ['browserify']
- },
-
- reporters: ['progress', 'saucelabs'],
-
- port: 9876,
- colors: true,
- logLevel: config.LOG_INFO,
- autoWatch: false,
sauceLabs: {
testName: 'react-mini-router client tests',
startConnect: false
},
+
customLaunchers: customLaunchers,
- browsers: Object.keys(customLaunchers),
- singleRun: true
+
+ browsers: Object.keys(customLaunchers)
});
};
diff --git a/test/client/detect.test.js b/test/client/detect.test.js
index <HASH>..<HASH> 100644
--- a/test/client/detect.test.js
+++ b/test/client/detect.test.js
@@ -9,7 +9,9 @@ describe('detect', function() {
});
it('Should detect if HTML5 History API is available.', function() {
- assert.ok(detect.hasPushState);
+ var isIE9 = navigator.userAgent.indexOf('MSIE 9') > -1;
+
+ assert.ok(detect.hasPushState !== isIE9);
});
it('Should detect if it is in hashbang url mode.', function() { | Fix detect history API test to correctly pass even under IE9.
Default to singleRun mode for karma.
Refactor saucelabs karma conf to use the base settings from the default config. | larrymyers_react-mini-router | train |
b32e48ede517797330be4e9f1057f8d324ba3ff0 | diff --git a/dropwizard-metrics/src/main/java/io/dropwizard/metrics/CsvReporterFactory.java b/dropwizard-metrics/src/main/java/io/dropwizard/metrics/CsvReporterFactory.java
index <HASH>..<HASH> 100644
--- a/dropwizard-metrics/src/main/java/io/dropwizard/metrics/CsvReporterFactory.java
+++ b/dropwizard-metrics/src/main/java/io/dropwizard/metrics/CsvReporterFactory.java
@@ -54,7 +54,11 @@ public class CsvReporterFactory extends BaseFormattedReporterFactory {
@Override
public ScheduledReporter build(MetricRegistry registry) {
- file.mkdirs();
+ boolean creation = file.mkdirs();
+ if (!creation && !file.exists()) {
+ String msg = "Failed to create" + file.getAbsolutePath();
+ throw new RuntimeException(msg);
+ }
return CsvReporter.forRegistry(registry)
.convertDurationsTo(getDurationUnit()) | Appease FindBugs with mkdirs return value | dropwizard_dropwizard | train |
e41a4252264d73eba03acd66879dbf2548e09457 | diff --git a/import_aws_route_table.go b/import_aws_route_table.go
index <HASH>..<HASH> 100644
--- a/import_aws_route_table.go
+++ b/import_aws_route_table.go
@@ -28,7 +28,7 @@ func resourceAwsRouteTableImportState(
// Start building our results
results := make([]*schema.ResourceData, 1,
- 1+len(table.Associations)+len(table.Routes))
+ 2+len(table.Associations)+len(table.Routes))
results[0] = d
{
@@ -68,5 +68,25 @@ func resourceAwsRouteTableImportState(
}
}
+ {
+ // Construct the main associations. We could do this above but
+ // I keep this as a separate section since it is a separate resource.
+ subResource := resourceAwsMainRouteTableAssociation()
+ for _, assoc := range table.Associations {
+ if !*assoc.Main {
+ // Ignore
+ continue
+ }
+
+ // Minimal data for route
+ d := subResource.Data(nil)
+ d.SetType("aws_main_route_table_association")
+ d.Set("route_table_id", id)
+ d.Set("vpc_id", table.VpcId)
+ d.SetId(*assoc.RouteTableAssociationId)
+ results = append(results, d)
+ }
+ }
+
return results, nil
} | providers/aws: import main route table association | terraform-providers_terraform-provider-aws | train |
1a63e6fead3a15d03b4760b9322fa4013c8529e2 | diff --git a/pkg/kubelet/kubelet_dockershim_nodocker.go b/pkg/kubelet/kubelet_dockershim_nodocker.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/kubelet_dockershim_nodocker.go
+++ b/pkg/kubelet/kubelet_dockershim_nodocker.go
@@ -32,5 +32,5 @@ func runDockershim(kubeCfg *kubeletconfiginternal.KubeletConfiguration,
remoteImageEndpoint string,
nonMasqueradeCIDR string) error {
- return fmt.Errorf("trying to use docker runtime, w/ Kubelet compiled w/o docker support")
+ return fmt.Errorf("trying to use docker runtime when Kubelet was compiled without docker support")
} | Clarify `kubelet_dockershim_nodocker` error message
Clarify the error message returned when trying to use the docker runtime
on a Kubelet that was compiled without Docker.
We removed the "w/" and "w/o", which can be confusing abbreviations, and
also add slightly more detail on the actual error. | kubernetes_kubernetes | train |
7e99a5e4e9acc5ca19406658bca5ce259f480ef7 | diff --git a/noxfile.py b/noxfile.py
index <HASH>..<HASH> 100644
--- a/noxfile.py
+++ b/noxfile.py
@@ -52,7 +52,8 @@ def check_docs(session):
@nox_poetry.session()
def build_docs(session):
- _install(session, dependencies=['sphinx', 'sphinx-immaterial',
+ _install(session, dependencies=['https://github.com/brechtm/sphinx/archive/refs/heads/toctree-index.zip',
+ 'sphinx-immaterial',
'sphinxcontrib-autoprogram'])
session.chdir('doc')
session.run('python', 'build.py', *session.posargs) | Nox: temporary work-around for cjolowicz/nox-poetry#<I> | brechtm_rinohtype | train |
bc7451dc7c932ce314abf635b017ed11d041c9e1 | diff --git a/ryu/services/protocols/ovsdb/client.py b/ryu/services/protocols/ovsdb/client.py
index <HASH>..<HASH> 100644
--- a/ryu/services/protocols/ovsdb/client.py
+++ b/ryu/services/protocols/ovsdb/client.py
@@ -300,7 +300,7 @@ class RemoteOvsdb(app_manager.RyuApp):
schema_exclude_columns)
fsm = reconnect.Reconnect(now())
- fsm.set_name('%s:%s' % address)
+ fsm.set_name('%s:%s' % address[:2])
fsm.enable(now())
fsm.set_passive(True, now())
fsm.set_max_tries(-1)
@@ -390,8 +390,8 @@ class RemoteOvsdb(app_manager.RyuApp):
if proxy_ev_cls:
self.send_event_to_observers(proxy_ev_cls(ev))
except Exception:
- self.logger.exception('Error submitting specific event for OVSDB',
- self.system_id)
+ self.logger.exception(
+ 'Error submitting specific event for OVSDB %s', self.system_id)
def _idl_loop(self):
while self.is_active:
diff --git a/ryu/services/protocols/ovsdb/manager.py b/ryu/services/protocols/ovsdb/manager.py
index <HASH>..<HASH> 100644
--- a/ryu/services/protocols/ovsdb/manager.py
+++ b/ryu/services/protocols/ovsdb/manager.py
@@ -15,6 +15,7 @@
import ssl
import socket
+import netaddr
from ryu import cfg
from ryu.base import app_manager
@@ -91,7 +92,12 @@ class OVSDB(app_manager.RyuApp):
sock.close()
continue
- self.logger.debug('New connection from %s:%s' % client_address)
+ if netaddr.valid_ipv6(client_address[0]):
+ self.logger.debug(
+ 'New connection from [%s]:%s' % client_address[:2])
+ else:
+ self.logger.debug(
+ 'New connection from %s:%s' % client_address[:2])
t = hub.spawn(self._start_remote, sock, client_address)
self.threads.append(t)
@@ -158,7 +164,11 @@ class OVSDB(app_manager.RyuApp):
sock.close()
def start(self):
- server = hub.listen((self._address, self._port))
+ if netaddr.valid_ipv6(self._address):
+ server = hub.listen(
+ (self._address, self._port), family=socket.AF_INET6)
+ else:
+ server = hub.listen((self._address, self._port))
key = self.CONF.ovsdb.mngr_privkey or self.CONF.ctl_privkey
cert = self.CONF.ovsdb.mngr_cert or self.CONF.ctl_cert
@@ -173,8 +183,12 @@ class OVSDB(app_manager.RyuApp):
self._server = server
- self.logger.info('Listening on %s:%s for clients' % (self._address,
- self._port))
+ if netaddr.valid_ipv6(self._address):
+ self.logger.info(
+ 'Listening on [%s]:%s for clients', self._address, self._port)
+ else:
+ self.logger.info(
+ 'Listening on %s:%s for clients', self._address, self._port)
t = hub.spawn(self._accept, self._server)
super(OVSDB, self).start()
return t | services/ovsdb: Listen on IPv6 Address
This patch enables Ryu to listen ovsdb connections on IPv6 Address.
Note that you need to make a config file like belows:
[ovsdb]
address=:: | osrg_ryu | train |
eb7bcb6f3618eda11c07bc49cda4dbaff1f729d9 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ readme = open("README.rst").read()
setup(
name = "emili",
- version = "1.3~git",
+ version = "1.3",
description = "Mail sending Python/CLI interface using markdown or ANSI codes based content",
author = "David Garcia Garzon",
author_email = "[email protected]", | preparing for <I> release | Som-Energia_emili | train |
77218f7fc1ab01a5a92c5d1b9e621644a1dae8d2 | diff --git a/src/Message/FetchPaymentMethodsResponse.php b/src/Message/FetchPaymentMethodsResponse.php
index <HASH>..<HASH> 100644
--- a/src/Message/FetchPaymentMethodsResponse.php
+++ b/src/Message/FetchPaymentMethodsResponse.php
@@ -8,6 +8,18 @@ use Omnipay\Common\PaymentMethod;
class FetchPaymentMethodsResponse extends AbstractResponse implements FetchPaymentMethodsResponseInterface
{
+ protected $names = array(
+ 'ideal' => 'iDEAL',
+ 'ecare' => 'ecare',
+ 'ebill' => 'ebill',
+ 'overboeking' => 'Overboeking',
+ 'sofort' => 'DIRECTebanking/SofortBanking',
+ 'mistercash' => 'MisterCash/BanContact',
+ 'webshop' => 'WebShop GiftCard',
+ 'fijncadeau' => 'FijnCadeau',
+ 'podium' => 'Podium Cadeaukaart',
+ );
+
/**
* {@inheritdoc}
*/
@@ -25,11 +37,12 @@ class FetchPaymentMethodsResponse extends AbstractResponse implements FetchPayme
if (isset($this->data->merchant->payments)) {
foreach ($this->data->merchant->payments->payment as $method) {
- $method = (string)$method;
- $methods[] = new PaymentMethod($method, $method);
+ $method = (string) $method;
+ $name = isset($this->names[$method]) ? $this->names[$method] : ucfirst($method);
+ $methods[] = new PaymentMethod($method, $name);
}
}
return $methods;
}
-}
\ No newline at end of file
+} | Prettify names for payment methods | fruitcake_omnipay-sisow | train |
963f671ba1425b359e44418e84644acc8d0c3613 | diff --git a/h2o-hadoop/tests/python/pyunit_gbm_on_hive.py b/h2o-hadoop/tests/python/pyunit_gbm_on_hive.py
index <HASH>..<HASH> 100644
--- a/h2o-hadoop/tests/python/pyunit_gbm_on_hive.py
+++ b/h2o-hadoop/tests/python/pyunit_gbm_on_hive.py
@@ -34,7 +34,8 @@ def gbm_on_hive():
airlines_dataset = h2o.import_sql_select(connection_url, select_query, username, password)
airlines_dataset = adapt_airlines(airlines_dataset)
# read from Hive Streaming
- airlines_dataset_streaming = h2o.import_sql_select(connection_url, select_query, username, password, streaming=True)
+ airlines_dataset_streaming = h2o.import_sql_select(connection_url, select_query, username, password,
+ fetch_mode="SINGLE")
airlines_dataset_streaming = adapt_airlines(airlines_dataset_streaming)
# datasets should be identical from user's point of view | [HOTFIX] Fix import_sql_select API call in Hive test | h2oai_h2o-3 | train |
2ae277e42dc0c9ae002f743e0cc396b0aa7e55d5 | diff --git a/src/saml2/config.py b/src/saml2/config.py
index <HASH>..<HASH> 100644
--- a/src/saml2/config.py
+++ b/src/saml2/config.py
@@ -258,21 +258,13 @@ class Config(object):
self.setattr(typ, arg, _val)
self.context = typ
- self.load_complex(cnf, typ)
self.context = self.def_context
- def load_complex(self, cnf, typ=""):
- try:
- self.setattr(typ, "policy", Policy(cnf["policy"], config=self))
- except KeyError:
- pass
-
- # for srv, spec in cnf["service"].items():
- # try:
- # self.setattr(srv, "policy",
- # Policy(cnf["service"][srv]["policy"]))
- # except KeyError:
- # pass
+ def load_complex(self, cnf):
+ acs = ac_factory(cnf.get("attribute_map_dir"))
+ if not acs:
+ raise ConfigurationError("No attribute converters, something is wrong!!")
+ self.setattr("", "attribute_converters", acs)
def unicode_convert(self, item):
try:
@@ -339,16 +331,6 @@ class Config(object):
if "extensions" in cnf:
self.do_extensions(cnf["extensions"])
- acs = ac_factory(cnf.get("attribute_map_dir"))
- if not acs:
- raise ConfigurationError("No attribute converters, something is wrong!!")
- self.setattr("", "attribute_converters", acs)
-
- try:
- self.setattr("", "metadata", self.load_metadata(cnf["metadata"]))
- except KeyError:
- pass
-
self.load_complex(cnf)
self.context = self.def_context | Handle complex configs in the same place | IdentityPython_pysaml2 | train |
74d8af78da8ce2533d0fa516e9078f08fa1ec2da | diff --git a/tangelo/tangelo/server.py b/tangelo/tangelo/server.py
index <HASH>..<HASH> 100644
--- a/tangelo/tangelo/server.py
+++ b/tangelo/tangelo/server.py
@@ -65,14 +65,19 @@ def analyze_url(raw_reqpath):
analysis.directive = Directive(Directive.HTTPRedirect, argument="/")
return analysis
- if plugins is not None and reqpath[0] == "/" and reqpath.split("/")[1] == "plugin":
+ # If the request path does not begin with a /, then it is invalid.
+ if reqpath[0] != "/":
+ raise ValueError("request path must be absolute, i.e., begin with a slash")
+
+ # If the request path is to a plugin, substitute the correct webroot path.
+ if reqpath.split("/")[1] == "plugin":
plugin_comp = reqpath.split("/")
if len(plugin_comp) < 3:
analysis.directive = Directive(Directive.ListPlugins)
return analysis
plugin = plugin_comp[2]
- if plugin not in plugins.plugins:
+ if plugins is None or plugin not in plugins.plugins:
analysis.content = Content(Content.NotFound, path=reqpath)
return analysis
@@ -91,23 +96,11 @@ def analyze_url(raw_reqpath):
reqpathcomp = reqpath[1:].split("/")
# Compute the disk path the URL corresponds to.
- #
- # First check to see whether the path is absolute (i.e. rooted at
- # webroot) or in a user home directory.
- if reqpathcomp[0][0] == "~" and len(reqpathcomp[0]) > 1:
- # Only treat this component as a home directory if there is
- # actually text following the tilde (rather than making the server
- # serve files from the home directory of whatever user account it
- # is using to run).
- pathcomp = [os.path.expanduser(reqpathcomp[0]) +
- os.path.sep +
- "tangelo_html"] + reqpathcomp[1:]
- else:
- pathcomp = [webroot] + reqpathcomp
+ pathcomp = [webroot] + reqpathcomp
- # Save the request path and disk path components in the thread storage,
- # slightly modifying the request path if it refers to an absolute path
- # (indicated by being one element shorter than the disk path).
+ # Save the request path and disk path components, slightly modifying the
+ # request path if it refers to an absolute path (indicated by being one
+ # element shorter than the disk path).
if len(reqpathcomp) == len(pathcomp) - 1:
reqpathcomp_save = [""] + reqpathcomp
elif len(reqpathcomp) == len(pathcomp):
@@ -135,7 +128,7 @@ def analyze_url(raw_reqpath):
pathcomp = [pathcomp[0] + os.path.sep + pathcomp[1]] + pathcomp[2:]
# Form an actual path string.
- path = os.path.sep.join(pathcomp)
+ path = os.path.join(*pathcomp)
# If the path is a directory, check for a trailing slash. If missing,
# perform a redirect to the path WITH the trailing slash. Otherwise, check
@@ -154,7 +147,7 @@ def analyze_url(raw_reqpath):
if raw_reqpath[-1] != "/":
analysis.directive = Directive(Directive.HTTPRedirect, argument=raw_reqpath + "/")
return analysis
- elif os.path.exists(path + os.path.sep + "index.html"):
+ elif os.path.exists(os.path.join(path, "index.html")):
analysis.directive = Directive(Directive.InternalRedirect, argument=raw_reqpath + "index.html")
return analysis
else: | Disabling home directory service altogether. | Kitware_tangelo | train |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.