hash | diff | message | project | split | diff_languages
---|---|---|---|---|---|
21692415a251285cc3f4e24f29ffd4b810a5656d | diff --git a/downhill/base.py b/downhill/base.py
index <HASH>..<HASH> 100644
--- a/downhill/base.py
+++ b/downhill/base.py
@@ -132,7 +132,8 @@ class Optimizer(util.Registrar(str('Base'), (), {})):
yield vel_tm1, vel_t
if self.nesterov:
# https://github.com/lisa-lab/pylearn2/pull/136#issuecomment-10381617
- yield param, param + self.momentum * vel_t + delta
+ yield param, (param + self.momentum ** 2 * vel_tm1
+ + (1 + self.momentum) * delta)
else:
yield param, param + vel_t | Reformulate nesterov update after rereading paper. | lmjohns3_downhill | train | py |
86085f450c73317c0dfa45dd1aa0b39afff9f9f1 | diff --git a/account/src/test/java/com/ning/billing/account/AccountTestSuite.java b/account/src/test/java/com/ning/billing/account/AccountTestSuite.java
index <HASH>..<HASH> 100644
--- a/account/src/test/java/com/ning/billing/account/AccountTestSuite.java
+++ b/account/src/test/java/com/ning/billing/account/AccountTestSuite.java
@@ -18,5 +18,5 @@ package com.ning.billing.account;
import com.ning.billing.KillbillTestSuite;
-public class AccountTestSuite extends KillbillTestSuite {
+public abstract class AccountTestSuite extends KillbillTestSuite {
} | account: make AccountTestSuite abstract | killbill_killbill | train | java |
eec73f77a4ce3d3e81edf02dd49166584e9dd42d | diff --git a/activerecord/test/cases/relation_test.rb b/activerecord/test/cases/relation_test.rb
index <HASH>..<HASH> 100644
--- a/activerecord/test/cases/relation_test.rb
+++ b/activerecord/test/cases/relation_test.rb
@@ -26,7 +26,7 @@ module ActiveRecord
def test_initialize_single_values
relation = Relation.new(FakeKlass)
(Relation::SINGLE_VALUE_METHODS - [:create_with]).each do |method|
- assert_nil relation.send("#{method}_value"), method.to_s
+ assert_nil relation.public_send("#{method}_value"), method.to_s
end
value = relation.create_with_value
assert_equal({}, value)
@@ -36,7 +36,7 @@ module ActiveRecord
def test_multi_value_initialize
relation = Relation.new(FakeKlass)
Relation::MULTI_VALUE_METHODS.each do |method|
- values = relation.send("#{method}_values")
+ values = relation.public_send("#{method}_values")
assert_equal [], values, method.to_s
assert_predicate values, :frozen?, method.to_s
end | value_methods on Relation are public methods | rails_rails | train | rb |
d8d5560903aa12424f48886909a9c5bf06a5fabb | diff --git a/src/javascript/runtime/RuntimeClient.js b/src/javascript/runtime/RuntimeClient.js
index <HASH>..<HASH> 100644
--- a/src/javascript/runtime/RuntimeClient.js
+++ b/src/javascript/runtime/RuntimeClient.js
@@ -48,6 +48,7 @@ define('moxie/runtime/RuntimeClient', [
constructor = Runtime.getConstructor(type);
if (!constructor || !constructor.can(options.required_caps)) {
initialize(items);
+ return;
}
// try initializing the runtime | RuntimeClient: Add missing return after fallback. | moxiecode_moxie | train | js |
d65cfc0c59701295dfddbad7152acec1a08e9940 | diff --git a/tests/Unit/Suites/Product/Block/FilterNavigationBlockTest.php b/tests/Unit/Suites/Product/Block/FilterNavigationBlockTest.php
index <HASH>..<HASH> 100644
--- a/tests/Unit/Suites/Product/Block/FilterNavigationBlockTest.php
+++ b/tests/Unit/Suites/Product/Block/FilterNavigationBlockTest.php
@@ -46,10 +46,11 @@ class FilterNavigationBlockTest extends \PHPUnit_Framework_TestCase
protected function setUp()
{
$this->stubBlockRenderer = $this->getMock(BlockRenderer::class, [], [], '', false);
+ $blockName = 'foo';
$this->stubFilterCollection = $this->getMock(FilterNavigationFilterCollection::class, [], [], '', false);
$stubDataObject = $this->stubFilterCollection;
- $this->block = new FilterNavigationBlock($this->stubBlockRenderer, 'foo.phtml', 'foo', $stubDataObject);
+ $this->block = new FilterNavigationBlock($this->stubBlockRenderer, 'foo.phtml', $blockName, $stubDataObject);
}
public function testBlockClassIsExtended() | Issue #<I>: Refactor FilterNavigationBlockTest | lizards-and-pumpkins_catalog | train | php |
03a7f2e10ffb8629e5816ad493a755e60775f5bc | diff --git a/etcdctlv3/main.go b/etcdctlv3/main.go
index <HASH>..<HASH> 100644
--- a/etcdctlv3/main.go
+++ b/etcdctlv3/main.go
@@ -43,9 +43,9 @@ var (
func init() {
rootCmd.PersistentFlags().StringVar(&globalFlags.Endpoints, "endpoint", "127.0.0.1:2378", "gRPC endpoint")
- rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.CertFile, "cert", "", "identify HTTPS client using this SSL certificate file")
- rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.KeyFile, "key", "", "identify HTTPS client using this SSL key file")
- rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.CAFile, "cacert", "", "verify certificates of HTTPS-enabled servers using this CA bundle")
+ rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.CertFile, "cert", "", "identify secure client using this TLS certificate file")
+ rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.KeyFile, "key", "", "identify secure client using this TLS key file")
+ rootCmd.PersistentFlags().StringVar(&globalFlags.TLS.CAFile, "cacert", "", "verify certificates of TLS-enabled secure servers using this CA bundle")
rootCmd.AddCommand(
command.NewRangeCommand(), | etcdctlv3: secure is not HTTPS | etcd-io_etcd | train | go |
e9298f7be4aa385aad5c0fc3c1899091bc9e69ca | diff --git a/treeherder/etl/push_loader.py b/treeherder/etl/push_loader.py
index <HASH>..<HASH> 100644
--- a/treeherder/etl/push_loader.py
+++ b/treeherder/etl/push_loader.py
@@ -160,6 +160,15 @@ class GithubPushTransformer(GithubTransformer):
URL_BASE = "https://api.github.com/repos/{}/{}/compare/{}...{}"
+ def get_branch(self):
+ """
+ Tag pushes don't use the actual branch, just the string "tag"
+ """
+ if self.message_body["details"].get("event.head.tag"):
+ return "tag"
+
+ return super(GithubPushTransformer, self).get_branch()
+
def transform(self, repository):
push_url = self.URL_BASE.format(
self.message_body["organization"], | Bug <I> - Fix KeyError in tag push situation of GithubPushTransformer (#<I>)
* fix: KeyError when GithubPushTransformer.get_branch's called when tag push triggers
* chore: remove unused whitespace | mozilla_treeherder | train | py |
71359b12a7b64d3e8a1502f1ffd0b313c5f0aaf6 | diff --git a/lib/active_record/postgresql_extensions/types.rb b/lib/active_record/postgresql_extensions/types.rb
index <HASH>..<HASH> 100644
--- a/lib/active_record/postgresql_extensions/types.rb
+++ b/lib/active_record/postgresql_extensions/types.rb
@@ -6,7 +6,23 @@ module ActiveRecord
class PostgreSQLAdapter
# Returns an Array of available languages.
def types(name = nil)
- query(%{SELECT typname FROM pg_type;}, name).map { |row| row[0] }
+ query(PostgreSQLExtensions::Utils.strip_heredoc(<<-SQL), name).map(&:first)
+ SELECT t.typname as type
+ FROM pg_type t
+ LEFT JOIN pg_catalog.pg_namespace n ON n.oid = t.typnamespace
+ WHERE (t.typrelid = 0 OR (
+ SELECT c.relkind = 'c'
+ FROM pg_catalog.pg_class c
+ WHERE c.oid = t.typrelid
+ )) AND
+ NOT EXISTS(
+ SELECT 1
+ FROM pg_catalog.pg_type el
+ WHERE el.oid = t.typelem
+ AND el.typarray = t.oid
+ ) AND
+ n.nspname NOT IN ('information_schema');
+ SQL
end
def type_exists?(name) | Only return base and user-created types and not TOAST types and the like. | dark-panda_activerecord-postgresql-extensions | train | rb |
8323dd9a14fedde3712f0b2e6cd2b1f30ab2285c | diff --git a/cmd/torrent/main.go b/cmd/torrent/main.go
index <HASH>..<HASH> 100644
--- a/cmd/torrent/main.go
+++ b/cmd/torrent/main.go
@@ -183,6 +183,9 @@ func main() {
func mainErr() error {
tagflag.Parse(&flags)
defer envpprof.Stop()
+ if stdoutAndStderrAreSameFile() {
+ log.Default = log.Logger{log.StreamLogger{W: progress.Bypass(), Fmt: log.LineFormatter}}
+ }
clientConfig := torrent.NewDefaultClientConfig()
clientConfig.NoDHT = !flags.Dht
clientConfig.Debug = flags.Debug
@@ -234,9 +237,6 @@ func mainErr() error {
http.HandleFunc("/", func(w http.ResponseWriter, req *http.Request) {
client.WriteStatus(w)
})
- if stdoutAndStderrAreSameFile() {
- log.SetDefault(log.Logger{log.StreamLogger{W: progress.Bypass(), Fmt: log.LineFormatter}})
- }
if flags.Progress {
progress.Start()
} | cmd/torrent: Move log setup earlier to avoid race | anacrolix_torrent | train | go |
4ed4a82ecc684e82bbef2153ce1fa928168adacd | diff --git a/src/commands-api.js b/src/commands-api.js
index <HASH>..<HASH> 100644
--- a/src/commands-api.js
+++ b/src/commands-api.js
@@ -22,6 +22,7 @@ import { locatorBuilders } from "./record";
export const selenium = new Selenium(BrowserBot.createForWindow(window));
let contentSideexTabId = window.contentSideexTabId;
+let targetSelector;
function doCommands(request, sender, sendResponse) {
if (request.commands) {
@@ -78,7 +79,6 @@ function doCommands(request, sender, sendResponse) {
}
return true;
}
- let targetSelector;
if (request.selectMode) {
if (request.selecting) {
targetSelector = new TargetSelector(function (element, win) { | keep the reference to the locator builder | SeleniumHQ_selenium-ide | train | js |
a286a017f5de4c27a0102f79ac7d3d4665c82028 | diff --git a/src/tablesort.js b/src/tablesort.js
index <HASH>..<HASH> 100644
--- a/src/tablesort.js
+++ b/src/tablesort.js
@@ -117,7 +117,10 @@
};
// Sort as number if a currency key exists or number
- if (item.match(/^-?[£\x24Û¢´]\d/) || item.match(/^-?(\d+[,\.]?)+(E[\-+][\d]+)?%?$/)) {
+ if (item.match(/^-?[£\x24Û¢´€] ?\d/) || // prefixed currency
+ item.match(/^-?\d+\s*[€]/) || // suffixed currencty
+ item.match(/^-?(\d+[,\.]?)+(E[\-+][\d]+)?%?$/) // number
+ ) {
sortFunction = sortNumber;
} else if (testDate(item)) {
sortFunction = sortDate; | Fix #<I>: Recognized € as currency
- Recognize both "€5" and "5€"
- Also recognize if space in between: "€ 5", "5 €", "$5" | tristen_tablesort | train | js |
12a4837768f56bdaa5fb6a57197aa9361bab25e7 | diff --git a/devices/philips.js b/devices/philips.js
index <HASH>..<HASH> 100644
--- a/devices/philips.js
+++ b/devices/philips.js
@@ -1843,6 +1843,15 @@ module.exports = [
ota: ota.zigbeeOTA,
},
{
+ zigbeeModel: ['3418331P6'],
+ model: '3418331P6',
+ vendor: 'Philips',
+ description: 'Hue white ambiance Adore bathroom mirror light',
+ meta: {turnsOffAtBrightness1: true},
+ extend: hueExtend.light_onoff_brightness_colortemp({colorTempRange: [153, 454]}),
+ ota: ota.zigbeeOTA,
+ },
+ {
zigbeeModel: ['5309331P6'],
model: '5309331P6',
vendor: 'Philips', | Add <I>P6 (#<I>)
* Add support for <I>P6
* add colortemp for <I>P6
* Update philips.js
* Update philips.js | Koenkk_zigbee-shepherd-converters | train | js |
753888a11ddf5d8a73d92bc5565bcf4a962852df | diff --git a/parse.go b/parse.go
index <HASH>..<HASH> 100644
--- a/parse.go
+++ b/parse.go
@@ -157,15 +157,13 @@ func (p *parser) next() {
}
return
case DQUOTE:
+ p.pos = Pos(p.npos + 1)
switch b {
case '`', '"', '$':
- p.pos = Pos(p.npos + 1)
p.advanceTok(p.dqToken(b))
case '\n':
- p.pos++
p.advanceLitDquote()
default:
- p.pos = Pos(p.npos + 1)
p.advanceLitDquote()
}
return
@@ -182,16 +180,11 @@ func (p *parser) next() {
}
return
case SQUOTE:
- switch b {
- case '\'':
- p.pos = Pos(p.npos + 1)
+ p.pos = Pos(p.npos + 1)
+ if b == '\'' {
p.npos++
p.advanceTok(SQUOTE)
- case '\n':
- p.pos++
- p.advanceLitOther(q)
- default:
- p.pos = Pos(p.npos + 1)
+ } else {
p.advanceLitOther(q)
}
return | parse: clean up position logic in next() | mvdan_sh | train | go |
cc853b97972b2f53ab7a8341e83e3142a3479767 | diff --git a/lib/foodcritic/rake_task.rb b/lib/foodcritic/rake_task.rb
index <HASH>..<HASH> 100644
--- a/lib/foodcritic/rake_task.rb
+++ b/lib/foodcritic/rake_task.rb
@@ -24,8 +24,11 @@ module FoodCritic
desc "Lint Chef cookbooks"
task(name) do
result = FoodCritic::Linter.new.check(files, options)
- puts result
- fail if result.failed?
+ if result.warnings.any?
+ puts result
+ end
+
+ fail result.to_s if result.failed?
end
end | Reduce unnecessary blank lines from console output if there's nothing to print.
We only print the food critic reviews if there's a warning and fail with a proper message if there's any. | Foodcritic_foodcritic | train | rb |
26eec74d5c2e28ba55f32ded45827fcfa72e8add | diff --git a/openquake/engine/performance.py b/openquake/engine/performance.py
index <HASH>..<HASH> 100644
--- a/openquake/engine/performance.py
+++ b/openquake/engine/performance.py
@@ -204,7 +204,7 @@ class EnginePerformanceMonitor(PerformanceMonitor):
is intended for debugging purposes.
"""
if no_distribute():
- logs.LOG.warn('PyMem: %d mb, PgMem: %d mb' % self.mem_peaks)
+ logs.LOG.warn('PyMem: %s mb, PgMem: %s mb' % self.mem_peaks)
def __exit__(self, etype, exc, tb):
super(EnginePerformanceMonitor, self).__exit__(etype, exc, tb) | Fixed an error happening when the memory stats are not available (None instead of an integer)
Former-commit-id: <I>cbd<I>c<I>f4c7ba4dd<I>d8d2b0fdd | gem_oq-engine | train | py |
1a5051f8223fa069b28a8a442a961e64d937cb01 | diff --git a/scot/var.py b/scot/var.py
index <HASH>..<HASH> 100644
--- a/scot/var.py
+++ b/scot/var.py
@@ -123,7 +123,7 @@ class VARBase():
R = np.array([[acm(m-k) for k in range(self.p)] for m in range(self.p)])
R = np.concatenate(np.concatenate(R, -2), -1)
- c = np.linalg.solve(R, r)
+ c = sp.linalg.solve(R, r, sym_pos=True)
c = np.concatenate([c[m::self.p, :] for m in range(n_channels)]).T
self.coef = c | switched to scipy solver | scot-dev_scot | train | py |
a9ead55ef3863bc3fb8688507da885d831227897 | diff --git a/source/main/org/freecompany/redline/payload/Contents.java b/source/main/org/freecompany/redline/payload/Contents.java
index <HASH>..<HASH> 100644
--- a/source/main/org/freecompany/redline/payload/Contents.java
+++ b/source/main/org/freecompany/redline/payload/Contents.java
@@ -46,6 +46,7 @@ public class Contents {
builtin.add( "/bin");
builtin.add( "/dev");
builtin.add( "/etc");
+ builtin.add( "/etc/bash_completion.d");
builtin.add( "/etc/cron.d");
builtin.add( "/etc/cron.daily");
builtin.add( "/etc/cron.hourly");
@@ -64,6 +65,7 @@ public class Contents {
builtin.add( "/usr/sbin");
builtin.add( "/usr/share");
builtin.add( "/usr/share/applications");
+ builtin.add( "/root");
builtin.add( "/sbin");
builtin.add( "/opt");
builtin.add( "/tmp"); | Add builtin entries for /root and /etc/bash_completion.d | craigwblake_redline | train | java |
6828a830ce7a29b5dc74b73bf887736f438b6a6f | diff --git a/billy/tests/fixtures/ex/districts.py b/billy/tests/fixtures/ex/districts.py
index <HASH>..<HASH> 100644
--- a/billy/tests/fixtures/ex/districts.py
+++ b/billy/tests/fixtures/ex/districts.py
@@ -1,7 +1,7 @@
districts = [{u'_id': u'ex-lower-1',
u'abbr': u'ex',
- u'boundary_id': u'fake_boudary_id-1',
+ u'boundary_id': u'sldu/ma-first-suffolk-middlesex',
u'chamber': u'lower',
u'name': u'1',
u'num_seats': 1} | changed boudary_id to Boston's district | openstates_billy | train | py |
517ca1a3e47342151d703b5685053845fc2a0d7a | diff --git a/lib/getDevices.js b/lib/getDevices.js
index <HASH>..<HASH> 100644
--- a/lib/getDevices.js
+++ b/lib/getDevices.js
@@ -26,7 +26,7 @@ module.exports = function(fromDevice, query, owner, callback) {
devicedata.forEach(function(device){
if(securityImpl.canDiscover(fromDevice, device)){
deviceResults.push(device);
-
+
delete device.token;
delete device.socketid;
delete device._id;
@@ -66,10 +66,6 @@ module.exports = function(fromDevice, query, owner, callback) {
if (query.online){
fetch.online = query.online === "true";
}
- if (_.isString(query.type) && query.type.toLowerCase() == "user"){
- fetch = {};
- fetch.uuid = fromDevice.uuid;
- }
delete fetch.token;
//sorts newest devices on top | Removed type=user condition in getDevices | octoblu_meshblu | train | js |
29bb75a79e5f01e24720899ded30d3833268952c | diff --git a/parthial/built_ins.py b/parthial/built_ins.py
index <HASH>..<HASH> 100644
--- a/parthial/built_ins.py
+++ b/parthial/built_ins.py
@@ -91,3 +91,9 @@ def lisp_cdr(self, ctx, l):
cdr = l.val[1:]
return ctx.env.new(LispList(cdr))
+@built_in(default_globals, 'list', count_args=False)
+def lisp_list(self, ctx, l):
+ if len(l) > 1024:
+ raise LispError('too many items in list')
+ return ctx.env.new(LispList(l))
+ | Fixed an incredibly stupid typo bug. | benzrf_parthial | train | py |
a3dcb0f309bf8c8036ce844c750053e35244b437 | diff --git a/caravel/forms.py b/caravel/forms.py
index <HASH>..<HASH> 100644
--- a/caravel/forms.py
+++ b/caravel/forms.py
@@ -113,7 +113,9 @@ class FormFactory(object):
viz = self.viz
datasource = viz.datasource
default_metric = datasource.metrics_combo[0][0]
- default_groupby = datasource.groupby_column_names[0]
+
+ gb_cols = datasource.groupby_column_names
+ default_groupby = gb_cols[0] if gb_cols else None
group_by_choices = [(s, s) for s in datasource.groupby_column_names]
# Pool of all the fields that can be used in Caravel
self.field_dict = { | closes #<I> (#<I>) | apache_incubator-superset | train | py |
d21a03e0b1d9fc1df59ff54e7a513655c1748b0c | diff --git a/u2fdemo/main.go b/u2fdemo/main.go
index <HASH>..<HASH> 100644
--- a/u2fdemo/main.go
+++ b/u2fdemo/main.go
@@ -51,7 +51,13 @@ func registerResponse(w http.ResponseWriter, r *http.Request) {
return
}
- reg, err := u2f.Register(regResp, *challenge, nil)
+ config := &u2f.Config{
+ // Chrome 66+ doesn't return the device's attestation
+ // certificate by default.
+ SkipAttestationVerify: true,
+ }
+
+ reg, err := u2f.Register(regResp, *challenge, config)
if err != nil {
log.Printf("u2f.Register error: %v", err)
http.Error(w, "error verifying response", http.StatusInternalServerError) | u2fdemo: Disable attestation verify by default due to new Chrome default policy | tstranex_u2f | train | go |
426149057d76d87a6e724f7b7575d4885a8f1f0d | diff --git a/mimesis/__init__.py b/mimesis/__init__.py
index <HASH>..<HASH> 100755
--- a/mimesis/__init__.py
+++ b/mimesis/__init__.py
@@ -32,6 +32,8 @@ from .providers import (
Transport,
)
+from mimesis.schema import Schema, Field
+
__all__ = [
"Address",
"BaseDataProvider",
@@ -56,6 +58,9 @@ __all__ = [
"Cryptographic",
# Has all:
"Generic",
+ # Schema:
+ "Field",
+ "Schema",
# Meta:
"__version__",
"__title__", | Add Schema and Field to __init__.py | lk-geimfari_mimesis | train | py |
d1077e5cfe9efae03e8f50eb2fde5b6bb9dc948d | diff --git a/karma.conf.js b/karma.conf.js
index <HASH>..<HASH> 100644
--- a/karma.conf.js
+++ b/karma.conf.js
@@ -26,6 +26,10 @@ module.exports = function(config) {
}
},
+ mochaReporter: {
+ showDiff: true
+ },
+
webpackMiddleware: {
noInfo: true
} | turn on diffing in karma-reporter | developit_preact-router | train | js |
96f3c64dd8b542d657a51f9bb89903839ccde4df | diff --git a/azure/src/test/java/com/microsoft/azure/management/TestCdn.java b/azure/src/test/java/com/microsoft/azure/management/TestCdn.java
index <HASH>..<HASH> 100644
--- a/azure/src/test/java/com/microsoft/azure/management/TestCdn.java
+++ b/azure/src/test/java/com/microsoft/azure/management/TestCdn.java
@@ -87,7 +87,7 @@ public class TestCdn extends TestTemplate<CdnProfile, CdnProfiles> {
.parent()
.apply();
- Assert.assertEquals(2, profile.endpoints().size());
+ Assert.assertEquals(3, profile.endpoints().size());
CdnEndpoint updatedEndpoint = profile.endpoints().get(firstEndpointName);
Assert.assertTrue(updatedEndpoint.isHttpsAllowed());
Assert.assertEquals(1111, updatedEndpoint.httpPort()); | big fix in CDN unit test (it was broken) | Azure_azure-sdk-for-java | train | java |
c3fa2caa1db4d13f6f584acfa34e80291669cc66 | diff --git a/ui/src/shared/components/Annotation.js b/ui/src/shared/components/Annotation.js
index <HASH>..<HASH> 100644
--- a/ui/src/shared/components/Annotation.js
+++ b/ui/src/shared/components/Annotation.js
@@ -29,7 +29,8 @@ const Annotation = ({annotation, dygraph}) =>
<div
className="dygraph-annotation"
style={calcStyle(annotation, dygraph)}
- data-time={annotation.time}
+ data-time-ms={annotation.time}
+ data-time-local={new Date(+annotation.time)}
/>
const {shape, string} = PropTypes | Improve debuggability of annotations
This is ultimately going to be removed | influxdata_influxdb | train | js |
f9708bf7484a29ad70e06fa999e4e246eeadbd53 | diff --git a/source/Core/UtilsObject.php b/source/Core/UtilsObject.php
index <HASH>..<HASH> 100644
--- a/source/Core/UtilsObject.php
+++ b/source/Core/UtilsObject.php
@@ -154,7 +154,9 @@ class UtilsObject
*/
public static function setClassInstance($className, $instance)
{
- $className = strtolower($className);
+ if (!self::isNamespacedClass($className)) {
+ $className = strtolower($className);
+ }
self::$_aClassInstances[$className] = $instance;
}
@@ -234,7 +236,7 @@ class UtilsObject
array_shift($arguments);
$argumentsCount = count($arguments);
$shouldUseCache = $this->shouldCacheObject($className, $arguments);
- if (strpos($className, '\\') === false) {
+ if (!self::isNamespacedClass($className)) {
$className = strtolower($className);
}
@@ -431,4 +433,14 @@ class UtilsObject
{
return count($arguments) < 2 && (!isset($arguments[0]) || is_scalar($arguments[0]));
}
+
+ /**
+ * @param $className
+ *
+ * @return bool
+ */
+ private static function isNamespacedClass($className)
+ {
+ return strpos($className, '\\') !== false;
+ }
} | Ensure that class cache works correctly in oxNew
Class cache setter should act same as getter.
Before setter would always lower case class name, and getter only for not namespaced classes.
Classes without namespace should be in lowercase beacause they path's are different and class name is just a key to get path.
Class with namespace must be in camel case as namespace is class path. | OXID-eSales_oxideshop_ce | train | php |
653e9a7cdb8011c1eb3193c034c161db79d14e8a | diff --git a/drools-persistence-jpa/src/main/java/org/drools/persistence/jta/JtaTransactionManager.java b/drools-persistence-jpa/src/main/java/org/drools/persistence/jta/JtaTransactionManager.java
index <HASH>..<HASH> 100644
--- a/drools-persistence-jpa/src/main/java/org/drools/persistence/jta/JtaTransactionManager.java
+++ b/drools-persistence-jpa/src/main/java/org/drools/persistence/jta/JtaTransactionManager.java
@@ -121,7 +121,7 @@ public class JtaTransactionManager
logger.debug( "No UserTransaction found at JNDI location [{}]",
DEFAULT_USER_TRANSACTION_NAME,
ex );
- return null;
+ throw new IllegalStateException("Unable to find transaction: " + ex.getMessage(), ex);
}
} | - throw exception if no user transaction was found | kiegroup_drools | train | java |
92b424ecad21815e76305949cd0b98ccc5839484 | diff --git a/lib/rack-timeout.rb b/lib/rack-timeout.rb
index <HASH>..<HASH> 100644
--- a/lib/rack-timeout.rb
+++ b/lib/rack-timeout.rb
@@ -1,5 +1,5 @@
# encoding: utf-8
-require_relative 'rack/timeout'
+require 'rack/timeout'
if defined?(Rails) && [3,4].include?(Rails::VERSION::MAJOR)
class Rack::Timeout::Railtie < Rails::Railtie | no need for require_relative here | heroku_rack-timeout | train | rb |
33977d32cac2b90919c5832d1660f4dc10073370 | diff --git a/pwnypack/flow.py b/pwnypack/flow.py
index <HASH>..<HASH> 100644
--- a/pwnypack/flow.py
+++ b/pwnypack/flow.py
@@ -15,6 +15,7 @@ class ProcessChannel(object):
def __init__(self, *arguments):
self._process = subprocess.Popen(
arguments,
+ bufsize=0,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
)
@@ -96,7 +97,7 @@ class Flow(object):
def read(self, n, echo=None):
d = self.channel.read(n)
if echo or (echo is None and self.echo):
- sys.stdout.write(d.decode('utf-8'))
+ sys.stdout.write(d.decode('latin1'))
return d
def read_eof(self, echo=None):
@@ -129,7 +130,7 @@ class Flow(object):
def write(self, data, echo=None):
if echo or (echo is None and self.echo):
- sys.stdout.write(data)
+ sys.stdout.write(data.decode('latin1'))
self.channel.write(data)
def writelines(self, lines, echo=None): | Python3 fixes for flow.
- Use latin1 for echo mode as not everything will be encodable as utf-8.
- Disable buffering on subprocess. | edibledinos_pwnypack | train | py |
6c62c841d9b94f613bc7810edcb66f1ddb39578e | diff --git a/src/config/createLatestConfig.js b/src/config/createLatestConfig.js
index <HASH>..<HASH> 100644
--- a/src/config/createLatestConfig.js
+++ b/src/config/createLatestConfig.js
@@ -66,9 +66,9 @@ export function createHelper({ modern = false, minified = false, runtime = true,
// changed from default. More efficient to use real polyfills.
polyfill: false
}])
- } else {
+ } /* else {
additionalPlugins.push("external-helpers")
- }
+ } */
return babel({
// Don't try to find .babelrc because we want to force this configuration. | Another round trying without external-helpers | sebastian-software_preppy | train | js |
cd0ea5da7b3f730756ed4898d0d838008d79015d | diff --git a/src/main/java/com/semanticcms/openfile/servlet/OpenFile.java b/src/main/java/com/semanticcms/openfile/servlet/OpenFile.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/semanticcms/openfile/servlet/OpenFile.java
+++ b/src/main/java/com/semanticcms/openfile/servlet/OpenFile.java
@@ -96,7 +96,8 @@ final public class OpenFile {
String[] getCommand(java.io.File resourceFile) throws IOException;
}
- private static final Object fileOpenersLock = new Object();
+ private static class FileOpenersLock {}
+ private static final FileOpenersLock fileOpenersLock = new FileOpenersLock();
/**
* Registers a file opener. | Each lock object now a small empty class to help identify lock contention.
The lock contention profiler in NetBeans is just showing "java.lang.Object" all over, and can't seem to
get from the lock object id to the actual object in the heap dump using OQL (id not found). | aoindustries_semanticcms-openfile-servlet | train | java |
0498d149327dd6a86b6506be387d860f2b0504cf | diff --git a/js/bitfinex.js b/js/bitfinex.js
index <HASH>..<HASH> 100644
--- a/js/bitfinex.js
+++ b/js/bitfinex.js
@@ -678,7 +678,9 @@ module.exports = class bitfinex extends Exchange {
}
}
if (market !== undefined)
- symbol = market['symbol'];
+ symbol = market['symbol'].toUpperCase ();
+ else
+ symbol = order['symbol'];
let orderType = order['type'];
let exchange = orderType.indexOf ('exchange ') >= 0;
if (exchange) { | [bitfinex1] made parse order apply a default symbol if one can't be mapped (required for delisted symbols such as BCHUSD) | ccxt_ccxt | train | js |
30f036ad3e124bdb6c02ca595d1ee5bde2c846dc | diff --git a/panels/dashcontrol/module.js b/panels/dashcontrol/module.js
index <HASH>..<HASH> 100644
--- a/panels/dashcontrol/module.js
+++ b/panels/dashcontrol/module.js
@@ -5,7 +5,7 @@ angular.module('kibana.dashcontrol', [])
var _d = {
group : "default",
save : {
- gist: true,
+ gist: false,
elasticsearch: true,
local: true,
'default': true
@@ -209,12 +209,10 @@ angular.module('kibana.dashcontrol', [])
}
$scope.gist_dblist = function(id) {
- $http({
- url: "https://api.github.com/gists/"+id,
- method: "GET"
- }).success(function(data, status, headers, config) {
+ $http.jsonp("https://api.github.com/gists/"+id+"?callback=JSON_CALLBACK"
+ ).success(function(response) {
$scope.gist.files = []
- _.each(data.files,function(v,k) {
+ _.each(response.data.files,function(v,k) {
try {
var file = JSON.parse(v.content)
$scope.gist.files.push(file) | Fixed gist loading, saving will require registration of your domain | grafana_grafana | train | js |
8a91c4c4bee556e3bd4d1e79d87e74b130def880 | diff --git a/txtemplate/templates.py b/txtemplate/templates.py
index <HASH>..<HASH> 100644
--- a/txtemplate/templates.py
+++ b/txtemplate/templates.py
@@ -138,6 +138,7 @@ class GenshiTemplateAdapter(object):
self._stream = None
self.template = template
self.delayedCall = None
+ self.serialize_method = 'html'
def _populateBuffer(self, stream, n):
"""
@@ -179,7 +180,7 @@ class GenshiTemplateAdapter(object):
self._stream = self.template.generate(**kwargs)
self._deferred = defer.Deferred()
self._deferred.addCallbacks(self._rendered, self._failed)
- s = self._stream.serialize()
+ s = self._stream.serialize(method=self.serialize_method)
self.delayedCall = reactor.callLater(CALL_DELAY, self._populateBuffer, s, POPULATE_N_STEPS)
return self._deferred | Option for serialize method in Genshi templates (now defaults to 'html', was implicitly 'xml'). | steder_txtemplate | train | py |
748f7d4e9f5571d54377dfe31bac3c2be9463433 | diff --git a/src/main/java/org/gitlab/api/GitlabAPI.java b/src/main/java/org/gitlab/api/GitlabAPI.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/gitlab/api/GitlabAPI.java
+++ b/src/main/java/org/gitlab/api/GitlabAPI.java
@@ -154,7 +154,11 @@ public class GitlabAPI {
}
public List<GitlabCommit> getCommits(GitlabMergeRequest mergeRequest) throws IOException {
- String tailUrl = GitlabProject.URL + "/" + mergeRequest.getProjectId() +
+ Integer projectId = mergeRequest.getSourceProjectId();
+ if (projectId == null) {
+ projectId = mergeRequest.getProjectId();
+ }
+ String tailUrl = GitlabProject.URL + "/" + projectId +
"/repository" + GitlabCommit.URL + "?ref_name=" + mergeRequest.getSourceBranch();
GitlabCommit[] commits = retrieve().to(tailUrl, GitlabCommit[].class); | support merge requests from a different project (fork) in getCommits() | timols_java-gitlab-api | train | java |
72026109f4b6fe2636450efdc305d01fe496dfde | diff --git a/lib/Storeit.js b/lib/Storeit.js
index <HASH>..<HASH> 100644
--- a/lib/Storeit.js
+++ b/lib/Storeit.js
@@ -4,6 +4,7 @@ var Q = require("q");
var _ = require("underscore");
var pubit = require("pubit-as-promised");
var whatsDifferent = require("./utils").whatsDifferent;
+var previously = require("./utils").previously;
var isObject = require("./utils").isObject;
var cloneObject = require("./utils").cloneObject;
var isEqual = require("./utils").isEqual;
@@ -123,17 +124,22 @@ function Storeit(namespace, storageProvider) {
function setCache(key, value) {
var results = {};
if (has(key)) {
- var partial = value;
+ var previousPartial;
+ var partial;
var currentValue = getValue(key);
if (isObject(value) && isObject(currentValue)) {
value = _.extend(cloneObject(currentValue), value); // Allow "patching" with partial value.
partial = whatsDifferent(currentValue, value);
+ previousPartial = previously(currentValue, partial);
+ } else {
+ partial = value;
+ previousPartial = currentValue;
}
if (isEqual(currentValue, value)) {
results.action = Action.none;
} else {
cache[key].value = value;
- publish(EventName.modified, partial, key);
+ publish(EventName.modified, partial, key, previousPartial);
results.action = Action.modified;
}
} else { | Publish a `previousPartial` as a third parameter to `modified` event. | donavon_storeit | train | js |
f0ed81725b6e9f4a3fa69a575841caa4efacd65c | diff --git a/packages/node_modules/@webex/internal-plugin-wdm/src/config.js b/packages/node_modules/@webex/internal-plugin-wdm/src/config.js
index <HASH>..<HASH> 100644
--- a/packages/node_modules/@webex/internal-plugin-wdm/src/config.js
+++ b/packages/node_modules/@webex/internal-plugin-wdm/src/config.js
@@ -13,7 +13,8 @@ export default {
hydra: process.env.HYDRA_SERVICE_URL || 'https://api.ciscospark.com/v1'
},
defaults: {
- name: process.title.trim() || inBrowser && 'browser' || 'javascript',
+ name: (typeof process.title === 'string' ? process.title.trim() : undefined) ||
+ inBrowser && 'browser' || 'javascript',
deviceType: 'WEB',
model: 'web-js-sdk',
localizedModel: 'webex-js-sdk', | fix(internal-plugin-wdm): hotfix possible error trigger
Update config.device.defaults.name to also validate that
process.title is a string before attempting to run the
String.trim() command. | webex_spark-js-sdk | train | js |
cc3349d842c316a4c9acbd9c076ce688b3cbaf6e | diff --git a/spec/ospec/runner.rb b/spec/ospec/runner.rb
index <HASH>..<HASH> 100644
--- a/spec/ospec/runner.rb
+++ b/spec/ospec/runner.rb
@@ -176,6 +176,7 @@ module Kernel
# FIXME: remove
def ruby_version_is(*); end
def pending(*); end
+ def language_version(*); end
end
module MSpec
diff --git a/spec/rubyspec/language/array_spec.rb b/spec/rubyspec/language/array_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubyspec/language/array_spec.rb
+++ b/spec/rubyspec/language/array_spec.rb
@@ -114,4 +114,4 @@ describe "The packing splat operator (*)" do
end
-# language_version __FILE__, "array"
+language_version __FILE__, "array"
diff --git a/spec/rubyspec/language/block_spec.rb b/spec/rubyspec/language/block_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/rubyspec/language/block_spec.rb
+++ b/spec/rubyspec/language/block_spec.rb
@@ -536,4 +536,4 @@ describe "A block" do
end
-# language_version __FILE__, "block"
+language_version __FILE__, "block" | Update some language specs from rubyspec | opal_opal | train | rb,rb,rb |
8a061a15f7fb91cbcbfe6cb98ce37d5e5c4bbedf | diff --git a/test/cases/coerced_tests.rb b/test/cases/coerced_tests.rb
index <HASH>..<HASH> 100644
--- a/test/cases/coerced_tests.rb
+++ b/test/cases/coerced_tests.rb
@@ -23,6 +23,28 @@ class UniquenessValidationTest < ActiveRecord::TestCase
end
end
end
+
+ # Same as original coerced test except that it handles default SQL Server case-insensitive collation.
+ coerce_tests! :test_validate_uniqueness_by_default_database_collation
+ def test_validate_uniqueness_by_default_database_collation_coerced
+ Topic.validates_uniqueness_of(:author_email_address)
+
+ topic1 = Topic.new(author_email_address: "[email protected]")
+ topic2 = Topic.new(author_email_address: "[email protected]")
+
+ assert_equal 1, Topic.where(author_email_address: "[email protected]").count
+
+ assert_not topic1.valid?
+ assert_not topic1.save
+
+ # Case insensitive collation (SQL_Latin1_General_CP1_CI_AS) by default.
+ # Should not allow "David" if "david" exists.
+ assert_not topic2.valid?
+ assert_not topic2.save
+
+ assert_equal 1, Topic.where(author_email_address: "[email protected]").count
+ assert_equal 1, Topic.where(author_email_address: "[email protected]").count
+ end
end
require "models/event" | Coerce test to handle default case-insensitive collation (#<I>) | rails-sqlserver_activerecord-sqlserver-adapter | train | rb |
a42f7dd0c8ff20c14aae9f0eefd8f05e167c73af | diff --git a/clustering/server/src/main/java/org/wildfly/clustering/server/singleton/ServiceLifecycle.java b/clustering/server/src/main/java/org/wildfly/clustering/server/singleton/ServiceLifecycle.java
index <HASH>..<HASH> 100644
--- a/clustering/server/src/main/java/org/wildfly/clustering/server/singleton/ServiceLifecycle.java
+++ b/clustering/server/src/main/java/org/wildfly/clustering/server/singleton/ServiceLifecycle.java
@@ -80,16 +80,8 @@ public class ServiceLifecycle implements Lifecycle {
monitor.awaitStability();
- State state = this.controller.getState();
- switch (state) {
- case START_FAILED: {
- throw new IllegalStateException(this.controller.getStartException());
- }
- default: {
- if (state != targetState) {
- throw new IllegalStateException(state.toString());
- }
- }
+ if (this.controller.getState() == ServiceController.State.START_FAILED) {
+ throw new IllegalStateException(this.controller.getStartException());
}
} catch (InterruptedException e) {
Thread.currentThread().interrupt(); | Ignore service state transition failures due to removed services. | wildfly_wildfly | train | java |
a15dc3a7e39b667eefec81a0df724087b168ab9d | diff --git a/lib/ldapter/entry.rb b/lib/ldapter/entry.rb
index <HASH>..<HASH> 100644
--- a/lib/ldapter/entry.rb
+++ b/lib/ldapter/entry.rb
@@ -502,7 +502,8 @@ module Ldapter
private :check_server_contraints
# For new objects, does an LDAP add. For existing objects, does an LDAP
- # modify. This only sends the modified attributes to the server.
+ # modify. This only sends the modified attributes to the server. If a
+ # server constraint was violated, populates #errors and returns false.
def save
return false unless valid?
if @original_attributes
@@ -518,6 +519,7 @@ module Ldapter
true
end
+ # Like #save, but raise an exception if the entry could not be saved.
def save!
save ? self : raise(EntryNotSaved)
end | Document #save and #save! | tpope_ldaptic | train | rb |
288f60517fb8b2239523a4d0e3e81bc551f87374 | diff --git a/tests/test_main.py b/tests/test_main.py
index <HASH>..<HASH> 100644
--- a/tests/test_main.py
+++ b/tests/test_main.py
@@ -1,3 +1,4 @@
+import sys
import asyncio
import os
import time
@@ -36,6 +37,11 @@ def test_sigterm():
assert not loop.is_closed()
[email protected](sys.version_info >= (3, 7), reason=(
+ "On nightly (3.7), the use of uvloop causes the following error:\n\n"
+ "AttributeError: module 'asyncio.coroutines' has no attribute 'debug_wrapper'\n\n"
+ "This is being tracked upstream at https://github.com/MagicStack/uvloop/issues/126"
+))
def test_uvloop():
"""Basic SIGTERM"""
async def main(): | Skip uvloop test on nightly. | cjrh_aiorun | train | py |
b595208c2c29ffc74e3563881c37822c918488c2 | diff --git a/src/pyctd/manager/database.py b/src/pyctd/manager/database.py
index <HASH>..<HASH> 100755
--- a/src/pyctd/manager/database.py
+++ b/src/pyctd/manager/database.py
@@ -29,8 +29,8 @@ from ..constants import bcolors
log = logging.getLogger(__name__)
alchemy_pandas_dytpe_mapper = {
- sqltypes.Text: np.unicode,
- sqltypes.String: np.unicode,
+ sqltypes.Text: np.object,
+ sqltypes.String: np.object,
sqltypes.Integer: np.float,
sqltypes.REAL: np.double
} | changed np.unicode to np.pbject in alchemy_pandas_dytpe_mapper | cebel_pyctd | train | py |
7a576acb83688194e366c4ffd3a3d1edd7982062 | diff --git a/lib/draper/decorated_enumerable_proxy.rb b/lib/draper/decorated_enumerable_proxy.rb
index <HASH>..<HASH> 100644
--- a/lib/draper/decorated_enumerable_proxy.rb
+++ b/lib/draper/decorated_enumerable_proxy.rb
@@ -27,7 +27,7 @@ module Draper
alias :is_a? :kind_of?
def ==(other)
- @wrapped_collection == other
+ @wrapped_collection == (other.respond_to?(:source) ? other.source : other)
end
def to_s | Fixing problem when comparing two decorated collections with eq matcher on rspec expect method | drapergem_draper | train | rb |
c0eb4c0a7f8482b365f1756f5abb62ebd65e5e19 | diff --git a/src/org/openscience/cdk/qsar/model/R2/RModel.java b/src/org/openscience/cdk/qsar/model/R2/RModel.java
index <HASH>..<HASH> 100644
--- a/src/org/openscience/cdk/qsar/model/R2/RModel.java
+++ b/src/org/openscience/cdk/qsar/model/R2/RModel.java
@@ -32,8 +32,6 @@ public abstract class RModel implements IModel {
private String modelName = null;
protected RList modelObject = null;
protected HashMap params = null;
- protected String paramVarName = null;
-
/**
* The object that performs the calls to the R engine.
@@ -263,7 +261,7 @@ public abstract class RModel implements IModel {
if (prefix == null || prefix.equals("")) prefix = "var";
Random rnd = new Random();
long uid = ((System.currentTimeMillis() >>> 16) << 16) + rnd.nextLong();
- return prefix + uid;
+ return prefix + String.valueOf(Math.abs(uid)).trim();
}
/** | made the unique var name method more robust and also modified to convert -ve values of the nuemric part to +ve
git-svn-id: <URL> | cdk_cdk | train | java |
c0094c3b1068595305e5b770ce6b11fd5d1a0dc4 | diff --git a/src/Vinelab/Cdn/Providers/AwsS3Provider.php b/src/Vinelab/Cdn/Providers/AwsS3Provider.php
index <HASH>..<HASH> 100755
--- a/src/Vinelab/Cdn/Providers/AwsS3Provider.php
+++ b/src/Vinelab/Cdn/Providers/AwsS3Provider.php
@@ -137,6 +137,7 @@ class AwsS3Provider extends Provider implements ProviderInterface
'acl' => $this->default['providers']['aws']['s3']['acl'],
'cloudfront' => $this->default['providers']['aws']['s3']['cloudfront']['use'],
'cloudfront_url' => $this->default['providers']['aws']['s3']['cloudfront']['cdn_url'],
+ 'http' => $this->default['providers']['aws']['s3']['http'],
];
// check if any required configuration is missed | fix an issue for non-pem users | Vinelab_cdn | train | php |
c5f24e5a6cd977c3e785488287c56c537a0631b1 | diff --git a/PHPCompatibility/Tests/BaseSniffTest.php b/PHPCompatibility/Tests/BaseSniffTest.php
index <HASH>..<HASH> 100644
--- a/PHPCompatibility/Tests/BaseSniffTest.php
+++ b/PHPCompatibility/Tests/BaseSniffTest.php
@@ -272,7 +272,7 @@ class BaseSniffTest extends TestCase
private function assertForType($issues, $type, $lineNumber, $expectedMessage)
{
if (isset($issues[$lineNumber]) === false) {
- throw new \Exception("Expected $type '$expectedMessage' on line number $lineNumber, but none found.");
+ $this->fail("Expected $type '$expectedMessage' on line number $lineNumber, but none found.");
}
$insteadFoundMessages = array(); | BaseSniffTest: fail test on missing message, don't error
When an (unexpected) exception is thrown by a test, the test will be marked as "Errored", not as "Failed".
With that in mind, when an error/warning is expected on a certain line and it isn't found, the test should fail, not error.
Fixed now. | PHPCompatibility_PHPCompatibility | train | php |
266c9f43db56f8f5ca78dd08f818d5fb8476470a | diff --git a/QuickBooks/IPP/Service/TaxAgency.php b/QuickBooks/IPP/Service/TaxAgency.php
index <HASH>..<HASH> 100644
--- a/QuickBooks/IPP/Service/TaxAgency.php
+++ b/QuickBooks/IPP/Service/TaxAgency.php
@@ -25,4 +25,9 @@ class QuickBooks_IPP_Service_TaxAgency extends QuickBooks_IPP_Service
{
return parent::_query($Context, $realm, $query);
}
+
+ public function add($Context, $realm, $Object)
+ {
+ return parent::_add($Context, $realm, QuickBooks_IPP_IDS::RESOURCE_TAXAGENCY, $Object);
+ }
} | Update TaxAgency.php
Add add() method | consolibyte_quickbooks-php | train | php |
65031242789cd72ae3922859fb312d979409c921 | diff --git a/spool/spoolverb.py b/spool/spoolverb.py
index <HASH>..<HASH> 100644
--- a/spool/spoolverb.py
+++ b/spool/spoolverb.py
@@ -38,7 +38,7 @@ class Spoolverb(object):
self.meta = meta
self.version = version
self.num_editions = num_editions
- self.edition_number = edition_num
+ self.edition_number = edition_num if edition_num else ''
self.loan_start = loan_start
self.loan_end = loan_end
self.action = action | loan spoolverb now supports empty num_editions parameters.
Used when loaning a piece | ascribe_pyspool | train | py |
664826585700daab1686e55a00d945dbfba02490 | diff --git a/mod/forum/lib.php b/mod/forum/lib.php
index <HASH>..<HASH> 100644
--- a/mod/forum/lib.php
+++ b/mod/forum/lib.php
@@ -138,7 +138,22 @@ function forum_update_instance($forum) {
notify('Warning! There is more than one discussion in this forum - using the most recent');
$discussion = array_pop($discussions);
} else {
- print_error('cannotfinddisscussion', 'forum');
+ // try to recover by creating initial discussion - MDL-16262
+ $discussion = new object();
+ $discussion->course = $forum->course;
+ $discussion->forum = $forum->id;
+ $discussion->name = $forum->name;
+ $discussion->intro = $forum->intro;
+ $discussion->assessed = $forum->assessed;
+ $discussion->format = $forum->type;
+ $discussion->mailnow = false;
+ $discussion->groupid = -1;
+
+ forum_add_discussion($discussion, null, $message);
+
+ if (! $discussion = $DB->get_record('forum_discussions', array('forum'=>$forum->id))) {
+ print_error('cannotadd', 'forum');
+ }
}
}
if (! $post = $DB->get_record('forum_posts', array('id'=>$discussion->firstpost))) { | MDL-<I> recovery of broken single simple discussions after old reset | moodle_moodle | train | php |
a14b313dc6f180dab402ceb07abdc2db1a3ed673 | diff --git a/packages/ember-metal/lib/core.js b/packages/ember-metal/lib/core.js
index <HASH>..<HASH> 100644
--- a/packages/ember-metal/lib/core.js
+++ b/packages/ember-metal/lib/core.js
@@ -167,7 +167,7 @@ if (typeof Ember.EXTEND_PROTOTYPES === 'undefined') {
Ember.LOG_STACKTRACE_ON_DEPRECATION = (Ember.ENV.LOG_STACKTRACE_ON_DEPRECATION !== false);
/**
- Determines whether Ember should add ECMAScript 5 shims to older browsers.
+ Determines whether Ember should add ECMAScript 5 Array shims to older browsers.
@property SHIM_ES5
@type Boolean | Clarify that SHIM_ES5 only adds Array methods
I spent a little time today trying to figure out why the SHIM_ES5 setting wasn't shimming Function#bind — and its because this flag only enables Array method shims. Added clarification. | emberjs_ember.js | train | js |
671ce46456746a239e6bb120655a0ea4105c96ae | diff --git a/spec/dummy/config/environments/test.rb b/spec/dummy/config/environments/test.rb
index <HASH>..<HASH> 100644
--- a/spec/dummy/config/environments/test.rb
+++ b/spec/dummy/config/environments/test.rb
@@ -30,7 +30,7 @@ Rails.application.configure do
config.action_dispatch.show_exceptions = false
# Disable request forgery protection in test environment.
- config.action_controller.allow_forgery_protection = false
+ config.action_controller.allow_forgery_protection = true
config.action_mailer.perform_caching = false
diff --git a/spec/features/admin/legacy_page_url_management_spec.rb b/spec/features/admin/legacy_page_url_management_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/features/admin/legacy_page_url_management_spec.rb
+++ b/spec/features/admin/legacy_page_url_management_spec.rb
@@ -11,7 +11,8 @@ RSpec.describe "Legacy page url management", type: :system, js: true do
def open_page_properties
visit admin_pages_path
- page.find("a[href='#{configure_admin_page_path(a_page)}']").click
+ expect(page).to have_no_css(".spinner")
+ page.find("a[href='#{configure_admin_page_path(a_page)}']", wait: 10).click
end
it "lets a user add a page link" do | fix admin sitemap feature specs
Enable forgery protection so we have a csrf-token in the dom that our ajax lib
expects.
Also wait for the sitemap spinner to disappear
before acting with the dom with capybara | AlchemyCMS_alchemy_cms | train | rb,rb |
8f0de029a3e3b79434ea1cccd2711e5deffc4501 | diff --git a/test/helper.rb b/test/helper.rb
index <HASH>..<HASH> 100644
--- a/test/helper.rb
+++ b/test/helper.rb
@@ -16,6 +16,9 @@ puts "Testing with Rails #{Rails.version}"
require 'dalli'
require 'logger'
+require 'active_support/time'
+require 'active_support/cache/dalli_store'
+
Dalli.logger = Logger.new(STDOUT)
Dalli.logger.level = Logger::ERROR | Explicitly include the Dalli cache adapter and ActiveSupport::Time to avoid intermittent failures in CI | petergoldstein_dalli | train | rb |
c0206a778fdb7e8b22c3436a03e999ca787750cf | diff --git a/lib/rest-sugar-client.js b/lib/rest-sugar-client.js
index <HASH>..<HASH> 100644
--- a/lib/rest-sugar-client.js
+++ b/lib/rest-sugar-client.js
@@ -50,6 +50,10 @@ function op(method, r, q) {
request.get({url: r + '/' + id, qs: o},
handle(cb));
}
+ else if(funkit.isFunction(o)) {
+ request.get({url: r, qs: {method: method}},
+ handle(cb));
+ }
else {
o = o? o: '';
request.get({url: r + o, qs: {method: method}}, | Make it possible to pass cb as the first param
Ie. api.count(cb) should work now. | sugarjs_rest-sugar-client | train | js |
8bbc45b1137aa5206cee825a39cc3d2ca5895e13 | diff --git a/alot/buffers.py b/alot/buffers.py
index <HASH>..<HASH> 100644
--- a/alot/buffers.py
+++ b/alot/buffers.py
@@ -6,6 +6,7 @@ import settings
import commands
from walker import PipeWalker
from helper import shorten_author_string
+from db import NonexistantObjectError
class Buffer(object):
@@ -228,7 +229,12 @@ class ThreadBuffer(Buffer):
self._build_pile(acc, reply, msg, depth + 1)
def rebuild(self):
- self.thread.refresh()
+ try:
+ self.thread.refresh()
+ except NonexistantObjectError:
+ self.body = urwid.SolidFill()
+ self.message_count = 0
+ return
# depth-first traversing the thread-tree, thereby
# 1) build a list of tuples (parentmsg, depth, message) in DF order
# 2) create a dict that counts no. of direct replies per message
@@ -255,7 +261,9 @@ class ThreadBuffer(Buffer):
depth=depth,
bars_at=bars)
msglines.append(mwidget)
+
self.body = urwid.ListBox(msglines)
+ self.message_count = self.thread.get_total_messages()
def get_selection(self):
"""returns focussed :class:`~alot.widgets.MessageWidget`""" | ThreadBuffer rebuilds to SolidFill if thread nonexistant
ThreadBuffer will display as urwid.SolidFill if the
displayed thread seized to exist. This could happen
for example if the last message of that thread has been
removed. | pazz_alot | train | py |
e4efa7da84c30d40fd471e312458eab5c5c39039 | diff --git a/aws/resource_aws_lb_listener.go b/aws/resource_aws_lb_listener.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_lb_listener.go
+++ b/aws/resource_aws_lb_listener.go
@@ -92,6 +92,7 @@ func resourceAwsLbListener() *schema.Resource {
"order": {
Type: schema.TypeInt,
Optional: true,
+ Computed: true,
ValidateFunc: validation.IntBetween(1, 50000),
}, | resource/aws_lb_listener: Set action order to Computed
While the acceptance testing is not finding this scenario, the order value may return as 1. This may be caused by web console updates. | terraform-providers_terraform-provider-aws | train | go |
88a49fe9ebce3564fd3355005ddfa63e17afb9ef | diff --git a/components/remotes/remotes.js b/components/remotes/remotes.js
index <HASH>..<HASH> 100644
--- a/components/remotes/remotes.js
+++ b/components/remotes/remotes.js
@@ -92,7 +92,6 @@ class RemotesViewModel {
updateRemotes() {
return this.server.getPromise('/remotes', { path: this.repoPath() })
.then(remotes => {
- const outerThis = this;
remotes = remotes.map(remote => ({
name: remote,
changeRemote: () => { this.currentRemote(remote) } | Update components/remotes/remotes.js | FredrikNoren_ungit | train | js |
b9be43267792d2f1c4d1c96f07f0f7300e4509f0 | diff --git a/openquake/job/__init__.py b/openquake/job/__init__.py
index <HASH>..<HASH> 100644
--- a/openquake/job/__init__.py
+++ b/openquake/job/__init__.py
@@ -80,7 +80,7 @@ def run_job(job_file, output_type):
a_job.set_status('running')
try:
- results = a_job.launch()
+ a_job.launch()
except sqlalchemy.exc.SQLAlchemyError:
# Try to cleanup the session status to have a chance to update the
# job record without further errors.
@@ -97,9 +97,6 @@ def run_job(job_file, output_type):
raise
else:
a_job.set_status('succeeded')
-
- for filepath in results:
- print filepath
else:
a_job.set_status('failed') | Only print file names when they are written to disk.
Former-commit-id: 5b<I>cb1e2f<I>da7ddac<I>a<I>c7b0b0a<I> | gem_oq-engine | train | py |
b887fecefb62f2e62793e8b76bff2a12fc5ae5dc | diff --git a/HardwareSource.py b/HardwareSource.py
index <HASH>..<HASH> 100644
--- a/HardwareSource.py
+++ b/HardwareSource.py
@@ -433,7 +433,7 @@ class HardwareSourceDataBuffer(object):
# select the preferred item.
# TODO: better mechanism for selecting preferred item at start of acquisition.
if self.first_data:
- self.notify_listeners("acquisition_started", self.data_group, new_channel_to_data_item_dict)
+ self.notify_listeners("acquisition_started", self.hardware_source, self.data_group, new_channel_to_data_item_dict)
self.first_data = False
# update the data items with the new data. | Rework source panel to not use combobox pop-up.
svn r<I> | nion-software_nionswift | train | py |
68f44050d12222c0364d5549bfa4360c245f2806 | diff --git a/moskito-core/src/main/java/net/anotheria/moskito/core/predefined/AbstractStatsFactory.java b/moskito-core/src/main/java/net/anotheria/moskito/core/predefined/AbstractStatsFactory.java
index <HASH>..<HASH> 100644
--- a/moskito-core/src/main/java/net/anotheria/moskito/core/predefined/AbstractStatsFactory.java
+++ b/moskito-core/src/main/java/net/anotheria/moskito/core/predefined/AbstractStatsFactory.java
@@ -3,7 +3,6 @@ package net.anotheria.moskito.core.predefined;
import java.util.Arrays;
import net.anotheria.moskito.core.dynamic.IOnDemandStatsFactory;
-import net.anotheria.moskito.core.predefined.Constants;
import net.anotheria.moskito.core.producers.IStats;
import net.anotheria.moskito.core.stats.Interval; | removed unneeded import (pmd) | anotheria_moskito | train | java |
29940c8784083e9d9f82f920112ada01c02e467a | diff --git a/opentrons_sdk/drivers/motor.py b/opentrons_sdk/drivers/motor.py
index <HASH>..<HASH> 100644
--- a/opentrons_sdk/drivers/motor.py
+++ b/opentrons_sdk/drivers/motor.py
@@ -7,6 +7,11 @@ import serial
from opentrons_sdk.util import log
+JSON_ERROR = None
+if sys.version_info > (3, 4):
+ JSON_ERROR = ValueError
+else:
+ JSON_ERROR = json.decoder.JSONDecodeError
class GCodeLogger():
@@ -344,7 +349,9 @@ class CNCDriver(object):
coords['target'][letter] = response_dict.get(letter.upper(),0)
- except (ValueError, json.decoder.JSONDecodeError) as e:
+ # TODO (andy): travis-ci is testing on both 3.4 and 3.5
+ # JSONDecodeError does not exist in 3.4 so the build breaks here
+ except JSON_ERROR as e:
log.debug("Serial", "Error parsing JSON string:")
log.debug("Serial", res) | json throws diff error depending on python version | Opentrons_opentrons | train | py |
d1675f69f9ed0fdeb3dc56e44f4211cba22eeecf | diff --git a/odl/test/solvers/functional/default_functionals_test.py b/odl/test/solvers/functional/default_functionals_test.py
index <HASH>..<HASH> 100644
--- a/odl/test/solvers/functional/default_functionals_test.py
+++ b/odl/test/solvers/functional/default_functionals_test.py
@@ -583,7 +583,7 @@ def test_weighted_proximal_L1_norm(space):
# Check if the subdifferential inequalities are satisfied.
# p = prox_{sigma * f}(x) iff (x - p)/sigma = grad f(p)
- assert all_almost_equal(func.gradient(p1), space.divide(x - p1, sigma))
+ assert all_almost_equal(func.gradient(p1), (x - p1) / sigma)
if __name__ == '__main__': | MAINT: Replace 'divide' by '/' in test. | odlgroup_odl | train | py |
b9a02036fb22ab75114cab6f0dfc904897754c2a | diff --git a/tests/Functional/CheckerManagerTest.php b/tests/Functional/CheckerManagerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Functional/CheckerManagerTest.php
+++ b/tests/Functional/CheckerManagerTest.php
@@ -42,7 +42,7 @@ class CheckerManagerTest extends TestCase
/**
* @expectedException \InvalidArgumentException
- * @expectedExceptionMessage The JWT is issued in the futur.
+ * @expectedExceptionMessage The JWT is issued in the future.
*/
public function testJWTIssuedInTheFuture()
{ | Test fixed
Test failed due to the modification introduced by #<I> | Spomky-Labs_jose | train | php |
738165807261c30a89d6823c93a51c6fde0d4c56 | diff --git a/casviewer/blobs.go b/casviewer/blobs.go
index <HASH>..<HASH> 100644
--- a/casviewer/blobs.go
+++ b/casviewer/blobs.go
@@ -84,7 +84,7 @@ func returnBlob(ctx context.Context, w http.ResponseWriter, cl *client.Client, b
// readBlob reads the blob from CAS.
func readBlob(ctx context.Context, cl *client.Client, bd *digest.Digest) ([]byte, error) {
- b, err := cl.ReadBlob(ctx, *bd)
+ b, _, err := cl.ReadBlob(ctx, *bd)
if err != nil {
// convert gRPC code to LUCI errors tag.
t := grpcutil.Tag.With(status.Code(err)) | casviewer: fix build
This is follow up for
<URL> | luci_luci-go | train | go |
161088aba48d09de64f2a414e0df10e2432dfd0a | diff --git a/lib/fog/vcloud_director/generators/compute/edge_gateway_service_configuration.rb b/lib/fog/vcloud_director/generators/compute/edge_gateway_service_configuration.rb
index <HASH>..<HASH> 100644
--- a/lib/fog/vcloud_director/generators/compute/edge_gateway_service_configuration.rb
+++ b/lib/fog/vcloud_director/generators/compute/edge_gateway_service_configuration.rb
@@ -143,6 +143,7 @@ module Fog
xml.SourcePort rule[:SourcePort] == "Any" ? "-1" : rule[:SourcePort]
xml.SourcePortRange rule[:SourcePortRange]
xml.SourceIp rule[:SourceIp]
+ xml.EnableLogging rule[:EnableLogging] if rule.key?(:EnableLogging)
}
end | Add EnableLogging field to FirewallService XML | fog_fog | train | rb |
8366b6738b42631ec0168e7acd04d9918a9d0bda | diff --git a/lib/components/src/theme.js b/lib/components/src/theme.js
index <HASH>..<HASH> 100644
--- a/lib/components/src/theme.js
+++ b/lib/components/src/theme.js
@@ -25,6 +25,8 @@ export const normal = {
mainTextColor: baseFonts.color,
dimmedTextColor: 'rgba(0,0,0,0.4)',
highlightColor: '#9fdaff',
+ successColor: '#0edf62',
+ failColor: '#ff3f3f',
mainTextSize: 13,
monoTextFace: monoFonts.fontFamily,
layoutMargin: '10px',
@@ -51,6 +53,8 @@ export const dark = {
mainTextColor: '#efefef',
dimmedTextColor: 'rgba(255,255,255,0.4)',
highlightColor: '#9fdaff',
+ successColor: '#0edf62',
+ failColor: '#ff3f3f',
mainTextSize: 13,
monoTextFace: monoFonts.fontFamily,
layoutMargin: '10px', | ADD success & fail colours to theme | storybooks_storybook | train | js |
cba1aadbbe3474a001e4787cf85fabde91ebd3ec | diff --git a/lib/events-ha-node.js b/lib/events-ha-node.js
index <HASH>..<HASH> 100644
--- a/lib/events-ha-node.js
+++ b/lib/events-ha-node.js
@@ -73,7 +73,9 @@ class EventsHaNode extends EventsNode {
switch (type) {
case INTEGRATION_UNLOADED:
case INTEGRATION_NOT_LOADED:
- this.isEnabled = true;
+ if (this.type !== 'trigger-state') {
+ this.isEnabled = true;
+ }
this.removeSubscription();
this.updateConnectionStatus();
break; | fix: stop enabling trigger-state node on connection to HA
Fixes: #<I> | zachowj_node-red-contrib-home-assistant-websocket | train | js |
0d9cbd058eb999e0dc77de76f676dac4aa11965b | diff --git a/securesystemslib/keys.py b/securesystemslib/keys.py
index <HASH>..<HASH> 100755
--- a/securesystemslib/keys.py
+++ b/securesystemslib/keys.py
@@ -602,8 +602,8 @@ def format_metadata_to_key(key_metadata):
keytype = key_metadata['keytype']
key_value = key_metadata['keyval']
- # Convert 'key_value' to 'securesystemslib.formats.KEY_SCHEMA' and generate its
- # hash The hash is in hexdigest form.
+ # Convert 'key_value' to 'securesystemslib.formats.KEY_SCHEMA' and generate
+ # its hash The hash is in hexdigest form.
default_keyid = _get_keyid(keytype, key_value)
keyids = set()
keyids.add(default_keyid)
@@ -637,7 +637,7 @@ def _get_keyid(keytype, key_value, hash_algorithm = 'sha256'):
# Create a digest object and call update(), using the JSON
# canonical format of 'rskey_meta' as the update data.
- digest_object = securesystemslib.hash.digest(_KEY_ID_HASH_ALGORITHM)
+ digest_object = securesystemslib.hash.digest(hash_algorithm)
digest_object.update(key_update_data.encode('utf-8'))
# 'keyid' becomes the hexadecimal representation of the hash. | Fix bug in _get_keyid()
The hash_algorithm argument to _get_keyid() wasn't correctly being used | secure-systems-lab_securesystemslib | train | py |
c5d9117f3abc7667ee6a5c88fc16add22861adb9 | diff --git a/go/vt/wrangler/wrangler.go b/go/vt/wrangler/wrangler.go
index <HASH>..<HASH> 100644
--- a/go/vt/wrangler/wrangler.go
+++ b/go/vt/wrangler/wrangler.go
@@ -94,7 +94,7 @@ func (wr *Wrangler) ChangeType(tabletAlias naming.TabletAlias, dbType naming.Tab
// You don't have a choice - you must wait for
// completion before rebuilding.
if err == nil {
- err = wr.ai.WaitForCompletion(actionPath, DefaultActionTimeout)
+ err = wr.ai.WaitForCompletion(actionPath, wr.actionTimeout())
}
} | ChangeType should use the wrangler's action timeout. | vitessio_vitess | train | go |
722ef383adbb174593585e55a9c943d5a1e6d07e | diff --git a/lib/mementus.rb b/lib/mementus.rb
index <HASH>..<HASH> 100644
--- a/lib/mementus.rb
+++ b/lib/mementus.rb
@@ -1,5 +1,5 @@
require 'virtus'
require 'axiom-memory-adapter'
-require 'mementus/version'
-require 'mementus/model'
+require_relative 'mementus/version'
+require_relative 'mementus/model'
diff --git a/lib/mementus/version.rb b/lib/mementus/version.rb
index <HASH>..<HASH> 100644
--- a/lib/mementus/version.rb
+++ b/lib/mementus/version.rb
@@ -1,3 +1,3 @@
module Mementus
- VERSION = "0.1.0"
+ VERSION = "0.1.1"
end
diff --git a/spec/collection_spec.rb b/spec/collection_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/collection_spec.rb
+++ b/spec/collection_spec.rb
@@ -9,11 +9,13 @@ describe Mementus::Model do
attribute :order, Integer
end
- 20.times do |i|
- item = Item.new
- item.name = "Item: #{i.to_s}"
- item.order = i + 1
- item.create
+ before(:all) do
+ 20.times do |i|
+ item = Item.new
+ item.name = "Item: #{i.to_s}"
+ item.order = i + 1
+ item.create
+ end
end
it "counts created items" do | Use before hook in collection spec to deal with gc issues | maetl_mementus | train | rb,rb,rb |
49db717c3596973518adff2f7a000f2f61e75445 | diff --git a/lib/dpl/provider/heroku/git.rb b/lib/dpl/provider/heroku/git.rb
index <HASH>..<HASH> 100644
--- a/lib/dpl/provider/heroku/git.rb
+++ b/lib/dpl/provider/heroku/git.rb
@@ -2,14 +2,23 @@ module DPL
class Provider
module Heroku
class Git < Generic
+ require 'netrc'
+
def git_url
- "https://#{option(:api_key)}@git.heroku.com/#{option(:app)}.git"
+ "https://git.heroku.com/#{option(:app)}.git"
end
def push_app
+ write_netrc
git_remote = options[:git] || git_url
context.shell "git push #{git_remote} HEAD:refs/heads/master -f"
end
+
+ def write_netrc
+ n = Netrc.read
+ n['git.heroku.com'] = [user, option(:api_key)]
+ n.save
+ end
end
end
  end | heroku http git: write token to netrc, so it won't get printed to the logs accidentally | travis-ci_dpl | train | rb
4644a634dfe88aba7d836f6eb33d3708b376ea1a | diff --git a/generators/generator-constants.js b/generators/generator-constants.js
index <HASH>..<HASH> 100644
--- a/generators/generator-constants.js
+++ b/generators/generator-constants.js
@@ -60,7 +60,7 @@ const DOCKER_CASSANDRA = 'cassandra:3.11.11';
const DOCKER_MSSQL = 'mcr.microsoft.com/mssql/server:2019-CU13-ubuntu-20.04';
const DOCKER_NEO4J = 'neo4j:4.3.7';
const DOCKER_HAZELCAST_MANAGEMENT_CENTER = 'hazelcast/management-center:4.2021.06';
-const DOCKER_MEMCACHED = 'memcached:1.6.10-alpine';
+const DOCKER_MEMCACHED = 'memcached:1.6.12-alpine';
const DOCKER_REDIS = 'redis:6.2.5';
const DOCKER_KEYCLOAK = 'jboss/keycloak:15.0.2'; // The version should match the attribute 'keycloakVersion' from /docker-compose/templates/realm-config/jhipster-realm.json.ejs and /server/templates/src/main/docker/config/realm-config/jhipster-realm.json.ejs
const DOCKER_ELASTICSEARCH = 'docker.elastic.co/elasticsearch/elasticsearch:7.13.3'; // The version should be coherent with the one from spring-data-elasticsearch project | Update memcached docker image version to <I>-alpine | jhipster_generator-jhipster | train | js |
c36f1e9e6c61dad042cd5827f816a570bc71cc6b | diff --git a/lib/puppet/provider/service/systemd.rb b/lib/puppet/provider/service/systemd.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/provider/service/systemd.rb
+++ b/lib/puppet/provider/service/systemd.rb
@@ -5,7 +5,14 @@ Puppet::Type.type(:service).provide :systemd, :parent => :base do
commands :systemctl => "systemctl"
- confine :exists => "/run/systemd/system"
+ if Facter.value(:osfamily).downcase == 'debian'
+ # With multiple init systems on Debian, it is possible to have
+ # pieces of systemd around (e.g. systemctl) but not really be
+ # using systemd. We do not do this on other platforms as it can
+ # cause issues when running in a chroot without /run mounted
+ # (PUP-5577)
+ confine :exists => "/run/systemd/system"
+ end
defaultfor :osfamily => [:archlinux]
defaultfor :osfamily => :redhat, :operatingsystemmajrelease => "7" | (PUP-<I>) Don't require /run/systemd/system for systemd
7fe<I>f<I>fc4c<I>cd6b<I>c<I>a<I>ff7c9f9 added this check; here we
restrict it to Debian because it means systemd is not detected if
running puppet within a chroot environment where this is not yet
mounted. | puppetlabs_puppet | train | rb |
b5e2b3a86780466ab3064b06f1dfdcd6a03786bf | diff --git a/src/protean/impl/repository/__init__.py b/src/protean/impl/repository/__init__.py
index <HASH>..<HASH> 100644
--- a/src/protean/impl/repository/__init__.py
+++ b/src/protean/impl/repository/__init__.py
@@ -131,10 +131,7 @@ class Providers:
)
self.domain._domain_element(
- DomainObjects.REPOSITORY,
- _cls=new_class,
- aggregate_cls=aggregate_cls,
- bounded_context=aggregate_cls.meta_.bounded_context,
+ DomainObjects.REPOSITORY, _cls=new_class, aggregate_cls=aggregate_cls,
)
# FIXME Avoid comparing classes / Fetch a Repository class directly by its aggregate class
diff --git a/tests/entity/test_entity_meta.py b/tests/entity/test_entity_meta.py
index <HASH>..<HASH> 100644
--- a/tests/entity/test_entity_meta.py
+++ b/tests/entity/test_entity_meta.py
@@ -36,7 +36,6 @@ class TestEntityMeta:
# Domain attributes
assert hasattr(Person.meta_, "aggregate_cls")
- assert hasattr(Person.meta_, "bounded_context")
def test_entity_meta_has_declared_fields_on_construction(self):
assert Person.meta_.declared_fields is not None | Remove `bounded_context` attribute everywhere
The `domain` object (actually, the sub-domain) itself is representative
of the bounded context. No further annotation necessary. | proteanhq_protean | train | py,py |
6be7c79f66607896d1ad85e87c50b89d7dd5865e | diff --git a/tests/losantrest_tests.py b/tests/losantrest_tests.py
index <HASH>..<HASH> 100644
--- a/tests/losantrest_tests.py
+++ b/tests/losantrest_tests.py
@@ -30,7 +30,7 @@ class TestClient(unittest.TestCase):
self.assertEqual(request.headers["Accept"], "application/json")
self.assertEqual(request.headers["Content-Type"], "application/json")
self.assertNotIn("Authorization", request.headers)
- self.assertEqual(json.loads(request.body), creds)
+ self.assertEqual(json.loads(request.text), creds)
@requests_mock.Mocker()
def test_basic_call_with_auth(self, mock):
@@ -68,7 +68,7 @@ class TestClient(unittest.TestCase):
self.assertEqual(parsed_url.path, "/auth/user")
self.assertEqual(request.headers["Accept"], "application/json")
self.assertNotIn("Authorization", request.headers)
- self.assertEqual(json.loads(request.body), creds)
+ self.assertEqual(json.loads(request.text), creds)
@requests_mock.Mocker()
def test_nested_query_param_call(self, mock): | fix tests for python <I> to <I> | Losant_losant-rest-python | train | py |
4f4fdc608e2bfd8aaa7e5525c99914b1b5951804 | diff --git a/lib/beaker-vmware/version.rb b/lib/beaker-vmware/version.rb
index <HASH>..<HASH> 100644
--- a/lib/beaker-vmware/version.rb
+++ b/lib/beaker-vmware/version.rb
@@ -1,3 +1,3 @@
module BeakerVmware
- VERSION = '0.1.0'
+ VERSION = '0.2.0'
end | (GEM) update beaker-vmware version to <I> | puppetlabs_beaker-vmware | train | rb |
6fc39159ad7256c0d4850f4f64e58514f3d8dc73 | diff --git a/config/locker.php b/config/locker.php
index <HASH>..<HASH> 100644
--- a/config/locker.php
+++ b/config/locker.php
@@ -4,7 +4,7 @@ return [
'Locker' => [
'timeout' => 1000,
'FileLocker' => [
- 'dir' => '/tmp',
+ 'dir' => sys_get_temp_dir(),
],
],
];
diff --git a/src/Utility/Locker/FileLocker.php b/src/Utility/Locker/FileLocker.php
index <HASH>..<HASH> 100644
--- a/src/Utility/Locker/FileLocker.php
+++ b/src/Utility/Locker/FileLocker.php
@@ -11,7 +11,7 @@ class FileLocker extends BaseLocker
/**
* @var $lockDir
*/
- private $lockDir = '/tmp';
+ private $lockDir = null;
/**
* __construct method
@@ -31,6 +31,10 @@ class FileLocker extends BaseLocker
$this->lockDir = $dir;
}
+ if (empty($this->lockDir)) {
+ $this->lockDir = sys_get_temp_dir();
+ }
+
$lock = new FlockLock($this->lockDir);
parent::__construct($key, $lock); | Replaced hardcoded dir to store lock files by default system one | QoboLtd_cakephp-utils | train | php,php |
57629c1b2f05b195ed2908e591c0a02f970a1ecb | diff --git a/tests/test_optics.py b/tests/test_optics.py
index <HASH>..<HASH> 100644
--- a/tests/test_optics.py
+++ b/tests/test_optics.py
@@ -426,8 +426,6 @@ def test_RecurTraversal_no_change():
assert data[n] is result[n]
[email protected]
[email protected]
def test_RecurTraversal_memoizes_hashable():
depth = 100
width = 10 | Removed pytest marks
They have to be registered now and the couple that were in use were
unimportant. | ingolemo_python-lenses | train | py |
f8866d9c0b79e860ee138a60f3e7c90f53064edd | diff --git a/src/components/media_control/media_control.js b/src/components/media_control/media_control.js
index <HASH>..<HASH> 100644
--- a/src/components/media_control/media_control.js
+++ b/src/components/media_control/media_control.js
@@ -365,8 +365,7 @@ export default class MediaControl extends UIObject {
this.currentSeekBarPercentage = 100
// true if dvr is enabled but not in use. E.g. live stream with dvr but at live point
var dvrEnabledButNotInUse = this.container.isDvrEnabled() && !this.container.isDvrInUse()
- if (this.container.settings.seekEnabled && !dvrEnabledButNotInUse) {
- // if seek enabled or dvr is enabled and being used then set to the true percentage
+ if (!dvrEnabledButNotInUse) {
this.currentSeekBarPercentage = (this.currentPositionValue / this.currentDurationValue) * 100
}
this.setSeekPercentage(this.currentSeekBarPercentage) | media control: update seek bar percentage even if seek is disabled
There are some cases (e.g. ads playback) in which seek is disabled, but the percentage is still needed | clappr_clappr | train | js |
5be97492fc04314d89f930bdddf6d0b00f722bc9 | diff --git a/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb b/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb
index <HASH>..<HASH> 100644
--- a/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb
+++ b/lib/active_record/connection_adapters/oracle_enhanced_adapter.rb
@@ -672,7 +672,7 @@ module ActiveRecord
case @connection.error_code(exception)
when 1
RecordNotUnique.new(message)
- when 942, 955, 1418
+ when 900, 904, 942, 955, 1418, 17008
ActiveRecord::StatementInvalid.new(message)
when 1400
ActiveRecord::NotNullViolation.new(message) | Add these errors to be recognized as `ActiveRecord::StatementInvalid`
ORA-<I> "invalid SQL statement"
ORA-<I> "invalid identifier"
ORA-<I> "Closed Connection"
ORA-<I> is likely reported only with JRuby and JDBC driver. | rsim_oracle-enhanced | train | rb |
77da5989e3c7208d7e35dd30ec67a4e2fd4430ad | diff --git a/src/Phinx/Db/Adapter/SqlServerAdapter.php b/src/Phinx/Db/Adapter/SqlServerAdapter.php
index <HASH>..<HASH> 100644
--- a/src/Phinx/Db/Adapter/SqlServerAdapter.php
+++ b/src/Phinx/Db/Adapter/SqlServerAdapter.php
@@ -68,13 +68,13 @@ class SqlServerAdapter extends PdoAdapter implements AdapterInterface
}
$dsn .= ';MultipleActiveResultSets=false';
- // charset support
- if (isset($options['charset'])) {
- $dsn .= ';charset=' . $options['charset'];
- }
-
$driverOptions = array(\PDO::ATTR_ERRMODE => \PDO::ERRMODE_EXCEPTION);
+ // charset support
+ if (isset($options['charset'])) {
+ $driverOptions[\PDO::SQLSRV_ATTR_ENCODING] = $options['charset'];
+ }
+
// support arbitrary \PDO::SQLSRV_ATTR_* driver options and pass them to PDO
// http://php.net/manual/en/ref.pdo-sqlsrv.php#pdo-sqlsrv.constants
foreach ($options as $key => $option) { | Correct SqlServer charset
SqlServer charset is not done via the DSN, but via a driver flag/option. | cakephp_phinx | train | php |
6dc207fffcd293db845a44cf106c8445a2832822 | diff --git a/lib/actions/form.js b/lib/actions/form.js
index <HASH>..<HASH> 100644
--- a/lib/actions/form.js
+++ b/lib/actions/form.js
@@ -17,7 +17,7 @@ const {
getDefaultQuery,
getTripOptionsFromQuery,
getUrlParams,
- planParamsToQuery
+ planParamsToQueryAsync
} = coreUtils.query
export const settingQueryParam = createAction('SET_QUERY_PARAM')
@@ -70,7 +70,7 @@ export function parseUrlQueryString (params = getUrlParams()) {
})
const searchId = params.ui_activeSearch || coreUtils.storage.randId()
// Convert strings to numbers/objects and dispatch
- planParamsToQuery(planParams, getState().otp.config)
+ planParamsToQueryAsync(planParams, getState().otp.config)
.then(query => dispatch(setQueryParam(query, searchId)))
}
} | refactor(actions/form): Use renamed method planParamsToQueryAsync from OTP-UI. | opentripplanner_otp-react-redux | train | js |
d138579b07086630732fa949a947dfad34a241d5 | diff --git a/setuptools/tests/test_easy_install.py b/setuptools/tests/test_easy_install.py
index <HASH>..<HASH> 100644
--- a/setuptools/tests/test_easy_install.py
+++ b/setuptools/tests/test_easy_install.py
@@ -156,15 +156,17 @@ class TestUserInstallTest:
@mock.patch('setuptools.command.easy_install.__file__', None)
def test_user_install_implied(self):
+ # simulate setuptools installed in user site packages
easy_install_pkg.__file__ = site.USER_SITE
- site.ENABLE_USER_SITE = True # disabled sometimes
- #XXX: replace with something meaningfull
+ site.ENABLE_USER_SITE = True
+
+ # create a finalized easy_install command
dist = Distribution()
dist.script_name = 'setup.py'
cmd = ei.easy_install(dist)
cmd.args = ['py']
cmd.ensure_finalized()
- assert cmd.user, 'user should be implied'
+ assert not cmd.user, 'user should not be implied'
def test_multiproc_atexit(self):
try: | Update test to match new expectation following pull request #<I>. Refs #<I>. | pypa_setuptools | train | py |
422dddb5f40395e6b4bd93b7afd89d060dece706 | diff --git a/lib/rufus/sc/scheduler.rb b/lib/rufus/sc/scheduler.rb
index <HASH>..<HASH> 100644
--- a/lib/rufus/sc/scheduler.rb
+++ b/lib/rufus/sc/scheduler.rb
@@ -423,7 +423,7 @@ module Rufus::Scheduler
when Array
mutex.reduce(block) do |memo, m|
m = (@mutexes[m.to_s] ||= Mutex.new) unless m.is_a?(Mutex)
- -> { m.synchronize { memo.call } }
+ lambda { m.synchronize { memo.call } }
end.call
else
(@mutexes[mutex.to_s] ||= Mutex.new).synchronize { block.call } | use lambda instead of ->, be friendly to <I>
rufus-scheduler has been around for a while, there are certainly lots of deployments out there still running on <I>.x, they don't get "->". | jmettraux_rufus-scheduler | train | rb |
45fbcef049054ae06857ce72fac364d8b41b010c | diff --git a/uisrv/offering.go b/uisrv/offering.go
index <HASH>..<HASH> 100644
--- a/uisrv/offering.go
+++ b/uisrv/offering.go
@@ -160,6 +160,7 @@ func (s *Server) fillOffering(offering *data.Offering) error {
offering.Status = data.MsgUnpublished
offering.Agent = agent.EthAddr
offering.BlockNumberUpdated = 1
+ offering.CurrentSupply = offering.Supply
// TODO: remove once prepaid is implemented.
offering.BillingType = data.BillingPostpaid | offering's current supply update on agent side | Privatix_dappctrl | train | go |
f685cd7fdc22f5ee9f3bdecb0e3b3d5121e6941e | diff --git a/flask_mongo_profiler/__init__.py b/flask_mongo_profiler/__init__.py
index <HASH>..<HASH> 100644
--- a/flask_mongo_profiler/__init__.py
+++ b/flask_mongo_profiler/__init__.py
@@ -1 +1,3 @@
+from __future__ import absolute_import
+
from . import contrib | :wheelchair: Add absolute_import | peergradeio_flask-mongo-profiler | train | py |
9d612cdab2ab993cda672cbd92c7d791537eaaa1 | diff --git a/lib/fast_haml/compiler.rb b/lib/fast_haml/compiler.rb
index <HASH>..<HASH> 100644
--- a/lib/fast_haml/compiler.rb
+++ b/lib/fast_haml/compiler.rb
@@ -303,7 +303,7 @@ module FastHaml
when value == true
[[:haml, :attr, key, [:multi]]]
when value == false
- []
+ [[:multi]]
when value.is_a?(Hash) && key == 'data'
data = AttributeBuilder.normalize_data(value)
data.keys.sort.map do |k|
@@ -315,7 +315,7 @@ module FastHaml
end
def compile_dynamic_attribute(key, value)
- [[:haml, :attr, key, [:code, value]]]
+ [[:haml, :attr, key, [:dvalue, value]]]
end
def compile_script(ast)
diff --git a/lib/fast_haml/html.rb b/lib/fast_haml/html.rb
index <HASH>..<HASH> 100644
--- a/lib/fast_haml/html.rb
+++ b/lib/fast_haml/html.rb
@@ -23,7 +23,7 @@ module FastHaml
else
[:static, " #{name}=#{options[:attr_quote]}#{name}#{options[:attr_quote]}"]
end
- elsif value[0] == :code
+ elsif value[0] == :dvalue
[:multi,
[:code, "value = (#{value[1]})"],
[:case, 'value', | Rename symbol because :code is used as another semantics | eagletmt_faml | train | rb,rb |
b5f26a35d66b8a5f5c00152c2f52e65874046a4b | diff --git a/tohu/item_list.py b/tohu/item_list.py
index <HASH>..<HASH> 100644
--- a/tohu/item_list.py
+++ b/tohu/item_list.py
@@ -4,6 +4,7 @@ import re
import pandas as pd
from operator import attrgetter
from sqlalchemy import create_engine, inspect
+from sqlalchemy.schema import CreateSchema
logger = logging.getLogger('tohu')
@@ -167,6 +168,10 @@ class ItemList:
engine = create_engine(url)
ins = inspect(engine)
+ if schema is not None and schema not in ins.get_schema_names():
+ logger.debug(f"Creating non-existing schema: '{schema}'")
+ engine.execute(CreateSchema(schema))
+
if table_name in ins.get_table_names(schema=schema) and if_exists == 'do_nothing':
logger.debug("Table already exists (use if_exists='replace' or if_exists='append' to modify it).")
return | Create schema if it doesn't exist | maxalbert_tohu | train | py |
64d5336ba10279ebf95fed505664d48b1d82b762 | diff --git a/plugins/worker/server/api/worker.py b/plugins/worker/server/api/worker.py
index <HASH>..<HASH> 100644
--- a/plugins/worker/server/api/worker.py
+++ b/plugins/worker/server/api/worker.py
@@ -46,8 +46,7 @@ class Worker(Resource):
result['stats'] = status.stats()
result['ping'] = status.ping()
result['active'] = status.active()
- result['queues'] = status.active_queues()
- result['scheduled'] = status.scheduled()
+ result['reserved'] = status.reserved()
return result
@autoDescribeRoute( | API: Removes unused information about worker and return the queued tasks list | girder_girder | train | py |
ba2c7aaf34f11b83885a8ecbfdd542f790ab484e | diff --git a/closure/goog/module/moduleloader.js b/closure/goog/module/moduleloader.js
index <HASH>..<HASH> 100644
--- a/closure/goog/module/moduleloader.js
+++ b/closure/goog/module/moduleloader.js
@@ -108,20 +108,6 @@ goog.module.ModuleLoader.prototype.loadModulesInternal = function(
/**
- * Create a script tag.
- * @param {string} uri The uri of the script.
- * @return {Element} The new tag.
- * @private
- */
-goog.module.ModuleLoader.prototype.createScriptElement_ = function(uri) {
- var scriptEl = goog.dom.createElement('script');
- scriptEl.src = uri;
- scriptEl.type = 'text/javascript';
- return scriptEl;
-};
-
-
-/**
* Handles a successful response to a request for one or more modules.
*
* @param {goog.net.BulkLoader} bulkLoader The bulk loader. | Removed a dead function from ModuleLoader
R=ebixon
DELTA=<I> (0 added, <I> deleted, 0 changed)
Revision created by MOE tool push_codebase.
MOE_MIGRATION=<I>
git-svn-id: <URL> | google_closure-library | train | js |
93232f47b5f6e3adb66ef2b66f1fc2e2ca155f70 | diff --git a/src/main/java/lv/semti/morphology/analyzer/Wordform.java b/src/main/java/lv/semti/morphology/analyzer/Wordform.java
index <HASH>..<HASH> 100644
--- a/src/main/java/lv/semti/morphology/analyzer/Wordform.java
+++ b/src/main/java/lv/semti/morphology/analyzer/Wordform.java
@@ -41,7 +41,7 @@ public class Wordform extends AttributeValues implements Serializable{
private static final long serialVersionUID = 1L;
private String token;
private transient Ending ending;
- transient Lexeme lexeme;
+ public transient Lexeme lexeme;
public Wordform (String token, Lexeme lexeme, Ending ending) {
this.token = token; | Exposed a property needed for LVTagger | PeterisP_morphology | train | java |
fddbebed0f6c02bccd0061cdeb1a925aeaac827b | diff --git a/tests/core/mock.py b/tests/core/mock.py
index <HASH>..<HASH> 100644
--- a/tests/core/mock.py
+++ b/tests/core/mock.py
@@ -36,8 +36,7 @@ def is_authenticated(request):
# xAuth
return (
- request.headers.get('trakt-user-login') == 'mock' and
- request.headers.get('trakt-user-token') == 'mock'
+ request.headers.get('trakt-user-login') == 'mock' and request.headers.get('trakt-user-token') == 'mock'
) | Fixed formatting in [tests/core/mock.py] | fuzeman_trakt.py | train | py |
5e247752a1e1d57dab945e5027ca56fbced335ae | diff --git a/salt/cloud/clouds/msazure.py b/salt/cloud/clouds/msazure.py
index <HASH>..<HASH> 100644
--- a/salt/cloud/clouds/msazure.py
+++ b/salt/cloud/clouds/msazure.py
@@ -172,19 +172,19 @@ def avail_images(conn=None, call=None):
for image in images:
ret[image.name] = {
'category': image.category,
- 'description': image.description.encode('utf-8'),
+ 'description': image.description.encode('ascii', 'replace'),
'eula': image.eula,
'label': image.label,
'logical_size_in_gb': image.logical_size_in_gb,
'name': image.name,
'os': image.os,
}
- if image.affinity_group:
- ret[image.name] = image.affinity_group
- if image.location:
- ret[image.name] = image.location
- if image.media_link:
- ret[image.name] = image.media_link
+ if hasattr(image, 'affinity_group'):
+ ret[image.name]['affinity_group'] = image.affinity_group
+ if hasattr(image, 'location'):
+ ret[image.name]['location'] = image.location
+ if hasattr(image, 'media_link'):
+ ret[image.name]['media_link'] = image.media_link
return ret | Tidy up --list-images in Azure driver | saltstack_salt | train | py |
491aca375c245897928aa284f4b527cc822f345d | diff --git a/auth_state_machine.go b/auth_state_machine.go
index <HASH>..<HASH> 100644
--- a/auth_state_machine.go
+++ b/auth_state_machine.go
@@ -36,6 +36,11 @@ func (c *akeContext) receiveMessage(msg []byte) (toSend []byte) {
//TODO error?
return
}
+
+ //TODO error
+ c.authState, toSend = c.authState.receiveRevealSigMessage(c, msg)
+
+ //TODO set msgState = encrypted
}
return | Conversation receive Reveal Signature Message | coyim_otr3 | train | go |
6b47714e8c9ce62e1653350fc6bea0bfd1fc1376 | diff --git a/resource_aws_cloudwatch_event_target.go b/resource_aws_cloudwatch_event_target.go
index <HASH>..<HASH> 100644
--- a/resource_aws_cloudwatch_event_target.go
+++ b/resource_aws_cloudwatch_event_target.go
@@ -8,6 +8,7 @@ import (
"github.com/hashicorp/terraform/helper/schema"
"github.com/aws/aws-sdk-go/aws"
+ "github.com/aws/aws-sdk-go/aws/awserr"
events "github.com/aws/aws-sdk-go/service/cloudwatchevents"
)
@@ -94,6 +95,15 @@ func resourceAwsCloudWatchEventTargetRead(d *schema.ResourceData, meta interface
d.SetId("")
return nil
}
+ if awsErr, ok := err.(awserr.Error); ok {
+ // This should never happen, but it's useful
+ // for recovering from https://github.com/hashicorp/terraform/issues/5389
+ if awsErr.Code() == "ValidationException" {
+ log.Printf("[WARN] Removing CloudWatch Event Target %q because it never existed.", d.Id())
+ d.SetId("")
+ return nil
+ }
+ }
return err
}
log.Printf("[DEBUG] Found Event Target: %s", t) | Allow recovering from failed CW Event Target creation in state | terraform-providers_terraform-provider-aws | train | go |
496d06443a9c639ca40ce7524638fd31ce01ba78 | diff --git a/help.php b/help.php
index <HASH>..<HASH> 100644
--- a/help.php
+++ b/help.php
@@ -16,17 +16,13 @@
require_once('config.php');
- $file = optional_param('file', '', PARAM_CLEAN);
+ $file = optional_param('file', '', PARAM_PATH);
$text = optional_param('text', 'No text to display', PARAM_CLEAN);
$module = optional_param('module', 'moodle', PARAM_ALPHAEXT);
$forcelang = optional_param('forcelang', '', PARAM_ALPHAEXT);
print_header();
- if (detect_munged_arguments($module .'/'. $file)) {
- error('Filenames contain illegal characters!');
- }
-
print_simple_box_start('center', '96%');
$helpfound = false; | better cleaning of $file parameter SC#<I>; merged from MOODLE_<I>_STABLE | moodle_moodle | train | php |
7182949ff4bc0b0c9d8538c1ea4e7de4a9fa9698 | diff --git a/src/com/google/javascript/jscomp/DefaultPassConfig.java b/src/com/google/javascript/jscomp/DefaultPassConfig.java
index <HASH>..<HASH> 100644
--- a/src/com/google/javascript/jscomp/DefaultPassConfig.java
+++ b/src/com/google/javascript/jscomp/DefaultPassConfig.java
@@ -246,6 +246,10 @@ public final class DefaultPassConfig extends PassConfig {
checks.add(checkProvides);
}
+ if (options.generateExports && !options.skipNonTranspilationPasses) {
+ checks.add(generateExports);
+ }
+
// Late ES6 transpilation.
// Includes ES6 features that are best handled natively by the compiler.
// As we convert more passes to handle these features, we will be moving the transpilation
@@ -282,13 +286,6 @@ public final class DefaultPassConfig extends PassConfig {
checks.add(angularPass);
}
- // The following passes are more like "preprocessor" passes.
- // It's important that they run before most checking passes.
- // Perhaps this method should be renamed?
- if (options.generateExports) {
- checks.add(generateExports);
- }
-
if (options.exportTestFunctions) {
checks.add(exportTestFunctions);
} | Moving generate exports pass to before the later transpilation step
-------------
Created by MOE: <URL> | google_closure-compiler | train | java |
fbaf2111bd5a4b84b8d515b2e51ece628deb688c | diff --git a/src/org/javasimon/SimonFactory.java b/src/org/javasimon/SimonFactory.java
index <HASH>..<HASH> 100644
--- a/src/org/javasimon/SimonFactory.java
+++ b/src/org/javasimon/SimonFactory.java
@@ -29,6 +29,7 @@ public final class SimonFactory {
}
private SimonFactory() {
+ throw new UnsupportedOperationException();
}
/** | Creating of utility class is now prohibited also internally.
git-svn-id: <URL> | virgo47_javasimon | train | java |
a27d639be35d4b12c9c29cbd5653904e9f097a71 | diff --git a/lib/eiscp.rb b/lib/eiscp.rb
index <HASH>..<HASH> 100644
--- a/lib/eiscp.rb
+++ b/lib/eiscp.rb
@@ -1,6 +1,6 @@
# Library for controlling Onkyo receivers over TCP/IP.
-class EISCP
+module EISCP
VERSION = '0.0.3'
end | fixed class/module discrep | mikerodrigues_onkyo_eiscp_ruby | train | rb |