hash | diff | message | project | split |
---|---|---|---|---|
c102f3573090b176fbaeb3c8965560d20387128a | diff --git a/cmd.go b/cmd.go
index <HASH>..<HASH> 100644
--- a/cmd.go
+++ b/cmd.go
@@ -74,18 +74,19 @@ func init() {
out = new(tabwriter.Writer)
out.Init(os.Stdout, 0, 8, 1, '\t', 0)
+ server := "http://localhost:8000" // default server
+ if serverEnv := os.Getenv("UPDATECTL_SERVER"); serverEnv != "" {
+ server = serverEnv
+ }
+
globalFlagSet = flag.NewFlagSet(cliName, flag.ExitOnError)
- globalFlagSet.StringVar(&globalFlags.Server, "server", "http://localhost:8000", "Update server to connect to")
+ globalFlagSet.StringVar(&globalFlags.Server, "server", server, "Update server to connect to")
globalFlagSet.BoolVar(&globalFlags.Debug, "debug", false, "Output debugging info to stderr")
globalFlagSet.BoolVar(&globalFlags.Version, "version", false, "Print version information and exit.")
globalFlagSet.BoolVar(&globalFlags.Help, "help", false, "Print usage information and exit.")
globalFlagSet.StringVar(&globalFlags.User, "user", os.Getenv("UPDATECTL_USER"), "API Username")
globalFlagSet.StringVar(&globalFlags.Key, "key", os.Getenv("UPDATECTL_KEY"), "API Key")
- if server := os.Getenv("UPDATECTL_SERVER"); server != "" {
- globalFlags.Server = server
- }
-
commands = []*Command{
// admin.go
cmdAdminUser, | cmd: fix server env so it shows up in usage info | coreos_updateservicectl | train |
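The commit above moves the `UPDATECTL_SERVER` lookup ahead of flag registration, so the environment value becomes the flag's default and therefore appears in `--help`. A minimal sketch of the same pattern using Python's argparse (names mirror the commit, but the script itself is illustrative):

```python
import argparse
import os

# Resolve the default from the environment *before* registering the flag,
# so the value printed by --help is the one that will actually be used.
server = os.environ.get("UPDATECTL_SERVER", "http://localhost:8000")

parser = argparse.ArgumentParser(prog="updatectl")
parser.add_argument("--server", default=server,
                    help="Update server to connect to (default: %(default)s)")

args = parser.parse_args([])
print(args.server)  # env value if set, otherwise the built-in default
```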
507dd7c845d1b468a19a6ee24b38979cc92d0f6b | diff --git a/src/moonillum.js b/src/moonillum.js
index <HASH>..<HASH> 100644
--- a/src/moonillum.js
+++ b/src/moonillum.js
@@ -14,7 +14,7 @@
import base from './base'
-const p = Math.PI / 180
+const D2R = Math.PI / 180
/**
* phaseAngleEquatorial computes the phase angle of the Moon given equatorial coordinates.
@@ -115,19 +115,19 @@ export function phaseAngleEcliptic2 (cMoon, cSun) {
*/
export function phaseAngle3 (jde) { // (jde float64) float64
const T = base.J2000Century(jde)
- const D = base.horner(T, 297.8501921 * p, 445267.1114034 * p,
- -0.0018819 * p, p / 545868, -p / 113065000)
- const m = base.horner(T, 357.5291092 * p, 35999.0502909 * p,
- -0.0001535 * p, p / 24490000)
- const m_ = base.horner(T, 134.9633964 * p, 477198.8675055 * p,
- 0.0087414 * p, p / 69699, -p / 14712000)
+ const D = base.horner(T, 297.8501921 * D2R, 445267.1114034 * D2R,
+ -0.0018819 * D2R, D2R / 545868, -D2R / 113065000)
+ const m = base.horner(T, 357.5291092 * D2R, 35999.0502909 * D2R,
+ -0.0001536 * D2R, D2R / 24490000)
+ const m_ = base.horner(T, 134.9633964 * D2R, 477198.8675055 * D2R,
+ 0.0087414 * D2R, D2R / 69699, -D2R / 14712000)
return Math.PI - base.pmod(D, 2 * Math.PI) +
- -6.289 * p * Math.sin(m_) +
- 2.1 * p * Math.sin(m) +
- -1.274 * p * Math.sin(2 * D - m_) +
- -0.658 * p * Math.sin(2 * D) +
- -0.214 * p * Math.sin(2 * m_) +
- -0.11 * p * Math.sin(D)
+ -6.289 * D2R * Math.sin(m_) +
+ 2.1 * D2R * Math.sin(m) +
+ -1.274 * D2R * Math.sin(2 * D - m_) +
+ -0.658 * D2R * Math.sin(2 * D) +
+ -0.214 * D2R * Math.sin(2 * m_) +
+ -0.11 * D2R * Math.sin(D)
}
export default {
diff --git a/src/moonposition.js b/src/moonposition.js
index <HASH>..<HASH> 100644
--- a/src/moonposition.js
+++ b/src/moonposition.js
@@ -27,7 +27,7 @@ export function parallax (distance) {
function dmf (T) {
const d = base.horner(T, 297.8501921 * D2R, 445267.1114034 * D2R, -0.0018819 * D2R, D2R / 545868, -D2R / 113065000)
- const m = base.horner(T, 357.5291092 * D2R, 35999.0502909 * D2R, -0.0001535 * D2R, D2R / 24490000)
+ const m = base.horner(T, 357.5291092 * D2R, 35999.0502909 * D2R, -0.0001536 * D2R, D2R / 24490000)
const m_ = base.horner(T, 134.9633964 * D2R, 477198.8675055 * D2R,
0.0087414 * D2R, D2R / 69699, -D2R / 14712000)
const f = base.horner(T, 93.272095 * D2R, 483202.0175233 * D2R, -0.0036539 * D2R, -D2R / 3526000, D2R / 863310000) | fix 3rd coefficient of sun mean anomaly | commenthol_astronomia | train |
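Two things happen in this commit: the conversion constant is renamed from `p` to `D2R`, and the third coefficient of the sun's mean anomaly series changes from -0.0001535 to -0.0001536 in both files. A sketch of the convention in Python (Horner evaluation as in the library's `base.horner`, but with the helper reimplemented here):

```python
import math

D2R = math.pi / 180  # degrees to radians; far clearer than a bare `p`

def horner(x, *coeffs):
    """Evaluate c0 + c1*x + c2*x**2 + ... with Horner's scheme."""
    result = 0.0
    for c in reversed(coeffs):
        result = result * x + c
    return result

def sun_mean_anomaly(T):
    # The series coefficients are published in degrees; the named constant
    # makes the unit conversion visible at every term (-0.0001536 is the
    # corrected value from the diff above).
    return horner(T, 357.5291092 * D2R, 35999.0502909 * D2R,
                  -0.0001536 * D2R, D2R / 24490000)

print(sun_mean_anomaly(0.2))
```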
8c1f6a87be8ca78fbea281c6f76adb4d1ccb87bc | diff --git a/homie/node.py b/homie/node.py
index <HASH>..<HASH> 100644
--- a/homie/node.py
+++ b/homie/node.py
@@ -73,7 +73,7 @@ class HomieNode:
is_array = p.range > 1
for i, data in enumerate(p._data):
if data is not None:
- if data == delta[i]:
+ if data == delta[i] and is_array:
continue
if is_array: | Do not resend all array properties on change | microhomie_microhomie | train |
672c66e938418515160bdff2610bf0c787049a9a | diff --git a/src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java b/src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java
index <HASH>..<HASH> 100644
--- a/src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java
+++ b/src/main/java/io/github/lukehutch/fastclasspathscanner/utils/JarUtils.java
@@ -219,8 +219,8 @@ public class JarUtils {
private static final List<String> JRE_JARS = new ArrayList<>();
private static final Set<String> JRE_JARS_SET = new HashSet<>();
- private static final List<String> JRE_LIB_JARS = new ArrayList<>();
- private static final List<String> JRE_EXT_JARS = new ArrayList<>();
+ private static final Set<String> JRE_LIB_JARS = new HashSet<>();
+ private static final Set<String> JRE_EXT_JARS = new HashSet<>();
// Find jars in JRE dirs ({java.home}, {java.home}/lib, {java.home}/lib/ext, etc.)
static {
@@ -273,8 +273,6 @@ public class JarUtils {
}
}
}
- Collections.sort(JRE_LIB_JARS);
- Collections.sort(JRE_EXT_JARS);
// Put rt.jar first in list of JRE jar paths
jreJarPaths.removeAll(jreRtJarPaths);
@@ -346,12 +344,12 @@ public class JarUtils {
}
/** Get the paths for any JRE/JDK "lib/" jars. */
- public static List<String> getJreExtJars() {
+ public static Set<String> getJreExtJars() {
return JRE_EXT_JARS;
}
/** Get the paths for any JRE/JDK "ext/" jars. */
- public static List<String> getJreLibJars() {
+ public static Set<String> getJreLibJars() {
return JRE_LIB_JARS;
} | Prevent duplication of java.ext.jars by switching from list to set | classgraph_classgraph | train |
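The classgraph change swaps sorted `ArrayList`s for `HashSet`s so a jar reachable through two directory scans is only recorded once; ordering (and the explicit `Collections.sort`) is given up in exchange. The trade-off in a quick Python sketch with made-up paths:

```python
# A list accumulates duplicates when the same jar is discovered twice;
# a set collapses them automatically, at the cost of insertion order.
discovered = [
    "/jre/lib/ext/zipfs.jar",
    "/jre/lib/rt.jar",
    "/jre/lib/rt.jar",      # found again via a second scan root
]

as_list = sorted(discovered)   # keeps the duplicate rt.jar entry
as_set = set(discovered)       # de-duplicated, but unordered

assert len(as_list) == 3
assert len(as_set) == 2
```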
01d3662ebe4d62096cd43bbd6af063796df25f90 | diff --git a/src/authService.js b/src/authService.js
index <HASH>..<HASH> 100644
--- a/src/authService.js
+++ b/src/authService.js
@@ -85,6 +85,16 @@ export class AuthService {
return;
}
+ // in case auto refresh tokens are enabled
+ if(this.config.autoUpdateToken
+ && this.authentication.getAccessToken()
+ && this.authentication.getRefreshToken())
+ {
+ // we just need to check the status of the updated token we have in storage
+ this.authentication.updateAuthenticated();
+ return;
+ }
+
logger.info('Stored token changed event');
// IE runs the event handler before updating the storage value. Update it now. | fix(authService): storageEventHandler change | SpoonX_aurelia-authentication | train |
8f82221903508a885092fdbe1b63e55093875d83 | diff --git a/lib/mage/Mage.js b/lib/mage/Mage.js
index <HASH>..<HASH> 100644
--- a/lib/mage/Mage.js
+++ b/lib/mage/Mage.js
@@ -1,19 +1,20 @@
'use strict';
-var CODE_EXTENSIONS = [
+const CODE_EXTENSIONS = [
'.js',
'.ts'
];
-var EventEmitter = require('events').EventEmitter;
-var fs = require('fs');
-var util = require('util');
-var path = require('path');
-var async = require('async');
-var semver = require('semver');
+const EventEmitter = require('events').EventEmitter;
+const fs = require('fs');
+const util = require('util');
+const path = require('path');
+const async = require('async');
+const semver = require('semver');
+const chalk = require('chalk');
-var rootPath = process.cwd();
-var applicationModulesPath = path.resolve(rootPath, './lib/modules');
+const rootPath = process.cwd();
+const applicationModulesPath = path.resolve(rootPath, './lib/modules');
function testEngineVersion(pkg, pkgPath, mage) {
@@ -707,16 +708,31 @@ Mage.prototype.start = function (cb) {
return this;
};
+function showDevelopmentModeWarning(mage) {
+ mage.core.logger.warning
+ .data('config', mage.core.config.get(['developmentMode']))
+ .log('Development mode is turned on');
+
+ if (process.stderr.columns > 45) {
+ const write = (data) => process.stderr.write(chalk.yellow(data) + '\r\n');
+
+ write('');
+ write(' _____ __ __ _');
+ write('| __ \\ | \\/ | | |');
+ write('| | | | _____ _| \\ / | ___ __| | ___');
+ write('| | | |/ _ \\ \\ / | |\\/| |/ _ \\ / _` |/ _ \\');
+ write('| |__| | __/\\ \V /| | | | (_) | (_| | __/');
+ write('|_____/ \\___| \\_/ |_| |_|\\___/ \\__,_|\\___|');
+ write('');
+ }
+}
/**
* Boot MAGE - set it up, and start it if the setup is successful
*/
Mage.prototype.boot = function (cb) {
- var that = this;
-
this.cli.run();
-
- this.setup(function (error, apps) {
+ this.setup((error, apps) => {
if (error) {
if (cb) {
return cb(error);
@@ -726,11 +742,13 @@ Mage.prototype.boot = function (cb) {
}
var keys = Object.keys(apps);
- keys.forEach(function (appName) {
- that.emit('setup.' + appName, apps[appName]);
- });
+ keys.forEach((appName) => this.emit('setup.' + appName, apps[appName]));
+
+ if (this.isDevelopmentMode()) {
+ showDevelopmentModeWarning(this);
+ }
- that.start(cb);
+ this.start(cb);
});
}; | development mode log warning
Display a large "Development mode" banner when on. | mage_mage | train |
ed0470bf536902aa8f2d247099124c76902e0a4d | diff --git a/schwimmbad/jl.py b/schwimmbad/jl.py
index <HASH>..<HASH> 100644
--- a/schwimmbad/jl.py
+++ b/schwimmbad/jl.py
@@ -31,7 +31,4 @@ class JoblibPool(BasePool):
res = Parallel(*(self.args), **(self.kwargs))(
dfunc(a) for a in iterable
)
- if callback is not None:
- res = list(res)
- list(map(callback, res))
- return res
+ return self._call_callback(callback, res)
diff --git a/schwimmbad/pool.py b/schwimmbad/pool.py
index <HASH>..<HASH> 100644
--- a/schwimmbad/pool.py
+++ b/schwimmbad/pool.py
@@ -39,3 +39,10 @@ class BasePool(object):
def __exit__(self, *args):
self.close()
+
+ def _call_callback(self, callback, generator):
+ if callback is None:
+ return generator
+ for element in generator:
+ callback(element)
+ yield element
diff --git a/schwimmbad/serial.py b/schwimmbad/serial.py
index <HASH>..<HASH> 100644
--- a/schwimmbad/serial.py
+++ b/schwimmbad/serial.py
@@ -30,8 +30,4 @@ class SerialPool(BasePool):
function returns but before the results are returned.
"""
- results = map(func, iterable)
- if callback is not None:
- results = list(results)
- list(map(callback, results))
- return results
+ return self._call_callback(callback, map(func, iterable)) | updating JoblibPool and SerialPool to always return generators | adrn_schwimmbad | train |
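The schwimmbad refactor replaces eager `list(map(callback, results))` calls with one shared helper that streams results through the callback lazily. A Python sketch of that helper's shape; note a subtlety the sketch works around: if the `yield` and the early `return generator` share one function body, the whole function becomes a generator function and the no-callback branch silently yields nothing, so the yielding part is split into an inner function here:

```python
def call_callback(callback, generator):
    # Return the iterable untouched when there is no callback.
    if callback is None:
        return generator

    def stream():
        for element in generator:
            callback(element)   # fire per element, no list materialized
            yield element

    return stream()

seen = []
results = call_callback(seen.append, map(str.upper, ["a", "b"]))
assert list(results) == ["A", "B"]
assert seen == ["A", "B"]
```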
d390ac009964c774f56d8817258ccf00a8d2365c | diff --git a/users/session/module.go b/users/session/module.go
index <HASH>..<HASH> 100644
--- a/users/session/module.go
+++ b/users/session/module.go
@@ -64,10 +64,6 @@ func (m *Module) Init(c *service.Config) {
if m.KeyStore == nil {
m.KeyStore = &keystore.KeyStore{}
}
- if m.CookieDomain == "" {
- panic("session.CookieDomain is required")
- }
-
var err error
m.encrypter, m.decryptionKey, err = loadKeys(m.KeyStore)
if err != nil { | users: Allow cookie domain to be optional. | octavore_nagax | train |
b430f7610fdb916fb6680725bfd890375d684604 | diff --git a/script/update-embedded-git.js b/script/update-embedded-git.js
index <HASH>..<HASH> 100644
--- a/script/update-embedded-git.js
+++ b/script/update-embedded-git.js
@@ -22,37 +22,13 @@ request(options, async (err, response, release) => {
console.log(`Updating embedded git config to use version ${tag_name}`)
- const output = {}
-
- const windows64bit = assets.find(a => a.name.endsWith('-windows-x64.tar.gz'))
- if (windows64bit == null) {
- throw new Error('Could not find Windows 64-bit archive in latest release')
- }
- output['win32-x64'] = await getDetailsForAsset(assets, windows64bit)
-
- const windows32bit = assets.find(a => a.name.endsWith('-windows-x86.tar.gz'))
- if (windows32bit == null) {
- throw new Error('Could not find Windows 32-bit archive in latest release')
- }
- output['win32-x86'] = await getDetailsForAsset(assets, windows32bit)
-
- const macOS = assets.find(a => a.name.endsWith('-macOS.tar.gz'))
- if (macOS == null) {
- throw new Error('Could not find macOS archive on latest release')
+ const output = {
+ 'win32-x64': await findWindows64BitRelease(assets),
+ 'win32-x86': await findWindows32BitRelease(assets),
+ 'darwin-x64': await findMacOS64BitRelease(assets),
+ 'linux-x64': await findLinux64BitRelease(assets),
+ 'linux-arm64': await findLinuxARM64Release(assets)
}
- output['darwin-x64'] = await getDetailsForAsset(assets, macOS)
-
- const linux64bit = assets.find(a => a.name.endsWith('-ubuntu.tar.gz'))
- if (linux64bit == null) {
- throw new Error('Could not find Linux archive on latest release')
- }
- output['linux-x64'] = await getDetailsForAsset(assets, linux64bit)
-
- const linuxARM = assets.find(a => a.name.endsWith('-arm64.tar.gz'))
- if (linuxARM == null) {
- throw new Error('Could not find ARM64 archive on latest release')
- }
- output['linux-arm64'] = await getDetailsForAsset(assets, linuxARM)
const fileContents = JSON.stringify(output, null, 2)
@@ -65,6 +41,46 @@ request(options, async (err, response, release) => {
)
})
+function findWindows64BitRelease(assets) {
+ const asset = assets.find(a => a.name.endsWith('-windows-x64.tar.gz'))
+ if (asset == null) {
+ throw new Error('Could not find Windows 64-bit archive in latest release')
+ }
+ return getDetailsForAsset(assets, asset)
+}
+
+function findWindows32BitRelease(assets) {
+ const asset = assets.find(a => a.name.endsWith('-windows-x86.tar.gz'))
+ if (asset == null) {
+ throw new Error('Could not find Windows 32-bit archive in latest release')
+ }
+ return getDetailsForAsset(assets, asset)
+}
+
+function findMacOS64BitRelease(assets) {
+ const asset = assets.find(a => a.name.endsWith('-macOS.tar.gz'))
+ if (asset == null) {
+ throw new Error('Could not find MacOS 64-bit archive in latest release')
+ }
+ return getDetailsForAsset(assets, asset)
+}
+
+function findLinux64BitRelease(assets) {
+ const asset = assets.find(a => a.name.endsWith('-ubuntu.tar.gz'))
+ if (asset == null) {
+ throw new Error('Could not find Linux 64-bit archive in latest release')
+ }
+ return getDetailsForAsset(assets, asset)
+}
+
+function findLinuxARM64Release(assets) {
+ const asset = assets.find(a => a.name.endsWith('-arm64.tar.gz'))
+ if (asset == null) {
+ throw new Error('Could not find Linux ARM64 archive in latest release')
+ }
+ return getDetailsForAsset(assets, asset)
+}
+
function downloadChecksum(url) {
return new Promise((resolve, reject) => {
const options = { | avoid a bit of the duplication here | desktop_dugite | train |
d7767a817b52cafed32f95c0fbcbafaa3f5329b7 | diff --git a/fastods/src/test/java/com/github/jferard/fastods/tool/FastOdsTest.java b/fastods/src/test/java/com/github/jferard/fastods/tool/FastOdsTest.java
index <HASH>..<HASH> 100644
--- a/fastods/src/test/java/com/github/jferard/fastods/tool/FastOdsTest.java
+++ b/fastods/src/test/java/com/github/jferard/fastods/tool/FastOdsTest.java
@@ -63,7 +63,8 @@ public class FastOdsTest {
final File f = PowerMock.createMock(File.class);
PowerMock.resetAll();
- EasyMock.expect(f.exists()).andReturn(false);
+ if (FastOds.desktop != null)
+ EasyMock.expect(f.exists()).andReturn(false);
PowerMock.replayAll();
Assert.assertFalse(FastOds.openFile(f)); | Fix (for real this time, I hope) headless test | jferard_fastods | train |
ac2d42f74a5574e2f30b34a47267714ab6821c1a | diff --git a/jslack-api-model/src/main/java/com/github/seratch/jslack/api/model/event/MessageEvent.java b/jslack-api-model/src/main/java/com/github/seratch/jslack/api/model/event/MessageEvent.java
index <HASH>..<HASH> 100644
--- a/jslack-api-model/src/main/java/com/github/seratch/jslack/api/model/event/MessageEvent.java
+++ b/jslack-api-model/src/main/java/com/github/seratch/jslack/api/model/event/MessageEvent.java
@@ -24,6 +24,8 @@ public class MessageEvent implements Event {
public static final String TYPE_NAME = "message";
+ private String clientMsgId;
+
private final String type = TYPE_NAME;
private String channel;
private String user;
@@ -33,6 +35,7 @@ public class MessageEvent implements Event {
private List<Attachment> attachments;
private String ts;
+ private String threadTs;
private String eventTs;
private String channelType; | Fix #<I> MessageEvent is missing threadTs field | seratch_jslack | train |
a492fed48a5407a7448f3d28802a89ce17eabb1a | diff --git a/ui/src/api/wfe.js b/ui/src/api/wfe.js
index <HASH>..<HASH> 100644
--- a/ui/src/api/wfe.js
+++ b/ui/src/api/wfe.js
@@ -62,7 +62,8 @@ router.get('/id/:workflowId', async (req, res, next) => {
});
for(let t = 0; t < result.tasks.length; t++) {
let task = result.tasks[t];
- const logs = await http.get(baseURLTask + task.taskId + '/log');
+ let logs = await http.get(baseURLTask + task.taskId + '/log');
+ logs = logs || [];
let logs2 = [];
logs.forEach(log => {
const dtstr = moment(log.createdTime).format('MM/DD/YY, HH:mm:ss:SSS');
diff --git a/ui/src/api/wfegraph.js b/ui/src/api/wfegraph.js
index <HASH>..<HASH> 100644
--- a/ui/src/api/wfegraph.js
+++ b/ui/src/api/wfegraph.js
@@ -72,7 +72,7 @@ class Workflow2Graph {
labelStyle = 'fill:#ffffff; stroke-width: 1px';
break;
case 'COMPLETED_WITH_ERRORS':
- style = 'stroke: #48a770; fill: #48a770';
+ style = 'stroke: #FF8C00; fill: #FF8C00';
labelStyle = 'fill:#ffffff; stroke-width: 1px';
break;
case 'SKIPPED': | show the color for tasks completed with errors | Netflix_conductor | train |
4c9d7492fb15489624ae58a4d120d4d4cfe33d33 | diff --git a/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java b/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java
index <HASH>..<HASH> 100644
--- a/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java
+++ b/codec/src/main/java/io/netty/handler/codec/ByteToMessageDecoder.java
@@ -279,21 +279,24 @@ public abstract class ByteToMessageDecoder extends ChannelInboundHandlerAdapter
} catch (Exception e) {
throw new DecoderException(e);
} finally {
- if (cumulation != null && !cumulation.isReadable()) {
- numReads = 0;
- cumulation.release();
- cumulation = null;
- } else if (++ numReads >= discardAfterReads) {
- // We did enough reads already try to discard some bytes so we not risk to see a OOME.
- // See https://github.com/netty/netty/issues/4275
- numReads = 0;
- discardSomeReadBytes();
- }
+ try {
+ if (cumulation != null && !cumulation.isReadable()) {
+ numReads = 0;
+ cumulation.release();
+ cumulation = null;
+ } else if (++numReads >= discardAfterReads) {
+ // We did enough reads already try to discard some bytes so we not risk to see a OOME.
+ // See https://github.com/netty/netty/issues/4275
+ numReads = 0;
+ discardSomeReadBytes();
+ }
- int size = out.size();
- firedChannelRead |= out.insertSinceRecycled();
- fireChannelRead(ctx, out, size);
- out.recycle();
+ int size = out.size();
+ firedChannelRead |= out.insertSinceRecycled();
+ fireChannelRead(ctx, out, size);
+ } finally {
+ out.recycle();
+ }
}
} else {
ctx.fireChannelRead(msg);
diff --git a/codec/src/main/java/io/netty/handler/codec/MessageToMessageDecoder.java b/codec/src/main/java/io/netty/handler/codec/MessageToMessageDecoder.java
index <HASH>..<HASH> 100644
--- a/codec/src/main/java/io/netty/handler/codec/MessageToMessageDecoder.java
+++ b/codec/src/main/java/io/netty/handler/codec/MessageToMessageDecoder.java
@@ -97,11 +97,14 @@ public abstract class MessageToMessageDecoder<I> extends ChannelInboundHandlerAd
} catch (Exception e) {
throw new DecoderException(e);
} finally {
- int size = out.size();
- for (int i = 0; i < size; i ++) {
- ctx.fireChannelRead(out.getUnsafe(i));
+ try {
+ int size = out.size();
+ for (int i = 0; i < size; i++) {
+ ctx.fireChannelRead(out.getUnsafe(i));
+ }
+ } finally {
+ out.recycle();
}
- out.recycle();
}
}
diff --git a/codec/src/main/java/io/netty/handler/codec/MessageToMessageEncoder.java b/codec/src/main/java/io/netty/handler/codec/MessageToMessageEncoder.java
index <HASH>..<HASH> 100644
--- a/codec/src/main/java/io/netty/handler/codec/MessageToMessageEncoder.java
+++ b/codec/src/main/java/io/netty/handler/codec/MessageToMessageEncoder.java
@@ -92,9 +92,6 @@ public abstract class MessageToMessageEncoder<I> extends ChannelOutboundHandlerA
}
if (out.isEmpty()) {
- out.recycle();
- out = null;
-
throw new EncoderException(
StringUtil.simpleClassName(this) + " must produce at least one message.");
}
@@ -107,19 +104,22 @@ public abstract class MessageToMessageEncoder<I> extends ChannelOutboundHandlerA
throw new EncoderException(t);
} finally {
if (out != null) {
- final int sizeMinusOne = out.size() - 1;
- if (sizeMinusOne == 0) {
- ctx.write(out.getUnsafe(0), promise);
- } else if (sizeMinusOne > 0) {
- // Check if we can use a voidPromise for our extra writes to reduce GC-Pressure
- // See https://github.com/netty/netty/issues/2525
- if (promise == ctx.voidPromise()) {
- writeVoidPromise(ctx, out);
- } else {
- writePromiseCombiner(ctx, out, promise);
+ try {
+ final int sizeMinusOne = out.size() - 1;
+ if (sizeMinusOne == 0) {
+ ctx.write(out.getUnsafe(0), promise);
+ } else if (sizeMinusOne > 0) {
+ // Check if we can use a voidPromise for our extra writes to reduce GC-Pressure
+ // See https://github.com/netty/netty/issues/2525
+ if (promise == ctx.voidPromise()) {
+ writeVoidPromise(ctx, out);
+ } else {
+ writePromiseCombiner(ctx, out, promise);
+ }
}
+ } finally {
+ out.recycle();
}
- out.recycle();
}
}
} | CodecOutputList should be recycled in a finally block (#<I>)
Motivation:
To ensure we always recycle the CodecOutputList, we should do it in a finally block
Modifications:
Call CodecOutputList.recycle() in finally
Result:
Fewer chances of non-recycled lists. Related to <URL> | netty_netty | train |
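The netty patch wraps the post-decode bookkeeping in nested try/finally so the pooled `CodecOutputList` is recycled even when firing events throws. The same discipline in a Python sketch, with a toy pool standing in for netty's recycler:

```python
class Pool:
    """Toy object pool playing the role of the CodecOutputList recycler."""
    def __init__(self):
        self.free = []

    def acquire(self):
        return self.free.pop() if self.free else []

    def recycle(self, buf):
        buf.clear()
        self.free.append(buf)

def decode_and_fire(pool, fire):
    out = pool.acquire()
    try:
        out.extend(["msg1", "msg2"])  # the "decode" step
        for m in out:
            fire(m)                   # may raise, like fireChannelRead
    finally:
        pool.recycle(out)             # runs on success *and* on failure

def boom(_msg):
    raise RuntimeError("handler blew up")

pool = Pool()
try:
    decode_and_fire(pool, boom)
except RuntimeError:
    pass
assert pool.free, "the buffer was returned to the pool despite the exception"
```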
d68fec0e9f89afcb6308d60319f740d9ede49992 | diff --git a/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states-cell/index.js b/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states-cell/index.js
index <HASH>..<HASH> 100644
--- a/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states-cell/index.js
+++ b/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states-cell/index.js
@@ -46,6 +46,7 @@ Component.register('sw-settings-tax-rule-type-individual-states-cell', {
|| !this.taxRule.data.states
|| !this.taxRule.data.states.length
) {
+ this.individualStates = [];
return;
}
diff --git a/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states/index.js b/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states/index.js
index <HASH>..<HASH> 100644
--- a/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states/index.js
+++ b/src/Administration/Resources/app/administration/src/module/sw-settings-tax/component/sw-settings-tax-rule-type-individual-states/index.js
@@ -60,6 +60,7 @@ Component.register('sw-settings-tax-rule-type-individual-states', {
},
onChange(collection) {
+ this.individualStates = collection;
this.taxRule.data.states = collection.getIds();
}
} | NEXT-<I> - corrected changes of individualStates array in tax rule type individual states component | shopware_platform | train |
61776d68aeb4453222f78c89d9839b49790ff36b | diff --git a/lib/action_subscriber/version.rb b/lib/action_subscriber/version.rb
index <HASH>..<HASH> 100644
--- a/lib/action_subscriber/version.rb
+++ b/lib/action_subscriber/version.rb
@@ -1,3 +1,3 @@
module ActionSubscriber
- VERSION = "0.3.3"
+ VERSION = "0.3.4"
end | Bump to <I>. | mxenabled_action_subscriber | train |
b186da615ed96f0ea9f30c5f3f5e8e31a083c219 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -16,8 +16,8 @@ setup(
long_description="",
install_requires=['cypy>=0.2',
'quantities',
- 'pandas',
- 'ipython',
+ 'pandas==0.18',
+ 'ipython==5.1',
'bs4',
'lxml',
'nbconvert', | Specified pandas and ipython versions | scidash_sciunit | train |
9bd6a1a27da614bdf17880ff7c98ff3618397034 | diff --git a/commands.go b/commands.go
index <HASH>..<HASH> 100644
--- a/commands.go
+++ b/commands.go
@@ -183,7 +183,11 @@ func getRemoteRepository(remote RemoteRepository, doUpdate bool, isShallow bool)
os.Exit(1)
}
- vcs.Clone(remoteURL, path, isShallow)
+ err := vcs.Clone(remoteURL, path, isShallow)
+ if err != nil {
+ utils.Log("error", fmt.Sprintf("Could not find repository: %s", err))
+ os.Exit(1)
+ }
} else {
if doUpdate {
utils.Log("update", path) | Return exit status 1 for clone failure | motemen_ghq | train |
0e00529e04d7c870ffd88b00a6e0357d2787efed | diff --git a/app/models/jobs/action_mailroom/deliver_inbound_email_to_mailroom_job.rb b/app/models/jobs/action_mailroom/deliver_inbound_email_to_mailroom_job.rb
index <HASH>..<HASH> 100644
--- a/app/models/jobs/action_mailroom/deliver_inbound_email_to_mailroom_job.rb
+++ b/app/models/jobs/action_mailroom/deliver_inbound_email_to_mailroom_job.rb
@@ -1,5 +1,5 @@
class ActionMailroom::DeliverInboundEmailToMailroomJob < ApplicationJob
- queue_as :action_mailroom_inbound_email
+ queue_as :rails_action_mailroom_inbound_email
def perform(inbound_email)
ActionMailroom::Router.receive inbound_email | Prefix queue name like we do routes | rails_rails | train |
af0c5c13da01e7e40f80f366bd93d7e969736a80 | diff --git a/post_office/logutils.py b/post_office/logutils.py
index <HASH>..<HASH> 100644
--- a/post_office/logutils.py
+++ b/post_office/logutils.py
@@ -27,9 +27,11 @@ def setup_loghandlers(level=None):
},
},
- "root": {
- "handlers": ["post_office"],
- "level": level or "DEBUG"
+ "loggers": {
+ "post_office": {
+ "handlers": ["post_office"],
+ "level": level or "DEBUG"
+ }
}
})
return logger | Fixed post_office logger being set on the root logger | ui_django-post_office | train |
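Moving the handler from the `root` key to a named `post_office` logger stops the library from reconfiguring every logger in the host application. A runnable sketch of the corrected `dictConfig` shape (handler and formatter details simplified):

```python
import logging
from logging.config import dictConfig

dictConfig({
    "version": 1,
    "disable_existing_loggers": False,
    "handlers": {
        "post_office": {"class": "logging.StreamHandler"},
    },
    # Scoped to the package's own logger rather than "root", so unrelated
    # loggers keep whatever configuration the application gave them.
    "loggers": {
        "post_office": {"handlers": ["post_office"], "level": "DEBUG"},
    },
})

logging.getLogger("post_office").debug("goes through the scoped handler")
logging.getLogger("somewhere.else").debug("untouched by this config")
```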
b76443d39c7a03f82c3d0e233cfba40aa67cc7a7 | diff --git a/dev/components/components/data-table2.vue b/dev/components/components/data-table2.vue
index <HASH>..<HASH> 100644
--- a/dev/components/components/data-table2.vue
+++ b/dev/components/components/data-table2.vue
@@ -1,43 +1,5 @@
<template>
<div class="layout-padding">
- <h4>Row Transition</h4>
- <q-table
- :data="data"
- :columns="columns"
- row-key="name"
- selection="single"
- :selected.sync="selected"
- color="secondary"
- title="Move rows"
- :transition="transition"
- >
- <template slot="header" slot-scope="props">
- <q-tr>
- <q-th></q-th>
- <q-th v-for="col in props.cols" :key="col.name" :props="props">
- {{col.label}}
- </q-th>
- </q-tr>
- </template>
-
- <template slot="body" slot-scope="props">
- <q-tr slot="body" :key="props.row.name">
- <q-td>
- <q-btn dense color="primary" icon="arrow_drop_up" @click="moveRowUp(props.row.name)" />
- <q-btn dense color="primary" icon="arrow_drop_down" @click="moveRowDown(props.row.name)" />
- </q-td>
- <q-td key="desc" :props="props">{{ props.row.name }}</q-td>
- <q-td key="calories" :props="props">{{ props.row.calories }}</q-td>
- <q-td key="fat" :props="props">{{ props.row.fat }}</q-td>
- <q-td key="carbs" :props="props">{{ props.row.carbs }}</q-td>
- <q-td key="protein" :props="props">{{ props.row.protein }}</q-td>
- <q-td key="sodium" :props="props">{{ props.row.sodium }}</q-td>
- <q-td key="calcium" :props="props">{{ props.row.calcium }}</q-td>
- <q-td key="iron" :props="props">{{ props.row.iron }}</q-td>
- </q-tr>
- </template>
- </q-table>
-
<h4>Emulate server-side</h4>
{{serverPagination}}
<q-table
@@ -294,13 +256,6 @@ export default {
visibleColumns: ['desc', 'fat', 'carbs', 'protein', 'sodium', 'calcium', 'iron'],
selected: [],
- transition: {
- name: 'flip-row',
- enterActiveClass: 'animated bounceInLeft',
- leaveActiveClass: 'animated bounceOutRight',
- mode: 'out-in',
- tag: 'tbody'
- },
columns: [
{
name: 'desc',
@@ -477,6 +432,4 @@ export default {
</script>
<style lang="stylus">
-.flip-row-move
- position absolute
</style>
diff --git a/src/components/table/QTable.js b/src/components/table/QTable.js
index <HASH>..<HASH> 100644
--- a/src/components/table/QTable.js
+++ b/src/components/table/QTable.js
@@ -64,8 +64,7 @@ export default {
tableClass: {
type: [String, Array, Object],
default: ''
- },
- transition: Object
+ }
},
computed: {
computedRows () {
diff --git a/src/components/table/table-body.js b/src/components/table/table-body.js
index <HASH>..<HASH> 100644
--- a/src/components/table/table-body.js
+++ b/src/components/table/table-body.js
@@ -68,9 +68,7 @@ export default {
child.push(bottomRow({cols: this.computedCols}))
}
- return this.transition
- ? h('transition-group', { props: this.transition }, child)
- : h('tbody', child)
+ return h('tbody', child)
},
addBodyRowMeta (data) {
if (this.selection) { | revert: Experiment from previous commit | quasarframework_quasar | train |
ac404b34f69bb09b4224d154cacb9935466c102c | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -4,8 +4,9 @@ from setuptools.command.build_py import build_py as _build
import os.path
import subprocess
+import shutil
-PROTOC_BIN = "/usr/bin/protoc"
+PROTOC_EXEC = "protoc"
CURRENT_DIR = os.path.abspath( os.path.dirname( __file__ ) )
@@ -13,11 +14,12 @@ class ProtobufBuilder(_build):
def run(self):
# check if protobuf is installed
- if not os.path.isfile(PROTOC_BIN):
+ exec_path = shutil.which(PROTOC_EXEC)
+ if exec_path is None:
raise Exception("You should install protobuf compiler")
print("Building protobuf file")
- subprocess.run([PROTOC_BIN,
+ subprocess.run([exec_path,
"--proto_path=" + CURRENT_DIR,
"--python_out=" + CURRENT_DIR + "/gpapi/",
CURRENT_DIR + "/googleplay.proto"]) | setup.py: fix for issue #<I> | NoMore201_googleplay-api | train |
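`shutil.which` replaces the hardcoded `/usr/bin/protoc`, so the build works wherever the compiler lives on `PATH` (Homebrew, conda, custom prefixes). The pattern in isolation:

```python
import shutil
import subprocess

PROTOC_EXEC = "protoc"

exec_path = shutil.which(PROTOC_EXEC)  # searches PATH; None when absent
if exec_path is None:
    raise RuntimeError("You should install the protobuf compiler")

# Invoking by the resolved absolute path works on any layout.
subprocess.run([exec_path, "--version"], check=True)
```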
44c09c774a9be174611210c0206ca5bff6c705c3 | diff --git a/src/js/util/filter.js b/src/js/util/filter.js
index <HASH>..<HASH> 100644
--- a/src/js/util/filter.js
+++ b/src/js/util/filter.js
@@ -1,4 +1,3 @@
-import {hasAttr} from './attr';
import {inBrowser} from './env';
import {isDocument, isElement, isString, noop, startsWith, toNode, toNodes} from './lang';
@@ -33,8 +32,9 @@ export function isInput(element) {
return toNodes(element).some(element => matches(element, selInput));
}
+export const selFocusable = `${selInput},a[href],[tabindex]`;
export function isFocusable(element) {
- return isInput(element) || matches(element, 'a[href],button') || hasAttr(element, 'tabindex');
+ return matches(element, selFocusable);
}
export function parent(element) { | refactor: isFocusable | uikit_uikit | train |
cdd5a00b26386b5f163977885f63d0179f63cf0e | diff --git a/taxtastic/ncbi.py b/taxtastic/ncbi.py
index <HASH>..<HASH> 100644
--- a/taxtastic/ncbi.py
+++ b/taxtastic/ncbi.py
@@ -111,7 +111,7 @@ UNCLASSIFIED_REGEX_COMPONENTS = [r'-like\b',
# 'et al',
r'environmental samples',
r'eubacterium',
- r'\b[Gg]roup\b',
+ # r'\b[Gg]roup\b',
r'halophilic',
r'hydrothermal\b',
r'isolate', | commented out regex for excluding 'group' records | fhcrc_taxtastic | train |
c3f94ee4d986c4b63fec6655087e0be7656c359e | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -10,7 +10,7 @@ extras = {
":sys_platform=='win32'": ['pypiwin32']
}
-version = '1.0.1'
+version = '1.0.2'
setup(name='qtm',
version=version,
diff --git a/test/rest_test.py b/test/rest_test.py
index <HASH>..<HASH> 100644
--- a/test/rest_test.py
+++ b/test/rest_test.py
@@ -81,7 +81,7 @@ class TestSettings(unittest.TestCase):
def test_get_experimental(self):
d = self.rest.get_experimental()
- d.addCallback(self.assertEqual, ['command', 'settings', 'workerstate'])
+ d.addCallback(self.assertEqual, ['command', 'measurements', 'settings', 'workerstate'])
return d
def test_get_workerstate(self):
diff --git a/test/rt_test.py b/test/rt_test.py
index <HASH>..<HASH> 100644
--- a/test/rt_test.py
+++ b/test/rt_test.py
@@ -12,6 +12,8 @@ from qtm.packet import QRTPacket, QRTComponentType, QRTEvent
from qtm import QRT, QRTConnection, QRTCommandException
+MEASUREMENT = "e:/measurements/3d_analog_6dof_big27file.qtm"
+
class TestConnection(unittest.TestCase):
def setUp(self):
@@ -63,7 +65,7 @@ class TestPacket(unittest.TestCase):
self.qrt.connect(on_connect=self.on_connect, on_disconnect=None, on_event=None)
yield self.defered_connection
yield self.connection.take_control('password')
- yield self.connection.load("d:/measurements/3d_analog_6dof_big27file.qtm")
+ yield self.connection.load(MEASUREMENT)
yield self.connection.start(rtfromfile=True)
def tearDown(self):
@@ -171,7 +173,7 @@ class TestStream(unittest.TestCase):
yield self.defered_connection
yield self.defered_connection
yield self.connection.take_control('password')
- yield self.connection.load("d:/measurements/3d_analog_6dof_big27file.qtm")
+ yield self.connection.load(MEASUREMENT)
yield self.connection.start(rtfromfile=True)
def tearDown(self):
@@ -268,7 +270,7 @@ class TestCommands(unittest.TestCase):
def test_multiple_commands(self):
d = []
d.append(self.connection.take_control("password"))
- d.append(self.connection.load("d:\measurements\FirstMiqusMeasurement.qtm"))
+ d.append(self.connection.load(MEASUREMENT))
d.append(self.connection.start(rtfromfile=True))
d.append(self.connection.stop())
d.append(self.connection.close()) | Updated tests; they still refer to a hardcoded path, though | qualisys_qualisys_python_sdk | train |
f4191d1d15c321244c761fd5b78a7fae90add170 | diff --git a/cmd/wl/commands/task.go b/cmd/wl/commands/task.go
index <HASH>..<HASH> 100644
--- a/cmd/wl/commands/task.go
+++ b/cmd/wl/commands/task.go
@@ -127,7 +127,6 @@ and updates fields with the provided flags.
}
if cmd.Flags().Changed(starredLongFlag) {
- fmt.Printf("starred changing from %t to %t\n", task.Starred, starred)
task.Starred = starred
} | Remove printf when updating task in CLI.
[finishes #<I>] | robdimsdale_wl | train |
578cdd696945ef670c86e150276f5e9a218c3c7b | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,7 +8,7 @@ here = os.path.normpath(os.path.abspath(os.path.dirname(__file__)))
site_packages_path = sysconfig.get_python_lib()
site_packages_files = ["vext_importer.pth"] if os.environ.get('VIRTUAL_ENV') else []
-sandbox._EXCEPTIONS.extend(site_packages_files)
+sandbox._EXCEPTIONS.extend(os.path.join(site_packages_path, f) for f in site_packages_files)
long_description=open('DESCRIPTION.rst').read()
@@ -35,7 +35,7 @@ class vext_install_data(install_data):
setup(
cmdclass={'vext_install_data': vext_install_data},
name='vext',
- version='0.2.4',
+ version='0.2.5',
description='Use system python packages in a virtualenv',
long_description=long_description, | Use absolute paths in sandbox exemption. | stuaxo_vext | train |
d31e69f97633a3e9557103bfd085b04bcb38a852 | diff --git a/lib/tests/behat/behat_general.php b/lib/tests/behat/behat_general.php
index <HASH>..<HASH> 100644
--- a/lib/tests/behat/behat_general.php
+++ b/lib/tests/behat/behat_general.php
@@ -1093,7 +1093,7 @@ class behat_general extends behat_base {
// Check if value exists in specific row/column.
// Get row xpath.
- $rowxpath = $tablexpath."/tbody/tr[th[normalize-space(.)=" . $rowliteral . "] | td[normalize-space(.)=" . $rowliteral . "]]";
+ $rowxpath = $tablexpath."/tbody/tr[th[normalize-space(.)=" . $rowliteral . "] or td[normalize-space(.)=" . $rowliteral . "]]";
$columnvaluexpath = $rowxpath . $columnpositionxpath . "[contains(normalize-space(.)," . $valueliteral . ")]"; | MDL-<I> behat: Fixed xpath for selecting table row | moodle_moodle | train |
13b482dd47eb98b4c80f70e7d325526db96a4082 | diff --git a/ips_vagrant/commands/new/__init__.py b/ips_vagrant/commands/new/__init__.py
index <HASH>..<HASH> 100644
--- a/ips_vagrant/commands/new/__init__.py
+++ b/ips_vagrant/commands/new/__init__.py
@@ -34,8 +34,8 @@ from ips_vagrant.scrapers import Licenses, Version, Installer
help='Use cached version downloads if possible. (Default: True)')
@click.option('--install/--no-install', envvar='INSTALL', default=True,
help='Run the IPS installation automatically after setup. (Default: True)')
[email protected]('--dev/--no-dev', envvar='IPSV_IN_DEV', default=True,
- help='Install developer tools and put the site into dev mode after installation. (Default: True)')
[email protected]('--dev/--no-dev', envvar='IPSV_IN_DEV', default=False,
+ help='Install developer tools and put the site into dev mode after installation. (Default: False)')
@pass_context
def cli(ctx, name, dname, license_key, force, enable, ssl, spdy, gzip, cache, install, dev):
"""
diff --git a/ips_vagrant/scrapers/dev_tools.py b/ips_vagrant/scrapers/dev_tools.py
index <HASH>..<HASH> 100644
--- a/ips_vagrant/scrapers/dev_tools.py
+++ b/ips_vagrant/scrapers/dev_tools.py
@@ -27,6 +27,7 @@ class DevTools(object):
self.session = requests.Session()
self.session.cookies.update(self.cookiejar)
+ self.session.headers.update({'User-Agent': 'ipsv/0.1.0'})
def get(self):
"""
diff --git a/ips_vagrant/scrapers/version.py b/ips_vagrant/scrapers/version.py
index <HASH>..<HASH> 100644
--- a/ips_vagrant/scrapers/version.py
+++ b/ips_vagrant/scrapers/version.py
@@ -20,6 +20,9 @@ class Version(object):
"""
self.ctx = ctx
self.cookiejar = cookiejar
+ self.session = requests.Session()
+ self.session.cookies.update(cookiejar)
+ self.session.headers.update({'User-Agent': 'ipsv/0.1.0'})
self.license = license
self.form = None
self.log = logging.getLogger('ipsv.scraper.version') | Temporarily disabling IN_DEV by default due to Incapsula issues | FujiMakoto_IPS-Vagrant | train |
478c0e6b685fa28dccd725e24056ed75df01d324 | diff --git a/src/drawing/drawTextBox.js b/src/drawing/drawTextBox.js
index <HASH>..<HASH> 100644
--- a/src/drawing/drawTextBox.js
+++ b/src/drawing/drawTextBox.js
@@ -35,55 +35,55 @@ export function textBoxWidth (context, text, padding) {
* @param {string} color The color of the textBox.
* @param {object} options Options for the textBox.
*/
-export default function (context, textLines, x, y, color, options) {
- if (Object.prototype.toString.call(textLines) !== '[object Array]') {
- textLines = [textLines];
- }
+ export default function (context, textLines, x, y, color, options) {
+ if (Object.prototype.toString.call(textLines) !== '[object Array]') {
+ textLines = [textLines];
+ }
- const padding = 5;
- const fontSize = textStyle.getFontSize();
- const backgroundColor = textStyle.getBackgroundColor();
+ const padding = 5;
+ const fontSize = textStyle.getFontSize();
+ const backgroundColor = textStyle.getBackgroundColor();
- // Find the longest text width in the array of text data
- let maxWidth = 0;
+ // Find the longest text width in the array of text data
+ let maxWidth = 0;
- textLines.forEach(function (text) {
- // Get the text width in the current font
- const width = context.measureText(text).width;
+ textLines.forEach(function (text) {
+ // Get the text width in the current font
+ const width = textBoxWidth(context, text, padding);
- // Find the maximum with for all the text rows;
- maxWidth = Math.max(maxWidth, width);
- });
+ // Find the maximum with for all the text rows;
+ maxWidth = Math.max(maxWidth, width);
+ });
- // Calculate the bounding box for this text box
- const boundingBox = {
- width: maxWidth + (padding * 2),
- height: padding + textLines.length * (fontSize + padding)
- };
+ // Calculate the bounding box for this text box
+ const boundingBox = {
+ width: maxWidth,
+ height: padding + textLines.length * (fontSize + padding)
+ };
- draw(context, (context) => {
- context.strokeStyle = color;
+ draw(context, (context) => {
+ context.strokeStyle = color;
- // Draw the background box with padding
- if (options && options.centering && options.centering.x === true) {
- x -= boundingBox.width / 2;
- }
+ // Draw the background box with padding
+ if (options && options.centering && options.centering.x === true) {
+ x -= boundingBox.width / 2;
+ }
- if (options && options.centering && options.centering.y === true) {
- y -= boundingBox.height / 2;
- }
+ if (options && options.centering && options.centering.y === true) {
+ y -= boundingBox.height / 2;
+ }
- boundingBox.left = x;
- boundingBox.top = y;
+ boundingBox.left = x;
+ boundingBox.top = y;
- const fillStyle = (options && options.debug === true) ? '#FF0000' : backgroundColor;
+ const fillStyle = (options && options.debug === true) ? '#FF0000' : backgroundColor;
- fillBox(context, boundingBox, fillStyle);
+ fillBox(context, boundingBox, fillStyle);
- // Draw each of the text lines on top of the background box
- fillTextLines(context, boundingBox, textLines, color, padding);
- });
+ // Draw each of the text lines on top of the background box
+ fillTextLines(context, boundingBox, textLines, color, padding);
+ });
- // Return the bounding box so it can be used for pointNearHandle
- return boundingBox;
-}
+ // Return the bounding box so it can be used for pointNearHandle
+ return boundingBox;
+ }
diff --git a/src/tools/AngleTool.js b/src/tools/AngleTool.js
index <HASH>..<HASH> 100644
--- a/src/tools/AngleTool.js
+++ b/src/tools/AngleTool.js
@@ -3,7 +3,6 @@
import external from '../externalModules.js';
import BaseAnnotationTool from '../base/BaseAnnotationTool.js';
// State
-import textStyle from '../stateManagement/textStyle.js';
import {
addToolState,
getToolState,
@@ -23,6 +22,7 @@ import {
drawJoinedLines
} from '../drawing/index.js';
import drawLinkedTextBox from '../drawing/drawLinkedTextBox.js';
+import { textBoxWidth } from '../drawing/drawTextBox.js';
import drawHandles from '../drawing/drawHandles.js';
import lineSegDistance from '../util/lineSegDistance.js';
import roundToDecimal from '../util/roundToDecimal.js';
@@ -132,7 +132,6 @@ export default class AngleTool extends BaseAnnotationTool {
const context = getNewContext(eventData.canvasContext.canvas);
const lineWidth = toolStyle.getToolWidth();
- const font = textStyle.getFont();
const config = this.configuration;
for (let i = 0; i < toolData.data.length; i++) {
@@ -172,9 +171,6 @@ export default class AngleTool extends BaseAnnotationTool {
drawHandles(context, eventData, data.handles, color, handleOptions);
- // Draw the text
- context.fillStyle = color;
-
// Default to isotropic pixel size, update suffix to reflect this
const columnPixelSpacing = eventData.image.columnPixelSpacing || 1;
const rowPixelSpacing = eventData.image.rowPixelSpacing || 1;
@@ -238,9 +234,7 @@ export default class AngleTool extends BaseAnnotationTool {
x: handleMiddleCanvas.x,
y: handleMiddleCanvas.y
};
-
- context.font = font;
- const textWidth = context.measureText(text).width;
+ const textWidth = textBoxWidth(context, text, 5);
if (handleMiddleCanvas.x < handleStartCanvas.x) {
textCoords.x -= distance + textWidth + 10; | Port recent drawing API changes to AngleTool. | cornerstonejs_cornerstoneTools | train |
76bc8afe89d92c29a724151d0798b27df5be7f05 | diff --git a/sigal/settings.py b/sigal/settings.py
index <HASH>..<HASH> 100644
--- a/sigal/settings.py
+++ b/sigal/settings.py
@@ -80,7 +80,6 @@ _DEFAULT_CONFIG = {
'video_converter': 'ffmpeg',
'video_extensions': ['.mov', '.avi', '.mp4', '.webm', '.ogv', '.3gp'],
'video_format': 'webm',
- 'video_resize': True,
'video_always_convert': False,
'video_size': (480, 360),
'watermark': '',
diff --git a/sigal/templates/sigal.conf.py b/sigal/templates/sigal.conf.py
index <HASH>..<HASH> 100644
--- a/sigal/templates/sigal.conf.py
+++ b/sigal/templates/sigal.conf.py
@@ -189,6 +189,7 @@ ignore_files = []
# Size of resized video (default: (480, 360))
+# Set this to None if no resizing is desired on the video.
# video_size = (480, 360)
# If the desired video extension and filename are the same, the video will
@@ -196,10 +197,6 @@ ignore_files = []
# set this to True to force convert it. False by default.
# video_always_convert = False
-# Set this to false if no resizing is desired on the video. This overrides
-# the video_size option.
-# video_resize = True
-
# -------------
# Miscellaneous
# -------------
diff --git a/sigal/video.py b/sigal/video.py
index <HASH>..<HASH> 100644
--- a/sigal/video.py
+++ b/sigal/video.py
@@ -77,33 +77,17 @@ def video_size(source, converter='ffmpeg'):
x, y = y, x
return x, y
-
-def get_dimensions(source, converter, output_size):
- """Figure out src and dest width and height for video.
+def get_resize_options(source, converter, output_size):
+ """Figure out resize options for video from src and dst sizes.
:param source: path to a video
:param outname: path to the generated video
:param settings: settings dict
-
"""
logger = logging.getLogger(__name__)
-
- # Don't transcode if source is in the required format and
- # has fitting datedimensions, copy instead.
w_src, h_src = video_size(source, converter=converter)
w_dst, h_dst = output_size
logger.debug('Video size: %i, %i -> %i, %i', w_src, h_src, w_dst, h_dst)
- return {'src': (w_src, h_src), 'dst': (w_dst, h_dst)}
-
-
-def get_resize_options(src_dst_dimension_dict):
- """Figure out resize options for video from src and dst sizes.
-
- :param src_dst_dimension_dict: a dictionary of src and dst,
- each with a width, height tuple
- """
- w_src, h_src = src_dst_dimension_dict['src']
- w_dst, h_dst = src_dst_dimension_dict['dst']
# do not resize if input dimensions are smaller than output dimensions
if w_src <= w_dst and h_src <= h_dst:
@@ -160,10 +144,9 @@ def generate_video(source, outname, settings):
converter = settings['video_converter']
resize_opt = []
- if settings.get("video_resize"):
- src_dst_dimension_dict = get_dimensions(source, converter,
- settings['video_size'])
- resize_opt = get_resize_options(src_dst_dimension_dict)
+ if settings.get("video_size"):
+ resize_opt = get_resize_options(source, converter,
+ settings['video_size'])
base, src_ext = splitext(source)
base, dst_ext = splitext(outname) | Recommended fixes
Use video_size = None to disable resizing instead of a separate option
Eliminate the pointless helper that only computed resize dimensions | saimn_sigal | train |
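After this commit one nullable option does the work of two: a `(width, height)` tuple in `video_size` requests scaling and `None` disables it, replacing the separate `video_resize` flag. A sketch of how the single option can drive the branch (the ffmpeg arguments here are illustrative, not sigal's exact command line):

```python
def build_convert_args(settings, source="in.mov", outname="out.webm"):
    resize_opt = []
    # One option, two meanings: a (w, h) tuple asks for scaling,
    # None (or a missing key) means "leave the video alone".
    if settings.get("video_size"):
        w, h = settings["video_size"]
        resize_opt = ["-vf", f"scale={w}:{h}"]
    return ["ffmpeg", "-i", source, *resize_opt, outname]

assert "-vf" in build_convert_args({"video_size": (480, 360)})
assert "-vf" not in build_convert_args({"video_size": None})
```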
bad4fb7feaf0d6c4b6f5f3d2b406754f071ee181 | diff --git a/spinner.go b/spinner.go
index <HASH>..<HASH> 100644
--- a/spinner.go
+++ b/spinner.go
@@ -334,7 +334,7 @@ func (s *Spinner) Stop() {
}
s.erase()
if s.FinalMSG != "" {
- fmt.Fprintf(s.Writer, s.FinalMSG)
+ fmt.Fprint(s.Writer, s.FinalMSG)
}
s.stopChan <- struct{}{}
} | Allow `%` characters in FinalMSG
FinalMSG is not intended to be a format string for `printf`, so it
should not be passed as such. | briandowns_spinner | train |
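The spinner fix is the classic printf hazard: user-supplied text passed as the *format* argument, so any `%` in it is parsed as a verb. Python's `%`-formatting trips the same way, sketched here; the fix is the same, print the value directly:

```python
msg = "done: 100%"

# Wrong: the message is treated as a format string, and the trailing "%"
# is parsed as the start of a conversion specifier.
try:
    _ = msg % ()
except ValueError as e:
    print("format-string bug:", e)   # "incomplete format"

# Right: emit the value as data, the equivalent of fmt.Fprint.
print(msg)
```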
17d90400b958e9278de0d303c5bdc7d7ef1617fd | diff --git a/test/e2e/specs/wp-gutenboarding-spec.js b/test/e2e/specs/wp-gutenboarding-spec.js
index <HASH>..<HASH> 100644
--- a/test/e2e/specs/wp-gutenboarding-spec.js
+++ b/test/e2e/specs/wp-gutenboarding-spec.js
@@ -32,6 +32,8 @@ before( async function () {
describe( 'Gutenboarding: (' + screenSize + ')', function () {
this.timeout( mochaTimeOut );
describe( 'Visit Gutenboarding page as a new user @parallel @canary', function () {
+ throw 'Forcing Gutenboarding e2e tests to fail';
+
const siteTitle = dataHelper.randomPhrase();
before( async function () { | Intentionally fail E2E gutenboarding tests (#<I>)
This is part of the onboarding process. | Automattic_wp-calypso | train |
8028da7cb22608e92544be2829a33b9c93765d29 | diff --git a/dao/elasticsearch/service.go b/dao/elasticsearch/service.go
index <HASH>..<HASH> 100644
--- a/dao/elasticsearch/service.go
+++ b/dao/elasticsearch/service.go
@@ -25,6 +25,14 @@ func (this *ControlPlaneDao) AddService(svc service.Service, serviceId *string)
if err := this.facade.AddService(datastore.Get(), svc); err != nil {
return err
}
+
+ // Create the tenant volume
+ if tenantID, err := this.facade.GetTenantID(datastore.Get(), svc.ID); err != nil {
+ glog.Warningf("Could not get tenant for service %s: %s", svc.ID, err)
+ } else if _, err := this.dfs.GetVolume(tenantID); err != nil {
+ glog.Warningf("Could not create volume for tenant %s: %s", tenantID, err)
+ }
+
*serviceId = svc.ID
return nil
}
@@ -34,6 +42,13 @@ func (this *ControlPlaneDao) UpdateService(svc service.Service, unused *int) err
if err := this.facade.UpdateService(datastore.Get(), svc); err != nil {
return err
}
+
+ // Create the tenant volume
+ if tenantID, err := this.facade.GetTenantID(datastore.Get(), svc.ID); err != nil {
+ glog.Warningf("Could not get tenant for service %s: %s", svc.ID, err)
+ } else if _, err := this.dfs.GetVolume(tenantID); err != nil {
+ glog.Warningf("Could not create volume for tenant %s: %s", tenantID, err)
+ }
return nil
}
diff --git a/dao/elasticsearch/servicetemplate.go b/dao/elasticsearch/servicetemplate.go
index <HASH>..<HASH> 100644
--- a/dao/elasticsearch/servicetemplate.go
+++ b/dao/elasticsearch/servicetemplate.go
@@ -17,6 +17,7 @@ import (
"github.com/control-center/serviced/dao"
"github.com/control-center/serviced/datastore"
"github.com/control-center/serviced/domain/servicetemplate"
+ "github.com/zenoss/glog"
)
func (this *ControlPlaneDao) AddServiceTemplate(serviceTemplate servicetemplate.ServiceTemplate, templateID *string) error {
@@ -46,6 +47,11 @@ func (this *ControlPlaneDao) GetServiceTemplates(unused int, templates *map[stri
func (this *ControlPlaneDao) DeployTemplate(request dao.ServiceTemplateDeploymentRequest, tenantID *string) error {
var err error
*tenantID, err = this.facade.DeployTemplate(datastore.Get(), request.PoolID, request.TemplateID, request.DeploymentID)
+
+ // Create the tenant volume
+ if _, err := this.dfs.GetVolume(*tenantID); err != nil {
+ glog.Warningf("Could not create volume for tenant %s: %s", tenantID, err)
+ }
return err
}
@@ -67,5 +73,12 @@ func (this *ControlPlaneDao) DeployTemplateActive(notUsed string, active *[]map[
func (this *ControlPlaneDao) DeployService(request dao.ServiceDeploymentRequest, serviceID *string) error {
var err error
*serviceID, err = this.facade.DeployService(datastore.Get(), request.ParentID, request.Service)
+
+ // Create the tenant volume
+ if tenantID, err := this.facade.GetTenantID(datastore.Get(), *serviceID); err != nil {
+ glog.Warningf("Could not get tenant for service %s: %s", *serviceID, err)
+ } else if _, err := this.dfs.GetVolume(tenantID); err != nil {
+ glog.Warningf("Could not create volume for tenant %s: %s", tenantID, err)
+ }
return err
} | Create the tenant volume when a new service is created | control-center_serviced | train |
42d7abaf117f83a1ff1f08e8cff7a680611afed8 | diff --git a/pool.go b/pool.go
index <HASH>..<HASH> 100644
--- a/pool.go
+++ b/pool.go
@@ -159,7 +159,7 @@ func (p *Pool) makeOne() {
}()
}
-func startTLS(c *smtp.Client, addr string) (bool, error) {
+func startTLS(c *client, addr string) (bool, error) {
if ok, _ := c.Extension("STARTTLS"); !ok {
return false, nil
}
@@ -176,7 +176,7 @@ func startTLS(c *smtp.Client, addr string) (bool, error) {
return true, nil
}
-func addAuth(c *smtp.Client, auth smtp.Auth) (bool, error) {
+func addAuth(c *client, auth smtp.Auth) (bool, error) {
if ok, _ := c.Extension("AUTH"); !ok {
return false, nil
}
@@ -189,10 +189,11 @@ func addAuth(c *smtp.Client, auth smtp.Auth) (bool, error) {
}
func (p *Pool) build() (*client, error) {
- c, err := smtp.Dial(p.addr)
+ cl, err := smtp.Dial(p.addr)
if err != nil {
return nil, err
}
+ c := &client{cl, 0}
if _, err := startTLS(c, p.addr); err != nil {
c.Close()
@@ -206,7 +207,7 @@ func (p *Pool) build() (*client, error) {
}
}
- return &client{c, 0}, nil
+ return c, nil
}
func (p *Pool) maybeReplace(err error, c *client) { | convert to a client wrapper struct earlier, to catch all the Close cases | jordan-wright_email | train |
7cc36aa4e4d76457378622681a2e21b8b448affa | diff --git a/google/gax/path_template.py b/google/gax/path_template.py
index <HASH>..<HASH> 100644
--- a/google/gax/path_template.py
+++ b/google/gax/path_template.py
@@ -190,7 +190,7 @@ class _Parser(object):
t_EQUALS = r'='
t_WILDCARD = r'\*'
t_PATH_WILDCARD = r'\*\*'
- t_LITERAL = r'[^/}{=\*]+'
+ t_LITERAL = r'[a-zA-Z0-9\._~-]+'
t_ignore = ' \t'
@@ -199,7 +199,7 @@ class _Parser(object):
def __init__(self):
self.lexer = lex.lex(module=self)
- self.parser = yacc.yacc(module=self, debug=0, write_tables=0)
+ self.parser = yacc.yacc(module=self, debug=False, write_tables=False)
def parse(self, data):
"""Returns a list of path template segments parsed from data.
@@ -288,3 +288,8 @@ class _Parser(object):
'parser error: unexpected token \'%s\'' % p.type)
else:
raise ValidationException('parser error: unexpected EOF')
+
+ def t_error(self, t):
+ """Raises a lexer error."""
+ raise ValidationException(
+ 'lexer error: illegal character \'%s\'' % t.value[0])
diff --git a/test/test_path_template.py b/test/test_path_template.py
index <HASH>..<HASH> 100644
--- a/test/test_path_template.py
+++ b/test/test_path_template.py
@@ -42,6 +42,10 @@ class TestPathTemplate(unittest2.TestCase):
def test_len(self):
self.assertEqual(len(PathTemplate('a/b/**/*/{a=hello/world}')), 6)
+ def test_fail_invalid_token(self):
+ self.assertRaises(ValidationException,
+ PathTemplate, 'hello/wor@ld')
+
def test_fail_when_impossible_match(self):
template = PathTemplate('hello/world')
self.assertRaises(ValidationException,
@@ -119,5 +123,5 @@ class TestPathTemplate(unittest2.TestCase):
self.assertEqual(str(template), 'buckets/{hello=*}')
template = PathTemplate('/buckets/{hello=what}/{world}')
self.assertEqual(str(template), 'buckets/{hello=what}/{world=*}')
- template = PathTemplate('/buckets/hello?#$##:what')
- self.assertEqual(str(template), 'buckets/hello?#$##:what')
+ template = PathTemplate('/buckets/helloazAZ09-.~_:what')
+ self.assertEqual(str(template), 'buckets/helloazAZ09-.~_:what') | Restrict literal regex according to RFC spec
Add t_error to catch illegal token exceptions. | googleapis_gax-python | train |
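The gax change tightens the LITERAL token to the RFC 3986 unreserved-characters class and adds a `t_error` hook so illegal characters raise instead of being skipped. A regex-only Python sketch of the same validation (PLY itself left out):

```python
import re

# Narrowed from "anything but /{}=*" to the unreserved characters:
# letters, digits, ".", "_", "~" and "-".
LITERAL = re.compile(r'[a-zA-Z0-9._~-]+$')

def validate_segment(seg):
    if not LITERAL.match(seg):
        raise ValueError("lexer error: illegal character in %r" % seg)
    return seg

validate_segment("hello")
try:
    validate_segment("wor@ld")
except ValueError as e:
    print(e)
```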
8d8b8313793b50f0d4b88ff89072ea3733581308 | diff --git a/lib/FSi/Component/DataSource/Extension/Core/Ordering/Field/FieldExtension.php b/lib/FSi/Component/DataSource/Extension/Core/Ordering/Field/FieldExtension.php
index <HASH>..<HASH> 100644
--- a/lib/FSi/Component/DataSource/Extension/Core/Ordering/Field/FieldExtension.php
+++ b/lib/FSi/Component/DataSource/Extension/Core/Ordering/Field/FieldExtension.php
@@ -38,7 +38,7 @@ class FieldExtension extends FieldAbstractExtension
*/
public function getExtendedFieldTypes()
{
- return array('text', 'number', 'date', 'time', 'datetime', 'entity');
+ return array('text', 'number', 'date', 'time', 'datetime');
}
/** | Sorting results by entity field does not make any sense | fsi-open_datasource | train |
19f2f9be1740eab6eedaf3304ba867140510720c | diff --git a/lib/rails_admin/config/configurable.rb b/lib/rails_admin/config/configurable.rb
index <HASH>..<HASH> 100644
--- a/lib/rails_admin/config/configurable.rb
+++ b/lib/rails_admin/config/configurable.rb
@@ -23,6 +23,23 @@ module RailsAdmin
self.class.register_deprecated_instance_option(option_name, replacement_option_name, scope, &custom_error)
end
+ private
+
+ def with_recurring(option_name, value_proc, default_proc)
+ # Track recursive invocation with an instance variable. This prevents run-away recursion
+ # and allows configurations such as
+ # label { "#{label}".upcase }
+ # This will use the default definition when called recursively.
+ if instance_variable_get("@#{option_name}_recurring")
+ instance_eval(&default_proc)
+ else
+ instance_variable_set("@#{option_name}_recurring", true)
+ instance_eval(&value_proc)
+ end
+ ensure
+ instance_variable_set("@#{option_name}_recurring", false)
+ end
+
module ClassMethods
# Register an instance option. Instance option is a configuration
# option that stores its value within an instance variable and is
@@ -51,17 +68,7 @@ module RailsAdmin
value = instance_variable_get("@#{option_name}_registered")
case value
when Proc
- # Track recursive invocation with an instance variable. This prevents run-away recursion
- # and allows configurations such as
- # label { "#{label}".upcase }
- # This will use the default definition when called recursively.
- if instance_variable_get("@#{option_name}_recurring")
- value = instance_eval(&default)
- else
- instance_variable_set("@#{option_name}_recurring", true)
- value = instance_eval(&value)
- instance_variable_set("@#{option_name}_recurring", false)
- end
+ value = with_recurring(option_name, value, default)
when nil
value = instance_eval(&default)
end
diff --git a/spec/integration/config/edit/rails_admin_config_edit_spec.rb b/spec/integration/config/edit/rails_admin_config_edit_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/integration/config/edit/rails_admin_config_edit_spec.rb
+++ b/spec/integration/config/edit/rails_admin_config_edit_spec.rb
@@ -63,6 +63,20 @@ describe 'RailsAdmin Config DSL Edit Section', type: :request do
visit new_path(model_name: 'team')
expect(find_field('team[color]').value).to eq('black')
end
+
+ it 'renders custom value next time if error happend' do
+ RailsAdmin.config(Team) do
+ field :name do
+ render do
+ bindings[:object].persisted? ? 'Custom Name' : raise(ZeroDivisionError)
+ end
+ end
+ end
+ expect { visit new_path(model_name: 'team') }.to raise_error(/ZeroDivisionError/)
+ record = FactoryBot.create(:team)
+ visit edit_path(model_name: 'team', id: record.id)
+ expect(page).to have_content('Custom Name')
+ end
end
  describe 'css hooks' do | Fix field being reset to default if an error happened | sferik_rails_admin | train
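The ensure clause is the substance of this fix: the recursion flag must be cleared even when the user-supplied block raises, otherwise every later evaluation silently falls back to the default. A minimal Python sketch of the same guard pattern — names are illustrative, not taken from rails_admin:

import functools

def with_recursion_guard(default):
    """If func re-enters itself, return default(self) instead of recursing forever."""
    def decorator(func):
        @functools.wraps(func)
        def wrapper(self, *args, **kwargs):
            flag = '_%s_recurring' % func.__name__
            if getattr(self, flag, False):
                return default(self)       # recursive call: use the default definition
            setattr(self, flag, True)
            try:
                return func(self, *args, **kwargs)
            finally:
                # reset even on error, mirroring Ruby's ensure, so the
                # next call evaluates the custom value again
                setattr(self, flag, False)
        return wrapper
    return decorator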
7bacdcc99e63501e16ac6dbcb73490a317c9e86b | diff --git a/fastlane.gemspec b/fastlane.gemspec
index <HASH>..<HASH> 100644
--- a/fastlane.gemspec
+++ b/fastlane.gemspec
@@ -34,13 +34,13 @@ Gem::Specification.new do |spec|
spec.add_dependency 'fastlane_core', '>= 0.6.0' # all shared code and dependencies
# All the fastlane tools
- spec.add_dependency 'deliver', '>= 0.9.1'
+ spec.add_dependency 'deliver', '>= 0.9.2'
spec.add_dependency 'snapshot', '>= 0.6.1'
spec.add_dependency 'frameit', '>= 1.0.0'
- spec.add_dependency 'pem', '>= 0.5.3'
+ spec.add_dependency 'pem', '>= 0.5.4'
spec.add_dependency 'sigh', '>= 0.4.9'
spec.add_dependency 'produce', '>= 0.2.0'
- spec.add_dependency 'cert', '>= 0.1.6'
+ spec.add_dependency 'cert', '>= 0.1.7'
# Development only
spec.add_development_dependency 'bundler'
diff --git a/lib/fastlane/actions/cert.rb b/lib/fastlane/actions/cert.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/actions/cert.rb
+++ b/lib/fastlane/actions/cert.rb
@@ -18,16 +18,7 @@ module Fastlane
Dir.chdir(FastlaneFolder.path || Dir.pwd) do
# This should be executed in the fastlane folder
- values = params.first
- unless values.kind_of?Hash
- # Old syntax
- values = {}
- params.each do |val|
- values[val] = true
- end
- end
-
- Cert.config = FastlaneCore::Configuration.create(Cert::Options.available_options, (values || {}))
+ Cert.config = params # we alread have the finished config
Cert::CertRunner.run
cert_file_path = ENV["CER_FILE_PATH"]
diff --git a/lib/fastlane/actions/increment_build_number.rb b/lib/fastlane/actions/increment_build_number.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/actions/increment_build_number.rb
+++ b/lib/fastlane/actions/increment_build_number.rb
@@ -62,6 +62,7 @@ module Fastlane
FastlaneCore::ConfigItem.new(key: :xcodeproj,
env_name: "FL_BUILD_NUMBER_PROJECT",
description: "optional, you must specify the path to your main Xcode project if it is not in the project root directory",
+ optional: true,
verify_block: Proc.new do |value|
raise "Please pass the path to the project, not the workspace".red if value.include?"workspace"
raise "Could not find Xcode project".red if (not File.exists?(value) and not Helper.is_test?)
diff --git a/lib/fastlane/actions/pem.rb b/lib/fastlane/actions/pem.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/actions/pem.rb
+++ b/lib/fastlane/actions/pem.rb
@@ -1,34 +1,25 @@
module Fastlane
module Actions
- module SharedValues
-
- end
-
class PemAction < Action
def self.run(params)
require 'pem'
require 'pem/options'
require 'pem/manager'
- values = params.first
-
begin
FastlaneCore::UpdateChecker.start_looking_for_update('pem') unless Helper.is_test?
- success_block = values[:new_profile]
- values.delete(:new_profile) # as it's not in the configs
+ success_block = params[:new_profile]
- PEM.config = FastlaneCore::Configuration.create(PEM::Options.available_options, (values || {}))
+ PEM.config = params
profile_path = PEM::Manager.start
if profile_path
- success_block.call(File.expand_path(profile_path))
+ success_block.call(File.expand_path(profile_path)) if success_block
end
ensure
FastlaneCore::UpdateChecker.show_update_status('pem', PEM::VERSION)
end
- rescue => ex
- puts ex
end
def self.description
@@ -51,7 +42,15 @@ module Fastlane
def self.available_options
require 'pem'
require 'pem/options'
- PEM::Options.available_options
+
+ unless @options
+ @options = PEM::Options.available_options
+ @options << FastlaneCore::ConfigItem.new(key: :new_profile,
+ env_name: "",
+ description: "Block that is called if there is a new profile",
+ optional: true)
+ end
+ @options
end
def self.is_supported?(platform)
diff --git a/lib/fastlane/actions/sigh.rb b/lib/fastlane/actions/sigh.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/actions/sigh.rb
+++ b/lib/fastlane/actions/sigh.rb
@@ -6,26 +6,16 @@ module Fastlane
end
class SighAction < Action
- def self.run(params)
+ def self.run(values)
require 'sigh'
require 'sigh/options'
require 'sigh/manager'
require 'credentials_manager/appfile_config'
- values = params.first
-
- unless values.kind_of?Hash
- # Old syntax
- values = {}
- params.each do |val|
- values[val] = true
- end
- end
-
begin
FastlaneCore::UpdateChecker.start_looking_for_update('sigh') unless Helper.is_test?
- Sigh.config = FastlaneCore::Configuration.create(Sigh::Options.available_options, (values || {}))
+      Sigh.config = values # we already have the finished config
path = Sigh::Manager.start
diff --git a/lib/fastlane/version.rb b/lib/fastlane/version.rb
index <HASH>..<HASH> 100644
--- a/lib/fastlane/version.rb
+++ b/lib/fastlane/version.rb
@@ -1,3 +1,3 @@
module Fastlane
- VERSION = '0.12.0'
+ VERSION = '0.12.1'
end | Updated sigh, pem, increment_build_number and cert actions
Updated dependencies
Version bump | fastlane_fastlane | train |
7d99188bfe8ce6d20b8a77a59c4601dd0cf584fd | diff --git a/bin/rest-ftp-daemon b/bin/rest-ftp-daemon
index <HASH>..<HASH> 100755
--- a/bin/rest-ftp-daemon
+++ b/bin/rest-ftp-daemon
@@ -20,7 +20,7 @@ parser = OptionParser.new do |opts|
opts.banner = "Usage: #{File.basename $0} [options] start|stop|restart"
opts.on("-c", "--config CONFIGFILE") { |config| APP_CONF = config }
opts.on("-e", "--environment ENV") { |env| APP_ENV = env }
- opts.on("", "--dev") { APP_ENV = "development" }
+ opts.on("", "--dev") { APP_ENV = "development" }
opts.on("-p", "--port PORT", "use PORT") { |port| options["port"] = port.to_i }
opts.on("-w", "--workers COUNT", "Use COUNT worker threads") { |count| options["workers"] = count.to_i }
opts.on("-d", "--daemonize", "Run daemonized in the background") { |bool| options["daemonize"] = true }
@@ -29,7 +29,7 @@ parser = OptionParser.new do |opts|
opts.on("-u", "--user NAME", "User to run daemon as (use with -g)") { |user| options["user"] = user }
opts.on("-g", "--group NAME", "Group to run daemon as (use with -u)"){ |group| options["group"] = group }
opts.on_tail("-h", "--help", "Show this message") { puts opts; exit }
- opts.on_tail('-v', '--version', "Show version (#{APP_VER})") { puts APP_VER; exit }
+ opts.on_tail('-v', '--version', "Show version (#{APP_VER})") { puts APP_VER; exit }
end
begin
parser.order!(ARGV)
@@ -43,18 +43,17 @@ rescue OptionParser::InvalidOption => e
exit 12
end
-# Load helpers and config
# Build configuration file path from options
APP_CONF ||= File.expand_path "/etc/#{APP_NAME}.yml"
unless File.exists? APP_CONF
puts "EXITING: cannot read configuration file: #{APP_CONF}"
exit 13
end
+
+# Load helpers and config, and merge options from ARGV into settings
[:helpers, :config].each do |lib|
require File.expand_path("#{app_root}/lib/rest-ftp-daemon/#{lib.to_s}")
end
-
-# Merge options from ARGV into settings
Settings.merge!(options)
# Display compiled configuration
diff --git a/lib/rest-ftp-daemon/api/jobs.rb b/lib/rest-ftp-daemon/api/jobs.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/api/jobs.rb
+++ b/lib/rest-ftp-daemon/api/jobs.rb
@@ -70,8 +70,7 @@ module RestFtpDaemon
end
post '/jobs/' do
- info "POST /jobs"
- # #{params.inspect}"
+ info "POST /jobs #{params.inspect}"
# request.body.rewind
begin
diff --git a/lib/rest-ftp-daemon/constants.rb b/lib/rest-ftp-daemon/constants.rb
index <HASH>..<HASH> 100644
--- a/lib/rest-ftp-daemon/constants.rb
+++ b/lib/rest-ftp-daemon/constants.rb
@@ -1,6 +1,5 @@
# Terrific constants
APP_NAME = "rest-ftp-daemon"
-APP_CONF = File.expand_path "/etc/#{APP_NAME}.yml"
APP_VER = "0.100.2"
# Some global constants | minor changes (cleanup, debug) | bmedici_rest-ftp-daemon | train |
60012ce95fa2a82ac4c38900108cc4389831b700 | diff --git a/src/org/jgroups/protocols/MPING.java b/src/org/jgroups/protocols/MPING.java
index <HASH>..<HASH> 100644
--- a/src/org/jgroups/protocols/MPING.java
+++ b/src/org/jgroups/protocols/MPING.java
@@ -21,7 +21,7 @@ import java.util.*;
* back via the regular transport (e.g. TCP) to the sender (discovery request contained sender's regular address,
* e.g. 192.168.0.2:7800).
* @author Bela Ban
- * @version $Id: MPING.java,v 1.35 2008/04/12 12:41:41 belaban Exp $
+ * @version $Id: MPING.java,v 1.36 2008/04/12 12:45:50 belaban Exp $
*/
public class MPING extends PING implements Runnable {
MulticastSocket mcast_sock=null;
@@ -369,7 +369,7 @@ public class MPING extends PING implements Runnable {
break;
}
catch(Throwable ex) {
- log.error("failed receiving packet", ex);
+ log.error("failed receiving packet (from " + packet.getSocketAddress() + ")", ex);
}
finally {
        closeInputStream(inp); | Print the address of the sender that caused an exception | belaban_JGroups | train
21b00c30ecc159a5df389fe96287898660f659d2 | diff --git a/kafka/producer/kafka.py b/kafka/producer/kafka.py
index <HASH>..<HASH> 100644
--- a/kafka/producer/kafka.py
+++ b/kafka/producer/kafka.py
@@ -464,7 +464,6 @@ class KafkaProducer(object):
assert timeout >= 0
log.info("Closing the Kafka producer with %s secs timeout.", timeout)
- #first_exception = AtomicReference() # this will keep track of the first encountered exception
invoked_from_callback = bool(threading.current_thread() is self._sender)
if timeout > 0:
if invoked_from_callback: | Remove unused/weird comment line (#<I>) | dpkp_kafka-python | train |
7e47662ffb72d0b286b6d6d0d5f927483ce40f61 | diff --git a/s3backup/sync.py b/s3backup/sync.py
index <HASH>..<HASH> 100644
--- a/s3backup/sync.py
+++ b/s3backup/sync.py
@@ -167,7 +167,7 @@ def get_sync_actions(client_1, client_2):
action_1.remote_timestamp == action_2.remote_timestamp
):
deferred_calls[key] = DeferredFunction(
- delete_client, client_2, key, action_1.local_timestamp
+ delete_client, client_2, key, action_1.remote_timestamp
)
elif (
@@ -176,7 +176,7 @@ def get_sync_actions(client_1, client_2):
action_1.remote_timestamp == action_2.remote_timestamp
):
deferred_calls[key] = DeferredFunction(
- delete_client, client_1, key, action_2.local_timestamp
+ delete_client, client_1, key, action_2.remote_timestamp
)
# TODO: Check DELETE timestamp. if it is older than you should be able to safely ignore it | Fix bug in delete operation not setting remote timestamp | MichaelAquilina_S4 | train |
4a5b1daf588198df2959c1b6afd92f413a7660a3 | diff --git a/tpot/tpot.py b/tpot/tpot.py
index <HASH>..<HASH> 100644
--- a/tpot/tpot.py
+++ b/tpot/tpot.py
@@ -40,21 +40,39 @@ from deap import tools
from deap import gp
class TPOT(object):
- """
+ """TPOT automatically creates and optimizes Machine Learning pipelines using genetic programming.
+
Parameters
----------
population_size: int (default: 100)
- Number of initial pipelines
+ The number of pipelines in the genetic algorithm population.
+ Must be > 0. The more pipelines in the population,
+ the slower TPOT will run, but it's also more likely to
+ find better pipelines.
generations: int (default: 100)
- Number of generations to evolve the pipeline
+ The number of generations to run pipeline optimization for. Must be > 0.
+ The more generations you give TPOT to run, the longer it takes,
+ but it's also more likely to find better pipelines.
mutation_rate: float (default: 0.9)
- Value to control the mutation rate of a pipeline
+ The mutation rate for the genetic programming algorithm
+ in the range [0.0, 1.0]. This tells the genetic programming algorithm
+ how many pipelines to apply random changes to every generation.
+ We don't recommend that you tweak this parameter unless you
+ know what you're doing.
crossover_rate: float (default: 0.05)
- Likelihood of swapping elements between pipelines
+ The crossover rate for the genetic programming algorithm
+ in the range [0.0, 1.0]. This tells the genetic programming
+ algorithm how many pipelines to "breed" every generation.
+ We don't recommend that you tweak this parameter
+ unless you know what you're doing.
random_state: int (default: 0)
- Value to initialize a random seed. No random seed if None
+ The random number generator seed for TPOT.
+ Use this to make sure that TPOT will give you the same results
+ each time you run it against the same data set with that seed.
+ No random seed if random_state=None.
verbosity: int {0, 1, 2} (default: 0)
- Verbosity level for output printed to the standard output device
+ How much information TPOT communicates while
+ it's running. 0 = none, 1 = minimal, 2 = all
Attributes
----------
@@ -71,7 +89,6 @@ class TPOT(object):
mutation_rate=0.9, crossover_rate=0.05,
random_state=0, verbosity=0):
"""Sets up the genetic programming algorithm for pipeline optimization."""
-
self.population_size = population_size
self.generations = generations
self.mutation_rate = mutation_rate
@@ -109,14 +126,11 @@ class TPOT(object):
self.toolbox.register('mutate', self._random_mutation_operator)
def fit(self, features, classes, feature_names=None):
- """
- Uses genetic programming to optimize a Machine Learning pipeline that
- maximizes classification accuracy on the provided `features` and `classes`.
-
- Optionally, name the features in the data frame according to `feature_names`.
-
- Performs a stratified training/testing cross-validaton split to avoid
- overfitting on the provided data.
+ """Uses genetic programming to optimize a Machine Learning pipeline that
+ maximizes classification accuracy on the provided `features` and `classes`.
+ Optionally, name the features in the data frame according to `feature_names`.
+        Performs a stratified training/testing cross-validation split to avoid
+ overfitting on the provided data.
Parameters
----------
@@ -401,7 +415,6 @@ class TPOT(object):
def _dt_feature_selection(self, input_df, num_pairs):
"""Uses decision trees to discover the best pair(s) of features to keep."""
-
num_pairs = min(max(1, num_pairs), 50)
# If this set of features has already been analyzed, use the cache.
@@ -467,7 +480,7 @@ class TPOT(object):
return balanced_accuracy,
def _combined_selection_operator(self, individuals, k):
- """ Regular selection + elitism."""
+ """Regular selection + elitism."""
best_inds = int(0.1 * k)
rest_inds = k - best_inds
return (tools.selBest(individuals, 1) * best_inds + | more verbose docstrings | EpistasisLab_tpot | train |
27755243fa9e72f0b13627ae78f86a8824d761d8 | diff --git a/doc/source/boolalg.rst b/doc/source/boolalg.rst
index <HASH>..<HASH> 100644
--- a/doc/source/boolalg.rst
+++ b/doc/source/boolalg.rst
@@ -249,11 +249,11 @@ So let's put it all together.
If you flip the coin twice, and the result is "heads", "tails",
that result is point :math:`(1, 0)` in a 2-dimensional Boolean space.
-Use the ``iter_space`` iterator to iterate through all possible points in an
+Use the ``iter_points`` iterator to iterate through all possible points in an
N-dimensional Boolean space::
>>> x, y = map(var, 'xy')
- >>> [ p for p in iter_space([x, y]) ]
+ >>> [ point for point in iter_points([x, y]) ]
[{x: 0, y: 0}, {x: 1, y: 0}, {x: 0, y: 1}, {x: 1, y: 1}]
The return value is a dictionary.
@@ -267,7 +267,7 @@ Use the variable :math:`z` to represent the result of the third flip.
>>> x, y, z = map(var, 'xyz')
# Put 'z' in the least-significant position
- >>> [ p for p in iter_space([z, y, x]) ]
+ >>> [ point for point in iter_points([z, y, x]) ]
[{x: 0, y: 0, z: 0},
{x: 0, y: 0, z: 1},
{x: 0, y: 1, z: 0},
diff --git a/pyeda/__init__.py b/pyeda/__init__.py
index <HASH>..<HASH> 100644
--- a/pyeda/__init__.py
+++ b/pyeda/__init__.py
@@ -22,7 +22,7 @@ from pyeda.binop import (
)
from pyeda.common import clog2
from pyeda.dimacs import load_cnf, dump_cnf, load_sat, dump_sat
-from pyeda.boolfunc import iter_space
+from pyeda.boolfunc import iter_points
from pyeda.expr import var, iter_cubes, factor, simplify
from pyeda.expr import Nor, Nand, OneHot0, OneHot
from pyeda.expr import Not, Or, And, Xor, Xnor, Equal, Implies, ITE
diff --git a/pyeda/boolfunc.py b/pyeda/boolfunc.py
index <HASH>..<HASH> 100644
--- a/pyeda/boolfunc.py
+++ b/pyeda/boolfunc.py
@@ -2,7 +2,7 @@
Boolean Functions
Interface Functions:
- iter_space
+ iter_points
Interface Classes:
Variable
@@ -14,7 +14,7 @@ Interface Classes:
from pyeda.common import bit_on
-def iter_space(vs):
+def iter_points(vs):
"""Iterate through all points in an N-dimensional space.
Parameters
@@ -123,7 +123,7 @@ class Function(object):
fst, rst = self.inputs[0], self.inputs[1:]
for p, cf in self.iter_cofactors(fst):
if cf == 1:
- for point in iter_space(rst):
+ for point in iter_points(rst):
point[fst] = p[fst]
yield point
elif cf != 0:
@@ -135,7 +135,7 @@ class Function(object):
fst, rst = self.inputs[0], self.inputs[1:]
for p, cf in self.iter_cofactors(fst):
if cf == 0:
- for point in iter_space(rst):
+ for point in iter_points(rst):
point[fst] = p[fst]
yield point
elif cf != 1:
@@ -144,7 +144,7 @@ class Function(object):
yield point
def iter_outputs(self):
- for point in iter_space(self.inputs):
+ for point in iter_points(self.inputs):
yield point, self.restrict(point)
def reduce(self):
@@ -195,7 +195,7 @@ class Function(object):
vs = list()
elif isinstance(vs, Function):
vs = [vs]
- for point in iter_space(vs):
+ for point in iter_points(vs):
yield point, self.restrict(point)
def cofactors(self, vs=None):
diff --git a/pyeda/constant.py b/pyeda/constant.py
index <HASH>..<HASH> 100644
--- a/pyeda/constant.py
+++ b/pyeda/constant.py
@@ -8,7 +8,7 @@ Interface Classes:
"""
from pyeda.common import boolify
-from pyeda.boolfunc import iter_space, Function
+from pyeda.boolfunc import iter_points, Function
class Constant(Function):
@@ -103,7 +103,7 @@ class One(Constant):
def satisfy_all(self):
vs = sorted(self.support)
- for point in iter_space(vs):
+ for point in iter_points(vs):
yield point
def satisfy_count(self):
diff --git a/pyeda/table.py b/pyeda/table.py
index <HASH>..<HASH> 100644
--- a/pyeda/table.py
+++ b/pyeda/table.py
@@ -10,7 +10,7 @@ Interface Classes:
"""
from pyeda.common import bit_on, boolify, cached_property
-from pyeda.boolfunc import iter_space, Function
+from pyeda.boolfunc import iter_points, Function
from pyeda.expr import Or, And, Not
# Positional Cube Notation
@@ -40,7 +40,7 @@ PC_COUNT_ONES = {n: sum((n >> (i << 1)) & 3 == PC_ONE for i in range(4))
def expr2truthtable(expr):
"""Convert an expression into a truth table."""
- outputs = (expr.restrict(point) for point in iter_space(expr.inputs))
+ outputs = (expr.restrict(point) for point in iter_points(expr.inputs))
return TruthTable(expr.inputs, outputs)
def truthtable2expr(tt, cnf=False): | Rename iter_space to iter_points | cjdrake_pyeda | train |
a90d47da60e1698233e059a5f8c797fff74ff8fc | diff --git a/pkg/kubelet/dockertools/manager.go b/pkg/kubelet/dockertools/manager.go
index <HASH>..<HASH> 100644
--- a/pkg/kubelet/dockertools/manager.go
+++ b/pkg/kubelet/dockertools/manager.go
@@ -584,6 +584,7 @@ func (dm *DockerManager) runContainer(
Image: container.Image,
// Memory and CPU are set here for older versions of Docker (pre-1.6).
Memory: memoryLimit,
+ MemorySwap: -1,
CPUShares: cpuShares,
WorkingDir: container.WorkingDir,
Labels: labels,
@@ -634,8 +635,9 @@ func (dm *DockerManager) runContainer(
NetworkMode: netMode,
IpcMode: ipcMode,
// Memory and CPU are set here for newer versions of Docker (1.6+).
- Memory: memoryLimit,
- CPUShares: cpuShares,
+ Memory: memoryLimit,
+ MemorySwap: -1,
+ CPUShares: cpuShares,
}
if len(opts.DNS) > 0 {
			hc.DNS = opts.DNS | Pass memory swap limit -1 by default. The Docker remote API never checks whether memory
swap is enabled in the kernel or not; instead it defaults the limit to
memory * 2 and returns API error <I> if swap is not enabled.
(cherry picked from commit f<I>a7a<I>f<I>f7bee3d4d<I>cb<I>) | kubernetes_kubernetes | train |
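The -1 value means "unlimited swap", which sidesteps Docker's memory*2 default and the resulting API error on kernels without swap accounting. The same knob is reachable from docker-py, roughly as below — a sketch using docker-py's documented run() kwargs, not Kubernetes code:

import docker

client = docker.from_env()
# mem_limit caps RAM; memswap_limit=-1 asks for unlimited swap, avoiding
# the memory*2 default that fails when swap accounting is disabled.
container = client.containers.run(
    'busybox', 'sleep 60',
    mem_limit='256m',
    memswap_limit=-1,
    detach=True,
)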
9b13a491fad0cc69be2462cff66fd7daacefc2de | diff --git a/tests/QueryTest.php b/tests/QueryTest.php
index <HASH>..<HASH> 100644
--- a/tests/QueryTest.php
+++ b/tests/QueryTest.php
@@ -277,8 +277,8 @@ class QueryTest extends \PHPUnit_Framework_TestCase
$this->assertEquals(
'update `employee` set `name`=`name`+1',
- ($q=new Query())
- ->field('name')->table('employee')->set('name',$q->expr('`name`+1'))
+ (new Query())
+ ->field('name')->table('employee')->set('name',new Expression('`name`+1'))
->selectTemplate('update')
->render()
); | Fix PHP <I> compatibility. | atk4_dsql | train |
186c2f4e393ab1c60bf48b1a4f70153aebb2d2e5 | diff --git a/lib/rexport/export_methods.rb b/lib/rexport/export_methods.rb
index <HASH>..<HASH> 100644
--- a/lib/rexport/export_methods.rb
+++ b/lib/rexport/export_methods.rb
@@ -247,6 +247,8 @@ module Rexport #:nodoc:
end
end
+ export_items.reset
+
position = 0
rexport_fields.each do |rexport_field|
position += 1 | fix sorting of renamed fields | wwidea_rexport | train |
140eec8cbfafa0d33d8a5a376d3c6919e9958e0b | diff --git a/routes/crud.js b/routes/crud.js
index <HASH>..<HASH> 100644
--- a/routes/crud.js
+++ b/routes/crud.js
@@ -135,7 +135,7 @@ function editDeleted(objId, req, res, next) {
var foreignKeys = {};
addRefs(deletedModel.schema, refs, foreignKeys);
// fetch it again with refs populated
- return runQuery(models.Deleted, refs, {_id: objId}, 'object.').then(function(docs) {
+ return runQuery(models.Deleted, refs, {_id: objId}, null, 'object.').then(function(docs) {
obj = docs && docs[0];
var restored = new deletedModel();
restored.set(obj.object);
@@ -308,8 +308,14 @@ function addRef(path, ref) {
return titlefields && {path: path, titlefields: titlefields, model: refmodel}
}
-function runQuery(model, refs, match, refPathPrefix) {
- var query = model.find(match || {}).sort({_id: 'desc'}).limit(exports.QUERYLIMIT);
+function runQuery(model, refs, match, unwind, refPathPrefix) {
+ var query;
+ if (unwind) {
+ query = model.aggregate({$match: (match || {})}).unwind(unwind);
+ } else {
+ query = model.find(match || {});
+ }
+ query.sort({_id: 'desc'}).limit(exports.QUERYLIMIT);
refs && refs.forEach(function(ref) {
// load the titlefields so the "title" virtual field works
query.populate((refPathPrefix||'') + ref.path, '_id ' + ref.titlefields, ref.model);
@@ -352,6 +358,7 @@ module.exports.table = function(req, res, next) {
return next(); // not found
}
var settings = (req.session.crudSettings && req.session.crudSettings[modelName]) || {};
+ var unwind = req.query.unwind;
var headers =[[{name:'id', colspan:1, nested:false, path:'id'}]];
var footer = [{name:'id', path:'id'}];
@@ -362,10 +369,10 @@ module.exports.table = function(req, res, next) {
if (name == 'id' || name == '_id')
return;
var schema = model.schema.tree[name];
- addToHeader(name, name, schema, 0);
+ addToHeader(name, name, schema, 0, unwind);
});
setRowspans(headers);
- addToFooter(model.schema.tree, '');
+ addToFooter(model.schema.tree, '', unwind);
var query = req.query.query && JSON.parse(req.query.query, function(key, value) {
if (value === "$currentDate") {
return new Date();
@@ -382,21 +389,27 @@ module.exports.table = function(req, res, next) {
formatdata: formatdata,
modelName: modelName,
pageLength: settings.pageLength || 10,
- objs: runQuery(model, refs, query),
+ objs: runQuery(model, refs, query, unwind),
// XXX table needs formatdata to render foreignKeys
// foreignKeys: utils.resolvePromises(foreignKeys)
}).then(function(result) {
// console.dir(result.objs[0].schema);
+ if (unwind) {
+ result.objs = result.objs.map(function(obj) {
+ return new model(obj);
+ });
+ }
result.hiddenColumns = (req.query.fields && findColumnsIndexes(footer, req.query.fields))
- || settings.hiddenColumns || findEmptyColumns(footer, result.objs);
+ || settings.hiddenColumns
+ || findEmptyColumns(footer, result.objs);
res.render('crud.html', result);
}).catch(next); //pass err to next
- function addToHeader(name, path, schema, level) {
+ function addToHeader(name, path, schema, level, unwind) {
if (name.slice(0,2) == '__')
return 0;
var colspan = 1;
- var nested = model.schema.nested[path];
+ var nested = !!model.schema.nested[path];
//console.log(path, 'nested', nested);
//console.dir(model.schema.paths[path]);
if (nested) {
@@ -404,6 +417,12 @@ module.exports.table = function(req, res, next) {
colspan = Object.keys(schema).reduce(function(memo, key){
return memo+addToHeader(key, path+'.'+key, schema[key], level+1)
}, 0);
+ } else if (path === unwind){
+ //schema is a DocumentArray
+ colspan = Object.keys(schema[0].tree).reduce(function(memo, key){
+ return memo+addToHeader(key, path+'.0.'+key, schema[0].tree[key], level+1)
+ }, 0);
+ nested = true;
} else if (schema.ref){
var ref = addRef(path, schema.ref);
if (ref) {
@@ -423,16 +442,19 @@ module.exports.table = function(req, res, next) {
}
//only include leaves
- function addToFooter(schema, path) {
+ function addToFooter(schema, path, unwind) {
Object.keys(schema).forEach(function(name) {
if (name.slice(0,2) == '__')
return;
if (!path && (name == 'id' || name == '_id'))
return;
- if (model.schema.nested[path+name])
- addToFooter(schema[name], path+name+'.')
- else
+ if (model.schema.nested[path+name]) {
+ addToFooter(schema[name], path+name+'.', '')
+ } else if (name === unwind) {
+ addToFooter(schema[name][0].tree, path+name+'.0.')
+ } else {
footer.push({name:name, path: path+name})
+ }
});
}
} | crud: add support for unwinding arrays into rows | onecommons_base | train |
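The unwind query parameter maps onto MongoDB's $unwind stage, which emits one output document per element of an array field. A minimal pymongo analogue of the aggregate/sort/limit pipeline runQuery builds — collection and field names here are invented:

from pymongo import MongoClient

db = MongoClient().mydb
rows = db.orders.aggregate([
    {'$match': {'status': 'open'}},
    {'$unwind': '$items'},        # one output document per array element
    {'$sort': {'_id': -1}},       # newest first, like the sort in runQuery
    {'$limit': 100},              # analogous to QUERYLIMIT
])
for row in rows:
    print(row['_id'], row['items'])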
76e56653961401843dafc3c3b87d68dfd5f20d21 | diff --git a/controller/src/main/java/org/jboss/as/controller/ExpressionResolverImpl.java b/controller/src/main/java/org/jboss/as/controller/ExpressionResolverImpl.java
index <HASH>..<HASH> 100644
--- a/controller/src/main/java/org/jboss/as/controller/ExpressionResolverImpl.java
+++ b/controller/src/main/java/org/jboss/as/controller/ExpressionResolverImpl.java
@@ -213,6 +213,11 @@ public class ExpressionResolverImpl implements ExpressionResolver {
// We're in an outer expression, so just discard the top stack element
// created when we saw the '$' and resume tracking the outer expression
stack.pop();
+ if(ch == '$') {
+ modified = true; // since we discarded the '$'
+ } else if (ch == '}') {//this may be the end of the outer expression
+ i--;
+ }
state = GOT_OPEN_BRACE;
}
continue;
diff --git a/controller/src/test/java/org/jboss/as/controller/ExpressionResolverUnitTestCase.java b/controller/src/test/java/org/jboss/as/controller/ExpressionResolverUnitTestCase.java
index <HASH>..<HASH> 100644
--- a/controller/src/test/java/org/jboss/as/controller/ExpressionResolverUnitTestCase.java
+++ b/controller/src/test/java/org/jboss/as/controller/ExpressionResolverUnitTestCase.java
@@ -415,6 +415,35 @@ public class ExpressionResolverUnitTestCase {
assertEquals("default", ExpressionResolver.TEST_RESOLVER.resolveExpressions(expression("${test.property1,test.property2:default}")).asString());
}
+ @Test
+ public void testExpressionWithDollarEndingDefaultValue() throws OperationFailedException {
+ try {
+ ModelNode node = new ModelNode();
+ node.get("expr").set(new ValueExpression("${test.property.dollar.default:default$}-test"));
+ node = ExpressionResolver.TEST_RESOLVER.resolveExpressions(node);
+ assertEquals("default$-test", node.get("expr").asString());
+ node = new ModelNode();
+ node.get("expr").set(new ValueExpression("${test.property.dollar.default:default$test}-test"));
+ node = ExpressionResolver.TEST_RESOLVER.resolveExpressions(node);
+ assertEquals(1, node.keys().size());
+ assertEquals("default$test-test", node.get("expr").asString());
+
+ System.setProperty("test.property.dollar.default", "system-prop-value");
+ node = new ModelNode();
+ node.get("expr").set(new ValueExpression("${test.property.dollar.default:default$}-test"));
+ node = ExpressionResolver.TEST_RESOLVER.resolveExpressions(node);
+ assertEquals(1, node.keys().size());
+ assertEquals("system-prop-value-test", node.get("expr").asString());
+ node = new ModelNode();
+ node.get("expr").set(new ValueExpression("${test.property.dollar.default:default$test}-test"));
+ node = ExpressionResolver.TEST_RESOLVER.resolveExpressions(node);
+ assertEquals(1, node.keys().size());
+ assertEquals("system-prop-value-test", node.get("expr").asString());
+ } finally {
+ System.clearProperty("test.property.dollar.default");
+ }
+ }
+
private ModelNode expression(String str) {
return new ModelNode(new ValueExpression(str));
     } | [WFCORE-<I>]: Fix a failure when resolving an expression whose default value ends with '$'.
Check whether we are at the end of the outer expression. | wildfly_wildfly-core | train
78caab004f02e6d124cb346130c828f9fdb04a94 | diff --git a/tests/_util.py b/tests/_util.py
index <HASH>..<HASH> 100644
--- a/tests/_util.py
+++ b/tests/_util.py
@@ -1,22 +1,10 @@
import unittest
import asyncio
import websockets
+from websockets.exceptions import ConnectionClosed
from functools import wraps
-def run_until_complete(fun):
- if not asyncio.iscoroutinefunction(fun):
- fun = asyncio.coroutine(fun)
-
- @wraps(fun)
- def wrapper(test, *args, **kw):
- loop = test.loop
- ret = loop.run_until_complete(
- asyncio.wait_for(fun(test, *args, **kw), 5, loop=loop))
- return ret
- return wrapper
-
-
class EchoServer():
def __init__(self, loop, host, port):
@@ -48,7 +36,7 @@ class AsyncTestCase(unittest.TestCase):
self.loop = asyncio.new_event_loop()
self.server = None
asyncio.set_event_loop(None)
- self.echo = self.make_echo_server()
+ self.echo = yield from self.make_echo_server()
def tearDown(self):
if self.server is not None:
@@ -57,6 +45,7 @@ class AsyncTestCase(unittest.TestCase):
self.loop.close()
del self.loop
+ @asyncio.coroutine
def make_echo_server(self):
"""
Creates and returns the 'wss://host:port' of a basic websocket echo
@@ -64,6 +53,6 @@ class AsyncTestCase(unittest.TestCase):
"""
addr = ('127.0.0.1', 8888)
self.server = EchoServer(self.loop, addr[0], addr[1])
- self.loop.run_until_complete(self.server.start())
+ yield from self.loop.run_until_complete(self.server.start())
return 'ws://%s:%s' % addr
diff --git a/tests/test_connection.py b/tests/test_connection.py
index <HASH>..<HASH> 100644
--- a/tests/test_connection.py
+++ b/tests/test_connection.py
@@ -1,10 +1,9 @@
-from ._util import AsyncTestCase, run_until_complete
+from ._util import AsyncTestCase
from beam_interactive.proto import Error
from beam_interactive import start
class TestConnection(AsyncTestCase):
- @run_until_complete
def test_handshakes(self):
conn = yield from start(self.echo, 42, 'asdf', loop=self.loop) | Clean up some async stuff | mixer_beam-interactive-python | train |
94774dfab3b502492e9522ac441a02a8d694c8d3 | diff --git a/core/src/main/java/cucumber/runtime/RuntimeOptions.java b/core/src/main/java/cucumber/runtime/RuntimeOptions.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/cucumber/runtime/RuntimeOptions.java
+++ b/core/src/main/java/cucumber/runtime/RuntimeOptions.java
@@ -88,7 +88,7 @@ public class RuntimeOptions {
pluginFormatterNames.add("progress");
}
if (pluginSummaryPrinterNames.isEmpty()) {
- pluginSummaryPrinterNames.add("cucumber.runtime.DefaultSummaryPrinter");
+ pluginSummaryPrinterNames.add("default_summary");
}
}
diff --git a/core/src/main/java/cucumber/runtime/formatter/PluginFactory.java b/core/src/main/java/cucumber/runtime/formatter/PluginFactory.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/cucumber/runtime/formatter/PluginFactory.java
+++ b/core/src/main/java/cucumber/runtime/formatter/PluginFactory.java
@@ -3,6 +3,8 @@ package cucumber.runtime.formatter;
import cucumber.api.StepDefinitionReporter;
import cucumber.api.SummaryPrinter;
import cucumber.runtime.CucumberException;
+import cucumber.runtime.DefaultSummaryPrinter;
+import cucumber.runtime.NullSummaryPrinter;
import cucumber.runtime.io.URLOutputStream;
import cucumber.runtime.io.UTF8OutputStreamWriter;
import gherkin.formatter.Formatter;
@@ -56,6 +58,8 @@ public class PluginFactory {
put("json", CucumberJSONFormatter.class);
put("usage", UsageFormatter.class);
put("rerun", RerunFormatter.class);
+ put("default_summary", DefaultSummaryPrinter.class);
+ put("null_summary", NullSummaryPrinter.class);
}};
private static final Pattern PLUGIN_WITH_FILE_PATTERN = Pattern.compile("([^:]+):(.*)");
private String defaultOutFormatter = null;
diff --git a/core/src/main/resources/cucumber/api/cli/USAGE.txt b/core/src/main/resources/cucumber/api/cli/USAGE.txt
index <HASH>..<HASH> 100644
--- a/core/src/main/resources/cucumber/api/cli/USAGE.txt
+++ b/core/src/main/resources/cucumber/api/cli/USAGE.txt
@@ -4,8 +4,9 @@ Options:
-g, --glue PATH Where glue code (step definitions and hooks) is loaded from.
-p, --plugin PLUGIN[:PATH_OR_URL] Register a plugin.
- Built-in PLUGIN types: junit, html, pretty, progress, json, usage,
- rerun. PLUGIN can also be a fully qualified class name, allowing
+ Built-in formatter PLUGIN types: junit, html, pretty, progress,
+ json, usage, rerun. Built-in summary PLUGIN types: default_summary,
+ null_summary. PLUGIN can also be a fully qualified class name, allowing
registration of 3rd party plugins.
-f, --format FORMAT[:PATH_OR_URL] Deprecated. Use --plugin instead.
-t, --tags TAG_EXPRESSION Only run scenarios tagged with tags matching TAG_EXPRESSION.
diff --git a/core/src/test/java/cucumber/runtime/RuntimeOptionsTest.java b/core/src/test/java/cucumber/runtime/RuntimeOptionsTest.java
index <HASH>..<HASH> 100644
--- a/core/src/test/java/cucumber/runtime/RuntimeOptionsTest.java
+++ b/core/src/test/java/cucumber/runtime/RuntimeOptionsTest.java
@@ -100,7 +100,7 @@ public class RuntimeOptionsTest {
@Test
public void creates_null_summary_printer() {
- RuntimeOptions options = new RuntimeOptions(asList("--plugin", "cucumber.runtime.NullSummaryPrinter", "--glue", "somewhere"));
+ RuntimeOptions options = new RuntimeOptions(asList("--plugin", "null_summary", "--glue", "somewhere"));
assertPluginExists(options.getPlugins(), "cucumber.runtime.NullSummaryPrinter");
assertPluginNotExists(options.getPlugins(), "cucumber.runtime.DefaultSummaryPrinter");
}
diff --git a/jython/bin/cucumber-jvm.py b/jython/bin/cucumber-jvm.py
index <HASH>..<HASH> 100644
--- a/jython/bin/cucumber-jvm.py
+++ b/jython/bin/cucumber-jvm.py
@@ -5,12 +5,10 @@ cucumber_jython_shaded_path = os.path.dirname(inspect.getfile(inspect.currentfra
sys.path.append(cucumber_jython_shaded_path)
from java.io import File
-from java.lang import Thread
from java.net import URLClassLoader
from cucumber.api.cli import Main
cl = URLClassLoader([File(cucumber_jython_shaded_path).toURL()], Main.getClassLoader())
-Thread.currentThread().contextClassLoader = cl
exitstatus = Main.run(sys.argv[1:], cl)
-sys.exit(exitstatus)
+sys.exit(exitstatus)
\ No newline at end of file | Make short names for the built-in summary plugin types. | cucumber_cucumber-jvm | train |
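The change is a lookup-table extension: short plugin names resolve to built-in classes, and anything unrecognized is treated as a fully qualified class name. The dispatch logic, reduced to Python with hypothetical names:

PLUGIN_CLASSES = {
    'default_summary': 'cucumber.runtime.DefaultSummaryPrinter',
    'null_summary': 'cucumber.runtime.NullSummaryPrinter',
}

def resolve_plugin(name):
    # Built-in short name, or pass through as a fully qualified
    # class name for third-party plugins.
    return PLUGIN_CLASSES.get(name, name)

assert resolve_plugin('null_summary') == 'cucumber.runtime.NullSummaryPrinter'
assert resolve_plugin('com.example.MyPlugin') == 'com.example.MyPlugin'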
f7f9a8870db5c05d13c3047316313d64790a1536 | diff --git a/magic.py b/magic.py
index <HASH>..<HASH> 100644
--- a/magic.py
+++ b/magic.py
@@ -101,12 +101,15 @@ def from_buffer(buffer, mime=False):
libmagic = ctypes.CDLL(ctypes.util.find_library('magic'))
-if not libmagic._name:
+if not libmagic or not libmagic._name:
import sys
if sys.platform == "darwin":
# try mac ports location
libmagic = ctypes.CDLL('/opt/local/lib/libmagic.dylib')
-if not libmagic._name:
+ elif sys.platform == "win32":
+ # try local magic1.dll
+ libmagic = ctypes.CDLL('magic1.dll')
+if not libmagic or not libmagic._name:
raise Exception('failed to find libmagic. Check your installation')
magic_t = ctypes.c_void_p | Added win<I> compatibility | ahupp_python-magic | train |
e32f648200fe4c8e845d3b8f3c9a0e94b198105d | diff --git a/src/main/java/org/asciidoctor/ast/AbstractBlock.java b/src/main/java/org/asciidoctor/ast/AbstractBlock.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asciidoctor/ast/AbstractBlock.java
+++ b/src/main/java/org/asciidoctor/ast/AbstractBlock.java
@@ -6,26 +6,15 @@ import java.util.Map;
public interface AbstractBlock {
String id();
-
String title();
-
String role();
-
String style();
-
List<AbstractBlock> blocks();
-
Map<String, Object> attributes();
-
Object content();
-
- String render();
-
+ String convert();
DocumentRuby document();
-
String context();
-
AbstractBlock delegate();
-
List<AbstractBlock> findBy(Map<Object, Object> selector);
}
diff --git a/src/main/java/org/asciidoctor/ast/AbstractBlockImpl.java b/src/main/java/org/asciidoctor/ast/AbstractBlockImpl.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asciidoctor/ast/AbstractBlockImpl.java
+++ b/src/main/java/org/asciidoctor/ast/AbstractBlockImpl.java
@@ -69,8 +69,8 @@ public class AbstractBlockImpl implements AbstractBlock {
}
@Override
- public String render() {
- return delegate.render();
+ public String convert() {
+ return delegate.convert();
}
@Override
diff --git a/src/main/java/org/asciidoctor/internal/JRubyAsciidoctor.java b/src/main/java/org/asciidoctor/internal/JRubyAsciidoctor.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/asciidoctor/internal/JRubyAsciidoctor.java
+++ b/src/main/java/org/asciidoctor/internal/JRubyAsciidoctor.java
@@ -198,7 +198,7 @@ public class JRubyAsciidoctor implements Asciidoctor {
if (content instanceof String) {
textContent = (String) content;
} else {
- textContent = child.render();
+ textContent = child.convert();
}
ContentPart contentPart = ContentPart.createContentPart(child.id(), level, child.context(), child.title(),
child.style(), child.role(), child.attributes(), textContent);
diff --git a/src/test/resources/sample-with-terminal-command.ad b/src/test/resources/sample-with-terminal-command.ad
index <HASH>..<HASH> 100644
--- a/src/test/resources/sample-with-terminal-command.ad
+++ b/src/test/resources/sample-with-terminal-command.ad
@@ -1,3 +1,5 @@
+Hello World
+
$ echo "Hello, World!"
$ gem install asciidoctor
\ No newline at end of file | resolves issue #<I> by renaming render method to convert method in AbstractBlock. | asciidoctor_asciidoctorj | train |
2d88bc17bec77750450075e1672a73bcb5f4341c | diff --git a/yotta/lib/cmakegen.py b/yotta/lib/cmakegen.py
index <HASH>..<HASH> 100644
--- a/yotta/lib/cmakegen.py
+++ b/yotta/lib/cmakegen.py
@@ -55,6 +55,8 @@ class CMakeGen(object):
logger.info("generate for target: %s" % target)
self.target = target
self.config_include_file = None
+ self.build_info_include_file = None
+ self.build_uuid = None
def _writeFile(self, path, contents):
dirname = os.path.dirname(path)
@@ -203,7 +205,7 @@ class CMakeGen(object):
r.append(('%s_%s' % (key_prefix, sanitizePreprocessorSymbol(k)), v))
return r
- def getConfigData(self, all_dependencies, component, builddir):
+ def getConfigData(self, all_dependencies, component, builddir, build_info_header_path):
''' returns (path_to_config_header, cmake_set_definitions) '''
add_defs_header = ''
set_definitions = ''
@@ -213,6 +215,12 @@ class CMakeGen(object):
definitions.append(('TARGET', sanitizePreprocessorSymbol(self.target.getName())))
definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(self.target.getName()),None))
+ # make the path to the build-info header available both to CMake and
+ # in the preprocessor:
+ full_build_info_header_path = os.path.abspath(build_info_header_path)
+ logger.debug('build info header include path: "%s"', full_build_info_header_path)
+ definitions.append(('YOTTA_BUILD_INFO_HEADER', '"'+full_build_info_header_path+'"'))
+
for target in self.target.getSimilarTo_Deprecated():
if '*' not in target:
definitions.append(('TARGET_LIKE_%s' % sanitizePreprocessorSymbol(target),None))
@@ -251,6 +259,50 @@ class CMakeGen(object):
)
return (config_include_file, set_definitions)
+ def getBuildInfo(self, sourcedir, builddir):
+ ''' Write the build info header file, and return (path_to_written_header, set_cmake_definitions) '''
+ cmake_defs = ''
+ preproc_defs = '// yotta build info, #include YOTTA_BUILD_INFO_HEADER to access\n'
+ # standard library modules
+ import datetime
+ # vcs, , represent version controlled directories, internal
+ import vcs
+
+ now = datetime.datetime.utcnow()
+ vcs = vcs.getVCS(sourcedir)
+ if self.build_uuid is None:
+ import uuid
+ self.build_uuid = uuid.uuid4()
+
+ definitions = [
+ ('YOTTA_BUILD_YEAR', now.year, 'UTC year'),
+ ('YOTTA_BUILD_MONTH', now.month, 'UTC month 1-12'),
+ ('YOTTA_BUILD_DAY', now.day, 'UTC day 1-31'),
+ ('YOTTA_BUILD_HOUR', now.hour, 'UTC hour 0-24'),
+ ('YOTTA_BUILD_MINUTE', now.minute, 'UTC minute 0-59'),
+ ('YOTTA_BUILD_SECOND', now.second, 'UTC second 0-61'),
+ ('YOTTA_BUILD_UUID', self.build_uuid, 'unique random UUID for each build'),
+ ]
+ if vcs is not None:
+ definitions += [
+            ('YOTTA_BUILD_VCS_ID', vcs.getCommitId(), 'git or mercurial hash'),
+ ('YOTTA_BUILD_VCS_CLEAN', vcs.getCommitId(), 'evaluates true if the version control system was clean, otherwise false')
+ ]
+
+ for d in definitions:
+ preproc_defs += '#define %s %s // %s\n' % d
+ cmake_defs += 'set(%s "%s") # %s\n' % d
+
+ buildinfo_include_file = os.path.join(builddir, 'yotta_build_info.h')
+ self._writeFile(
+ buildinfo_include_file,
+ '#ifndef __YOTTA_BUILD_INFO_H__\n'+
+ '#define __YOTTA_BUILD_INFO_H__\n'+
+ preproc_defs+
+ '#endif // ndef __YOTTA_BUILD_INFO_H__\n'
+ )
+ return (buildinfo_include_file, cmake_defs)
+
def generate(
self, builddir, modbuilddir, component, active_dependencies, immediate_dependencies, all_dependencies, application, toplevel
):
@@ -260,8 +312,14 @@ class CMakeGen(object):
'''
set_definitions = ''
+ if self.build_info_include_file is None:
+ assert(toplevel)
+ self.build_info_include_file, build_info_definitions = self.getBuildInfo(component.path, builddir)
+ set_definitions += build_info_definitions
+
if self.config_include_file is None:
- self.config_include_file, set_definitions = self.getConfigData(all_dependencies, component, builddir)
+ self.config_include_file, config_definitions = self.getConfigData(all_dependencies, component, builddir, self.build_info_include_file)
+ set_definitions += config_definitions
include_root_dirs = ''
if application is not None and component is not application: | generate build info header, and define YOTTA_BUILD_INFO_HEADER which can be used to include it | ARMmbed_yotta | train |
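getBuildInfo is string templating over (name, value, comment) triples, rendered once as C #defines and once as CMake set() lines. A trimmed standalone sketch of the header half, with the field list abbreviated:

import datetime
import uuid

def build_info_header():
    now = datetime.datetime.utcnow()
    definitions = [
        ('YOTTA_BUILD_YEAR', now.year, 'UTC year'),
        ('YOTTA_BUILD_MONTH', now.month, 'UTC month 1-12'),
        ('YOTTA_BUILD_UUID', uuid.uuid4(), 'unique random UUID for each build'),
    ]
    lines = ['#ifndef __YOTTA_BUILD_INFO_H__', '#define __YOTTA_BUILD_INFO_H__']
    lines += ['#define %s %s // %s' % d for d in definitions]
    lines.append('#endif // ndef __YOTTA_BUILD_INFO_H__')
    return '\n'.join(lines) + '\n'

print(build_info_header())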
a3dc2b563fb6450ac6f93e574dd75ec3cfdacbf4 | diff --git a/tests/integration/states/test_npm.py b/tests/integration/states/test_npm.py
index <HASH>..<HASH> 100644
--- a/tests/integration/states/test_npm.py
+++ b/tests/integration/states/test_npm.py
@@ -44,8 +44,7 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
'''
Determine if URL-referenced NPM module can be successfully installed.
'''
- npm_bin = salt.utils.path.which('npm')
- npm_version = cmd.run('{} -v'.format(npm_bin), timeout=10)
+ npm_version = self.run_function('cmd.run', ['npm -v'])
if LooseVersion(npm_version) >= LooseVersion(MAX_NPM_VERSION):
user = os.environ.get('SUDO_USER', 'root')
npm_dir = os.path.join(RUNTIME_VARS.TMP, 'git-install-npm')
@@ -74,12 +73,13 @@ class NpmStateTest(ModuleCase, SaltReturnAssertsMixin):
ret = self.run_state('npm.installed', name='unused', pkgs=['[email protected]', '[email protected]'], registry="http://registry.npmjs.org/")
self.assertSaltTrueReturn(ret)
- @skipIf(salt.utils.path.which('npm') and LooseVersion(cmd.run('npm -v')) >= LooseVersion(MAX_NPM_VERSION),
- 'Skip with npm >= 5.0.0 until #41770 is fixed')
@destructiveTest
def test_npm_cache_clean(self):
'''
Basic test to determine if NPM successfully cleans its cached packages.
'''
+ npm_version = self.run_function('cmd.run', ['npm -v'])
+ if LooseVersion(npm_version) >= LooseVersion(MAX_NPM_VERSION):
+ self.skipTest('Skip with npm >= 5.0.0 until #41770 is fixed')
ret = self.run_state('npm.cache_cleaned', name='unused', force=True)
        self.assertSaltTrueReturn(ret) | use run_function to call cmd.run | saltstack_salt | train
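run_function routes the command through the test minion rather than shelling out with an unavailable cmd helper; the version gate itself is an ordinary LooseVersion comparison. The skip idiom in isolation — version string hard-coded here for illustration:

import unittest
from distutils.version import LooseVersion

MAX_NPM_VERSION = '5.0.0'

class NpmTest(unittest.TestCase):
    def test_cache_clean(self):
        npm_version = '5.6.0'  # real test: self.run_function('cmd.run', ['npm -v'])
        if LooseVersion(npm_version) >= LooseVersion(MAX_NPM_VERSION):
            self.skipTest('Skip with npm >= 5.0.0 until #41770 is fixed')
        ...  # exercise npm.cache_cleaned here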
1f13755ca304a2ba061a19f398d76534a5d64db8 | diff --git a/findbugs/src/java/edu/umd/cs/findbugs/classfile/impl/ClassPathBuilder.java b/findbugs/src/java/edu/umd/cs/findbugs/classfile/impl/ClassPathBuilder.java
index <HASH>..<HASH> 100644
--- a/findbugs/src/java/edu/umd/cs/findbugs/classfile/impl/ClassPathBuilder.java
+++ b/findbugs/src/java/edu/umd/cs/findbugs/classfile/impl/ClassPathBuilder.java
@@ -58,6 +58,11 @@ import edu.umd.cs.findbugs.util.Archive;
public class ClassPathBuilder implements IClassPathBuilder {
private static final boolean VERBOSE = SystemProperties.getBoolean("findbugs2.verbose.builder");
private static final boolean DEBUG = VERBOSE || SystemProperties.getBoolean("findbugs2.debug.builder");
+
+ private static final int SPECIFIED = 0;
+ private static final int NESTED = 1;
+ private static final int IN_JAR_MANIFEST = 2;
+ private static final int IN_SYSTEM_CLASSPATH = 3;
/**
* Worklist item.
@@ -67,10 +72,12 @@ public class ClassPathBuilder implements IClassPathBuilder {
static class WorkListItem {
private ICodeBaseLocator codeBaseLocator;
private boolean isAppCodeBase;
+ private int howDiscovered;
- public WorkListItem(ICodeBaseLocator codeBaseLocator, boolean isApplication) {
+ public WorkListItem(ICodeBaseLocator codeBaseLocator, boolean isApplication, int howDiscovered) {
this.codeBaseLocator = codeBaseLocator;
this.isAppCodeBase = isApplication;
+ this.howDiscovered = howDiscovered;
}
public ICodeBaseLocator getCodeBaseLocator() {
@@ -80,6 +87,13 @@ public class ClassPathBuilder implements IClassPathBuilder {
public boolean isAppCodeBase() {
return isAppCodeBase;
}
+
+ /**
+ * @return Returns the howDiscovered.
+ */
+ public int getHowDiscovered() {
+ return howDiscovered;
+ }
}
/**
@@ -149,7 +163,7 @@ public class ClassPathBuilder implements IClassPathBuilder {
* @see edu.umd.cs.findbugs.classfile.IClassPathBuilder#addCodeBase(edu.umd.cs.findbugs.classfile.ICodeBaseLocator, boolean)
*/
public void addCodeBase(ICodeBaseLocator locator, boolean isApplication) {
- addToWorkList(projectWorkList, new WorkListItem(locator, isApplication));
+ addToWorkList(projectWorkList, new WorkListItem(locator, isApplication, SPECIFIED));
}
/* (non-Javadoc)
@@ -254,7 +268,7 @@ public class ClassPathBuilder implements IClassPathBuilder {
System.out.println("System classpath entry: " + entry);
}
addToWorkList(workList, new WorkListItem(
- classFactory.createFilesystemCodeBaseLocator(entry), false));
+ classFactory.createFilesystemCodeBaseLocator(entry), false, IN_SYSTEM_CLASSPATH));
}
}
@@ -281,7 +295,7 @@ public class ClassPathBuilder implements IClassPathBuilder {
for (File archive : fileList) {
addToWorkList(workList, new WorkListItem(
- classFactory.createFilesystemCodeBaseLocator(archive.getPath()), false));
+ classFactory.createFilesystemCodeBaseLocator(archive.getPath()), false, IN_SYSTEM_CLASSPATH));
}
}
@@ -326,12 +340,11 @@ public class ClassPathBuilder implements IClassPathBuilder {
// then failing to open/scan it is a fatal error.
// We issue warnings about problems with aux codebases,
// but continue anyway.
- boolean isAppCodeBase = item.isAppCodeBase();
try {
// Open the codebase and add it to the classpath
discoveredCodeBase = new DiscoveredCodeBase(item.getCodeBaseLocator().openCodeBase());
- discoveredCodeBase.getCodeBase().setApplicationCodeBase(isAppCodeBase);
+ discoveredCodeBase.getCodeBase().setApplicationCodeBase(item.isAppCodeBase());
// Note that this codebase has been visited
discoveredCodeBaseMap.put(item.getCodeBaseLocator().toString(), discoveredCodeBase);
@@ -347,15 +360,15 @@ public class ClassPathBuilder implements IClassPathBuilder {
// Check for a Jar manifest for additional aux classpath entries.
scanJarManifestForClassPathEntries(workList, discoveredCodeBase.getCodeBase());
} catch (IOException e) {
- if (isAppCodeBase) {
+ if (item.isAppCodeBase()) {
throw e;
- } else {
+ } else if (item.getHowDiscovered() == SPECIFIED) {
errorLogger.logError("Cannot open codebase " + item.getCodeBaseLocator(), e);
}
} catch (ResourceNotFoundException e) {
- if (isAppCodeBase) {
+ if (item.isAppCodeBase()) {
throw e;
- } else {
+ } else if (item.getHowDiscovered() == SPECIFIED) {
errorLogger.logError("Cannot open codebase " + item.getCodeBaseLocator(), e);
}
}
@@ -401,7 +414,7 @@ public class ClassPathBuilder implements IClassPathBuilder {
classFactory.createNestedArchiveCodeBaseLocator(codeBase, entry.getResourceName());
addToWorkList(
workList,
- new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase()));
+ new WorkListItem(nestedArchiveLocator, codeBase.isApplicationCodeBase(), NESTED));
}
}
}
@@ -439,7 +452,7 @@ public class ClassPathBuilder implements IClassPathBuilder {
// Codebases found in Class-Path entries are always
// added to the aux classpath, not the application.
- addToWorkList(workList, new WorkListItem(relativeCodeBaseLocator, false));
+ addToWorkList(workList, new WorkListItem(relativeCodeBaseLocator, false, IN_JAR_MANIFEST));
}
}
} finally { | Only log failure to open a codebase if it was explicitly specified
by the user. This avoids spurious warnings about missing codebases
referenced in jar manifests.
git-svn-id: <URL> | spotbugs_spotbugs | train |
36f32c1aa17676e1cdb7063f68c5bf77cda42977 | diff --git a/agent/session.go b/agent/session.go
index <HASH>..<HASH> 100644
--- a/agent/session.go
+++ b/agent/session.go
@@ -12,6 +12,8 @@ import (
"google.golang.org/grpc/codes"
)
+const dispatcherRPCTimeout = 5 * time.Second
+
var (
errSessionDisconnect = errors.New("agent: session disconnect") // instructed to disconnect
errSessionClosed = errors.New("agent: session closed")
@@ -88,9 +90,11 @@ func (s *session) start(ctx context.Context) error {
description.Hostname = s.agent.config.Hostname
}
- stream, err := client.Session(ctx, &api.SessionRequest{
+ sessionCtx, cancel := context.WithTimeout(ctx, dispatcherRPCTimeout)
+ stream, err := client.Session(sessionCtx, &api.SessionRequest{
Description: description,
})
+ cancel()
if err != nil {
return err
}
@@ -115,9 +119,11 @@ func (s *session) heartbeat(ctx context.Context) error {
for {
select {
case <-heartbeat.C:
- resp, err := client.Heartbeat(ctx, &api.HeartbeatRequest{
+ heartbeatCtx, cancel := context.WithTimeout(ctx, dispatcherRPCTimeout)
+ resp, err := client.Heartbeat(heartbeatCtx, &api.HeartbeatRequest{
SessionID: s.sessionID,
})
+ cancel()
if err != nil {
if grpc.Code(err) == codes.NotFound {
err = errNodeNotRegistered | agent: Add a timeout for Heartbeat RPC
If the manager becomes unreachable, but the TCP connection has not been
torn down, I believe this RPC call would hang. Add a timeout so that if
the RPC is not serviced in a reasonable amount of time, this will cause
the agent to fail over to a different manager. | docker_swarmkit | train
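Bounding each RPC with context.WithTimeout guarantees the heartbeat loop cannot block forever on a dead-but-open TCP connection. The equivalent shape in Python's asyncio, purely as an analogy to the Go pattern (heartbeat_rpc is a stand-in coroutine):

import asyncio

DISPATCHER_RPC_TIMEOUT = 5.0

async def heartbeat_loop(heartbeat_rpc, session_id):
    while True:
        try:
            # Fail fast if the manager is unreachable so the caller
            # can fail over to a different manager.
            resp = await asyncio.wait_for(
                heartbeat_rpc(session_id), timeout=DISPATCHER_RPC_TIMEOUT)
        except asyncio.TimeoutError:
            raise ConnectionError('manager unreachable, failing over')
        await asyncio.sleep(resp.period)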
90bf1fa1e614cd43e43b931a6ae02ea1ac748376 | diff --git a/tests/textfsm_test.py b/tests/textfsm_test.py
index <HASH>..<HASH> 100755
--- a/tests/textfsm_test.py
+++ b/tests/textfsm_test.py
@@ -97,6 +97,7 @@ class UnitTestFSM(unittest.TestCase):
self.assertEqual(r.line_op, 'Next')
self.assertEqual(r.new_state, '')
self.assertEqual(r.record_op, '')
+
# Line with record.
line = ' ^A beer called ${beer} -> Continue.Record'
r = textfsm.TextFSMRule(line)
@@ -104,6 +105,7 @@ class UnitTestFSM(unittest.TestCase):
self.assertEqual(r.line_op, 'Continue')
self.assertEqual(r.new_state, '')
self.assertEqual(r.record_op, 'Record')
+
# Line with new state.
line = ' ^A beer called ${beer} -> Next.NoRecord End'
r = textfsm.TextFSMRule(line)
@@ -120,6 +122,19 @@ class UnitTestFSM(unittest.TestCase):
self.assertRaises(textfsm.TextFSMTemplateError, textfsm.TextFSMRule,
' ^A beer called ${beer} -> Continue.Record $Hi')
+ def testRulePrefixes(self):
+ """Test valid and invalid rule prefixes."""
+
+ # Bad syntax tests.
+ for prefix in (' ', '.^', ' \t', ''):
+ f = StringIO('Value unused (.)\n\nStart\n' + prefix + 'A simple string.')
+ self.assertRaises(textfsm.TextFSMTemplateError, textfsm.TextFSM, f)
+
+ # Good syntax tests.
+ for prefix in (' ^', ' ^', '\t^'):
+ f = StringIO('Value unused (.)\n\nStart\n' + prefix + 'A simple string.')
+ self.assertIsNotNone(textfsm.TextFSM(f))
+
def testImplicitDefaultRules(self):
for line in (' ^A beer called ${beer} -> Record End',
diff --git a/textfsm/parser.py b/textfsm/parser.py
index <HASH>..<HASH> 100755
--- a/textfsm/parser.py
+++ b/textfsm/parser.py
@@ -800,8 +800,8 @@ class TextFSM(object):
if self.comment_regex.match(line):
continue
- # A rule within a state, starts with whitespace
- if not (line.startswith(' ^') or line.startswith('\t^')):
+ # A rule within a state, starts with 1 or 2 spaces, or a tab.
+ if not line.startswith((' ^', ' ^', '\t^')):
raise TextFSMTemplateError(
"Missing white space or carat ('^') before rule. Line: %s" %
self._line_num) | More flexible spaces before ^ in rule' (#<I>) | google_textfsm | train |
e075817e443e03b99effa0d7e558daf242946a7d | diff --git a/discovery/syncer.go b/discovery/syncer.go
index <HASH>..<HASH> 100644
--- a/discovery/syncer.go
+++ b/discovery/syncer.go
@@ -271,6 +271,10 @@ type GossipSyncer struct {
// number of queries.
rateLimiter *rate.Limiter
+ // syncedSignal is a channel that, if set, will be closed when the
+ // GossipSyncer reaches its terminal chansSynced state.
+ syncedSignal chan struct{}
+
sync.Mutex
quit chan struct{}
@@ -470,6 +474,13 @@ func (g *GossipSyncer) channelGraphSyncer() {
// This is our final terminal state where we'll only reply to
// any further queries by the remote peer.
case chansSynced:
+ g.Lock()
+ if g.syncedSignal != nil {
+ close(g.syncedSignal)
+ g.syncedSignal = nil
+ }
+ g.Unlock()
+
// If we haven't yet sent out our update horizon, and
// we want to receive real-time channel updates, we'll
// do so now.
@@ -1049,6 +1060,24 @@ func (g *GossipSyncer) syncState() syncerState {
return syncerState(atomic.LoadUint32(&g.state))
}
+// ResetSyncedSignal returns a channel that will be closed in order to serve as
+// a signal for when the GossipSyncer has reached its chansSynced state.
+func (g *GossipSyncer) ResetSyncedSignal() chan struct{} {
+ g.Lock()
+ defer g.Unlock()
+
+ syncedSignal := make(chan struct{})
+
+ syncState := syncerState(atomic.LoadUint32(&g.state))
+ if syncState == chansSynced {
+ close(syncedSignal)
+ return syncedSignal
+ }
+
+ g.syncedSignal = syncedSignal
+ return g.syncedSignal
+}
+
// ProcessSyncTransition sends a request to the gossip syncer to transition its
// sync type to a new one.
//
diff --git a/discovery/syncer_test.go b/discovery/syncer_test.go
index <HASH>..<HASH> 100644
--- a/discovery/syncer_test.go
+++ b/discovery/syncer_test.go
@@ -2140,3 +2140,53 @@ func TestGossipSyncerHistoricalSync(t *testing.T) {
t.Fatalf("expected to send a lnwire.QueryChannelRange message")
}
}
+
+// TestGossipSyncerSyncedSignal ensures that we receive a signal when a gossip
+// syncer reaches its terminal chansSynced state.
+func TestGossipSyncerSyncedSignal(t *testing.T) {
+ t.Parallel()
+
+ // We'll create a new gossip syncer and manually override its state to
+ // chansSynced.
+ _, syncer, _ := newTestSyncer(
+ lnwire.NewShortChanIDFromInt(10), defaultEncoding,
+ defaultChunkSize,
+ )
+ syncer.setSyncState(chansSynced)
+
+ // We'll go ahead and request a signal to be notified of when it reaches
+ // this state.
+ signalChan := syncer.ResetSyncedSignal()
+
+ // Starting the gossip syncer should cause the signal to be delivered.
+ syncer.Start()
+
+ select {
+ case <-signalChan:
+ case <-time.After(time.Second):
+ t.Fatal("expected to receive chansSynced signal")
+ }
+
+ syncer.Stop()
+
+ // We'll try this again, but this time we'll request the signal after
+ // the syncer is active and has already reached its chansSynced state.
+ _, syncer, _ = newTestSyncer(
+ lnwire.NewShortChanIDFromInt(10), defaultEncoding,
+ defaultChunkSize,
+ )
+
+ syncer.setSyncState(chansSynced)
+
+ syncer.Start()
+ defer syncer.Stop()
+
+ signalChan = syncer.ResetSyncedSignal()
+
+ // The signal should be delivered immediately.
+ select {
+ case <-signalChan:
+ case <-time.After(time.Second):
+ t.Fatal("expected to receive chansSynced signal")
+ }
+} | discovery: introduce GossipSyncer signal delivery of chansSynced state
In this commit, we introduce another feature to the GossipSyncer in
which it can deliver a signal to an external caller once it reaches its
terminal chansSynced state. This is yet to be used, but will serve
useful with a round-robin sync mechanism, where we wait for to finish
syncing with a specific peer before moving on to the next. | lightningnetwork_lnd | train |
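The closed-channel-as-one-shot-signal idiom translates to an Event in Python's threading; a caller that asks after the state has already been reached gets an event that is set immediately. A sketch with invented names:

import threading

class GossipSyncer:
    def __init__(self):
        self._lock = threading.Lock()
        self._synced = False
        self._signal = None

    def reset_synced_signal(self):
        with self._lock:
            event = threading.Event()
            if self._synced:
                event.set()           # already chansSynced: deliver immediately
            else:
                self._signal = event  # deliver later, on the state transition
            return event

    def _enter_chans_synced(self):
        with self._lock:
            self._synced = True
            if self._signal is not None:
                self._signal.set()
                self._signal = None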
6cd49efdf9bd3238802c2d8d00471776e4a64791 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100755
--- a/index.js
+++ b/index.js
@@ -206,19 +206,13 @@ FSWatcher.prototype._isIgnored = function(path, stats) {
/\..*\.(sw[px])$|\~$|\.subl.*\.tmp/.test(path)
) return true;
- if(!this._userIgnored){
+ if (!this._userIgnored) {
var cwd = this.options.cwd;
- var ignored;
- if(cwd){
- if(!this.options.ignored){
- ignored = [];
- } else {
- ignored = arrify(this.options.ignored).map(function(path){
- return isAbsolute(path) ? path : sysPath.join(cwd, path);
- });
- }
- } else {
- ignored = this.options.ignored;
+ var ignored = this.options.ignored;
+ if (cwd && ignored) {
+ ignored = arrify(ignored).map(function (path) {
+ return isAbsolute(path) ? path : sysPath.join(cwd, path);
+ });
}
this._userIgnored = anymatch(this._globIgnored
.concat(ignored)
@@ -394,10 +388,10 @@ FSWatcher.prototype.add = function(paths, _origAdd, _internal) {
paths = arrify(paths);
if (cwd) paths = paths.map(function(path) {
- if(isAbsolute(path)){
+ if (isAbsolute(path)) {
return path;
- } else if(path[0] == '!'){
- return '!'+sysPath.join(cwd, path.substr(1));
+ } else if (path[0] === '!') {
+ return '!' + sysPath.join(cwd, path.substring(1));
} else {
return sysPath.join(cwd, path);
}
@@ -432,7 +426,7 @@ FSWatcher.prototype.add = function(paths, _origAdd, _internal) {
next(err, res);
}.bind(this));
}.bind(this), function(error, results) {
- results.forEach(function(item){
+ results.forEach(function(item) {
if (!item) return;
this.add(sysPath.dirname(item), sysPath.basename(_origAdd || item));
}, this); | - Code style : missing spaces
- Refactor and simplify case where `cwd` and `ignored` options are used | paulmillr_chokidar | train |
45b17055a54115981e87382bb94d8c69e5d1969a | diff --git a/requirements.txt b/requirements.txt
index <HASH>..<HASH> 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -7,7 +7,7 @@ pathlib;python_version<'3.4'
python-dateutil==2.8.1
ratelimit==2.2.1
requests-file==1.5.1
-ruamel.yaml==0.16.12
+ruamel.yaml==0.16.13
six==1.15.0
tabulator[cchardet]==1.53.4
typing==3.7.4.3;python_version<'3.5'
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ requirements = ['basicauth',
'python-dateutil==2.8.1',
'ratelimit',
'requests-file',
- 'ruamel.yaml',
+ 'ruamel.yaml>=0.16.13',
'six>=1.15.0',
'tabulator[cchardet]>=1.53.4',
'typing;python_version<"3.5"',
diff --git a/src/hdx/utilities/saver.py b/src/hdx/utilities/saver.py
index <HASH>..<HASH> 100644
--- a/src/hdx/utilities/saver.py
+++ b/src/hdx/utilities/saver.py
@@ -9,19 +9,7 @@ import six
from ruamel.yaml import YAML, RoundTripRepresenter, add_representer, SafeRepresenter
-class UnPrettyRTRepresenter(RoundTripRepresenter):
- def represent_none(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')
-
-
-class UnPrettySafeRepresenter(SafeRepresenter):
- def represent_none(self, data):
- # type: (Any) -> Any
- return self.represent_scalar(u'tag:yaml.org,2002:null', u'null')
-
-
-class PrettySafeRepresenter(SafeRepresenter):
+class CustomSafeRepresenter(SafeRepresenter):
def represent_none(self, data):
# type: (Any) -> Any
if len(self.represented_objects) == 0 and not self.serializer.use_explicit_start:
@@ -30,14 +18,6 @@ class PrettySafeRepresenter(SafeRepresenter):
return self.represent_scalar(u'tag:yaml.org,2002:null', "")
-UnPrettyRTRepresenter.add_representer(None, UnPrettyRTRepresenter.represent_none)
-UnPrettySafeRepresenter.add_representer(None, UnPrettySafeRepresenter.represent_none)
-PrettySafeRepresenter.add_representer(None, PrettySafeRepresenter.represent_none)
-
-
-representers = {False: {False: UnPrettyRTRepresenter, True: RoundTripRepresenter}, True: {False: UnPrettySafeRepresenter, True: PrettySafeRepresenter}}
-
-
def save_str_to_file(string, path, encoding='utf-8'):
# type: (str, str, str) -> None
"""Save string to file
@@ -71,9 +51,12 @@ def save_yaml(dictionary, path, encoding='utf-8', pretty=False, sortkeys=False):
None
"""
with open(path, 'w', encoding=encoding) as f:
- representer = representers[sortkeys][pretty]
yaml = YAML(typ='rt')
- yaml.Representer = representer
+ if sortkeys:
+ representer = CustomSafeRepresenter
+ yaml.Representer = representer
+ else:
+ representer = RoundTripRepresenter
add_representer(OrderedDict, representer.represent_dict, representer=representer)
if pretty:
            yaml.indent(offset=2) | Simplify YAML saving, based on a fix in the new ruamel.yaml version | OCHA-DAP_hdx-python-utilities | train
58ca5c06ca93b588c35f4eb1cc45d445be70027d | diff --git a/lib/dynflow/execution_plan/hooks.rb b/lib/dynflow/execution_plan/hooks.rb
index <HASH>..<HASH> 100644
--- a/lib/dynflow/execution_plan/hooks.rb
+++ b/lib/dynflow/execution_plan/hooks.rb
@@ -21,7 +21,7 @@ module Dynflow
# @param class_name [Class] class of the hook to be run
# @param on [Symbol, Array<Symbol>] when should the hook be run, one of {HOOK_KINDS}
# @return [void]
- def use(class_name, on: HOOK_KINDS)
+ def use(class_name, on: ExecutionPlan.states)
on = Array[on] unless on.kind_of?(Array)
validate_kinds!(on)
if hooks[class_name]
diff --git a/test/execution_plan_hooks_test.rb b/test/execution_plan_hooks_test.rb
index <HASH>..<HASH> 100644
--- a/test/execution_plan_hooks_test.rb
+++ b/test/execution_plan_hooks_test.rb
@@ -66,6 +66,12 @@ module Dynflow
execution_plan_hooks.use :raise_flag, :on => :pending
end
+ class AllTransitionsAction < ::Dynflow::Action
+ include FlagHook
+
+ execution_plan_hooks.use :raise_flag
+ end
+
class ComposedAction < RootOnlyAction
def plan
plan_action(RootOnlyAction)
@@ -182,6 +188,15 @@ module Dynflow
delayed_plan.execution_plan.cancel.each(&:wait)
_(Flag.raised_count).must_equal 1
end
+
+ it 'runs the hook on every state transition' do
+ refute Flag.raised?
+ plan = world.trigger(AllTransitionsAction)
+ plan.finished.wait!
+ # There should be 5 transitions
+ # nothing -> pending -> planning -> planned -> running -> stopped
+ _(Flag.raised_count).must_equal 5
+ end
end
end
end | Do not run hooks twice if not limited by states | Dynflow_dynflow | train |
26ccea19cce2262d09efd84575dbe5d22e63e0dc | diff --git a/django_socketio/events.py b/django_socketio/events.py
index <HASH>..<HASH> 100644
--- a/django_socketio/events.py
+++ b/django_socketio/events.py
@@ -50,7 +50,7 @@ class Event(object):
channel = re.compile(channel)
self.handlers.append((handler, channel))
- def send(self, request, socket, *args):
+ def send(self, request, socket, context, *args):
"""
When an event is sent, run all relevant handlers. Relevant
handlers are those without a channel pattern when the given
@@ -62,15 +62,15 @@ class Event(object):
no_channel = not pattern and not socket.channels
matches = [pattern.match(c) for c in socket.channels if pattern]
if no_channel or filter(None, matches):
- handler(request, socket, *args)
+ handler(request, socket, context, *args)
-on_connect = Event(False) # request, socket
-on_message = Event() # request, socket, message
-on_subscribe = Event() # request, socket, channel
-on_unsubscribe = Event() # request, socket, channel
-on_error = Event() # request, socket, exception
-on_disconnect = Event() # request, socket
-on_finish = Event() # request, socket
+on_connect = Event(False) # request, socket, context
+on_message = Event() # request, socket, context, message
+on_subscribe = Event() # request, socket, context, channel
+on_unsubscribe = Event() # request, socket, context, channel
+on_error = Event() # request, socket, context, exception
+on_disconnect = Event() # request, socket, context
+on_finish = Event() # request, socket, context
# Give each event a name attribute.
for k, v in globals().items():
diff --git a/django_socketio/views.py b/django_socketio/views.py
index <HASH>..<HASH> 100644
--- a/django_socketio/views.py
+++ b/django_socketio/views.py
@@ -39,30 +39,31 @@ def socketio(request):
which is used for sending on_finish events when the server
stops.
"""
+ context = {}
socket = SocketIOChannelProxy(request.environ["socketio"])
- CLIENTS[socket.session.session_id] = (request, socket)
+ CLIENTS[socket.session.session_id] = (request, socket, context)
try:
if socket.on_connect():
- events.on_connect.send(request, socket)
+ events.on_connect.send(request, socket, context)
while True:
message = socket.recv()
if len(message) > 0:
socket.handler.server.log.write(format_log(request, message))
if message[0] == "__subscribe__" and len(message) == 2:
socket.subscribe(message[1])
- events.on_subscribe.send(request, socket, message[1])
+ events.on_subscribe.send(request, socket, context, message[1])
elif message[0] == "__unsubscribe__" and len(message) == 2:
- events.on_unsubscribe.send(request, socket, message[1])
+ events.on_unsubscribe.send(request, socket, context, message[1])
socket.unsubscribe(message[1])
else:
- events.on_message.send(request, socket, message)
+ events.on_message.send(request, socket, context, message)
else:
if not socket.connected():
- events.on_disconnect.send(request, socket)
+ events.on_disconnect.send(request, socket, context)
break
except Exception, exception:
print_exc()
- events.on_error.send(request, socket, exception)
- events.on_finish.send(request, socket)
+ events.on_error.send(request, socket, context, exception)
+ events.on_finish.send(request, socket, context)
del CLIENTS[socket.session.session_id]
return HttpResponse("") | Added context arg to all events. | stephenmcd_django-socketio | train |
2217363845b07b155e7e095cca1b4915ce5da853 | diff --git a/pkg/registry/extensions/daemonset/BUILD b/pkg/registry/extensions/daemonset/BUILD
index <HASH>..<HASH> 100644
--- a/pkg/registry/extensions/daemonset/BUILD
+++ b/pkg/registry/extensions/daemonset/BUILD
@@ -25,6 +25,7 @@ go_library(
"//vendor:k8s.io/apimachinery/pkg/util/validation/field",
"//vendor:k8s.io/apiserver/pkg/endpoints/request",
"//vendor:k8s.io/apiserver/pkg/registry/generic",
+ "//vendor:k8s.io/apiserver/pkg/registry/rest",
"//vendor:k8s.io/apiserver/pkg/storage",
"//vendor:k8s.io/apiserver/pkg/storage/names",
],
@@ -40,6 +41,7 @@ go_test(
"//pkg/api/testapi:go_default_library",
"//pkg/api/testing:go_default_library",
"//pkg/apis/extensions:go_default_library",
+ "//vendor:k8s.io/apiserver/pkg/registry/rest",
],
)
diff --git a/pkg/registry/extensions/daemonset/strategy.go b/pkg/registry/extensions/daemonset/strategy.go
index <HASH>..<HASH> 100644
--- a/pkg/registry/extensions/daemonset/strategy.go
+++ b/pkg/registry/extensions/daemonset/strategy.go
@@ -26,6 +26,7 @@ import (
"k8s.io/apimachinery/pkg/util/validation/field"
genericapirequest "k8s.io/apiserver/pkg/endpoints/request"
"k8s.io/apiserver/pkg/registry/generic"
+ "k8s.io/apiserver/pkg/registry/rest"
"k8s.io/apiserver/pkg/storage"
"k8s.io/apiserver/pkg/storage/names"
"k8s.io/kubernetes/pkg/api"
@@ -42,6 +43,12 @@ type daemonSetStrategy struct {
// Strategy is the default logic that applies when creating and updating DaemonSet objects.
var Strategy = daemonSetStrategy{api.Scheme, names.SimpleNameGenerator}
+// DefaultGarbageCollectionPolicy returns Orphan because that was the default
+// behavior before the server-side garbage collection was implemented.
+func (daemonSetStrategy) DefaultGarbageCollectionPolicy() rest.GarbageCollectionPolicy {
+ return rest.OrphanDependents
+}
+
// NamespaceScoped returns true because all DaemonSets need to be within a namespace.
func (daemonSetStrategy) NamespaceScoped() bool {
return true
diff --git a/pkg/registry/extensions/daemonset/strategy_test.go b/pkg/registry/extensions/daemonset/strategy_test.go
index <HASH>..<HASH> 100644
--- a/pkg/registry/extensions/daemonset/strategy_test.go
+++ b/pkg/registry/extensions/daemonset/strategy_test.go
@@ -19,6 +19,7 @@ package daemonset
import (
"testing"
+ "k8s.io/apiserver/pkg/registry/rest"
_ "k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/testapi"
apitesting "k8s.io/kubernetes/pkg/api/testing"
@@ -33,3 +34,12 @@ func TestSelectableFieldLabelConversions(t *testing.T) {
nil,
)
}
+
+func TestDefaultGarbageCollectionPolicy(t *testing.T) {
+ // Make sure we correctly implement the interface.
+ // Otherwise a typo could silently change the default.
+ var gcds rest.GarbageCollectionDeleteStrategy = Strategy
+ if got, want := gcds.DefaultGarbageCollectionPolicy(), rest.OrphanDependents; got != want {
+ t.Errorf("DefaultGarbageCollectionPolicy() = %#v, want %#v", got, want)
+ }
+} | DaemonSet: Set DefaultGarbageCollectionPolicy to OrphanDependents.
Now that DaemonSet adds ControllerRef to Pods it creates,
we need to set this default so legacy behavior is maintained. | kubernetes_kubernetes | train |
334ae845968b135f01fe09c608fc374dd53258bd | diff --git a/lib/adapters/websql.js b/lib/adapters/websql.js
index <HASH>..<HASH> 100644
--- a/lib/adapters/websql.js
+++ b/lib/adapters/websql.js
@@ -30,7 +30,7 @@ var openDB = utils.getArguments(function (args) {
});
var POUCH_VERSION = 1;
-var POUCH_SIZE = 3000000;
+var POUCH_SIZE = 0; // doesn't matter as long as it's <= 5000000
var ADAPTER_VERSION = 2; // used to manage migrations
// The object stores created for each database | (#<I>) - reduce websql db size to 0 | pouchdb_pouchdb | train |
ba7c18595e579e102feb29ac1a9bfc30101b5232 | diff --git a/customer.go b/customer.go
index <HASH>..<HASH> 100644
--- a/customer.go
+++ b/customer.go
@@ -13,7 +13,7 @@ type CustomerParams struct {
Plan string
Quantity uint64
TrialEnd int64
- DefaultCard string
+ DefaultSource string
}
// SetSource adds valid sources to a CustomerParams object,
diff --git a/customer/client.go b/customer/client.go
index <HASH>..<HASH> 100644
--- a/customer/client.go
+++ b/customer/client.go
@@ -124,8 +124,8 @@ func (c Client) Update(id string, params *stripe.CustomerParams) (*stripe.Custom
body.Add("email", params.Email)
}
- if len(params.DefaultCard) > 0 {
- body.Add("default_card", params.DefaultCard)
+ if len(params.DefaultSource) > 0 {
+ body.Add("default_source", params.DefaultSource)
}
params.AppendTo(body)
} | Rename deprecated customer param value
The API was updated on <I>-<I>-<I> to remove the old Card and DefaultCard
parameters, in favour of Source and DefaultSource.
The codebase was updated in <I>e<I>d to reflect the new
changes to the API, but the customer params did not get updated at that
time. | stripe_stripe-go | train |
ee159ff3df64e653d61d01692cd1048335dcd4f8 | diff --git a/salt/renderers/gpg.py b/salt/renderers/gpg.py
index <HASH>..<HASH> 100644
--- a/salt/renderers/gpg.py
+++ b/salt/renderers/gpg.py
@@ -121,7 +121,9 @@ def decrypt_object(o, gpg):
o[k] = decrypt_object(v, gpg)
return o
elif isinstance(o, list):
- return [decrypt_object(e, gpg) for e in o]
+ for number, value in enumerate(o):
+ o[number] = decrypt_object(value, gpg)
+ return o
else:
return o | Apply fix from #<I> to the <I> branch
This code has been refactored in develop, so the original
pull request didn't backport cleanly. This applies the same fix
to the older branch(es). | saltstack_salt | train |
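The fix above swaps a list comprehension (which builds a new list) for enumerate-based assignment, so decryption happens in place on the list object the caller already holds. A minimal standalone sketch of the same recursion, with a dummy decrypt callable standing in for the gpg handle (illustrative, not the Salt renderer itself):

    def decrypt_object(o, decrypt):
        if isinstance(o, dict):
            for k, v in o.items():
                o[k] = decrypt_object(v, decrypt)
            return o
        elif isinstance(o, list):
            # assign back by index: the caller's list is mutated in place
            for number, value in enumerate(o):
                o[number] = decrypt_object(value, decrypt)
            return o
        else:
            return decrypt(o)

    data = {'secrets': ['enc:a', 'enc:b'], 'plain': 'x'}
    decrypt_object(data, lambda s: s.replace('enc:', ''))
    print(data)  # {'secrets': ['a', 'b'], 'plain': 'x'}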
b742731358dbfc6ba4d621c2f8eb2fbff8a8f153 | diff --git a/lib/mk_time/consts.rb b/lib/mk_time/consts.rb
index <HASH>..<HASH> 100644
--- a/lib/mk_time/consts.rb
+++ b/lib/mk_time/consts.rb
@@ -182,7 +182,8 @@ module MkTime
["20171130", 0.2],
["20180315", 0.1],
["20180921", 0.0],
- ["20181221", 0.0] # (<= Provisional end-point)
+ ["20190117", -0.1],
+ ["20190417", 0.0] # (<= Provisional end-point)
].freeze # DUT1 adjustment
end
end
diff --git a/lib/mk_time/version.rb b/lib/mk_time/version.rb
index <HASH>..<HASH> 100644
--- a/lib/mk_time/version.rb
+++ b/lib/mk_time/version.rb
@@ -1,3 +1,3 @@
module MkTime
- VERSION = "0.3.6"
+ VERSION = "0.3.7"
end | UPD: Added a new DUT1 adjustment to constants. | komasaru_mk_time | train |
9fe0b72dfd162de5a645ff1b86db66058b948d52 | diff --git a/src/main/java/com/yahoo/sketches/theta/DirectQuickSelectSketch.java b/src/main/java/com/yahoo/sketches/theta/DirectQuickSelectSketch.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/yahoo/sketches/theta/DirectQuickSelectSketch.java
+++ b/src/main/java/com/yahoo/sketches/theta/DirectQuickSelectSketch.java
@@ -161,8 +161,7 @@ final class DirectQuickSelectSketch extends DirectQuickSelectSketchR {
final ResizeFactor myRF = ResizeFactor.getRF(lgRF);
if ((myRF == ResizeFactor.X1)
&& (lgArrLongs != Util.startingSubMultiple(lgNomLongs + 1, myRF, MIN_LG_ARR_LONGS))) {
- throw new SketchesArgumentException("Possible corruption: ResizeFactor X1, but provided "
- + "array too small for sketch size");
+ insertLgResizeFactor(memObj, memAdd, ResizeFactor.X2.lg());
}
final DirectQuickSelectSketch dqss =
diff --git a/src/main/java/com/yahoo/sketches/theta/HeapQuickSelectSketch.java b/src/main/java/com/yahoo/sketches/theta/HeapQuickSelectSketch.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/yahoo/sketches/theta/HeapQuickSelectSketch.java
+++ b/src/main/java/com/yahoo/sketches/theta/HeapQuickSelectSketch.java
@@ -115,14 +115,13 @@ final class HeapQuickSelectSketch extends HeapUpdateSketch {
final float p = extractP(memObj, memAdd); //bytes 12-15
final int lgRF = extractLgResizeFactor(memObj, memAdd); //byte 0
- final ResizeFactor myRF = ResizeFactor.getRF(lgRF);
+ ResizeFactor myRF = ResizeFactor.getRF(lgRF);
final int familyID = extractFamilyID(memObj, memAdd);
final Family family = Family.idToFamily(familyID);
if (myRF == ResizeFactor.X1
&& lgArrLongs != Util.startingSubMultiple(lgNomLongs + 1, myRF, MIN_LG_ARR_LONGS)) {
- throw new SketchesArgumentException("Possible corruption: ResizeFactor X1, but provided "
- + "array too small for sketch size");
+ myRF = ResizeFactor.X2;
}
final HeapQuickSelectSketch hqss = new HeapQuickSelectSketch(lgNomLongs, seed, p, myRF,
diff --git a/src/test/java/com/yahoo/sketches/theta/DirectQuickSelectSketchTest.java b/src/test/java/com/yahoo/sketches/theta/DirectQuickSelectSketchTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/yahoo/sketches/theta/DirectQuickSelectSketchTest.java
+++ b/src/test/java/com/yahoo/sketches/theta/DirectQuickSelectSketchTest.java
@@ -723,12 +723,8 @@ public class DirectQuickSelectSketchTest {
usk.update(0);
insertLgResizeFactor(mem.getArray(), mem.getCumulativeOffset(0L), 0); // corrupt RF: X1
- try {
- DirectQuickSelectSketch.writableWrap(mem, DEFAULT_UPDATE_SEED);
- fail("Expected SketchesArgumentException");
- } catch (SketchesArgumentException e) {
- //Pass
- }
+ UpdateSketch dqss = DirectQuickSelectSketch.writableWrap(mem, DEFAULT_UPDATE_SEED);
+ assertEquals(dqss.getResizeFactor(), ResizeFactor.X2); // force-promote to X2
}
@Test
diff --git a/src/test/java/com/yahoo/sketches/theta/HeapQuickSelectSketchTest.java b/src/test/java/com/yahoo/sketches/theta/HeapQuickSelectSketchTest.java
index <HASH>..<HASH> 100644
--- a/src/test/java/com/yahoo/sketches/theta/HeapQuickSelectSketchTest.java
+++ b/src/test/java/com/yahoo/sketches/theta/HeapQuickSelectSketchTest.java
@@ -602,12 +602,8 @@ public class HeapQuickSelectSketchTest {
// force ResizeFactor.X1, but allocated capacity too small
insertLgResizeFactor(mem.getArray(), mem.getCumulativeOffset(0L), ResizeFactor.X1.lg());
- try {
- HeapQuickSelectSketch.heapifyInstance(mem, DEFAULT_UPDATE_SEED);
- fail();
- } catch (SketchesArgumentException e) {
- //expected
- }
+ UpdateSketch hqss = HeapQuickSelectSketch.heapifyInstance(mem, DEFAULT_UPDATE_SEED);
+ assertEquals(hqss.getResizeFactor(), ResizeFactor.X2); // force-promote to X2
}
private static void tryBadMem(WritableMemory mem, int byteOffset, int byteValue) { | change consistency check failure on resize factor to force-promote in X1 case | DataSketches_sketches-core | train |
6de98f58392dcbfe517a94f19bbbdc83191fe47a | diff --git a/peru/cache.py b/peru/cache.py
index <HASH>..<HASH> 100644
--- a/peru/cache.py
+++ b/peru/cache.py
@@ -164,12 +164,24 @@ class Cache:
self._git("update-ref", "--no-deref", "HEAD", dummy)
return dummy
+ def _throw_if_dirty(self, tree, path):
+ modified, deleted = self.tree_status(tree, path)
+ if modified or deleted:
+ message = "Imports are dirty. Giving up."
+ if modified:
+ message += "\n\nModified:\n " + "\n ".join(sorted(modified))
+ if deleted:
+ message += "\n\nDeleted:\n " + "\n ".join(sorted(deleted))
+ raise RuntimeError(message)
+
# TODO: This method needs to take a filesystem lock. Probably all of them
# do.
def export_tree(self, tree, dest, previous_tree=None):
if not os.path.exists(dest):
os.makedirs(dest)
+ self._throw_if_dirty(previous_tree, dest)
+
next_commit = self._dummy_commit(tree)
self._checkout_dummy_commit(previous_tree)
self._git("checkout", next_commit, work_tree=dest) | use tree_status() to do our own cleanliness checking | buildinspace_peru | train |
6a5d1c765fc82f4d4e32f5bc41a78cac4f62ef2b | diff --git a/actionpack/lib/action_dispatch/routing/mapper.rb b/actionpack/lib/action_dispatch/routing/mapper.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_dispatch/routing/mapper.rb
+++ b/actionpack/lib/action_dispatch/routing/mapper.rb
@@ -444,9 +444,10 @@ module ActionDispatch
raise "A rack application must be specified" unless path
- options[:as] ||= app_name(app)
+ options[:as] ||= app_name(app)
+ options[:via] ||= :all
- match(path, options.merge(:to => app, :anchor => false, :format => false, :via => :all))
+ match(path, options.merge(:to => app, :anchor => false, :format => false))
define_generate_prefix(app, options[:as])
self
diff --git a/actionpack/test/dispatch/mount_test.rb b/actionpack/test/dispatch/mount_test.rb
index <HASH>..<HASH> 100644
--- a/actionpack/test/dispatch/mount_test.rb
+++ b/actionpack/test/dispatch/mount_test.rb
@@ -22,6 +22,7 @@ class TestRoutingMount < ActionDispatch::IntegrationTest
mount SprocketsApp => "/shorthand"
mount FakeEngine, :at => "/fakeengine"
+ mount FakeEngine, :at => "/getfake", :via => :get
scope "/its_a" do
mount SprocketsApp, :at => "/sprocket"
@@ -52,6 +53,14 @@ class TestRoutingMount < ActionDispatch::IntegrationTest
assert_equal "/shorthand -- /omg", response.body
end
+ def test_mounting_works_with_via
+ get "/getfake"
+ assert_equal "OK", response.body
+
+ post "/getfake"
+ assert_response :not_found
+ end
+
def test_with_fake_engine_does_not_call_invalid_method
get "/fakeengine"
assert_equal "OK", response.body | Make sure :via works with mount | rails_rails | train |
6197a5f4eb74e0c993b025440d257c4058f092e9 | diff --git a/lib/runner.js b/lib/runner.js
index <HASH>..<HASH> 100644
--- a/lib/runner.js
+++ b/lib/runner.js
@@ -64,7 +64,7 @@ function runner (opts) {
});
};
return Promise.map(times(opts.count), () => {
- return Promise.map(opts.url, iterator, { concurrency: opts.parallel ? 2 : 1 });
+ return Promise.map(opts.url, iterator, { concurrency: opts.parallel ? opts.url.length : 1 });
}, { concurrency: 1 })
.then((results) => zip(results));
} | Use url length as concurrency limit | newsuk_timeliner | train |
eadb9d062500b3ae77c5633f03b030ae9140901f | diff --git a/charmhelpers/core/hookenv.py b/charmhelpers/core/hookenv.py
index <HASH>..<HASH> 100644
--- a/charmhelpers/core/hookenv.py
+++ b/charmhelpers/core/hookenv.py
@@ -20,6 +20,7 @@
# Authors:
# Charm Helpers Developers <[email protected]>
+from __future__ import print_function
import os
import json
import yaml
@@ -89,15 +90,17 @@ def log(message, level=None):
message = repr(message)
command += [message]
# Missing juju-log should not cause failures in unit tests
+ # Send log output to stderr
try:
subprocess.call(command)
except OSError as e:
if e.errno == errno.ENOENT:
- pass
+ if level:
+ message = "{}: {}".format(level, message)
+ message = "juju-log: {}".format(message)
+ print(message, file=sys.stderr)
else:
raise
- except:
- raise
class Serializable(UserDict): | Send log output to stderr if juju-log is not available | juju_charm-helpers | train |
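The pattern in this commit is graceful degradation: try the platform logger, and only when the binary itself is missing (ENOENT) fall back to stderr, re-raising every other OSError. A self-contained sketch of the same shape; the exact juju-log command-line arguments here are an assumption for illustration:

    from __future__ import print_function
    import errno
    import subprocess
    import sys

    def log(message, level=None):
        command = ['juju-log']         # assumed binary name, as in the diff
        if level:
            command += ['-l', level]   # assumed flag for the log level
        command += [message]
        try:
            subprocess.call(command)
        except OSError as e:
            if e.errno == errno.ENOENT:
                # binary missing (e.g. unit tests): degrade to stderr
                if level:
                    message = "{}: {}".format(level, message)
                print("juju-log: {}".format(message), file=sys.stderr)
            else:
                raise

    log("hello", level="INFO")  # prints to stderr when juju-log is absent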
fd62570ad9281a19ba5da2fde4e2360de0648f1e | diff --git a/v3/jest.config.js b/v3/jest.config.js
index <HASH>..<HASH> 100644
--- a/v3/jest.config.js
+++ b/v3/jest.config.js
@@ -2,9 +2,6 @@ const { defaults } = require("jest-config");
module.exports = {
testEnvironment: "node",
- setupFiles: [
- "<rootDir>/../apollo-server-env/dist/index.js"
- ],
preset: "ts-jest",
testMatch: null,
testRegex: "/__tests__/.*\\.test\\.(js|ts)$", | Remove `setupFiles`, currently only necessitated by `apollo-server-env`.
The `apollo-server-env` package will not be included in `@apollo/server`. | apollographql_apollo-server | train |
3060dfce5a45dac0d19ec3d73af3cdebdf0bb20e | diff --git a/railties/lib/rails/generators/rails/app/app_generator.rb b/railties/lib/rails/generators/rails/app/app_generator.rb
index <HASH>..<HASH> 100644
--- a/railties/lib/rails/generators/rails/app/app_generator.rb
+++ b/railties/lib/rails/generators/rails/app/app_generator.rb
@@ -327,6 +327,10 @@ module Rails
argv
end
+ def self.default_rc_file
+ File.join(File.expand_path('~'), '.railsrc')
+ end
+
private
def handle_version_request!(argument)
@@ -353,7 +357,7 @@ module Rails
if (customrc = argv.index{ |x| x.include?("--rc=") })
File.expand_path(argv.delete_at(customrc).gsub(/--rc=/, ""))
else
- File.join(File.expand_path("~"), '.railsrc')
+ self.class.default_rc_file
end
end
diff --git a/railties/test/generators/argv_scrubber_test.rb b/railties/test/generators/argv_scrubber_test.rb
index <HASH>..<HASH> 100644
--- a/railties/test/generators/argv_scrubber_test.rb
+++ b/railties/test/generators/argv_scrubber_test.rb
@@ -1,6 +1,7 @@
require 'active_support/test_case'
require 'active_support/testing/autorun'
require 'rails/generators/rails/app/app_generator'
+require 'tempfile'
module Rails
module Generators
@@ -14,7 +15,7 @@ module Rails
define_method(:puts) { |str| output = str }
define_method(:exit) { |code| exit_code = code }
})
- scrubber.prepare
+ scrubber.prepare!
assert_equal "Rails #{Rails::VERSION::STRING}", output
assert_equal 0, exit_code
end
@@ -22,15 +23,54 @@ module Rails
def test_prepare_returns_args
scrubber = ARGVScrubber.new ['hi mom']
- args = scrubber.prepare
+ args = scrubber.prepare!
assert_equal '--help', args.first
end
def test_no_mutations
scrubber = ARGVScrubber.new ['hi mom'].freeze
- args = scrubber.prepare
+ args = scrubber.prepare!
assert_equal '--help', args.first
end
+
+ def test_new_command_no_rc
+ scrubber = Class.new(ARGVScrubber) {
+ def self.default_rc_file
+ File.join(Dir.tmpdir, 'whatever')
+ end
+ }.new ['new']
+ args = scrubber.prepare!
+ assert_nil args.first
+ assert_equal [], args
+ end
+
+ def test_new_homedir_rc
+ file = Tempfile.new 'myrcfile'
+ file.puts '--hello-world'
+ file.flush
+
+ message = nil
+ scrubber = Class.new(ARGVScrubber) {
+ define_singleton_method(:default_rc_file) do
+ file.path
+ end
+ define_method(:puts) { |msg| message = msg }
+ }.new ['new']
+ args = scrubber.prepare!
+ assert_nil args.first
+ assert_equal [nil, '--hello-world'], args
+ assert_match 'hello-world', message
+ assert_match file.path, message
+ ensure
+ file.close
+ file.unlink
+ end
+
+ def test_no_rc
+ scrubber = ARGVScrubber.new ['new', '--no-rc']
+ args = scrubber.prepare!
+ assert_equal [], args
+ end
end
end
end | test some of the rc specification | rails_rails | train |
01a3e92b7489a0e27b0d93ad2e6d881ae053a760 | diff --git a/spyderlib/widgets/pathmanager.py b/spyderlib/widgets/pathmanager.py
index <HASH>..<HASH> 100644
--- a/spyderlib/widgets/pathmanager.py
+++ b/spyderlib/widgets/pathmanager.py
@@ -22,13 +22,14 @@ from spyderlib.utils.qthelpers import get_std_icon, create_toolbutton
from spyderlib.config import get_icon
-#TODO: Add an export button to configure environment variables outside Spyder
class PathManager(QDialog):
- def __init__(self, parent=None, pathlist=None):
+ def __init__(self, parent=None, pathlist=None, ro_pathlist=None):
QDialog.__init__(self, parent)
assert isinstance(pathlist, list)
+ assert isinstance(ro_pathlist, list)
self.pathlist = pathlist
+ self.ro_pathlist = ro_pathlist
self.last_path = os.getcwdu()
@@ -137,26 +138,29 @@ class PathManager(QDialog):
listdict2envdict)
env = get_user_env()
if remove:
- ppath = self.pathlist
+ ppath = self.pathlist+self.ro_pathlist
else:
ppath = env.get('PYTHONPATH', [])
if not isinstance(ppath, list):
ppath = [ppath]
- ppath = [path for path in ppath if path not in self.pathlist]
- ppath.extend(self.pathlist)
+ ppath = [path for path in ppath
+ if path not in (self.pathlist+self.ro_pathlist)]
+ ppath.extend(self.pathlist+self.ro_pathlist)
env['PYTHONPATH'] = ppath
set_user_env( listdict2envdict(env) )
def get_path_list(self):
- """Return path list"""
+ """Return path list (does not include the read-only path list)"""
return self.pathlist
def update_list(self):
"""Update path list"""
self.listwidget.clear()
- for name in self.pathlist:
+ for name in self.pathlist+self.ro_pathlist:
item = QListWidgetItem(name)
item.setIcon(get_std_icon('DirClosedIcon'))
+ if name in self.ro_pathlist:
+ item.setFlags(Qt.NoItemFlags)
self.listwidget.addItem(item)
self.refresh()
@@ -217,9 +221,9 @@ def test():
"""Run path manager test"""
from spyderlib.utils.qthelpers import qapplication
_app = qapplication()
- test = PathManager(None, sys.path)
- if test.exec_():
- print test.get_path_list()
+ test = PathManager(None, sys.path[:-10], sys.path[-10:])
+ test.exec_()
+ print test.get_path_list()
if __name__ == "__main__":
test() | PathManager: added support for a read-only path list (to be used by the forthcoming Project feature) | spyder-ide_spyder | train |
71917881ca91bf462ed4cd15e0b1fc1bb39853d7 | diff --git a/law/contrib/slack/parameter.py b/law/contrib/slack/parameter.py
index <HASH>..<HASH> 100644
--- a/law/contrib/slack/parameter.py
+++ b/law/contrib/slack/parameter.py
@@ -22,9 +22,6 @@ class NotifySlackParameter(NotifyParameter):
@staticmethod
def notify(success, title, content, **kwargs):
- # test import
- import slackclient # noqa: F401
-
# overwrite title with slack markdown markup
title = "*Notification from* `{}`".format(content["Task"])
del content["Task"]
diff --git a/law/contrib/telegram/parameter.py b/law/contrib/telegram/parameter.py
index <HASH>..<HASH> 100644
--- a/law/contrib/telegram/parameter.py
+++ b/law/contrib/telegram/parameter.py
@@ -22,9 +22,6 @@ class NotifyTelegramParameter(NotifyParameter):
@staticmethod
def notify(success, title, content, **kwargs):
- # test import
- import telegram # noqa: F401
-
# overwrite title with telegram markdown markup
title = "*Notification from* `{}`".format(content["Task"])
del content["Task"] | Remove test imports in telegram and slack notification parameters. | riga_law | train |
e200e4811b69cc74f97d485843cc4d35ca11b56a | diff --git a/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketRstTest.java b/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketRstTest.java
index <HASH>..<HASH> 100644
--- a/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketRstTest.java
+++ b/testsuite/src/main/java/io/netty/testsuite/transport/socket/SocketRstTest.java
@@ -35,7 +35,8 @@ import static org.junit.Assert.assertTrue;
public class SocketRstTest extends AbstractSocketTest {
protected void assertRstOnCloseException(IOException cause, Channel clientChannel) {
if (Locale.getDefault() == Locale.US || Locale.getDefault() == Locale.UK) {
- assertTrue("actual message: " + cause.getMessage(), cause.getMessage().contains("reset"));
+ assertTrue("actual message: " + cause.getMessage(),
+ cause.getMessage().contains("reset") || cause.getMessage().contains("closed"));
}
} | SocketRstTest fails due to exception message check
Motivation:
For lack of a better way, the SocketRstTest inspects the content of the exception message to check whether an RST occurred. However, on Windows the exception message differs from the one on Unix-based platforms, so the assertion fails.
Modifications:
- Hack another string check in the unit test
Result:
SocketRstTest passes on Windows
Fixes <URL> | netty_netty | train |
12b76a005d54bfc61d4323273bc695941beedb93 | diff --git a/src/app/Classes/Validators/ContentValidator.php b/src/app/Classes/Validators/ContentValidator.php
index <HASH>..<HASH> 100644
--- a/src/app/Classes/Validators/ContentValidator.php
+++ b/src/app/Classes/Validators/ContentValidator.php
@@ -128,7 +128,7 @@ class ContentValidator extends AbstractValidator
$sheet = $this->getSheet($sheetName);
$found = $sheet->pluck($column)->search(function ($columnValue, $index) use ($value, $rowNumber) {
- return $value === $columnValue && $index + 2 !== $rowNumber;
+ return trim($value) === trim($columnValue) && $index + 2 !== $rowNumber;
});
if ($found === false) { | we are trimming down strings now when we compare for unique_in_column | laravel-enso_DataImport | train |
c159e83953cf28a1998605fd4c20d567ae253a6f | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -1,8 +1,10 @@
-module.exports.solve = solve;
-module.exports.getInDegree = getInDegree;
-module.exports.getEdges = getEdges;
+module.exports.solve = solve;
+module.exports.getInDegree = getInDegree;
+module.exports.getEdges = getEdges;
+module.exports.addMissingKeys = addMissingKeys;
-function solve(graph) {
+function solve(g) {
+ let graph = addMissingKeys(g);
let edges = getEdges(graph);
let inDegree = getInDegree(graph, edges);
@@ -73,4 +75,17 @@ function getEdges(graph) {
}
}
return edges;
+}
+
+function addMissingKeys(graph) {
+ // Add all the missing keys to the graph as nodes with no in-degrees
+ for(let key in graph) {
+ for(let [index, value] of graph[key].entries()) {
+ if(graph[value] === undefined) {
+ graph[value] = [];
+ }
+ }
+ }
+
+ return graph;
}
\ No newline at end of file | added a fix for nodes with missing keys | haavistu_dependency-solver | train |
33ff9297c77fc937062f58480177b2a14e97404e | diff --git a/src/Forms/FormField.php b/src/Forms/FormField.php
index <HASH>..<HASH> 100644
--- a/src/Forms/FormField.php
+++ b/src/Forms/FormField.php
@@ -300,6 +300,32 @@ class FormField implements FormElement
}
/**
+ * Get the fixed error message
+ * @retur array The fixed error message
+ */
+ public function getFixedError()
+ {
+ return $this->fixed_error;
+ }
+
+ /**
+ * Set the form field with an error state. This will override the
+ * fixed error.
+ *
+ * @param array $msg The error message. Must contain a 'msg' key.
+ * @return FormField provides fluent interface
+ */
+ public function setError(array $msg)
+ {
+ if (!isset($msg['msg']))
+ throw new InvalidArgumentException("Error message does not contain msg key");
+
+ $this->errors = [$msg];
+ $this->fixed_error = $msg;
+ return $this;
+ }
+
+ /**
* @return string The name of the field
*/
public function getName(bool $strip_array = false) | Allow overriding of error in form field | Wedeto_HTTP | train |
9f10a58e6a9fa8726fa033008f203ff577e1fc03 | diff --git a/tweepy/mixins.py b/tweepy/mixins.py
index <HASH>..<HASH> 100644
--- a/tweepy/mixins.py
+++ b/tweepy/mixins.py
@@ -25,6 +25,9 @@ class HashableID(EqualityComparableID):
class DataMapping(Mapping):
__slots__ = ()
+ def __contains__(self, item):
+ return item in self.data
+
def __getattr__(self, name):
try:
return self.data[name] | Override Mapping.__contains__ in DataMapping
This allows membership tests to check for existence within data, rather than existence of the attribute at all | tweepy_tweepy | train |
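For context, collections.abc.Mapping's default __contains__ routes through __getitem__; delegating straight to the wrapped dict makes `key in obj` answer about the payload. A runnable sketch of the override (the constructor here is illustrative — tweepy's subclasses set `data` themselves):

    from collections.abc import Mapping

    class DataMapping(Mapping):
        def __init__(self, data):
            self.data = data

        def __contains__(self, item):   # the override this commit adds
            return item in self.data

        def __getitem__(self, key):
            return self.data[key]

        def __iter__(self):
            return iter(self.data)

        def __len__(self):
            return len(self.data)

    obj = DataMapping({'id': 1})
    print('id' in obj, 'name' in obj)   # True False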
2f8766349ac7936f36e9e8fcc7864d0a57ef5ca7 | diff --git a/includes/class-freemius.php b/includes/class-freemius.php
index <HASH>..<HASH> 100755
--- a/includes/class-freemius.php
+++ b/includes/class-freemius.php
@@ -12154,14 +12154,82 @@
// We have to set the user before getting user scope API handler.
$this->_user = $user;
+ // Install the plugin.
+ $result = $this->create_installs_with_user(
+ $user,
+ $license_key,
+ $trial_plan_id,
+ $sites,
+ $redirect
+ );
+
+ if ( ! $this->is_api_result_entity( $result ) &&
+ ! $this->is_api_result_object( $result, 'installs' )
+ ) {
+ // @todo Handler potential API error of the $result
+ }
+
+ if ( empty( $sites ) ) {
+ $site = new FS_Site( $result );
+ $this->_site = $site;
+
+ if ( ! $setup_account ) {
+ $this->_store_site();
+
+ $this->sync_plan_if_not_exist( $site->plan_id );
+
+ if ( ! empty( $license_key ) && FS_Plugin_License::is_valid_id( $site->license_id ) ) {
+ $this->sync_license_if_not_exist( $site->license_id, $license_key );
+ }
+
+ return $site;
+ }
+
+ return $this->setup_account( $this->_user, $this->_site, $redirect );
+ } else {
+ $installs = array();
+ foreach ( $result->installs as $install ) {
+ $installs[] = new FS_Site( $install );
+ }
+
+ return $this->setup_network_account(
+ $user,
+ $installs,
+ $redirect
+ );
+ }
+ }
+
+ /**
+ * Initiate an API request to create a collection of installs.
+ *
+ * @author Vova Feldman (@svovaf)
+ * @since 2.0.0
+ *
+ * @param \FS_User $user
+ * @param bool $license_key
+ * @param bool $trial_plan_id
+ * @param array $sites
+ * @param bool $redirect
+ * @param bool $silent
+ *
+ * @return object|mixed
+ */
+ private function create_installs_with_user(
+ FS_User $user,
+ $license_key = false,
+ $trial_plan_id = false,
+ $sites = array(),
+ $redirect = false,
+ $silent = false
+ ) {
$extra_install_params = array(
'uid' => $this->get_anonymous_id(),
'is_disconnected' => false,
);
if ( ! empty( $license_key ) ) {
- $filtered_license_key = $this->apply_filters( 'license_key', $license_key );
- $extra_install_params['license_key'] = $filtered_license_key;
+ $extra_install_params['license_key'] = $this->apply_filters( 'license_key', $license_key );
} else if ( FS_Plugin_Plan::is_valid_id( $trial_plan_id ) ) {
$extra_install_params['trial_plan_id'] = $trial_plan_id;
}
@@ -12173,13 +12241,15 @@
$args = $this->get_install_data_for_api( $extra_install_params, false, false );
// Install the plugin.
- $result = $this->get_api_user_scope()->call(
+ $result = $this->get_api_user_scope_by_user( $user )->call(
"/plugins/{$this->get_id()}/installs.json",
'post',
$args
);
- if ( ! $this->is_api_result_entity( $result ) && ! $this->is_api_result_object( $result, 'installs' ) ) {
+ if ( ! $this->is_api_result_entity( $result ) &&
+ ! $this->is_api_result_object( $result, 'installs' )
+ ) {
if ( ! empty( $args['license_key'] ) ) {
// Pass full the fully entered license key to the failure handler.
$args['license_key'] = $license_key;
@@ -12187,12 +12257,14 @@
$result = $this->apply_filters( 'after_install_failure', $result, $args );
+ if ( ! $silent ) {
$this->_admin_notices->add(
sprintf( $this->get_text_inline( 'Couldn\'t activate %s.', 'could-not-activate-x' ), $this->get_plugin_name() ) . ' ' .
$this->get_text_inline( 'Please contact us with the following message:', 'contact-us-with-error-message' ) . ' ' . '<b>' . $result->error->message . '</b>',
$this->get_text_x_inline( 'Oops', 'exclamation', 'oops' ) . '...',
'error'
);
+ }
if ( $redirect ) {
/**
@@ -12209,36 +12281,9 @@
fs_redirect( $this->get_activation_url( array( 'error' => $result->error->message ) ) );
}
-
- return $result;
}
- if ( empty( $sites ) ) {
- $site = new FS_Site( $result );
- $this->_site = $site;
-
- if ( ! $setup_account ) {
- $this->_store_site();
-
- $this->sync_plan_if_not_exist( $site->plan_id );
-
- if ( ! empty( $license_key ) && FS_Plugin_License::is_valid_id( $site->license_id ) ) {
- $this->sync_license_if_not_exist( $site->license_id, $license_key );
- }
-
- return $site;
- }
-
- return $this->setup_account( $this->_user, $this->_site, $redirect );
- } else {
- $this->install_many_with_new_user(
- $this->_user->id,
- $this->_user->public_key,
- $this->_user->secret_key,
- $result->installs,
- $redirect
- );
- }
+ return $result;
}
[multisite] [bulk-installs] Moved the bulk installs backend creation into a separate helper method for code reusability. | Freemius_wordpress-sdk | train
adbf25eddb7bcd65b2534a018daf899645670b0c | diff --git a/test/resolveGitDir.spec.js b/test/resolveGitDir.spec.js
index <HASH>..<HASH> 100644
--- a/test/resolveGitDir.spec.js
+++ b/test/resolveGitDir.spec.js
@@ -2,14 +2,17 @@ import path from 'path'
import resolveGitDir from '../src/resolveGitDir'
describe('resolveGitDir', () => {
- it('should resolve to current working dir if not set in config', () => {
- const expected = path.resolve(process.cwd())
+ it('should resolve to current working dir when .git is in the same dir', () => {
+ const expected = process.cwd()
expect(resolveGitDir()).toEqual(expected)
- expect(path.isAbsolute(resolveGitDir())).toBe(true)
})
- it('should resolve to current working dir if set to default', () => {
- const expected = path.resolve(process.cwd())
- expect(resolveGitDir()).toEqual(expected)
- expect(path.isAbsolute(resolveGitDir())).toBe(true)
+
+ it('should resolve to the parent dir when .git is in the parent dir', () => {
+ const expected = path.dirname(__dirname)
+ const processCwdBkp = process.cwd
+ process.cwd = () => __dirname
+ // path.resolve to strip trailing slash
+ expect(path.resolve(resolveGitDir())).toEqual(expected)
+ process.cwd = processCwdBkp
})
}) | test: Improve tests for resolveGitDir (#<I>)
Remove a duplicate test and add a test for the case when .git dir is not in the
project root. | okonet_lint-staged | train |
e8ebd16dac59b3eb9f7fafc8aa69fbb89633ae2d | diff --git a/lib/cmd/query.js b/lib/cmd/query.js
index <HASH>..<HASH> 100644
--- a/lib/cmd/query.js
+++ b/lib/cmd/query.js
@@ -29,7 +29,7 @@ class Query extends Parser {
if (opts.logger.query)
opts.logger.query(`QUERY: ${opts.logger.logParam ? this.displaySql() : this.sql}`);
this.onPacketReceive = this.readResponsePacket;
- if (!this.initialValues) {
+ if (this.initialValues === undefined) {
//shortcut if no parameters
out.startPacket(this);
out.writeInt8(0x03);
diff --git a/test/integration/test-pool.js b/test/integration/test-pool.js
index <HASH>..<HASH> 100644
--- a/test/integration/test-pool.js
+++ b/test/integration/test-pool.js
@@ -108,6 +108,20 @@ describe('Pool', () => {
pool.end();
});
+ it('query with null placeholder', async function () {
+ const pool = base.createPool({ connectionLimit: 1 });
+ let rows = await pool.query('select ? as a', [null]);
+ assert.deepEqual(rows, [{ a: null }]);
+ await pool.end();
+ });
+
+ it('query with null placeholder no array', async function () {
+ const pool = base.createPool({ connectionLimit: 1 });
+ let rows = await pool.query('select ? as a', null);
+ assert.deepEqual(rows, [{ a: null }]);
+ await pool.end();
+ });
+
it('pool with wrong authentication', function (done) {
if (process.env.srv === 'maxscale' || process.env.srv === 'skysql-ha') this.skip(); //to avoid host beeing blocked
this.timeout(10000);
diff --git a/test/integration/test-query.js b/test/integration/test-query.js
index <HASH>..<HASH> 100644
--- a/test/integration/test-query.js
+++ b/test/integration/test-query.js
@@ -20,6 +20,16 @@ describe('basic query', () => {
.catch(done);
});
+ it('query with null placeholder', async function () {
+ let rows = await shareConn.query('select ? as a', [null]);
+ assert.deepEqual(rows, [{ a: null }]);
+ });
+
+ it('query with null placeholder no array', async function () {
+ let rows = await shareConn.query('select ? as a', null);
+ assert.deepEqual(rows, [{ a: null }]);
+ });
+
it('parameter last', async () => {
const value = "'`\\";
const conn = await base.createConnection(); | [CONJS-<I>] permitting providing null as a value without an array | MariaDB_mariadb-connector-nodejs | train |
8af9adc1fc982b10e1fd939810e96ec19c2fdab5 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -5,10 +5,9 @@ function removeElement(array, index) {
if (location !== -1) {
array.splice(location, 1);
- return array;
}
- return [];
+ return array;
}
module.exports = removeElement;
diff --git a/test.js b/test.js
index <HASH>..<HASH> 100644
--- a/test.js
+++ b/test.js
@@ -9,4 +9,8 @@ describe('tests for checkign removal of index from orderd array', function() {
it('should return array without element "c" in orderd form', function() {
expect(remove(['a', 'b', 'c', 'd'], 'c')).to.eql(['a', 'b', 'd']);
});
+
+ it('should return original array if element not found in array', function() {
+ expect(remove(['a', 'b', 'c', 'd'], 'z')).to.eql(['a', 'b', 'c', 'd']);
+ });
removed the logic that returned an empty array when the element is not found | vikramcse_orderd-array-remove | train
0f200ca52990253d3e9635e4a0c877250a13e39a | diff --git a/lib/generators/doorkeeper/templates/initializer.rb b/lib/generators/doorkeeper/templates/initializer.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/doorkeeper/templates/initializer.rb
+++ b/lib/generators/doorkeeper/templates/initializer.rb
@@ -57,4 +57,11 @@ Doorkeeper.configure do
# (Similar behaviour: https://developers.google.com/accounts/docs/OAuth2InstalledApp#choosingredirecturi)
#
# test_redirect_uri 'urn:ietf:wg:oauth:2.0:oob'
+
+ # Under some circumstances you might want to have applications auto-approved,
+ # so that the user skips the authorization step.
+ # For example if dealing with trusted a application.
+ # skip_authorization do |resource_owner, client|
+ # client.superapp? or resource_owner.admin?
+ # end
end | added skip_authorization to template initializer | doorkeeper-gem_doorkeeper | train |
8c50c2aa49a913fb4dbcfdc4a1e0b944441a97b0 | diff --git a/system-test/storage.js b/system-test/storage.js
index <HASH>..<HASH> 100644
--- a/system-test/storage.js
+++ b/system-test/storage.js
@@ -60,6 +60,13 @@ describe('storage', function() {
});
after(function(done) {
+ if (env.projectId === 'long-door-651') {
+ // The system tests account is unable to delete files.
+ // RE: https://github.com/GoogleCloudPlatform/google-cloud-node/issues/2224
+ done();
+ return;
+ }
+
storage.getBuckets({
prefix: TESTS_PREFIX
}, function(err, buckets) {
@@ -537,15 +544,17 @@ describe('storage', function() {
it('should test the iam permissions', function(done) {
var testPermissions = [
'storage.buckets.get',
- 'storage.buckets.getIamPolicy'
+
+ // Unable to test.
+ // RE: https://github.com/GoogleCloudPlatform/google-cloud-node/issues/2224
+ // 'storage.buckets.getIamPolicy'
];
bucket.iam.testPermissions(testPermissions, function(err, permissions) {
assert.ifError(err);
assert.deepEqual(permissions, {
- 'storage.buckets.get': true,
- 'storage.buckets.getIamPolicy': true
+ 'storage.buckets.get': true
});
done(); | storage: tests: remove things the system tests account cannot do (#<I>) | googleapis_nodejs-storage | train |
977e6438c5976fb9099209d9308a92b327c34abf | diff --git a/pyam_analysis/core.py b/pyam_analysis/core.py
index <HASH>..<HASH> 100644
--- a/pyam_analysis/core.py
+++ b/pyam_analysis/core.py
@@ -219,11 +219,11 @@ class IamDataFrame(object):
see function select() for details
comment: str
a comment pertaining to the category
- assign: boolean (default True)
+ assign: boolean, default True
assign categorization to data (if false, display only)
color: str
assign a color to this category
- display: str or None (default None)
+ display: str or None, default None
display style of scenarios assigned to this category (list, pivot)
(no display if None)
"""
@@ -405,11 +405,11 @@ class IamDataFrame(object):
idx_cols: str or list of strings, optional
list of index columns to display
(summing over non-selected columns)
- color_by_cat: boolean, default: False
+ color_by_cat: boolean, default False
use category coloring scheme, replace full legend by category
save: str, optional
filename for export of figure (as png)
- ret_ax: boolean, optional, default: False
+ ret_ax: boolean, optional, default False
return the 'axes()' object of the plot
"""
if not idx_cols: | harmonizing definition of defaults in docstrings | IAMconsortium_pyam | train |
4e3a6427569dc38705ec0338db5fd492d453739f | diff --git a/src/plugins/Transloadit/index.js b/src/plugins/Transloadit/index.js
index <HASH>..<HASH> 100644
--- a/src/plugins/Transloadit/index.js
+++ b/src/plugins/Transloadit/index.js
@@ -299,6 +299,8 @@ module.exports = class Transloadit extends Plugin {
}
onRestored (pluginData) {
+ const opts = this.opts
+
const knownUploads = pluginData[this.id].files || []
const knownResults = pluginData[this.id].results || []
const previousAssemblies = pluginData[this.id].assemblies || {}
@@ -445,10 +447,15 @@ module.exports = class Transloadit extends Plugin {
// Emit events for assemblies that have completed or errored while we were away.
const diffAssemblyStatus = (prev, next) => {
console.log('[Transloadit] Diff assemblies', prev, next)
- if (next.ok === 'ASSEMBLY_COMPLETED' && prev.ok !== 'ASSEMBLY_COMPLETED') {
+
+ if (opts.waitForEncoding && next.ok === 'ASSEMBLY_COMPLETED' && prev.ok !== 'ASSEMBLY_COMPLETED') {
console.log(' Emitting transloadit:complete for', next.assembly_id, next)
this.core.emit('transloadit:complete', next)
+ } else if (opts.waitForMetadata && next.upload_meta_data_extracted && !prev.upload_meta_data_extracted) {
+ console.log(' Emitting transloadit:complete after metadata extraction for', next.assembly_id, next)
+ this.core.emit('transloadit:complete', next)
}
+
if (next.error && !prev.error) {
console.log(' !!! Emitting transloadit:assembly-error for', next.assembly_id, next)
this.core.emit('transloadit:assembly-error', next, new Error(next.message)) | transloadit: Emit `transloadit:complete` correctly after restore with `waitForMetadata`. | transloadit_uppy | train |
1a0603377c0aa568b8013441bd420bc0b04ec290 | diff --git a/src/metriqued/metriqued/cube_api.py b/src/metriqued/metriqued/cube_api.py
index <HASH>..<HASH> 100644
--- a/src/metriqued/metriqued/cube_api.py
+++ b/src/metriqued/metriqued/cube_api.py
@@ -13,7 +13,9 @@ from tornado.web import authenticated
from metriqued.core_api import MetriqueHdlr
from metriqued.utils import query_add_date, parse_pql_query
-from metriqueu.utils import utcnow, batch_gen, jsonhash
+from metriqueu.utils import utcnow, batch_gen
+
+OBJ_KEYS = set(['_id', '_hash', '_oid', '_start', '_end'])
class DropHdlr(MetriqueHdlr):
@@ -379,38 +381,22 @@ class SaveObjectsHdlr(MetriqueHdlr):
Do some basic object validatation and add an _start timestamp value
'''
- new_obj_hashes = []
- for i, obj in enumerate(objects):
- # we don't want these in the jsonhash
- _start = obj.pop('_start') if '_start' in obj else None
- _end = obj.pop('_end') if '_end' in obj else None
- _id = obj.pop('_id') if '_id' in obj else None
- if not isinstance(_start, (int, float)):
- _t = type(_start)
- self._raise(400, "_start must be float; got %s" % _t)
- if not isinstance(_end, (int, float, type(None))):
- _t = type(_end)
- self._raise(400, "_end must be float/None; got %s" % _t)
- if not obj.get('_oid'):
- self._raise(400, "_oid field MUST be defined: %s" % obj)
- if not obj.get('_hash'):
- # hash the object (minus _start/_end/_id)
- obj['_hash'] = jsonhash(obj)
- if _end is None:
- new_obj_hashes.append(obj['_hash'])
-
- # add back _start and _end properties
- obj['_start'] = _start
- obj['_end'] = _end
- obj['_id'] = _id if _id else jsonhash(obj)
- objects[i] = obj
-
+ for obj in objects:
+ keys = set(obj.keys())
+ if not OBJ_KEYS.issubset(keys):
+ self._raise(400,
+ "objects must have %s; got %s" % (OBJ_KEYS, keys))
+ if not isinstance(obj['_start'], (int, float)):
+ self._raise(400, "_start must be float")
+ if not isinstance(obj['_end'], (int, float, type(None))):
+ self._raise(400, "_end must be float/None")
+
+ hashes = [o['_hash'] for o in objects]
# Filter out objects whose most recent version did not change
- docs = _cube.find({'_hash': {'$in': new_obj_hashes},
- '_end': None},
+ docs = _cube.find({'_hash': {'$in': hashes}, '_end': None},
fields={'_hash': 1, '_id': -1})
- _dup_hashes = set([doc['_hash'] for doc in docs])
- objects = [obj for obj in objects if obj['_hash'] not in _dup_hashes]
+ dup_hashes = set([doc['_hash'] for doc in docs])
+ objects = [o for o in objects if o['_hash'] not in dup_hashes]
objects = filter(None, objects)
return objects | client ensures needed keys are in objs; server just checks them | kejbaly2_metrique | train |
27d186e20850562707c4386d8c32ea0d509d9f58 | diff --git a/aws/resource_aws_sqs_queue_policy.go b/aws/resource_aws_sqs_queue_policy.go
index <HASH>..<HASH> 100644
--- a/aws/resource_aws_sqs_queue_policy.go
+++ b/aws/resource_aws_sqs_queue_policy.go
@@ -101,7 +101,7 @@ func resourceAwsSqsQueuePolicyUpsert(d *schema.ResourceData, meta interface{}) e
var equivalent bool
equivalent, err = awspolicy.PoliciesAreEquivalent(*queuePolicy, policy)
if !equivalent {
- return fmt.Errorf("SQS attribute not updated")
+ return notUpdatedError
}
}
} | Update aws/resource_aws_sqs_queue_policy.go | terraform-providers_terraform-provider-aws | train |
2b991629b8b456f41b9809fab488a59b08f8eaee | diff --git a/forms/FieldSet.php b/forms/FieldSet.php
index <HASH>..<HASH> 100755
--- a/forms/FieldSet.php
+++ b/forms/FieldSet.php
@@ -179,7 +179,10 @@ class FieldSet extends DataObjectSet {
foreach($this->items as $i => $child) {
if(is_object($child)){
- if(($child->Name() == $fieldName || $child->Title() == $fieldName) && (!$dataFieldOnly || $child->hasData())) {
+ $childName = $child->Name();
+ if(!$childName) $childName = $child->Title();
+
+ if(($childName == $fieldName) && (!$dataFieldOnly || $child->hasData())) {
array_splice( $this->items, $i, 1 );
break;
} else if($child->isComposite()) { | API CHANGE: In FieldSet::removeByName(), only match on field Title() if field Name() isn't set. | silverstripe_silverstripe-framework | train |
dbdbf499a2f7f859353b2a375ba7c48ed062352c | diff --git a/Maven/src/main/java/com/technophobia/substeps/runner/ForkedRunner.java b/Maven/src/main/java/com/technophobia/substeps/runner/ForkedRunner.java
index <HASH>..<HASH> 100644
--- a/Maven/src/main/java/com/technophobia/substeps/runner/ForkedRunner.java
+++ b/Maven/src/main/java/com/technophobia/substeps/runner/ForkedRunner.java
@@ -158,6 +158,9 @@ public class ForkedRunner implements MojoRunner {
processBuilder.redirectErrorStream(true);
try {
+
+ log.debug("Starting substeps process with command " + Joiner.on(" ").join(processBuilder.command()));
+
this.forkedJVMProcess = processBuilder.start();
consumer = new InputStreamConsumer(this.forkedJVMProcess.getInputStream(), log, processStarted,
diff --git a/Maven/src/main/java/com/technophobia/substeps/runner/SubstepsRunnerMojo.java b/Maven/src/main/java/com/technophobia/substeps/runner/SubstepsRunnerMojo.java
index <HASH>..<HASH> 100644
--- a/Maven/src/main/java/com/technophobia/substeps/runner/SubstepsRunnerMojo.java
+++ b/Maven/src/main/java/com/technophobia/substeps/runner/SubstepsRunnerMojo.java
@@ -64,13 +64,12 @@ public class SubstepsRunnerMojo extends AbstractMojo {
private final ExecutionReportBuilder executionReportBuilder = null;
/**
- * When running in forked mode, a port is required to communicated between
- * maven and substeps
+ * When running in forked mode, a port is required to communicate between
+ * maven and substeps, uo set explicitly use -DjmxPort=9999
*
- * @parameter default-value="9999"
- * @required
+ * @parameter default-value="9999" expression="${jmxPort}"
*/
- private int jmxPort;
+ private Integer jmxPort;
/**
* A space delimited string of vm arguments to pass to the forked jvm
@@ -245,7 +244,6 @@ public class SubstepsRunnerMojo extends AbstractMojo {
private void ensureValidConfiguration() throws MojoExecutionException {
ensureForkedIfStepImplementationArtifactsSpecified();
-
}
private void ensureForkedIfStepImplementationArtifactsSpecified() throws MojoExecutionException {
@@ -256,4 +254,5 @@ public class SubstepsRunnerMojo extends AbstractMojo {
}
}
+
} | Change jmxPort property to use expression to ease usage from command line | G2G3Digital_substeps-framework | train |
18e88c502e1940e0a8480f9f2dc49f093256766a | diff --git a/pyrabbit/http.py b/pyrabbit/http.py
index <HASH>..<HASH> 100644
--- a/pyrabbit/http.py
+++ b/pyrabbit/http.py
@@ -46,6 +46,7 @@ class HTTPClient(object):
'all_nodes': 'nodes',
'all_vhosts': 'vhosts',
'all_users': 'users',
+ 'all_bindings': 'bindings',
'whoami': 'whoami',
'queues_by_vhost': 'queues/%s',
'queues_by_name': 'queues/%s/%s',
@@ -53,7 +54,13 @@ class HTTPClient(object):
'exchange_by_name': 'exchanges/%s/%s',
'live_test': 'aliveness-test/%s',
'purge_queue': 'queues/%s/%s/contents',
- 'connections_by_name': 'connections/%s'}
+ 'connections_by_name': 'connections/%s',
+ 'bindings_by_source_exch': 'exchanges/%s/%s/bindings/source',
+ 'bindings_by_dest_exch': 'exchanges/%s/%s/bindings/destination',
+ 'bindings_on_queue': 'queues/%s/%s/bindings',
+ 'get_from_queue': 'queues/%s/%s/get',
+ 'publish_to_exchange': 'exchanges/%s/%s/publish',
+ }
def __init__(self, server, uname, passwd):
"""
@@ -86,10 +93,16 @@ class HTTPClient(object):
:param string path: A URL
:param string reqtype: The HTTP method (GET, POST, etc.) to use
in the request.
+ :param string body: A string representing any data to be sent in the
+ body of the HTTP request.
+ :param dict headers: {header-name: header-value} dictionary.
"""
try:
- resp, content = self.client.request(path, reqtype, body or '', headers or {})
+ resp, content = self.client.request(path,
+ reqtype,
+ body or '',
+ headers or {})
except Exception as out:
# net-related exception types from httplib2 are unpredictable.
raise NetworkError("Error: %s %s" % (type(out), out))
@@ -305,9 +318,9 @@ class HTTPClient(object):
"durable": durable, "internal": internal,
"arguments": arguments or []})
self.do_call(os.path.join(self.base_url, path),
- 'PUT',
- body,
- headers={'content-type': 'application/json'})
+ 'PUT',
+ body,
+ headers={'content-type': 'application/json'})
return True
def delete_exchange(self, vhost, name):
@@ -324,20 +337,22 @@ class HTTPClient(object):
self.do_call(os.path.join(self.base_url, path), 'DELETE')
return True
- def get_connections(self, name=None):
+ def get_connections(self):
"""
:returns: list of dicts, or an empty list if there are no connections.
- if *name* is not None, returns a dict.
:param string name: The name of a specific connection to get
"""
- if name:
- path = HTTPClient.urls['connections_by_name']
- else:
- path = HTTPClient.urls['all_connections']
+ path = HTTPClient.urls['all_connections']
resp, content = self.do_call(os.path.join(self.base_url, path), 'GET')
conns = self.decode_json_content(content)
return conns
+ def get_connection(self, name):
+ path = HTTPClient.urls['connections_by_name']
+ resp, content = self.do_call(os.path.join(self.base_url, path), 'GET')
+ conn = self.decode_json_content(content)
+ return conn
+
def get_channels(self):
"""
Return a list of dicts containing details about broker connections. | Stubbed out more API URLs to be supported in HTTPClient, broke out get_connection from get_connections, and a couple of other relatively trivial tweaks. | bkjones_pyrabbit | train |
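The new entries in the `urls` map are printf-style path templates, but note that the `get_connection` added above never substitutes `name` into the `%s` of `connections_by_name` before calling `do_call`. A hedged sketch of the interpolation presumably intended (the template values are copied from the diff; the base URL and helper name are invented for illustration):

```python
import os

BASE_URL = "http://localhost:15672/api"  # illustrative stand-in for base_url
URLS = {
    "all_connections": "connections",
    "connections_by_name": "connections/%s",
}


def connection_path(name):
    # Fill the %s placeholder before joining onto the base URL.
    return os.path.join(BASE_URL, URLS["connections_by_name"] % name)

# connection_path("conn-1") -> "http://localhost:15672/api/connections/conn-1"
```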
f2a6bf692b30063b7c550dba4558398ea2ba594b | diff --git a/test_path.py b/test_path.py
index <HASH>..<HASH> 100644
--- a/test_path.py
+++ b/test_path.py
@@ -136,8 +136,11 @@ class TempDirTestCase(unittest.TestCase):
d = path(self.tempdir)
subdir = d / 'subdir'
subdir.makedirs()
+ old_dir = os.getcwd()
with subdir:
self.assertEquals(os.getcwd(), subdir)
+ self.assertEquals(os.getcwd(), old_dir)
+
def testTouch(self):
# NOTE: This test takes a long time to run (~10 seconds). | added context manager, more tests | jaraco_path.py | train
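The new test drives a `path` object as a context manager: entering changes into the directory, exiting restores the previous working directory. A minimal sketch of how such a manager is typically written (this is an assumption about the library's behavior, not its actual code):

```python
import os


class chdir_context:
    """cd into `target` on enter; restore the previous cwd on exit."""

    def __init__(self, target):
        self.target = target

    def __enter__(self):
        self._old_cwd = os.getcwd()
        os.chdir(self.target)
        return self.target

    def __exit__(self, exc_type, exc_value, traceback):
        os.chdir(self._old_cwd)  # runs even if the with-body raised
        return False  # never swallow exceptions
```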
841d561d82119a455fdfa8a082f6c3e2574a790a | diff --git a/api/models.py b/api/models.py
index <HASH>..<HASH> 100644
--- a/api/models.py
+++ b/api/models.py
@@ -17,7 +17,7 @@ from celery.canvas import group
from django.conf import settings
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
-from django.db import models, connections
+from django.db import models
from django.db.models import Max
from django.db.models.signals import post_delete
from django.db.models.signals import post_save
@@ -39,20 +39,6 @@ def log_event(app, msg, level=logging.INFO):
logger.log(level, msg)
-def close_db_connections(func, *args, **kwargs):
- """
- Decorator to close db connections during threaded execution
-
- Note this is necessary to work around:
- https://code.djangoproject.com/ticket/22420
- """
- def _inner(*args, **kwargs):
- func(*args, **kwargs)
- for conn in connections.all():
- conn.close()
- return _inner
-
-
def validate_app_structure(value):
"""Error if the dict values aren't ints >= 0."""
try:
@@ -348,7 +334,6 @@ class Container(UuidAuditedModel):
def _command_announceable(self):
return self._command.lower() in ['start web', '']
- @close_db_connections
@transition(field=state, source=INITIALIZED, target=CREATED)
def create(self):
image = self.release.image
@@ -361,14 +346,12 @@ class Container(UuidAuditedModel):
use_announcer=self._command_announceable(),
**kwargs)
- @close_db_connections
@transition(field=state,
source=[CREATED, UP, DOWN],
target=UP, crashed=DOWN)
def start(self):
self._scheduler.start(self._job_id, self._command_announceable())
- @close_db_connections
@transition(field=state,
source=[INITIALIZED, CREATED, UP, DOWN],
target=UP,
@@ -394,12 +377,10 @@ class Container(UuidAuditedModel):
# destroy old container
self._scheduler.destroy(old_job_id, self._command_announceable())
- @close_db_connections
@transition(field=state, source=UP, target=DOWN)
def stop(self):
self._scheduler.stop(self._job_id, self._command_announceable())
- @close_db_connections
@transition(field=state,
source=[INITIALIZED, CREATED, UP, DOWN],
target=DESTROYED) | ref(controller): remove close_db_connection tests hack
The decorator once allowed us to work around issues with running
our test suite, but does not seem to be needed now. | deis_controller-sdk-go | train |
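For reference, the pattern being deleted is a decorator that closes every Django connection after the wrapped call, so worker threads do not leak stale connections (the Django ticket cited in the removed docstring). A sketch of that pattern with two fixes the removed version lacked, `functools.wraps` and propagation of the return value:

```python
import functools
from django.db import connections


def close_db_connections(func):
    """Close all DB connections once `func` finishes in a thread."""
    @functools.wraps(func)
    def _inner(*args, **kwargs):
        try:
            return func(*args, **kwargs)  # removed version dropped this value
        finally:
            for conn in connections.all():
                conn.close()
    return _inner
```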
2f81383d07904a651ac6f06533dfee8a42baaf60 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -155,6 +155,12 @@ export {
* @type Accordion
*/
Accordion,
+
+ /**
+ * Inline Left Navigation Menu.
+ * @type InlineLeftNav
+ */
+ InlineLeftNav,
};
/** | fix(inline-left-nav): export InlineLeftNav from the barrel. Closes #<I> (#<I>) | carbon-design-system_carbon-components | train |
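A barrel module re-exports a package's public symbols from one entry point so consumers import from a single place. For illustration only, the closest Python analogue is a package `__init__.py` that re-exports submodule classes (the module names below are invented, not carbon-components'):

```python
# components/__init__.py -- rough Python analogue of a JS barrel file
from .accordion import Accordion
from .inline_left_nav import InlineLeftNav

__all__ = ["Accordion", "InlineLeftNav"]  # the package's public surface
```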
1244a7164e4a8599c650b6e11908064958b8d50c | diff --git a/restcomm/restcomm.mscontrol.jsr309/src/main/java/org/restcomm/connect/mscontrol/jsr309/Jsr309CallController.java b/restcomm/restcomm.mscontrol.jsr309/src/main/java/org/restcomm/connect/mscontrol/jsr309/Jsr309CallController.java
index <HASH>..<HASH> 100644
--- a/restcomm/restcomm.mscontrol.jsr309/src/main/java/org/restcomm/connect/mscontrol/jsr309/Jsr309CallController.java
+++ b/restcomm/restcomm.mscontrol.jsr309/src/main/java/org/restcomm/connect/mscontrol/jsr309/Jsr309CallController.java
@@ -716,7 +716,7 @@ public class Jsr309CallController extends MediaServerController {
params.put(Recorder.START_BEEP, Boolean.FALSE);
// Video parameters
- if (MediaAttributes.MediaType.AUDIO_VIDEO.equals(message.media()) ||
+ if (MediaAttributes.MediaType.AUDIO_VIDEO.equals(message.media()) ||
MediaAttributes.MediaType.VIDEO_ONLY.equals(message.media())) {
params.put(Recorder.VIDEO_CODEC, CodecConstants.H264);
String sVideoFMTP = "profile=" + "66"; | Fix for trailing spaces. Issue #<I>. | RestComm_Restcomm-Connect | train |
3abdc2c9a5d948947d9891ca2f8b49ac8d8c6fcc | diff --git a/tasks/jstestdriver.js b/tasks/jstestdriver.js
index <HASH>..<HASH> 100644
--- a/tasks/jstestdriver.js
+++ b/tasks/jstestdriver.js
@@ -105,8 +105,12 @@ module.exports = function (grunt) {
return evalOptionsData(arr) ? arr : null;
}
- function hasFailedTests(resultArray) {
- console.log(typeof resultArray);
+ function hasFailedTests(result) {
+ var prop, resultStr = "";
+
+ for (prop in result) {
+ console.log(result[prop]);
+ }
return false;
} | Committing from grunt when tests fail | rickyclegg_grunt-jstestdriver | train
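The committed `hasFailedTests` is still debug scaffolding: it logs each property of the result and returns `false` unconditionally. A hedged sketch of the check it appears to be working toward, assuming each per-browser entry carries a `failed` count (the result shape is an assumption about jstestdriver's output, not documented behavior):

```python
def has_failed_tests(result):
    """True if any per-browser entry reports at least one failed test."""
    return any(
        entry.get("failed", 0) > 0
        for entry in result.values()
        if isinstance(entry, dict)
    )
```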