hash (stringlengths 40-40) | diff (stringlengths 131-114k) | message (stringlengths 7-980) | project (stringlengths 5-67) | split (stringclasses 1 value) |
---|---|---|---|---|
3e43378e5d3bcc1ccf0ac1c27fcd0f381a2b2bb4 | diff --git a/js/okcoinusd.js b/js/okcoinusd.js
index <HASH>..<HASH> 100644
--- a/js/okcoinusd.js
+++ b/js/okcoinusd.js
@@ -633,19 +633,21 @@ module.exports = class okcoinusd extends Exchange {
}
handleErrors (code, reason, url, method, headers, body) {
- let response = JSON.parse (body);
- if ('error_code' in response) {
- let error = this.safeString (response, 'error_code');
- let message = this.id + ' ' + this.json (response);
- if (error in this.exceptions) {
- let ExceptionClass = this.exceptions[error];
- throw new ExceptionClass (message);
- } else {
- throw new ExchangeError (message);
+ if (body[0] === '{') {
+ let response = JSON.parse (body);
+ if ('error_code' in response) {
+ let error = this.safeString (response, 'error_code');
+ let message = this.id + ' ' + this.json (response);
+ if (error in this.exceptions) {
+ let ExceptionClass = this.exceptions[error];
+ throw new ExceptionClass (message);
+ } else {
+ throw new ExchangeError (message);
+ }
}
+ if ('result' in response)
+ if (!response['result'])
+ throw new ExchangeError (this.id + ' ' + this.json (response));
}
- if ('result' in response)
- if (!response['result'])
- throw new ExchangeError (this.id + ' ' + this.json (response));
}
}; | added json-decoding protection against python failures to okcoinusd #<I> #<I> | ccxt_ccxt | train |
217634a714794136d9a3774ff837a68bb36c2def | diff --git a/manticore/ethereum/__init__.py b/manticore/ethereum/__init__.py
index <HASH>..<HASH> 100644
--- a/manticore/ethereum/__init__.py
+++ b/manticore/ethereum/__init__.py
@@ -1546,3 +1546,12 @@ class ManticoreEVM(Manticore):
# We suppress because otherwise we log it many times and it looks weird.
def _did_finish_run_callback(self):
pass
+
+ def __repr__(self):
+ return self.__str__()
+
+ def __str__(self):
+ return "<ManticoreEVM | Alive States: {}; Terminated States: {}>".format(
+ self.count_running_states(),
+ self.count_terminated_states()
+ )
\ No newline at end of file | String representation for Manticore EVM (#<I>)
* __str__ and __repr__ for ManticoreEVM
* make __repr__ call __str__ | trailofbits_manticore | train |
863cc7502ca55bb838c628258f102ea8e6893835 | diff --git a/snapshot/cmd/snapshot-pv-provisioner/snapshot-pv-provisioner.go b/snapshot/cmd/snapshot-pv-provisioner/snapshot-pv-provisioner.go
index <HASH>..<HASH> 100644
--- a/snapshot/cmd/snapshot-pv-provisioner/snapshot-pv-provisioner.go
+++ b/snapshot/cmd/snapshot-pv-provisioner/snapshot-pv-provisioner.go
@@ -69,31 +69,12 @@ func newSnapshotProvisioner(client kubernetes.Interface, crdclient *rest.RESTCli
var _ controller.Provisioner = &snapshotProvisioner{}
-func (p *snapshotProvisioner) getPVFromVolumeSnapshotDataSpec(snapshotDataSpec *crdv1.VolumeSnapshotDataSpec) (*v1.PersistentVolume, error) {
- if snapshotDataSpec.PersistentVolumeRef == nil {
- return nil, fmt.Errorf("VolumeSnapshotDataSpec is not bound to any PV")
- }
- pvName := snapshotDataSpec.PersistentVolumeRef.Name
- if pvName == "" {
- return nil, fmt.Errorf("The PV name is not specified in snapshotdata %#v", *snapshotDataSpec)
- }
- pv, err := p.client.CoreV1().PersistentVolumes().Get(pvName, metav1.GetOptions{})
- if err != nil {
- return nil, fmt.Errorf("Failed to retrieve PV %s from the API server: %q", pvName, err)
- }
- return pv, nil
-}
-
func (p *snapshotProvisioner) snapshotRestore(snapshotName string, snapshotData crdv1.VolumeSnapshotData, options controller.VolumeOptions) (*v1.PersistentVolumeSource, map[string]string, error) {
// validate the PV supports snapshot and restore
spec := &snapshotData.Spec
- pv, err := p.getPVFromVolumeSnapshotDataSpec(spec)
- if err != nil {
- return nil, nil, err
- }
- volumeType := crdv1.GetSupportedVolumeFromPVSpec(&pv.Spec)
+ volumeType := crdv1.GetSupportedVolumeFromSnapshotDataSpec(spec)
if len(volumeType) == 0 {
- return nil, nil, fmt.Errorf("unsupported volume type found in PV %#v", *spec)
+ return nil, nil, fmt.Errorf("unsupported volume type found in SnapshotData %#v", *spec)
}
plugin, ok := volumePlugins[volumeType]
if !ok { | Don't check source PV when restoring from snapshot
The original PV could have been deleted. Use SnapshotData to check
if Restore is supported instead
Issue #<I> | kubernetes-incubator_external-storage | train |
9825d6ddc2ea2a846b5ed748ad1f0f809c509607 | diff --git a/lib/handlers/file-proxy.js b/lib/handlers/file-proxy.js
index <HASH>..<HASH> 100644
--- a/lib/handlers/file-proxy.js
+++ b/lib/handlers/file-proxy.js
@@ -23,7 +23,7 @@ function readFiles(files, callback) {
var execCallback = function (err, stat) {
if (!err && stat && stat.isFile()) {
callback(null, file, stat.size);
- } else if (file && file.pluginName) {
+ } else if (file && file.originalKey) {
pluginMgr.requestBin(file, function(buffer, err, res) {
file = file.originalKey;
callback(err, file, buffer ? buffer.length : 0, buffer, res);
diff --git a/lib/plugins/index.js b/lib/plugins/index.js
index <HASH>..<HASH> 100644
--- a/lib/plugins/index.js
+++ b/lib/plugins/index.js
@@ -72,7 +72,7 @@ var CUSTOM_CERT_HEADER = config.CUSTOM_CERT_HEADER;
var ENABLE_CAPTURE_HEADER = config.ENABLE_CAPTURE_HEADER;
var PLUGIN_HOOKS = config.PLUGIN_HOOKS;
var PLUGIN_HOOK_NAME_HEADER = config.PLUGIN_HOOK_NAME_HEADER;
-var HTTP_RE = /^https?:\/\//;
+var HTTP_RE = /^https?:\/\/./;
var conf = {};
var EXCLUDE_CONF_KEYS = {
uid: 1,
@@ -1563,6 +1563,7 @@ httpMgr.setPluginMgr(pluginMgr);
var PLUGIN_KEY_RE =/^\$(?:whistle\.)?([a-z\d_-]+)[/:]([\S\s]+)$/;
var MAX_VALUE_LEN = 1024 * 1024 * 16;
+var MAX_URL_VAL_LEN = 1024 * 256;
function requestValue(options, callback, isBin) {
options.needRawData = isBin;
@@ -1587,6 +1588,13 @@ function requestValue(options, callback, isBin) {
}
pluginMgr.resolveKey = function(url, rule, req) {
+ if (HTTP_RE.test(url)) {
+ return {
+ url: url,
+ originalKey: url,
+ maxLength: MAX_URL_VAL_LEN
+ };
+ }
if (!PLUGIN_KEY_RE.test(url)) {
return;
} | feat: read rule value from remote url | avwo_whistle | train |
9326358661efcd9fa32ed797430069a4be8a50e6 | diff --git a/src/main/java/nl/talsmasoftware/umldoclet/javadoc/UMLFactory.java b/src/main/java/nl/talsmasoftware/umldoclet/javadoc/UMLFactory.java
index <HASH>..<HASH> 100644
--- a/src/main/java/nl/talsmasoftware/umldoclet/javadoc/UMLFactory.java
+++ b/src/main/java/nl/talsmasoftware/umldoclet/javadoc/UMLFactory.java
@@ -180,15 +180,12 @@ public class UMLFactory {
private boolean isMethodFromExcludedSuperclass(ExecutableElement method) {
boolean result = false;
Element containingClass = method.getEnclosingElement();
- if (ElementKind.CLASS.equals(containingClass.getKind())) {
- TypeName typeName = TypeNameVisitor.INSTANCE.visit(containingClass.asType());
- result = config.getExcludedTypeReferences().contains(typeName.qualified)
- || methodsFromExcludedSuperclasses().stream().anyMatch(
+ if (containingClass.getKind().isClass() || containingClass.getKind().isInterface()) {
+ result = methodsFromExcludedSuperclasses().stream().anyMatch(
m -> equalMethodSignatures(m, method)
&& env.getTypeUtils().isAssignable(containingClass.asType(), m.getEnclosingElement().asType()));
- } else if (ElementKind.ENUM.equals(containingClass.getKind())) {
- result = isGenericEnumMethod(method);
}
+ result = result || isExcludedEnumMethod(method);
return result;
}
@@ -201,7 +198,6 @@ public class UMLFactory {
.map(TypeElement::getEnclosedElements).flatMap(Collection::stream)
.filter(elem -> ElementKind.METHOD.equals(elem.getKind()))
.filter(ExecutableElement.class::isInstance).map(ExecutableElement.class::cast)
- .filter(method -> !method.getModifiers().contains(Modifier.STATIC))
.filter(method -> !method.getModifiers().contains(Modifier.ABSTRACT))
.filter(method -> visibilityOf(method.getModifiers()).compareTo(Visibility.PRIVATE) > 0)
.collect(toCollection(LinkedHashSet::new));
@@ -209,14 +205,16 @@ public class UMLFactory {
return _methodsFromExcludedSuperclasses;
}
- private boolean isGenericEnumMethod(ExecutableElement method) {
- String name = method.getSimpleName().toString();
- if ("values".equals(name)) {
- return method.getParameters().size() == 0 && method.getModifiers().contains(Modifier.STATIC);
- } else if ("valueOf".equals(name)) {
- return method.getParameters().size() == 1
- && method.getModifiers().contains(Modifier.STATIC)
- && "java.lang.String".equals(TypeNameVisitor.INSTANCE.visit(method.getParameters().get(0).asType()).qualified);
+ private boolean isExcludedEnumMethod(ExecutableElement method) {
+ if (config.getExcludedTypeReferences().contains(Enum.class.getName())
+ && ElementKind.ENUM.equals(method.getEnclosingElement().getKind())
+ && method.getModifiers().contains(Modifier.STATIC)) {
+ if ("values".equals(method.getSimpleName().toString()) && method.getParameters().isEmpty()) {
+ return true;
+ } else if ("valueOf".equals(method.getSimpleName().toString()) && method.getParameters().size() == 1) {
+ String paramType = TypeNameVisitor.INSTANCE.visit(method.getParameters().get(0).asType()).qualified;
+ return String.class.getName().equals(paramType);
+ }
}
return false;
} | Support for leaving out standard static methods from Enum classes. | talsma-ict_umldoclet | train |
08ce1b19a6e5b4e15ce5ae67ce98bf3514269f11 | diff --git a/autograd/convenience_wrappers.py b/autograd/convenience_wrappers.py
index <HASH>..<HASH> 100644
--- a/autograd/convenience_wrappers.py
+++ b/autograd/convenience_wrappers.py
@@ -61,7 +61,7 @@ def jacobian(fun, argnum=0):
def hessian_vector_product(fun, argnum=0):
"""Builds a function that returns the exact Hessian-vector product.
- The returned function has arguments (vector, *args, **kwargs), and takes
+ The returned function has arguments (*args, vector, **kwargs), and takes
roughly 4x as long to evaluate as the original function."""
fun_grad = grad(fun, argnum)
def vector_dot_grad(*args, **kwargs):
@@ -78,7 +78,6 @@ def hessian(fun, argnum=0):
def hessian_fun(*args, **kwargs):
arg_in = args[argnum]
directions = np.eye(arg_in.size) # axis-aligned directions.
- hvp_list = [hvp(*(args+(dir,)), **kwargs) for dir in directions]
+ hvp_list = [hvp(*(args+(direction,)), **kwargs) for direction in directions]
return np.array(hvp_list)
return hessian_fun
- | Updated docstring to reflect new argument order in hess_vec_product | HIPS_autograd | train |
49199dbde5f6b6b27b91dda41dd396e9cbf00046 | diff --git a/scripts/git-precommit-hook.py b/scripts/git-precommit-hook.py
index <HASH>..<HASH> 100755
--- a/scripts/git-precommit-hook.py
+++ b/scripts/git-precommit-hook.py
@@ -27,10 +27,15 @@ def run_format_check(files):
rust_files = [x for x in files if x.endswith('.rs')]
if not rust_files:
return 0
- return subprocess.Popen(['cargo', 'fmt', '--',
- '--write-mode=diff',
- '--skip-children',
- '--color=always'] + rust_files).wait()
+ rv = subprocess.Popen(['cargo', 'fmt', '--',
+ '--write-mode=diff',
+ '--skip-children',
+ '--color=always'] + rust_files).wait()
+ if rv != 0:
+ print('', file=sys.stderr)
+ print('\033[1m\033[2minfo: to fix this run `cargo fmt` and '
+ 'commit again\033[0m', file=sys.stderr)
+ return rv
def main(): | feat: Added info for how to run cargo fmt | getsentry_semaphore | train |
aa850bf452afba9b7c49916472b32d1d80af20d0 | diff --git a/lib/mapreduce/mapreduce.go b/lib/mapreduce/mapreduce.go
index <HASH>..<HASH> 100644
--- a/lib/mapreduce/mapreduce.go
+++ b/lib/mapreduce/mapreduce.go
@@ -106,13 +106,26 @@ func contains(set []string, val string) bool {
}
type Job struct {
- Type string `json:"type"`
- Input string `json:"input"`
- Image string `json:"image"`
- Cmd []string `json:"command"`
- Limit int `json:"limit"`
- Parallel int `json:"parallel"`
- TimeOut int `json:"timeout"`
+ Type string `json:"type"`
+ Input string `json:"input"`
+ Image string `json:"image"`
+ Cmd []string `json:"command"`
+ Limit int `json:"limit"`
+ Parallel int `json:"parallel"`
+ TimeOut int `json:"timeout"`
+ CpuShares int `json:"cpu-shares"`
+ Memory int `json:"memory"`
+}
+
+func (j Job) containerConfig() *dockerclient.ContainerConfig {
+ c := &dockerclient.ContainerConfig{Image: j.Image, Cmd: j.Cmd}
+ if j.CpuShares != 0 {
+ c.CpuShares = int64(j.CpuShares)
+ }
+ if j.Memory != 0 {
+ c.Memory = int64(j.Memory)
+ }
+ return c
}
type materializeInfo struct { | Adds Cpu and memory quotas to jobs. | pachyderm_pachyderm | train |
24baed0c1aee1e4791cc84695322695b78826c94 | diff --git a/umap/static/umap/js/umap.slideshow.js b/umap/static/umap/js/umap.slideshow.js
index <HASH>..<HASH> 100644
--- a/umap/static/umap/js/umap.slideshow.js
+++ b/umap/static/umap/js/umap.slideshow.js
@@ -90,7 +90,7 @@ L.U.Slideshow = L.Class.extend({
play: function () {
if (this._id) return;
- if (this.map.editEnabled) return;
+ if (this.map.editEnabled || !this.map.options.slideshow.active) return;
L.DomUtil.addClass(document.body, L.U.Slideshow.CLASSNAME);
this._id = window.setInterval(L.bind(this.loop, this), this.options.delay);
this.resetSpinners(); | Prevent old style slideshow to be actived on load
We used to only consider the delay to define if the slideshow was
active or not. | umap-project_umap | train |
71dc7abdc64c6f8d77c7c3248b0965eb56185f98 | diff --git a/Model/ContactClientModel.php b/Model/ContactClientModel.php
index <HASH>..<HASH> 100644
--- a/Model/ContactClientModel.php
+++ b/Model/ContactClientModel.php
@@ -501,6 +501,9 @@ class ContactClientModel extends FormModel
}
$params['contactclient_id'] = $contactClient->getId();
+ $userTZ = new \DateTime('now');
+ $userTzName = $userTZ->getTimezone()->getName();
+
if ('revenue' != $type) {
$params['type'] = $type;
foreach ($utmSources as $utmSource) {
@@ -515,8 +518,6 @@ class ContactClientModel extends FormModel
// For some reason, Mautic only sets UTC in Query Date builder
// if its an intra-day date range ¯\_(ツ)_/¯
// so we have to do it here.
- $userTZ = new \DateTime('now');
- $userTzName = $userTZ->getTimezone()->getName();
$paramDateTo = $q->getParameter('dateTo');
$paramDateFrom = $q->getParameter('dateFrom');
$paramDateTo = new \DateTime($paramDateTo);
@@ -572,14 +573,14 @@ class ContactClientModel extends FormModel
}
$dbUnit = $query->getTimeUnitFromDateRange($dateFrom, $dateTo);
$dbUnit = $query->translateTimeUnit($dbUnit);
- $dateConstruct = 'DATE_FORMAT(t.date_added, \''.$dbUnit.'\')';
+ $dateConstruct = "DATE_FORMAT(CONVERT_TZ(t.date_added, @@global.time_zone, '$userTzName'), '$dbUnit.')";
foreach ($utmSources as $utmSource) {
$q->select($dateConstruct.' AS date, ROUND(SUM(t.attribution), 2) AS count')
->groupBy($dateConstruct);
if (empty($utmSource)) { // utmSource can be a NULL value
- $q->where('utm_source IS NULL');
+ $q->andWhere('utm_source IS NULL');
} else {
- $q->where('utm_source = :utmSource')
+ $q->andWhere('utm_source = :utmSource')
->setParameter('utmSource', $utmSource);
fix query on revenue-by-source version of client stat chart | TheDMSGroup_mautic-contact-client | train |
89a2c894eec385b8ae5f86d86bb358d1e0bae203 | diff --git a/src/Illuminate/Contracts/Validation/Validator.php b/src/Illuminate/Contracts/Validation/Validator.php
index <HASH>..<HASH> 100644
--- a/src/Illuminate/Contracts/Validation/Validator.php
+++ b/src/Illuminate/Contracts/Validation/Validator.php
@@ -10,6 +10,8 @@ interface Validator extends MessageProvider
* Run the validator's rules against its data.
*
* @return array
+ *
+ * @throws \Illuminate\Validation\ValidationException
*/
public function validate();
@@ -17,6 +19,8 @@ interface Validator extends MessageProvider
* Get the attributes and values that were validated.
*
* @return array
+ *
+ * @throws \Illuminate\Validation\ValidationException
*/
public function validated(); | validator contract throws docblock (#<I>) | laravel_framework | train |
00961f31328466251372bdc203a6e16cb1be506e | diff --git a/lib/sage_world/version.rb b/lib/sage_world/version.rb
index <HASH>..<HASH> 100644
--- a/lib/sage_world/version.rb
+++ b/lib/sage_world/version.rb
@@ -1,3 +1,3 @@
module SageWorld
- VERSION = "0.1.3"
+ VERSION = "0.1.4"
end | Change version number to <I> | vinsol_sageworld-catalog | train |
7e0bd442a7d8ee660d3f1e069ecb0d01dc24e5d3 | diff --git a/server/sonar-webserver-webapi/src/main/java/org/sonar/server/qualityprofile/QProfileBackuperImpl.java b/server/sonar-webserver-webapi/src/main/java/org/sonar/server/qualityprofile/QProfileBackuperImpl.java
index <HASH>..<HASH> 100644
--- a/server/sonar-webserver-webapi/src/main/java/org/sonar/server/qualityprofile/QProfileBackuperImpl.java
+++ b/server/sonar-webserver-webapi/src/main/java/org/sonar/server/qualityprofile/QProfileBackuperImpl.java
@@ -95,7 +95,7 @@ public class QProfileBackuperImpl implements QProfileBackuper {
importedRule.setRepository(ruleKey.repository());
importedRule.setKey(ruleKey.rule());
importedRule.setSeverity(exportRuleDto.getSeverityString());
- if (importedRule.isCustomRule()) {
+ if (exportRuleDto.isCustomRule()) {
importedRule.setTemplate(exportRuleDto.getTemplateRuleKey().rule());
importedRule.setDescription(exportRuleDto.getDescriptionOrThrow());
}
@@ -138,13 +138,13 @@ public class QProfileBackuperImpl implements QProfileBackuper {
List<ImportedRule> importedRules = qProfile.getRules();
- Map<RuleKey, RuleDto> ruleDefinitionsByKey = getImportedRulesDefinitions(dbSession, importedRules);
- checkIfRulesFromExternalEngines(ruleDefinitionsByKey.values());
+ Map<RuleKey, RuleDto> ruleKeyToDto = getImportedRulesDtos(dbSession, importedRules);
+ checkIfRulesFromExternalEngines(ruleKeyToDto.values());
- Map<RuleKey, RuleDto> customRulesDefinitions = createCustomRulesIfNotExist(dbSession, importedRules, ruleDefinitionsByKey);
- ruleDefinitionsByKey.putAll(customRulesDefinitions);
+ Map<RuleKey, RuleDto> customRulesDefinitions = createCustomRulesIfNotExist(dbSession, importedRules, ruleKeyToDto);
+ ruleKeyToDto.putAll(customRulesDefinitions);
- List<RuleActivation> ruleActivations = toRuleActivations(importedRules, ruleDefinitionsByKey);
+ List<RuleActivation> ruleActivations = toRuleActivations(importedRules, ruleKeyToDto);
BulkChangeResult changes = profileReset.reset(dbSession, targetProfile, ruleActivations);
return new QProfileRestoreSummary(targetProfile, changes);
@@ -154,15 +154,15 @@ public class QProfileBackuperImpl implements QProfileBackuper {
* Returns map of rule definition for an imported rule key.
* The imported rule key may refer to a deprecated rule key, in which case the the RuleDto will correspond to a different key (the new key).
*/
- private Map<RuleKey, RuleDto> getImportedRulesDefinitions(DbSession dbSession, List<ImportedRule> rules) {
+ private Map<RuleKey, RuleDto> getImportedRulesDtos(DbSession dbSession, List<ImportedRule> rules) {
Set<RuleKey> ruleKeys = rules.stream()
.map(ImportedRule::getRuleKey)
.collect(toSet());
- Map<RuleKey, RuleDto> rulesDefinitions = db.ruleDao().selectByKeys(dbSession, ruleKeys).stream()
+ Map<RuleKey, RuleDto> ruleDtos = db.ruleDao().selectByKeys(dbSession, ruleKeys).stream()
.collect(Collectors.toMap(RuleDto::getKey, identity()));
Set<RuleKey> unrecognizedRuleKeys = ruleKeys.stream()
- .filter(r -> !rulesDefinitions.containsKey(r))
+ .filter(r -> !ruleDtos.containsKey(r))
.collect(toSet());
if (!unrecognizedRuleKeys.isEmpty()) {
@@ -177,11 +177,11 @@ public class QProfileBackuperImpl implements QProfileBackuper {
for (RuleDto rule : rulesBasedOnDeprecatedKeys) {
DeprecatedRuleKeyDto deprecatedRuleKey = deprecatedRuleKeysByUuid.get(rule.getUuid());
RuleKey oldRuleKey = RuleKey.of(deprecatedRuleKey.getOldRepositoryKey(), deprecatedRuleKey.getOldRuleKey());
- rulesDefinitions.put(oldRuleKey, rule);
+ ruleDtos.put(oldRuleKey, rule);
}
}
- return rulesDefinitions;
+ return ruleDtos;
}
private static void checkIfRulesFromExternalEngines(Collection<RuleDto> ruleDefinitions) { | SONAR-<I> fix quality profile copy inconsistency | SonarSource_sonarqube | train |
e71ff0aab482e5b059dab7275455e0aa26c88589 | diff --git a/lib/public_suffix/list.rb b/lib/public_suffix/list.rb
index <HASH>..<HASH> 100644
--- a/lib/public_suffix/list.rb
+++ b/lib/public_suffix/list.rb
@@ -44,7 +44,7 @@ module PublicSuffix
include Enumerable
class << self
- attr_accessor :default_definition
+ attr_writer :default_definition
end
# Gets the default rule list. | Use attr_writer, since #default_definition is already defined. | weppos_publicsuffix-ruby | train |
291731c5b23e2a11641b23a6d7b2207f124740bc | diff --git a/pkg/minikube/cruntime/docker.go b/pkg/minikube/cruntime/docker.go
index <HASH>..<HASH> 100644
--- a/pkg/minikube/cruntime/docker.go
+++ b/pkg/minikube/cruntime/docker.go
@@ -22,7 +22,6 @@ import (
"strings"
"github.com/golang/glog"
- "github.com/kballard/go-shellquote"
"github.com/pkg/errors"
"k8s.io/minikube/pkg/minikube/out"
)
@@ -157,7 +156,8 @@ func (r *Docker) StopContainers(ids []string) error {
return nil
}
glog.Infof("Stopping containers: %s", ids)
- c := exec.Command("docker", "stop", shellquote.Join(ids...))
+ args := append([]string{"stop"}, ids...)
+ c := exec.Command("docker", args...)
if _, err := r.Runner.RunCmd(c); err != nil {
return errors.Wrap(err, "stopping containers docker.")
} | change shellquote to args | kubernetes_minikube | train |
f1db3dd0d4e3c6ef20e574a30753a96dbdb09b2f | diff --git a/files.go b/files.go
index <HASH>..<HASH> 100644
--- a/files.go
+++ b/files.go
@@ -212,6 +212,8 @@ type ListFolderOutput struct {
// ListFolder returns the metadata for a file or folder.
func (c *Files) ListFolder(in *ListFolderInput) (out *ListFolderOutput, err error) {
+ in.Path = normalizePath(in.Path)
+
body, err := c.call("/files/list_folder", in)
if err != nil {
return
@@ -285,6 +287,8 @@ type SearchOutput struct {
// Search for files and folders.
func (c *Files) Search(in *SearchInput) (out *SearchOutput, err error) {
+ in.Path = normalizePath(in.Path)
+
if in.Mode == "" {
in.Mode = SearchModeFilename
}
@@ -369,3 +373,12 @@ func (c *Files) GetPreview(in *GetPreviewInput) (out *GetPreviewOutput, err erro
out = &GetPreviewOutput{body}
return
}
+
+// Normalize path so people can use "/" as they expect.
+func normalizePath(s string) string {
+ if s == "/" {
+ return ""
+ } else {
+ return s
+ }
+}
diff --git a/files_test.go b/files_test.go
index <HASH>..<HASH> 100644
--- a/files_test.go
+++ b/files_test.go
@@ -67,6 +67,17 @@ func TestFiles_ListFolder(t *testing.T) {
assert.True(t, out.HasMore)
}
+func TestFiles_ListFolder_root(t *testing.T) {
+ t.Parallel()
+ c := client()
+
+ _, err := c.Files.ListFolder(&ListFolderInput{
+ Path: "/",
+ })
+
+ assert.NoError(t, err)
+}
+
func TestFiles_Search(t *testing.T) {
c := client() | add path normalization so / works as expected. Closes #8 | tj_go-dropbox | train |
bd77f25262a652227b2dd3f4ee33c3f0bd5b9d77 | diff --git a/lib/solargraph/language_server/host.rb b/lib/solargraph/language_server/host.rb
index <HASH>..<HASH> 100644
--- a/lib/solargraph/language_server/host.rb
+++ b/lib/solargraph/language_server/host.rb
@@ -136,7 +136,7 @@ module Solargraph
def open? uri
result = nil
@change_semaphore.synchronize do
- result = library.open?(uri_to_file(uri))
+ result = unsafe_open?(uri)
end
result
end
@@ -503,6 +503,10 @@ module Solargraph
@change_queue.any?{|change| change['textDocument']['uri'] == file_uri}
end
+ def unsafe_open? uri
+ library.open?(uri_to_file(uri))
+ end
+
def requests
@requests ||= {}
end
@@ -546,9 +550,13 @@ module Solargraph
@diagnostics_queue.push change['textDocument']['uri']
next true
else
- # @todo Change is out of order. Save it for later
- STDERR.puts "Skipping out of order change to #{change['textDocument']['uri']}"
- next false
+ if unsafe_open?(change['textDocument']['uri'])
+ STDERR.puts "Skipping out of order change to #{change['textDocument']['uri']}"
+ next false
+ else
+ STDERR.puts "Deleting out of order change to closed file #{change['textDocument']['uri']}"
+ next true
+ end
end
end
refreshable = changed and @change_queue.empty? | Host deletes out of order changes to closed files. | castwide_solargraph | train |
4803401a0d9209b400ab82dd5abb261fbb5c21af | diff --git a/scss/tests/test_misc.py b/scss/tests/test_misc.py
index <HASH>..<HASH> 100644
--- a/scss/tests/test_misc.py
+++ b/scss/tests/test_misc.py
@@ -6,7 +6,7 @@ from scss import Scss
def test_super_selector():
- compiler = Scss(scss_opts=dict(compress=False))
+ compiler = Scss(scss_opts=dict(style='expanded'))
input = """\
foo, bar {
a: b;
@@ -19,6 +19,7 @@ baz {
super foo, super bar {
a: b;
}
+
super baz {
c: d;
}
@@ -30,7 +31,7 @@ super baz {
def test_debug_info():
# nb: debug info doesn't work if the source isn't a file
- compiler = Scss(scss_opts=dict(compress=False, debug_info=True))
+ compiler = Scss(scss_opts=dict(style='expanded', debug_info=True))
compiler._scss_files = {}
compiler._scss_files['input.css'] = """\
div {
@@ -45,6 +46,7 @@ table {
div {
color: green;
}
+
@media -sass-debug-info{filename{font-family:file\:\/\/input\.css}line{font-family:\\000034}}
table {
color: red; | Fix misc tests to match new styles. | Kronuz_pyScss | train |
7f007d73445970ca62240d35f34b24b3a955b380 | diff --git a/src/com/opera/core/systems/scope/handlers/IConnectionHandler.java b/src/com/opera/core/systems/scope/handlers/IConnectionHandler.java
index <HASH>..<HASH> 100644
--- a/src/com/opera/core/systems/scope/handlers/IConnectionHandler.java
+++ b/src/com/opera/core/systems/scope/handlers/IConnectionHandler.java
@@ -9,6 +9,11 @@ public interface IConnectionHandler {
* @return false if the connection should be closed immediately, or true to continue.
*/
boolean onConnected(StpConnection connection);
+
+ /**
+ * This event is triggered when a handshake has been received successfully.
+ */
+ void onHandshake();
void onDisconnect(); | Added onHandshake event handler, so that we can use that to enable Stp1. | operasoftware_operaprestodriver | train |
b0c9fb803a1e91f7b95a1e02745d7d8f18e8db9d | diff --git a/tests/lax_numpy_test.py b/tests/lax_numpy_test.py
index <HASH>..<HASH> 100644
--- a/tests/lax_numpy_test.py
+++ b/tests/lax_numpy_test.py
@@ -168,9 +168,15 @@ JAX_REDUCER_NO_DTYPE_RECORDS = [
op_record("any", 1, all_dtypes, all_shapes, jtu.rand_some_zero(), []),
op_record("max", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
op_record("min", 1, all_dtypes, nonempty_shapes, jtu.rand_default(), []),
- op_record("ptp", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
]
+numpy_version = tuple(map(int, onp.version.version.split('.')))
+if numpy_version >= (1, 15):
+ JAX_REDUCER_NO_DTYPE_RECORDS += [
+ op_record("ptp", 1, number_dtypes, nonempty_shapes, jtu.rand_default(), []),
+ ]
+
+
JAX_ARGMINMAX_RECORDS = [
op_record("argmin", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []),
op_record("argmax", 1, all_dtypes, nonempty_shapes, jtu.rand_some_equal(), []), | gate np.ptp tests by numpy version check >= <I> | tensorflow_probability | train |
34232958bfe17090c9c6d2941eead6c8a4272da0 | diff --git a/src/main/java/org/jpmml/converter/ObjectFeature.java b/src/main/java/org/jpmml/converter/ObjectFeature.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jpmml/converter/ObjectFeature.java
+++ b/src/main/java/org/jpmml/converter/ObjectFeature.java
@@ -19,10 +19,15 @@
package org.jpmml.converter;
import org.dmg.pmml.DataType;
+import org.dmg.pmml.Field;
import org.dmg.pmml.FieldName;
public class ObjectFeature extends Feature {
+ public ObjectFeature(PMMLEncoder encoder, Field<?> field){
+ this(encoder, field.getName(), field.getDataType());
+ }
+
public ObjectFeature(PMMLEncoder encoder, FieldName name, DataType dataType){
super(encoder, name, dataType);
}
diff --git a/src/main/java/org/jpmml/converter/PMMLEncoder.java b/src/main/java/org/jpmml/converter/PMMLEncoder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jpmml/converter/PMMLEncoder.java
+++ b/src/main/java/org/jpmml/converter/PMMLEncoder.java
@@ -109,7 +109,7 @@ public class PMMLEncoder {
public DataField createDataField(FieldName name, OpType opType, DataType dataType, List<?> values){
DataField dataField = new DataField(name, opType, dataType);
- if(values != null && values.size() > 0){
+ if(values != null && !values.isEmpty()){
PMMLUtil.addValues(dataField, values);
}
@@ -214,28 +214,11 @@ public class PMMLEncoder {
}
public Field<?> toCategorical(FieldName name, List<?> values){
- Field<?> field = getField(name);
-
- dataField:
- if(field instanceof DataField){
- DataField dataField = (DataField)field;
-
- List<?> existingValues = PMMLUtil.getValues(dataField);
- if(existingValues != null && existingValues.size() > 0){
-
- if((existingValues).equals(values)){
- break dataField;
- }
-
- throw new IllegalArgumentException("Field " + name.getValue() + " has valid values " + existingValues);
- }
-
- PMMLUtil.addValues(dataField, values);
- }
-
- field.setOpType(OpType.CATEGORICAL);
+ return toDiscrete(name, OpType.CATEGORICAL, values);
+ }
- return field;
+ public Field<?> toOrdinal(FieldName name, List<?> values){
+ return toDiscrete(name, OpType.ORDINAL, values);
}
public DefineFunction getDefineFunction(String name){
@@ -268,6 +251,35 @@ public class PMMLEncoder {
return this.defineFunctions;
}
+ private Field<?> toDiscrete(FieldName name, OpType opType, List<?> values){
+ Field<?> field = getField(name);
+
+ dataField:
+ if(field instanceof DataField){
+ DataField dataField = (DataField)field;
+
+ if(values == null || values.isEmpty()){
+ break dataField;
+ }
+
+ List<?> existingValues = PMMLUtil.getValues(dataField);
+ if(existingValues != null && !existingValues.isEmpty()){
+
+ if((existingValues).equals(values)){
+ break dataField;
+ }
+
+ throw new IllegalArgumentException("Field " + name.getValue() + " has valid values " + existingValues);
+ }
+
+ PMMLUtil.addValues(dataField, values);
+ }
+
+ field.setOpType(opType);
+
+ return field;
+ }
+
private FieldName checkName(Field<?> field){
FieldName name = field.getName();
diff --git a/src/main/java/org/jpmml/converter/WildcardFeature.java b/src/main/java/org/jpmml/converter/WildcardFeature.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/jpmml/converter/WildcardFeature.java
+++ b/src/main/java/org/jpmml/converter/WildcardFeature.java
@@ -40,6 +40,15 @@ public class WildcardFeature extends Feature {
return (DataField)field;
}
+ @Override
+ public ContinuousFeature toContinuousFeature(){
+ PMMLEncoder encoder = ensureEncoder();
+
+ DataField dataField = (DataField)encoder.toContinuous(getName());
+
+ return new ContinuousFeature(encoder, dataField);
+ }
+
public CategoricalFeature toCategoricalFeature(List<?> values){
PMMLEncoder encoder = ensureEncoder();
@@ -52,12 +61,11 @@ public class WildcardFeature extends Feature {
return new CategoricalFeature(encoder, dataField);
}
- @Override
- public ContinuousFeature toContinuousFeature(){
+ public ObjectFeature toOrdinalFeature(List<?> values){
PMMLEncoder encoder = ensureEncoder();
- DataField dataField = (DataField)encoder.toContinuous(getName());
+ DataField dataField = (DataField)encoder.toOrdinal(getName(), values);
- return new ContinuousFeature(encoder, dataField);
+ return new ObjectFeature(encoder, dataField);
}
}
\ No newline at end of file | Added method PMMLEncoder#toOrdinal(FieldName, List<?>) | jpmml_jpmml-converter | train |
8627a457a43ecdb5833f2b8144582b044406f91b | diff --git a/msg/scratch_msgs.js b/msg/scratch_msgs.js
index <HASH>..<HASH> 100644
--- a/msg/scratch_msgs.js
+++ b/msg/scratch_msgs.js
@@ -5322,7 +5322,7 @@ Blockly.ScratchMsgs.locales["fa"] =
"OPERATORS_EQUALS": "%1 = %2",
"OPERATORS_AND": "%1 و %2",
"OPERATORS_OR": "%1 یا %2",
- "OPERATORS_NOT": "%1 نباشد",
+ "OPERATORS_NOT": "چنین نیست که %1",
"OPERATORS_JOIN": "چسباندن %1 به %2",
"OPERATORS_JOIN_APPLE": "سیب",
"OPERATORS_JOIN_BANANA": "موز",
@@ -7787,7 +7787,7 @@ Blockly.ScratchMsgs.locales["zu"] =
"DATA_DELETEOFLIST": " susa %1 ka %2 ",
"DATA_DELETEALLOFLIST": "susa konke %1",
"DATA_INSERTATLIST": "faka u %1 ku %2 ka %3",
- "DATA_REPLACEITEMOFLIST": "replace item %1 of %2 with %3",
+ "DATA_REPLACEITEMOFLIST": "faka okunye esikhundleni %1 ku %2 ngo %3",
"DATA_ITEMOFLIST": "into %1 ye %2",
"DATA_ITEMNUMOFLIST": "into # yaku %1 ku %2",
"DATA_LENGTHOFLIST": "ubude be %1",
@@ -7832,7 +7832,7 @@ Blockly.ScratchMsgs.locales["zu"] =
"LOOKS_EFFECT_BRIGHTNESS": "ukukhanya",
"LOOKS_EFFECT_GHOST": "isipoki",
"LOOKS_CHANGEEFFECTBY": "shintsha umphumela ongu %1 ngo %2",
- "LOOKS_SETEFFECTTO": "set %1 effect to %2",
+ "LOOKS_SETEFFECTTO": "beka imiphumela ye %1 uye ku %2",
"LOOKS_CLEARGRAPHICEFFECTS": "sula imiphumela yokuqhafaza ",
"LOOKS_CHANGESIZEBY": "shintsha ubukhulu nge %1",
"LOOKS_SETSIZETO": "Lungisa ubukhulu ubuyise ku %1",
@@ -8027,7 +8027,7 @@ Blockly.ScratchMsgs.locales["zu"] =
"NEW_PROCEDURE": "Enza ibhulokisi ",
"PROCEDURE_ALREADY_EXISTS": "Inqubomgomo %1 isivele ikhona ",
"PROCEDURE_DEFAULT_NAME": "igama lebhulokisi ",
- "PROCEDURE_USED": "To delete a block definition, first remove all uses of the block",
+ "PROCEDURE_USED": "Ukuze ususe incazelo yebhulokisi, qala usule konke okusebenzile kwale bhulokisi",
"NEW_LIST": "Yenza uhlu ",
"NEW_LIST_TITLE": "Igama loluhlu olusha ",
"LIST_MODAL_TITLE": "Uhlu olusha",
@@ -12619,7 +12619,7 @@ Blockly.ScratchMsgs.locales["nb"] =
"NEW_PROCEDURE": "Lag en kloss",
"PROCEDURE_ALREADY_EXISTS": "Det finnes allerede en prosedyre som heter \"%1\".",
"PROCEDURE_DEFAULT_NAME": "klossnavn",
- "PROCEDURE_USED": "To delete a block definition, first remove all uses of the block",
+ "PROCEDURE_USED": "Klossen må slettes fra alle skript før definisjonen kan slettes.",
"NEW_LIST": "Lag en Liste",
"NEW_LIST_TITLE": "Nytt navn på listen:",
"LIST_MODAL_TITLE": "Ny liste", | [skip ci] Update translations from transifex | LLK_scratch-blocks | train |
949149149ca2764dd7a08d34d6b7c0abba7ba00d | diff --git a/Filter/ViewData/AggregateViewData.php b/Filter/ViewData/AggregateViewData.php
index <HASH>..<HASH> 100644
--- a/Filter/ViewData/AggregateViewData.php
+++ b/Filter/ViewData/AggregateViewData.php
@@ -46,4 +46,18 @@ class AggregateViewData extends ViewData
{
$this->items[] = $item;
}
+
+ /**
+ * @param callback $callback
+ */
+ public function sortItems($callback = null)
+ {
+ if ($callback === null) {
+ $callback = function ($a, $b) {
+ return strcmp($a->getName(), $b->getName());
+ };
+ }
+
+ usort($this->items, $callback);
+ }
}
diff --git a/Filter/Widget/Dynamic/DynamicAggregate.php b/Filter/Widget/Dynamic/DynamicAggregate.php
index <HASH>..<HASH> 100644
--- a/Filter/Widget/Dynamic/DynamicAggregate.php
+++ b/Filter/Widget/Dynamic/DynamicAggregate.php
@@ -168,34 +168,25 @@ class DynamicAggregate extends AbstractFilter implements ViewDataFactoryInterfac
foreach ($aggregation as $bucket) {
$name = $bucket['key'];
- if ($name != $activeName && $activeName != 'all-selected') {
+ if (($name != $activeName && $activeName != 'all-selected') ||
+ ($activeName == 'all-selected' && in_array($name, $activeNames))) {
continue;
}
- $this->addNonZeroValuesToUnsortedChoices($unsortedChoices, $activeName, $bucket, $data);
+ $this->modifyUnsortedChoices($unsortedChoices, $activeName, $bucket, $data);
+ $this->addViewDataItem($data, $name, $unsortedChoices[$name]);
+ unset($unsortedChoices[$name]);
}
}
- foreach ($unsortedChoices['all-selected'] as $name => $buckets) {
- if (in_array($name, $activeNames)) {
- continue;
+ if ($this->getShowZeroChoices() && !empty($unsortedChoices)) {
+ foreach ($unsortedChoices as $name => $choices) {
+ $this->addViewDataItem($data, $name, $unsortedChoices[$name]);
}
-
- $unsortedChoices[$name] = $buckets;
}
- unset($unsortedChoices['all-selected']);
- ksort($unsortedChoices);
-
/** @var AggregateViewData $data */
- foreach ($unsortedChoices as $name => $choices) {
- $choiceViewData = new ViewData\ChoicesAwareViewData();
- $choiceViewData->setName($name);
- $choiceViewData->setChoices($choices);
- $choiceViewData->setUrlParameters([]);
- $choiceViewData->setResetUrlParameters($data->getResetUrlParameters());
- $data->addItem($choiceViewData);
- }
+ $data->sortItems();
return $data;
}
@@ -335,7 +326,6 @@ class DynamicAggregate extends AbstractFilter implements ViewDataFactoryInterfac
$choice->setLabel($bucket['key']);
$choice->setCount(0);
$unsortedChoices[$groupName][$bucket['key']] = $choice;
- $unsortedChoices['all-selected'][$groupName][$bucket['key']] = $choice;
}
}
@@ -348,8 +338,10 @@ class DynamicAggregate extends AbstractFilter implements ViewDataFactoryInterfac
* @param AggregationValue $agg
* @param ViewData $data
*/
- protected function addNonZeroValuesToUnsortedChoices(&$unsortedChoices, $activeName, $agg, $data) {
+ protected function modifyUnsortedChoices(&$unsortedChoices, $activeName, $agg, $data)
+ {
$name = $agg['key'];
+
foreach ($agg['value']['buckets'] as $bucket) {
$active = $this->isChoiceActive($bucket['key'], $data, $activeName);
$choice = new ViewData\Choice();
@@ -361,11 +353,22 @@ class DynamicAggregate extends AbstractFilter implements ViewDataFactoryInterfac
$this->getOptionUrlParameters($bucket['key'], $name, $data, $active)
);
- if ($activeName == 'all-selected') {
- $unsortedChoices[$activeName][$name][$bucket['key']] = $choice;
- } else {
- $unsortedChoices[$activeName][$bucket['key']] = $choice;
- }
+ $unsortedChoices[$name][$bucket['key']] = $choice;
}
}
+
+ /**
+ * @param AggregateViewData $data
+ * @param string $name
+ * @param ViewData\Choice[] $choices
+ */
+ protected function addViewDataItem($data, $name, $choices)
+ {
+ $choiceViewData = new ViewData\ChoicesAwareViewData();
+ $choiceViewData->setName($name);
+ $choiceViewData->setChoices($choices);
+ $choiceViewData->setUrlParameters([]);
+ $choiceViewData->setResetUrlParameters($data->getResetUrlParameters());
+ $data->addItem($choiceViewData);
+ }
} | removed all-selected formation in unsortedChoices array | ongr-io_FilterManagerBundle | train |
c5b3bead3aae143b04f1254eb25f9ca7d3340cfc | diff --git a/simpleclient/src/main/java/io/prometheus/client/Collector.java b/simpleclient/src/main/java/io/prometheus/client/Collector.java
index <HASH>..<HASH> 100644
--- a/simpleclient/src/main/java/io/prometheus/client/Collector.java
+++ b/simpleclient/src/main/java/io/prometheus/client/Collector.java
@@ -3,12 +3,16 @@ package io.prometheus.client;
import java.util.Arrays;
import java.util.List;
+import java.util.regex.Pattern;
/**
* A collector for a set of metrics.
* <p>
* Normal users should use {@link Gauge}, {@link Counter} and {@link Summary}.
+ * <p>
* Subclasssing Collector is for advanced uses, such as proxying metrics from another monitoring system.
+ * It is it the responsibility of subclasses to ensure they produce valid metrics.
+ * @see <a href="http://prometheus.io/docs/instrumenting/exposition_formats/">Exposition formats</a>.
*/
public abstract class Collector {
/**
@@ -110,6 +114,23 @@ public abstract class Collector {
}
/**
+ * Register the Collector with the default registry.
+ */
+ public <T extends Collector> T register() {
+ return register(CollectorRegistry.defaultRegistry);
+ }
+
+ /**
+ * Register the Collector with the given registry.
+ */
+ public <T extends Collector> T register(CollectorRegistry registry) {
+ registry.register(this);
+ return (T)this;
+ }
+
+ /* Various utility functions for implementing Collectors. */
+
+ /**
* Number of nanoseconds in a second.
*/
public static final double NANOSECONDS_PER_SECOND = 1E9;
@@ -118,18 +139,28 @@ public abstract class Collector {
*/
public static final double MILLISECONDS_PER_SECOND = 1E3;
+ protected static final Pattern METRIC_NAME_RE = Pattern.compile("[a-zA-Z_:][a-zA-Z0-9_:]*");
+ protected static final Pattern METRIC_LABEL_NAME_RE = Pattern.compile("[a-zA-Z_][a-zA-Z0-9_]*");
+ protected static final Pattern RESERVED_METRIC_LABEL_NAME_RE = Pattern.compile("__.*");
+
/**
- * Register the Collector with the default registry.
+ * Throw an exception if the metric name is invalid.
*/
- public <T extends Collector> T register() {
- return register(CollectorRegistry.defaultRegistry);
+ protected static void checkMetricName(String name) {
+ if (!METRIC_NAME_RE.matcher(name).matches()) {
+ throw new IllegalArgumentException("Invalid metric name: " + name);
+ }
}
/**
- * Register the Collector with the given registry.
+ * Throw an exception if the metric label name is invalid.
*/
- public <T extends Collector> T register(CollectorRegistry registry) {
- registry.register(this);
- return (T)this;
+ protected static void checkMetricLabelName(String name) {
+ if (!METRIC_LABEL_NAME_RE.matcher(name).matches()) {
+ throw new IllegalArgumentException("Invalid metric label name: " + name);
+ }
+ if (RESERVED_METRIC_LABEL_NAME_RE.matcher(name).matches()) {
+ throw new IllegalArgumentException("Invalid metric label name, reserved for internal use: " + name);
+ }
}
}
diff --git a/simpleclient/src/main/java/io/prometheus/client/SimpleCollector.java b/simpleclient/src/main/java/io/prometheus/client/SimpleCollector.java
index <HASH>..<HASH> 100644
--- a/simpleclient/src/main/java/io/prometheus/client/SimpleCollector.java
+++ b/simpleclient/src/main/java/io/prometheus/client/SimpleCollector.java
@@ -141,10 +141,15 @@ public abstract class SimpleCollector<Child, T extends SimpleCollector> extends
name = b.namespace + '_' + name;
}
fullname = name;
+ checkMetricName(fullname);
if (b.help.isEmpty()) throw new IllegalStateException("Help hasn't been set.");
help = b.help;
labelNames = Arrays.asList(b.labelNames);
+ for (String n: labelNames) {
+ checkMetricLabelName(n);
+ }
+
// Initlize metric if it has no labels.
if (labelNames.size() == 0) {
noLabelsChild = labels();
diff --git a/simpleclient/src/test/java/io/prometheus/client/SimpleCollectorTest.java b/simpleclient/src/test/java/io/prometheus/client/SimpleCollectorTest.java
index <HASH>..<HASH> 100644
--- a/simpleclient/src/test/java/io/prometheus/client/SimpleCollectorTest.java
+++ b/simpleclient/src/test/java/io/prometheus/client/SimpleCollectorTest.java
@@ -102,6 +102,21 @@ public class SimpleCollectorTest {
Gauge.build().name("c").create();
}
+ @Test(expected=IllegalArgumentException.class)
+ public void testInvalidNameThrows() {
+ Gauge.build().name("c'a").create();
+ }
+
+ @Test(expected=IllegalArgumentException.class)
+ public void testInvalidLabelNameThrows() {
+ Gauge.build().name("a").labelNames("c:d").help("h").create();
+ }
+
+ @Test(expected=IllegalArgumentException.class)
+ public void testReservedLabelNameThrows() {
+ Gauge.build().name("a").labelNames("__name__").help("h").create();
+ }
+
@Test
public void testSetChild() {
metric.setChild(new Gauge.Child(){ | Add checks for invalid metric names and labels.
Document who's responsible for checking produced metrics are valid.
Closes #<I> | prometheus_client_java | train |
1e2c549460176a0d7cb1ac4fbd260f55cbef0f8f | diff --git a/features/bootstrap/UserSwitchingContext.php b/features/bootstrap/UserSwitchingContext.php
index <HASH>..<HASH> 100644
--- a/features/bootstrap/UserSwitchingContext.php
+++ b/features/bootstrap/UserSwitchingContext.php
@@ -1,7 +1,4 @@
<?php
-use Behat\Behat\Context\Context,
- Behat\Behat\Context\SnippetAcceptingContext,
- Behat\Gherkin\Node\TableNode;
use PaulGibbs\WordpressBehatExtension\Context\RawWordpressContext as WordPressContext;
use PaulGibbs\WordpressBehatExtension\Context\Traits\UserAwareContextTrait as UserContext;
@@ -9,7 +6,7 @@ use PaulGibbs\WordpressBehatExtension\Context\Traits\UserAwareContextTrait as Us
/**
* Defines application features from the specific context.
*/
-class UserSwitchingContext extends WordPressContext implements Context, SnippetAcceptingContext {
+class UserSwitchingContext extends WordPressContext {
use UserContext;
/** | Remove some redundant imports and interfaces. | johnbillion_user-switching | train |
26ec74886593b8172479bed5ce158efa8daae864 | diff --git a/src/Model/ModelManager.php b/src/Model/ModelManager.php
index <HASH>..<HASH> 100644
--- a/src/Model/ModelManager.php
+++ b/src/Model/ModelManager.php
@@ -18,6 +18,7 @@ use Doctrine\DBAL\Exception;
use Doctrine\DBAL\LockMode;
use Doctrine\DBAL\Platforms\AbstractPlatform;
use Doctrine\DBAL\Types\Type;
+use Doctrine\ORM\AbstractQuery;
use Doctrine\ORM\EntityManagerInterface;
use Doctrine\ORM\Mapping\ClassMetadata;
use Doctrine\ORM\OptimisticLockException;
@@ -208,7 +209,7 @@ final class ModelManager implements ModelManagerInterface, LockInterface, ProxyR
public function supportsQuery(object $query): bool
{
- return $query instanceof ProxyQuery || $query instanceof QueryBuilder;
+ return $query instanceof ProxyQuery || $query instanceof AbstractQuery || $query instanceof QueryBuilder;
}
public function executeQuery(object $query)
@@ -217,6 +218,10 @@ final class ModelManager implements ModelManagerInterface, LockInterface, ProxyR
return $query->getQuery()->execute();
}
+ if ($query instanceof AbstractQuery) {
+ return $query->execute();
+ }
+
if ($query instanceof ProxyQuery) {
/** @phpstan-var Paginator<T> $results */
$results = $query->execute();
@@ -225,10 +230,11 @@ final class ModelManager implements ModelManagerInterface, LockInterface, ProxyR
}
throw new \InvalidArgumentException(sprintf(
- 'Argument 1 passed to %s() must be an instance of %s or %s',
+ 'Argument 1 passed to %s() must be an instance of %s, %s, or %s',
__METHOD__,
QueryBuilder::class,
- ProxyQuery::class,
+ AbstractQuery::class,
+ ProxyQuery::class
));
}
diff --git a/tests/Model/ModelManagerTest.php b/tests/Model/ModelManagerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Model/ModelManagerTest.php
+++ b/tests/Model/ModelManagerTest.php
@@ -17,6 +17,7 @@ use Doctrine\DBAL\Connection;
use Doctrine\DBAL\Exception;
use Doctrine\DBAL\Platforms\AbstractPlatform;
use Doctrine\DBAL\Types\Type;
+use Doctrine\ORM\AbstractQuery;
use Doctrine\ORM\EntityManagerInterface;
use Doctrine\ORM\Mapping\ClassMetadata;
use Doctrine\ORM\OptimisticLockException;
@@ -149,6 +150,7 @@ final class ModelManagerTest extends TestCase
public function supportsQueryDataProvider(): iterable
{
yield [true, new ProxyQuery($this->createMock(QueryBuilder::class))];
+ yield [true, $this->createMock(AbstractQuery::class)];
yield [true, $this->createMock(QueryBuilder::class)];
yield [false, new \stdClass()];
} | Update ModelManager to support Doctrine\ORM\Query directly (#<I>) | sonata-project_SonataDoctrineORMAdminBundle | train |
26ac84e6246cd579ad53ceb2100dab049dd9c039 | diff --git a/lib/travis/model/repository/settings.rb b/lib/travis/model/repository/settings.rb
index <HASH>..<HASH> 100644
--- a/lib/travis/model/repository/settings.rb
+++ b/lib/travis/model/repository/settings.rb
@@ -46,7 +46,7 @@ class Repository::Settings
register :ssh_keys
- attr_accessor :collections
+ attr_accessor :collections, :settings
def initialize(repository, settings)
self.repository = repository
@@ -54,18 +54,6 @@ class Repository::Settings
initialize_collections
end
- def settings
- # TODO: this class started as a thin wrapper over hash,
- # this part could be refactored to not rely on the
- # hash, but rather on the structured data like collections
- # and fields
- collections.each do |collection|
- @settings[collection.registered_at] = collection.to_hashes unless collection.empty?
- end
-
- @settings
- end
-
def initialize_collections
self.collections = []
self.class.collections.each do |path, klass|
@@ -145,8 +133,16 @@ class Repository::Settings
end
def save
+ # TODO: this class started as a thin wrapper over hash,
+ # this part could be refactored to not rely on the
+ # hash, but rather on the structured data like collections
+ # and fields
+ collections.each do |collection|
+ @settings[collection.registered_at] = collection.to_hashes
+ end
+
repository.settings = settings.to_json
- repository.save
+ repository.save!
end
private | Save collections on #save method in settings | travis-ci_travis-core | train |
5af9b5b693c2e75decb42181c66e6e8a155117d9 | diff --git a/actions/info_test.go b/actions/info_test.go
index <HASH>..<HASH> 100644
--- a/actions/info_test.go
+++ b/actions/info_test.go
@@ -27,14 +27,14 @@ var _ = Describe("Info", func() {
})
Describe("ServerInfo", func() {
- It("returns the version of the cli and CM server, as well as auth server URL and client name", func() {
+ It("returns the version of the cli and CM server, as well as auth server URL", func() {
request := client.NewInfoRequest(cfg)
responseObj := http.Response{
StatusCode: 200,
Body: ioutil.NopCloser(bytes.NewBufferString(`{
"app":{"version":"my-version","name":"Pivotal Credential Manager"},
- "auth-server":{"url":"https://example.com","client":"credhub"}
+ "auth-server":{"url":"https://example.com"}
}`)),
}
@@ -47,7 +47,6 @@ var _ = Describe("Info", func() {
serverInfo, _ := subject.GetServerInfo()
Expect(serverInfo.App.Version).To(Equal("my-version"))
Expect(serverInfo.AuthServer.Url).To(Equal("https://example.com"))
- Expect(serverInfo.AuthServer.Client).To(Equal("credhub"))
})
It("returns error if server returned a non 200 status code", func() { | Remove bad test
[#<I>] UAA Client used by CLI should be 'credhub_cli' | cloudfoundry-incubator_credhub-cli | train |
1f632e7ec345e4d59db448ca40bf44d2f68f48ec | diff --git a/src/Core.php b/src/Core.php
index <HASH>..<HASH> 100644
--- a/src/Core.php
+++ b/src/Core.php
@@ -760,7 +760,7 @@
$f_column = $annotation->getProperty('column');
self::$_cached_entity_classes[$classname]['relations'][$property_name] = array('collection' => $collection, 'property' => $property_name, 'foreign_column' => $foreign_column, 'manytomany' => $manytomany, 'joinclass' => $joinclass, 'class' => $annotation->getProperty('class'), 'column' => $f_column, 'orderby' => $orderby);
if (!$collection) {
- if (!$column_annotation) {
+ if (!$column_annotation || !isset($column)) {
throw new Exception("The property '{$property_name}' in class '{$classname}' is missing an @Column annotation, or is improperly marked as not being a collection");
}
$column_name = $column_prefix . (($annotation->hasProperty('name')) ? $annotation->getProperty('name') : substr($property_name, 1)); | add an extra isset() to get rid of an error | thebuggenie_b2db | train |
376d71771e80fdde12f982fa8f9780160fd63b65 | diff --git a/spectator-ext-ipc/src/main/java/com/netflix/spectator/ipc/IpcLogEntry.java b/spectator-ext-ipc/src/main/java/com/netflix/spectator/ipc/IpcLogEntry.java
index <HASH>..<HASH> 100644
--- a/spectator-ext-ipc/src/main/java/com/netflix/spectator/ipc/IpcLogEntry.java
+++ b/spectator-ext-ipc/src/main/java/com/netflix/spectator/ipc/IpcLogEntry.java
@@ -610,6 +610,10 @@ public final class IpcLogEntry {
return attemptFinal;
}
+ private String getEndpoint() {
+ return (endpoint == null) ? "unknown" : endpoint;
+ }
+
private boolean isClient() {
return marker != null && "ipc-client".equals(marker.getName());
}
@@ -649,7 +653,7 @@ public final class IpcLogEntry {
}
// Optional for both client and server
- putTag(tags, IpcTagKey.endpoint.key(), endpoint);
+ putTag(tags, IpcTagKey.endpoint.key(), getEndpoint());
putTag(tags, IpcTagKey.vip.key(), vip);
putTag(tags, IpcTagKey.protocol.key(), protocol);
putTag(tags, IpcTagKey.statusDetail.key(), statusDetail);
diff --git a/spectator-ext-ipc/src/test/java/com/netflix/spectator/ipc/IpcLogEntryTest.java b/spectator-ext-ipc/src/test/java/com/netflix/spectator/ipc/IpcLogEntryTest.java
index <HASH>..<HASH> 100644
--- a/spectator-ext-ipc/src/test/java/com/netflix/spectator/ipc/IpcLogEntryTest.java
+++ b/spectator-ext-ipc/src/test/java/com/netflix/spectator/ipc/IpcLogEntryTest.java
@@ -21,6 +21,7 @@ import com.netflix.spectator.api.DefaultRegistry;
import com.netflix.spectator.api.DistributionSummary;
import com.netflix.spectator.api.ManualClock;
import com.netflix.spectator.api.Registry;
+import com.netflix.spectator.api.Utils;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@@ -680,4 +681,20 @@ public class IpcLogEntryTest {
IpcMetric.validate(registry);
}
+
+ @Test
+ public void endpointUnknownIfNotSet() {
+ Registry registry = new DefaultRegistry();
+ IpcLogger logger = new IpcLogger(registry, clock, LoggerFactory.getLogger(getClass()));
+
+ logger.createServerEntry()
+ .withOwner("test")
+ .markStart()
+ .markEnd()
+ .log();
+
+ registry.counters().forEach(c -> {
+ Assert.assertEquals("unknown", Utils.getTagValue(c.id(), "ipc.endpoint"));
+ });
+ }
}
diff --git a/spectator-ext-ipcservlet/src/test/java/com/netflix/spectator/ipcservlet/IpcServletFilterTest.java b/spectator-ext-ipcservlet/src/test/java/com/netflix/spectator/ipcservlet/IpcServletFilterTest.java
index <HASH>..<HASH> 100644
--- a/spectator-ext-ipcservlet/src/test/java/com/netflix/spectator/ipcservlet/IpcServletFilterTest.java
+++ b/spectator-ext-ipcservlet/src/test/java/com/netflix/spectator/ipcservlet/IpcServletFilterTest.java
@@ -120,7 +120,7 @@ public class IpcServletFilterTest {
checkClientErrorReason(registry, "HTTP_500");
checkServerErrorReason(registry, "RuntimeException");
checkMethod(registry, "GET");
- checkClientEndpoint(registry, null);
+ checkClientEndpoint(registry, "unknown");
checkServerEndpoint(registry, "/throw");
IpcMetric.validate(registry);
} | set endpoint to unknown if not specified (#<I>)
For some dashboard use-cases the view is based on a foreach
of the endpoint values. If the endpoint is not set, then
those requests are missing. | Netflix_spectator | train |
39c5089136cc0b954b25e6cdb29143a284691bdb | diff --git a/app/controllers/contact_form_controller.rb b/app/controllers/contact_form_controller.rb
index <HASH>..<HASH> 100644
--- a/app/controllers/contact_form_controller.rb
+++ b/app/controllers/contact_form_controller.rb
@@ -16,8 +16,8 @@ class ContactFormController < ApplicationController
flash[:error] << @contact_form.errors.full_messages.map { |s| s.to_s }.join(",")
render :new
end
- rescue ScriptError
- flash[:error] = 'Sorry, this message appears to be spam and was not delivered.'
+ rescue
+ flash[:error] = 'Sorry, this message was not delivered.'
render :new
end
diff --git a/app/models/contact_form.rb b/app/models/contact_form.rb
index <HASH>..<HASH> 100644
--- a/app/models/contact_form.rb
+++ b/app/models/contact_form.rb
@@ -20,7 +20,7 @@ class ContactForm < MailForm::Base
{
:subject => "Contact Form:#{subject}",
:to => ScholarSphere::Application.config.contact_email,
- :from => %("#{name}" <#{email}>)
+ :from => "ScholarSphere Form <[email protected]>"
}
end
end | updating the address for the from on email sending and now rescuing any errors on send (instead of only spam). refs #<I> | samvera_hyrax | train |
89f67f7c50fd98b75b23b1efa4d804255b1342f1 | diff --git a/PhealFileCacheForced.php b/PhealFileCacheForced.php
index <HASH>..<HASH> 100644
--- a/PhealFileCacheForced.php
+++ b/PhealFileCacheForced.php
@@ -26,7 +26,7 @@
*/
/**
* Filecache which always validates a cached file to be valid
- * for example for tests
+ * for example and for tests
*/
class PhealFileCacheForced extends PhealFileCache
{ | Update PhealFileCacheForced.php | 3rdpartyeve_phealng | train |
2930a02640dbab768ab0756ab05167353dae0cb0 | diff --git a/core-parent/core/src/main/java/fr/javatronic/damapping/processor/validator/DASourceClassValidatorImpl.java b/core-parent/core/src/main/java/fr/javatronic/damapping/processor/validator/DASourceClassValidatorImpl.java
index <HASH>..<HASH> 100644
--- a/core-parent/core/src/main/java/fr/javatronic/damapping/processor/validator/DASourceClassValidatorImpl.java
+++ b/core-parent/core/src/main/java/fr/javatronic/damapping/processor/validator/DASourceClassValidatorImpl.java
@@ -43,6 +43,7 @@ public class DASourceClassValidatorImpl implements DASourceClassValidator {
new ConstructorValidationStep(),
new EnumValidationStep(),
new GenericParameterValidationStep(),
+ new GenericParameterValidationStep(),
new MapperDependencyOnMapperFactoryValidationStep(),
new MapperDependencyConsistencyValidationStep(),
new MapperFactoryMethodErasureValidationStep() | [#<I>] add Validation that dedicated class has no generic parameter | lesaint_damapping | train |
fb57994dbc366e7d4b666ea6c6db489df73007fc | diff --git a/lib/oneview-sdk/client.rb b/lib/oneview-sdk/client.rb
index <HASH>..<HASH> 100644
--- a/lib/oneview-sdk/client.rb
+++ b/lib/oneview-sdk/client.rb
@@ -51,7 +51,7 @@ module OneviewSDK
if options[:api_version] && options[:api_version].to_i > @max_api_version
logger.warn "API version #{options[:api_version]} is greater than the appliance API version (#{@max_api_version})"
end
- @api_version = options[:api_version] || [OneviewSDK::DEFAULT_API_VERSION, @max_api_version].min
+ @api_version = options[:api_version] || @max_api_version
# Set the default OneviewSDK module API version
OneviewSDK.api_version = @api_version unless OneviewSDK.api_version_updated? || !OneviewSDK::SUPPORTED_API_VERSIONS.include?(@api_version)
@ssl_enabled = true
diff --git a/spec/shared_context.rb b/spec/shared_context.rb
index <HASH>..<HASH> 100644
--- a/spec/shared_context.rb
+++ b/spec/shared_context.rb
@@ -25,6 +25,7 @@ RSpec.shared_context 'cli context', a: :b do
ENV['ONEVIEWSDK_USER'] = 'Admin'
ENV['ONEVIEWSDK_TOKEN'] = 'secret456'
ENV['I3S_URL'] = 'https://i3s.example.com'
+ ENV['ONEVIEWSDK_VARIANT'] = 'C7000'
end
end
diff --git a/spec/unit/client_spec.rb b/spec/unit/client_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/client_spec.rb
+++ b/spec/unit/client_spec.rb
@@ -372,7 +372,7 @@ RSpec.describe OneviewSDK::Client do
options = { url: 'https://oneview.example.com', token: 'token123' }
client = OneviewSDK::Client.new(options)
expect(client.token).to eq('token123')
- expect(client.api_version).to eq(200)
+ expect(client.api_version).to eq(1800)
i3s_options = { url: 'https://imagestreamer.example.com', api_version: 300 }
i3s_client = client.new_i3s_client(i3s_options)
diff --git a/spec/unit/oneview_sdk_spec.rb b/spec/unit/oneview_sdk_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/oneview_sdk_spec.rb
+++ b/spec/unit/oneview_sdk_spec.rb
@@ -14,7 +14,7 @@ RSpec.describe OneviewSDK do
it 'raises an error when an invalid API300 version is called' do
expect { OneviewSDK::API999 }.to raise_error(NameError,
- 'The API999 method or resource does not exist for OneView API version 200.')
+ 'The API999 method or resource does not exist for OneView API version 1800.')
end
it 'has a default api version' do | Refactored method to set default | HewlettPackard_oneview-sdk-ruby | train |
3ad3375777b431a559b624d359c9af667de319f1 | diff --git a/src/Psalm/Internal/Analyzer/Statements/Expression/BinaryOp/AndAnalyzer.php b/src/Psalm/Internal/Analyzer/Statements/Expression/BinaryOp/AndAnalyzer.php
index <HASH>..<HASH> 100644
--- a/src/Psalm/Internal/Analyzer/Statements/Expression/BinaryOp/AndAnalyzer.php
+++ b/src/Psalm/Internal/Analyzer/Statements/Expression/BinaryOp/AndAnalyzer.php
@@ -135,7 +135,7 @@ class AndAnalyzer
$changed_var_ids,
$left_referenced_var_ids,
$statements_analyzer,
- [],
+ $statements_analyzer->getTemplateTypeMap() ?: [],
$context->inside_loop,
new CodeLocation($statements_analyzer->getSource(), $stmt->left),
$context->inside_negation
diff --git a/tests/TypeReconciliation/ConditionalTest.php b/tests/TypeReconciliation/ConditionalTest.php
index <HASH>..<HASH> 100644
--- a/tests/TypeReconciliation/ConditionalTest.php
+++ b/tests/TypeReconciliation/ConditionalTest.php
@@ -2626,6 +2626,46 @@ class ConditionalTest extends \Psalm\Tests\TestCase
isValid($val) ? $takesValid($val) : $takesInvalid($val);
}'
],
+ 'reconcileMoreThanOneGenericObject' => [
+ '<?php
+
+ final class Invalid {}
+
+ /**
+ * @template T
+ */
+ final class Valid {}
+
+ /**
+ * @template T
+ *
+ * @param Invalid|Valid<T> $val
+ * @psalm-assert-if-true Valid<T> $val
+ */
+ function isValid($val): bool
+ {
+ return $val instanceof Valid;
+ }
+
+ /**
+ * @template T
+ * @param Valid<T>|Invalid $val1
+ * @param Valid<T>|Invalid $val2
+ * @param Valid<T>|Invalid $val3
+ */
+ function inGenericContext($val1, $val2, $val3): void
+ {
+ $takesValid =
+ /** @param Valid<T> $_valid */
+ function ($_valid): void {};
+
+ if (isValid($val1) && isValid($val2) && isValid($val3)) {
+ $takesValid($val1);
+ $takesValid($val2);
+ $takesValid($val3);
+ }
+ }'
+ ]
];
} | Fix generic destruction in assertion (#<I>) | vimeo_psalm | train |
d90156a75342f5165e7cdb1a76ac591b9aaaf10e | diff --git a/lib/finite_machine/transition.rb b/lib/finite_machine/transition.rb
index <HASH>..<HASH> 100644
--- a/lib/finite_machine/transition.rb
+++ b/lib/finite_machine/transition.rb
@@ -64,7 +64,7 @@ module FiniteMachine
#
# @api public
def self.create(machine, attrs = {})
- _transition = self.new(machine, attrs)
+ _transition = new(machine, attrs)
_transition.update_transitions
_transition.define_state_methods
_transition.define_event
@@ -76,8 +76,13 @@ module FiniteMachine
# @return [Symbol]
#
# @api public
- def to_state
- machine.transitions[name][from_state]
+ def to_state(*args)
+ if machine.transitions[name][from_state].is_a? Array
+ found_trans = machine.events_chain[name].find_transition(*args)
+ found_trans.map[from_state]
+ else
+ machine.transitions[name][from_state]
+ end
end
# Reduce conditions | Add ability to check to_state for branching condition. | piotrmurach_finite_machine | train |
cea99cba12dd0da44a28f4369ed8d38dbc0768ee | diff --git a/XPathFunctions/getConceptDataTypeDerivedFrom.php b/XPathFunctions/getConceptDataTypeDerivedFrom.php
index <HASH>..<HASH> 100644
--- a/XPathFunctions/getConceptDataTypeDerivedFrom.php
+++ b/XPathFunctions/getConceptDataTypeDerivedFrom.php
@@ -82,8 +82,14 @@ function getConceptDataTypeDerivedFrom( $context, $provider, $args )
{
throw new \InvalidArgumentException( "XBRL taxonomy in context not valid" );
}
+ $argTaxonomy = $taxonomy->getTaxonomyForNamespace($args[0]->NamespaceUri);
+ if ( ! $argTaxonomy )
+ {
+ throw new \InvalidArgumentException( "XBRL taxonomy in context not valid" );
+ }
+
+ $taxonomyElement = $argTaxonomy->getElementByName( $args[0]->LocalName );
- $taxonomyElement = $taxonomy->getElementByName( $args[0]->LocalName );
if ( ! $taxonomyElement || ! isset( $taxonomyElement['substitutionGroup'] ) || empty( $taxonomyElement['substitutionGroup'] ) )
{
throw XPath2Exception::withErrorCode( "xfie:invalidConceptQName", "Not a valid XBRL concept: does not have a substitution group" );
@@ -108,7 +114,7 @@ function getConceptDataTypeDerivedFrom( $context, $provider, $args )
{
throw new \InvalidArgumentException( "A schema for '{$baseType->NamespaceUri}' does not exist" );
}
- $type->Prefix = $taxonomy->getPrefix();
+ $baseType->Prefix = $taxonomy->getPrefix();
}
    $baseTypeName = (string)$baseType; | Fixed an issue where the function did not use the correct taxonomy to
locate an element. | bseddon_XBRL | train |
79de10216e7b8c8ad339898f693b39c28dcd8bf3 | diff --git a/lib/paper_trail/model_config.rb b/lib/paper_trail/model_config.rb
index <HASH>..<HASH> 100644
--- a/lib/paper_trail/model_config.rb
+++ b/lib/paper_trail/model_config.rb
@@ -87,7 +87,10 @@ module PaperTrail
@model_class.send(
"#{recording_order}_destroy",
- ->(r) { r.paper_trail.record_destroy if r.paper_trail.save_version? }
+ lambda do |r|
+ return unless r.paper_trail.save_version?
+ r.paper_trail.record_destroy(recording_order)
+ end
)
return if @model_class.paper_trail_options[:on].include?(:destroy)
diff --git a/lib/paper_trail/record_trail.rb b/lib/paper_trail/record_trail.rb
index <HASH>..<HASH> 100644
--- a/lib/paper_trail/record_trail.rb
+++ b/lib/paper_trail/record_trail.rb
@@ -257,7 +257,11 @@ module PaperTrail
merge_metadata_into(data)
end
- def record_destroy
+ # `recording_order` is "after" or "before". See ModelConfig#on_destroy.
+ #
+ # @api private
+ def record_destroy(recording_order)
+ @in_after_callback = recording_order == "after"
if enabled? && [email protected]_record?
version = @record.class.paper_trail.version_class.create(data_for_destroy)
if version.errors.any?
@@ -269,6 +273,8 @@ module PaperTrail
save_associations(version)
end
end
+ ensure
+ @in_after_callback = false
end
# Returns data for record destroy | Set @in_after_callback correctly during record_destroy
A destroy event can be handled either in a before-callback
or an after-callback. It's configurable.
However, we were not setting @in_after_callback accordingly.
I'm not aware of a specific bug caused by failing to set
this instance variable correctly, but if the instance variable
is a lie, it's easy to imagine such a bug now or in the future. | paper-trail-gem_paper_trail | train |
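
The commit message above describes setting a flag for the duration of one call and guaranteeing its reset in an `ensure` block. A minimal Python sketch of the same set-flag-then-reset-in-finally pattern (class and method names are illustrative, not paper_trail's actual API):

    class RecordTrail:
        def __init__(self):
            self.in_after_callback = False

        def record_destroy(self, recording_order):
            # The flag must be truthful for the whole body and is reset
            # even if persisting the version raises, mirroring Ruby's ensure.
            self.in_after_callback = (recording_order == "after")
            try:
                self._persist_version()
            finally:
                self.in_after_callback = False

        def _persist_version(self):
            pass  # stand-in for the real version-writing work
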
c574a1d26709c9f934ef749deb62cf221906ac4c | diff --git a/denovonear/load_gene.py b/denovonear/load_gene.py
index <HASH>..<HASH> 100755
--- a/denovonear/load_gene.py
+++ b/denovonear/load_gene.py
@@ -183,8 +183,7 @@ def minimise_transcripts(transcripts, de_novos):
# find the transcripts with the greatest length, and the most de novos
max_length = max( counts[k]["len"] for k in max_ids )
- tx_ids = [ k.get_name() for k in transcripts if counts[k.get_name()]["len"] == max_length ]
- max_transcripts = {x: counts[x] for x in counts if x in tx_ids}
+ max_transcripts = {x: counts[x] for x in counts if x in max_ids and counts[x]['len'] == max_length}
# find which de novos occur in the transcript with the most de novos
best = [x for x in transcripts if x.get_name() == next(iter(max_transcripts))][0] | fix bug picking transcript with most de novos and longest CDS | jeremymcrae_denovonear | train |
e4e230f1b5dc6ce4526787a69e14683131b996bb | diff --git a/lib/jar_dependencies.rb b/lib/jar_dependencies.rb
index <HASH>..<HASH> 100644
--- a/lib/jar_dependencies.rb
+++ b/lib/jar_dependencies.rb
@@ -214,6 +214,7 @@ module Jars
end
urls = jars_lock_from_class_loader
end
+ no_more_warnings
elsif jars_lock = Jars.lock_path
Jars.debug { "--- load jars from #{jars_lock}" }
@@jars_lock = jars_lock
@@ -222,8 +223,8 @@ module Jars
require 'jars/classpath' unless defined? Jars::Classpath
classpath = Jars::Classpath.new( nil, jars_lock )
classpath.require( scope )
+ no_more_warnings
end
- no_more_warnings
Jars.debug {
loaded = @@jars.collect{ |k,v| "#{k}:#{v}" }
"--- loaded jars ---\n\t#{loaded.join("\n\t")}"
diff --git a/lib/jars/version.rb b/lib/jars/version.rb
index <HASH>..<HASH> 100644
--- a/lib/jars/version.rb
+++ b/lib/jars/version.rb
@@ -1,5 +1,5 @@
module Jars
- VERSION = '0.2.4'.freeze
+ VERSION = '0.2.5'.freeze
JRUBY_PLUGINS_VERSION = '1.1.3'.freeze
DEPENDENCY_PLUGIN_VERSION = '2.8'.freeze
end | get the old output back and bump to the next version | mkristian_jar-dependencies | train
df0acc8f380860e379eafa9408d0fe82ccd60876 | diff --git a/benchexec/container.py b/benchexec/container.py
index <HASH>..<HASH> 100644
--- a/benchexec/container.py
+++ b/benchexec/container.py
@@ -799,7 +799,11 @@ def setup_seccomp_filter():
logging.info("Could not enable seccomp filter for container isolation: %s", e)
-_ALL_SIGNALS = range(1, signal.NSIG)
+try:
+ _ALL_SIGNALS = signal.valid_signals()
+except AttributeError:
+ # Only exists on Python 3.8+
+ _ALL_SIGNALS = range(1, signal.NSIG)
_FORWARDABLE_SIGNALS = set(range(1, 32)).difference(
[signal.SIGKILL, signal.SIGSTOP, signal.SIGCHLD]
) | Avoid warning about invalid signals on Python <I> | sosy-lab_benchexec | train |
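
The commit above feature-detects `signal.valid_signals()`, which exists only on Python 3.8+, and falls back to the full numeric range on older interpreters. A runnable sketch of the same try/except fallback:

    import signal

    try:
        # Python 3.8+ exposes the exact set of signals valid on this platform
        all_signals = signal.valid_signals()
    except AttributeError:
        # Older interpreters: approximate with the full numeric range
        all_signals = set(range(1, signal.NSIG))

    print(sorted(int(s) for s in all_signals))
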
ab0133ada03ae2b0ef6288b282340daea14f3e81 | diff --git a/yowsup/layers/coder/decoder.py b/yowsup/layers/coder/decoder.py
index <HASH>..<HASH> 100644
--- a/yowsup/layers/coder/decoder.py
+++ b/yowsup/layers/coder/decoder.py
@@ -239,7 +239,7 @@ class ReadDecoder:
if size == 0 or tag is None:
raise ValueError("nextTree sees 0 list or null tag")
- attribCount = (size - 2 + size % 2)/2;
+ attribCount = (size - 2 + size % 2)/2
attribs = self.readAttributes(attribCount, data)
if size % 2 ==1:
return ProtocolTreeNode(tag, attribs)
@@ -264,7 +264,7 @@ class ReadDecoder:
else:
nodeData = self.readString(read2, data)
- if nodeData:
+ if nodeData and type(nodeData) is not str:
nodeData = "".join(map(chr, nodeData))
return ProtocolTreeNode(tag, attribs, nodeChildren, nodeData) | Fixed error when node data is string, closes #<I> | tgalal_yowsup | train |
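
The added guard skips re-decoding node data that is already text: in Python 3, iterating bytes yields ints that `chr()` can rebuild into a string, while applying `chr()` to the characters of a `str` raises TypeError. A small illustration (function name hypothetical):

    def to_text(node_data):
        # Byte sequences iterate as ints, so chr() rebuilds the text;
        # data that is already str must pass through untouched.
        if node_data and not isinstance(node_data, str):
            return "".join(map(chr, node_data))
        return node_data

    assert to_text(b"hi") == "hi"   # bytes iterate as ints in Python 3
    assert to_text("hi") == "hi"    # str passes through unchanged
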
641aaf6eb1701e67f284dacacf7d7aedd055cde6 | diff --git a/pyeapi/client.py b/pyeapi/client.py
index <HASH>..<HASH> 100644
--- a/pyeapi/client.py
+++ b/pyeapi/client.py
@@ -142,7 +142,8 @@ class Config(SafeConfigParser):
@property
def connections(self):
- """ Returns all of the loaded connections names as a list
+ """
+ Returns all of the loaded connections names as a list
"""
conn = lambda x: str(x).replace('connection:', '')
return [conn(name) for name in self.sections()]
@@ -286,6 +287,8 @@ class Config(SafeConfigParser):
self.set(name, key, value)
self.generate_tags()
+# TODO: This is a global variable (in the module) - to review the impact on
+# having a shared state for the config file.
config = Config()
def load_config(filename):
diff --git a/pyeapi/eapilib.py b/pyeapi/eapilib.py
index <HASH>..<HASH> 100644
--- a/pyeapi/eapilib.py
+++ b/pyeapi/eapilib.py
@@ -46,7 +46,7 @@ import ssl
from http.client import HTTPConnection, HTTPSConnection
from pyeapi.utils import debug, make_iterable
-LOGGER = logging.getLogger(__name__)
+_LOGGER = logging.getLogger(__name__)
DEFAULT_HTTP_PORT = 80
DEFAULT_HTTPS_PORT = 443
@@ -227,7 +227,7 @@ class EapiConnection(object):
_auth = _auth_bin.decode()
_auth = _auth.replace('\n', '')
self._auth = _auth
- LOGGER.debug('Autentication string is: {}'.format(_auth))
+ _LOGGER.debug('Autentication string is: {}'.format(_auth))
def request(self, commands, encoding=None, reqid=None):
"""Generates an eAPI request object
@@ -333,7 +333,7 @@ class EapiConnection(object):
code and error message from the eAPI response.
"""
try:
- LOGGER.debug("Request content: {}".format(data))
+ _LOGGER.debug("Request content: {}".format(data))
debug('eapi_request: %s' % data)
self.transport.putrequest('POST', '/command-api')
@@ -351,10 +351,10 @@ class EapiConnection(object):
response = self.transport.getresponse()
response_content = response.read()
response_content = response_content.decode() # PY2/3 bytes/str conversion
- LOGGER.debug("Response: status: {status}, reason: {reason}".format(
+ _LOGGER.debug("Response: status: {status}, reason: {reason}".format(
status=response.status,
reason=response.reason))
- LOGGER.debug("Response content: {}".format(response_content))
+ _LOGGER.debug("Response content: {}".format(response_content))
decoded = json.loads(response_content)
debug('eapi_response: %s' % decoded)
@@ -365,7 +365,7 @@ class EapiConnection(object):
return decoded
except (socket.error, ValueError) as exc:
- LOGGER.exception(exc)
+ _LOGGER.exception(exc)
self.error = exc
raise ConnectionError(str(self), 'unable to connect to eAPI')
diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -8,8 +8,8 @@ here = path.abspath(path.dirname(__file__))
with open(path.join(here, 'docs/description.rst'), encoding='utf-8') as f:
long_description = f.read()
-with open(path.join(here, 'VERSION')) as version_file:
- version = version_file.read().strip().decode()
+with open(path.join(here, 'VERSION'), mode='rt', encoding='utf-8') as version_file:
+ version = version_file.read().strip()
setup(
name='pyeapi-py3', | Refactoring and fixing minor issues. | arista-eosplus_pyeapi | train |
cdad6be2cae67f537d5d5b4c24c674fb4567183a | diff --git a/spec/lib/simple_navigation/adapters/rails_spec.rb b/spec/lib/simple_navigation/adapters/rails_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/lib/simple_navigation/adapters/rails_spec.rb
+++ b/spec/lib/simple_navigation/adapters/rails_spec.rb
@@ -4,7 +4,7 @@ module SimpleNavigation
module Adapters
describe Rails do
let(:action_controller) { ActionController::Base }
- let(:adapter) { SimpleNavigation::Adapters::Rails.new(context) }
+ let(:adapter) { Rails.new(context) }
let(:context) { double(:context, controller: controller) }
let(:controller) { double(:controller) }
      let(:request) { double(:request) } | Removing unnecessary namespacing in Rails adapter spec | codeplant_simple-navigation | train
ac8e49a1c340793100a4a89920ecc90714d2aeab | diff --git a/lib/usmu/version.rb b/lib/usmu/version.rb
index <HASH>..<HASH> 100644
--- a/lib/usmu/version.rb
+++ b/lib/usmu/version.rb
@@ -1,5 +1,5 @@
module Usmu
# The current version string for the gem
- VERSION = '1.2.1'
+ VERSION = '1.3.0.dev'
end | Bump to <I>.dev | usmu_usmu | train |
99fe9f4bccc666331c816a895aad18b74f092b77 | diff --git a/lib/connect/Connection.js b/lib/connect/Connection.js
index <HASH>..<HASH> 100644
--- a/lib/connect/Connection.js
+++ b/lib/connect/Connection.js
@@ -77,6 +77,15 @@ class Connection extends EventEmitter {
}
/**
+ * Returns connection is in auto commit mode or not
+ *
+ * @return {Boolean}
+ */
+ get autoCommit() {
+ return this._client && this._client.autoCommit;
+ }
+
+ /**
* This call Executes a query
*
* @param {String|Query} query
diff --git a/lib/connect/Pool.js b/lib/connect/Pool.js
index <HASH>..<HASH> 100644
--- a/lib/connect/Pool.js
+++ b/lib/connect/Pool.js
@@ -63,9 +63,7 @@ class Pool extends LPool {
if (adapter.paramType !== undefined)
cfg.paramType = adapter.paramType;
- /* Lightning pool arguments */
- const options = config.pool || {};
- options.resetOnReturn = true;
+ /* Create object pool */
const factory = {
create: () => adapter.createConnection(),
destroy: (client) => {
@@ -79,7 +77,9 @@ class Pool extends LPool {
return client.test();
}
};
- super(factory, options);
+ const poolOptions = config.pool || {};
+ poolOptions.resetOnReturn = true;
+ super(factory, poolOptions);
this.config = cfg;
}
@@ -103,11 +103,19 @@ class Pool extends LPool {
/**
* This method obtains a connection from the connection pool.
- * @param {Function} sessionFn
+ * @param {Object} [options]
+ * @param {Function} [sessionFn]
* @return {Promise}
*/
- acquire(sessionFn) {
+ acquire(options, sessionFn) {
debug('acquire');
+
+ if (typeof options === 'function') {
+ sessionFn = options;
+ options = null;
+ }
+ options = options || {};
+
if (sessionFn && typeof sessionFn !== 'function')
return Promise.reject(new ArgumentError('`sessionFn` argument can be only a function'));
@@ -118,6 +126,8 @@ class Pool extends LPool {
return reject(err);
// Create new connection
const connection = new Connection(this, client);
+ client.autoCommit = options.autoCommit != null ? options.autoCommit :
+ (this.config.defaults && this.config.defaults.autoCommit);
connection.once('closing', () => {
super.release(client).then(() => connection.emitSafe('close'));
});
diff --git a/lib/connect/PreparedQuery.js b/lib/connect/PreparedQuery.js
index <HASH>..<HASH> 100644
--- a/lib/connect/PreparedQuery.js
+++ b/lib/connect/PreparedQuery.js
@@ -41,7 +41,7 @@ class PreparedQuery {
this.options = {
autoCommit: options.autoCommit != null ? options.autoCommit :
- defaults.autoCommit,
+ connection.autoCommit,
cursor: options.cursor != null ? options.cursor : defaults.cursor,
objectRows: options.objectRows != null ? options.objectRows :
(defaults.objectRows != null ? defaults.objectRows : true), | [+] Can set autoCommit setting while acquiring connection | sqbjs_sqb | train |
c7f89f90c16aaf35f30fe6688a36da07ba983772 | diff --git a/spec/authority/authorizer_spec.rb b/spec/authority/authorizer_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/authority/authorizer_spec.rb
+++ b/spec/authority/authorizer_spec.rb
@@ -88,14 +88,14 @@ describe Authority::Authorizer do
describe "the default method" do
- describe "if given an options hash" do
+ context "when given an options hash" do
it "returns false" do
expect(Authority::Authorizer.default(:implodable, @user, {:for => "my_object"})).to be_false
end
end
- describe "if not given an options hash" do
+ context "when not given an options hash" do
it "returns false" do
expect(Authority::Authorizer.default(:implodable, @user)).to be_false | Improve readability of spec output
[ci skip] | nathanl_authority | train |
8cd3cb043b130b5a590e38c0b8be4d8cde654328 | diff --git a/middleware/logger.go b/middleware/logger.go
index <HASH>..<HASH> 100644
--- a/middleware/logger.go
+++ b/middleware/logger.go
@@ -54,7 +54,7 @@ type (
template *fasttemplate.Template
colorer *color.Color
- pool sync.Pool
+ pool *sync.Pool
}
)
@@ -93,7 +93,7 @@ func LoggerWithConfig(config LoggerConfig) echo.MiddlewareFunc {
config.template = fasttemplate.New(config.Format, "${", "}")
config.colorer = color.New()
config.colorer.SetOutput(config.Output)
- config.pool = sync.Pool{
+ config.pool = &sync.Pool{
New: func() interface{} {
return bytes.NewBuffer(make([]byte, 256))
}, | logger: Don't copy lock value in LoggerConfig.pool, use a pointer (#<I>)
Fixes #<I> `go vet` failures | labstack_echo | train |
189d313929230204979a5bf7d5a1fdb3573fede1 | diff --git a/Kwf/Component/Data/Root.php b/Kwf/Component/Data/Root.php
index <HASH>..<HASH> 100644
--- a/Kwf/Component/Data/Root.php
+++ b/Kwf/Component/Data/Root.php
@@ -379,9 +379,7 @@ class Kwf_Component_Data_Root extends Kwf_Component_Data
public function getComponentsBySameClass($lookingForChildClasses, $select = array())
{
- if (!is_array($lookingForChildClasses) &&
- is_instance_of($lookingForChildClasses, 'Kwc_Root_Abstract')
- ) {
+ if (!is_array($lookingForChildClasses) && $lookingForChildClasses == $this->componentClass) {
return array($this);
} | fix getComponentsByClass when looking for root componentClass
and root doesn't inherit Kwc_Root_Abstract (this happens for tests) | koala-framework_koala-framework | train |
43a9563a05854c7c9f3bf65ab395302e56471de3 | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -1939,26 +1939,34 @@ Use [warden-oauth2](https://github.com/opperator/warden-oauth2) or [rack-oauth2]
## Describing and Inspecting an API
-Grape routes can be reflected at runtime. This can notably be useful for generating
-documentation.
+Grape routes can be reflected at runtime. This can notably be useful for generating documentation.
-Grape exposes arrays of API versions and compiled routes. Each route
-contains a `route_prefix`, `route_version`, `route_namespace`, `route_method`,
-`route_path` and `route_params`. The description and the optional hash that
-follows the API path may contain any number of keys and its values are also
-accessible via dynamically-generated `route_[name]` functions.
+Grape exposes arrays of API versions and compiled routes. Each route contains a `route_prefix`, `route_version`, `route_namespace`, `route_method`, `route_path` and `route_params`. You can add custom route settings to the route metadata with `route_setting`.
+
+```ruby
+class TwitterAPI < Grape::API
+ version 'v1'
+ desc "Includes custom settings."
+ route_setting :custom, key: 'value'
+ get do
+
+ end
+end
+```
+
+Examine the routes at runtime.
```ruby
TwitterAPI::versions # yields [ 'v1', 'v2' ]
TwitterAPI::routes # yields an array of Grape::Route objects
-TwitterAPI::routes[0].route_version # yields 'v1'
-TwitterAPI::routes[0].route_description # etc.
+TwitterAPI::routes[0].route_version # => 'v1'
+TwitterAPI::routes[0].route_description # => 'Includes custom settings.'
+TwitterAPI::routes[0].route_settings[:custom] # => { key: 'value' }
```
## Current Route and Endpoint
-It's possible to retrieve the information about the current route from within an API
-call with `route`.
+It's possible to retrieve the information about the current route from within an API call with `route`.
```ruby
class MyAPI < Grape::API
diff --git a/lib/grape/endpoint.rb b/lib/grape/endpoint.rb
index <HASH>..<HASH> 100644
--- a/lib/grape/endpoint.rb
+++ b/lib/grape/endpoint.rb
@@ -158,7 +158,8 @@ module Grape
method: request_method,
path: prepared_path,
params: prepare_routes_path_params(path),
- compiled: path
+ compiled: path,
+ settings: inheritable_setting.route.except(:saved_declared_params, :saved_validations)
))
end
end.flatten
diff --git a/spec/grape/api_spec.rb b/spec/grape/api_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/grape/api_spec.rb
+++ b/spec/grape/api_spec.rb
@@ -1962,6 +1962,17 @@ describe Grape::API do
]
end
end
+ describe 'api with a custom route setting' do
+ before(:each) do
+ subject.route_setting :custom, key: 'value'
+ subject.get 'one'
+ end
+ it 'exposed' do
+ expect(subject.routes.count).to eq 1
+ route = subject.routes.first
+ expect(route.route_settings[:custom]).to eq(key: 'value')
+ end
+ end
end
context 'desc' do | Exposed route_settings for each route's metadata. | ruby-grape_grape | train |
f59fcbf0071aa1d1e77f6d2f4992b3a185327534 | diff --git a/muda/core.py b/muda/core.py
index <HASH>..<HASH> 100644
--- a/muda/core.py
+++ b/muda/core.py
@@ -5,6 +5,7 @@
import jams
import librosa
+import pysoundfile as psf
from .base import *
import warnings
@@ -92,3 +93,32 @@ def load_jam_audio(jam_in, audio_file, **kwargs):
y, sr = librosa.load(audio_file, **kwargs)
return jam_pack(jam, y=y, sr=sr)
+
+
+def save(filename_audio, filename_jam, jam, strict=True, **kwargs):
+ '''Save a muda jam to disk
+
+ Parameters
+ ----------
+ filename_audio: str
+ The path to store the audio file
+
+ filename_jam: str
+ The path to store the jams object
+
+ strict: bool
+ Strict safety checking for jams output
+
+ kwargs
+ Additional parameters to `pysoundfile.write`
+
+ '''
+
+ y = jam.sandbox.muda.pop('y')
+ sr = jam.sandbox.muda.pop('sr')
+
+ # First, dump the audio file
+ psf.write(y, filename_audio, sr, **kwargs)
+
+ # Then dump the jam
+ jam.save(filename_jam, strict=strict)
diff --git a/muda/deformers/effects.py b/muda/deformers/effects.py
index <HASH>..<HASH> 100644
--- a/muda/deformers/effects.py
+++ b/muda/deformers/effects.py
@@ -43,7 +43,7 @@ class Resample(BaseTransformer):
state = BaseTransformer.get_state(self, jam)
if not len(self._state):
- state['rates'] = self.rates
+ state['rates'] = list(self.rates)
state['index'] = 0
else:
state.update(self._state)
diff --git a/muda/deformers/pitch.py b/muda/deformers/pitch.py
index <HASH>..<HASH> 100644
--- a/muda/deformers/pitch.py
+++ b/muda/deformers/pitch.py
@@ -185,7 +185,7 @@ class LinearPitchShift(AbstractPitchShift):
num=self.n_samples,
endpoint=True)
- state['shifts'] = shifts
+ state['shifts'] = list(shifts)
state['index'] = 0
else:
diff --git a/muda/deformers/time.py b/muda/deformers/time.py
index <HASH>..<HASH> 100644
--- a/muda/deformers/time.py
+++ b/muda/deformers/time.py
@@ -112,7 +112,7 @@ class LogspaceTimeStretch(AbstractTimeStretch):
num=self.n_samples,
endpoint=True)
- state['times'] = times
+ state['times'] = list(times)
state['index'] = 0
else:
@@ -155,9 +155,9 @@ class RandomTimeStretch(AbstractTimeStretch):
from the stretch distribution.
'''
- return dict(rate=np.random.lognormal(mean=self.location,
+ return dict(rate=list(np.random.lognormal(mean=self.location,
sigma=self.scale,
- size=None))
+ size=None)))
class AnnotationBlur(BaseTransformer):
@@ -288,10 +288,10 @@ class Splitter(BaseTransformer):
state['duration'] = librosa.get_duration(y=mudabox['y'],
sr=mudabox['sr'])
- state['offset'] = np.arange(start=0,
- stop=(state['duration'] -
- self.min_duration),
- step=self.stride)
+ state['offset'] = list(np.arange(start=0,
+ stop=(state['duration'] -
+ self.min_duration),
+ step=self.stride))
state['index'] = 0
self.n_samples = len(state['offset']) | made state objects serializable. added dumper. | bmcfee_muda | train |
a55f98a20f4a4fba022485e0ee172790dfe2ec3e | diff --git a/lib/commands.js b/lib/commands.js
index <HASH>..<HASH> 100644
--- a/lib/commands.js
+++ b/lib/commands.js
@@ -173,11 +173,10 @@ function start(container, containers, commandArgs, options, done) {
var child = spawn('docker', args, { stdio: 'inherit' })
child.on('close', function(code) { done() })
} else {
- exec('docker ' + args.join(' '), function(err, stdout, stderr) {
- process.stdout.write(stdout)
- process.stderr.write(stderr)
- done()
- })
+ var child = spawn('docker', args)
+ child.stdout.on('data', process.stdout.write)
+ child.stderr.on('data', process.stderr.write)
+ child.on('close', function(code) { done() })
}
}
diff --git a/test/start-simple-container-test.js b/test/start-simple-container-test.js
index <HASH>..<HASH> 100644
--- a/test/start-simple-container-test.js
+++ b/test/start-simple-container-test.js
@@ -1,6 +1,7 @@
var commands = require('../lib/commands'),
expect = require('chai').expect,
- exec = require('child_process').exec
+ exec = require('child_process').exec,
+ _ = require('lodash')
describe('starting a simple container', function() {
var container = {
@@ -10,31 +11,99 @@ describe('starting a simple container', function() {
}
var stdout = ""
- var releaseStdout
-
- before(function(done) {
- captureStdout(function(data, encoding, fd) {
- stdout += data
- }, function(uncapture) {
- var args = ["Hello, world!"]
- commands.start(container, [container], args, { interactive: false }, function() {
- uncapture()
- done()
- }, false)
+
+ after(function(done) {
+ exec('docker ps -a |grep test-', function(err, stdout, stderr) {
+ var containerIds = stdout
+ .split('\n')
+ .map(function(line) { return line.split(' ')[0] })
+ .filter(function(id) { return id !== '' })
+
+ function iterate(idx) {
+ var id = containerIds[idx]
+ if (id) {
+ exec('docker kill ' + id + ' |xargs docker rm', function() {
+ iterate(idx+1)
+ })
+ } else {
+ done()
+ }
+ }
+
+ iterate(0)
})
})
- it('executes the given command', function() {
- expect(stdout).to.eql('Hello, world!\n')
+ describe('a non-daemon container', function() {
+ before(function(done) {
+ captureStdout(function(data, encoding, fd) {
+ stdout += data
+ }, function(uncapture) {
+ var args = ["Hello, world!"]
+ commands.start(container, [container], args, { interactive: false }, function() {
+ uncapture()
+ done()
+ })
+ })
+ })
+
+ it('executes the given command', function() {
+ expect(stdout).to.eql('Hello, world!\n')
+ })
+
+ it('removes the container afterwards (uses --rm)', function(done) {
+ exec('docker inspect test-container', function(err) {
+ if (err) {
+ done()
+ } else {
+ throw new Error('test-container was not removed from docker')
+ }
+ })
+ })
})
- it('removes the container afterwards (uses --rm)', function(done) {
- exec('docker inspect test-container', function(err) {
- if (err) {
- done()
- } else {
- throw new Error('test-container was not removed from docker')
- }
+ describe('a daemon container', function() {
+ var daemonContainer = _.merge({}, container, {
+ name: 'test-daemon-container',
+ daemon: true
+ })
+
+ function startDaemon(message, done) {
+ daemonContainer.command = ["echo", message]
+
+ captureStdout(_.noop, function(uncapture) {
+ commands.start(daemonContainer, [daemonContainer], [], { }, function() {
+ uncapture()
+ done()
+ })
+ })
+ }
+
+ before(function(done) {
+ startDaemon("hello from daemon", done)
+ })
+
+ it('is left running on the background', function(done) {
+ exec('docker inspect test-daemon-container', function(err) {
+ if (err) {
+ throw new Error('test-daemon-container was not running')
+ } else {
+ done()
+ }
+ })
+ })
+
+ it('can be restarted using start command again', function(done) {
+ exec('docker logs test-daemon-container', function(err, stdout, stderr) {
+ expect(stdout).to.eql('hello from daemon\n')
+
+ startDaemon('hello again', function() {
+ exec('docker logs test-daemon-container', function(err, stdout, stderr) {
+ expect(stdout).to.eql('hello again\n')
+ done()
+ })
+ })
+ })
})
})
}) | Add test for starting and restarting daemon containers | mnylen_pig | train |
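
The change swaps a buffered `exec` for `spawn` with piped stdio so docker output reaches the terminal as it is produced, not all at once on exit. A rough Python analogue of the same buffered-versus-streamed design choice (commands are illustrative):

    import subprocess
    import sys

    # exec-style: output is buffered and only delivered after exit
    done = subprocess.run(["echo", "buffered"], capture_output=True, text=True)
    sys.stdout.write(done.stdout)

    # spawn-style: forward output line by line as the child produces it
    proc = subprocess.Popen(["echo", "streamed"], stdout=subprocess.PIPE, text=True)
    for line in proc.stdout:
        sys.stdout.write(line)
    proc.wait()
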
bd97840d5ccc3f0bfde1e5cfc7abeac9681997ab | diff --git a/python/pyspark/ml/tuning.py b/python/pyspark/ml/tuning.py
index <HASH>..<HASH> 100644
--- a/python/pyspark/ml/tuning.py
+++ b/python/pyspark/ml/tuning.py
@@ -91,20 +91,19 @@ class CrossValidator(Estimator):
>>> from pyspark.ml.evaluation import BinaryClassificationEvaluator
>>> from pyspark.mllib.linalg import Vectors
>>> dataset = sqlContext.createDataFrame(
- ... [(Vectors.dense([0.0, 1.0]), 0.0),
- ... (Vectors.dense([1.0, 2.0]), 1.0),
- ... (Vectors.dense([0.55, 3.0]), 0.0),
- ... (Vectors.dense([0.45, 4.0]), 1.0),
- ... (Vectors.dense([0.51, 5.0]), 1.0)] * 10,
+ ... [(Vectors.dense([0.0]), 0.0),
+ ... (Vectors.dense([0.4]), 1.0),
+ ... (Vectors.dense([0.5]), 0.0),
+ ... (Vectors.dense([0.6]), 1.0),
+ ... (Vectors.dense([1.0]), 1.0)] * 10,
... ["features", "label"])
>>> lr = LogisticRegression()
- >>> grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1, 5]).build()
+ >>> grid = ParamGridBuilder().addGrid(lr.maxIter, [0, 1]).build()
>>> evaluator = BinaryClassificationEvaluator()
>>> cv = CrossValidator(estimator=lr, estimatorParamMaps=grid, evaluator=evaluator)
- >>> # SPARK-7432: The following test is flaky.
- >>> # cvModel = cv.fit(dataset)
- >>> # expected = lr.fit(dataset, {lr.maxIter: 5}).transform(dataset)
- >>> # cvModel.transform(dataset).collect() == expected.collect()
+ >>> cvModel = cv.fit(dataset)
+ >>> evaluator.evaluate(cvModel.transform(dataset))
+ 0.8333...
"""
# a placeholder to make it appear in the generated doc | [SPARK-<I>] [MLLIB] fix flaky CrossValidator doctest
The new test uses CV to compare `maxIter=0` and `maxIter=1`, and validate on the evaluation result. jkbradley | apache_spark | train |
35ee8f4704b4ee8dcbe34900510ac4c1cdabbb84 | diff --git a/microcosm_flask/routing.py b/microcosm_flask/routing.py
index <HASH>..<HASH> 100644
--- a/microcosm_flask/routing.py
+++ b/microcosm_flask/routing.py
@@ -64,6 +64,9 @@ def configure_route_decorator(graph):
parent=ns.controller,
)
+ # set the opaque component data_func to look at the flask request context
+ func = graph.opaque.bind(graph.request_context)(func)
+
# keep audit decoration last (before registering the route) so that
# errors raised by other decorators are captured in the audit trail
if graph.config.route.enable_audit: | adding opaque binding for flask routes | globality-corp_microcosm-flask | train |
f274bf4f33dc6733bfd082c9a47cab3305cbf0b5 | diff --git a/aiogram/dispatcher/storage.py b/aiogram/dispatcher/storage.py
index <HASH>..<HASH> 100644
--- a/aiogram/dispatcher/storage.py
+++ b/aiogram/dispatcher/storage.py
@@ -184,6 +184,78 @@ class BaseStorage:
"""
await self.reset_state(chat=chat, user=user, with_data=True)
+ def has_bucket(self):
+ return False
+
+ async def get_bucket(self, *,
+ chat: typing.Union[str, int, None] = None,
+ user: typing.Union[str, int, None] = None,
+ default: typing.Optional[dict] = None) -> typing.Dict:
+ """
+ Get state-data for user in chat. Return `default` if data is not presented in storage.
+
+ Chat or user is always required. If one of this is not presented,
+ need set the missing value based on the presented
+
+ :param chat:
+ :param user:
+ :param default:
+ :return:
+ """
+ raise NotImplementedError
+
+ async def set_bucket(self, *,
+ chat: typing.Union[str, int, None] = None,
+ user: typing.Union[str, int, None] = None,
+ bucket: typing.Dict = None):
+ """
+ Set data for user in chat
+
+ Chat or user is always required. If one of this is not presented,
+ need set the missing value based on the presented
+
+ :param chat:
+ :param user:
+ :param data:
+ """
+ raise NotImplementedError
+
+ async def update_bucket(self, *,
+ chat: typing.Union[str, int, None] = None,
+ user: typing.Union[str, int, None] = None,
+ bucket: typing.Dict = None,
+ **kwargs):
+ """
+ Update data for user in chat
+
+ You can use data parameter or|and kwargs.
+
+ Chat or user is always required. If one of this is not presented,
+ need set the missing value based on the presented
+
+ :param data:
+ :param chat:
+ :param user:
+ :param kwargs:
+ :return:
+ """
+ raise NotImplementedError
+
+ async def reset_bucket(self, *,
+ chat: typing.Union[str, int, None] = None,
+ user: typing.Union[str, int, None] = None):
+ """
+ Reset data dor user in chat.
+
+ Chat or user is always required. If one of this is not presented,
+ need set the missing value based on the presented
+
+ :param chat:
+ :param user:
+ :return:
+ """
+ await self.set_data(chat=chat, user=user, data={})
+
class FSMContext:
def __init__(self, storage, chat, user): | Change BaseStorage for throttling manager. | aiogram_aiogram | train |
e954a62a59cae5eec690c581849430f13edda3d2 | diff --git a/families/track_and_trade/client/src/services/transactions.js b/families/track_and_trade/client/src/services/transactions.js
index <HASH>..<HASH> 100644
--- a/families/track_and_trade/client/src/services/transactions.js
+++ b/families/track_and_trade/client/src/services/transactions.js
@@ -123,7 +123,7 @@ const changePassword = password => {
* Wraps a Protobuf payload in a TransactionList and submits it to the API.
* Prompts user for their password if their private key is not in memory.
*/
-const submit = payloads => {
+const submit = (payloads, wait = false) => {
if (!_.isArray(payloads)) payloads = [payloads]
return Promise.resolve()
.then(() => {
@@ -138,7 +138,7 @@ const submit = payloads => {
.then(() => {
const txns = payloads.map(payload => txnEncoder.create(payload))
const txnList = txnEncoder.encode(txns)
- return api.postBinary('transactions', txnList)
+ return api.postBinary(`transactions${wait ? '?wait' : ''}`, txnList)
})
}
diff --git a/families/track_and_trade/server/api/index.js b/families/track_and_trade/server/api/index.js
index <HASH>..<HASH> 100644
--- a/families/track_and_trade/server/api/index.js
+++ b/families/track_and_trade/server/api/index.js
@@ -92,6 +92,14 @@ const initInternalParams = (req, res, next) => {
next()
}
+// Middleware for parsing the wait query parameter
+const waitParser = (req, res, next) => {
+ const DEFAULT_WAIT = 60
+ const parsed = req.query.wait === '' ? DEFAULT_WAIT : Number(req.query.wait)
+ req.query.wait = _.isNaN(parsed) ? null : parsed
+ next()
+}
+
// Check the Authorization header if present.
// Saves the encoded public key to the request object.
const authHandler = (req, res, next) => {
@@ -131,6 +139,7 @@ router.use(bodyParser.raw({ type: 'application/octet-stream' }))
router.use(logRequest)
router.use(initInternalParams)
+router.use(waitParser)
router.use(authHandler)
router.get('/agents', handle(agents.list))
diff --git a/families/track_and_trade/server/blockchain/index.js b/families/track_and_trade/server/blockchain/index.js
index <HASH>..<HASH> 100644
--- a/families/track_and_trade/server/blockchain/index.js
+++ b/families/track_and_trade/server/blockchain/index.js
@@ -35,6 +35,7 @@ const StateDeltaSubscribeResponse = root.lookup('StateDeltaSubscribeResponse')
const StateDeltaEvent = root.lookup('StateDeltaEvent')
const ClientBatchSubmitRequest = root.lookup('ClientBatchSubmitRequest')
const ClientBatchSubmitResponse = root.lookup('ClientBatchSubmitResponse')
+const BatchStatus = root.lookup('BatchStatus')
const subscribe = () => {
stream.connect(() => {
@@ -65,21 +66,37 @@ const subscribe = () => {
})
}
-const submit = (txnBytes, { authedKey }) => {
+const submit = (txnBytes, { wait, authedKey }) => {
const batch = batcher.batch(txnBytes, authedKey)
return stream.send(
Message.MessageType.CLIENT_BATCH_SUBMIT_REQUEST,
- ClientBatchSubmitRequest.encode({ batches: [batch] }).finish()
+ ClientBatchSubmitRequest.encode({
+ batches: [batch],
+ waitForCommit: wait !== null,
+ timeout: wait
+ }).finish()
)
.then(response => ClientBatchSubmitResponse.decode(response))
- .then(decoded => {
+ .then((decoded) => {
const status = _.findKey(ClientBatchSubmitResponse.Status,
val => val === decoded.status)
if (status !== 'OK') {
throw new Error(`Batch submission failed with status '${status}'`)
}
- return { batch: batch.headerSignature }
+
+ if (wait === null) {
+ return { batch: batch.headerSignature }
+ }
+
+ if (decoded.batchStatuses[0].status !== BatchStatus.Status.COMMITTED) {
+ throw new Error(decoded.batchStatuses[0].invalidTransactions[0].message)
+ }
+
+ // Wait to return until new block is in database
+ return new Promise(resolve => setTimeout(() => {
+ resolve({ batch: batch.headerSignature })
+ }, 500))
})
} | Add wait query paramter to T&T txn submissions | hyperledger_sawtooth-core | train |
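
The middleware above normalizes the `wait` query value: a bare `?wait` means the default timeout, a numeric value is parsed, and anything unparseable becomes null. A small sketch of the same rules as a pure function (names and the default of 60 mirror the diff):

    DEFAULT_WAIT = 60

    def parse_wait(raw):
        """Normalize a ?wait query value: absent -> None, bare flag ->
        default, numeric string -> number, garbage -> None."""
        if raw is None:
            return None          # parameter not supplied at all
        if raw == "":
            return DEFAULT_WAIT  # "?wait" with no value means the default
        try:
            return float(raw)
        except ValueError:
            return None          # mirrors the NaN -> null handling in the JS

    assert parse_wait(None) is None
    assert parse_wait("") == DEFAULT_WAIT
    assert parse_wait("30") == 30.0
    assert parse_wait("soon") is None
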
373fb3b57c9edafc1935789d7b5cb99d45656f2e | diff --git a/lib/puppet/parser/scope.rb b/lib/puppet/parser/scope.rb
index <HASH>..<HASH> 100644
--- a/lib/puppet/parser/scope.rb
+++ b/lib/puppet/parser/scope.rb
@@ -631,6 +631,10 @@ module Puppet::Parser
# hash. We store the object ID, not class name, so that we
# can support multiple unrelated classes with the same name.
def setclass(id, name)
+ unless name =~ /^[a-z]\w*$/
+ raise Puppet::ParseError, "Invalid class name '%s'" % name
+ end
+
if self.topscope?
@classtable[id] = name
else
diff --git a/test/language/scope.rb b/test/language/scope.rb
index <HASH>..<HASH> 100755
--- a/test/language/scope.rb
+++ b/test/language/scope.rb
@@ -546,4 +546,21 @@ class TestScope < Test::Unit::TestCase
assert_equal("root", obj["owner"], "Owner did not take")
assert_equal("755", obj["mode"], "Mode did not take")
end
+
+ def test_validclassnames
+ scope = Puppet::Parser::Scope.new()
+
+ ["a-class", "a class", "Class", "a.class"].each do |bad|
+ assert_raise(Puppet::ParseError, "Incorrectly allowed %s" % bad.inspect) do
+ scope.setclass(object_id, bad)
+ end
+ end
+
+ ["a_class", "class", "yayNess"].each do |good|
+ assert_nothing_raised("Incorrectly banned %s" % good.inspect) do
+ scope.setclass(object_id, good)
+ end
+ end
+
+ end
end | Modifying "setclass" on scope to check the validity of class names, now that "set" can be used to set them manually, and added a test for it.
git-svn-id: <URL> | puppetlabs_puppet | train |
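
The validation added above accepts only names matching `/^[a-z]\w*$/`: a leading lowercase letter followed by word characters. Checking the commit's own good and bad examples against that pattern in Python:

    import re

    VALID_CLASS_NAME = re.compile(r"^[a-z]\w*$")

    bad  = ["a-class", "a class", "Class", "a.class"]
    good = ["a_class", "class", "yayNess"]

    assert not any(VALID_CLASS_NAME.match(name) for name in bad)
    assert all(VALID_CLASS_NAME.match(name) for name in good)
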
d59bce6854b305341f4cda77246b28faef3ad41d | diff --git a/result.go b/result.go
index <HASH>..<HASH> 100644
--- a/result.go
+++ b/result.go
@@ -76,10 +76,6 @@ func (r solution) InputHash() []byte {
func stripVendor(path string, info os.FileInfo, err error) error {
if info.Name() == "vendor" {
if _, err := os.Lstat(path); err == nil {
- if info.IsDir() {
- return removeAll(path)
- }
-
if (info.Mode() & os.ModeSymlink) != 0 {
realInfo, err := os.Stat(path)
if err != nil {
@@ -89,6 +85,10 @@ func stripVendor(path string, info os.FileInfo, err error) error {
return os.Remove(path)
}
}
+
+ if info.IsDir() {
+ return removeAll(path)
+ }
}
} | Don't delete target directory of junctions | sdboyer_gps | train |
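
The reordering above matters for symlinks and Windows junctions: `Stat` follows the link and reports a directory, so removing it as a directory would delete the link's target; checking `Lstat`'s symlink bit first removes only the link itself. The same precaution sketched in Python (POSIX semantics; a Windows directory junction would need `os.rmdir` instead of `os.remove`):

    import os
    import shutil

    def strip_vendor(path):
        # Check for a link first: os.path.isdir() follows links, so an
        # unguarded rmtree would wipe the directory the link points at.
        if os.path.islink(path):
            os.remove(path)      # drop only the link itself
        elif os.path.isdir(path):
            shutil.rmtree(path)  # a real directory: remove its contents
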
35ca1f9b17c19fd0b774ed3307a85b58fa4b2705 | diff --git a/src/Columns.php b/src/Columns.php
index <HASH>..<HASH> 100644
--- a/src/Columns.php
+++ b/src/Columns.php
@@ -139,6 +139,9 @@ class Columns
$set['ptable'] = $activeRecord->ptable ?: 'tl_article';
$set['type'] = substr($activeRecord->type, 0, -5) . 'stop';
$set['sorting'] = $activeRecord->sorting + 1;
+ $set['invisible'] = $activeRecord->invisible;
+ $set['start'] = $activeRecord->start;
+ $set['stop'] = $activeRecord->stop;
$set['tstamp'] = time();
\Database::getInstance() | Copy visibility settings from start element, fixes #<I> | madeyourday_contao-rocksolid-columns | train |
633a4a92c9ad93c822ff402ef27697f69ab608b6 | diff --git a/adafruit_platformdetect/chip.py b/adafruit_platformdetect/chip.py
index <HASH>..<HASH> 100644
--- a/adafruit_platformdetect/chip.py
+++ b/adafruit_platformdetect/chip.py
@@ -23,6 +23,9 @@ HFU540 = "HFU540"
MCP2221 = "MCP2221"
BINHO = "BINHO"
+BCM_RANGE = {'BCM2708', 'BCM2709', 'BCM2835', 'BCM2837', 'bcm2708', 'bcm2709',
+ 'bcm2835', 'bcm2837'}
+
class Chip:
"""Attempt detection of current chip / CPU."""
def __init__(self, detector): | re-add chip.py::BCM_RANGE after merge conflict | adafruit_Adafruit_Python_PlatformDetect | train |
ea37ef5e81f3b7ea1f547d4046a8b3827ebb69a7 | diff --git a/spec/db_schema_parser_spec.rb b/spec/db_schema_parser_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/db_schema_parser_spec.rb
+++ b/spec/db_schema_parser_spec.rb
@@ -142,7 +142,7 @@ describe "Parsing an enum column" do
it "should not raise an error when enum values contains brackets" do
parser = Sequel::Schema::DbSchemaParser.for_db(stub(:database))
schema = [[:example_column,
- { :type => :string,
+ { :type => :enum,
:default => nil,
:ruby_default => nil,
:primary_key => false, | Correct type in spec to make things clearer. | knaveofdiamonds_sequel_migration_builder | train |
1ff5826c26c0da5cd9dcccc1874dbd5ed03f56eb | diff --git a/telethon/telegram_client.py b/telethon/telegram_client.py
index <HASH>..<HASH> 100644
--- a/telethon/telegram_client.py
+++ b/telethon/telegram_client.py
@@ -192,6 +192,9 @@ class TelegramClient(TelegramBareClient):
# Sometimes we need to know who we are, cache the self peer
self._self_input_peer = None
+ # Don't call .get_dialogs() every time a .get_entity() fails
+ self._called_get_dialogs = False
+
# endregion
# region Telegram requests functions
@@ -2401,16 +2404,18 @@ class TelegramClient(TelegramBareClient):
# Add the mark to the peers if the user passed a Peer (not an int),
# or said ID is negative. If it's negative it's been marked already.
# Look in the dialogs with the hope to find it.
- mark = not isinstance(peer, int) or peer < 0
- target_id = utils.get_peer_id(peer)
- if mark:
- for dialog in self.iter_dialogs():
- if utils.get_peer_id(dialog.entity) == target_id:
- return utils.get_input_peer(dialog.entity)
- else:
- for dialog in self.iter_dialogs():
- if dialog.entity.id == target_id:
- return utils.get_input_peer(dialog.entity)
+ if not self._called_get_dialogs:
+ self._called_get_dialogs = True
+ mark = not isinstance(peer, int) or peer < 0
+ target_id = utils.get_peer_id(peer)
+ if mark:
+ for dialog in self.get_dialogs(100):
+ if utils.get_peer_id(dialog.entity) == target_id:
+ return utils.get_input_peer(dialog.entity)
+ else:
+ for dialog in self.get_dialogs(100):
+ if dialog.entity.id == target_id:
+ return utils.get_input_peer(dialog.entity)
raise TypeError(
'Could not find the input entity corresponding to "{}". ' | Call .get_dialogs only once on entity not found | LonamiWebs_Telethon | train |
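
The change guards an expensive fallback (fetching dialogs) with a boolean so it runs at most once per client instead of on every failed lookup. A stripped-down sketch of the pattern (not Telethon's actual classes):

    class EntityCache:
        def __init__(self, fetch_dialogs):
            self._fetch_dialogs = fetch_dialogs
            self._called_get_dialogs = False
            self._entities = {}

        def lookup(self, key):
            if key in self._entities:
                return self._entities[key]
            # Expensive network fallback: allow it only once, so repeated
            # misses don't hammer the API on every failed lookup.
            if not self._called_get_dialogs:
                self._called_get_dialogs = True
                self._entities.update(self._fetch_dialogs())
            return self._entities.get(key)
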
f92a36f86efbbd2e0d8a3ae44da5efb0b908b2d8 | diff --git a/test/component.spec.js b/test/component.spec.js
index <HASH>..<HASH> 100644
--- a/test/component.spec.js
+++ b/test/component.spec.js
@@ -4261,5 +4261,137 @@ describe("Component", function () {
});
});
+
+ it("root element with if, has data when inited", function (done) {
+ var MyComponent = san.defineComponent({
+ template: '<b s-if="person">{{person.name}}</b>'
+ });
+
+ var myComponent = new MyComponent({
+ data: {
+ person: {
+ name: 'errorrik'
+ }
+ }
+ });
+
+ var wrap = document.createElement('div');
+ document.body.appendChild(wrap);
+ myComponent.attach(wrap);
+
+ expect(wrap.getElementsByTagName('b').length).toBe(1);
+ expect(wrap.getElementsByTagName('b')[0].innerHTML).toBe('errorrik');
+
+ myComponent.data.set('person', null);
+
+ myComponent.nextTick(function () {
+ expect(wrap.getElementsByTagName('b').length).toBe(0);
+
+ myComponent.dispose();
+ document.body.removeChild(wrap);
+ done();
+ });
+
+ });
+
+ it("root element with if, no data when inited", function (done) {
+ var MyComponent = san.defineComponent({
+ template: '<b s-if="person">{{person.name}}</b>'
+ });
+
+ var myComponent = new MyComponent();
+
+ var wrap = document.createElement('div');
+ document.body.appendChild(wrap);
+ myComponent.attach(wrap);
+
+ expect(wrap.getElementsByTagName('b').length).toBe(0);
+ myComponent.data.set('person', {
+ name: 'errorrik'
+ });
+
+ myComponent.nextTick(function () {
+ expect(wrap.getElementsByTagName('b').length).toBe(1);
+ expect(wrap.getElementsByTagName('b')[0].innerHTML).toBe('errorrik');
+
+ myComponent.dispose();
+ document.body.removeChild(wrap);
+ done();
+ });
+
+ });
+
+ it("root element with if as child, has data when inited", function (done) {
+ var Child = san.defineComponent({
+ template: '<b s-if="person">{{person.name}}</b>'
+ });
+
+ var MyComponent = san.defineComponent({
+ template: '<div><x-child person="{{p}}"/></div>',
+ components: {
+ 'x-child': Child
+ }
+ });
+
+ var myComponent = new MyComponent({
+ data: {
+ p: {
+ name: 'errorrik'
+ }
+ }
+ });
+
+ var wrap = document.createElement('div');
+ document.body.appendChild(wrap);
+ myComponent.attach(wrap);
+
+ expect(wrap.getElementsByTagName('b').length).toBe(1);
+ expect(wrap.getElementsByTagName('b')[0].innerHTML).toBe('errorrik');
+
+ myComponent.data.set('p', null);
+
+ myComponent.nextTick(function () {
+ expect(wrap.getElementsByTagName('b').length).toBe(0);
+
+ myComponent.dispose();
+ document.body.removeChild(wrap);
+ done();
+ });
+
+ });
+
+ it("root element with if as child, no data when inited", function (done) {
+ var Child = san.defineComponent({
+ template: '<b s-if="person">{{person.name}}</b>'
+ });
+
+ var MyComponent = san.defineComponent({
+ template: '<div><x-child person="{{p}}"/></div>',
+ components: {
+ 'x-child': Child
+ }
+ });
+
+ var myComponent = new MyComponent();
+
+ var wrap = document.createElement('div');
+ document.body.appendChild(wrap);
+ myComponent.attach(wrap);
+
+ expect(wrap.getElementsByTagName('b').length).toBe(0);
+ myComponent.data.set('p', {
+ name: 'errorrik'
+ });
+
+ myComponent.nextTick(function () {
+ expect(wrap.getElementsByTagName('b').length).toBe(1);
+ expect(wrap.getElementsByTagName('b')[0].innerHTML).toBe('errorrik');
+
+ myComponent.dispose();
+ document.body.removeChild(wrap);
+ done();
+ });
+
+ });
}); | add specs: if directive with component root | baidu_san | train |
7a9f4813cbdf8606451dec91a72116de0d81212d | diff --git a/lib/generators/authkit/templates/app/forms/signup.rb b/lib/generators/authkit/templates/app/forms/signup.rb
index <HASH>..<HASH> 100644
--- a/lib/generators/authkit/templates/app/forms/signup.rb
+++ b/lib/generators/authkit/templates/app/forms/signup.rb
@@ -45,6 +45,10 @@ class Signup
false
end
+ def save!
+ raise ActiveRecord::RecordNotSaved unless save
+ end
+
def save
if valid?
persist! | Add a save! method to the signup | jeffrafter_authkit | train |
96a4cc2e4dd4aba01350f436c5637009e5b2d905 | diff --git a/ansigenome/reqs.py b/ansigenome/reqs.py
index <HASH>..<HASH> 100644
--- a/ansigenome/reqs.py
+++ b/ansigenome/reqs.py
@@ -19,6 +19,8 @@ class Reqs(object):
self.roles = utils.roles_dict(args[0])
self.out_file = options.out_file
+ utils.exit_if_no_roles(len(self.roles.keys()), args[0])
+
if options.format:
self.format = options.format
else: | Add protection against no roles to the reqs cmd | nickjj_ansigenome | train |
a4d94ea640d7e8454110dd275ee16bdf54011bba | diff --git a/js/braziliex.js b/js/braziliex.js
index <HASH>..<HASH> 100644
--- a/js/braziliex.js
+++ b/js/braziliex.js
@@ -97,16 +97,16 @@ module.exports = class braziliex extends Exchange {
'funding': {
'withdraw': {
'active': canWithdraw,
- 'fee': currency['txWithdrawalFee'],
+ 'fee': this.safeFloat (currency, 'txWithdrawalFee'),
},
'deposit': {
'active': canDeposit,
- 'fee': currency['txDepositFee'],
+ 'fee': this.safeFloat (currency, 'txDepositFee'),
},
},
'limits': {
'amount': {
- 'min': currency['minAmountTrade'],
+ 'min': this.safeFloat (currency, 'minAmountTrade'),
'max': Math.pow (10, precision),
},
'price': {
@@ -118,11 +118,11 @@ module.exports = class braziliex extends Exchange {
'max': undefined,
},
'withdraw': {
- 'min': currency['MinWithdrawal'],
+ 'min': this.safeFloat (currency, 'MinWithdrawal'),
'max': Math.pow (10, precision),
},
'deposit': {
- 'min': currency['minDeposit'],
+ 'min': this.safeFloat (currency, 'minDeposit'),
'max': undefined,
},
}, | braziliex fetchCurrencies guards for undefined properties | ccxt_ccxt | train |
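
`safeFloat` returns undefined instead of throwing when a key is absent or non-numeric, which is why the commit routes the optional currency fields through it. A simplified Python stand-in for that guarded access (not ccxt's actual helper):

    def safe_float(dictionary, key, default=None):
        value = dictionary.get(key)
        try:
            return float(value)
        except (TypeError, ValueError):
            return default  # missing or malformed fields degrade gracefully

    currency = {"txWithdrawalFee": "0.001"}
    assert safe_float(currency, "txWithdrawalFee") == 0.001
    assert safe_float(currency, "minDeposit") is None  # absent key: no crash
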
292285f0eac28c54026bb21636643699ca74bf78 | diff --git a/docs/CHANGELOG.rst b/docs/CHANGELOG.rst
index <HASH>..<HASH> 100644
--- a/docs/CHANGELOG.rst
+++ b/docs/CHANGELOG.rst
@@ -15,6 +15,10 @@ Changed
- Add ``username`` to ``current_user_permissions`` field of objects on API
- Support retrieval of ``Data.name`` in Python process
+Added
+-----
+- Support workflows as inputs to Python processes
+
===================
20.1.0 - 2019-12-16
diff --git a/resolwe/flow/models/utils.py b/resolwe/flow/models/utils.py
index <HASH>..<HASH> 100644
--- a/resolwe/flow/models/utils.py
+++ b/resolwe/flow/models/utils.py
@@ -341,6 +341,7 @@ def hydrate_input_references(input_, input_schema, hydrate_values=True):
continue
output = copy.deepcopy(data.output)
+ hydrate_input_references(output, data.process.output_schema)
if hydrate_values:
_hydrate_values(output, data.process.output_schema, data)
output["__id"] = data.id
@@ -365,6 +366,7 @@ def hydrate_input_references(input_, input_schema, hydrate_values=True):
continue
output = copy.deepcopy(data.output)
+ hydrate_input_references(output, data.process.output_schema)
if hydrate_values:
_hydrate_values(output, data.process.output_schema, data)
diff --git a/resolwe/process/fields.py b/resolwe/process/fields.py
index <HASH>..<HASH> 100644
--- a/resolwe/process/fields.py
+++ b/resolwe/process/fields.py
@@ -513,9 +513,17 @@ class DataDescriptor:
field_type = field_descriptor["type"].rstrip(":")
if field_type.startswith("list:"):
- field = ListField(ALL_FIELDS_MAP[field_type[5:]]())
+ field_class = ALL_FIELDS_MAP[field_type[len("list:") :]]
+ extra_kwargs = {}
+ if issubclass(field_class, DataField):
+ extra_kwargs["data_type"] = field_type[len("list:data:") :]
+ field = ListField(field_class(**extra_kwargs))
else:
- field = ALL_FIELDS_MAP[field_type]()
+ field_class = ALL_FIELDS_MAP[field_type]
+ extra_kwargs = {}
+ if issubclass(field_class, DataField):
+ extra_kwargs["data_type"] = field_type[len("data:") :]
+ field = field_class(**extra_kwargs)
value = cache[field_name]
value = field.clean(value)
@@ -581,6 +589,8 @@ class DataDescriptor:
class DataField(Field):
"""Data object field."""
+ field_type = "data"
+
def __init__(self, data_type, *args, **kwargs):
"""Construct a data field."""
# TODO: Validate data type format.
@@ -606,10 +616,8 @@ class DataField(Field):
value = cache["__id"]
except KeyError:
raise ValidationError("dictionary must contain an '__id' element")
- elif not isinstance(value, int):
- raise ValidationError(
- "field must be a DataDescriptor, data object primary key or dict"
- )
+ else:
+ raise ValidationError("field must be a DataDescriptor or dict")
return DataDescriptor(value, self, cache)
diff --git a/resolwe/process/tests/processes/python_test.py b/resolwe/process/tests/processes/python_test.py
index <HASH>..<HASH> 100644
--- a/resolwe/process/tests/processes/python_test.py
+++ b/resolwe/process/tests/processes/python_test.py
@@ -252,3 +252,18 @@ class RequirementsProcess(Process):
print("Cores:", outputs.cores)
print("Memory:", outputs.memory)
+
+
+class ProcessWithWorkflowInput(Process):
+ slug = "process-with-workflow-input"
+ name = "Process with workflow input"
+ version = "1.0.0"
+ process_type = "data:test"
+
+ class Input:
+ """Input fields."""
+
+ data = DataField(data_type="", label="Data.")
+
+ def run(self, inputs, outputs):
+ pass
diff --git a/resolwe/process/tests/test_python_process.py b/resolwe/process/tests/test_python_process.py
index <HASH>..<HASH> 100644
--- a/resolwe/process/tests/test_python_process.py
+++ b/resolwe/process/tests/test_python_process.py
@@ -156,6 +156,18 @@ class PythonProcessTest(ProcessTestCase):
self.assertEqual(data.status, "OK")
@with_docker_executor
+ @tag_process("process-with-workflow-input")
+ def test_workflow_as_list_input(self):
+ """Test workflow with non-required data inputs"""
+ with self.preparation_stage():
+ workflow = self.run_process("simple-workflow")
+
+ data = self.run_process("process-with-workflow-input", {"data": workflow.pk})
+
+ data.refresh_from_db()
+ self.assertEqual(data.status, "OK")
+
+ @with_docker_executor
@tag_process("test-python-process-error")
def test_error(self):
"""Test process that raises exception"""
diff --git a/resolwe/process/tests/workflows/test_workflow.yml b/resolwe/process/tests/workflows/test_workflow.yml
index <HASH>..<HASH> 100644
--- a/resolwe/process/tests/workflows/test_workflow.yml
+++ b/resolwe/process/tests/workflows/test_workflow.yml
@@ -21,3 +21,13 @@
input:
data: '{{input.data}}'
data2: '{{input.data2}}'
+
+- slug: simple-workflow
+ name: Simple workflow
+ version: 1.0.0
+ type: data:workflow
+ run:
+ language: workflow
+ program:
+ - id: python-process-1
+      run: test-python-process-2 | Support workflows as inputs to Python processes | genialis_resolwe | train
a1216df7b7913cccefcf7adff72ce8a829500d94 | diff --git a/src/Entrust/EntrustServiceProvider.php b/src/Entrust/EntrustServiceProvider.php
index <HASH>..<HASH> 100644
--- a/src/Entrust/EntrustServiceProvider.php
+++ b/src/Entrust/EntrustServiceProvider.php
@@ -45,6 +45,8 @@ class EntrustServiceProvider extends ServiceProvider
$this->registerEntrust();
$this->registerCommands();
+
+ $this->mergeConfig();
}
/**
@@ -72,6 +74,18 @@ class EntrustServiceProvider extends ServiceProvider
}
/**
+ * Merges user's and entrust's configs.
+ *
+ * @return void
+ */
+ private function mergeConfig()
+ {
+ $this->mergeConfigFrom(
+ __DIR__.'/../config/config.php', 'entrust'
+ );
+ }
+
+ /**
* Get the services provided.
*
* @return array
diff --git a/src/commands/MigrationCommand.php b/src/commands/MigrationCommand.php
index <HASH>..<HASH> 100644
--- a/src/commands/MigrationCommand.php
+++ b/src/commands/MigrationCommand.php
@@ -36,7 +36,7 @@ class MigrationCommand extends Command
{
$this->laravel->view->addNamespace('entrust', substr(__DIR__, 0, -8).'views');
- $rolesTable = Config::get('entrust.roles_table');
+ $rolesTable = Config::get('entrust::roles_table');
$roleUserTable = Config::get('entrust.role_user_table');
$permissionsTable = Config::get('entrust.permissions_table');
$permissionRoleTable = Config::get('entrust.permission_role_table');
@@ -79,7 +79,7 @@ class MigrationCommand extends Command
*/
protected function createMigration($rolesTable, $roleUserTable, $permissionsTable, $permissionRoleTable)
{
- $migrationFile = realpath(base_path()."/database/migrations")."/".date('Y_m_d_His')."_entrust_setup_tables.php";
+ $migrationFile = base_path("/database/migrations")."/".date('Y_m_d_His')."_entrust_setup_tables.php";
$usersTable = Config::get('auth.table');
$userModel = Config::get('auth.model'); | Fixes migration. Fixes config loading. | Zizaco_entrust | train |
c85adfeb7e88f942409b82de737fc137eb60f093 | diff --git a/lib/ghw.js b/lib/ghw.js
index <HASH>..<HASH> 100755
--- a/lib/ghw.js
+++ b/lib/ghw.js
@@ -30,7 +30,7 @@ function transform(f, transformers, done) {
type: 'heading',
depth: t.depth,
text: '<h' + t.depth + ' id="' + i + '">' + n + '</h' + t.depth + '>'
- }
+ };
}
return t;
});
@@ -86,7 +86,7 @@ function transformers() {
if(headerLen != alignLen) return text;
if(all(lines.map(function(v) {return v.indexOf('|') >= 0;})))
- var ast = {
+ return output({
header: headerParts,
align: alignParts.map(function(p) {
if(p[0] == ':' && p[p.length - 1] == ':') return 'center';
@@ -94,15 +94,15 @@ function transformers() {
return 'left';
}),
lines: lines.map(function(line) { return rtrim(line, '|').split('|');})
- };
-
- return output(ast);
+ });
},
toHTML: function(o) {
if(!o) return '';
var h = o.header? '<thead><tr><th>' + o.header.join('</th><th>') + '</th></tr></thead>': '';
- var c = '<tbody><tr>' + o.lines.map(function(v) {return '<td>' + v.join('</td><td>') + '</td>'}).join('</tr><tr>') + '</tr></tbody>';
+ var c = '<tbody><tr>' + o.lines.map(function(v) {
+ return '<td>' + v.join('</td><td>') + '</td>';
+ }).join('</tr><tr>') + '</tr></tbody>';
return '<table>' + h + c + '</table>';
}
@@ -172,7 +172,7 @@ function transformers() {
if(!v.toAST) {
v.toAST = function(l, r) {
return {'l': l, 'r': r};
- }
+ };
}
} | Fix various jshint warnings | bebraw_ghw | train |
0b95ab8768ab251bdaa7fd6cd906bc4e14360bf1 | diff --git a/pulsar/__init__.py b/pulsar/__init__.py
index <HASH>..<HASH> 100644
--- a/pulsar/__init__.py
+++ b/pulsar/__init__.py
@@ -5,7 +5,7 @@ import os
from .utils.version import get_version
-VERSION = (1, 6, 1, 'beta', 1)
+VERSION = (1, 6, 1, 'alpha', 0)
__version__ = version = get_version(VERSION)
diff --git a/pulsar/apps/wsgi/wrappers.py b/pulsar/apps/wsgi/wrappers.py
index <HASH>..<HASH> 100755
--- a/pulsar/apps/wsgi/wrappers.py
+++ b/pulsar/apps/wsgi/wrappers.py
@@ -627,7 +627,7 @@ class WsgiRequest(EnvironMixin):
path = remove_double_slash('%s/%s' % (self.path, path))
return iri_to_uri(path, query)
- def absolute_uri(self, location=None, scheme=None):
+ def absolute_uri(self, location=None, scheme=None, **query):
"""Builds an absolute URI from ``location`` and variables
available in this request.
@@ -635,7 +635,7 @@ class WsgiRequest(EnvironMixin):
:meth:`full_path`.
"""
if not is_absolute_uri(location):
- location = self.full_path(location)
+ location = self.full_path(location, **query)
if not scheme:
scheme = self.is_secure and 'https' or 'http'
base = '%s://%s' % (scheme, self.get_host()) | added query to request absolute_uri #release-note | quantmind_pulsar | train |
0f79fd226809a57303f7c1beacdc67ca3e4b498b | diff --git a/parsl/executors/workqueue/executor.py b/parsl/executors/workqueue/executor.py
index <HASH>..<HASH> 100644
--- a/parsl/executors/workqueue/executor.py
+++ b/parsl/executors/workqueue/executor.py
@@ -107,7 +107,6 @@ def WorkQueueSubmitThread(task_queue=multiprocessing.Queue(),
q.specify_password_file(project_password_file)
if autolabel:
q.enable_monitoring()
- q.specify_category_mode('parsl-default', WORK_QUEUE_ALLOCATION_MODE_MAX_THROUGHPUT)
if autolabel_window is not None:
q.tune('category-steady-n-tasks', autolabel_window)
@@ -201,12 +200,9 @@ def WorkQueueSubmitThread(task_queue=multiprocessing.Queue(),
logger.error("Unable to create task: {}".format(e))
continue
- if autocategory:
- t.specify_category(category)
- if autolabel:
- q.specify_category_mode(category, WORK_QUEUE_ALLOCATION_MODE_MAX_THROUGHPUT)
- else:
- t.specify_category('parsl-default')
+ t.specify_category(category)
+ if autolabel:
+ q.specify_category_mode(category, WORK_QUEUE_ALLOCATION_MODE_MAX_THROUGHPUT)
# Specify environment variables for the task
if env is not None:
@@ -714,9 +710,10 @@ class WorkQueueExecutor(NoStatusHandlingExecutor):
# Create message to put into the message queue
logger.debug("Placing task {} on message queue".format(task_id))
+ category = func.__qualname__ if self.autocategory else 'parsl-default'
msg = {"task_id": task_id,
"data_loc": function_data_file,
- "category": func.__qualname__,
+ "category": category,
"result_loc": function_result_file,
"input_files": input_files,
"output_files": output_files, | wq excutor: treat parsl-default as any other category (#<I>)
* wq excutor: treat parsl-default as any other category
When not using autocategory, the category of the task is set to
'parsl-default' before sending it to work queue. The work queue code
can then treat parsl-default as any other category. This simplifies the
logic a little bit, as the category of a task is set once and never
changed.
* fix missing self | Parsl_parsl | train |
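As an aside, the selection logic this commit settles on — one category per callable via __qualname__, or a single shared default — can be sketched in isolation (hypothetical names, not parsl's API):

def task_category(func, autocategory):
    # __qualname__ distinguishes nested callables ('Analysis.run'
    # vs a bare 'run'), so each callable gets its own category.
    return func.__qualname__ if autocategory else "parsl-default"

class Analysis:
    def run(self):
        pass

assert task_category(Analysis.run, True) == "Analysis.run"
assert task_category(Analysis.run, False) == "parsl-default"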
71d3a67b95184ccaf577ae8e5bee11b1e1c8725d | diff --git a/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java b/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java
+++ b/src/main/java/org/gaul/s3proxy/S3ProxyHandler.java
@@ -1117,7 +1117,8 @@ final class S3ProxyHandler extends AbstractHandler {
ContentMetadata metadata = blob.getMetadata().getContentMetadata();
BlobBuilder.PayloadBlobBuilder builder = blobStore
.blobBuilder(destBlobName)
- .payload(is);
+ .payload(is)
+ .contentLength(metadata.getContentLength());
if (replaceMetadata) {
addContentMetdataFromHttpRequest(builder, request);
} else {
@@ -1127,7 +1128,6 @@ final class S3ProxyHandler extends AbstractHandler {
.contentType(metadata.getContentType())
.userMetadata(blob.getMetadata().getUserMetadata());
}
- builder.contentLength(metadata.getContentLength());
PutOptions options = new PutOptions()
.multipart(forceMultiPartUpload);
@@ -1210,7 +1210,8 @@ final class S3ProxyHandler extends AbstractHandler {
try (InputStream is = request.getInputStream()) {
BlobBuilder.PayloadBlobBuilder builder = blobStore
.blobBuilder(blobName)
- .payload(is);
+ .payload(is)
+ .contentLength(request.getContentLength());
addContentMetdataFromHttpRequest(builder, request);
if (contentMD5 != null) {
builder = builder.contentMD5(contentMD5);
@@ -1582,7 +1583,8 @@ final class S3ProxyHandler extends AbstractHandler {
request.getInputStream())) {
BlobBuilder.PayloadBlobBuilder builder = blobStore
.blobBuilder(uploadId + "." + partNumber)
- .payload(his);
+ .payload(his)
+ .contentLength(request.getContentLength());
addContentMetdataFromHttpRequest(builder, request);
if (contentMD5 != null) {
builder = builder.contentMD5(contentMD5);
@@ -1812,7 +1814,6 @@ final class S3ProxyHandler extends AbstractHandler {
HttpHeaders.CONTENT_ENCODING))
.contentLanguage(request.getHeader(
HttpHeaders.CONTENT_LANGUAGE))
- .contentLength(request.getContentLength())
.userMetadata(userMetadata.build());
String contentType = request.getContentType();
if (contentType != null) { | Explicitly add Content-Length
Previously InitiateMultipartUpload used the incorrect length. Found
with s3fs integration tests. References #2. | gaul_s3proxy | train |
3a17b3eb7c449c59b8077f445224d81b71d31632 | diff --git a/django_afip/admin.py b/django_afip/admin.py
index <HASH>..<HASH> 100644
--- a/django_afip/admin.py
+++ b/django_afip/admin.py
@@ -263,8 +263,7 @@ class TaxPayerAdmin(admin.ModelAdmin):
for pos in taxpayer.fetch_points_of_sales()
]
- # XXX: This seems to crash when none exist?
- created = sum([pos for pos in poses if pos[1]])
+ created = len([pos for pos in poses if pos[1]])
total = len(poses)
self.message_user( | Fix crash when fetching more than one POS | WhyNotHugo_django-afip | train |
660138db142926b05ab9f13503befe556779b0cf | diff --git a/redisson/src/main/java/org/redisson/api/listener/MessageListener.java b/redisson/src/main/java/org/redisson/api/listener/MessageListener.java
index <HASH>..<HASH> 100644
--- a/redisson/src/main/java/org/redisson/api/listener/MessageListener.java
+++ b/redisson/src/main/java/org/redisson/api/listener/MessageListener.java
@@ -26,6 +26,7 @@ import java.util.EventListener;
*
* @see org.redisson.api.RTopic
*/
+@FunctionalInterface
public interface MessageListener<M> extends EventListener {
Improvement - MessageListener should be annotated with @FunctionalInterface #<I> | redisson_redisson | train
644e9182d9929ade7252719fb6b685efbbe84acb | diff --git a/guava/src/com/google/common/collect/ImmutableMultiset.java b/guava/src/com/google/common/collect/ImmutableMultiset.java
index <HASH>..<HASH> 100644
--- a/guava/src/com/google/common/collect/ImmutableMultiset.java
+++ b/guava/src/com/google/common/collect/ImmutableMultiset.java
@@ -584,9 +584,9 @@ public abstract class ImmutableMultiset<E> extends ImmutableMultisetGwtSerializa
}
}
- @WeakOuter
static final class ElementSet<E> extends ImmutableSet.Indexed<E> {
private final List<Entry<E>> entries;
+ // TODO(cpovirk): @Weak?
private final Multiset<E> delegate;
ElementSet(List<Entry<E>> entries, Multiset<E> delegate) { | Remove @WeakOuter from ImmutableMultiset.ElementSet. @WeakOuter is useful for inner classes but not for static nested classes.
-------------
Created by MOE: <URL> | google_guava | train |
9ce6cab8d55bcf17235c8c98a372ab9a70032652 | diff --git a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JobRepository.java b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JobRepository.java
index <HASH>..<HASH> 100644
--- a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JobRepository.java
+++ b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/JobRepository.java
@@ -468,8 +468,8 @@ public class JobRepository {
}
}
- void cacheValidationRecord(@Nonnull String snapshotMapName, @Nonnull SnapshotValidationRecord validationRecord) {
- exportedSnapshotDetailsCache.set(snapshotMapName, validationRecord);
+ void cacheValidationRecord(@Nonnull String snapshotName, @Nonnull SnapshotValidationRecord validationRecord) {
+ exportedSnapshotDetailsCache.set(snapshotName, validationRecord);
}
public static final class UpdateJobExecutionRecordEntryProcessor implements
diff --git a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/MasterContext.java b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/MasterContext.java
index <HASH>..<HASH> 100644
--- a/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/MasterContext.java
+++ b/hazelcast-jet-core/src/main/java/com/hazelcast/jet/impl/MasterContext.java
@@ -336,11 +336,12 @@ public class MasterContext {
if (localStatus == SUSPENDED_EXPORTING_SNAPSHOT) {
String sourceMapName = jobExecutionRecord.successfulSnapshotDataMapName(jobId);
+ String targetMapName = EXPORTED_SNAPSHOTS_PREFIX + name;
JetInstance jetInstance = coordinationService.getJetService().getJetInstance();
- return copyMapUsingJob(jetInstance, COPY_MAP_JOB_QUEUE_SIZE, sourceMapName, EXPORTED_SNAPSHOTS_PREFIX + name)
+ return copyMapUsingJob(jetInstance, COPY_MAP_JOB_QUEUE_SIZE, sourceMapName, targetMapName)
.whenComplete(withTryCatch(logger, (r, t) -> {
SnapshotValidationRecord validationRecord =
- (SnapshotValidationRecord) jetInstance.getMap(sourceMapName)
+ (SnapshotValidationRecord) jetInstance.getMap(targetMapName)
.get(SnapshotValidationRecord.KEY);
jobRepository.cacheValidationRecord(name, validationRecord);
if (cancelJob) {
diff --git a/hazelcast-jet-core/src/test/java/com/hazelcast/jet/core/ExportSnapshotTest.java b/hazelcast-jet-core/src/test/java/com/hazelcast/jet/core/ExportSnapshotTest.java
index <HASH>..<HASH> 100644
--- a/hazelcast-jet-core/src/test/java/com/hazelcast/jet/core/ExportSnapshotTest.java
+++ b/hazelcast-jet-core/src/test/java/com/hazelcast/jet/core/ExportSnapshotTest.java
@@ -241,6 +241,10 @@ public class ExportSnapshotTest extends JetTestSupport {
job.resume();
assertJobStatusEventually(job, RUNNING);
}
+ // use the state to start new job
+ Job job2 = client.newJob(dag);
+ assertJobStatusEventually(job2, RUNNING);
+ assertTrueAllTheTime(() -> assertEquals(RUNNING, job2.getStatus()), 1);
}
@Test | Tweaks in exporting snapshot of a suspended job (#<I>)
Related to #<I>, I tried again to reproduce the problem, without success | hazelcast_hazelcast | train |
a484f745221ae5f4293e19cb5f45ec2b62b8e07d | diff --git a/kalibro_client/miscellaneous/date_module_result.py b/kalibro_client/miscellaneous/date_module_result.py
index <HASH>..<HASH> 100644
--- a/kalibro_client/miscellaneous/date_module_result.py
+++ b/kalibro_client/miscellaneous/date_module_result.py
@@ -1,9 +1,32 @@
-from recordtype import recordtype
+from collections import namedtuple
+from datetime import datetime
+import dateutil.parser
-class DateModuleResult(recordtype('DateModuleResult',
- ('date', 'module_result'))):
+import kalibro_client.processor
+
+class DateModuleResult(namedtuple('DateModuleResult', 'date module_result')):
__slots__ = ()
- @property
+ # __new__ is overridden since namedtuple is an immutable type
+ def __new__(cls, date, module_result):
+ if module_result is not None and not isinstance(module_result, kalibro_client.processor.ModuleResult):
+ parsed_module_result = kalibro_client.processor.ModuleResult(**module_result)
+ else:
+ parsed_module_result = module_result
+
+ if date is not None and not isinstance(date, datetime):
+ parsed_date = dateutil.parser.parse(date)
+ else:
+ parsed_date = date
+
+ return super(cls, DateModuleResult).__new__(cls, parsed_date, parsed_module_result)
+
+ def _asdict(self):
+ dict_ = super(DateModuleResult, self)._asdict()
+
+ dict_['module_result'] = self.module_result._asdict()
+
+ return dict_
+
def result(self):
return self.module_result.grade
diff --git a/tests/factories.py b/tests/factories.py
index <HASH>..<HASH> 100644
--- a/tests/factories.py
+++ b/tests/factories.py
@@ -187,7 +187,7 @@ class DateModuleResultFactory(factory.Factory):
model = DateModuleResult
date = "2011-10-20T18:26:43.151+00:00"
- module_result = ModuleResultFactory.build()
+ module_result = ModuleResultFactory.build()._asdict()
class RangeSnapshotFactory(factory.Factory):
diff --git a/tests/test_miscellaneous.py b/tests/test_miscellaneous.py
index <HASH>..<HASH> 100644
--- a/tests/test_miscellaneous.py
+++ b/tests/test_miscellaneous.py
@@ -8,7 +8,8 @@ from nose.tools import assert_true, assert_equal, assert_is_not_none, \
from mock import Mock
from factories import NativeMetricFactory, CompoundMetricFactory, \
- DateMetricResultFactory, DateModuleResultFactory, MetricResultFactory
+ DateMetricResultFactory, DateModuleResultFactory, MetricResultFactory, \
+ ModuleResultFactory
from .helpers import not_raises
from kalibro_client.miscellaneous import Granularity, DateMetricResult
@@ -78,19 +79,41 @@ class TestGranularity(object):
class TestDateModuleResult(object):
def setUp(self):
self.subject = DateModuleResultFactory.build()
+ self.module_result = ModuleResultFactory.build()
+ self.second_subject = DateModuleResultFactory.build(date=None, module_result=self.module_result)
def test_properties_getters(self):
assert_true(hasattr(self.subject, 'date'))
assert_true(hasattr(self.subject, 'module_result'))
- @not_raises((AttributeError, ValueError))
+ @raises(AttributeError)
def test_properties_setters(self):
self.subject.date = "2011-10-20T18:27:43.151+00:00"
- self.subject.module_result = None
+ self.subject.module_result = self.module_result
def test_result(self):
- self.subject.module_result = Mock(grade=1.0)
- assert_equal(self.subject.result, 1.0)
+ assert_equal(self.subject.result(), self.module_result.grade)
+
+ def test_constructor(self):
+ assert_equal(self.subject.date, dateutil.parser.parse(DateModuleResultFactory.date))
+ assert_equal(self.second_subject.date, None)
+ assert_equal(self.second_subject.module_result, self.module_result)
+
+ assert_is_not_none(self.subject.module_result)
+ module_result = self.subject.module_result
+ module_result_params = DateModuleResultFactory.module_result
+
+ assert_equal(module_result.grade, float(module_result_params["grade"]))
+ assert_equal(module_result.parent_id, module_result_params["parent_id"])
+ assert_equal(module_result.processing_id, module_result_params["processing_id"])
+
+ def test_asdict(self):
+ dict = self.subject._asdict()
+
+ assert_equal(self.subject.module_result._asdict(), dict["module_result"])
+
+ return dict
+
class TestDateMetricResult(TestCase):
def setUp(self): | Implemented date module result as a named tuple | mezuro_kalibro_client_py | train |
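The idiom used above — a namedtuple subclass that normalises its fields in __new__ because the tuple is frozen before __init__ runs — in a minimal standalone form (hypothetical Event type; dateutil is the third-party python-dateutil package already imported in the diff):

from collections import namedtuple
from datetime import datetime

import dateutil.parser

class Event(namedtuple('Event', 'date label')):
    __slots__ = ()  # no per-instance __dict__; keeps instances tuple-sized

    # namedtuple is immutable, so parsing must happen in __new__:
    # by the time __init__ runs, the fields can no longer be assigned.
    def __new__(cls, date, label):
        if date is not None and not isinstance(date, datetime):
            date = dateutil.parser.parse(date)
        return super(Event, cls).__new__(cls, date, label)

e = Event("2011-10-20T18:26:43", "created")
assert isinstance(e.date, datetime)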
eaf80738870d8178fea0729436ecf9e8354e8c41 | diff --git a/treeherder/services/elasticsearch/utils.py b/treeherder/services/elasticsearch/utils.py
index <HASH>..<HASH> 100644
--- a/treeherder/services/elasticsearch/utils.py
+++ b/treeherder/services/elasticsearch/utils.py
@@ -1,4 +1,5 @@
-from six import string_types
+from six import (iteritems,
+ string_types)
def dict_to_op(d, index_name, doc_type, op_type='index'):
@@ -53,4 +54,4 @@ def to_dict(obj):
]
all_fields = obj.to_dict()
- return {k: v for k, v in all_fields.iteritems() if k in keys}
+ return {k: v for k, v in iteritems(all_fields) if k in keys} | Bug <I> - Make remaining .iteritems() usage Python 3 compatible (#<I>)
Since this usage was added after the others were converted in #<I>.
Fixes:
`AttributeError: 'dict' object has no attribute 'iteritems'` | mozilla_treeherder | train |
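For reference, the portable idiom the fix applies — dispatching through six instead of calling the Python-2-only dict method — reduces to this standalone sketch:

from six import iteritems

d = {"a": 1, "b": 2}

# Python 2 dicts have .iteritems(); Python 3 dicts only have .items().
# six.iteritems(d) calls whichever the running interpreter provides,
# so the same comprehension works under both.
filtered = {k: v for k, v in iteritems(d) if k in ("b",)}
assert filtered == {"b": 2}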
4f2389ef16479151d5a9904a0987c9e66d8c6e4a | diff --git a/lib/component_host/controls/start_component/raises_error.rb b/lib/component_host/controls/start_component/raises_error.rb
index <HASH>..<HASH> 100644
--- a/lib/component_host/controls/start_component/raises_error.rb
+++ b/lib/component_host/controls/start_component/raises_error.rb
@@ -3,7 +3,7 @@ module ComponentHost
module StartComponent
module RaisesError
def self.call
- raise error
+ raise error, "An example error"
end
def self.error
diff --git a/lib/component_host/host.rb b/lib/component_host/host.rb
index <HASH>..<HASH> 100644
--- a/lib/component_host/host.rb
+++ b/lib/component_host/host.rb
@@ -89,6 +89,7 @@ module ComponentHost
rescue => error
record_errors_observer.(error)
+ logger.fatal(tags: [:*, :component, :lifecycle]) { "#{error.message} (Error: #{error.class})" }
raise error
end | Terminal errors are logged at the FATAL log level | eventide-project_component-host | train |
d07ab2e5c02b820618d84fd7821ac8e268b3a80f | diff --git a/microdrop/__init__.py b/microdrop/__init__.py
index <HASH>..<HASH> 100755
--- a/microdrop/__init__.py
+++ b/microdrop/__init__.py
@@ -91,7 +91,7 @@ class DmfControlBoardPlugin(SingletonPlugin):
validators=[ValueAtLeast(minimum=0), ]),
Integer.named('frequency').using(default=1e3, optional=True,
validators=[ValueAtLeast(minimum=0), ]),
- Boolean.named('feedback_enabled').using(default=False),
+ Boolean.named('feedback_enabled').using(default=False, optional=True),
Integer.named('sampling_time_ms').using(default=10, optional=True,
validators=[ValueAtLeast(minimum=0), ]),
Integer.named('n_samples').using(default=10, optional=True,
@@ -193,25 +193,6 @@ class DmfControlBoardPlugin(SingletonPlugin):
app.main_window_controller.label_connection_status. \
set_text(connection_status)
- def get_default_options(self):
- return DmfControlBoardOptions()
-
- def get_step_options(self, step=None):
- """
- Return a FeedbackOptions object for the current step in the protocol.
- If none exists yet, create a new one.
- """
- app = get_app()
- if step is None:
- step = app.protocol.current_step_number
-
- options = app.protocol.current_step().get_data(self.name)
- if options is None:
- # No data is registered for this plugin (for this step).
- options = self.get_default_options()
- app.protocol.current_step().set_data(self.name, options)
- return options
-
def get_actuated_area(self):
area = 0
app = get_app()
@@ -416,18 +397,40 @@ class DmfControlBoardPlugin(SingletonPlugin):
"""
self.control_board.set_waveform_frequency(frequency)
+ def get_default_options(self):
+ return DmfControlBoardOptions()
+
+ def get_step_options(self, step_number=None):
+ """
+ Return a FeedbackOptions object for the current step in the protocol.
+ If none exists yet, create a new one.
+ """
+ app = get_app()
+ if step_number is None:
+ step_number = app.protocol.current_step_number
+
+ step = app.protocol.steps[step_number]
+ options = step.get_data(self.name)
+ if options is None:
+ # No data is registered for this plugin (for this step).
+ options = self.get_default_options()
+ step.set_data(self.name, options)
+ return options
+
def get_step_form_class(self):
return self.Fields
def get_step_fields(self):
return self.Fields.field_schema_mapping.keys()
- def set_step_values(self, values_dict):
+ def set_step_values(self, values_dict, step_number=None):
logger.info('[DmfControlBoardPlugin] set_step_values(): values_dict=%s' % values_dict)
el = self.Fields(value=values_dict)
if not el.validate():
raise ValueError('Invalid values: %s' % el.errors)
- options = self.get_step_options()
+ options = self.get_step_options(step_number=step_number)
+ #import pdb; pdb.set_trace()
+ print '[DmfControlBoardPlugin] step_number=%s' % step_number
for name, field in el.iteritems():
if field.value is None:
continue
@@ -436,11 +439,34 @@ class DmfControlBoardPlugin(SingletonPlugin):
else:
setattr(options, name, field.value)
- def get_step_value(self, name):
+ def get_step_values(self, step_number=None):
+ app = get_app()
+ if step_number is None:
+ step_number = app.protocol.current_step_number
+ step = app.protocol.steps[step_number]
+
+ options = step.get_data(self.name)
+ if options is None:
+ return None
+
+ values = {}
+ for name in self.Fields.field_schema_mapping:
+ try:
+ value = getattr(options, name)
+ except AttributeError:
+ value = getattr(options.feedback_options, name)
+ values[name] = value
+ return values
+
+ def get_step_value(self, name, step_number=None):
app = get_app()
if not name in self.Fields.field_schema_mapping:
raise KeyError('No field with name %s for plugin %s' % (name, self.name))
- options = app.protocol.current_step().get_data(self.name)
+ if step_number is None:
+ step_number = app.protocol.current_step_number
+ step = app.protocol.steps[step_number]
+
+ options = step.get_data(self.name)
if options is None:
return None
try: | Add step parameter to field-related methods. | wheeler-microfluidics_dmf-control-board-firmware | train |
d83273ce8cb284bc6338f353a905679a8037cda3 | diff --git a/test/test_libusb_structs.rb b/test/test_libusb_structs.rb
index <HASH>..<HASH> 100644
--- a/test/test_libusb_structs.rb
+++ b/test/test_libusb_structs.rb
@@ -47,12 +47,12 @@ class TestLibusbStructs < Minitest::Test
t = LIBUSB::InterruptTransfer.new allow_device_memory: true
assert_equal :TRANSFER_COMPLETED, t.status
assert_equal true, t.allow_device_memory
- assert_equal nil, t.memory_type
+ assert_nil t.memory_type
t.alloc_buffer(10)
assert_equal :user_space, t.memory_type, "no device assigned -> should use memory from user space"
t.free_buffer
- assert_equal nil, t.memory_type
+ assert_nil t.memory_type
end
end | Avoid warnings about "assert_equal nil, x" | larskanis_libusb | train |
9dbeab488f2ba60bec3db82f9ab029710b1aca42 | diff --git a/client/html/templates/catalog/filter/supplier-body-standard.php b/client/html/templates/catalog/filter/supplier-body-standard.php
index <HASH>..<HASH> 100644
--- a/client/html/templates/catalog/filter/supplier-body-standard.php
+++ b/client/html/templates/catalog/filter/supplier-body-standard.php
@@ -18,7 +18,7 @@ $listConfig = $this->config( 'client/html/catalog/lists/url/config', [] );
<?php $this->block()->start( 'catalog/filter/supplier' ); ?>
<section class="catalog-filter-supplier">
- <?php if( !empty( $this->get( 'supplierList', [] ) ) ) : ?>
+ <?php if( !$this->get( 'supplierList', map() )->isEmpty() ) : ?>
<h2><?= $enc->html( $this->translate( 'client', 'Suppliers' ), $enc::TRUST ); ?></h2>
<fieldset class="supplier-lists"> | Hide supplier filter if no suppliers are available | aimeos_ai-client-html | train |
3f34e6de4320813ae8da41bb40659a722fb5b5ed | diff --git a/runner.js b/runner.js
index <HASH>..<HASH> 100644
--- a/runner.js
+++ b/runner.js
@@ -1,3 +1,4 @@
+#!/usr/bin/env node
(function ( getopt, execr ) {
"use strict"; | Add missing shebang to runner
The script fails to run without it; with no interpreter specified it
attempts to run in the current shell and fails on JS syntax | tsu-complete_node-execr | train
b76a85f0900edc9408b978c2a96b8f81b9a4c919 | diff --git a/mockgen/parse.go b/mockgen/parse.go
index <HASH>..<HASH> 100644
--- a/mockgen/parse.go
+++ b/mockgen/parse.go
@@ -641,13 +641,16 @@ func parsePackageImport(srcDir string) (string, error) {
}
}
// fall back to GOPATH mode
- goPath := os.Getenv("GOPATH")
- if goPath == "" {
+ goPaths := os.Getenv("GOPATH")
+ if goPaths == "" {
return "", fmt.Errorf("GOPATH is not set")
}
- sourceRoot := filepath.Join(goPath, "src") + string(os.PathSeparator)
- if !strings.HasPrefix(srcDir, sourceRoot) {
- return "", errOutsideGoPath
+ goPathList := strings.Split(goPaths, string(os.PathListSeparator))
+ for _, goPath := range goPathList {
+ sourceRoot := filepath.Join(goPath, "src") + string(os.PathSeparator)
+ if strings.HasPrefix(srcDir, sourceRoot) {
+ return filepath.ToSlash(strings.TrimPrefix(srcDir, sourceRoot)), nil
+ }
}
- return filepath.ToSlash(strings.TrimPrefix(srcDir, sourceRoot)), nil
+ return "", errOutsideGoPath
}
diff --git a/mockgen/parse_test.go b/mockgen/parse_test.go
index <HASH>..<HASH> 100644
--- a/mockgen/parse_test.go
+++ b/mockgen/parse_test.go
@@ -7,6 +7,7 @@ import (
"io/ioutil"
"os"
"path/filepath"
+ "strings"
"testing"
)
@@ -202,3 +203,45 @@ func TestParsePackageImport_FallbackGoPath(t *testing.T) {
t.Errorf("expect %s, got %s", expected, pkgPath)
}
}
+
+func TestParsePackageImport_FallbackMultiGoPath(t *testing.T) {
+ var goPathList []string
+
+ // first gopath
+ goPath, err := ioutil.TempDir("", "gopath1")
+ if err != nil {
+ t.Error(err)
+ }
+ goPathList = append(goPathList, goPath)
+ defer func() {
+ if err = os.RemoveAll(goPath); err != nil {
+ t.Error(err)
+ }
+ }()
+ srcDir := filepath.Join(goPath, "src/example.com/foo")
+ err = os.MkdirAll(srcDir, 0755)
+ if err != nil {
+ t.Error(err)
+ }
+
+ // second gopath
+ goPath, err = ioutil.TempDir("", "gopath2")
+ if err != nil {
+ t.Error(err)
+ }
+ goPathList = append(goPathList, goPath)
+ defer func() {
+ if err = os.RemoveAll(goPath); err != nil {
+ t.Error(err)
+ }
+ }()
+
+ goPaths := strings.Join(goPathList, string(os.PathListSeparator))
+ os.Setenv("GOPATH", goPaths)
+ os.Setenv("GO111MODULE", "on")
+ pkgPath, err := parsePackageImport(srcDir)
+ expected := "example.com/foo"
+ if pkgPath != expected {
+ t.Errorf("expect %s, got %s", expected, pkgPath)
+ }
+} | support multi-path gopath (#<I>)
Support multi-path gopaths. | golang_mock | train |
0820b36036fdf811b0c263d1180b0f051f7ccaea | diff --git a/lib/jblas.rb b/lib/jblas.rb
index <HASH>..<HASH> 100644
--- a/lib/jblas.rb
+++ b/lib/jblas.rb
@@ -584,6 +584,16 @@ module JBLAS
end
o.close
end
+
+ #def marshal_dump
+ # [rows, columns, data.to_a]
+ #end
+
+ #def mashal_load(d)
+ # puts "ouch"
+ # resize(d[0], d[1])
+ # d[2].each_with_index {|x,i| put(i, x)}
+ #end
end # module MatrixMixin
class MatrixElementWiseProxy | removed marshalling code again - works out of the box | mikiobraun_jblas-ruby | train |
1865c747c5737d5a0961bbae88c0df4cbd242832 | diff --git a/lib/rules/indent.js b/lib/rules/indent.js
index <HASH>..<HASH> 100644
--- a/lib/rules/indent.js
+++ b/lib/rules/indent.js
@@ -41,7 +41,15 @@ module.exports = {
const template = getTemplate(node)
const tokens = getTokens(template)
- const minimalIndent = node.loc.start.column
+ const firstTokenInLine = context
+ .getSourceCode()
+ .getTokensBefore(node, {
+ filter: token => token.loc.end.line === node.loc.start.line,
+ })[0]
+
+ const minimalIndent = firstTokenInLine
+ ? firstTokenInLine.loc.start.column
+ : node.loc.start.column
let currentIndent = 0
tokens.forEach((token, index) => {
diff --git a/tests/lib/rules/indent.js b/tests/lib/rules/indent.js
index <HASH>..<HASH> 100644
--- a/tests/lib/rules/indent.js
+++ b/tests/lib/rules/indent.js
@@ -41,6 +41,32 @@ ruleTester.run('rule "indent"', rule, {
\`
`,
},
+ {
+ code: `
+ const example = pug\`
+ div
+ div: div
+ \`
+ `,
+ },
+ {
+ code: `
+ export default () => pug\`
+ div
+ div: div
+ \`
+ `,
+ },
+ {
+ code: `
+ export default () => {
+ return pug\`
+ div
+ div: div
+ \`
+ }
+ `,
+ },
],
invalid: [
{
@@ -162,5 +188,28 @@ test
endColumn: 17,
}],
},
+ {
+ code: `
+ function asd() {
+ const result = pug\`
+ div
+ div
+ \`
+ }
+ `,
+ errors: [{
+ line: 4,
+ column: 1,
+ endLine: 4,
+ endColumn: 14,
+ message: buildMessage(12, 13),
+ }, {
+ line: 5,
+ column: 1,
+ endLine: 5,
+ endColumn: 16,
+ message: buildMessage(14, 15),
+ }],
+ },
],
}) | Fix "indent" rule: base on start line when request indentation | ezhlobo_eslint-plugin-react-pug | train |
801d752ade5fa907925a2b7de39215c475901ea1 | diff --git a/tangelo/tangelo/__main__.py b/tangelo/tangelo/__main__.py
index <HASH>..<HASH> 100644
--- a/tangelo/tangelo/__main__.py
+++ b/tangelo/tangelo/__main__.py
@@ -405,7 +405,7 @@ def main():
# Create a plugin manager. It is marked global so that the plugins can be
# unloaded when Tangelo exits.
- plugins = tangelo.server.Plugins("tangelo.plugin", plugin_cfg_file)
+ plugins = tangelo.server.Plugins("tangelo.plugin", config_file=plugin_cfg_file, tangelo_dir=invocation_dir)
cherrypy.config.update({"plugins": plugins})
# Create an instance of the main handler object.
diff --git a/tangelo/tangelo/server.py b/tangelo/tangelo/server.py
index <HASH>..<HASH> 100644
--- a/tangelo/tangelo/server.py
+++ b/tangelo/tangelo/server.py
@@ -612,11 +612,12 @@ class Plugins(object):
self.module = None
self.apps = []
- def __init__(self, base_package, config_file):
+ def __init__(self, base_package, config_file, tangelo_dir):
self.base_package = base_package
self.config_file = config_file
self.config_dir = os.path.dirname(self.config_file)
+ self.tangelo_dir = tangelo_dir
self.mtime = 0
self.plugins = {}
self.missing_msg = "Plugin config file %s seems to have disappeared" % (self.config_file)
@@ -631,6 +632,10 @@ class Plugins(object):
def load(self, plugin_name, path):
tangelo.log("PLUGIN", "Loading plugin %s (from %s)" % (plugin_name, path))
+ if not os.path.exists(path):
+ tangelo.log_warning("PLUGIN", "\tNo such path %s" % (path))
+ return False
+
plugin = Plugins.Plugin(path)
# Check for a configuration file.
@@ -641,13 +646,13 @@ class Plugins(object):
config = tangelo.util.yaml_safe_load(config_file)
except TypeError as e:
tangelo.log_warning("PLUGIN", "\tBad configuration in file %s: %s" % (config_file, e))
- return
+ return False
except IOError:
tangelo.log_warning("PLUGIN", "\tCould not open config file %s" % (config_file))
- return
+ return False
except ValueError as e:
tangelo.log_warning("PLUGIN", "\tError reading config file %s: %s" % (config_file, e))
- return
+ return False
# Install the config and an empty dict as the plugin-level
# config and store.
@@ -778,12 +783,13 @@ class Plugins(object):
continue
if enabled and plugin not in self.plugins:
- # Extract the plugin path.
- try:
+ if "path" in conf:
+ # Extract the plugin path.
path = os.path.join(self.config_dir, conf["path"])
- except KeyError:
- tangelo.log_warning("PLUGIN", "error: configuration for plugin '%s' missing required setting 'path'" % (plugin))
- continue
+ else:
+ # Construct the plugin path, given the name of the plugin,
+ # and the base path of Tangelo.
+ path = os.path.join(self.tangelo_dir, "share/tangelo/plugin", plugin)
if not self.load(plugin, path):
tangelo.log_warning("PLUGIN", "Plugin %s failed to load" % (plugin)) | Making "path" option for plugins optional, without it Tangelo searches for a bundled plugin by that name | Kitware_tangelo | train |
8d9217d065bd13f9f59bbb952c90857afccc5e38 | diff --git a/tests/Humbug/Test/GeneratorTest.php b/tests/Humbug/Test/GeneratorTest.php
index <HASH>..<HASH> 100644
--- a/tests/Humbug/Test/GeneratorTest.php
+++ b/tests/Humbug/Test/GeneratorTest.php
@@ -35,12 +35,14 @@ class GeneratorTest extends \PHPUnit_Framework_TestCase
$finder = new Finder;
$finder->files()->name('*.php');
$finder->in($this->root);
+ $finder->sortByName();
$generator = new Generator;
$generator->generate($finder);
$mutables = $generator->getMutables();
- $this->assertEquals($mutables[0]->getFilename(), $this->root . '/library/bool2.php');
- $this->assertEquals($mutables[1]->getFilename(), $this->root . '/library/bool1.php');
+
+ $this->assertEquals($mutables[0]->getFilename(), $this->root . '/library/bool1.php');
+ $this->assertEquals($mutables[1]->getFilename(), $this->root . '/library/bool2.php');
}
public function testShouldGenerateMutableFileObjects() { | GeneratorTest fix - nondeterministic file sort order | humbug_humbug | train
f0dbcb4752e2ad0d641835fc0e77043572f53de1 | diff --git a/src/utils/handle-pinch.js b/src/utils/handle-pinch.js
index <HASH>..<HASH> 100644
--- a/src/utils/handle-pinch.js
+++ b/src/utils/handle-pinch.js
@@ -47,7 +47,7 @@ export const scaleFactor = (scale: number, factor: number, opts: Object) => {
const originalFactor = factor;
const zoomFactor = factor * scale;
return {
- zoomFactor,
+ zoomFactor: Math.min(opts.maxScale * 2, Math.max(zoomFactor, opts.minScale / 2)),
scale: zoomFactor / originalFactor,
};
}; | adds back min and max values for factor so we can clamp it and the user can't pinch too much | houseofradon_pinchit | train
5d6ebecad9226ea0c927721c296f99f923e8742e | diff --git a/lib/fluent/plugin/in_tail.rb b/lib/fluent/plugin/in_tail.rb
index <HASH>..<HASH> 100644
--- a/lib/fluent/plugin/in_tail.rb
+++ b/lib/fluent/plugin/in_tail.rb
@@ -732,7 +732,7 @@ module Fluent::Plugin
if !io.nil? && @lines.empty?
begin
while true
- @fifo << io.readpartial(2048, @iobuf)
+ @fifo << io.readpartial(8192, @iobuf)
while (line = @fifo.next_line)
@lines << line
end
diff --git a/test/plugin/test_in_tail.rb b/test/plugin/test_in_tail.rb
index <HASH>..<HASH> 100644
--- a/test/plugin/test_in_tail.rb
+++ b/test/plugin/test_in_tail.rb
@@ -196,7 +196,7 @@ class TailInputTest < Test::Unit::TestCase
config = CONFIG_READ_FROM_HEAD + config_element("", "", { "read_lines_limit" => limit }) + PARSE_SINGLE_LINE_CONFIG
end
d = create_driver(config)
- msg = 'test' * 500 # in_tail reads 2048 bytes at once.
+ msg = 'test' * 2000 # in_tail reads 8192 bytes at once.
d.run(expect_emits: 1) do
File.open("#{TMP_DIR}/tail.txt", "ab") {|f| | in_tail: increase read block size
<I> was too small for large logs.
<I> is chosen because BUFSIZ in stdio.h is <I>
on Ubuntu and Debian. | fluent_fluentd | train |
7c34dd9b8c45c1c4ea797f39f163f88ff1b1c05c | diff --git a/packages/simplebar/src/simplebar.js b/packages/simplebar/src/simplebar.js
index <HASH>..<HASH> 100755
--- a/packages/simplebar/src/simplebar.js
+++ b/packages/simplebar/src/simplebar.js
@@ -28,8 +28,9 @@ export default class SimpleBar {
offsetAttr: 'left',
overflowAttr: 'overflowX',
dragOffset: 0,
- isEnabled: true,
+ isOverflowing: true,
isVisible: false,
+ forceVisible: false,
track: {},
scrollbar: {}
},
@@ -40,8 +41,9 @@ export default class SimpleBar {
offsetAttr: 'top',
overflowAttr: 'overflowY',
dragOffset: 0,
- isEnabled: true,
+ isOverflowing: true,
isVisible: false,
+ forceVisible: false,
track: {},
scrollbar: {}
}
@@ -383,8 +385,8 @@ export default class SimpleBar {
this.axis.x.isOverflowing = this.elStyles.overflowX === 'hidden' ? false : this.axis.x.isOverflowing;
this.axis.y.isOverflowing = this.elStyles.overflowY === 'hidden' ? false : this.axis.y.isOverflowing;
- this.axis.x.forceVisible = this.elStyles.overflowX === 'scroll';
- this.axis.y.forceVisible = this.elStyles.overflowY === 'scroll';
+ this.axis.x.forceVisible = this.elStyles.overflowX === 'scroll' || this.options.forceVisible;
+ this.axis.y.forceVisible = this.elStyles.overflowY === 'scroll' || this.options.forceVisible;
this.axis.x.scrollbar.size = this.getScrollbarSize('x');
this.axis.y.scrollbar.size = this.getScrollbarSize('y'); | feat: forceVisible now applicable with css overflow: scroll | Grsmto_simplebar | train |
241d234e44afc4422f346f2f3f15a034e5b3a37f | diff --git a/test/functional/associations/test_many_documents_as_proxy.rb b/test/functional/associations/test_many_documents_as_proxy.rb
index <HASH>..<HASH> 100644
--- a/test/functional/associations/test_many_documents_as_proxy.rb
+++ b/test/functional/associations/test_many_documents_as_proxy.rb
@@ -141,7 +141,7 @@ class ManyDocumentsAsProxyTest < Test::Unit::TestCase
end
should "work with order" do
- comments = @post.comments.find(:all, :order => '$natural desc')
+ comments = @post.comments.find(:all, :order => 'body desc')
comments.should == [@comment2, @comment1]
end
end | Minor: Order by something more predictable than $natural in test for many documents as proxy. | mongomapper_mongomapper | train |
aa3374c36502250f16395081bd9af23971bec392 | diff --git a/spec/views/wobauth/roles/index.html.erb_spec.rb b/spec/views/wobauth/roles/index.html.erb_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/views/wobauth/roles/index.html.erb_spec.rb
+++ b/spec/views/wobauth/roles/index.html.erb_spec.rb
@@ -1,19 +1,16 @@
require 'rails_helper'
-RSpec.describe "roles/index", type: :view do
+RSpec.describe "wobauth/roles/index", type: :view do
before(:each) do
assign(:roles, [
- Role.create!(
- :name => "Name"
- ),
- Role.create!(
- :name => "Name"
- )
+ FactoryBot.create(:role, name: "XYZ"),
+ FactoryBot.create(:role, name: "ABC"),
])
end
it "renders a list of roles" do
render
- assert_select "tr>td", :text => "Name".to_s, :count => 2
+ assert_select "tr>td", :text => "XYZ".to_s, :count => 1
+ assert_select "tr>td", :text => "ABC".to_s, :count => 1
end
end | rewrite, but url_helper doesn't work | swobspace_wobauth | train |
be22ea8323107792523b63d8d6e566441b216d91 | diff --git a/src/main/java/org/javacord/entity/server/invite/InviteBuilder.java b/src/main/java/org/javacord/entity/server/invite/InviteBuilder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/javacord/entity/server/invite/InviteBuilder.java
+++ b/src/main/java/org/javacord/entity/server/invite/InviteBuilder.java
@@ -129,7 +129,7 @@ public class InviteBuilder {
*
* @return The created invite.
*/
- public CompletableFuture<Invite> build() {
+ public CompletableFuture<Invite> create() {
return new RestRequest<Invite>(channel.getApi(), RestMethod.POST, RestEndpoint.CHANNEL_INVITE)
.setUrlParameters(channel.getIdAsString())
.setBody(JsonNodeFactory.instance.objectNode() | Renamed InviteBuilder#build() to InviteBuilder#create() | Javacord_Javacord | train |
8df4bca470a92a83a478b5e4cf319ff3a22eee24 | diff --git a/storage/remote/queue_manager.go b/storage/remote/queue_manager.go
index <HASH>..<HASH> 100644
--- a/storage/remote/queue_manager.go
+++ b/storage/remote/queue_manager.go
@@ -194,6 +194,7 @@ type QueueManager struct {
client StorageClient
watcher *wal.Watcher
+ seriesMtx sync.Mutex
seriesLabels map[uint64]labels.Labels
seriesSegmentIndexes map[uint64]int
droppedSeries map[uint64]struct{}
@@ -264,6 +265,7 @@ func NewQueueManager(reg prometheus.Registerer, logger log.Logger, walDir string
func (t *QueueManager) Append(samples []record.RefSample) bool {
outer:
for _, s := range samples {
+ t.seriesMtx.Lock()
lbls, ok := t.seriesLabels[s.Ref]
if !ok {
t.droppedSamplesTotal.Inc()
@@ -271,8 +273,10 @@ outer:
if _, ok := t.droppedSeries[s.Ref]; !ok {
level.Info(t.logger).Log("msg", "dropped sample for series that was not explicitly dropped via relabelling", "ref", s.Ref)
}
+ t.seriesMtx.Unlock()
continue
}
+ t.seriesMtx.Unlock()
// This will only loop if the queues are being resharded.
backoff := t.cfg.MinBackoff
for {
@@ -356,9 +360,11 @@ func (t *QueueManager) Stop() {
t.watcher.Stop()
// On shutdown, release the strings in the labels from the intern pool.
+ t.seriesMtx.Lock()
for _, labels := range t.seriesLabels {
releaseLabels(labels)
}
+ t.seriesMtx.Unlock()
// Delete metrics so we don't have alerts for queues that are gone.
name := t.client.Name()
queueHighestSentTimestamp.DeleteLabelValues(name)
@@ -378,6 +384,8 @@ func (t *QueueManager) Stop() {
// StoreSeries keeps track of which series we know about for lookups when sending samples to remote.
func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
+ t.seriesMtx.Lock()
+ defer t.seriesMtx.Unlock()
for _, s := range series {
ls := processExternalLabels(s.Labels, t.externalLabels)
lbls := relabel.Process(ls, t.relabelConfigs...)
@@ -402,6 +410,8 @@ func (t *QueueManager) StoreSeries(series []record.RefSeries, index int) {
// stored series records with the checkpoints index number, so we can now
// delete any ref ID's lower than that # from the two maps.
func (t *QueueManager) SeriesReset(index int) {
+ t.seriesMtx.Lock()
+ defer t.seriesMtx.Unlock()
// Check for series that are in segments older than the checkpoint
// that were not also present in the checkpoint.
for k, v := range t.seriesSegmentIndexes {
@@ -409,6 +419,7 @@ func (t *QueueManager) SeriesReset(index int) {
delete(t.seriesSegmentIndexes, k)
releaseLabels(t.seriesLabels[k])
delete(t.seriesLabels, k)
+ delete(t.droppedSeries, k)
}
}
}
diff --git a/tsdb/wal/watcher.go b/tsdb/wal/watcher.go
index <HASH>..<HASH> 100644
--- a/tsdb/wal/watcher.go
+++ b/tsdb/wal/watcher.go
@@ -41,10 +41,13 @@ const (
)
// WriteTo is an interface used by the Watcher to send the samples it's read
-// from the WAL on to somewhere else.
+// from the WAL on to somewhere else. Functions will be called concurrently
+// and it is left to the implementer to make sure they are safe.
type WriteTo interface {
Append([]record.RefSample) bool
StoreSeries([]record.RefSeries, int)
+ // SeriesReset is called after reading a checkpoint to allow the deletion
+ // of all series created in a segment lower than the argument.
SeriesReset(int)
}
@@ -337,6 +340,7 @@ func (w *Watcher) watch(segmentNum int, tail bool) error {
}
}
+ gcSem := make(chan struct{}, 1)
for {
select {
case <-w.quit:
@@ -345,9 +349,21 @@ func (w *Watcher) watch(segmentNum int, tail bool) error {
case <-checkpointTicker.C:
// Periodically check if there is a new checkpoint so we can garbage
// collect labels. As this is considered an optimisation, we ignore
- // errors during checkpoint processing.
- if err := w.garbageCollectSeries(segmentNum); err != nil {
- level.Warn(w.logger).Log("msg", "error process checkpoint", "err", err)
+ // errors during checkpoint processing. Doing the process asynchronously
+ // allows the current WAL segment to be processed while reading the
+ // checkpoint.
+ select {
+ case gcSem <- struct{}{}:
+ go func() {
+ defer func() {
+ <-gcSem
+ }()
+ if err := w.garbageCollectSeries(segmentNum); err != nil {
+ level.Warn(w.logger).Log("msg", "error process checkpoint", "err", err)
+ }
+ }()
+ default:
+ // Currently doing a garbage collect, try again later.
}
case <-segmentTicker.C: | Garbage collect asynchronously in the WAL Watcher
The WAL Watcher replays a checkpoint after it is created in order to
garbage collect series that no longer exist in the WAL. Currently the
garbage collection process is done serially with reading from the tip of
the WAL which can cause large delays in writing samples to remote
storage just after compaction occurs.
This also fixes a memory leak where dropped series are not cleaned up as
part of the SeriesReset process. | prometheus_prometheus | train |
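The Go code gates the background collection with a 1-slot channel used as a try-lock: acquire-or-skip, release when the goroutine finishes. The same pattern re-expressed in Python for illustration (not Prometheus code):

import threading

gc_sem = threading.Semaphore(1)

def maybe_garbage_collect(run_gc):
    # Non-blocking acquire: if a collection is already in flight,
    # skip this tick rather than stalling the WAL read loop.
    if not gc_sem.acquire(blocking=False):
        return False
    def worker():
        try:
            run_gc()
        finally:
            gc_sem.release()
    threading.Thread(target=worker, daemon=True).start()
    return True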
51886c3880d844ce361ff0abd1ebcb450aa89b14 | diff --git a/pkg/api/meta/restmapper.go b/pkg/api/meta/restmapper.go
index <HASH>..<HASH> 100644
--- a/pkg/api/meta/restmapper.go
+++ b/pkg/api/meta/restmapper.go
@@ -80,6 +80,9 @@ type DefaultRESTMapper struct {
pluralToSingular map[unversioned.GroupVersionResource]unversioned.GroupVersionResource
interfacesFunc VersionInterfacesFunc
+
+ // aliasToResource is used for mapping aliases to resources
+ aliasToResource map[string][]string
}
func (m *DefaultRESTMapper) String() string {
@@ -103,6 +106,7 @@ func NewDefaultRESTMapper(defaultGroupVersions []unversioned.GroupVersion, f Ver
kindToScope := make(map[unversioned.GroupVersionKind]RESTScope)
singularToPlural := make(map[unversioned.GroupVersionResource]unversioned.GroupVersionResource)
pluralToSingular := make(map[unversioned.GroupVersionResource]unversioned.GroupVersionResource)
+ aliasToResource := make(map[string][]string)
// TODO: verify name mappings work correctly when versions differ
return &DefaultRESTMapper{
@@ -112,6 +116,7 @@ func NewDefaultRESTMapper(defaultGroupVersions []unversioned.GroupVersion, f Ver
defaultGroupVersions: defaultGroupVersions,
singularToPlural: singularToPlural,
pluralToSingular: pluralToSingular,
+ aliasToResource: aliasToResource,
interfacesFunc: f,
}
}
@@ -488,20 +493,17 @@ func (m *DefaultRESTMapper) RESTMapping(gk unversioned.GroupKind, versions ...st
return retVal, nil
}
-// aliasToResource is used for mapping aliases to resources
-var aliasToResource = map[string][]string{}
-
// AddResourceAlias maps aliases to resources
func (m *DefaultRESTMapper) AddResourceAlias(alias string, resources ...string) {
if len(resources) == 0 {
return
}
- aliasToResource[alias] = resources
+ m.aliasToResource[alias] = resources
}
// AliasesForResource returns whether a resource has an alias or not
func (m *DefaultRESTMapper) AliasesForResource(alias string) ([]string, bool) {
- if res, ok := aliasToResource[alias]; ok {
+ if res, ok := m.aliasToResource[alias]; ok {
return res, true
}
return nil, false | move aliasToResource to a field of DefaultRESTMapper | kubernetes_kubernetes | train |
6accfe1356270dfe7913b4385de337bdd4d65d38 | diff --git a/fastlane/lib/fastlane/actions/setup_ci.rb b/fastlane/lib/fastlane/actions/setup_ci.rb
index <HASH>..<HASH> 100644
--- a/fastlane/lib/fastlane/actions/setup_ci.rb
+++ b/fastlane/lib/fastlane/actions/setup_ci.rb
@@ -92,12 +92,11 @@ module Fastlane
FastlaneCore::ConfigItem.new(key: :force,
env_name: "FL_SETUP_CI_FORCE",
description: "Force setup, even if not executed by CI",
- is_string: false,
+ type: Boolean,
default_value: false),
FastlaneCore::ConfigItem.new(key: :provider,
env_name: "FL_SETUP_CI_PROVIDER",
description: "CI provider. If none is set, the provider is detected automatically",
- is_string: true,
optional: true,
verify_block: proc do |value|
value = value.to_s | [action][setup_ci] remove all instances of `is_string` in options and use `type` (#<I>) | fastlane_fastlane | train |
f4efaeefa73ce2163399b3b4a6622cbe026a0a25 | diff --git a/model/DataObject.php b/model/DataObject.php
index <HASH>..<HASH> 100644
--- a/model/DataObject.php
+++ b/model/DataObject.php
@@ -2850,12 +2850,10 @@ class DataObject extends ViewableData implements DataObjectInterface, i18nEntity
&& DataObject::$_cache_get_one[$callerClass][$cacheKey] instanceof DataObject
&& DataObject::$_cache_get_one[$callerClass][$cacheKey]->destroyed) {
- DataObject::$_cache_get_one[$callerClass][$cacheKey
- ] = false;
+ DataObject::$_cache_get_one[$callerClass][$cacheKey] = false;
}
if(!$cache || !isset(DataObject::$_cache_get_one[$callerClass][$cacheKey])) {
- $dl = DataList::create($callerClass)->where($filter)->sort($orderby);
- $dl->setDataModel(DataModel::inst());
+ $dl = $callerClass::get()->where($filter)->sort($orderby);
$item = $dl->First();
if($cache) {
diff --git a/tests/model/DataObjectTest.php b/tests/model/DataObjectTest.php
index <HASH>..<HASH> 100644
--- a/tests/model/DataObjectTest.php
+++ b/tests/model/DataObjectTest.php
@@ -171,6 +171,10 @@ class DataObjectTest extends SapphireTest {
$this->assertEquals('Bob', $comment->Name);
$comment = DataObject::get_one('DataObjectTest_TeamComment', '', true, '"Name" DESC');
$this->assertEquals('Phil', $comment->Name);
+
+ // Test get_one() with bad case on the classname
+ $subteam1 = DataObject::get_one('dataobjecttest_subteam', "\"Title\" = 'Subteam 1'", true);
+ $this->assertEquals($subteam1->Title, "Subteam 1");
}
public function testGetSubclassFields() { | FIX: Fix DataObject::get_one() when the classname is passed with improper casing. | silverstripe_silverstripe-framework | train |
6699bd4a4474734563ad684fba78bba8f9b6ac04 | diff --git a/command/remount.go b/command/remount.go
index <HASH>..<HASH> 100644
--- a/command/remount.go
+++ b/command/remount.go
@@ -62,8 +62,8 @@ Usage: vault remount [options] from to
This command remounts a secret backend that is already mounted to
a new path. All the secrets from the old path will be revoked, but
- the Vault data associated with the backend (such as configuration)
- will be preserved.
+ the data associated with the backend (such as configuration), will
+ be preserved.
Example: vault remount secret/ generic/
diff --git a/command/server.go b/command/server.go
index <HASH>..<HASH> 100644
--- a/command/server.go
+++ b/command/server.go
@@ -675,7 +675,7 @@ func (c *ServerCommand) enableDev(core *vault.Core, rootTokenID string) (*vault.
return nil, err
}
if !unsealed {
- return nil, fmt.Errorf("failed to unseal Vault for Dev mode")
+ return nil, fmt.Errorf("failed to unseal Vault for dev mode")
}
isLeader, _, err := core.Leader()
diff --git a/command/ssh.go b/command/ssh.go
index <HASH>..<HASH> 100644
--- a/command/ssh.go
+++ b/command/ssh.go
@@ -271,23 +271,23 @@ func (c *SSHCommand) defaultRole(mountPoint, ip string) (string, error) {
}
func (c *SSHCommand) Synopsis() string {
- return "Initiate a SSH session"
+ return "Initiate an SSH session"
}
func (c *SSHCommand) Help() string {
helpText := `
Usage: vault ssh [options] username@ip
- Establishes a SSH connection with the target machine.
+ Establishes an SSH connection with the target machine.
- This command generates a key and uses it to establish a SSH
+ This command generates a key and uses it to establish an SSH
connection with the target machine. This operation requires
that the SSH backend is mounted and at least one 'role' is
registered with Vault beforehand.
For setting up SSH backends with one-time-passwords, installation
- of vault-ssh-helper on target machines is required.
- See [https://github.com/hashicorp/vault-ssh-agent]
+ of vault-ssh-helper or a compatible agent on target machines
+ is required. See [https://github.com/hashicorp/vault-ssh-agent].
General Options:
` + meta.GeneralOptionsUsage() + ` | Add some minor tweaks to the PR | hashicorp_vault | train |
fe60ecf02d4f29ab1c9748aaab5d3d3b0e00b5af | diff --git a/kmip/version.py b/kmip/version.py
index <HASH>..<HASH> 100644
--- a/kmip/version.py
+++ b/kmip/version.py
@@ -13,4 +13,4 @@
# License for the specific language governing permissions and limitations
# under the License.
-__version__ = "0.10.0"
+__version__ = "0.11.0dev1" | Bump the library version to <I>dev1 | OpenKMIP_PyKMIP | train |