hash
stringlengths 40
40
| diff
stringlengths 131
114k
| message
stringlengths 7
980
| project
stringlengths 5
67
| split
stringclasses 1
value |
---|---|---|---|---|
296ea46a56dd3057657220de17dc87b8eecb5a7c | diff --git a/lib/chef/knife/core/object_loader.rb b/lib/chef/knife/core/object_loader.rb
index <HASH>..<HASH> 100644
--- a/lib/chef/knife/core/object_loader.rb
+++ b/lib/chef/knife/core/object_loader.rb
@@ -69,15 +69,15 @@ class Chef
#
# @api public
def find_all_objects(path)
- path = File.join(path, '*')
+ path = File.join(Chef::Util::PathHelper.escape_glob(File.expand_path(path)), '*')
path << '.{json,rb}'
- objects = Dir.glob(Chef::Util::PathHelper.escape_glob(File.expand_path(path)))
+ objects = Dir.glob(path)
objects.map { |o| File.basename(o) }
end
def find_all_object_dirs(path)
- path = File.join(path, '*')
- objects = Dir.glob(Chef::Util::PathHelper.escape_glob(File.expand_path(path)))
+ path = File.join(Chef::Util::PathHelper.escape_glob(File.expand_path(path)), '*')
+ objects = Dir.glob(path)
objects.delete_if { |o| !File.directory?(o) }
objects.map { |o| File.basename(o) }
end | Excape path for globbing. | chef_chef | train |
c6dfb490ab52f704efdb84a4473f28ac2120fcc4 | diff --git a/cmd/helm/completion.go b/cmd/helm/completion.go
index <HASH>..<HASH> 100644
--- a/cmd/helm/completion.go
+++ b/cmd/helm/completion.go
@@ -72,6 +72,16 @@ To load completions for every new session, execute once:
You will need to start a new shell for this setup to take effect.
`
+const powershellCompDesc = `
+Generate the autocompletion script for powershell.
+
+To load completions in your current shell session:
+PS C:\> helm completion powershell | Out-String | Invoke-Expression
+
+To load completions for every new session, add the output of the above command
+to your powershell profile.
+`
+
const (
noDescFlagName = "no-descriptions"
noDescFlagText = "disable completion descriptions"
@@ -123,7 +133,19 @@ func newCompletionCmd(out io.Writer) *cobra.Command {
}
fish.Flags().BoolVar(&disableCompDescriptions, noDescFlagName, false, noDescFlagText)
- cmd.AddCommand(bash, zsh, fish)
+ powershell := &cobra.Command{
+ Use: "powershell",
+ Short: "generate autocompletion script for powershell",
+ Long: powershellCompDesc,
+ Args: require.NoArgs,
+ ValidArgsFunction: noCompletions,
+ RunE: func(cmd *cobra.Command, args []string) error {
+ return runCompletionPowershell(out, cmd)
+ },
+ }
+ powershell.Flags().BoolVar(&disableCompDescriptions, noDescFlagName, false, noDescFlagText)
+
+ cmd.AddCommand(bash, zsh, fish, powershell)
return cmd
}
@@ -180,6 +202,13 @@ func runCompletionFish(out io.Writer, cmd *cobra.Command) error {
return cmd.Root().GenFishCompletion(out, !disableCompDescriptions)
}
+func runCompletionPowershell(out io.Writer, cmd *cobra.Command) error {
+ if disableCompDescriptions {
+ return cmd.Root().GenPowerShellCompletion(out)
+ }
+ return cmd.Root().GenPowerShellCompletionWithDesc(out)
+}
+
// Function to disable file completion
func noCompletions(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
return nil, cobra.ShellCompDirectiveNoFileComp | feat(comp): Add support for powershell completion
Cobra provides powershell auto-completion out of the box. This commit
teaches helm how to use it. | helm_helm | train |
75079222186799234b01db1af807a3eed76cb457 | diff --git a/bcbio/rnaseq/count.py b/bcbio/rnaseq/count.py
index <HASH>..<HASH> 100644
--- a/bcbio/rnaseq/count.py
+++ b/bcbio/rnaseq/count.py
@@ -27,7 +27,7 @@ def combine_count_files(files, out_file=None, ext=".fpkm"):
"Some count files in %s do not exist." % files
for f in files:
assert file_exists(f), "%s does not exist or is empty." % f
- col_names = [os.path.basename(os.path.splitext(x)[0]) for x in files]
+ col_names = [os.path.basename(x.replace(ext, "")) for x in files]
if not out_file:
out_dir = os.path.join(os.path.dirname(files[0]))
out_file = os.path.join(out_dir, "combined.counts") | When combining counts, trim off multiple extensions if passed in. | bcbio_bcbio-nextgen | train |
b4820dbd3197122a4b88dcb883b15cf18ff3c674 | diff --git a/src/Models/Permission.php b/src/Models/Permission.php
index <HASH>..<HASH> 100644
--- a/src/Models/Permission.php
+++ b/src/Models/Permission.php
@@ -3,13 +3,21 @@
use Arcanedev\LaravelAuth\Bases\Model;
use Arcanedev\LaravelAuth\Contracts\Permission as PermissionContract;
use Arcanedev\LaravelAuth\Traits\AuthPermissionRelationships;
-use Carbon\Carbon;
/**
* Class Permission
*
* @package Arcanedev\LaravelAuth\Models
* @author ARCANEDEV <[email protected]>
+ *
+ * @property int id
+ * @property string name
+ * @property string slug
+ * @property string description
+ * @property string model
+ * @property \Carbon\Carbon created_at
+ * @property \Carbon\Carbon updated_at
+ * @property \Illuminate\Database\Eloquent\Collection roles
*/
class Permission extends Model implements PermissionContract
{
diff --git a/src/Models/Role.php b/src/Models/Role.php
index <HASH>..<HASH> 100644
--- a/src/Models/Role.php
+++ b/src/Models/Role.php
@@ -3,7 +3,6 @@
use Arcanedev\LaravelAuth\Bases\Model;
use Arcanedev\LaravelAuth\Contracts\Role as RoleContract;
use Arcanedev\LaravelAuth\Traits\AuthRoleRelationships;
-use Carbon\Carbon;
/**
* Class Role
@@ -11,13 +10,15 @@ use Carbon\Carbon;
* @package Arcanedev\LaravelAuth\Models
* @author ARCANEDEV <[email protected]>
*
- * @property int id
- * @property string slug
- * @property string description
- * @property bool is_active
- * @property bool is_locked
- * @property Carbon created_at
- * @property Carbon updated_at
+ * @property int id
+ * @property string slug
+ * @property string description
+ * @property bool is_active
+ * @property bool is_locked
+ * @property \Carbon\Carbon created_at
+ * @property \Carbon\Carbon updated_at
+ * @property \Illuminate\Database\Eloquent\Collection users
+ * @property \Illuminate\Database\Eloquent\Collection permissions
*/
class Role extends Model implements RoleContract
{
diff --git a/src/Models/User.php b/src/Models/User.php
index <HASH>..<HASH> 100644
--- a/src/Models/User.php
+++ b/src/Models/User.php
@@ -9,7 +9,6 @@ use Illuminate\Auth\Passwords\CanResetPassword;
use Illuminate\Contracts\Auth\Access\Authorizable as AuthorizableContract;
use Illuminate\Contracts\Auth\Authenticatable as AuthenticatableContract;
use Illuminate\Contracts\Auth\CanResetPassword as CanResetPasswordContract;
-use Illuminate\Database\Eloquent\Builder;
use Illuminate\Database\Eloquent\SoftDeletes;
use Illuminate\Foundation\Auth\Access\Authorizable;
@@ -19,7 +18,24 @@ use Illuminate\Foundation\Auth\Access\Authorizable;
* @package Arcanedev\LaravelAuth\Models
* @author ARCANEDEV <[email protected]>
*
- * @method Builder unconfirmed(string $code)
+ * @property int id
+ * @property string username
+ * @property string first_name
+ * @property string last_name
+ * @property string email
+ * @property string password
+ * @property string remember_token
+ * @property bool is_admin
+ * @property bool is_active
+ * @property bool is_confirmed (Optional)
+ * @property string confirmation_code (Optional)
+ * @property \Carbon\Carbon confirmed_at (Optional)
+ * @property \Carbon\Carbon created_at
+ * @property \Carbon\Carbon updated_at
+ * @property \Carbon\Carbon deleted_at
+ * @property \Illuminate\Database\Eloquent\Collection roles
+ *
+ * @method \Illuminate\Database\Eloquent\Builder unconfirmed(string $code)
*/
class User
extends Model
@@ -101,7 +117,7 @@ class User
*
* @return \Illuminate\Database\Eloquent\Builder
*/
- public function scopeUnconfirmed(Builder $query, $code)
+ public function scopeUnconfirmed($query, $code)
{
return $query->where('is_confirmed', false)
->where('confirmation_code', $code) | Fixing use statements and doc comments | ARCANEDEV_LaravelAuth | train |
fc416cb6fcc7a2c02a66ebfe613180f51299771f | diff --git a/src/main/java/org/dynjs/ir/Builder.java b/src/main/java/org/dynjs/ir/Builder.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dynjs/ir/Builder.java
+++ b/src/main/java/org/dynjs/ir/Builder.java
@@ -202,7 +202,13 @@ public class Builder implements CodeVisitor {
@Override
public Object visit(Object context, ExpressionStatement statement, boolean strict) {
- return unimplemented(context, statement, strict);
+ Scope scope = (Scope) context;
+ Variable variable = scope.createTemporaryVariable();
+ Operand value = (Operand) acceptOrUndefined(context, statement.getExpr(), strict);
+
+ scope.addInstruction(new Copy(variable, value));
+
+ return value;
}
@Override
diff --git a/src/main/java/org/dynjs/ir/Scope.java b/src/main/java/org/dynjs/ir/Scope.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dynjs/ir/Scope.java
+++ b/src/main/java/org/dynjs/ir/Scope.java
@@ -20,12 +20,16 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import org.dynjs.ir.operands.LocalVariable;
+import org.dynjs.ir.operands.TemporaryVariable;
import org.dynjs.ir.operands.Variable;
// FIXME: Modelled as single scope now but I doubt this will hold for long.
public class Scope {
private Scope parent;
+ private Map<Integer, Variable> temporaryVariables = new HashMap<>();
+ private int temporaryVariablesIndex = 0;
+
private Map<String, Variable> localVariables = new HashMap<>();
// What next variable index will be (also happens to be current size
private int localVariablesIndex = 0;
@@ -67,11 +71,26 @@ public class Scope {
Variable variable = findVariable(name);
if (variable == null) {
- variable = new LocalVariable(name, localVariablesIndex);
+ variable = new LocalVariable(this, name, localVariablesIndex);
localVariables.put(name, variable);
localVariablesIndex++;
}
return variable;
}
+
+ // FIXME: Do I care about all the boxing here of index?
+ public Variable acquireTemporaryVariable(int index) {
+ Variable variable = temporaryVariables.get(index);
+
+ return variable == null ? createTemporaryVariable() : variable;
+ }
+
+ public Variable createTemporaryVariable() {
+ Variable variable = new TemporaryVariable(temporaryVariablesIndex);
+
+ temporaryVariablesIndex++;
+
+ return variable;
+ }
}
diff --git a/src/main/java/org/dynjs/ir/operands/LocalVariable.java b/src/main/java/org/dynjs/ir/operands/LocalVariable.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/dynjs/ir/operands/LocalVariable.java
+++ b/src/main/java/org/dynjs/ir/operands/LocalVariable.java
@@ -1,7 +1,8 @@
package org.dynjs.ir.operands;
+import org.dynjs.ir.Scope;
+
/**
- * Local variable in the current scope.
* Offset is an identifier for the JIT and a location
* identifier for the Interpreter.
*/
@@ -9,7 +10,7 @@ public class LocalVariable extends Variable {
private String name;
private int offset;
- public LocalVariable(String name, int offset) {
+ public LocalVariable(Scope scope, String name, int offset) {
this.name = name;
this.offset = offset;
} | Sketch in IR Temporary variables | dynjs_dynjs | train |
3a763c653938f25e87562ac92a9fa80d25de40cf | diff --git a/parserator/__init__.py b/parserator/__init__.py
index <HASH>..<HASH> 100644
--- a/parserator/__init__.py
+++ b/parserator/__init__.py
@@ -1,2 +1,159 @@
+import config
+import os
from manual_labeling import xmlLabeler
from training import train
+
+
+try :
+ TAGGER = pycrfsuite.Tagger()
+ path = os.path.split(os.path.abspath(__file__))[0] + '/' + config.MODEL_FILE
+ TAGGER.open(path)
+except IOError :
+ warnings.warn("You must train the model (run training/training.py) and create the "+config.MODEL_FILE" file before you can use the parse and tag methods")
+
+
+
+def parse(raw_string) :
+
+ tokens = tokenize(raw_string)
+
+ if not tokens :
+ return []
+
+ features = tokens2features(tokens)
+
+ tags = TAGGER.tag(features)
+ return zip(tokens, tags)
+
+
+def tag(raw_string) :
+ tagged = OrderedDict()
+ for token, label in parse(raw_string) :
+ tagged.setdefault(label, []).append(token)
+
+ for token in tagged :
+ component = ' '.join(tagged[token])
+ component = component.strip(" ,;")
+ tagged[token] = component
+
+ return tagged
+
+
+# This defines how a raw string is split into tokens, to be tagged
+def tokenize(raw_string) :
+ re_tokens = re.compile(r"""
+ \(*[^\s,;()]+[.,;)]* # ['ab. cd,ef '] -> ['ab.', 'cd,', 'ef']
+ """,
+ re.VERBOSE | re.UNICODE)
+
+ tokens = re_tokens.findall(raw_string)
+
+ if not tokens :
+ return []
+
+ return tokens
+
+#####################################################
+# This is where sequence-level features are defined #
+# e.g. first token, last token, etc #
+#####################################################
+def tokens2features(tokens):
+
+ feature_sequence = [config.tokenFeatures(tokens[0])]
+ previous_features = feature_sequence[-1].copy()
+
+ seen_comma = False
+
+ for token in tokens[1:] :
+ token_features = config.tokenFeatures(token)
+
+ # # This is an example of a feature for whether a comma has been encountered in previous tokens
+ # if not seen_comma and previous_features['comma'] :
+ # seen_comma = True
+ # if seen_comma :
+ # token_features['seen.comma'] = True
+
+ current_features = token_features.copy()
+
+ feature_sequence[-1]['next'] = current_features
+ token_features['previous'] = previous_features
+
+ feature_sequence.append(token_features)
+
+ previous_features = current_features
+
+ if len(feature_sequence) > 1 :
+ feature_sequence[0]['rawstring.start'] = True
+ feature_sequence[-1]['rawstring.end'] = True
+ feature_sequence[1]['previous']['rawstring.start'] = True
+ feature_sequence[-2]['next']['rawstring.end'] = True
+
+ else :
+ feature_sequence[0]['singleton'] = True
+
+ return feature_sequence
+
+
+
+###########################################################
+# This is where features of individual tokens are defined #
+###########################################################
+
+VOWELS_Y = tuple('aeiouy')
+
+def tokenFeatures(token) :
+
+ if token in (u'&') :
+ token_chars = token_chars_lc = token
+
+ else :
+ # this is the token w/o punctuation
+ token_chars = re.sub(r'(^[\W]*)|([^\w]*$)', u'', token)
+ # this is the token w/o punctuation & w/o capitalization
+ token_chars_lc = re.sub(r'\W', u'', token_chars.lower())
+
+ # below are some basic examples of feature definitions
+ features = {
+ # lowercase chars example
+ 'nopunc' : token_chars_lc,
+ # word shape example
+ 'case' : casing(token_chars),
+ # length example
+ 'length' : len(token_chars_lc),
+ # vowels example
+ 'has.vowels' : bool(set(token_chars_lc[1:]) & set('aeiouy')),
+ # vowel ratio example
+ 'more.vowels' : vowelRatio(token_chars_lc)
+ }
+
+ reversed_token = token_chars_lc[::-1]
+ for i in range(1, len(token_chars_lc)) :
+ features['prefix_%s' % i] = token_chars_lc[:i]
+ features['suffix_%s' % i] = reversed_token[:i][::-1]
+ if i > 4 :
+ break
+
+ return features
+
+# word shape feature example
+def casing(token) :
+ if token.isupper() :
+ return 'upper'
+ elif token.islower() :
+ return 'lower'
+ elif token.istitle() :
+ return 'title'
+ elif token.isalpha() :
+ return 'mixed'
+ else :
+ return False
+
+# vowel ratio feature example
+def vowelRatio(token) :
+ n_chars = len(token)
+ if n_chars > 1:
+ n_vowels = sum(token.count(c) for c in VOWELS_Y)
+ return n_vowels/float(n_chars)
+ else :
+ return False
+ | parsing, tokenizing, setting features | datamade_parserator | train |
6b20a5a112455439e1586f81672f6276cf86293c | diff --git a/js/vcc.js b/js/vcc.js
index <HASH>..<HASH> 100644
--- a/js/vcc.js
+++ b/js/vcc.js
@@ -18,29 +18,56 @@ module.exports = class vcc extends Exchange {
'rateLimit': 1000,
'version': 'v3',
'has': {
- 'fetchTradingFee': true,
+ 'spot': true,
+ 'margin': false,
+ 'swap': false,
+ 'future': false,
+ 'option': false,
+ 'addMargin': false,
'cancelAllOrders': true,
'cancelOrder': true,
'createOrder': true,
+ 'createReduceOnlyOrder': false,
'editOrder': undefined,
'fetchBalance': true,
+ 'fetchBorrowRate': false,
+ 'fetchBorrowRateHistory': false,
+ 'fetchBorrowRates': false,
+ 'fetchBorrowRatesPerSymbol': false,
'fetchClosedOrders': true,
'fetchCurrencies': true,
'fetchDepositAddress': true,
'fetchDeposits': true,
+ 'fetchFundingHistory': false,
+ 'fetchFundingRate': false,
+ 'fetchFundingRateHistory': false,
+ 'fetchFundingRates': false,
+ 'fetchIndexOHLCV': false,
+ 'fetchIsolatedPositions': false,
+ 'fetchLeverage': false,
'fetchMarkets': true,
+ 'fetchMarkOHLCV': false,
'fetchMyTrades': true,
'fetchOHLCV': true,
'fetchOpenOrders': true,
'fetchOrder': true,
'fetchOrderBook': true,
'fetchOrders': undefined,
+ 'fetchPosition': false,
+ 'fetchPositions': false,
+ 'fetchPositionsRisk': false,
+ 'fetchPremiumIndexOHLCV': false,
'fetchTicker': 'emulated',
'fetchTickers': true,
'fetchTrades': true,
+ 'fetchTradingFee': true,
'fetchTradingFees': undefined,
'fetchTransactions': true,
'fetchWithdrawals': true,
+ 'reduceMargin': false,
+ 'setLeverage': false,
+ 'setMarginMode': false,
+ 'setPositionMode': false,
},
'timeframes': {
'1m': '60000',
@@ -176,29 +203,45 @@ module.exports = class vcc extends Exchange {
const quoteId = this.safeString (market, 'currency');
const base = this.safeCurrencyCode (baseId);
const quote = this.safeCurrencyCode (quoteId);
- const active = this.safeValue (market, 'active');
const precision = this.safeValue (market, 'precision', {});
const limits = this.safeValue (market, 'limits', {});
const amountLimits = this.safeValue (limits, 'amount', {});
const priceLimits = this.safeValue (limits, 'price', {});
const costLimits = this.safeValue (limits, 'cost', {});
const entry = {
- 'info': market,
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
+ 'settle': undefined,
'baseId': baseId,
'quoteId': quoteId,
+ 'settledId': undefined,
'type': 'spot',
'spot': true,
- 'active': active,
+ 'margin': false,
+ 'swap': false,
+ 'future': false,
+ 'option': false,
+ 'active': this.safeValue (market, 'active', false),
+ 'contract': false,
+ 'linear': undefined,
+ 'inverse': undefined,
+ 'contractSize': undefined,
+ 'expiry': undefined,
+ 'expiryDatetime': undefined,
+ 'strike': undefined,
+ 'optionType': undefined,
'precision': {
'price': this.safeInteger (precision, 'price'),
'amount': this.safeInteger (precision, 'amount'),
'cost': this.safeInteger (precision, 'cost'),
},
'limits': {
+ 'leverage': {
+ 'min': undefined,
+ 'max': undefined,
+ },
'amount': {
'min': this.safeNumber (amountLimits, 'min'),
'max': undefined,
@@ -212,6 +255,7 @@ module.exports = class vcc extends Exchange {
'max': undefined,
},
},
+ 'info': market,
};
result.push (entry);
} | vcc.fetchMarkets unified and futures leverage methods | ccxt_ccxt | train |
47f5d1f0dcf96593c81e2adb3479a8005ebcfb24 | diff --git a/lib/internal/queue.js b/lib/internal/queue.js
index <HASH>..<HASH> 100644
--- a/lib/internal/queue.js
+++ b/lib/internal/queue.js
@@ -119,11 +119,12 @@ export default function queue(worker, concurrency, payload) {
data.push(node.data);
}
+ numRunning += 1;
+ workersList.push(tasks[0]);
+
if (q._tasks.length === 0) {
q.empty();
}
- numRunning += 1;
- workersList.push(tasks[0]);
if (numRunning === q.concurrency) {
q.saturated();
diff --git a/mocha_test/queue.js b/mocha_test/queue.js
index <HASH>..<HASH> 100644
--- a/mocha_test/queue.js
+++ b/mocha_test/queue.js
@@ -603,6 +603,35 @@ describe('queue', function(){
q.push([]);
});
+
+ // #1367
+ it('empty and not idle()', function(done) {
+ var calls = [];
+ var q = async.queue(function(task, cb) {
+ // nop
+ calls.push('process ' + task);
+ setImmediate(cb);
+ }, 1);
+
+ q.empty = function () {
+ calls.push('empty');
+ assert(q.idle() === false,
+ 'tasks should be running when empty is called')
+ expect(q.running()).to.equal(1);
+ }
+
+ q.drain = function() {
+ calls.push('drain');
+ expect(calls).to.eql([
+ 'empty',
+ 'process 1',
+ 'drain'
+ ]);
+ done();
+ };
+ q.push(1);
+ });
+
it('saturated', function(done) {
var saturatedCalled = false;
var q = async.queue(function(task, cb) { | change timing of q.empty() so that q.idle() will be false. Fixes #<I> | caolan_async | train |
2b3b00bdc8c2f523cd1f38104093f6893b953c7d | diff --git a/lib/inspectors/weinre.js b/lib/inspectors/weinre.js
index <HASH>..<HASH> 100644
--- a/lib/inspectors/weinre.js
+++ b/lib/inspectors/weinre.js
@@ -1,9 +1,13 @@
var Transform = require('pipestream').Transform;
var util = require('../util');
+var weinreUrl;
module.exports = function(req, res, next) {
if (req.rules.weinre) {
- var localUIHost = this.config.localUIHost;
+ if (!weinreUrl) {
+ weinreUrl = this.config.weinreport + '.weinre.'
+ + this.config.localUIHost + '/target/target-script-min.js#';
+ }
util.disableReqCache(req.headers);
res.on('src', function(_res) {
if (!util.supportHtmlTransform(_res)) {
@@ -14,8 +18,8 @@ module.exports = function(req, res, next) {
var transform = new Transform();
transform._transform = function(chunk, encoding, callback) {
if (!chunk) {
- chunk = util.toBuffer('\r\n<script src="' + (req.isHttps ? 'https:' : 'http:') + '//weinre.' + localUIHost + '/target/target-script-min.js#'
-+ (name || 'anonymous') + '"></script>\r\n');
+ chunk = util.toBuffer('\r\n<script src="' + (req.isHttps ? 'https:' : 'http:')
+ + '//' + weinreUrl + (name || 'anonymous') + '"></script>\r\n');
}
callback(null, chunk);
}; | refactor(weinre): Support multiple instances | avwo_whistle | train |
0a38605c5466023b0afddbb48bf77b22afc2bb9d | diff --git a/metal/mmtl/BERT_tasks.py b/metal/mmtl/BERT_tasks.py
index <HASH>..<HASH> 100644
--- a/metal/mmtl/BERT_tasks.py
+++ b/metal/mmtl/BERT_tasks.py
@@ -24,7 +24,7 @@ def create_task(task_name):
if task_name == "COLA":
scorer = Scorer(
- standard_metrics=["train/loss", "valid/loss"],
+ standard_metrics=[],
custom_train_funcs=[matthews_corr],
custom_valid_funcs=[matthews_corr],
)
diff --git a/metal/mmtl/mmtl_logger.py b/metal/mmtl/mmtl_logger.py
index <HASH>..<HASH> 100644
--- a/metal/mmtl/mmtl_logger.py
+++ b/metal/mmtl/mmtl_logger.py
@@ -114,7 +114,7 @@ class Logger(object):
header = f"{self.unit_total:0.2f} {self.log_unit[:3]}"
else:
epochs = self.example_total / self.epoch_size
- header += f" ({epochs:0.2f} epo)"
+ header = f" ({epochs:0.2f} epo)"
string = f"[{header}]:"
if score_strings["train"]: | fix bug with header when log unit is not epochs | HazyResearch_metal | train |
d0fa0b82d7b3324ceb46e4feb58ca3675d2d7db7 | diff --git a/NavigationReactNative/sample/zoom/SharedElementMotion.js b/NavigationReactNative/sample/zoom/SharedElementMotion.js
index <HASH>..<HASH> 100644
--- a/NavigationReactNative/sample/zoom/SharedElementMotion.js
+++ b/NavigationReactNative/sample/zoom/SharedElementMotion.js
@@ -28,6 +28,22 @@ class SharedElementMotion extends React.Component {
return {sharedElements};
});
}
+ getSharedElements() {
+ var {currentUrl, oldUrl} = this.getStateNavigator().stateContext;
+ var {sharedElements} = this.state;
+ var activeSharedElements = [];
+ if (url !== currentUrl) {
+ for(var name in sharedElements[url]) {
+ if ((sharedElements[oldUr] || {})[name]) {
+ activeSharedElements.push({
+ from: sharedElements[oldUrl][name],
+ to: sharedElements[url][name]
+ })
+ }
+ }
+ }
+ return activeSharedElements;
+ }
render() {
return null;
} | Built shared elements intersection array
Animate the matching shared elements that exist in current and old state | grahammendick_navigation | train |
55ae45682936833b16e85ff8983ea842af200a16 | diff --git a/huawei_lte_api/api/Sms.py b/huawei_lte_api/api/Sms.py
index <HASH>..<HASH> 100644
--- a/huawei_lte_api/api/Sms.py
+++ b/huawei_lte_api/api/Sms.py
@@ -26,7 +26,7 @@ class Sms(ApiGroup):
def get_sms_list(self,
page: int=1,
box_type: BoxTypeEnum=BoxTypeEnum.LOCAL_INBOX,
- read_count: int=None,
+ read_count: int=20,
sort_type: int=0,
ascending: int=0,
unread_preferred: int=0 | Set default for sms_list read_count
At least the E<I>s-<I>a seems to require an int >= 1 in it.
Refs <URL> | Salamek_huawei-lte-api | train |
6edf4471d1e55ce6b587a7e37dd540d787716413 | diff --git a/pandas/core/window.py b/pandas/core/window.py
index <HASH>..<HASH> 100644
--- a/pandas/core/window.py
+++ b/pandas/core/window.py
@@ -280,7 +280,7 @@ class Window(_Window):
center : boolean, default False
Set the labels at the center of the window.
win_type : string, default None
- prove a window type, see the notes below
+ Provide a window type. See the notes below.
axis : int, default 0
Returns | DOC: Fix wording/grammar for rolling's win_type argument. | pandas-dev_pandas | train |
07d5c0676e02a27683b93835eba57fa53aaa3de8 | diff --git a/client/connection.go b/client/connection.go
index <HASH>..<HASH> 100644
--- a/client/connection.go
+++ b/client/connection.go
@@ -82,9 +82,9 @@ func New(nick, user, name string) *Conn {
Timestamp: time.LocalTime,
TSFormat: "15:04:05",
}
- conn.Me = conn.NewNick(nick, user, name, "")
conn.initialise()
conn.setupEvents()
+ conn.Me = conn.NewNick(nick, user, name, "")
return conn
} | Fix compile error when NewNick() is called before initialise(). | fluffle_goirc | train |
aec4a0a16cf61bbcfe1b721a56cb047dc1261e34 | diff --git a/core/test/header.spec.js b/core/test/header.spec.js
index <HASH>..<HASH> 100644
--- a/core/test/header.spec.js
+++ b/core/test/header.spec.js
@@ -80,11 +80,7 @@ describe('LogoTitle', function() {
await headerService.processHeaderSettings(component);
// then
- assert.equal(
- component.refs.logo.style.backgroundImage,
- 'url(' + headerSettings.logo + ')',
- 'backgroundImage logo'
- );
+ assert.equal(component.refs.logo.src, headerSettings.logo, 'header logo');
assert(
component.set.calledOnceWith({ hasLogo: true }),
'component set() hasLogo' | Fix failing unit tests (#<I>) | kyma-project_luigi | train |
6e858f41de0b2eb509e544b8aa91bfa38cc928bf | diff --git a/examples/collection.py b/examples/collection.py
index <HASH>..<HASH> 100644
--- a/examples/collection.py
+++ b/examples/collection.py
@@ -1,6 +1,7 @@
from helpers import authenticate
from trakt import Trakt
+from trakt.objects import Movie, Show
import logging
import os
@@ -23,13 +24,29 @@ if __name__ == '__main__':
token=authenticate()
)
+ # Fetch playback
+ playback = Trakt['sync'].playback(exceptions=True)
+
+ for key, item in playback.items():
+ print item
+
+ if type(item) is Movie:
+ print '\tprogress: %r' % item.progress
+ print '\tpaused_at: %r' % item.paused_at
+ elif type(item) is Show:
+ for (sk, ek), episode in item.episodes():
+ print '\t', episode
+
+ print '\t\tprogress: %r' % episode.progress
+ print '\t\tpaused_at: %r' % episode.paused_at
+
# Fetch movie library (watched, collection, ratings)
movies = {}
- # Trakt['sync/watched'].movies(movies)
- # Trakt['sync/collection'].movies(movies)
- #
- # Trakt['sync/ratings'].movies(movies)
+ Trakt['sync/watched'].movies(movies, exceptions=True)
+ Trakt['sync/collection'].movies(movies, exceptions=True)
+
+ Trakt['sync/ratings'].movies(movies, exceptions=True)
for key, movie in movies.items():
print movie | Updated [examples/collection.py] | fuzeman_trakt.py | train |
d838bd17a7e2ab6a2a1b701f39c292fc6107e22f | diff --git a/host/basil/HL/HardwareLayer.py b/host/basil/HL/HardwareLayer.py
index <HASH>..<HASH> 100644
--- a/host/basil/HL/HardwareLayer.py
+++ b/host/basil/HL/HardwareLayer.py
@@ -30,13 +30,13 @@ class HardwareLayer(Base):
def init(self):
pass
- def _set_value(self, value, addr, size=8, offset=0): # TODO: allow bit string for value (e.g. '10011110')
+ def _set_value(self, value, addr, size, offset):
'''Writing a value of any arbitrary size (max. unsigned int 64) and offset to a register
Parameters
----------
- value : int
- The register value to be written.
+ value : int, str
+ The register value (int, long, bit string) to be written.
addr : int
The register address.
size : int
@@ -48,20 +48,27 @@ class HardwareLayer(Base):
-------
nothing
'''
- if not size:
+ if not size and isinstance(value, (int, long)):
raise ValueError('Size must be greater than zero')
- if value.bit_length() > size:
- raise ValueError('Bit length of value is too big for given size')
+ if isinstance(value, (int, long)) and value.bit_length() > size:
+ raise ValueError('Value is too big for given size')
+ elif isinstance(value, basestring) and size and len(value) != size:
+ raise ValueError('Bit string does not match to the given size')
div, mod = divmod(size + offset, 8)
if mod:
div += 1
ret = self._intf.read(self._base_addr + addr, size=div)
reg = BitLogic()
reg.frombytes(ret.tostring())
- reg[size + offset - 1:offset] = BitLogic.from_value(value)[size - 1:0] # offset + size + 1:offset
+ if isinstance(value, (int, long)):
+ reg[size + offset - 1:offset] = BitLogic.from_value(value, size=size)
+ elif isinstance(value, basestring):
+ reg[size + offset - 1:offset] = BitLogic(value)
+ else:
+ raise ValueError('Type not supported')
self._intf.write(self._base_addr + addr, data=reg.tobytes())
- def _get_value(self, addr, size=8, offset=0):
+ def _get_value(self, addr, size, offset):
'''Reading a value of any arbitrary size (max. unsigned int 64) and offset from a register
Parameters
@@ -69,7 +76,7 @@ class HardwareLayer(Base):
addr : int
The register address.
size : int
- Bit size/length of the value to be written to the register.
+ Bit size/length of the value.
offset : int
Offset of the value to be written to the register (in number of bits).
@@ -91,8 +98,8 @@ class HardwareLayer(Base):
Parameters
----------
- value : iterable
- The data () to be written.
+ data : iterable
+ The data (byte array) to be written.
addr : int
The register address.
@@ -100,18 +107,21 @@ class HardwareLayer(Base):
-------
nothing
'''
- raise NotImplementedError('Has to be implemented')
+ self._intf.write(self._conf['base_addr'] + addr, data)
- def _get_data(self, addr):
+ def _get_data(self, addr, size):
'''Reading bytes of any arbitrary size
Parameters
----------.
addr : int
The register address.
+ size : int
+ Byte length of the value.
Returns
-------
- nothing
+ data : iterable
+ Byte array.
'''
- raise NotImplementedError('Has to be implemented')
+ return self._intf.read(self._conf['base_addr'] + addr, size)
diff --git a/host/basil/tests/test_HardwareLayer.py b/host/basil/tests/test_HardwareLayer.py
index <HASH>..<HASH> 100644
--- a/host/basil/tests/test_HardwareLayer.py
+++ b/host/basil/tests/test_HardwareLayer.py
@@ -23,26 +23,32 @@ class TestHardwareLayer(unittest.TestCase):
def test_write_read_reg(self):
for val in range(256):
- self.hl._set(val, 0, size=8, offset=0)
- ret_val = self.hl._get(0, size=8, offset=0)
+ self.hl._set_value(val, 0, size=8, offset=0)
+ ret_val = self.hl._get_value(0, size=8, offset=0)
self.assertEqual(ret_val, val)
+ def test_write_read_reg_with_bit_str(self):
+ val = '00110110' # 54
+ self.hl._set_value(val, 0, size=8, offset=0)
+ ret_val = self.hl._get_value(0, size=8, offset=0)
+ self.assertEqual(ret_val, int(val, base=2))
+
def test_write_read_reg_with_offset(self):
for offset in range(32):
val = 131
- self.hl._set(val, 0, size=8, offset=offset)
- ret_val = self.hl._get(0, size=8, offset=offset)
+ self.hl._set_value(val, 0, size=8, offset=offset)
+ ret_val = self.hl._get_value(0, size=8, offset=offset)
self.assertEqual(ret_val, val)
def test_write_read_reg_with_size(self):
for size in range(8, 33):
val = 131
- self.hl._set(val, 0, size=size, offset=7)
- ret_val = self.hl._get(0, size=size, offset=7)
+ self.hl._set_value(val, 0, size=size, offset=7)
+ ret_val = self.hl._get_value(0, size=size, offset=7)
self.assertEqual(ret_val, val)
def test_wrong_size(self):
- self.assertRaises(ValueError, self.hl._set, 131, addr=0, size=7, offset=7)
+ self.assertRaises(ValueError, self.hl._set_value, 131, addr=0, size=7, offset=7)
if __name__ == '__main__':
unittest.main() | ENH: support string
ENH: writing and reading data | SiLab-Bonn_basil | train |
05fe5bed7c4a6af1c9a171e874e72be7df2ae49d | diff --git a/salt/utils/cloud.py b/salt/utils/cloud.py
index <HASH>..<HASH> 100644
--- a/salt/utils/cloud.py
+++ b/salt/utils/cloud.py
@@ -459,7 +459,7 @@ def bootstrap(vm_, opts=None):
'wait_for_passwd_maxtries', vm_, opts, default=15
),
'preflight_cmds': salt.config.get_cloud_config_value(
- 'preflight_cmds', vm_, __opts__, default=[]
+ 'preflight_cmds', vm_, opts, default=[]
),
'cloud_grains': {'driver': vm_['driver'],
'provider': vm_['provider'], | preflight_cmds should use passed opts | saltstack_salt | train |
7ab40ea9312d12d2446f4300afc1b07b8ec9c80b | diff --git a/h2o-core/src/test/java/water/fvec/InteractionWrappedVecTest.java b/h2o-core/src/test/java/water/fvec/InteractionWrappedVecTest.java
index <HASH>..<HASH> 100644
--- a/h2o-core/src/test/java/water/fvec/InteractionWrappedVecTest.java
+++ b/h2o-core/src/test/java/water/fvec/InteractionWrappedVecTest.java
@@ -9,9 +9,9 @@ import water.TestUtil;
import java.util.Arrays;
public class InteractionWrappedVecTest extends TestUtil {
- @BeforeClass static public void setup() { stall_till_cloudsize(1); }
+ @BeforeClass static public void setup() { stall_till_cloudsize(3); }
- /*@Test*/ public void testIris() { // basic "can i construct the vec" test
+ @Test public void testIris() { // basic "can i construct the vec" test
Frame fr=null;
InteractionWrappedVec interactionVec=null;
try {
@@ -38,7 +38,7 @@ public class InteractionWrappedVecTest extends TestUtil {
}
// test interacting two enum columns
- /*@Test*/ public void testTwoEnum() {
+ @Test public void testTwoEnum() {
Frame fr=null;
InteractionWrappedVec interactionVec=null;
int FAKEMAXFORTEST=1000; | bump node count, unlock other tests | h2oai_h2o-3 | train |
054a5932441e2e2b52dba71b6fb1acd53d445183 | diff --git a/src/main/java/com/harium/keel/feature/Feature.java b/src/main/java/com/harium/keel/feature/Feature.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/harium/keel/feature/Feature.java
+++ b/src/main/java/com/harium/keel/feature/Feature.java
@@ -22,7 +22,6 @@ public class Feature extends ColorFeature implements FeatureArea {
setBounds(x, y, width, height);
}
-
public void setBounds(int x, int y, int width, int height) {
this.x = x;
this.y = y;
@@ -47,11 +46,11 @@ public class Feature extends ColorFeature implements FeatureArea {
}
public boolean colidePoint(int px, int py) {
- return CollisionDetector.colideRectPoint(x, y, width, height, px, py);
+ return CollisionDetector.collideRectPoint(x, y, width, height, px, py);
}
public boolean colide(Feature feature) {
- return CollisionDetector.colideRectRect(x, y, width, height,
+ return CollisionDetector.collideRectRect(x, y, width, height,
feature.x, feature.y, feature.width, feature.height);
} | Update code to etyl-commons | Harium_keel | train |
2bf24a21c7504ee87d771c4c14c79b117c2f2d53 | diff --git a/jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/Main.java b/jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/Main.java
index <HASH>..<HASH> 100644
--- a/jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/Main.java
+++ b/jqm-all/jqm-engine/src/main/java/com/enioka/jqm/tools/Main.java
@@ -118,6 +118,7 @@ public class Main
Option o61 = OptionBuilder.withArgName("nodeName").hasArg()
.withDescription("create a JQM node of this name (init the database if needed").isRequired().create("createnode");
Option o71 = OptionBuilder.withDescription("display JQM engine version").withLongOpt("version").create("v");
+ Option o81 = OptionBuilder.withDescription("upgrade JQM database").withLongOpt("upgrade").create("u");
Options options = new Options();
OptionGroup og1 = new OptionGroup();
@@ -130,6 +131,7 @@ public class Main
og1.addOption(o51);
og1.addOption(o61);
og1.addOption(o71);
+ og1.addOption(o81);
options.addOptionGroup(og1);
try
@@ -179,6 +181,11 @@ public class Main
{
createEngine(line.getOptionValue(o61.getOpt()));
}
+ // Upgrade
+ else if (line.hasOption(o81.getOpt()))
+ {
+ upgrade();
+ }
// Help
else if (line.hasOption(o01.getOpt()))
{
@@ -258,6 +265,22 @@ public class Main
}
}
+ private static void upgrade()
+ {
+ try
+ {
+ Helpers.allowCreateSchema();
+ jqmlogger.info("Upgrading");
+ EntityManager em = Helpers.getNewEm();
+ Helpers.getParameter("none", "", em);
+ em.close();
+ }
+ catch (Exception e)
+ {
+ jqmlogger.fatal("Could not upgrade", e);
+ }
+ }
+
private static void exportAllQueues(String xmlPath)
{
QueueXmlExporter qxe = new QueueXmlExporter();
diff --git a/jqm-all/jqm-webui/jqm-webui-war/src/main/java/com/enioka/ui/controlers/JobInstanceControler.java b/jqm-all/jqm-webui/jqm-webui-war/src/main/java/com/enioka/ui/controlers/JobInstanceControler.java
index <HASH>..<HASH> 100644
--- a/jqm-all/jqm-webui/jqm-webui-war/src/main/java/com/enioka/ui/controlers/JobInstanceControler.java
+++ b/jqm-all/jqm-webui/jqm-webui-war/src/main/java/com/enioka/ui/controlers/JobInstanceControler.java
@@ -1,7 +1,6 @@
package com.enioka.ui.controlers;
import java.io.Serializable;
-import java.math.BigDecimal;
import java.util.ArrayList;
import java.util.List;
import java.util.Map; | CLI: added DB upgrade option without node creation | enioka_jqm | train |
11251c5fa5c991f6ddb256dea591fc955c53fb98 | diff --git a/src/seqmagick/magickwrap.py b/src/seqmagick/magickwrap.py
index <HASH>..<HASH> 100644
--- a/src/seqmagick/magickwrap.py
+++ b/src/seqmagick/magickwrap.py
@@ -370,7 +370,7 @@ class MagickWrap(object):
cut_record.annotations[k] = v
# Letter annotations must be lists / tuples / strings of the same
- # length as teh sequence
+ # length as the sequence
for k, v in record.letter_annotations.items():
cut_record.letter_annotations[k] = v[start:end]
@@ -403,8 +403,22 @@ class MagickWrap(object):
if self.verbose: print 'Applying _reverse_sequences generator: ' + \
'reversing the order of sites in sequences.'
for record in records:
- yield SeqRecord(record.seq[::-1], id=record.id,
- description=record.description)
+ rev_record = SeqRecord(record.seq[::-1], id=record.id,
+ name=record.name,
+ description=record.description)
+ # Copy the annotations over
+ for k, v in record.annotations.items():
+ # Trim if appropriate
+ if isinstance(v, (tuple, list)) and len(v) == len(record):
+ v = v[::-1]
+ rev_record.annotations[k] = v
+
+ # Letter annotations must be lists / tuples / strings of the same
+ # length as the sequence
+ for k, v in record.letter_annotations.items():
+ rev_record.letter_annotations[k] = v[::-1]
+
+ yield rev_record
def _reverse_complement_sequences(self, records):
@@ -414,8 +428,22 @@ class MagickWrap(object):
if self.verbose: print 'Applying _reverse_complement_sequences generator: ' + \
'transforming sequences into reverse complements.'
for record in records:
- yield SeqRecord(record.seq.reverse_complement(), id=record.id,
- description=record.description)
+ rev_record = SeqRecord(record.seq.reverse_complement(),
+ id=record.id, name=record.name,
+ description=record.description)
+ # Copy the annotations over
+ for k, v in record.annotations.items():
+ # Trim if appropriate
+ if isinstance(v, (tuple, list)) and len(v) == len(record):
+ v = v[::-1]
+ rev_record.annotations[k] = v
+
+ # Letter annotations must be lists / tuples / strings of the same
+ # length as the sequence
+ for k, v in record.letter_annotations.items():
+ rev_record.letter_annotations[k] = v[::-1]
+
+ yield rev_record
def _ungap_sequences(self, records):
""" | Add annotation copying to rev and revcomp
This could use a little tidying - duplicated | fhcrc_seqmagick | train |
17885f135bbc0663e35d3eb74f6d2b411895c3a8 | diff --git a/topology/probes/netlink.go b/topology/probes/netlink.go
index <HASH>..<HASH> 100644
--- a/topology/probes/netlink.go
+++ b/topology/probes/netlink.go
@@ -48,15 +48,18 @@ type NetLinkProbe struct {
Root *graph.Node
nlSocket *nl.NetlinkSocket
state int64
- indexToChildrenQueue map[int64][]*graph.Node
+ indexToChildrenQueue map[int64][]graph.Identifier
wg sync.WaitGroup
}
func (u *NetLinkProbe) linkMasterChildren(intf *graph.Node, index int64) {
// add children of this interface that haven previously added
if children, ok := u.indexToChildrenQueue[index]; ok {
- for _, child := range children {
- u.Graph.Link(intf, child, graph.Metadata{"RelationType": "layer2"})
+ for _, id := range children {
+ child := u.Graph.GetNode(id)
+ if child != nil {
+ u.Graph.Link(intf, child, graph.Metadata{"RelationType": "layer2"})
+ }
}
delete(u.indexToChildrenQueue, index)
}
@@ -86,7 +89,7 @@ func (u *NetLinkProbe) handleIntfIsChild(intf *graph.Node, link netlink.Link) {
u.Graph.Link(parent, intf, graph.Metadata{"RelationType": "layer2"})
} else {
// not yet the bridge so, enqueue for a later add
- u.indexToChildrenQueue[index] = append(u.indexToChildrenQueue[index], intf)
+ u.indexToChildrenQueue[index] = append(u.indexToChildrenQueue[index], intf.ID)
}
}
}
@@ -481,7 +484,7 @@ func NewNetLinkProbe(g *graph.Graph, n *graph.Node) *NetLinkProbe {
np := &NetLinkProbe{
Graph: g,
Root: n,
- indexToChildrenQueue: make(map[int64][]*graph.Node),
+ indexToChildrenQueue: make(map[int64][]graph.Identifier),
state: StoppedState,
}
return np | graph: fix race when a child of bridge delete before linked
A "queue" is used to add children of a bridge while the bridge
is not already present in the graph. Once the bridge is added
we iterate over the queue in order to add pending children but
children can have been deleted.
Change-Id: Ic<I>c<I>fdb<I>a<I>f<I>fc<I>d7d4ecfa<I> | skydive-project_skydive | train |
703f85224ee05dbba80461d48e7173e472c7942d | diff --git a/src/functions.php b/src/functions.php
index <HASH>..<HASH> 100644
--- a/src/functions.php
+++ b/src/functions.php
@@ -21,7 +21,9 @@ function promise_for($value)
// Return a Guzzle promise that shadows the given promise.
if (method_exists($value, 'then')) {
- $promise = new Promise();
+ $wfn = method_exists($value, 'wait') ? [$value, 'wait'] : null;
+ $cfn = method_exists($value, 'cancel') ? [$value, 'cancel'] : null;
+ $promise = new Promise($wfn, $cfn);
$value->then([$promise, 'resolve'], [$promise, 'reject']);
return $promise;
} | Proxying cancel and wait functions when wrapping promises | guzzle_promises | train |
e5aa676b33b839019999790ea326dde0720e7825 | diff --git a/tests/django.py b/tests/django.py
index <HASH>..<HASH> 100644
--- a/tests/django.py
+++ b/tests/django.py
@@ -88,7 +88,27 @@ class TestDjango(BaseTestClass):
self.assertTrue(os.path.exists(starting_page_py))
self.assertTrue(os.path.exists(starting_page_json))
- def test_patch_django_16_settings(self):
+ # Aldryn boilerplate
+ self._create_project_dir()
+ config_data = config.parse(['--db=postgres://user:pwd@host/dbname',
+ '--cms-version=stable', '-a',
+ '-q', '-p'+self.project_dir, 'example_prj'])
+ os.makedirs(config_data.project_path)
+ django.copy_files(config_data)
+ private_dir = os.path.join(config_data.project_directory, 'private')
+ static_js = os.path.join(config_data.project_directory, 'static', 'js', 'base.js')
+ aldryn_template = os.path.join(config_data.project_directory, 'templates', 'fullwidth.html')
+ basic_template = os.path.join(config_data.project_path, 'templates', 'fullwidth.html')
+ boostrap_template = os.path.join(config_data.project_path, 'templates', 'feature.html')
+ custom_template = os.path.join(config_data.project_path, 'templates', 'left.html')
+ self.assertFalse(os.path.exists(custom_template))
+ self.assertFalse(os.path.exists(boostrap_template))
+ self.assertFalse(os.path.exists(basic_template))
+ self.assertTrue(os.path.exists(private_dir))
+ self.assertTrue(os.path.exists(static_js))
+ self.assertTrue(os.path.exists(aldryn_template))
+
+ def test_patch_16_settings(self):
extra_path = os.path.join(os.path.dirname(__file__), 'data', 'extra_settings.py')
config_data = config.parse(['--db=sqlite://localhost/test.db',
'--lang=en', '--extra-settings=%s' % extra_path,
@@ -107,13 +127,42 @@ class TestDjango(BaseTestClass):
globals(), locals(), ['settings'])
## checking for django options
- self.assertTrue(project.settings.MEDIA_ROOT, os.path.join(config_data.project_directory, 'media'))
+ self.assertEqual(project.settings.MEDIA_ROOT, os.path.join(config_data.project_directory, 'media'))
+ self.assertEqual(project.settings.MEDIA_URL, '/media/')
+
+ # Data from external settings file
+ self.assertEqual(project.settings.CUSTOM_SETTINGS_VAR, True)
+ self.assertEqual(project.settings.CMS_PERMISSION, False)
+ self.assertEqual(set(project.settings.CMS_TEMPLATES), self.templates_basic)
+
+ def test_patch_16_aldryn(self):
+ extra_path = os.path.join(os.path.dirname(__file__), 'data', 'extra_settings.py')
+ config_data = config.parse(['--db=sqlite://localhost/test.db',
+ '--lang=en', '--extra-settings=%s' % extra_path,
+ '--django-version=1.6', '-a',
+ '--cms-version=3.0', '--timezone=Europe/Moscow',
+ '-q', '-u', '-zno', '--i18n=no',
+ '-p'+self.project_dir, 'example_path_16_aldryn'])
+ install.requirements(config_data.requirements)
+ django.create_project(config_data)
+ django.patch_settings(config_data)
+ django.copy_files(config_data)
+ # settings is importable even in non django environment
+ sys.path.append(config_data.project_directory)
+
+ project = __import__(config_data.project_name,
+ globals(), locals(), ['settings'])
+
+ ## checking for django options
+ self.assertEqual(project.settings.MEDIA_ROOT, os.path.join(config_data.project_directory, 'dist', 'media'))
+ self.assertEqual(project.settings.TEMPLATE_DIRS, (os.path.join(config_data.project_directory, 'templates'),))
self.assertEqual(project.settings.MEDIA_URL, '/media/')
# Data from external settings file
self.assertEqual(project.settings.CUSTOM_SETTINGS_VAR, True)
self.assertEqual(project.settings.CMS_PERMISSION, False)
self.assertEqual(set(project.settings.CMS_TEMPLATES), self.templates_basic)
+ self.assertTrue('compressor' in project.settings.INSTALLED_APPS)
def test_patch_django_17_settings(self):
extra_path = os.path.join(os.path.dirname(__file__), 'data', 'extra_settings.py')
@@ -161,7 +210,7 @@ class TestDjango(BaseTestClass):
globals(), locals(), ['settings'])
## checking for django options
- self.assertTrue(project.settings.MEDIA_ROOT, os.path.join(config_data.project_directory, 'media'))
+ self.assertEqual(project.settings.MEDIA_ROOT, os.path.join(config_data.project_directory, 'media'))
self.assertEqual(project.settings.MEDIA_URL, '/media/')
self.assertEqual(project.settings.TIME_ZONE, 'Europe/Moscow') | Add more tests for aldryn-boilerplate | nephila_djangocms-installer | train |
e1f1c686161ffed9d311c82b9b6d19845c39f847 | diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb
index <HASH>..<HASH> 100644
--- a/spec/spec_helper.rb
+++ b/spec/spec_helper.rb
@@ -1,6 +1,6 @@
-ENV['RAILS_ENV'] ||= 'test'
+ENV["RAILS_ENV"] ||= "test"
-require 'simplecov'
+require "simplecov"
# SimpleCov.formatter = SimpleCov::Formatter::HTMLFormatter
SimpleCov.start :rails do
add_filter do |source_file|
@@ -8,16 +8,17 @@ SimpleCov.start :rails do
end
end
-require File.expand_path('../dummy/config/environment.rb', __FILE__)
-require 'rspec/rails'
+# require File.expand_path("../dummy/config/environment.rb", __FILE__)
+require File.expand_path("../../config/environment", __FILE__)
+require "rspec/rails"
# require 'rspec/autorun'
-require 'factory_girl_rails'
-require 'capybara/poltergeist'
+require "factory_bot_rails"
+require "capybara/poltergeist"
# require 'capybara/rails'
-require 'pwb/seeder'
+require "pwb/seeder"
# http://www.thegreatcodeadventure.com/stubbing-with-vcr/
-require 'vcr'
-require 'webmock/rspec'
+require "vcr"
+require "webmock/rspec"
WebMock.disable_net_connect!(allow_localhost: true)
# load(Rails.root.join("db", "seeds.rb"))
@@ -57,13 +58,11 @@ Capybara.register_driver :apparition do |app|
Capybara::Apparition::Driver.new(app, options)
end
-
# http://stackoverflow.com/questions/24078768/argumenterror-factory-not-registered
# as per above, need to explicitly set below
-FactoryBot.definition_file_paths = [File.expand_path('../factories', __FILE__)]
-FactoryBot.find_definitions
-# Oddly above does not occur if factory_girl_rails is only referrenced in pwb.gemspec
-# but not main gemfile
+# FactoryBot.definition_file_paths = [File.expand_path("../factories", __FILE__)]
+# FactoryBot.find_definitions
+# July 2022 - above no longer required
Rails.backtrace_cleaner.remove_silencers!
@@ -74,8 +73,6 @@ Dir["#{File.dirname(__FILE__)}/support/**/*.rb"].each { |f| require f }
# replaced above with below in view specs so I can test diff themes
# @controller.prepend_view_path "#{Pwb::Engine.root}/app/themes/berlin/views/"
-
-
RSpec.configure do |config|
# TODO: - consider precompiling assets to speed up tests
# config.before(:suite) do
@@ -87,7 +84,7 @@ RSpec.configure do |config|
config.mock_with :rspec
config.infer_base_class_for_anonymous_controllers = false
- config.order = 'random'
+ config.order = "random"
# config.include Pwb::ApplicationHelper
# config.include Rails.application.routes.url_helpers
# config.include Pwb::Engine.routes.url_helpers | Fix spec_helper for factory_bot and use correct env file | etewiah_property_web_builder | train |
c3efe0a3fa54f0d53a99c9af0ef24e7eaf046b8d | diff --git a/source/core/datasystem/file/format/MWebfileStream.php b/source/core/datasystem/file/format/MWebfileStream.php
index <HASH>..<HASH> 100644
--- a/source/core/datasystem/file/format/MWebfileStream.php
+++ b/source/core/datasystem/file/format/MWebfileStream.php
@@ -75,12 +75,13 @@ class MWebfileStream
{
$json = "{";
- $json .= "webfiles: ";
+ $json .= "webfiles: [";
/** @var MWebfile $webfile */
foreach ($this->webfiles as $webfile) {
$json .= $webfile->marshall(false, true);
+ $json .= ",";
}
- $json .= "}";
+ $json .= "]}";
return $json;
} | corrected json marshalling in webfileStream | sebastianmonzel_webfiles-framework-php | train |
da9f386480cab374efddea12cff91815bc5aba65 | diff --git a/zhaquirks/xiaomi/aqara/cube.py b/zhaquirks/xiaomi/aqara/cube.py
index <HASH>..<HASH> 100644
--- a/zhaquirks/xiaomi/aqara/cube.py
+++ b/zhaquirks/xiaomi/aqara/cube.py
@@ -35,7 +35,7 @@ DROP = "drop"
DROP_VALUE = 3
DROPPED = "device_dropped"
-FACE_ANY = "fave_any"
+FACE_ANY = "face_any"
FACE_1 = "face_1"
FACE_2 = "face_2"
FACE_3 = "face_3"
diff --git a/zhaquirks/xiaomi/aqara/cube_aqgl01.py b/zhaquirks/xiaomi/aqara/cube_aqgl01.py
index <HASH>..<HASH> 100644
--- a/zhaquirks/xiaomi/aqara/cube_aqgl01.py
+++ b/zhaquirks/xiaomi/aqara/cube_aqgl01.py
@@ -35,7 +35,7 @@ DROP = "drop"
DROP_VALUE = 3
DROPPED = "device_dropped"
-FACE_ANY = "fave_any"
+FACE_ANY = "face_any"
FACE_1 = "face_1"
FACE_2 = "face_2"
FACE_3 = "face_3" | Fix translation key for Aqara cube any face (#<I>) | dmulcahey_zha-device-handlers | train |
32cef509b7de0eefdc673b24c66a666466007fc0 | diff --git a/dvc/stage/cache.py b/dvc/stage/cache.py
index <HASH>..<HASH> 100644
--- a/dvc/stage/cache.py
+++ b/dvc/stage/cache.py
@@ -3,7 +3,7 @@ import os
import tempfile
from contextlib import contextmanager
-from funcy import first
+from funcy import cached_property, first
from voluptuous import Invalid
from dvc.cache.local import _log_exceptions
@@ -48,7 +48,10 @@ def _get_stage_hash(stage):
class StageCache:
def __init__(self, repo):
self.repo = repo
- self.cache_dir = os.path.join(repo.cache.local.cache_dir, "runs")
+
+ @cached_property
+ def cache_dir(self):
+ return os.path.join(self.repo.cache.local.cache_dir, "runs")
@property
def tree(self):
@@ -129,6 +132,9 @@ class StageCache:
yield out
def save(self, stage):
+ if stage.is_callback or stage.always_changed:
+ return
+
cache_key = _get_stage_hash(stage)
if not cache_key:
return
diff --git a/tests/unit/stage/test_cache.py b/tests/unit/stage/test_cache.py
index <HASH>..<HASH> 100644
--- a/tests/unit/stage/test_cache.py
+++ b/tests/unit/stage/test_cache.py
@@ -1,5 +1,7 @@
import os
+import pytest
+
def test_stage_cache(tmp_dir, dvc, mocker):
tmp_dir.gen("dep", "dep")
@@ -204,3 +206,19 @@ def test_shared_stage_cache(tmp_dir, dvc, run_copy):
assert _mode(parent_cache_dir) == dir_mode
assert _mode(cache_dir) == dir_mode
assert _mode(cache_file) == file_mode
+
+
+def test_always_changed(mocker):
+ from dvc.repo import Repo
+ from dvc.stage import Stage
+ from dvc.stage.cache import RunCacheNotFoundError, StageCache
+
+ repo = mocker.Mock(spec=Repo)
+ cache = StageCache(repo)
+ stage = Stage(repo, always_changed=True)
+ get_stage_hash = mocker.patch("dvc.stage.cache._get_stage_hash")
+ assert cache.save(stage) is None
+ assert get_stage_hash.not_called
+ with pytest.raises(RunCacheNotFoundError):
+ cache.restore(stage)
+ assert get_stage_hash.not_called | run-cache: don't save always_changed stages (#<I>)
Fixes #<I> | iterative_dvc | train |
e13a0955c7c17efcba7a7f9b4123dc3fbcf5f4ac | diff --git a/src/Core/functions.php b/src/Core/functions.php
index <HASH>..<HASH> 100644
--- a/src/Core/functions.php
+++ b/src/Core/functions.php
@@ -295,7 +295,9 @@ if (!function_exists('deprecationWarning')) {
$frame += ['file' => '[internal]', 'line' => '??'];
$message = sprintf(
- '%s - %s, line: %s',
+ '%s - %s, line: %s' . "\n" .
+ ' You can disable deprecation warnings by setting `Error.errorLevel` to' .
+ ' `E_ALL & ~E_USER_DEPRECATED` in your config/app.php.',
$message,
$frame['file'],
$frame['line'] | Mention how one can disable deprecation warnings in error mesage. | cakephp_cakephp | train |
2d7fab7a14f3faa0fc047d3f825bc92c2c843d1b | diff --git a/src/Component/ButtonFactory.php b/src/Component/ButtonFactory.php
index <HASH>..<HASH> 100644
--- a/src/Component/ButtonFactory.php
+++ b/src/Component/ButtonFactory.php
@@ -41,7 +41,7 @@ EOL;
}
$xpath = new \DOMXpath($table->ownerDocument);
- $link = $xpath->query('//a', $table)->item(0);
+ $link = $xpath->query('.//a', $table)->item(0);
$link->setAttribute('href', $href);
if ($class && in_array('expanded', explode(' ', $class))) {
diff --git a/src/Component/CalloutFactory.php b/src/Component/CalloutFactory.php
index <HASH>..<HASH> 100644
--- a/src/Component/CalloutFactory.php
+++ b/src/Component/CalloutFactory.php
@@ -31,7 +31,7 @@ EOL;
if ($class) {
$xpath = new \DOMXPath($table->ownerDocument);
- $xpath->query('//th[contains(@class, "callout-inner")]', $table)->item(0)
+ $xpath->query('.//th[contains(@class, "callout-inner")]', $table)->item(0)
->setAttribute('class', 'callout-inner ' . $class);
}
}
diff --git a/src/Component/ContainerFactory.php b/src/Component/ContainerFactory.php
index <HASH>..<HASH> 100644
--- a/src/Component/ContainerFactory.php
+++ b/src/Component/ContainerFactory.php
@@ -32,7 +32,7 @@ EOL;
if ($class) {
$xpath = new \DOMXPath($table->ownerDocument);
- $xpath->query('//td', $table)->item(0)
+ $xpath->query('.//td', $table)->item(0)
->setAttribute('class', $class);
}
} | Use relative query for XPath with context, fixes #1 | Prezent_prezent-inky | train |
b335e4725b8f931dca173b1f092a8cca42a8687e | diff --git a/Module.php b/Module.php
index <HASH>..<HASH> 100644
--- a/Module.php
+++ b/Module.php
@@ -156,6 +156,15 @@ class Module
);
return $controller;
},
+ 'rcmInstallController' => function($controllerMgr) {
+ $serviceMgr=$controllerMgr->getServiceLocator();
+ $controller =
+ new \Rcm\Controller\InstallController(
+ $serviceMgr->get('rcmPluginManager'),
+ $serviceMgr->get('em')
+ );
+ return $controller;
+ },
)
);
}
diff --git a/config/module.config.php b/config/module.config.php
index <HASH>..<HASH> 100644
--- a/config/module.config.php
+++ b/config/module.config.php
@@ -380,8 +380,6 @@ return array(
'PrivateDevTools\Controller\CreateController'
=> 'PrivateDevTools\Controller\CreateController',
'PluginProxyController' => 'Rcm\Controller\PluginProxyController',
- 'Rcm\Controller\InstallController'
- => 'Rcm\Controller\InstallController',
),
),
@@ -572,7 +570,7 @@ return array(
'options' => array(
'route' => '/rcm/install',
'defaults' => array(
- 'controller' => 'Rcm\Controller\InstallController',
+ 'controller' => 'rcmInstallController',
'action' => 'index',
)
),
diff --git a/src/Rcm/Controller/AdminController.php b/src/Rcm/Controller/AdminController.php
index <HASH>..<HASH> 100644
--- a/src/Rcm/Controller/AdminController.php
+++ b/src/Rcm/Controller/AdminController.php
@@ -533,7 +533,7 @@ class AdminController extends BaseController
$this->entityMgr->flush();
- $this->savePlugin(
+ $this->pluginManager->savePlugin(
$newPluginInstance->getInstance(),
$data['pluginData']
);
diff --git a/src/Rcm/Controller/InstallController.php b/src/Rcm/Controller/InstallController.php
index <HASH>..<HASH> 100644
--- a/src/Rcm/Controller/InstallController.php
+++ b/src/Rcm/Controller/InstallController.php
@@ -269,7 +269,7 @@ class InstallController extends \Rcm\Controller\EntityMgrAwareController
* Creates a plugin instance for plugins that have controllers that extend
* \RcmJsonDataPluginToolkit\JsonContentController
*
- * @param int $pluginName
+ * @param string $pluginName
* @param array $jsonContent
* @param int $container
* @param int $renderOrder
diff --git a/src/Rcm/Entity/AdminPermissions.php b/src/Rcm/Entity/AdminPermissions.php
index <HASH>..<HASH> 100644
--- a/src/Rcm/Entity/AdminPermissions.php
+++ b/src/Rcm/Entity/AdminPermissions.php
@@ -38,7 +38,7 @@ use Rcm\Entity\Page;
*
* @ORM\Entity
* @ORM\Table(
- * name="rcm_admin_users",
+ * name="rcm_admin_permissions",
* uniqueConstraints={
* @ORM\UniqueConstraint(name="userId", columns={"userId"})
* } | got rss feed reader working with new inversion of control | reliv_Rcm | train |
625d541715e232140e53814fd914090f6f5b5041 | diff --git a/packages/@vue/cli-service/lib/config/app.js b/packages/@vue/cli-service/lib/config/app.js
index <HASH>..<HASH> 100644
--- a/packages/@vue/cli-service/lib/config/app.js
+++ b/packages/@vue/cli-service/lib/config/app.js
@@ -74,15 +74,24 @@ module.exports = (api, options) => {
})
// keep chunk ids stable so async chunks have consistent hash (#1916)
+ const seen = new Set()
+ const nameLength = 4
webpackConfig
.plugin('named-chunks')
.use(require('webpack/lib/NamedChunksPlugin'), [chunk => {
if (chunk.name) {
return chunk.name
}
- return `chunk-` + Array.from(chunk.modulesIterable, m => {
- return m.id
- }).join('_')
+ const modules = Array.from(chunk.modulesIterable)
+ if (modules.length > 1) {
+ const hash = require('hash-sum')
+ const joinedHash = hash(modules.map(m => m.id).join('_'))
+ let len = nameLength
+ while (seen.has(joinedHash.substr(0, len))) len++
+ return `chunk-${joinedHash.substr(0, len)}`
+ } else {
+ return modules[0].id
+ }
}])
} | refactor: improve chunk name aesthetics | vuejs_vue-cli | train |
a0c5ad13f2dd79df0f5e3c756304cb69812d277b | diff --git a/lib/active_record/typed_store/extension.rb b/lib/active_record/typed_store/extension.rb
index <HASH>..<HASH> 100644
--- a/lib/active_record/typed_store/extension.rb
+++ b/lib/active_record/typed_store/extension.rb
@@ -113,14 +113,8 @@ module ActiveRecord::TypedStore
when false, nil then false
else
column = store_column_definition(store_attribute, key)
- if column.nil?
- if Numeric === value || value !~ /[^0-9]/
- !value.to_i.zero?
- else
- return false if ActiveRecord::ConnectionAdapters::Column::FALSE_VALUES.include?(value)
- !value.blank?
- end
- elsif column.number?
+
+ if column.number?
!value.zero?
else
!value.blank? | Remove un-used code in ActiveRecord::TypedStore | byroot_activerecord-typedstore | train |
739feffc434725885f51e5c098a06fee1ca1eb6d | diff --git a/plexapi/video.py b/plexapi/video.py
index <HASH>..<HASH> 100644
--- a/plexapi/video.py
+++ b/plexapi/video.py
@@ -128,14 +128,20 @@ class Show(Video):
path = '/library/metadata/%s/children' % self.ratingKey
return find_item(self.server, path, title)
- def episodes(self):
+ def episodes(self, watched=None):
leavesKey = '/library/metadata/%s/allLeaves' % self.ratingKey
- return list_items(self.server, leavesKey)
+ return list_items(self.server, leavesKey, watched=watched)
def episode(self, title):
path = '/library/metadata/%s/allLeaves' % self.ratingKey
return find_item(self.server, path, title)
+ def watched(self):
+ return self.episodes(watched=True)
+
+ def unwatched(self):
+ return self.episodes(watched=False)
+
def get(self, title):
return self.episode(title)
@@ -161,9 +167,9 @@ class Season(Video):
self.leafCount = cast(int, data.attrib.get('leafCount', NA))
self.viewedLeafCount = cast(int, data.attrib.get('viewedLeafCount', NA))
- def episodes(self):
+ def episodes(self, watched=None):
childrenKey = '/library/metadata/%s/children' % self.ratingKey
- return list_items(self.server, childrenKey)
+ return list_items(self.server, childrenKey, watched=watched)
def episode(self, title):
path = '/library/metadata/%s/children' % self.ratingKey
@@ -175,6 +181,12 @@ class Season(Video):
def show(self):
return list_items(self.server, self.parentKey)[0]
+ def watched(self):
+ return self.episodes(watched=True)
+
+ def unwatched(self):
+ return self.episodes(watched=False)
+
class Episode(Video):
TYPE = 'episode'
@@ -235,14 +247,16 @@ def find_item(server, path, title):
raise NotFound('Unable to find title: %s' % title)
-def list_items(server, path, videotype=None):
+def list_items(server, path, videotype=None, watched=None):
items = []
for elem in server.query(path):
- if not videotype or elem.attrib.get('type') == videotype:
- try:
- items.append(build_item(server, elem, path))
- except UnknownType:
- pass
+ if videotype and elem.attrib.get('type') != videotype: continue
+ if watched is True and elem.attrib.get('viewCount', 0) == 0: continue
+ if watched is False and elem.attrib.get('viewCount', 0) >= 1: continue
+ try:
+ items.append(build_item(server, elem, path))
+ except UnknownType:
+ pass
return items | Get list of watched/unwatched episodes from Season or Show | pkkid_python-plexapi | train |
e23477f076819c57cb273c7261cba19970014b0a | diff --git a/src/main/java/com/twilio/sdk/examples/RestExamples.java b/src/main/java/com/twilio/sdk/examples/RestExamples.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/twilio/sdk/examples/RestExamples.java
+++ b/src/main/java/com/twilio/sdk/examples/RestExamples.java
@@ -81,32 +81,33 @@ public class RestExamples {
messageParams.add(new BasicNameValuePair("Body", "This is a test message!"));
messageFactory.create(messageParams);
- // Search for all available phone numbers
- AvailablePhoneNumberList phoneNumbers = mainAccount.getAvailablePhoneNumbers();
- List<AvailablePhoneNumber> phoneNumberList = phoneNumbers.getPageData();
-
- // Search for available phone numbers & filter by area code
- // For available filters see:
- // http://www.twilio.com/docs/api/rest/available-phone-numbers#local-get-basic-filters
- // http://www.twilio.com/docs/api/rest/available-phone-numbers#local-get-advanced-filters
- Map<String, String> areaCodeFilter = new HashMap<String, String>();
- areaCodeFilter.put("AreaCode", "94103");
- AvailablePhoneNumberList phoneNumbersByAreaCode = mainAccount.getAvailablePhoneNumbers(areaCodeFilter);
- List<AvailablePhoneNumber> phoneNumbersByAreaCodeList = phoneNumbersByAreaCode.getPageData();
-
- // Search for phone numbers local to a country (Great Britain), and filter by SMS enabled
- // For country codes, see:
- // http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
- Map<String, String> smsFilter = new HashMap<String, String>();
- smsFilter.put("SmsEnabled", "true");
- AvailablePhoneNumberList phoneNumbersByCountryAndSms = mainAccount.getAvailablePhoneNumbers(smsFilter, "GB", AvailablePhoneNumberList.TYPE_LOCAL);
- List<AvailablePhoneNumber> phoneNumbersByCountryAndSmsList = phoneNumbersByCountryAndSms.getPageData();
+ // Search for all available phone numbers
+ AvailablePhoneNumberList phoneNumbers = mainAccount.getAvailablePhoneNumbers();
+ List<AvailablePhoneNumber> phoneNumberList = phoneNumbers.getPageData();
+
+ // Search for available phone numbers & filter by area code
+ // For available filters see:
+ // http://www.twilio.com/docs/api/rest/available-phone-numbers#local-get-basic-filters
+ // http://www.twilio.com/docs/api/rest/available-phone-numbers#local-get-advanced-filters
+ Map<String, String> areaCodeFilter = new HashMap<String, String>();
+ areaCodeFilter.put("AreaCode", "94103");
+ AvailablePhoneNumberList phoneNumbersByAreaCode = mainAccount.getAvailablePhoneNumbers(areaCodeFilter);
+ List<AvailablePhoneNumber> phoneNumbersByAreaCodeList = phoneNumbersByAreaCode.getPageData();
+
+ // Search for phone numbers local to a country (Great Britain), and filter by SMS enabled
+ // For country codes, see:
+ // http://en.wikipedia.org/wiki/ISO_3166-1_alpha-2
+ Map<String, String> smsFilter = new HashMap<String, String>();
+ smsFilter.put("SmsEnabled", "true");
+ AvailablePhoneNumberList phoneNumbersByCountryAndSms = mainAccount.getAvailablePhoneNumbers(smsFilter, "GB", AvailablePhoneNumberList.TYPE_LOCAL);
+ List<AvailablePhoneNumber> phoneNumbersByCountryAndSmsList = phoneNumbersByCountryAndSms.getPageData();
// Buy the first number in a list
Map<String, String> params = new HashMap<String, String>();
params.put("PhoneNumber", phoneNumberList.get(0).getPhoneNumber());
params.put("VoiceUrl", "http://demo.twilio.com/welcome/voice/");
- mainAccount.getIncomingPhoneNumberFactory().create(params);
+ // Uncomment to complete the purchase!
+ // mainAccount.getIncomingPhoneNumberFactory().create(params);
// View a conference using it's sid
Conference c = mainAccount.getConference("CA12345..."); | Comment out buying the number...
in case people run the example code multiple times - could drain their account./ | twilio_twilio-java | train |
9f87035be006923c683eef8d90ed99bd8e467635 | diff --git a/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmExportPlugin.java b/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmExportPlugin.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmExportPlugin.java
+++ b/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmExportPlugin.java
@@ -131,5 +131,12 @@ public interface ScmExportPlugin {
ScmDiffResult getFileDiff(JobExportReference job, String originalPath);
+ /**
+ * Function to fix status of the jobs on cluster environment.
+ * To automatically match the job status on every node.
+ *
+ * @param jobs rundeck jobs
+ * @return map with information on the process
+ */
Map clusterFixJobs(List<JobReference> jobs);
}
diff --git a/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmImportPlugin.java b/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmImportPlugin.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmImportPlugin.java
+++ b/core/src/main/java/com/dtolabs/rundeck/plugins/scm/ScmImportPlugin.java
@@ -146,5 +146,12 @@ public interface ScmImportPlugin {
ScmImportDiffResult getFileDiff(JobScmReference job, String originalPath);
+ /**
+ * Function to fix status of the jobs on cluster environment.
+ * To automatically match the job status on every node.
+ *
+ * @param jobs rundeck jobs
+ * @return map with information on the process
+ */
Map clusterFixJobs(List<JobReference> jobs);
}
diff --git a/plugins/git-plugin/src/main/groovy/org/rundeck/plugin/scm/git/GitExportPlugin.groovy b/plugins/git-plugin/src/main/groovy/org/rundeck/plugin/scm/git/GitExportPlugin.groovy
index <HASH>..<HASH> 100644
--- a/plugins/git-plugin/src/main/groovy/org/rundeck/plugin/scm/git/GitExportPlugin.groovy
+++ b/plugins/git-plugin/src/main/groovy/org/rundeck/plugin/scm/git/GitExportPlugin.groovy
@@ -473,7 +473,6 @@ class GitExportPlugin extends BaseGitPlugin implements ScmExportPlugin {
jobs.each { job ->
def storedCommitId = ((JobScmReference)job).scmImportMetadata?.commitId
def commitId = lastCommitForPath(getRelativePathForJob(job))
- println(commitId)
def path = getRelativePathForJob(job)
if(storedCommitId != null && commitId == null){
//file to delete-pull | javadoc on interface and removed fugly println | rundeck_rundeck | train |
d499f7cd535a7b3fc276180fbd6b8071c7b10c42 | diff --git a/aeron-client/src/main/java/uk/co/real_logic/aeron/ActiveSubscriptions.java b/aeron-client/src/main/java/uk/co/real_logic/aeron/ActiveSubscriptions.java
index <HASH>..<HASH> 100644
--- a/aeron-client/src/main/java/uk/co/real_logic/aeron/ActiveSubscriptions.java
+++ b/aeron-client/src/main/java/uk/co/real_logic/aeron/ActiveSubscriptions.java
@@ -50,9 +50,13 @@ class ActiveSubscriptions
public synchronized void add(final Subscription subscription)
{
- getOrDefault(subscriptionByChannelMap, subscription.channel(), SUPPLIER)
- .computeIfAbsent(subscription.streamId(), ArrayList::new)
- .add(subscription);
+ final Int2ObjectHashMap<List<Subscription>> subscriptionByStreamIdMap =
+ getOrDefault(subscriptionByChannelMap, subscription.channel(), SUPPLIER);
+
+ final List<Subscription> subscriptions =
+ subscriptionByStreamIdMap.getOrDefault(subscription.streamId(), ArrayList::new);
+
+ subscriptions.add(subscription);
}
public synchronized void remove(final Subscription subscription) | [Java]: fixed #<I> by removing computeIfAbsent. | real-logic_aeron | train |
eeb6bacadfa1ad3d9608e38324514fa2932d7c1e | diff --git a/mongo/mongo_test.go b/mongo/mongo_test.go
index <HASH>..<HASH> 100644
--- a/mongo/mongo_test.go
+++ b/mongo/mongo_test.go
@@ -50,8 +50,8 @@ type Times struct {
type testNamespacedModel struct {
State string `bson:"state"`
NewKey int `bson:"new_key,omitempty"`
- Currant Times `bson:"currant",omitempty"`
- Nixed Times `bson:"nixed",omitempty"`
+ Currant Times `bson:"currant,omitempty"`
+ Nixed Times `bson:"nixed,omitempty"`
}
type ungraceful struct{} | minor fix to test mongo tag syntax | ONSdigital_go-ns | train |
805668576d4f46c955e049183f63daf9461fe5e9 | diff --git a/tests/integration/data/CrudTest.php b/tests/integration/data/CrudTest.php
index <HASH>..<HASH> 100644
--- a/tests/integration/data/CrudTest.php
+++ b/tests/integration/data/CrudTest.php
@@ -15,6 +15,8 @@ class CrudTest extends \lithium\test\Integration {
protected $_connection = null;
+ protected $_database = null;
+
protected $_key = null;
public $companyData = array(
@@ -22,23 +24,36 @@ class CrudTest extends \lithium\test\Integration {
array('name' => 'Ma \'n Pa\'s Data Warehousing & Bait Shop', 'active' => false)
);
+ /**
+ * Creating the test database
+ */
public function setUp() {
- Companies::config();
- $this->_key = Companies::key();
- $this->_connection = Connections::get('test');
+ $this->_connection->connection->put($this->_database);
+ }
+
+ /**
+ * Dropping the test database
+ */
+ public function tearDown() {
+ $this->_connection->connection->delete($this->_database);
}
/**
* Skip the test if no test database connection available.
- *
- * @return void
*/
public function skip() {
+ $connection = 'lithium_couch_test';
+ $config = Connections::get($connection, array('config' => true));
$isAvailable = (
- Connections::get('test', array('config' => true)) &&
- Connections::get('test')->isConnected(array('autoConnect' => true))
+ $config &&
+ Connections::get($connection)->isConnected(array('autoConnect' => true))
);
- $this->skipIf(!$isAvailable, "No test connection available.");
+ $this->skipIf(!$isAvailable, "No {$connection} connection available.");
+
+ Companies::config();
+ $this->_key = Companies::key();
+ $this->_database = $config['database'];
+ $this->_connection = Connections::get($connection);
}
/**
@@ -48,7 +63,6 @@ class CrudTest extends \lithium\test\Integration {
* @return void
*/
public function testCreate() {
- Companies::all()->delete();
$this->assertIdentical(0, Companies::count());
$new = Companies::create(array('name' => 'Acme, Inc.', 'active' => true));
@@ -64,6 +78,7 @@ class CrudTest extends \lithium\test\Integration {
}
public function testRead() {
+ static::_createCompany();
$existing = Companies::first();
foreach (Companies::key($existing) as $val) {
@@ -75,6 +90,7 @@ class CrudTest extends \lithium\test\Integration {
}
public function testUpdate() {
+ static::_createCompany();
$existing = Companies::first();
$this->assertEqual($existing->name, 'Acme, Inc.');
$existing->name = 'Big Brother and the Holding Company';
@@ -90,6 +106,7 @@ class CrudTest extends \lithium\test\Integration {
}
public function testDelete() {
+ static::_createCompany();
$existing = Companies::first();
$this->assertTrue($existing->exists());
$this->assertTrue($existing->delete());
@@ -141,6 +158,13 @@ class CrudTest extends \lithium\test\Integration {
$result = $updated->foo;
$this->assertEqual($expected, $result);
}
+
+ protected static function _createCompany() {
+ Companies::create(array(
+ 'name' => 'Acme, Inc.',
+ 'active' => true,
+ ))->save();
+ }
}
?>
\ No newline at end of file
diff --git a/tests/mocks/data/Companies.php b/tests/mocks/data/Companies.php
index <HASH>..<HASH> 100644
--- a/tests/mocks/data/Companies.php
+++ b/tests/mocks/data/Companies.php
@@ -12,7 +12,7 @@ class Companies extends \lithium\data\Model {
public $hasMany = array('Employees');
- protected $_meta = array('connection' => 'test');
+ protected $_meta = array('connection' => 'lithium_couch_test');
}
?>
\ No newline at end of file | Fixing CrudTest (using lithium_couch_test connection) | UnionOfRAD_lithium | train |
e30b1ddcc5408a1d37da43f5b7fec555e900e381 | diff --git a/fgivenx/__init__.py b/fgivenx/__init__.py
index <HASH>..<HASH> 100644
--- a/fgivenx/__init__.py
+++ b/fgivenx/__init__.py
@@ -33,7 +33,7 @@ import numpy
import fgivenx.samples
import fgivenx.mass
import fgivenx.dkl
-from fgivenx._utils import _check_args, _normalise_weights
+from fgivenx._utils import _check_args, _normalise_weights, _equally_weight_samples
def compute_samples(f, x, samples, logZ=None, **kwargs):
@@ -94,7 +94,7 @@ def compute_samples(f, x, samples, logZ=None, **kwargs):
logZ, weights = _normalise_weights(logZ, weights, ntrim)
for i, (s, w) in enumerate(zip(samples, weights)):
- samples[i] = fgivenx.samples.equally_weight_samples(s, w)
+ samples[i] = _equally_weight_samples(s, w)
return fgivenx.samples.compute_samples(f, x, samples,
parallel=parallel, cache=cache,
diff --git a/fgivenx/_utils.py b/fgivenx/_utils.py
index <HASH>..<HASH> 100644
--- a/fgivenx/_utils.py
+++ b/fgivenx/_utils.py
@@ -64,3 +64,49 @@ def _normalise_weights(logZ, weights, ntrim):
if ntrim is not None and ntrim < ntot:
weights = [w*ntrim/ntot for w in weights]
return logZ, weights
+
+
+def _equally_weight_samples(samples, weights):
+ """ Convert samples to be equally weighted.
+
+ Samples are trimmed by discarding samples in accordance with a probability
+ determined by the corresponding weight.
+
+ This function has assumed you have normalised the weights properly.
+ If in doubt, convert weights via: `weights /= weights.max()`
+
+ Parameters
+ ----------
+ samples: array-like
+ Samples to trim.
+
+ weights: array-like
+ Weights to trim by.
+
+ Returns
+ -------
+ 1D numpy.array:
+ Equally weighted sample array. `shape=(len(samples))`
+ """
+ if len(weights) != len(samples):
+ raise ValueError("len(weights) = %i != len(samples) = %i" %
+ (len(weights), len(samples)))
+
+ if numpy.logical_or(weights < 0, weights > 1).any():
+ raise ValueError("weights must have probability between 0 and 1")
+
+ weights = numpy.array(weights)
+ samples = numpy.array(samples)
+
+ state = numpy.random.get_state()
+
+ numpy.random.seed(1)
+ n = len(weights)
+ choices = numpy.random.rand(n) < weights
+
+ new_samples = samples[choices]
+
+ numpy.random.set_state(state)
+
+ return new_samples.copy()
+
diff --git a/fgivenx/samples.py b/fgivenx/samples.py
index <HASH>..<HASH> 100644
--- a/fgivenx/samples.py
+++ b/fgivenx/samples.py
@@ -1,52 +1,9 @@
import numpy
from fgivenx.parallel import parallel_apply
from fgivenx.io import CacheException, Cache
+from fgivenx._utils import _equally_weight_samples
-def equally_weight_samples(samples, weights):
- """ Convert samples to be equally weighted.
-
- Samples are trimmed by discarding samples in accordance with a probability
- determined by the corresponding weight.
-
- This function has assumed you have normalised the weights properly.
- If in doubt, convert weights via: `weights /= weights.max()`
-
- Parameters
- ----------
- samples: array-like
- Samples to trim.
-
- weights: array-like
- Weights to trim by.
-
- Returns
- -------
- 1D numpy.array:
- Equally weighted sample array. `shape=(len(samples))`
- """
- if len(weights) != len(samples):
- raise ValueError("len(weights) = %i != len(samples) = %i" %
- (len(weights), len(samples)))
-
- if numpy.logical_or(weights < 0, weights > 1).any():
- raise ValueError("weights must have probability between 0 and 1")
-
- weights = numpy.array(weights)
- samples = numpy.array(samples)
-
- state = numpy.random.get_state()
-
- numpy.random.seed(1)
- n = len(weights)
- choices = numpy.random.rand(n) < weights
-
- new_samples = samples[choices]
-
- numpy.random.set_state(state)
-
- return new_samples.copy()
-
def compute_samples(f, x, samples, **kwargs):
""" Apply f(x,theta) to x array and theta in samples. | Moved sample equal weighting to _utils | williamjameshandley_fgivenx | train |
03da40c9ec95ee181e1ef8e94f68e0b9a1aa79cd | diff --git a/src/Clients/WebClient.php b/src/Clients/WebClient.php
index <HASH>..<HASH> 100644
--- a/src/Clients/WebClient.php
+++ b/src/Clients/WebClient.php
@@ -228,6 +228,12 @@ class WebClient extends Client
{
$response = Metadata::make($response, $file);
}
+
+ // cache certain responses
+ if(in_array($type, ['lang', 'meta']))
+ {
+ $this->cache[sha1($file)][$type] = $response;
+ }
}
// request completed successfully but result is empty
elseif($status == 204)
@@ -237,8 +243,6 @@ class WebClient extends Client
// retry on request failed with error 500
elseif($status == 500 && $retries[sha1($file)]--)
{
- usleep(100000);
-
$response = $this->request($type, $file);
}
// other status code is an error
@@ -247,12 +251,6 @@ class WebClient extends Client
$this->error($status, $resource);
}
- // cache certain responses
- if(in_array($type, ['lang', 'meta']))
- {
- $this->cache[sha1($file)][$type] = $response;
- }
-
return $response;
} | Avoid cache of failed requests on web client | vaites_php-apache-tika | train |
7c28fad4e8c022e04c30759f14717f59a0015fa8 | diff --git a/dark/__init__.py b/dark/__init__.py
index <HASH>..<HASH> 100644
--- a/dark/__init__.py
+++ b/dark/__init__.py
@@ -5,4 +5,4 @@ if sys.version_info < (2, 7):
# Note that the version string must have the following format, otherwise it
# will not be found by the version() function in ../setup.py
-__version__ = '1.1.4'
+__version__ = '1.1.5'
diff --git a/dark/alignments.py b/dark/alignments.py
index <HASH>..<HASH> 100644
--- a/dark/alignments.py
+++ b/dark/alignments.py
@@ -350,19 +350,21 @@ class ReadsAlignments(object):
self.scoreClass = scoreClass
self._filters = []
- def getSequence(self, title):
+ def getSubjectSequence(self, title):
"""
Obtain information about a sequence given its title.
Must be implemented by a subclass, e.g., see
L{blast.alignments.BlastReadsAlignments}.
- @param title: A C{str} sequence title from a BLAST match. Of the form
+ @param title: A C{str} sequence title from a BLAST or DIAMOND (etc.)
+ match. Usually of the form
'gi|63148399|gb|DQ011818.1| Description...'.
- @return: A C{SeqIO.read} instance.
+ @raise NotImplementedError: This method must be implemented by a
+ subclass.
"""
- raise NotImplementedError('getSequence must be implemented by a '
- 'subclass')
+ raise NotImplementedError('getSubjectSequence must be implemented by '
+ 'a subclass')
def hsps(self):
"""
diff --git a/test/test_alignments.py b/test/test_alignments.py
index <HASH>..<HASH> 100644
--- a/test/test_alignments.py
+++ b/test/test_alignments.py
@@ -209,13 +209,13 @@ class TestReadsAlignments(TestCase):
readsAlignments = ReadsAlignments(reads, 'applicationName', None)
self.assertEqual([], list(readsAlignments))
- def testGetSequence(self):
+ def testGetSubjectSequence(self):
"""
- A ReadsAlignments instance will not implement getSequence. Subclasses
- are expected to implement it.
+ A ReadsAlignments instance will not implement getSubjectSequence.
+ Subclasses are expected to implement it.
"""
reads = Reads()
readsAlignments = ReadsAlignments(reads, 'applicationName', None)
error = 'getSequence must be implemented by a subclass'
six.assertRaisesRegex(self, NotImplementedError, error,
- readsAlignments.getSequence, 'title')
+ readsAlignments.getSubjectSequence, 'title') | getSequence is supposed to be called getSubjectSequence in dark/alignments.py. This was not causing an error because the BLAST and DIAMOND subclasses were implementing getSubjectSequence | acorg_dark-matter | train |
5b48611f47991f822d3a73036fd283bc39badb92 | diff --git a/test/index.js b/test/index.js
index <HASH>..<HASH> 100644
--- a/test/index.js
+++ b/test/index.js
@@ -12,9 +12,28 @@ runner( [
// test command line
execUnits( `cli` ),
// test with binary
- execUnits( `common`, true ),
+ execUnits( `common`, {
+ "bin": true,
+ } ),
+ // test with binary and trace
+ execUnits( `common`, {
+ "bin": true,
+ "trace": true,
+ } ),
// test with binary, no env
- execUnits( `common`, true, `` ),
+ execUnits( `common`, {
+ "bin": true,
+ "wiresEnv": ``,
+ } ),
+ // test with binary and trace, no env
+ execUnits( `common`, {
+ "bin": true,
+ "wiresEnv": ``,
+ } ),
// test with local
execUnits( `common` ),
+ // test with local and trace
+ execUnits( `common`, {
+ "trace": true,
+ } ),
] );
diff --git a/test/runner/execUnits.js b/test/runner/execUnits.js
index <HASH>..<HASH> 100644
--- a/test/runner/execUnits.js
+++ b/test/runner/execUnits.js
@@ -6,14 +6,21 @@ const spawn = require( `child_process` ).spawn;
const binPath = path.resolve( __dirname, `../../lib/bin.js` );
const unitPath = path.resolve( __dirname, `../util/runUnits.js` );
-module.exports = ( type, bin, nodeEnv = `test` ) => () => new Promise( ( resolve, reject ) => {
- console.log( `running tests for ${ type } (${ bin ? `` : `no ` }binary, ENV=${ nodeEnv || `-` })\n` );
+module.exports = ( type, { bin, trace, wiresEnv = `test` } = {} ) => () => new Promise( ( resolve, reject ) => {
+ console.log(
+ `running tests for ${ type } (${
+ bin ? `` : `no `
+ }binary, ${
+ trace ? `` : `no `
+ }trace, ENV=${ wiresEnv || `-` })\n`
+ );
const env = {};
for ( const key of Object.keys( process.env ) ) {
env[ key ] = process.env[ key ];
}
- env[ `WIRES_ENV` ] = nodeEnv;
- spawn( process.execPath, bin ? [ binPath, unitPath, type ] : [ unitPath, type ], {
+ env[ `WIRES_ENV` ] = wiresEnv;
+ const boolTrace = Boolean( trace );
+ spawn( process.execPath, bin ? [ binPath, unitPath, type, boolTrace ] : [ unitPath, type, boolTrace ], {
env,
"stdio": `inherit`,
} )
diff --git a/test/util/runUnits.js b/test/util/runUnits.js
index <HASH>..<HASH> 100644
--- a/test/util/runUnits.js
+++ b/test/util/runUnits.js
@@ -14,7 +14,9 @@ const rUnit = /\.unit\.js$/;
const fixtureDir = path.resolve( __dirname, `../fixture` );
const unitDir = path.resolve( __dirname, `../units/${ process.argv[ 2 ] }` );
-const files = new Set( process.argv.slice( 3 ) );
+const trace = JSON.parse( process.argv[ 3 ] );
+
+const files = new Set( process.argv.slice( 4 ) );
const dirUnits = {};
const units = [];
@@ -70,16 +72,24 @@ const npmInstall = [];
// eslint-disable-next-line no-extend-native
Object.prototype.__MODIFIED_PROTOTYPE = true;
-const run = () => nodeunit.reporters.minimal.run( units, null, error => {
- if ( error ) {
- if ( error.message === `We have got test failures.` ) {
- // eslint-disable-next-line no-process-exit
- process.exit( 1 );
- }
- throw error;
-
+const run = () => {
+ const wires = trace && require( `../..` );
+ if ( wires ) {
+ wires.startTrace();
}
-} );
+ nodeunit.reporters.minimal.run( units, null, error => {
+ if ( wires ) {
+ wires.stopTrace();
+ }
+ if ( error ) {
+ if ( error.message === `We have got test failures.` ) {
+ // eslint-disable-next-line no-process-exit
+ process.exit( 1 );
+ }
+ throw error;
+ }
+ } );
+};
if ( npmInstall.length ) {
const exec = require( `child_process` ).exec; | test: run unit tests with tracer on. | jaubourg_wires | train |
e10080ca511391331d715dee6d3c0c3e398858d1 | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -31,6 +31,16 @@ var normal = createDist();
The distribution is configurable and has the following methods...
+#### normal.support()
+
+Returns the distribution support, which is the set of all real values.
+
+``` javascript
+normal.support();
+// returns [-inf, inf]
+```
+
+
#### normal.mean( [value] )
This method is a setter/getter. If no `value` is provided, returns the distribution `mean`. To set the distribution `mean`,
@@ -113,7 +123,7 @@ var entropy = normal.entropy();
#### normal.pdf( [arr] )
-If a support vector is not provided, returns the probability density function (PDF). If a support vector is provided, evaluates the PDF for each vector element.
+If a vector is not provided, returns the probability density function (PDF). If a vector is provided, evaluates the PDF for each vector element.
``` javascript
var data = [ -1, -0.5, 0, 0.5, 1 ];
@@ -124,7 +134,7 @@ var pdf = normal.pdf( data );
#### normal.cdf( [arr] )
-If a support vector is not provided, returns the cumulative density function (CDF). If a support vector is provided, evaluates the CDF for each vector element.
+If a vector is not provided, returns the cumulative density function (CDF). If a vector is provided, evaluates the CDF for each vector element.
``` javascript
var data = [ -1, -0.5, 0, 0.5, 1 ];
@@ -163,7 +173,7 @@ var mu = 100,
xLow = 0,
xHigh = 200;
-// Create a support vector...
+// Create a vector...
var vec = new Array( 1000 ),
len = vec.length,
inc;
@@ -179,7 +189,7 @@ var normal = createDist()
.mean( mu )
.variance( s2 );
-// Evaluate the probability density function over the support vector...
+// Evaluate the probability density function over the vector...
var pdf = normal.pdf( vec );
// Find the max...
diff --git a/examples/index.js b/examples/index.js
index <HASH>..<HASH> 100644
--- a/examples/index.js
+++ b/examples/index.js
@@ -8,7 +8,7 @@ var mu = 100,
xLow = 0,
xHigh = 200;
-// Create a support vector...
+// Create a vector...
var vec = new Array( 1000 ),
len = vec.length,
inc;
@@ -24,7 +24,7 @@ var normal = createDist()
.mean( mu )
.variance( s2 );
-// Evaluate the probability density function over the support vector...
+// Evaluate the probability density function over the vector...
var pdf = normal.pdf( vec );
// Find the max...
diff --git a/lib/index.js b/lib/index.js
index <HASH>..<HASH> 100644
--- a/lib/index.js
+++ b/lib/index.js
@@ -132,6 +132,16 @@
} // end FUNCTION Distribution()
/**
+ * METHOD: support()
+ * Returns the distribution support.
+ *
+ * @returns {Array} distribution support
+ */
+ Distribution.prototype.support = function() {
+ return [ Number.NEGATIVE_INFINITY, Number.POSITIVE_INFINITY ];
+ }; // end METHOD support()
+
+ /**
* METHOD: mean( [value] )
* Mean value setter and getter. If a value is provided, sets the mean value. If no value is provided, returns the mean value.
*
diff --git a/test/test.js b/test/test.js
index <HASH>..<HASH> 100644
--- a/test/test.js
+++ b/test/test.js
@@ -34,6 +34,22 @@ describe( 'distributions-normal', function tests() {
expect( createDist ).to.be.a( 'function' );
});
+ describe( 'support', function tests() {
+
+ it( 'should provide a method to get the distribution support', function test() {
+ expect( normal.support ).to.be.a( 'function' );
+ });
+
+ it( 'should return a support equal to all real values', function test() {
+ var expected = [
+ Number.NEGATIVE_INFINITY,
+ Number.POSITIVE_INFINITY
+ ];
+ assert.deepEqual( normal.support(), expected );
+ });
+
+ }); // end TESTS support
+
describe( 'mean', function tests() {
it( 'should provide a setter/getter for the distribution mean', function test() { | [UPDATE] added support. | distributions-io_normal | train |
bbb16d5cd793f81db25a8a880eab090015c8cb11 | diff --git a/frontends/default/javascripts/active_scaffold.js b/frontends/default/javascripts/active_scaffold.js
index <HASH>..<HASH> 100644
--- a/frontends/default/javascripts/active_scaffold.js
+++ b/frontends/default/javascripts/active_scaffold.js
@@ -71,12 +71,12 @@ var ActiveScaffold = {
decrement_record_count: function(scaffold_id) {
// decrement the last record count, firsts record count are in nested lists
count = $$('#' + scaffold_id + ' span.active-scaffold-records').last();
- count.innerHTML = parseInt(count.innerHTML) - 1;
+ if (count) count.update(parseInt(count.innerHTML, 10) - 1);
},
increment_record_count: function(scaffold_id) {
// increment the last record count, firsts record count are in nested lists
count = $$('#' + scaffold_id + ' span.active-scaffold-records').last();
- count.innerHTML = parseInt(count.innerHTML) + 1;
+ if (count) count.update(parseInt(count.innerHTML, 10) + 1);
},
update_row: function(row, html) {
row = $(row);
diff --git a/test/mock_app/public/javascripts/active_scaffold/default/active_scaffold.js b/test/mock_app/public/javascripts/active_scaffold/default/active_scaffold.js
index <HASH>..<HASH> 100644
--- a/test/mock_app/public/javascripts/active_scaffold/default/active_scaffold.js
+++ b/test/mock_app/public/javascripts/active_scaffold/default/active_scaffold.js
@@ -71,12 +71,12 @@ var ActiveScaffold = {
decrement_record_count: function(scaffold_id) {
// decrement the last record count, firsts record count are in nested lists
count = $$('#' + scaffold_id + ' span.active-scaffold-records').last();
- count.innerHTML = parseInt(count.innerHTML) - 1;
+ if (count) count.update(parseInt(count.innerHTML, 10) - 1);
},
increment_record_count: function(scaffold_id) {
// increment the last record count, firsts record count are in nested lists
count = $$('#' + scaffold_id + ' span.active-scaffold-records').last();
- count.innerHTML = parseInt(count.innerHTML) + 1;
+ if (count) count.update(parseInt(count.innerHTML, 10) + 1);
},
update_row: function(row, html) {
row = $(row); | Fix creating/deleting when pagination is disabled or infinite | activescaffold_active_scaffold | train |
f26f985539b0f36c2fc05dd37911950425bb36c3 | diff --git a/test/Validation.test.js b/test/Validation.test.js
index <HASH>..<HASH> 100644
--- a/test/Validation.test.js
+++ b/test/Validation.test.js
@@ -61,4 +61,59 @@ describe("Validation", function() {
throw new Error("Validation didn't fail");
})
});
+
+ describe("checkHost", function() {
+ it("should always allow any host if options.disableHostCheck is set", function() {
+ const options = {
+ public: "test.host:80",
+ disableHostCheck: true
+ };
+ const headers = {
+ host: "bad.host"
+ };
+ const server = new Server(compiler, options);
+ if(!server.checkHost(headers)) {
+ throw new Error("Validation didn't fail");
+ }
+ });
+
+ it("should allow any valid options.public when host is localhost", function() {
+ const options = {
+ public: "test.host:80"
+ };
+ const headers = {
+ host: "localhost"
+ };
+ const server = new Server(compiler, options);
+ if(!server.checkHost(headers)) {
+ throw new Error("Validation didn't fail");
+ }
+ });
+
+ it("should allow any valid options.public when host is 127.0.0.1", function() {
+ const options = {
+ public: "test.host:80"
+ };
+ const headers = {
+ host: "127.0.0.1"
+ };
+ const server = new Server(compiler, options);
+ if(!server.checkHost(headers)) {
+ throw new Error("Validation didn't fail");
+ }
+ });
+
+ it("should not allow hostnames that don't match options.public", function() {
+ const options = {
+ public: "test.host:80",
+ };
+ const headers = {
+ host: "test.hostname:80"
+ };
+ const server = new Server(compiler, options);
+ if(server.checkHost(headers)) {
+ throw new Error("Validation didn't fail");
+ }
+ });
+ })
}); | Added tests for Server.prototype.checkHost | webpack_webpack-dev-server | train |
ff5824fe38bede761b873cab6e247a530e89236a | diff --git a/client.go b/client.go
index <HASH>..<HASH> 100644
--- a/client.go
+++ b/client.go
@@ -28,23 +28,17 @@ func (h *HAProxyClient) RunCommand(cmd string) (*bytes.Buffer, error) {
if err != nil {
return nil, err
}
+ defer h.conn.Close()
result := bytes.NewBuffer(nil)
_, err = h.conn.Write([]byte(cmd + "\n"))
if err != nil {
- h.conn.Close()
return nil, err
}
_, err = io.Copy(result, h.conn)
if err != nil {
- h.conn.Close()
- return nil, err
- }
-
- err = h.conn.Close()
- if err != nil {
return nil, err
} | move conn.Close() to defer stmt | bcicen_go-haproxy | train |
40377b188b3486ac4cebf188d1cff4eacb590259 | diff --git a/mod/quiz/index.php b/mod/quiz/index.php
index <HASH>..<HASH> 100644
--- a/mod/quiz/index.php
+++ b/mod/quiz/index.php
@@ -112,10 +112,17 @@
$gradecol = "";
}
} else {
+ // If student has no grade for this quiz,
+ // or the quiz has no grade, display nothing in grade col
if ($bestgrade === NULL || $quiz->grade == 0) {
$gradecol = "";
} else {
- $gradecol = "$bestgrade / $quiz->grade";
+ //If all quiz's attempts have visible results, show bestgrade
+ if(all_attempt_results_visible($quiz, $USER)) {
+ $gradecol = "$bestgrade / $quiz->grade";
+ } else {
+ $gradecol = "";
+ }
}
}
diff --git a/mod/quiz/locallib.php b/mod/quiz/locallib.php
index <HASH>..<HASH> 100644
--- a/mod/quiz/locallib.php
+++ b/mod/quiz/locallib.php
@@ -550,5 +550,31 @@ function quiz_get_reviewoptions($quiz, $attempt, $isteacher=false) {
return $options;
}
+////////////////////////////////////////////////////////////////////////////////
+/**
+* Return boolean indicating if the quiz has attempts with hidden grades
+*
+* Selects all attempts matching specified quiz & user, and examines each to
+* check they all have visible results.
+* @return boolean If the quiz has attempts without visible results
+* @param object $quiz The quiz being examined
+* @param object $user The user concerned
+*/
+function all_attempt_results_visible($quiz, $user) {
+ global $CFG;
+ $sql = 'SELECT timefinish, preview FROM '.$CFG->prefix.'quiz_attempts qa'.
+ ' WHERE qa.quiz='.$quiz->id.' AND qa.userid='.$user->id.
+ ' ORDER BY id DESC';
+ $attempts = get_records_sql($sql);
+ foreach ($attempts as $attempt) {
+ $attemptoptions = quiz_get_reviewoptions($quiz, $attempt);
+ //if any attempt has scores option not set, not all attempt results are
+ //visible
+ if (!$attemptoptions->scores) {
+ return false;
+ }
+ }
+ return true;
+}
?> | mod/quiz/index changes to only display quiz grades when teacher allows
Changes to index.php to only allow student to view overall quiz scores when
teacher has not set options forbidding this, and it can't give away what
scores were for attempts where results should still be hidden.
Credit: Peter Bulmer <EMAIL> | moodle_moodle | train |
8ac541fbd9baf1a054e7a102c4fd64ab82798ab7 | diff --git a/scapy.py b/scapy.py
index <HASH>..<HASH> 100755
--- a/scapy.py
+++ b/scapy.py
@@ -21,6 +21,11 @@
#
# $Log: scapy.py,v $
+# Revision 0.9.17.103 2005/06/07 10:18:27 pbi
+# - added a try/catch for get_if_hw_addr
+# - fixed the netstat parsing for OpenBSD
+# - changed Dot11WEP's key ID field from "key" to "keyid"
+#
# Revision 0.9.17.102 2005/06/07 09:54:51 pbi
# - added LEShortEnumField
# - added L2CAP layer
@@ -806,7 +811,7 @@
from __future__ import generators
-RCSID="$Id: scapy.py,v 0.9.17.102 2005/06/07 09:54:51 pbi Exp $"
+RCSID="$Id: scapy.py,v 0.9.17.103 2005/06/07 10:18:27 pbi Exp $"
VERSION = RCSID.split()[2]+"beta"
@@ -977,6 +982,7 @@ except ImportError:
LINUX=sys.platform.startswith("linux")
+OPENBSD=sys.platform.startswith("openbsd")
if LINUX:
DNET=PCAP=0
@@ -1418,8 +1424,11 @@ if DNET:
def get_if_raw_hwaddr(iff):
if iff[:2] == "lo":
return (772, '\x00'*6)
- l = dnet.intf().get(iff)
- l = l["link_addr"]
+ try:
+ l = dnet.intf().get(iff)
+ l = l["link_addr"]
+ except:
+ raise Exception("Error in attempting to get hw address for interface [%s]" % iff)
return l.type,l.data
def get_if_raw_addr(ifname):
i = dnet.intf()
@@ -1525,7 +1534,10 @@ if not LINUX:
continue
if not l:
break
- dest,gw,fl,ref,use,netif = l.split()[:6]
+ if OPENBSD:
+ dest,gw,fl,ref,use,mtu,netif = l.split()[:7]
+ else:
+ dest,gw,fl,ref,use,netif = l.split()[:6]
if dest == "default":
dest = 0L
netmask = 0L
@@ -4655,7 +4667,7 @@ class Dot11Deauth(Packet):
class Dot11WEP(Packet):
name = "802.11 WEP packet"
fields_desc = [ StrFixedLenField("iv", "", 3),
- ByteField("key", 0),
+ ByteField("keyid", 0),
StrField("wepdata",None,remain=4),
IntField("icv",None) ] | - added a try/catch for get_if_hw_addr
- fixed the netstat parsing for OpenBSD
- changed Dot<I>WEP's key ID field from "key" to "keyid" | secdev_scapy | train |
b9d1fba43e8b1f799d31649ad6060567f421559a | diff --git a/packages/ember-handlebars/lib/helpers/loc.js b/packages/ember-handlebars/lib/helpers/loc.js
index <HASH>..<HASH> 100644
--- a/packages/ember-handlebars/lib/helpers/loc.js
+++ b/packages/ember-handlebars/lib/helpers/loc.js
@@ -11,11 +11,11 @@ require('ember-handlebars/ext');
```html
<script type="text/x-handlebars" data-template-name="home">
- {{loc welcome}}
+ {{loc "welcome"}}
</script>
```
- Take note that `welcome` is a string and not an object
+ Take note that `"welcome"` is a string and not an object
reference.
@method loc | Fix `{{loc}}` for new string/lookup distinction. | emberjs_ember.js | train |
7eb8cc31ea6d8f6c911aa607d00b435bc2c8d54c | diff --git a/remix-solidity/index.js b/remix-solidity/index.js
index <HASH>..<HASH> 100644
--- a/remix-solidity/index.js
+++ b/remix-solidity/index.js
@@ -3,11 +3,13 @@ var SolidityProxy = require('./src/decoder/solidityProxy')
var localDecoder = require('./src/decoder/localDecoder')
var stateDecoder = require('./src/decoder/stateDecoder')
var CodeAnalysis = require('./src/analysis/staticAnalysisRunner')
+var Compiler = require('./src/compiler/compiler')
module.exports = {
InternalCallTree: InternalCallTree,
SolidityProxy: SolidityProxy,
localDecoder: localDecoder,
stateDecoder: stateDecoder,
- CodeAnalysis: CodeAnalysis
+ CodeAnalysis: CodeAnalysis,
+ Compiler: Compiler
} | export Compiler in remix-solidity | ethereum_remix | train |
6b760d7bda7adf1721be66f93ad838b14242e4af | diff --git a/src/Parser.php b/src/Parser.php
index <HASH>..<HASH> 100644
--- a/src/Parser.php
+++ b/src/Parser.php
@@ -303,9 +303,8 @@ class Parser{
// still testing...
if($preg_error !== PREG_NO_ERROR){
-
throw new BBCodeException('preg_replace_callback() died on ['.$tag.'] due to a '.$this->preg_error[$preg_error]
- .' ('.$preg_error.')'.PHP_EOL.htmlspecialchars(print_r($bbcode, true)));
+ .' ('.$preg_error.')'.PHP_EOL.htmlspecialchars(print_r($bbcode, true))); // @codeCoverageIgnore
}
if($callback && isset($this->tagmap[$tag]) && in_array($tag, $this->allowed_tags)){
@@ -353,7 +352,7 @@ class Parser{
if($preg_error !== PREG_NO_ERROR){
throw new BBCodeException('preg_match_all() died due to a '.$this->preg_error[$preg_error]
- .' ('.$preg_error.')'.PHP_EOL.htmlspecialchars(print_r($attributes, true)));
+ .' ('.$preg_error.')'.PHP_EOL.htmlspecialchars(print_r($attributes, true))); // @codeCoverageIgnore
}
return $attr; | ignore coverage for preg_errors | chillerlan_php-bbcode | train |
06121aa5681fe2be7019636f117e8f8f8748f440 | diff --git a/src/renderer/Dataframe.js b/src/renderer/Dataframe.js
index <HASH>..<HASH> 100644
--- a/src/renderer/Dataframe.js
+++ b/src/renderer/Dataframe.js
@@ -60,6 +60,10 @@ export default class Dataframe {
aabb = _updateAABBForGeometry(feature[j], aabb, type);
}
+ if (aabb.minx === Number.POSITIVE_INFINITY) {
+ aabb = null;
+ }
+
aabbList.push(aabb);
}
@@ -259,6 +263,10 @@ export default class Dataframe {
}
_compareAABBs (featureAABB, viewportAABB, stroke) {
+ if (featureAABB === null) {
+ return AABBTestResults.OUTSIDE;
+ }
+
const featureStrokeAABB = {
minx: featureAABB.minx - stroke,
miny: featureAABB.miny - stroke,
@@ -357,13 +365,13 @@ export default class Dataframe {
if (i === 0 || i >= breakpoints[featureIndex]) {
featureIndex++;
const feature = this.getFeature(featureIndex);
- let offset = {x: 0, y: 0};
+ let offset = { x: 0, y: 0 };
if (!viz.offset.default) {
const vizOffset = viz.offset.eval(feature);
offset.x = vizOffset[0] * widthScale;
offset.y = vizOffset[1] * widthScale;
}
- pointWithOffset = {x: point.x - offset.x, y: point.y - offset.y};
+ pointWithOffset = { x: point.x - offset.x, y: point.y - offset.y };
if (!pointInRectangle(pointWithOffset, this._aabb[featureIndex]) ||
this._isFeatureFiltered(feature, viz.filter)) {
i = breakpoints[featureIndex] - 6;
@@ -547,7 +555,7 @@ function _updateAABBLine (line, aabb) {
}
function _updateAABBPolygon (polygon, aabb) {
- const [ vertices, numVertices ] = [ polygon.flat, polygon.holes[0] || polygon.flat.length / 2 ];
+ const [vertices, numVertices] = [polygon.flat, polygon.holes[0] || polygon.flat.length / 2];
for (let i = 0; i < numVertices; i++) {
aabb.minx = Math.min(aabb.minx, vertices[2 * i + 0]);
@@ -561,12 +569,12 @@ function _updateAABBPolygon (polygon, aabb) {
function _isFeatureAABBInsideViewport (featureAABB, viewportAABB) {
return (featureAABB.minx >= viewportAABB.minx && featureAABB.maxx <= viewportAABB.maxx &&
- featureAABB.miny >= viewportAABB.miny && featureAABB.maxy <= viewportAABB.maxy);
+ featureAABB.miny >= viewportAABB.miny && featureAABB.maxy <= viewportAABB.maxy);
}
function _isFeatureAABBOutsideViewport (featureAABB, viewportAABB) {
return (featureAABB.minx > viewportAABB.maxx || featureAABB.miny > viewportAABB.maxy ||
- featureAABB.maxx < viewportAABB.minx || featureAABB.maxy < viewportAABB.miny);
+ featureAABB.maxx < viewportAABB.minx || featureAABB.maxy < viewportAABB.miny);
}
function _isPolygonCollidingViewport (vertices, normals, start, end, strokeWidthScale, viewportAABB) { | Fix viewport counting corner case with zero-vertex polygons | CartoDB_carto-vl | train |
6c120a1014b467a9792763c87b0dd01f5e7aa5f0 | diff --git a/lib/rack/perftools_profiler/action.rb b/lib/rack/perftools_profiler/action.rb
index <HASH>..<HASH> 100644
--- a/lib/rack/perftools_profiler/action.rb
+++ b/lib/rack/perftools_profiler/action.rb
@@ -17,7 +17,7 @@ module Rack::PerftoolsProfiler
def self.for_env(env, profiler, middleware)
request = Rack::Request.new(env)
klass =
- case request.path
+ case request.path_info
when '/__start__'
StartProfiling
when '/__stop__' | Change Request#path to Request#path_info.
In some versions of Rack, at least, Request#path is called #fullpath, but #path_info should be consistent and is probably more correct anyways, as it will scope it to the path the app was mounted to. Otherwise if, for whatever reason, you mounted the app below the root you would not be able to access any of the __*__ commands at all. | bhb_rack-perftools_profiler | train |
37bc0bb0ca91eb21460b8e3203d94ff15a7412db | diff --git a/src/main/java/com/wiley/autotest/selenium/AbstractSeleniumTest.java b/src/main/java/com/wiley/autotest/selenium/AbstractSeleniumTest.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/wiley/autotest/selenium/AbstractSeleniumTest.java
+++ b/src/main/java/com/wiley/autotest/selenium/AbstractSeleniumTest.java
@@ -185,11 +185,11 @@ public abstract class AbstractSeleniumTest extends AbstractTest implements ITest
}
public <E extends IPage> E getPage(final Class<E> helperClass) {
- return pageProvider.getPage(helperClass, this);
+ return pageProvider.get(helperClass, this);
}
public <E extends IPage> E getPage(final Class<E> helperClass, final String urlToOpen) {
- return pageProvider.getPage(helperClass, this, urlToOpen);
+ return pageProvider.get(helperClass, this, urlToOpen);
}
public void setPostponedTestFail(final String message) {
diff --git a/src/main/java/com/wiley/autotest/services/PageProvider.java b/src/main/java/com/wiley/autotest/services/PageProvider.java
index <HASH>..<HASH> 100644
--- a/src/main/java/com/wiley/autotest/services/PageProvider.java
+++ b/src/main/java/com/wiley/autotest/services/PageProvider.java
@@ -17,12 +17,45 @@ public class PageProvider {
@Autowired
private HelperRegistry registry;
+ public <E extends IPage> E get(final Class<E> helperClass, ScreenshotHelper screenshotHelper) {
+ E helper = registry.getPageHelper(helperClass);
+ helper.init(getWebDriver(), screenshotHelper);
+ return helper;
+ }
+
+ public <E extends IPage> E get(final Class<E> helperClass) {
+ E helper = registry.getPageHelper(helperClass);
+ //TODO VE implement proper support of ScreenshotHelper. As for now it's only used for screenshot base tests passing it as null
+ helper.init(getWebDriver(), null);
+ return helper;
+ }
+
+ public <E extends IPage> E get(final Class<E> helperClass, ScreenshotHelper screenshotHelper, final String urlToOpen) {
+ E helper = get(helperClass, screenshotHelper);
+ helper.load(urlToOpen);
+ return helper;
+ }
+
+ public <E extends IPage> E get(final Class<E> helperClass, final String urlToOpen) {
+ E helper = get(helperClass);
+ helper.load(urlToOpen);
+ return helper;
+ }
+
+ /**
+ * Please use get()
+ */
+ @Deprecated
public <E extends IPage> E getPage(final Class<E> helperClass, ScreenshotHelper screenshotHelper) {
E helper = registry.getPageHelper(helperClass);
helper.init(getWebDriver(), screenshotHelper);
return helper;
}
+ /**
+ * Please use get()
+ */
+ @Deprecated
public <E extends IPage> E getPage(final Class<E> helperClass) {
E helper = registry.getPageHelper(helperClass);
//TODO VE implement proper support of ScreenshotHelper. As for now it's only used for screenshot base tests passing it as null
@@ -30,14 +63,22 @@ public class PageProvider {
return helper;
}
+ /**
+ * Please use get()
+ */
+ @Deprecated
public <E extends IPage> E getPage(final Class<E> helperClass, ScreenshotHelper screenshotHelper, final String urlToOpen) {
- E helper = getPage(helperClass, screenshotHelper);
+ E helper = get(helperClass, screenshotHelper);
helper.load(urlToOpen);
return helper;
}
+ /**
+ * Please use get()
+ */
+ @Deprecated
public <E extends IPage> E getPage(final Class<E> helperClass, final String urlToOpen) {
- E helper = getPage(helperClass);
+ E helper = get(helperClass);
helper.load(urlToOpen);
return helper;
} | refactoring PageProvider | WileyLabs_teasy | train |
d535d9810012b18ca83f86aa416ff4a08bb49804 | diff --git a/daemon/volumes.go b/daemon/volumes.go
index <HASH>..<HASH> 100644
--- a/daemon/volumes.go
+++ b/daemon/volumes.go
@@ -168,6 +168,12 @@ func createVolumes(container *Container) error {
return err
}
}
+
+ for volPath := range binds {
+ if err := initializeVolume(container, volPath, binds); err != nil {
+ return err
+ }
+ }
return nil
}
@@ -226,7 +232,6 @@ func initializeVolume(container *Container, volPath string, binds map[string]Bin
}
// Otherwise create an directory in $ROOT/volumes/ and use that
} else {
-
// Do not pass a container as the parameter for the volume creation.
// The graph driver using the container's information ( Image ) to
// create the parent.
@@ -273,38 +278,50 @@ func initializeVolume(container *Container, volPath string, binds map[string]Bin
// Do not copy or change permissions if we are mounting from the host
if srcRW && !isBindMount {
- volList, err := ioutil.ReadDir(rootVolPath)
+ if err := copyExistingContents(rootVolPath, srcPath); err != nil {
+ return err
+ }
+ }
+ return nil
+}
+
+func copyExistingContents(rootVolPath, srcPath string) error {
+ volList, err := ioutil.ReadDir(rootVolPath)
+ if err != nil {
+ return err
+ }
+
+ if len(volList) > 0 {
+ srcList, err := ioutil.ReadDir(srcPath)
if err != nil {
return err
}
- if len(volList) > 0 {
- srcList, err := ioutil.ReadDir(srcPath)
- if err != nil {
+
+ if len(srcList) == 0 {
+ // If the source volume is empty copy files from the root into the volume
+ if err := archive.CopyWithTar(rootVolPath, srcPath); err != nil {
return err
}
- if len(srcList) == 0 {
- // If the source volume is empty copy files from the root into the volume
- if err := archive.CopyWithTar(rootVolPath, srcPath); err != nil {
- return err
- }
- }
}
+ }
- var stat syscall.Stat_t
- if err := syscall.Stat(rootVolPath, &stat); err != nil {
- return err
- }
- var srcStat syscall.Stat_t
- if err := syscall.Stat(srcPath, &srcStat); err != nil {
+ var (
+ stat syscall.Stat_t
+ srcStat syscall.Stat_t
+ )
+
+ if err := syscall.Stat(rootVolPath, &stat); err != nil {
+ return err
+ }
+ if err := syscall.Stat(srcPath, &srcStat); err != nil {
+ return err
+ }
+ // Change the source volume's ownership if it differs from the root
+ // files that were just copied
+ if stat.Uid != srcStat.Uid || stat.Gid != srcStat.Gid {
+ if err := os.Chown(srcPath, int(stat.Uid), int(stat.Gid)); err != nil {
return err
}
- // Change the source volume's ownership if it differs from the root
- // files that were just copied
- if stat.Uid != srcStat.Uid || stat.Gid != srcStat.Gid {
- if err := os.Chown(srcPath, int(stat.Uid), int(stat.Gid)); err != nil {
- return err
- }
- }
}
return nil
}
diff --git a/runconfig/parse.go b/runconfig/parse.go
index <HASH>..<HASH> 100644
--- a/runconfig/parse.go
+++ b/runconfig/parse.go
@@ -135,8 +135,8 @@ func parseRun(cmd *flag.FlagSet, args []string, sysInfo *sysinfo.SysInfo) (*Conf
if arr[0] == "/" {
return nil, nil, cmd, fmt.Errorf("Invalid bind mount: source can't be '/'")
}
- dstDir := arr[1]
- flVolumes.Set(dstDir)
+ // after creating the bind mount we want to delete it from the flVolumes values because
+ // we do not want bind mounts being committed to image configs
binds = append(binds, bind)
flVolumes.Delete(bind)
} else if bind == "/" { | Don't save bind mounts in image
Docker-DCO-<I>- | containers_storage | train |
3eeb225d07b0a1b4dffdd31a3435a4b5c45f3563 | diff --git a/languagetool-core/src/main/java/org/languagetool/rules/LanguageDependentFilter.java b/languagetool-core/src/main/java/org/languagetool/rules/LanguageDependentFilter.java
index <HASH>..<HASH> 100644
--- a/languagetool-core/src/main/java/org/languagetool/rules/LanguageDependentFilter.java
+++ b/languagetool-core/src/main/java/org/languagetool/rules/LanguageDependentFilter.java
@@ -43,7 +43,7 @@ public class LanguageDependentFilter implements RuleMatchFilter {
@Override
public List<RuleMatch> filter(List<RuleMatch> ruleMatches) {
- if (language.getShortCode() == "ca") {
+ if (language.getShortCode().equals("ca")) {
// Use typographic apostrophe in suggestions
CategoryId catID = new CategoryId("DIACRITICS_TRADITIONAL");
if (this.enabledRules.contains("APOSTROF_TIPOGRAFIC")
@@ -62,8 +62,8 @@ public class LanguageDependentFilter implements RuleMatchFilter {
newReplacements.add(s);
}
}
- RuleMatch newrm = new RuleMatch(rm, newReplacements);
- newRuleMatches.add(newrm);
+ RuleMatch newMatch = new RuleMatch(rm, newReplacements);
+ newRuleMatches.add(newMatch);
}
return newRuleMatches;
} | [ca] fix warning and tiny cleanup | languagetool-org_languagetool | train |
c6c8de260d07042e477a4698a6adafa1ae1f320b | diff --git a/src/Entity/Embeddable/Objects/AbstractEmbeddableObject.php b/src/Entity/Embeddable/Objects/AbstractEmbeddableObject.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Embeddable/Objects/AbstractEmbeddableObject.php
+++ b/src/Entity/Embeddable/Objects/AbstractEmbeddableObject.php
@@ -2,7 +2,6 @@
namespace EdmondsCommerce\DoctrineStaticMeta\Entity\Embeddable\Objects;
-use Doctrine\Common\Util\Debug;
use Doctrine\ORM\Mapping\Builder\ClassMetadataBuilder;
use Doctrine\ORM\Mapping\ClassMetadata;
use EdmondsCommerce\DoctrineStaticMeta\Entity\Interfaces\EntityInterface;
@@ -22,10 +21,7 @@ abstract class AbstractEmbeddableObject
return $builder;
}
- public function __toString(): string
- {
- return (string)Debug::export($this, 2);
- }
+ abstract public function __toString(): string;
public function setOwningEntity(EntityInterface $entity): void
{
diff --git a/src/Entity/Embeddable/Objects/Financial/MoneyEmbeddable.php b/src/Entity/Embeddable/Objects/Financial/MoneyEmbeddable.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Embeddable/Objects/Financial/MoneyEmbeddable.php
+++ b/src/Entity/Embeddable/Objects/Financial/MoneyEmbeddable.php
@@ -89,7 +89,7 @@ class MoneyEmbeddable extends AbstractEmbeddableObject implements MoneyEmbeddabl
{
return (string)print_r(
[
- 'money' => [
+ 'moneyEmbeddable' => [
'amount' => $this->getMoney()->getAmount(),
'currency' => $this->getMoney()->getCurrency(),
],
diff --git a/src/Entity/Embeddable/Objects/Geo/AddressEmbeddable.php b/src/Entity/Embeddable/Objects/Geo/AddressEmbeddable.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Embeddable/Objects/Geo/AddressEmbeddable.php
+++ b/src/Entity/Embeddable/Objects/Geo/AddressEmbeddable.php
@@ -242,6 +242,22 @@ class AddressEmbeddable extends AbstractEmbeddableObject implements AddressEmbed
);
}
+ public function __toString(): string
+ {
+ return (string)print_r(
+ [
+ 'addressEmbeddable' => [
+ AddressEmbeddableInterface::EMBEDDED_PROP_HOUSE_NUMBER => $this->getHouseNumber(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_HOUSE_NAME => $this->getHouseName(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_STREET => $this->getStreet(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_CITY => $this->getCity(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_POSTAL_CODE => $this->getPostalCode(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_POSTAL_AREA => $this->getPostalArea(),
+ AddressEmbeddableInterface::EMBEDDED_PROP_COUNTRY_CODE => $this->getCountryCode(),
+ ],
+ ], true);
+ }
+
protected function getPrefix(): string
{
return HasAddressEmbeddableInterface::PROP_ADDRESS_EMBEDDABLE;
diff --git a/src/Entity/Embeddable/Objects/Identity/FullNameEmbeddable.php b/src/Entity/Embeddable/Objects/Identity/FullNameEmbeddable.php
index <HASH>..<HASH> 100644
--- a/src/Entity/Embeddable/Objects/Identity/FullNameEmbeddable.php
+++ b/src/Entity/Embeddable/Objects/Identity/FullNameEmbeddable.php
@@ -234,6 +234,20 @@ class FullNameEmbeddable extends AbstractEmbeddableObject implements FullNameEmb
);
}
+ public function __toString(): string
+ {
+ return (string)print_r(
+ [
+ 'fullNameEmbeddabled' => [
+ FullNameEmbeddableInterface::EMBEDDED_PROP_TITLE => $this->getTitle(),
+ FullNameEmbeddableInterface::EMBEDDED_PROP_FIRSTNAME => $this->getFirstName(),
+ FullNameEmbeddableInterface::EMBEDDED_PROP_MIDDLENAMES => $this->getMiddleNames(),
+ FullNameEmbeddableInterface::EMBEDDED_PROP_LASTNAME => $this->getLastName(),
+ FullNameEmbeddableInterface::EMBEDDED_PROP_SUFFIX => $this->getSuffix(),
+ ],
+ ], true);
+ }
+
protected function getPrefix(): string
{
return HasFullNameEmbeddableInterface::PROP_FULL_NAME_EMBEDDABLE; | long winded but hopefully will work | edmondscommerce_doctrine-static-meta | train |
e3c9c584103a4bfdb5a22b86f2e63441b760ab52 | diff --git a/addon/components/file-browser-item/component.js b/addon/components/file-browser-item/component.js
index <HASH>..<HASH> 100644
--- a/addon/components/file-browser-item/component.js
+++ b/addon/components/file-browser-item/component.js
@@ -33,9 +33,6 @@ export default Ember.Component.extend({
let date = this.get('item.dateModified');
return moment(date).utc().format('YYYY-MM-DD, h:mm:ss a')
}),
- versionLink: Ember.computed('item.currentVersion', function() {
- return this.get('item.path') + '?revision=' + this.get('item.currentVersion');
- }),
guid: null,
link: Ember.computed('item', 'guid', function() {
let guid = this.get('item.guid') || this.get('guid');
@@ -49,6 +46,9 @@ export default Ember.Component.extend({
}
},
actions: {
+ openVersion() {
+ this.sendAction('openItem', this.get('item'), '?revision=' + this.get('item.currentVersion'));
+ },
open() {
this.sendAction('openItem', this.get('item'));
},
diff --git a/addon/components/file-browser-item/template.hbs b/addon/components/file-browser-item/template.hbs
index <HASH>..<HASH> 100644
--- a/addon/components/file-browser-item/template.hbs
+++ b/addon/components/file-browser-item/template.hbs
@@ -11,7 +11,7 @@
{{/if}}
{{#if (if-filter 'version-column' display)}}
<div class="col-sm-1 hidden-xs file-browser-header">
- <a class='version-link' href='{{versionLink}}'>{{item.currentVersion}}</a>
+ <a class='version-link' {{action 'openVersion'}} role="link">{{item.currentVersion}}</a>
</div>
{{/if}}
{{#if (if-filter 'downloads-column' display)}}
diff --git a/addon/components/file-browser/component.js b/addon/components/file-browser/component.js
index <HASH>..<HASH> 100644
--- a/addon/components/file-browser/component.js
+++ b/addon/components/file-browser/component.js
@@ -202,8 +202,8 @@ export default Ember.Component.extend({
let item = this.get('selectedItems.firstObject');
this.sendAction('openFile', item);
},
- openItem(item) {
- this.sendAction('openFile', item);
+ openItem(item, qparams) {
+ this.sendAction('openFile', item, qparams);
},
downloadItem() {
let downloadLink = this.get('selectedItems.firstObject.links.download'); | Pass in qparams to openItem action | CenterForOpenScience_ember-osf | train |
93fa01585593e3ea9d0b2574e05d434483c683b4 | diff --git a/addon/components/layers/odata-vector-layer.js b/addon/components/layers/odata-vector-layer.js
index <HASH>..<HASH> 100644
--- a/addon/components/layers/odata-vector-layer.js
+++ b/addon/components/layers/odata-vector-layer.js
@@ -78,7 +78,7 @@ export default BaseVectorLayer.extend({
models.forEach(model => {
let ids = insertedIds.filter(id => {
- return model.get('id') === id;
+ return Ember.isNone(model) ? false : model.get('id') === id;
});
if (ids.length > 0) {
insertedModelId.push(ids[0]);
@@ -102,6 +102,8 @@ export default BaseVectorLayer.extend({
console.log('Error save: ' + e);
leafletObject.fire('save:failed', e);
});
+ } else {
+ leafletObject.fire('save:success', { layers: [] });
}
return leafletObject; | fix error delete and add callback for save (#<I>) | Flexberry_ember-flexberry-gis | train |
333284d0a8ad424cb7b6e8b33dd7502843ca3404 | diff --git a/src/org/parosproxy/paros/core/scanner/Alert.java b/src/org/parosproxy/paros/core/scanner/Alert.java
index <HASH>..<HASH> 100644
--- a/src/org/parosproxy/paros/core/scanner/Alert.java
+++ b/src/org/parosproxy/paros/core/scanner/Alert.java
@@ -100,12 +100,6 @@ public class Alert implements Comparable<Alert> {
private int pluginId = 0;
private String name = "";
private int risk = RISK_INFO;
- /**
- * @deprecated
- * Use of reliability has been deprecated in favour of using confidence
- */
- @Deprecated
- private int reliability = CONFIDENCE_MEDIUM;
private int confidence = CONFIDENCE_MEDIUM;
private String description = "";
private String uri = "";
@@ -581,8 +575,7 @@ public class Alert implements Comparable<Alert> {
}
public URL getIconUrl() {
- //TODO: Shouldn't be necessary to check both but let's be careful
- if (reliability == Alert.CONFIDENCE_FALSE_POSITIVE || confidence == Alert.CONFIDENCE_FALSE_POSITIVE) {
+ if (confidence == Alert.CONFIDENCE_FALSE_POSITIVE) {
// Special case - theres no risk - use the green flag
return Constant.OK_FLAG_IMAGE_URL;
} | Remove Alert's instance variable no longer needed
Remove deprecated Alert's instance variable reliability, it's no longer
needed/used, superseded by confidence. | zaproxy_zaproxy | train |
fc0eed650552d8ac81d140c232fe173dd40f2ec7 | diff --git a/client/project.js b/client/project.js
index <HASH>..<HASH> 100644
--- a/client/project.js
+++ b/client/project.js
@@ -16,7 +16,7 @@ sections = [
paths: [ '/customize' ],
module: 'my-sites/customize',
group: 'sites',
- secondary: true,
+ secondary: true
},
{
name: 'me',
@@ -288,24 +288,6 @@ if ( config.isEnabled( 'reader' ) ) {
}
}
-if ( config.isEnabled( 'devdocs' ) ) {
- sections.push( {
- name: 'devdocs',
- paths: [ '/devdocs' ],
- module: 'devdocs',
- secondary: true,
- enableLoggedOut: true
- } );
-
- sections.push( {
- name: 'devdocs',
- paths: [ '/devdocs/start' ],
- module: 'devdocs',
- secondary: false,
- enableLoggedOut: true
- } );
-}
-
if ( config.isEnabled( 'vip' ) ) {
sections.push( {
name: 'vip',
diff --git a/client/sections.js b/client/sections.js
index <HASH>..<HASH> 100644
--- a/client/sections.js
+++ b/client/sections.js
@@ -4,4 +4,22 @@
const config = require( 'config' ),
sections = require( config( 'project' ) );
+if ( config.isEnabled( 'devdocs' ) ) {
+ sections.push( {
+ name: 'devdocs',
+ paths: [ '/devdocs' ],
+ module: 'devdocs',
+ secondary: true,
+ enableLoggedOut: true
+ } );
+
+ sections.push( {
+ name: 'devdocs',
+ paths: [ '/devdocs/start' ],
+ module: 'devdocs',
+ secondary: false,
+ enableLoggedOut: true
+ } );
+}
+
module.exports = sections; | Framework: Move devdocs to shared sections | Automattic_wp-calypso | train |
7d84196519fcdbf96204d754d95c4dbca1ba9121 | diff --git a/airflow/providers/qubole/hooks/qubole.py b/airflow/providers/qubole/hooks/qubole.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/qubole/hooks/qubole.py
+++ b/airflow/providers/qubole/hooks/qubole.py
@@ -22,7 +22,7 @@ import logging
import os
import pathlib
import time
-from typing import Dict, List, Tuple
+from typing import Dict, List, Optional, Tuple
from qds_sdk.commands import (
Command,
@@ -134,7 +134,7 @@ class QuboleHook(BaseHook):
self.dag_id = kwargs['dag'].dag_id
self.kwargs = kwargs
self.cls = COMMAND_CLASSES[self.kwargs['command_type']]
- self.cmd = None
+ self.cmd: Optional[Command] = None
self.task_instance = None
@staticmethod
diff --git a/airflow/providers/qubole/hooks/qubole_check.py b/airflow/providers/qubole/hooks/qubole_check.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/qubole/hooks/qubole_check.py
+++ b/airflow/providers/qubole/hooks/qubole_check.py
@@ -23,6 +23,7 @@ from typing import List, Optional, Union
from qds_sdk.commands import Command
from airflow.exceptions import AirflowException
+from airflow.hooks.dbapi import DbApiHook
from airflow.providers.qubole.hooks.qubole import QuboleHook
log = logging.getLogger(__name__)
@@ -74,7 +75,7 @@ def parse_first_row(row_list) -> List[Union[bool, float, int, str]]:
return record_list
-class QuboleCheckHook(QuboleHook):
+class QuboleCheckHook(QuboleHook, DbApiHook):
"""Qubole check hook"""
def __init__(self, context, *args, **kwargs) -> None:
diff --git a/airflow/providers/qubole/operators/qubole_check.py b/airflow/providers/qubole/operators/qubole_check.py
index <HASH>..<HASH> 100644
--- a/airflow/providers/qubole/operators/qubole_check.py
+++ b/airflow/providers/qubole/operators/qubole_check.py
@@ -27,11 +27,14 @@ from airflow.providers.qubole.operators.qubole import QuboleOperator
class _QuboleCheckOperatorMixin:
"""This is a Mixin for Qubole related check operators"""
+ kwargs: dict
+ results_parser_callable: Optional[Callable]
+
def execute(self, context=None) -> None:
"""Execute a check operation against Qubole"""
try:
self._hook_context = context
- super().execute(context=context)
+ super().execute(context=context) # type: ignore[misc]
except AirflowException as e:
handle_airflow_exception(e, self.get_hook())
@@ -39,9 +42,11 @@ class _QuboleCheckOperatorMixin:
"""Get QuboleCheckHook"""
return self.get_hook()
- # this overwrite the original QuboleOperator.get_hook() which returns a QuboleHook.
def get_hook(self) -> QuboleCheckHook:
- """Reinitialising the hook, as some template fields might have changed"""
+ """
+ Reinitialising the hook, as some template fields might have changed
+ This method overwrites the original QuboleOperator.get_hook() which returns a QuboleHook.
+ """
return QuboleCheckHook(
context=self._hook_context, results_parser_callable=self.results_parser_callable, **self.kwargs
)
@@ -107,7 +112,11 @@ class QuboleCheckOperator(_QuboleCheckOperatorMixin, SQLCheckOperator, QuboleOpe
ui_fgcolor = '#000'
def __init__(
- self, *, qubole_conn_id: str = "qubole_default", results_parser_callable: Callable = None, **kwargs
+ self,
+ *,
+ qubole_conn_id: str = "qubole_default",
+ results_parser_callable: Optional[Callable] = None,
+ **kwargs,
) -> None:
sql = get_sql_from_qbol_cmd(kwargs)
kwargs.pop('sql', None)
@@ -168,7 +177,7 @@ class QuboleValueCheckOperator(_QuboleCheckOperatorMixin, SQLValueCheckOperator,
*,
pass_value: Union[str, int, float],
tolerance: Optional[Union[int, float]] = None,
- results_parser_callable: Callable = None,
+ results_parser_callable: Optional[Callable] = None,
qubole_conn_id: str = "qubole_default",
**kwargs,
) -> None: | Fix MyPy Errors for Qubole provider. (#<I>) | apache_airflow | train |
c13ad29fd2f56b546fe3f959acbe865a03530930 | diff --git a/docs/settings.rst b/docs/settings.rst
index <HASH>..<HASH> 100644
--- a/docs/settings.rst
+++ b/docs/settings.rst
@@ -95,6 +95,14 @@ JWT_PAYLOAD_GET_USERNAME_HANDLER
lambda payload: payload.get(get_user_model().USERNAME_FIELD)
+JWT_GET_USER_BY_NATURAL_KEY_HANDLER
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+ A custom function to get User object from username
+
+ .. autofunction:: graphql_jwt.utils.get_user_by_natural_key
+
+
Token expiration
----------------
diff --git a/graphql_jwt/settings.py b/graphql_jwt/settings.py
index <HASH>..<HASH> 100644
--- a/graphql_jwt/settings.py
+++ b/graphql_jwt/settings.py
@@ -30,6 +30,8 @@ DEFAULTS = {
'JWT_PAYLOAD_GET_USERNAME_HANDLER': (
lambda payload: payload.get(get_user_model().USERNAME_FIELD)
),
+ 'JWT_GET_USER_BY_NATURAL_KEY_HANDLER':
+ 'graphql_jwt.utils.get_user_by_natural_key',
'JWT_REFRESH_EXPIRED_HANDLER': 'graphql_jwt.utils.refresh_has_expired',
'JWT_GET_REFRESH_TOKEN_HANDLER':
'graphql_jwt.refresh_token.utils.get_refresh_token_by_model',
@@ -44,6 +46,7 @@ IMPORT_STRINGS = (
'JWT_DECODE_HANDLER',
'JWT_PAYLOAD_HANDLER',
'JWT_PAYLOAD_GET_USERNAME_HANDLER',
+ 'JWT_GET_USER_BY_NATURAL_KEY_HANDLER',
'JWT_REFRESH_EXPIRED_HANDLER',
'JWT_GET_REFRESH_TOKEN_HANDLER',
'JWT_ALLOW_ANY_HANDLER',
diff --git a/graphql_jwt/utils.py b/graphql_jwt/utils.py
index <HASH>..<HASH> 100644
--- a/graphql_jwt/utils.py
+++ b/graphql_jwt/utils.py
@@ -92,10 +92,10 @@ def get_payload(token, context=None):
return payload
-def get_user_by_natural_key(user_id):
+def get_user_by_natural_key(username):
User = get_user_model()
try:
- return User.objects.get_by_natural_key(user_id)
+ return User.objects.get_by_natural_key(username)
except User.DoesNotExist:
return None
@@ -106,7 +106,7 @@ def get_user_by_payload(payload):
if not username:
raise exceptions.JSONWebTokenError(_('Invalid payload'))
- user = get_user_by_natural_key(username)
+ user = jwt_settings.JWT_GET_USER_BY_NATURAL_KEY_HANDLER(username)
if user is not None and not user.is_active:
raise exceptions.JSONWebTokenError(_('User is disabled')) | Added JWT_GET_USER_BY_NATURAL_KEY_HANDLER | flavors_django-graphql-jwt | train |
16416966ebee62e101e4cf74265fd027d07b1ab1 | diff --git a/tests/test_commands/test_command_orchestrator.py b/tests/test_commands/test_command_orchestrator.py
index <HASH>..<HASH> 100644
--- a/tests/test_commands/test_command_orchestrator.py
+++ b/tests/test_commands/test_command_orchestrator.py
@@ -34,13 +34,6 @@ class Test_command_orchestrator(TestCase):
self.ports = Mock()
self.command_orchestrator._parse_remote_model = Mock(return_value=remote_resource)
- def test_connect(self):
- # act
- res = self.command_orchestrator.connect(self.context, 'uuid', 'vlan id', 'vlan type')
- # assert
- self.assertTrue(self.command_orchestrator.command_wrapper.execute_command_with_connection.called)
- self.assertTrue(res)
-
def test_disconnect_all(self):
# act
self.command_orchestrator.disconnect_all(self.context, self.ports)
diff --git a/tests/test_network/test_dvswitch/test_port_group_configurer.py b/tests/test_network/test_dvswitch/test_port_group_configurer.py
index <HASH>..<HASH> 100644
--- a/tests/test_network/test_dvswitch/test_port_group_configurer.py
+++ b/tests/test_network/test_dvswitch/test_port_group_configurer.py
@@ -29,6 +29,7 @@ class TestDvPortGroupConfigurer(TestCase):
self.vm = Mock()
self.vm.config.hardware = Mock()
self.vnic = Mock(spec=vim.vm.device.VirtualEthernetCard)
+ self.vnic.macAddress = True
self.vnic.deviceInfo = Mock()
self.vm.config.hardware.device = [self.vnic]
self.py_vmomi_service.find_by_uuid = lambda a, b, c: self.vm
@@ -64,7 +65,7 @@ class TestDvPortGroupConfigurer(TestCase):
self.assertFalse(mapping[0].connect)
def test_connect_vnic_to_networks(self):
- ConnectRequest('vnic 1', Mock(spec=vim.Network))
+ req = ConnectRequest('vnic 1', Mock(spec=vim.Network))
mapping = [ConnectRequest('vnic 1', Mock(spec=vim.Network))]
res = self.configurer.connect_vnic_to_networks(self.vm, mapping, Mock(spec=vim.Network))
self.assertIsNotNone(res[0].vnic) | fixing test test_command_orchestrator | QualiSystems_vCenterShell | train |
a1dd2b629320ad70de618c82dab8a0050b38f56d | diff --git a/graphene/contrib/django/converter.py b/graphene/contrib/django/converter.py
index <HASH>..<HASH> 100644
--- a/graphene/contrib/django/converter.py
+++ b/graphene/contrib/django/converter.py
@@ -31,6 +31,7 @@ def convert_django_field(field):
@convert_django_field.register(models.SlugField)
@convert_django_field.register(models.URLField)
@convert_django_field.register(models.GenericIPAddressField)
+@convert_django_field.register(models.FileField)
@convert_django_field.register(UUIDField)
def convert_field_to_string(field):
return String(description=field.help_text)
diff --git a/graphene/contrib/django/tests/test_converter.py b/graphene/contrib/django/tests/test_converter.py
index <HASH>..<HASH> 100644
--- a/graphene/contrib/django/tests/test_converter.py
+++ b/graphene/contrib/django/tests/test_converter.py
@@ -53,6 +53,14 @@ def test_should_ipaddress_convert_string():
assert_conversion(models.GenericIPAddressField, graphene.String)
+def test_should_file_convert_string():
+ assert_conversion(models.FileField, graphene.String)
+
+
+def test_should_image_convert_string():
+ assert_conversion(models.ImageField, graphene.String)
+
+
def test_should_auto_convert_id():
assert_conversion(models.AutoField, graphene.ID, primary_key=True) | Added FileField and ImageField conversion. Fixed #<I> | graphql-python_graphene | train |
12a6a87b2c8a23ebace8034f4345ff65034d52d2 | diff --git a/commands/cli/parse.go b/commands/cli/parse.go
index <HASH>..<HASH> 100644
--- a/commands/cli/parse.go
+++ b/commands/cli/parse.go
@@ -9,14 +9,14 @@ import (
// Parse parses the input commandline string (cmd, flags, and args).
// returns the corresponding command Request object.
-func Parse(input []string, root *commands.Command) ([]string, map[string]interface{}, []string, error) {
+func Parse(input []string, root *commands.Command) (commands.Request, error) {
path, input := parsePath(input, root)
opts, args, err := parseOptions(input)
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
- return path, opts, args, nil
+ return commands.NewRequest(path, opts, args, nil), nil
}
// parsePath gets the command path from the command line input | commands/cli: Made Parse return a Request (again) | ipfs_go-ipfs | train |
e35819bbec66374d8aa327eeb1d3ddaebaedd490 | diff --git a/lib/faraday.rb b/lib/faraday.rb
index <HASH>..<HASH> 100644
--- a/lib/faraday.rb
+++ b/lib/faraday.rb
@@ -14,7 +14,7 @@ require 'forwardable'
# conn.get '/'
#
module Faraday
- VERSION = "0.9.0.rc1"
+ VERSION = "0.9.0.rc2"
class << self
# Public: Gets or sets the root path that Faraday is being loaded from. | Release <I>.rc2 | lostisland_faraday | train |
7df2a66623253157a9d5c249c3bcba045fb66123 | diff --git a/builtin/providers/openstack/resource_openstack_fw_policy_v1.go b/builtin/providers/openstack/resource_openstack_fw_policy_v1.go
index <HASH>..<HASH> 100644
--- a/builtin/providers/openstack/resource_openstack_fw_policy_v1.go
+++ b/builtin/providers/openstack/resource_openstack_fw_policy_v1.go
@@ -7,6 +7,7 @@ import (
"github.com/gophercloud/gophercloud"
"github.com/gophercloud/gophercloud/openstack/networking/v2/extensions/fwaas/policies"
+ "github.com/hashicorp/terraform/helper/resource"
"github.com/hashicorp/terraform/helper/schema"
)
@@ -61,7 +62,6 @@ func resourceFWPolicyV1() *schema.Resource {
}
func resourceFWPolicyV1Create(d *schema.ResourceData, meta interface{}) error {
-
config := meta.(*Config)
networkingClient, err := config.networkingV2Client(d.Get("region").(string))
if err != nil {
@@ -130,7 +130,6 @@ func resourceFWPolicyV1Read(d *schema.ResourceData, meta interface{}) error {
}
func resourceFWPolicyV1Update(d *schema.ResourceData, meta interface{}) error {
-
config := meta.(*Config)
networkingClient, err := config.networkingV2Client(d.Get("region").(string))
if err != nil {
@@ -179,15 +178,27 @@ func resourceFWPolicyV1Delete(d *schema.ResourceData, meta interface{}) error {
return fmt.Errorf("Error creating OpenStack networking client: %s", err)
}
- for i := 0; i < 15; i++ {
+ stateConf := &resource.StateChangeConf{
+ Pending: []string{"ACTIVE"},
+ Target: []string{"DELETED"},
+ Refresh: waitForFirewallPolicyDeletion(networkingClient, d.Id()),
+ Timeout: 120 * time.Second,
+ Delay: 0,
+ MinTimeout: 2 * time.Second,
+ }
- err = policies.Delete(networkingClient, d.Id()).Err
- if err == nil {
- break
- }
+ if _, err = stateConf.WaitForState(); err != nil {
+ return err
+ }
- if _, ok := err.(gophercloud.ErrDefault404); ok {
- return nil
+ return nil
+}
+
+func waitForFirewallPolicyDeletion(networkingClient *gophercloud.ServiceClient, id string) resource.StateRefreshFunc {
+ return func() (interface{}, string, error) {
+ err := policies.Delete(networkingClient, id).Err
+ if err == nil {
+ return "", "DELETED", nil
}
if errCode, ok := err.(gophercloud.ErrUnexpectedResponseCode); ok {
@@ -195,13 +206,10 @@ func resourceFWPolicyV1Delete(d *schema.ResourceData, meta interface{}) error {
// This error usually means that the policy is attached
// to a firewall. At this point, the firewall is probably
// being delete. So, we retry a few times.
- time.Sleep(time.Second * 2)
- continue
+ return nil, "ACTIVE", nil
}
}
- return err
+ return nil, "ACTIVE", err
}
-
- return nil
} | provider/openstack: gophercloud migration: Allow Firewall v1 Policy to cleanly delete | hashicorp_terraform | train |
1c99f89e82078b8c9e5a934a38637ce622e5e9ab | diff --git a/NotificationsModule.php b/NotificationsModule.php
index <HASH>..<HASH> 100644
--- a/NotificationsModule.php
+++ b/NotificationsModule.php
@@ -26,6 +26,11 @@ class NotificationsModule extends Module
public $allowDuplicate = false;
/**
+ * @var string Database created_at field format
+ */
+ public $dbDateFormat = 'Y-m-d H:i:s';
+
+ /**
* @var callable|integer The current user id
*/
public $userId;
diff --git a/controllers/NotificationsController.php b/controllers/NotificationsController.php
index <HASH>..<HASH> 100644
--- a/controllers/NotificationsController.php
+++ b/controllers/NotificationsController.php
@@ -54,6 +54,11 @@ class NotificationsController extends Controller
$results = [];
foreach ($models as $model) {
+
+ // give user a chance to parse the date as needed
+ $date = \DateTime::createFromFormat($this->module->dbDateFormat, $model->created_at)
+ ->format('Y-m-d H:i:s');
+
/** @var Notification $model */
$results[] = [
'id' => $model->id,
@@ -63,7 +68,7 @@ class NotificationsController extends Controller
'url' => Url::to(['notifications/rnr', 'id' => $model->id]),
'key' => $model->key,
'flashed' => $model->flashed,
- 'date' => $model->created_at
+ 'date' => $date,
];
}
return $results;
diff --git a/docs/Configuration.md b/docs/Configuration.md
index <HASH>..<HASH> 100644
--- a/docs/Configuration.md
+++ b/docs/Configuration.md
@@ -20,6 +20,8 @@ return [
// Allow to have notification with same (user_id, key, key_id)
// Default to FALSE
'allowDuplicate' => false,
+ // Allow custom date formatting in database
+ 'dbDateFormat' => 'Y-m-d H:i:s',
// This callable should return your logged in user Id
'userId' => function() {
return \Yii::$app->user->id; | Add dbDateFormat to support Oracle (and other) formatting | machour_yii2-notifications | train |
4828499b6914cc8770615b8edd6fbea2f16dcd5f | diff --git a/tests/test_client.py b/tests/test_client.py
index <HASH>..<HASH> 100644
--- a/tests/test_client.py
+++ b/tests/test_client.py
@@ -101,7 +101,10 @@ def test_client_passes_correct_reply(client, mock_protocol, method_name,
transport_return = '023hoisdfh'
mock_transport.send_message = Mock(return_value=transport_return)
client.call(method_name, method_args, method_kwargs, one_way_setting)
- mock_protocol.parse_reply.assert_called_with(transport_return)
+ if one_way_setting:
+ mock_protocol.parse_reply.assert_not_called()
+ else:
+ mock_protocol.parse_reply.assert_called_with(transport_return)
def test_client_raises_error_replies(client, mock_protocol, method_name,
@@ -111,5 +114,6 @@ def test_client_raises_error_replies(client, mock_protocol, method_name,
error_response.error = 'foo'
mock_protocol.parse_reply = Mock(return_value=error_response)
- with pytest.raises(RPCError):
- client.call(method_name, method_args, method_kwargs, one_way_setting)
+ if not one_way_setting:
+ with pytest.raises(RPCError):
+ client.call(method_name, method_args, method_kwargs, one_way_setting) | Test correct operation of one_way flag | mbr_tinyrpc | train |
193cceb0b7ea0372f0c847f17b8d02d2d5844e4b | diff --git a/windpowerlib/power_output.py b/windpowerlib/power_output.py
index <HASH>..<HASH> 100644
--- a/windpowerlib/power_output.py
+++ b/windpowerlib/power_output.py
@@ -249,15 +249,19 @@ def power_curve_density_correction(wind_speed, power_curve_wind_speeds,
# TODO: 1. smooth curve? 2. density corr?
def smooth_power_curve(power_curve_wind_speeds, power_curve_values,
- block_width=0.01, normalized_standard_deviation=0.15):
+ block_width=0.01, normalized_standard_deviation=0.15,
+ mean_gauss=0):
r"""
- Calulates the the turbine power output using a smoothed power curve.
+ Smoothes the input power curve values by using a gaussian distribution.
Parameters
----------
- p_values : pandas.Series
- Power curve of the wind turbine.
- Indices are the wind speeds of the power curve in m/s.
+ power_curve_wind_speeds : pandas.Series
+ Wind speeds in m/s for which the power curve values are provided in
+ `power_curve_values`.
+ power_curve_values : pandas.Series or numpy.array
+ Power curve values corresponding to wind speeds in
+ `power_curve_wind_speeds`.
Returns
@@ -270,23 +274,32 @@ def smooth_power_curve(power_curve_wind_speeds, power_curve_values,
----------
Knorr p. 106
"""
-
smoothed_power_curve_values = []
+ # Steop of power curve wind speeds
+ step = power_curve_wind_speeds.iloc[-2] - power_curve_wind_speeds.iloc[-3]
+ # Append wind speeds to `power_curve_wind_speeds` until 40 m/s
+ while (power_curve_wind_speeds.values[-1] < 40.0):
+ power_curve_wind_speeds.append(
+ pd.Series(power_curve_wind_speeds.iloc[-1] + step,
+ index=[power_curve_wind_speeds.index[-1] + 1]))
+ power_curve_values.append(pd.Series(0.0, index=[power_curve_values.index[-1] + 1]))
for power_curve_wind_speed in power_curve_wind_speeds:
- # Create list of wind speeds for the moving block
- wind_speeds_block = np.linspace(
- power_curve_wind_speed - block_width, power_curve_wind_speed +
- block_width, num=10)
+ # Create array of wind speeds for the moving block
+ wind_speeds_block = (np.arange(-15.0, 15.0, block_width) +
+ power_curve_wind_speed)
+ # Get the smoothed value of the power output
smoothed_value = sum(
block_width * np.interp(wind_speed, power_curve_wind_speeds,
power_curve_values, left=0, right=0) *
tools.gaussian_distribution(
power_curve_wind_speed - wind_speed,
- power_curve_wind_speed * normalized_standard_deviation, mean=0)
+ power_curve_wind_speed * normalized_standard_deviation,
+ mean_gauss)
for wind_speed in wind_speeds_block)
smoothed_power_curve_values.append(smoothed_value)
+ # Create smoothed power curve
+ smoothed_power_curve = pd.Series(smoothed_power_curve_values,
+ index=power_curve_wind_speeds)
# turbulence_intensity = 1 / (np.log(hub_height / roughness_length))
# standard_deviation = turbulence_intensity * wind_speed
- return smoothed_power_curve_values
-
-
+ return smoothed_power_curve | Add to smoothed power curve function | wind-python_windpowerlib | train |
35bffd071b0146f135d8c6e920a01210019996d8 | diff --git a/libravatar.php b/libravatar.php
index <HASH>..<HASH> 100644
--- a/libravatar.php
+++ b/libravatar.php
@@ -55,10 +55,10 @@
* <code>
* $libravatar = new Libravatar();
* $options = array()
- * $options['s'] = 40;
- * $options['algorithm'] = sha256;
+ * $options['s'] = '40';
+ * $options['algorithm'] = 'sha256';
* $options['https'] = true;
- * $options['d'] = http://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png;
+ * $options['d'] = 'http://upload.wikimedia.org/wikipedia/commons/a/af/Tux.png';
* $url = $libravatar->url('[email protected]', $options);
* </code>
* | Fix code example missing quotes around strings. Oops. | pear_Services_Libravatar | train |
06fb7f61698dbc2b7549f55ba5337a744b4d89c0 | diff --git a/lib/microspec/predicates.rb b/lib/microspec/predicates.rb
index <HASH>..<HASH> 100644
--- a/lib/microspec/predicates.rb
+++ b/lib/microspec/predicates.rb
@@ -10,4 +10,6 @@ module Microspec
@_predicates[method] = proc
end
end
+
+ Predicates[:truthy?] = -> (actual) { actual }
end | Add truthy? predicate helper | Erol_microspec | train |
bf404b895294673ff961a69edc2d3cb2d5f2799e | diff --git a/dipper/sources/ZFIN.py b/dipper/sources/ZFIN.py
index <HASH>..<HASH> 100644
--- a/dipper/sources/ZFIN.py
+++ b/dipper/sources/ZFIN.py
@@ -1285,6 +1285,17 @@ class ZFIN(Source):
# zfin environments (standard salinity and temperature, heat shock (37C), etc), which
# includes the zfin ID instead of a GENO ID for those environments.
+ # Clean up the units
+ if units == 'N/A' or units == '':
+ units = None
+
+ # Clean up the values
+ if values == '' or values == 'N/A':
+ values = None
+
+ if comment == 'NULL':
+ comment = None
+
#Use this regex match if using all knockdown reagents.
#if re.match('ZDB.*',condition):
#Use this regex match if using only morpholino knockdown reagents.
@@ -1295,14 +1306,12 @@ class ZFIN(Source):
geno.addGenotype(extrinsic_geno_id,None,geno.genoparts['extrinsic_genotype'])
# Clean up the units
- if units == 'N/A':
- units = None
if units is not None and re.match('.*\/.*',units):
units = re.sub(r"/",'_',units)
# Clean up the values
- if values == '':
+ if values == '' or values == 'N/A':
values = None
if values is not None:
values = values.replace(' ', '_')
@@ -1313,9 +1322,6 @@ class ZFIN(Source):
#if units is not None and values is not None:
#print(values+units)
- if comment is 'NULL':
- comment = None
-
#Create the targeted sequence id
if units is not None and values is not None:
targeted_sequence_id = condition+'_'+values+units
@@ -1375,15 +1381,30 @@ class ZFIN(Source):
#except KeyError:
#extrinsic_parts[enviro_con] = [condition]
#FIXME: Can remove this if we don't want to deal with any other abnormal environments.
- #elif not re.match('ZDB.*',condition):
+ elif not re.match('ZDB.*',condition):
#FIXME:Need to adjust label for non-knockdown reagent environments
- #if values is not None and units is not None:
- #enviro_label = condition_group+'['+condition+': '+values+units+']'
- #elif values is None and units is not None:
- #enviro_label = condition_group+'['+condition+': '+units+']'
- #elif values is not None and units is None:
- #enviro_label = condition_group+'['+condition+': '+values+']'
+ if values is not None and units is not None and comment is not None:
+ enviro_label = condition_group+'['+condition+': '+values+units+' ('+comment+')]'
+ elif values is not None and units is not None and comment is None:
+ enviro_label = condition_group+'['+condition+': '+values+units+']'
+ elif values is not None and units is None and comment is not None:
+ enviro_label = condition_group+'['+condition+': '+values+' ('+comment+')]'
+ elif values is not None and units is None and comment is None:
+ enviro_label = condition_group+'['+condition+': '+values+']'
+ elif values is None and units is None and comment is None:
+ enviro_label = condition_group+'['+condition+']'
+ elif values is None and units is None and comment is not None:
+ enviro_label = condition_group+'['+condition+' ('+comment+')]'
+ elif values is None and units is not None and comment is None:
+ enviro_label = condition_group+'['+condition+': '+units+']'
+ elif values is None and units is not None and comment is not None:
+ enviro_label = condition_group+'['+condition+': '+units+' ('+comment+')]'
+ else:
+ logger.warn('No environment label created for environment %s.', environment_id)
+ #enviro_label = '<empty>'
+ enviro_label = ''
+ #print(enviro_label) | ZFIN.py: Added environment label processing. | monarch-initiative_dipper | train |
0458e2edb289d7c84a617a652edfbc425099b2ce | diff --git a/src/compiler/parser/index.js b/src/compiler/parser/index.js
index <HASH>..<HASH> 100644
--- a/src/compiler/parser/index.js
+++ b/src/compiler/parser/index.js
@@ -169,9 +169,10 @@ export function parse (
if (currentParent && !element.forbidden) {
if (element.else) { // else block
processElse(element, currentParent)
- } else if (element.slotTarget && element.slotScope) { // scoped slot
- (currentParent.scopedSlots || (currentParent.scopedSlots = {}))[element.slotTarget] = element
+ } else if (element.slotScope) { // scoped slot
currentParent.plain = false
+ const name = element.slotTarget || 'default'
+ ;(currentParent.scopedSlots || (currentParent.scopedSlots = {}))[name] = element
} else {
currentParent.children.push(element)
element.parent = currentParent
@@ -356,6 +357,8 @@ function processSlot (el) {
const slotTarget = getBindingAttr(el, 'slot')
if (slotTarget) {
el.slotTarget = slotTarget === '""' ? '"default"' : slotTarget
+ }
+ if (el.tag === 'template') {
el.slotScope = getAndRemoveAttr(el, 'scope')
}
}
diff --git a/test/unit/features/component/component-scoped-slot.spec.js b/test/unit/features/component/component-scoped-slot.spec.js
index <HASH>..<HASH> 100644
--- a/test/unit/features/component/component-scoped-slot.spec.js
+++ b/test/unit/features/component/component-scoped-slot.spec.js
@@ -1,30 +1,13 @@
import Vue from 'vue'
-describe('Component scoped slot', () => {
- it('default slot', () => {
- const vm = new Vue({
- template: `<test><span slot scope="props">{{ props.msg }}</span></test>`,
- components: {
- test: {
- data () {
- return { msg: 'hello' }
- },
- template: `
- <div>
- <slot :msg="msg"></slot>
- </div>
- `
- }
- }
- }).$mount()
- expect(vm.$el.innerHTML).toBe('<span>hello</span>')
- })
-
- it('normal element slot', done => {
+fdescribe('Component scoped slot', () => {
+ it('default slot', done => {
const vm = new Vue({
template: `
<test ref="test">
- <span slot="item" scope="props">{{ props.text }}</span>
+ <template scope="props">
+ <span>{{ props.msg }}</span>
+ </template>
</test>
`,
components: {
@@ -34,7 +17,7 @@ describe('Component scoped slot', () => {
},
template: `
<div>
- <slot name="item" :text="msg"></slot>
+ <slot :msg="msg"></slot>
</div>
`
}
@@ -204,7 +187,9 @@ describe('Component scoped slot', () => {
const vm = new Vue({
template: `
<test ref="test">
- <span slot="item" scope="props">{{ props.text || 'meh' }}</span>
+ <template slot="item" scope="props">
+ <span>{{ props.text || 'meh' }}</span>
+ </template>
</test>
`,
components: { | adjustment: scoped slots must use <template> | IOriens_wxml-transpiler | train |
bad0a315d77eea16ff9efc9b94ac060969823b9a | diff --git a/lib/win.js b/lib/win.js
index <HASH>..<HASH> 100644
--- a/lib/win.js
+++ b/lib/win.js
@@ -165,6 +165,7 @@ function toStream({path, files}) {
const stream = Readable.from(str);
assign(stream, {
+ type: 'directory',
path,
files,
}); | fix(win<I>) add stream type | coderaiser_win32 | train |
6bc38f39b3caa4867106cd2c556e3244b7267779 | diff --git a/bind/bind.go b/bind/bind.go
index <HASH>..<HASH> 100644
--- a/bind/bind.go
+++ b/bind/bind.go
@@ -75,6 +75,7 @@ func listenTo(bind string) (net.Listener, error) {
bind, err)
}
f := os.NewFile(uintptr(fd), bind)
+ defer f.Close()
return net.FileListener(f)
} else if strings.HasPrefix(bind, "einhorn@") {
fd, err := strconv.Atoi(bind[8:])
diff --git a/bind/einhorn.go b/bind/einhorn.go
index <HASH>..<HASH> 100644
--- a/bind/einhorn.go
+++ b/bind/einhorn.go
@@ -65,6 +65,7 @@ func einhornBind(n int) (net.Listener, error) {
fno := einhornFdMap(n)
f := os.NewFile(uintptr(fno), fmt.Sprintf("einhorn@%d", n))
+ defer f.Close()
return net.FileListener(f)
} | Be more strict about file closing in bind
This eliminates the race condition mentioned in a<I>c<I>a by forbidding
duplicate binds to the same socket (well, at least in the sense that
attempting to do so will *always* result in an error instead of
nondeterministically resulting in an error). | zenazn_goji | train |
e4042704f01a44b02143e0b60170ba740df8e08f | diff --git a/src/config.js b/src/config.js
index <HASH>..<HASH> 100644
--- a/src/config.js
+++ b/src/config.js
@@ -19,10 +19,10 @@ const Config = CoreObject.extend({
*/
constructor(attrs) {
Object.keys(this.deprecatedProperties).forEach(key => {
- const transformName = this.deprecatedProperties[key];
- const transform = this[transformName];
-
if (attrs.hasOwnProperty(key)) {
+ const transformName = this.deprecatedProperties[key];
+ const transform = this[transformName];
+
transform(attrs[key], attrs);
}
optimizations in the deprecation transforms | Shopify_js-buy-sdk | train
1d623ebab7dea395a2b632737cd976090941b509 | diff --git a/chatterbot/storage/mongodb.py b/chatterbot/storage/mongodb.py
index <HASH>..<HASH> 100644
--- a/chatterbot/storage/mongodb.py
+++ b/chatterbot/storage/mongodb.py
@@ -122,9 +122,8 @@ class MongoDatabaseAdapter(StorageAdapter):
if search_text_contains:
or_regex = '|'.join([
- '{}'.format(word) for word in search_text_contains.split(' ')
+ '({})'.format(word) for word in search_text_contains.split(' ')
])
- or_regex = re.escape(or_regex)
kwargs['search_text'] = re.compile(or_regex)
mongo_ordering = [] | Update mongodb.py
To support regexes with | in them, the expression must be parenthesized. | gunthercox_ChatterBot | train |
5219c2c89b3ec72620548fc2c18ea83fad4854de | diff --git a/src/de/lmu/ifi/dbs/elki/utilities/datastructures/KNNList.java b/src/de/lmu/ifi/dbs/elki/utilities/datastructures/KNNList.java
index <HASH>..<HASH> 100644
--- a/src/de/lmu/ifi/dbs/elki/utilities/datastructures/KNNList.java
+++ b/src/de/lmu/ifi/dbs/elki/utilities/datastructures/KNNList.java
@@ -3,7 +3,6 @@ package de.lmu.ifi.dbs.elki.utilities.datastructures;
import java.util.AbstractList;
import java.util.ArrayList;
import java.util.Collection;
-import java.util.Collections;
import java.util.Iterator;
import java.util.List;
@@ -45,11 +44,16 @@ public class KNNList<D extends Distance<D>> extends ArrayList<DistanceResultPair
super(heap.size());
this.k = heap.getK();
this.maxdist = maxdist;
- // Get sorted data from heap.
+ // Get sorted data from heap; but in reverse.
+ int i;
+ for (i = 0; i < heap.size(); i++) {
+ super.add(null);
+ }
while(!heap.isEmpty()) {
- super.add(heap.poll());
+ assert(i >= 0);
+ super.set(1, heap.poll());
+ i--;
}
- Collections.reverse(this);
}
/** | Fix KNNList unmodifiable issues. | elki-project_elki | train |
5a24794dbeac33c1325a96b5a3a23b918d4d0a4c | diff --git a/pyravendb/store/session_query.py b/pyravendb/store/session_query.py
index <HASH>..<HASH> 100644
--- a/pyravendb/store/session_query.py
+++ b/pyravendb/store/session_query.py
@@ -898,8 +898,8 @@ class Query(object):
return self
def or_else(self):
- if len(self.query_builder) > 0:
- self.query_builder += " OR"
+ if len(self._where_tokens) > 0:
+ self._where_tokens.append(_Token(write=" OR"))
return self
def boost(self, boost):
diff --git a/pyravendb/tests/jvm_migrated_tests/query_tests/test_query.py b/pyravendb/tests/jvm_migrated_tests/query_tests/test_query.py
index <HASH>..<HASH> 100644
--- a/pyravendb/tests/jvm_migrated_tests/query_tests/test_query.py
+++ b/pyravendb/tests/jvm_migrated_tests/query_tests/test_query.py
@@ -38,6 +38,29 @@ class UsersByName(IndexDefinition):
super(UsersByName, self).__init__(maps=maps, reduce=reduce, name=name)
+class Dog:
+ def __init__(self, key, name="Reksio", breed="Shepherd", color="White", age=1, is_vaccinated=True):
+ self.Id = key
+ self.name = name
+ self.breed = breed
+ self.color = color
+ self.age = age
+ self.is_vaccinated = is_vaccinated
+
+
+class DogsIndexResult:
+ def __init__(self, name, age, is_vaccinated):
+ self.name = name
+ self.age = age
+ self.is_vaccinated = is_vaccinated
+
+
+class DogsIndex(IndexDefinition):
+ def __init__(self, name="doggos"):
+ maps = "from dog in docs.dogs select new { dog.name, dog.age, dog.is_vaccinated }"
+ super(DogsIndex, self).__init__(name, maps)
+
+
class TestQuery(TestBase):
def setUp(self):
super(TestQuery, self).setUp()
@@ -59,6 +82,16 @@ class TestQuery(TestBase):
session.save_changes()
self.store.maintenance.send(PutIndexesOperation(UsersByName()))
+ def add_dogs(self, session):
+ session.store(Dog("docs/1", "Snoopy", "Beagle", "White", 6, True))
+ session.store(Dog("docs/2", "Brian", "Labrador", "White", 12, False))
+ session.store(Dog("docs/3", "Django", "Jack Russel", "Black", 3, True))
+ session.store(Dog("docs/4", "Beethoven", "St. Bernard", "Brown", 1, False))
+ session.store(Dog("docs/5", "Scooby Doo", "Great Dane", "Brown", 0, False))
+ session.store(Dog("docs/6", "Old Yeller", "Black Mouth Cur", "White", 2, True))
+ session.store(Dog("docs/7", "Benji", "Mixed", "White", 0, False))
+ session.store(Dog("docs/8", "Lassie", "Collie", "Brown", 6, True))
+
def test_collection_stats(self):
with self.store.open_session() as session:
user1 = UserWithId("John")
@@ -302,3 +335,32 @@ class TestQuery(TestBase):
)
self.assertEqual(1, len(users))
self.assertEqual("John", users[0].name)
+
+ def test_query_by_index(self):
+ index = DogsIndex()
+ self.store.maintenance.send(PutIndexesOperation(index))
+ with self.store.open_session() as session:
+ self.add_dogs(session)
+ session.save_changes()
+
+ with self.store.open_session() as session:
+ query_result = list(
+ session.query(object_type=Dog, index_name=index.name)
+ .where_greater_than("age", 2)
+ .and_also()
+ .where_equals("is_vaccinated", False)
+ )
+
+ self.assertEqual(1, len(query_result))
+ self.assertEqual("Brian", query_result[0].name)
+
+ query_result_2 = list(
+ session.query(object_type=Dog, index_name=index.name)
+ .where_less_than_or_equal("age", 2)
+ .and_also()
+ .where_equals("is_vaccinated", False)
+ )
+ self.assertEqual(3, len(query_result_2))
+
+ names = list(map(lambda dog: dog.name, query_result_2))
+ self.assertSequenceContainsElements(names, "Beethoven", "Scooby Doo", "Benji") | RDBC-<I> Test for querying by index and query.or_else fix | ravendb_ravendb-python-client | train |
b7c8060704dd37cf0e9802b5496c713f28ab782c | diff --git a/idol/src/main/java/com/hp/autonomy/searchcomponents/idol/parametricvalues/IdolParametricValuesService.java b/idol/src/main/java/com/hp/autonomy/searchcomponents/idol/parametricvalues/IdolParametricValuesService.java
index <HASH>..<HASH> 100644
--- a/idol/src/main/java/com/hp/autonomy/searchcomponents/idol/parametricvalues/IdolParametricValuesService.java
+++ b/idol/src/main/java/com/hp/autonomy/searchcomponents/idol/parametricvalues/IdolParametricValuesService.java
@@ -48,7 +48,7 @@ import java.util.regex.Pattern;
public class IdolParametricValuesService implements ParametricValuesService<IdolParametricRequest, String, AciErrorException> {
private static final String VALUE_NODE_NAME = "value";
private static final Pattern CSV_SEPARATOR_PATTERN = Pattern.compile(",\\s*");
- private static final String IDOL_PARAMETRIC_DATE_FORMAT = "hh:mm:ss dd/MM/yyyy";
+ private static final String IDOL_PARAMETRIC_DATE_FORMAT = "HH:mm:ss dd/MM/yyyy";
private final HavenSearchAciParameterHandler parameterHandler;
private final FieldsService<IdolFieldsRequest, AciErrorException> fieldsService; | FIND-<I> Add controller method for retrieving date field values, for use in widget :: fix date format pattern [rev. alex.scown] | microfocus-idol_haven-search-components | train |
e3aaccebfd87d19841ed9c63e684a5848bf0432a | diff --git a/packages/vaex-core/vaex/dataframe.py b/packages/vaex-core/vaex/dataframe.py
index <HASH>..<HASH> 100644
--- a/packages/vaex-core/vaex/dataframe.py
+++ b/packages/vaex-core/vaex/dataframe.py
@@ -3350,7 +3350,7 @@ class DataFrame(object):
# we only have to do this locally
# if we don't do this locally, we still store this info
# in self._renamed_columns, so it will happen at the server
- self.columns[new] = self.columns.pop(old)
+ self.dataset = self.dataset.renamed({old: new})
if rename_meta_data:
for d in [self.ucds, self.units, self.descriptions]:
if old in d: | refactor: use dataset.rename instead of relying on ColumnProxy | vaexio_vaex | train |
1787a8501b626f272ef57b9b39f5e9b0ffd49475 | diff --git a/inginious-lti-wsgi.py b/inginious-lti-wsgi.py
index <HASH>..<HASH> 100755
--- a/inginious-lti-wsgi.py
+++ b/inginious-lti-wsgi.py
@@ -18,7 +18,7 @@ os.environ['INGInious_CONFIG_LTI'] = '/var/www/INGInious/configuration.lti.yaml'
if os.getenv('INGInious_PATH_LTI'):
sys.path.append(os.getenv('INGInious_PATH_LTI'))
- os.chdir(os.getenv('INGInious_CONFIG_LTI'))
+ os.chdir(os.getenv('INGInious_PATH_LTI'))
import signal
import logging | config -> path -- fix stupid cut and paste error | UCL-INGI_INGInious | train |
c3bf8cb64e6154a36d25e1da7fdf54ea71a1f4a7 | diff --git a/agent/action/mount_disk.go b/agent/action/mount_disk.go
index <HASH>..<HASH> 100644
--- a/agent/action/mount_disk.go
+++ b/agent/action/mount_disk.go
@@ -61,7 +61,10 @@ func (a MountDiskAction) Run(diskCid string, hints ...interface{}) (interface{},
if len(hints) > 0 {
diskSettings = settings.PersistentDiskSettingsFromHint(diskCid, hints[0])
- a.settingsService.SavePersistentDiskHint(diskSettings)
+ err = a.settingsService.SavePersistentDiskHint(diskSettings)
+ if err != nil {
+ return nil, bosherr.WrapError(err, "Saving disk hints failed")
+ }
} else {
var found bool
diskSettings, found = settings.PersistentDiskSettings(diskCid)
diff --git a/agent/action/mount_disk_test.go b/agent/action/mount_disk_test.go
index <HASH>..<HASH> 100644
--- a/agent/action/mount_disk_test.go
+++ b/agent/action/mount_disk_test.go
@@ -147,6 +147,19 @@ var _ = Describe("MountDiskAction", func() {
Expect(err.Error()).To(ContainSubstring("fake-mount-persistent-disk-err"))
Expect(settingsService.SavePersistentDiskHintCallCount).To(Equal(1))
})
+
+ // Should be same for all disk hints input types; string, hash
+ Context("when saving disk hints fails", func() {
+ BeforeEach(func() {
+ settingsService.SavePersistentDiskHintErr = errors.New("Reading all persistent disk hints")
+ })
+ It("should raise error", func() {
+ _, err := action.Run("hint-fake-disk-cid", diskHint)
+ Expect(err).To(HaveOccurred())
+ Expect(err.Error()).To(ContainSubstring("Saving disk hints failed"))
+ Expect(settingsService.SavePersistentDiskHintCallCount).To(Equal(1))
+ })
+ })
})
Context("when the hint is a map", func() {
diff --git a/settings/fakes/fake_settings_service.go b/settings/fakes/fake_settings_service.go
index <HASH>..<HASH> 100644
--- a/settings/fakes/fake_settings_service.go
+++ b/settings/fakes/fake_settings_service.go
@@ -25,6 +25,7 @@ type FakeSettingsService struct {
GetPersistentDiskHintsCallCount int
RemovePersistentDiskHintsCallCount int
SavePersistentDiskHintCallCount int
+ SavePersistentDiskHintErr error
}
func (service *FakeSettingsService) InvalidateSettings() error {
@@ -58,5 +59,8 @@ func (service *FakeSettingsService) RemovePersistentDiskHint(diskID string) erro
func (service *FakeSettingsService) SavePersistentDiskHint(_ boshsettings.DiskSettings) error {
service.SavePersistentDiskHintCallCount++
+ if service.SavePersistentDiskHintErr != nil {
+ return service.SavePersistentDiskHintErr
+ }
return nil
} | Handle error during saving disk hints
[#<I>](<URL>) | cloudfoundry_bosh-agent | train |
c6870be44f13abcdba1c4c9e370902dd6fcf8a50 | diff --git a/docx2html/tests/test_xml.py b/docx2html/tests/test_xml.py
index <HASH>..<HASH> 100644
--- a/docx2html/tests/test_xml.py
+++ b/docx2html/tests/test_xml.py
@@ -602,11 +602,10 @@ class MangledIlvlTestCase(_TranslationTestCase):
<li>AAA</li>
</ol>
<ol data-list-type="decimal">
- <li>BBB
- <ol data-list-type="decimal">
- <li>CCC</li>
- </ol>
- </li>
+ <li>BBB</li>
+ </ol>
+ <ol data-list-type="decimal">
+ <li>CCC</li>
</ol>
</html>
''' | refs #<I>: updated the test for no longer doing stupid nesting | PolicyStat_docx2html | train |
f88ebcdb3a4c7752fb840744db85f5e083622d50 | diff --git a/lib/key.js b/lib/key.js
index <HASH>..<HASH> 100644
--- a/lib/key.js
+++ b/lib/key.js
@@ -63,6 +63,13 @@ function Key (ns, set, key, digest) {
}
}
+Key.prototype.equals = function (other) {
+ return this.ns === other.ns &&
+ ((!isSet(this.set) && !isSet(other.set)) || this.set === other.set) &&
+ ((!isSet(this.key) && !isSet(other.key)) || this.key === other.key) &&
+ (!isSet(this.digest) || !isSet(other.digest) || this.digest.equals(other.digest))
+}
+
function isSet (value) {
return typeof value !== 'undefined' && value !== null
}
diff --git a/test/key.js b/test/key.js
index <HASH>..<HASH> 100644
--- a/test/key.js
+++ b/test/key.js
@@ -172,4 +172,83 @@ describe('Key', function () {
})
})
})
+
+ describe('equals', function () {
+ it('matches two keys with identical ns, set and user key', function () {
+ let key1 = new Key('ns1', 'set1', 'key1')
+ let key2 = new Key('ns1', 'set1', 'key1')
+ expect(key1.equals(key2)).to.be(true)
+ expect(key2.equals(key1)).to.be(true)
+ })
+
+ it('matches two keys with identical ns, set, user key and digest', function () {
+ let key1 = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ let key2 = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ expect(key1.equals(key2)).to.be(true)
+ expect(key2.equals(key1)).to.be(true)
+ })
+
+ it('matches two keys with identical ns, set and digest', function () {
+ let key1 = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ let key2 = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ expect(key1.equals(key2)).to.be(true)
+ expect(key2.equals(key1)).to.be(true)
+ })
+
+ it('a key with digest to another key with identical ns, set and user key but without digest', function () {
+ let key1 = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ let key2 = new Key('ns1', 'set1', 'key1')
+ expect(key1.equals(key2)).to.be(true)
+ expect(key2.equals(key1)).to.be(true)
+ })
+
+ it('matches two keys with identical ns, empty set and user key', function () {
+ let key1 = new Key('ns1', null, 'key1')
+ let key2 = new Key('ns1', null, 'key1')
+ expect(key1.equals(key2)).to.be(true)
+ expect(key2.equals(key1)).to.be(true)
+ })
+
+ it('does not match two keys with different ns', function () {
+ let key1 = new Key('ns1', 'set1', 'key1')
+ let key2 = new Key('ns2', 'set1', 'key1')
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+
+ it('does not match two keys with different set', function () {
+ let key1 = new Key('ns1', 'set1', 'key1')
+ let key2 = new Key('ns1', 'set2', 'key1')
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+
+ it('does not match a key with set and a key without set', function () {
+ let key1 = new Key('ns1', 'set1', 'key1')
+ let key2 = new Key('ns1', null, 'key1')
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+
+ it('does not match two keys with different user keys', function () {
+ let key1 = new Key('ns1', 'set1', 'key1')
+ let key2 = new Key('ns1', 'set1', 'key2')
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+
+ it('does not match a key with user key and a key without user key', function () {
+ let key1 = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ let key2 = new Key('ns1', 'set1', null, Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+
+ it('does not match two keys with different digests', function () {
+ let key1 = new Key('ns1', 'set1', 'key1', Buffer.from('a1b2c3d4e5f6g7h8i9j0'))
+ let key2 = new Key('ns1', 'set1', 'key1', Buffer.from('0j9i8h7g6f5e4d3c2b1a'))
+ expect(key1.equals(key2)).to.be(false)
+ expect(key2.equals(key1)).to.be(false)
+ })
+ })
}) | Add Key#equals function to compare two keys | aerospike_aerospike-client-nodejs | train |
0f6e14b245b9be7b97f0adb30410bf9672e760d0 | diff --git a/curtsies/formatstring.py b/curtsies/formatstring.py
index <HASH>..<HASH> 100644
--- a/curtsies/formatstring.py
+++ b/curtsies/formatstring.py
@@ -87,7 +87,7 @@ class Chunk(object):
def __unicode__(self):
value = self.color_str
if isinstance(value, bytes):
- return value.decode('utf8')
+ return value.decode('utf8', 'replace')
return value
def __eq__(self, other):
diff --git a/tests/test_fmtstr.py b/tests/test_fmtstr.py
index <HASH>..<HASH> 100644
--- a/tests/test_fmtstr.py
+++ b/tests/test_fmtstr.py
@@ -319,6 +319,10 @@ class TestUnicode(unittest.TestCase):
repr(Chunk(u'–'))
self.assertEqual(repr(fmtstr(u'–')), repr(u'–'))
+ def test_bad_utf8(self):
+ """FmtStrs of bytes that arne't valid utf8 even though a the output medium is shouldn't crash"""
+ str(fmtstr('\xf7'))
+
class TestFSArray(unittest.TestCase):
def test_no_hanging_space(self):
a = FSArray(4, 2) | add temp fix for dealing bpython bug
In the future - probably make fmtstrs only possible with unicode?
Api would be a bit annoying in Python 2 though. Maybe the moment
it comes in, decode the bytes as the terminal is encoded? But they
could be being used in code, in which case we want to know the
encoding of the source file, or the encoding of the bytes from
wherever they came from. | bpython_curtsies | train |
c3c988833bb4fd5c02e51089cafc394e3efb6237 | diff --git a/dallinger/experiment.py b/dallinger/experiment.py
index <HASH>..<HASH> 100644
--- a/dallinger/experiment.py
+++ b/dallinger/experiment.py
@@ -402,33 +402,7 @@ class Experiment(object):
self.fail_participant(participant)
@exp_class_working_dir
- def sandbox(self, exp_config=None, app_id=None):
- """Deploys and runs an experiment in sandbox mode.
- The exp_config object is either a dictionary or a
- ``localconfig.LocalConfig`` object with parameters
- specific to the experiment run grouped by section.
- """
- import dallinger as dlgr
-
- # Ensure that experiment runs in sandbox mode.
- config.extend({
- "mode": u"sandbox",
- "logfile": u"-",
- })
-
- if app_id is None:
- app_id = str(uuid.uuid4())
-
- self.app_id = app_id
- self.exp_config = exp_config
-
- dlgr.command_line.deploy_sandbox_shared_setup(app=app_id,
- verbose=self.verbose,
- exp_config=exp_config)
- return self._finish_experiment()
-
- @exp_class_working_dir
- def deploy(self, exp_config=None, app_id=None):
+ def run(self, exp_config=None, app_id=None):
"""Deploy and run an experiment.
The exp_config object is either a dictionary or a
@@ -437,22 +411,17 @@ class Experiment(object):
"""
import dallinger as dlgr
- # Ensure that experiment is not in sandbox mode.
- config.extend({
- "mode": u"sandbox",
- "logfile": u"-",
- })
-
if app_id is None:
app_id = str(uuid.uuid4())
self.app_id = app_id
self.exp_config = exp_config
- dlgr.command_line.deploy_sandbox_shared_setup(app=app_id,
- verbose=self.verbose,
- exp_config=exp_config)
-
+ dlgr.command_line.deploy_sandbox_shared_setup(
+ app=app_id,
+ verbose=self.verbose,
+ exp_config=exp_config
+ )
return self._finish_experiment()
def _finish_experiment(self): | Simplify high-level API with Experiment.run()
Instead of using experiment.sandbox() and experiment.deploy(), there is
now a single command experiment.run(). | Dallinger_Dallinger | train |
242228887b969c5a1a5816d56fc2fcd057b07ea1 | diff --git a/tests/Jejik/Tests/MT940/Parser/AbnAmroTest.php b/tests/Jejik/Tests/MT940/Parser/AbnAmroTest.php
index <HASH>..<HASH> 100644
--- a/tests/Jejik/Tests/MT940/Parser/AbnAmroTest.php
+++ b/tests/Jejik/Tests/MT940/Parser/AbnAmroTest.php
@@ -53,8 +53,8 @@ class AbnAmroTest extends \PHPUnit_Framework_TestCase
$transactions = $this->statements[0]->getTransactions();
$this->assertCount(8, $transactions);
- $this->assertEquals('2011-05-24', $transactions[0]->getValueDate()->format('Y-m-d'));
- $this->assertEquals('2011-05-24', $transactions[0]->getBookDate()->format('Y-m-d'));
+ $this->assertEquals('2011-05-24 00:00:00', $transactions[0]->getValueDate()->format('Y-m-d H:i:s'));
+ $this->assertEquals('2011-05-24 00:00:00', $transactions[0]->getBookDate()->format('Y-m-d H:i:s'));
$this->assertEquals(-9.00, $transactions[0]->getAmount());
$expected = "GIRO 428428 KPN - DIGITENNE BETALINGSKENM. 000000042188659\r\n"
diff --git a/tests/Jejik/Tests/MT940/Parser/IngTest.php b/tests/Jejik/Tests/MT940/Parser/IngTest.php
index <HASH>..<HASH> 100644
--- a/tests/Jejik/Tests/MT940/Parser/IngTest.php
+++ b/tests/Jejik/Tests/MT940/Parser/IngTest.php
@@ -53,7 +53,7 @@ class IngTest extends \PHPUnit_Framework_TestCase
$transactions = $this->statements[0]->getTransactions();
$this->assertCount(6, $transactions);
- $this->assertEquals('2010-07-22', $transactions[0]->getValueDate()->format('Y-m-d'));
+ $this->assertEquals('2010-07-22 00:00:00', $transactions[0]->getValueDate()->format('Y-m-d H:i:s'));
$this->assertEquals(null, $transactions[0]->getBookDate());
$this->assertEquals(-25.03, $transactions[0]->getAmount());
diff --git a/tests/Jejik/Tests/MT940/Parser/RabobankTest.php b/tests/Jejik/Tests/MT940/Parser/RabobankTest.php
index <HASH>..<HASH> 100644
--- a/tests/Jejik/Tests/MT940/Parser/RabobankTest.php
+++ b/tests/Jejik/Tests/MT940/Parser/RabobankTest.php
@@ -53,7 +53,7 @@ class RabobankTest extends \PHPUnit_Framework_TestCase
$transactions = $this->statements[2]->getTransactions();
$this->assertCount(2, $transactions);
- $this->assertEquals('2011-06-17', $transactions[0]->getValueDate()->format('Y-m-d'));
+ $this->assertEquals('2011-06-17 00:00:00', $transactions[0]->getValueDate()->format('Y-m-d H:i:s'));
$this->assertEquals(null, $transactions[0]->getBookDate());
$this->assertEquals(-44.95, $transactions[0]->getAmount());
diff --git a/tests/Jejik/Tests/MT940/Parser/TriodosTest.php b/tests/Jejik/Tests/MT940/Parser/TriodosTest.php
index <HASH>..<HASH> 100644
--- a/tests/Jejik/Tests/MT940/Parser/TriodosTest.php
+++ b/tests/Jejik/Tests/MT940/Parser/TriodosTest.php
@@ -53,7 +53,7 @@ class TriodosTest extends \PHPUnit_Framework_TestCase
$transactions = $this->statements[0]->getTransactions();
$this->assertCount(2, $transactions);
- $this->assertEquals('2011-01-01', $transactions[0]->getValueDate()->format('Y-m-d'));
+ $this->assertEquals('2011-01-01 00:00:00', $transactions[0]->getValueDate()->format('Y-m-d H:i:s'));
$this->assertEquals(null, $transactions[0]->getBookDate());
$this->assertEquals(-15.70, $transactions[0]->getAmount());
diff --git a/tests/bootstrap.php b/tests/bootstrap.php
index <HASH>..<HASH> 100644
--- a/tests/bootstrap.php
+++ b/tests/bootstrap.php
@@ -1,5 +1,7 @@
<?php
+date_default_timezone_set('UTC');
+
// A simple autoloader for the tests
spl_autoload_register(function ($class) {
if (substr($class, 0, 12) == 'Jejik\\Tests\\') { | Test bookDate and valueDate times | sandermarechal_jejik-mt940 | train |
c8fd1934d3a6a2f2621f2b703258b4155854d823 | diff --git a/src/PatternLab/PatternData/Rules/DocumentationRule.php b/src/PatternLab/PatternData/Rules/DocumentationRule.php
index <HASH>..<HASH> 100644
--- a/src/PatternLab/PatternData/Rules/DocumentationRule.php
+++ b/src/PatternLab/PatternData/Rules/DocumentationRule.php
@@ -39,9 +39,6 @@ class DocumentationRule extends \PatternLab\PatternData\Rule {
$patternTypeDash = PatternData::getPatternTypeDash();
$dirSep = PatternData::getDirSep();
- // make sure the pattern isn't hidden
- $hidden = ($name[0] == "_");
-
// set-up the names, $name == 00-colors.md
$doc = str_replace(".".$this->extProp,"",$name); // 00-colors
$docDash = $this->getPatternName(str_replace("_","",$doc),false); // colors | removing a variable set that i'm not using | pattern-lab_patternlab-php-core | train |
36a40c562afa436b229d10c97540167b06ab6a8c | diff --git a/lib/virtus/support/equalizer.rb b/lib/virtus/support/equalizer.rb
index <HASH>..<HASH> 100644
--- a/lib/virtus/support/equalizer.rb
+++ b/lib/virtus/support/equalizer.rb
@@ -120,8 +120,7 @@ module Virtus
#
# @api public
def ==(other)
- return false unless self.class <=> other.class
- cmp?(__method__, other)
+ other.kind_of?(self.class) && cmp?(__method__, other)
end
end # module Methods
diff --git a/spec/unit/virtus/equalizer/methods/equal_value_spec.rb b/spec/unit/virtus/equalizer/methods/equal_value_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/unit/virtus/equalizer/methods/equal_value_spec.rb
+++ b/spec/unit/virtus/equalizer/methods/equal_value_spec.rb
@@ -3,14 +3,21 @@ require 'spec_helper'
describe Virtus::Equalizer::Methods, '#==' do
subject { object == other }
- let(:object) { described_class.new }
+ let(:object) { described_class.new(true) }
+ let(:described_class) { Class.new(super_class) }
- let(:described_class) do
+ let(:super_class) do
Class.new do
include Virtus::Equalizer::Methods
+ attr_reader :boolean
+
+ def initialize(boolean)
+ @boolean = boolean
+ end
+
def cmp?(comparator, other)
- !!(comparator and other)
+ boolean.send(comparator, other.boolean)
end
end
end
@@ -35,13 +42,25 @@ describe Virtus::Equalizer::Methods, '#==' do
end
end
- context 'with an equivalent object of a subclass' do
- let(:other) { Class.new(described_class).new }
+ context 'with a subclass instance having equivalent obervable state' do
+ let(:other) { Class.new(described_class).new(true) }
it { should be(true) }
- it 'is symmetric' do
- should eql(other == object)
+ it 'is not symmetric' do
+ # the subclass instance should maintain substitutability with the object
+ # (in the LSP sense) the reverse is not true.
+ should_not eql(other == object)
+ end
+ end
+
+ context 'with a superclass instance having equivalent observable state' do
+ let(:other) { super_class.new(true) }
+
+ it { should be(false) }
+
+ it 'is not symmetric' do
+ should_not eql(other == object)
end
end | Change Virtus::Equalizer behaviour to be closer to equalizer
* Even though the implementations differ I wanted to bring the
behaviour closer inline to equalizer so that when we do factor
the code out it shouldn't change behaviour from the pov of the end
user.
The remaining change will be to add coercion in #==, but early
attempts to add it caused other spec failures, so I wanted to
commit what I have now since it is working. | solnic_virtus | train |
f399c488bbec7c564289211920c0696fdbcabf04 | diff --git a/commerce-test-util/src/main/java/com/liferay/commerce/test/util/CommerceTestUtil.java b/commerce-test-util/src/main/java/com/liferay/commerce/test/util/CommerceTestUtil.java
index <HASH>..<HASH> 100644
--- a/commerce-test-util/src/main/java/com/liferay/commerce/test/util/CommerceTestUtil.java
+++ b/commerce-test-util/src/main/java/com/liferay/commerce/test/util/CommerceTestUtil.java
@@ -251,7 +251,7 @@ public class CommerceTestUtil {
CommerceChannelLocalServiceUtil.getCommerceChannelByOrderGroupId(
commerceOrder.getGroupId());
- CommerceTestUtil.addWarehouseCommerceChannelRel(
+ addWarehouseCommerceChannelRel(
commerceInventoryWarehouse.getCommerceInventoryWarehouseId(),
commerceChannel.getCommerceChannelId());
diff --git a/commerce-test/src/testIntegration/java/com/liferay/commerce/internal/util/test/CommerceShippingHelperTest.java b/commerce-test/src/testIntegration/java/com/liferay/commerce/internal/util/test/CommerceShippingHelperTest.java
index <HASH>..<HASH> 100644
--- a/commerce-test/src/testIntegration/java/com/liferay/commerce/internal/util/test/CommerceShippingHelperTest.java
+++ b/commerce-test/src/testIntegration/java/com/liferay/commerce/internal/util/test/CommerceShippingHelperTest.java
@@ -87,8 +87,8 @@ public class CommerceShippingHelperTest {
@Test
public void testGetDimensions() throws Exception {
frutillaRule.scenario(
- "Verify that the product dimensions are correctly retrieved " +
- "from the order"
+ "Verify that the product dimensions are correctly retrieved from " +
+ "the order"
).given(
"I add some product instances with some dimensions"
).when(
@@ -152,8 +152,8 @@ public class CommerceShippingHelperTest {
@Test
public void testGetWeight() throws Exception {
frutillaRule.scenario(
- "Verify that the product weights are correctly retrieved " +
- "from the order"
+ "Verify that the product weights are correctly retrieved from " +
+ "the order"
).given(
"I add some product instances with some weights"
).when( | COMMERCE-<I> fixed commerce shipping and utils text indentation | liferay_com-liferay-commerce | train |
c0f793bd29f6c3b8a9a1b53dd20e511d055c30ee | diff --git a/util.js b/util.js
index <HASH>..<HASH> 100644
--- a/util.js
+++ b/util.js
@@ -23,9 +23,7 @@ module.exports = {
resolveWebpackMode,
webpackMode,
webpackNodeExternalsWhitelist: /@randy\.tarampi|query-string|strict-uri-encode|strip-ansi|ansi-regex|bunyan-sentry-stream|libphonenumber-js|react-router-sitemap|react-hot-loader|dom-helpers|redux-immutable|reduce-reducers|react-router|react-progressive-image|react-metrics|react-materialize|react-event-listener|react-helmet/,
- babelLoaderExclusions: webpackMode === WEBPACK_MODE_DEVELOPMENT
- ? /\/node_modules\/(?!(?:@randy\.tarampi|query-string|strict-uri-encode|strip-ansi|ansi-regex|bunyan-sentry-stream|libphonenumber-js|react-router-sitemap|react-hot-loader|dom-helpers|redux-immutable|reduce-reducers|react-router|react-progressive-image|react-metrics|react-materialize|react-event-listener|react-helmet)\/)/
- : /!^/,
+ babelLoaderExclusions: /\/node_modules\/(?!(?:@randy\.tarampi|query-string|strict-uri-encode|strip-ansi|ansi-regex|bunyan-sentry-stream|libphonenumber-js|react-router-sitemap|react-hot-loader|dom-helpers|redux-immutable|reduce-reducers|react-router|react-progressive-image|react-metrics|react-materialize|react-event-listener|react-helmet)\/)/,
babelRegisterInclusions: /\/(?:node_modules\/(?:@randy\.tarampi|query-string|strict-uri-encode|strip-ansi|ansi-regex|bunyan-sentry-stream|libphonenumber-js|react-router-sitemap|react-hot-loader|dom-helpers|redux-immutable|reduce-reducers|react-router|react-progressive-image|react-metrics|react-materialize|react-event-listener|react-helmet)|packages)\//,
webpackVendorInclusions: /\/node_modules\//
}; | revert: chore: Webpack should `babel` everything in `production` mode.
Hm. It's probably this, isn't it?
This reverts commit 2dbbbe0f<I>e1b<I>ba2eb3d<I>e5b1e<I>b5. | randytarampi_me | train |
97733d0733be293e76bce8952b1bddabda90c114 | diff --git a/src/Server.js b/src/Server.js
index <HASH>..<HASH> 100644
--- a/src/Server.js
+++ b/src/Server.js
@@ -17,8 +17,9 @@ const defaults = {
/**
* Connection hook is run when a client connects to a server. The
- * result is used as an auth reply data. May also return promises
- * for an asynchronous execution.
+ * result is used as an auth reply data. May also return promises for
+ * an asynchronous execution. If the promise is rejected or an error
+ * is thrown, then auth has failed and the socket will be closed.
*
* @callback Server.ConnectionHook
*
@@ -67,7 +68,7 @@ class Server extends EventEmitter {
/**
* Starts a server.
*
- * @param {Object} wssOptions Options that are passed to wss server.
+ * @param {Object} wssOptions Options that are passed to ws server.
* @param {Server.ServerOptions} [serverOptions] Server options.
* @param {Client.SocketOptions} [socketOptions] Socket options.
*/
@@ -99,8 +100,8 @@ class Server extends EventEmitter {
}
_onConnection (socket /* : Object & EventEmitter */) /* : void */ {
- let timeout =
- setTimeout(socket.close.bind(socket, CLOSE_FORBIDDEN), this.authTimeout)
+ let timeout = setTimeout(
+ socket.close.bind(socket, CLOSE_FORBIDDEN), this.authTimeout)
socket.once('message', data => this._addClient(socket, data, timeout))
}
@@ -111,6 +112,7 @@ class Server extends EventEmitter {
clearTimeout(timeout)
uid(18).then(id => {
client = new Client(null, assign({socket, id}, this.socketOptions))
+ client.autoReconnect = false
if (this.connectionHook) {
return attempt(() => client.decoder(data))
.then(authData => this.connectionHook(client, authData)) | refactor: disallow auto reconnect on a server side | an-sh_ws-messaging | train |
214fc8d6e0fe38c218026a1c34ee12270d55f5a3 | diff --git a/test/activity/stack_activity_test.rb b/test/activity/stack_activity_test.rb
index <HASH>..<HASH> 100644
--- a/test/activity/stack_activity_test.rb
+++ b/test/activity/stack_activity_test.rb
@@ -14,7 +14,6 @@ test('stack depth') do |activity|
os_offset = {13 => 1}[android.os.Build::VERSION::SDK_INT].to_i
jruby_offset = {
'1.5.6' => [-2, -5, -6, -8],
- '1.7.0.dev' => [ 0, 0, 5, 5],
}[org.jruby.runtime.Constants::VERSION] || [0,0,0,0]
version_message ="ANDROID: #{android.os.Build::VERSION::SDK_INT}, JRuby: #{org.jruby.runtime.Constants::VERSION}"
assert_equal 44 + os_offset + jruby_offset[0], activity.find_view_by_id(42).text.to_i, version_message | * Lower stack usage on JRuby master | ruboto_ruboto | train |
8ff2b510e8d59608b2717c72c746394bff043d2f | diff --git a/semver/__init__.py b/semver/__init__.py
index <HASH>..<HASH> 100644
--- a/semver/__init__.py
+++ b/semver/__init__.py
@@ -67,6 +67,11 @@ src[NUMERICIDENTIFIERLOOSE] = '[0-9]+'
NONNUMERICIDENTIFIER = R()
src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*'
+# A non-numeric identifier not beginning with a number
+
+NONNUMERICIDENTIFIERBEGINNONNUMBER = R()
+src[NONNUMERICIDENTIFIERBEGINNONNUMBER] = '[a-zA-Z-][a-zA-Z0-9-]*'
+
# ## Main Version
# Three dot-separated numeric identifiers.
@@ -102,7 +107,8 @@ src[PRERELEASE] = ('(?:-(' + src[PRERELEASEIDENTIFIER] +
'(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))')
PRERELEASELOOSE = R()
-src[PRERELEASELOOSE] = ('(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] +
+src[PRERELEASELOOSE] = ('(?:-?((?:(?<=-)' + src[PRERELEASEIDENTIFIERLOOSE] +
+ '|' + src[NONNUMERICIDENTIFIERBEGINNONNUMBER] + ')'
'(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))')
# ## Build Metadata Identifier
diff --git a/semver/tests/test_for_4digit.py b/semver/tests/test_for_4digit.py
index <HASH>..<HASH> 100644
--- a/semver/tests/test_for_4digit.py
+++ b/semver/tests/test_for_4digit.py
@@ -93,6 +93,16 @@ cands = [
"micro_versions": [2, 2],
}
),
+ (
+ "4.1.33.2", True, {
+ "major": 4,
+ "minor": 1,
+ "patch": 33,
+ "prerelease": [],
+ "build": [],
+ "micro_versions": [2],
+ }
+ ),
] | Fix parsing of four(or more)-component version numbers when the third component has multiple digits | podhmo_python-semver | train |
12e353e3acc7b7e5365f4474c41dc3692eaf1f6f | diff --git a/go.mod b/go.mod
index <HASH>..<HASH> 100644
--- a/go.mod
+++ b/go.mod
@@ -1,11 +1,9 @@
module github.com/nats-io/nats-server/v2
-go 1.14
-
require (
+ github.com/minio/highwayhash v1.0.0
github.com/nats-io/jwt v0.3.3-0.20200519195258-f2bf5ce574c7
github.com/nats-io/nats.go v1.10.0
- github.com/minio/highwayhash v1.0.0
github.com/nats-io/nkeys v0.1.4
github.com/nats-io/nuid v1.0.1
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59
diff --git a/server/accounts.go b/server/accounts.go
index <HASH>..<HASH> 100644
--- a/server/accounts.go
+++ b/server/accounts.go
@@ -274,7 +274,42 @@ func (a *Account) updateRemoteServer(m *AccountNumConns) {
a.strack[m.Server.ID] = sconns{conns: int32(m.Conns), leafs: int32(m.LeafNodes)}
a.nrclients += int32(m.Conns) - prev.conns
a.nrleafs += int32(m.LeafNodes) - prev.leafs
+
+ mtce := a.mconns != jwt.NoLimit && (len(a.clients)-int(a.sysclients)+int(a.nrclients) > int(a.mconns))
+ // If we are over here some have snuck in and we need to rebalance.
+ // All others will probably be doing the same thing but better to be
+ // conservative and bit harsh here. Clients will reconnect if we over compensate.
+ var clients []*client
+ if mtce {
+ clients = make([]*client, 0, len(a.clients))
+ for c := range a.clients {
+ clients = append(clients, c)
+ }
+ sort.Slice(clients, func(i, j int) bool {
+ return clients[i].start.After(clients[j].start)
+ })
+ over := (len(a.clients) - int(a.sysclients) + int(a.nrclients)) - int(a.mconns)
+ if over < len(clients) {
+ clients = clients[:over]
+ }
+ }
+ // Now check leafnodes.
+ mtlce := a.mleafs != jwt.NoLimit && (a.nleafs+a.nrleafs > a.mleafs)
+ if mtlce {
+ // Take ones from the end.
+ leafs := a.lleafs
+ over := int(a.nleafs + a.nrleafs - a.mleafs)
+ if over < len(leafs) {
+ leafs = leafs[len(leafs)-over:]
+ }
+ clients = append(clients, leafs...)
+ }
a.mu.Unlock()
+
+ // If we have exceeded our max clients this will be populated.
+ for _, c := range clients {
+ c.maxAccountConnExceeded()
+ }
}
// Removes tracking for a remote server that has shutdown.
@@ -373,13 +408,13 @@ func (a *Account) numLocalLeafNodes() int {
// MaxTotalConnectionsReached returns if we have reached our limit for number of connections.
func (a *Account) MaxTotalConnectionsReached() bool {
- var mtc bool
+ var mtce bool
a.mu.RLock()
if a.mconns != jwt.NoLimit {
- mtc = len(a.clients)-int(a.sysclients)+int(a.nrclients) >= int(a.mconns)
+ mtce = len(a.clients)-int(a.sysclients)+int(a.nrclients) >= int(a.mconns)
}
a.mu.RUnlock()
- return mtc
+ return mtce
}
// MaxActiveConnections return the set limit for the account system
diff --git a/server/events_test.go b/server/events_test.go
index <HASH>..<HASH> 100644
--- a/server/events_test.go
+++ b/server/events_test.go
@@ -780,7 +780,7 @@ func TestSystemAccountConnectionLimits(t *testing.T) {
defer ncb1.Close()
}
- checkFor(t, 1*time.Second, 50*time.Millisecond, func() error {
+ checkFor(t, 5*time.Second, 50*time.Millisecond, func() error {
total := sa.NumClients() + sb.NumClients()
if total > int(nac.Limits.Conn) {
return fmt.Errorf("Expected only %d connections, was allowed to connect %d", nac.Limits.Conn, total) | Close connections when a remote update exceeds maximum | nats-io_gnatsd | train |
Subsets and Splits