hash (stringlengths 40-40) | diff (stringlengths 131-26.7k) | message (stringlengths 7-694) | project (stringlengths 5-67) | split (stringclasses 1) | diff_languages (stringlengths 2-24)
---|---|---|---|---|---
3eaa6578100ed9078ba9af49f44d032fddd7102f | diff --git a/network.js b/network.js
index <HASH>..<HASH> 100644
--- a/network.js
+++ b/network.js
@@ -2702,14 +2702,16 @@ function startAcceptingConnections(){
}
var bStatsCheckUnderWay = true;
db.query(
- "SELECT \n\
+ /* "SELECT \n\
SUM(CASE WHEN event='invalid' THEN 1 ELSE 0 END) AS count_invalid, \n\
SUM(CASE WHEN event='new_good' THEN 1 ELSE 0 END) AS count_new_good \n\
- FROM peer_events WHERE peer_host=? AND event_date>"+db.addTime("-1 HOUR"), [ws.host],
+ FROM peer_events WHERE peer_host=? AND event_date>"+db.addTime("-1 HOUR"),*/
+ "SELECT 1 FROM peer_events WHERE peer_host=? AND event_date>"+db.addTime("-1 HOUR")+" AND event='invalid' LIMIT 1",
+ [ws.host],
function(rows){
bStatsCheckUnderWay = false;
- var stats = rows[0];
- if (stats.count_invalid){
+ // var stats = rows[0];
+ if (rows.length > 0){
console.log("rejecting new client "+ws.host+" because of bad stats");
return ws.terminate();
} | faster check of stats of incoming peer | byteball_ocore | train | js |
50ef67c1421f9cdb71eaed70a3649baecf70afb9 | diff --git a/c7n/mu.py b/c7n/mu.py
index <HASH>..<HASH> 100644
--- a/c7n/mu.py
+++ b/c7n/mu.py
@@ -1237,7 +1237,14 @@ class ConfigRule(object):
if isinstance(func, PolicyLambda):
manager = func.policy.get_resource_manager()
- config_type = manager.get_model().config_type
+ if hasattr(manager.get_model(), 'config_type'):
+ config_type = manager.get_model().config_type
+ else:
+ raise Exception("You may have attempted to deploy a config "
+ "based lambda function with an unsupported config type. "
+ "The most recent AWS config types are here: http://docs.aws"
+ ".amazon.com/config/latest/developerguide/resource"
+ "-config-reference.html.")
params['Scope'] = {
'ComplianceResourceTypes': [config_type]}
else: | mu - runtime validate aws config support for resource type before provisioning (#<I>) | cloud-custodian_cloud-custodian | train | py |
4d67e17ba8f3f6f88c65674ebe2e00bbde6a8ace | diff --git a/lib/hb-helpers.js b/lib/hb-helpers.js
index <HASH>..<HASH> 100644
--- a/lib/hb-helpers.js
+++ b/lib/hb-helpers.js
@@ -132,7 +132,9 @@ module.exports.registerHelpers = function registerHelpers() {
if (Array.isArray(run))
return run[0][whichView].images.waterfall;
else
- return run[whichView].images.waterfall;
+ if (run)
+ return run[whichView].images.waterfall;
+ else return;
}); | don't break if we don't have any runs | sitespeedio_sitespeed.io | train | js |
b210f25abe1e32e5358a9c59d98744affd0c86fe | diff --git a/org.jenetics/src/main/java/org/jenetics/internal/util/Args.java b/org.jenetics/src/main/java/org/jenetics/internal/util/Args.java
index <HASH>..<HASH> 100644
--- a/org.jenetics/src/main/java/org/jenetics/internal/util/Args.java
+++ b/org.jenetics/src/main/java/org/jenetics/internal/util/Args.java
@@ -86,7 +86,7 @@ public class Args {
*/
public Optional<Double> doubleArg(final String name) {
return arg(name)
- .flatMap(s -> parse(s, Double::new));
+ .flatMap(s -> parse(s, Double::valueOf));
}
private static <T> Optional<T> parse( | #<I>: Constructor 'Double(String)' has been deprecated in Java 9.
Using 'Double.valueOf' instead. | jenetics_jenetics | train | java |
f6a3109c9243da65735453acb3538895ba96198f | diff --git a/src/javascript/image/Image.js b/src/javascript/image/Image.js
index <HASH>..<HASH> 100644
--- a/src/javascript/image/Image.js
+++ b/src/javascript/image/Image.js
@@ -481,7 +481,7 @@ define("moxie/image/Image", [
}
if (Env.can('use_data_uri_of', dataUrl.length)) {
- el.innerHTML = '<img src="' + dataUrl + '" width="' + img.width + '" height="' + img.height + '" />';
+ el.innerHTML = '<img src="' + dataUrl + '" width="' + img.width + '" height="' + img.height + '" alt="" />';
img.destroy();
self.trigger('embedded');
} else { | Image: fulfill basic xhtml requirement for img tag | moxiecode_moxie | train | js |
098c1ee45939a322cad9301aab8da72c0f71a5f6 | diff --git a/js/bitstamp.js b/js/bitstamp.js
index <HASH>..<HASH> 100644
--- a/js/bitstamp.js
+++ b/js/bitstamp.js
@@ -840,14 +840,15 @@ module.exports = class bitstamp extends Exchange {
feeCurrency = market['quote'];
symbol = market['symbol'];
}
- let timestamp = this.safeString2 (trade, 'date', 'datetime');
- if (timestamp !== undefined) {
- if (timestamp.indexOf (' ') >= 0) {
+ const datetimeString = this.safeString2 (trade, 'date', 'datetime');
+ let timestamp = undefined;
+ if (datetimeString !== undefined) {
+ if (datetimeString.indexOf (' ') >= 0) {
// iso8601
- timestamp = this.parse8601 (timestamp);
+ timestamp = this.parse8601 (datetimeString);
} else {
// string unix epoch in seconds
- timestamp = parseInt (timestamp);
+ timestamp = parseInt (datetimeString);
timestamp = timestamp * 1000;
}
} | btistamp parseTrade timestamp minor edits | ccxt_ccxt | train | js |
610cb1088fcb84ec88aa4b7b3d3422c8a17542fe | diff --git a/scripts/sass-render/index.js b/scripts/sass-render/index.js
index <HASH>..<HASH> 100644
--- a/scripts/sass-render/index.js
+++ b/scripts/sass-render/index.js
@@ -37,7 +37,13 @@ async function sassToCss(sassFile) {
},
outputStyle: 'compressed',
});
- return result.css.toString();
+
+ // Strip any Byte Order Marking from output CSS
+ let cssStr = result.css.toString();
+ if (cssStr.charCodeAt(0) === 0xFEFF) {
+ cssStr = cssStr.substr(1);
+ }
+ return cssStr;
}
async function sassRender(sourceFile, templateFile, outputFile) { | fix(select): Strip byte order mark in CSS compilation | material-components_material-components-web-components | train | js |
e1f9c1e1399b4277ce6526750b1dab48196d5641 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -126,7 +126,7 @@ build.sub_commands.insert(0, ('build_proto', None))
INSTALL_REQUIRES = [
'contextlib2>=0.5.1,<1.0',
- 'enum34>=1.1.2,<2.0',
+ 'enum34>=1.1.2,<2.0;python_version<"3.4"',
'future>=0.16.0',
'mutablerecords>=0.4.1,<2.0',
'oauth2client>=1.5.2,<2.0', | install enum<I> for certain python version (#<I>) | google_openhtf | train | py |
580031bb64ca15c0c03ff9353de591b7571cd973 | diff --git a/lib/AssetGraph.js b/lib/AssetGraph.js
index <HASH>..<HASH> 100644
--- a/lib/AssetGraph.js
+++ b/lib/AssetGraph.js
@@ -406,10 +406,10 @@ AssetGraph.prototype = {
var nextTransform = transforms[nextStepNo],
startTime = new Date();
nextStepNo += 1;
- nextTransform(that, function () {
+ nextTransform(that, error.logAndExit(function () {
// console.log(nextTransform.name + ': ' + (new Date() - startTime));
executeNextStep();
- });
+ }));
}
}
executeNextStep(); | AssetGraph.transform: Report and die if an error occurs during a transformation. | assetgraph_assetgraph | train | js |
59126753ed781c0db9bbf4d9a13d6a479bf46b29 | diff --git a/ovirtlago/virt.py b/ovirtlago/virt.py
index <HASH>..<HASH> 100644
--- a/ovirtlago/virt.py
+++ b/ovirtlago/virt.py
@@ -22,6 +22,7 @@ import warnings
import ovirtsdk.api
import lago
+import lago.config
import lago.vm
from ovirtsdk.infrastructure.errors import (RequestError, ConnectionError)
@@ -45,6 +46,8 @@ class OvirtVirtEnv(lago.virt.VirtEnv):
'ovirt-host'
)
provider_name = 'ovirt-' + role
+ else:
+ provider_name = lago.config.get('default_vm_provider', 'default')
if provider_name == 'ovirt-engine':
if self._engine_vm is not None: | ovirtlago: use the vm-provider if it's there
Change-Id: Ic<I>b<I>d<I>ab<I>f<I>fb<I>a<I>f<I>cd | lago-project_lago | train | py |
d55ede58d8d37d2b359554cff692f4a842e28f53 | diff --git a/src/js/utils.js b/src/js/utils.js
index <HASH>..<HASH> 100644
--- a/src/js/utils.js
+++ b/src/js/utils.js
@@ -79,8 +79,18 @@ fbUtils.attrString = function(attrs) {
*/
fbUtils.safeAttr = function(name, value) {
name = fbUtils.safeAttrName(name);
+ let valString;
- let valString = fbUtils.escapeAttr(value);
+ if (value) {
+ if (Array.isArray(value)) {
+ valString = fbUtils.escapeAttr(value.join(' '))
+ } else {
+ if (typeof(value) === 'boolean') {
+ value = value.toString();
+ }
+ valString = fbUtils.escapeAttr(value.replace(',', ' ').trim());
+ }
+ }
value = value ? `="${valString}"` : '';
return { | Hotfix: typeUserEvents, attribute array converted to comma separated list (#<I>) | kevinchappell_formBuilder | train | js |
e75528259797830aeb93c974b6a8fea11ad81371 | diff --git a/padaos.py b/padaos.py
index <HASH>..<HASH> 100644
--- a/padaos.py
+++ b/padaos.py
@@ -61,7 +61,7 @@ class IntentContainer:
(r'(\\[^\w ])', r'\1?'),
# === Force 1+ Space Between Words ===
- (r'(?<=\w)(\\\s|\s)+(?=\w)', r'\\W+'),
+ (r'(?<=\w)(\\\s|\s)+', r'\\W+'),
# === Force 0+ Space Between Everything Else ===
(r'\s+', r'\\W*'),
@@ -121,6 +121,7 @@ class IntentContainer:
}
def calc_intents(self, query):
+ query = ' ' + query + ' '
if self.must_compile:
self.compile()
for intent_name, regexes in self.intents.items(): | Fix words not requiring any space in between | MycroftAI_padaos | train | py |
f3baa1114d13a8f213ee5c1e07c710f180c194e4 | diff --git a/spillway/renderers.py b/spillway/renderers.py
index <HASH>..<HASH> 100644
--- a/spillway/renderers.py
+++ b/spillway/renderers.py
@@ -227,10 +227,12 @@ class MapnikRenderer(BaseRenderer):
object.draw(self.map)
except AttributeError:
pass
- bbox = renderer_context.get('bbox')
+ bbox = renderer_context.get('bbox') if renderer_context else None
if bbox:
bbox.transform(self.map.srs)
self.map.zoom_to_box(mapnik.Box2d(*bbox.extent))
+ else:
+ self.map.zoom_all()
img = mapnik.Image(self.map.width, self.map.height)
mapnik.render(self.map, img)
return img.tostring(self.format) | Zoom to all layers without a bbox present | bkg_django-spillway | train | py |
a41a1b4ed642b8c07a6030603e3b8a45b3044680 | diff --git a/gbdxtools/s3.py b/gbdxtools/s3.py
index <HASH>..<HASH> 100644
--- a/gbdxtools/s3.py
+++ b/gbdxtools/s3.py
@@ -87,7 +87,10 @@ class S3(object):
location = location.strip('/')
self.logger.debug('Downloading contents')
- for s3key in s3conn.list_objects(Bucket=bucket, Prefix=(prefix+'/'+location))['Contents']:
+ objects = s3conn.list_objects(Bucket=bucket, Prefix=(prefix+'/'+location))
+ if 'Contents' not in objects:
+ raise ValueError('Download target {}/{}/{} was not found.'.format(bucket, prefix, location))
+ for s3key in objects['Contents']:
key = s3key['Key']
# skip directory keys | Check to make sure the download target exists | DigitalGlobe_gbdxtools | train | py |
be8d6400426fb96964a8447bd941d4ab777a867c | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@ setup(
version='0.3.7',
description='Blanc Basic Pages for Django',
long_description=readme,
- url='https://github.com/blancltd/blanc-basic-pages',
+ url='https://github.com/developersociety/blanc-basic-pages',
maintainer='Blanc Ltd',
maintainer_email='[email protected]',
platforms=['any'], | Update GitHub repos from blancltd to developersociety | developersociety_blanc-basic-pages | train | py |
bba0b456f5c3ed8ac38cd839a10b996dd1e8bc2b | diff --git a/sc2gameLobby/ipAddresses.py b/sc2gameLobby/ipAddresses.py
index <HASH>..<HASH> 100644
--- a/sc2gameLobby/ipAddresses.py
+++ b/sc2gameLobby/ipAddresses.py
@@ -53,7 +53,7 @@ def getPublicIPaddress(timeout=c.DEFAULT_TIMEOUT):
"""visible on public internet"""
start = time.time()
my_public_ip = None
- e = None
+ e = Exception
while my_public_ip == None:
if time.time() - start > timeout:
break | - added robustness in case a connection to the internet is lost. | ttinies_sc2gameLobby | train | py |
46e9bc83a990d356eba5c74440b0d78a321f77d7 | diff --git a/bungiesearch/managers.py b/bungiesearch/managers.py
index <HASH>..<HASH> 100644
--- a/bungiesearch/managers.py
+++ b/bungiesearch/managers.py
@@ -25,8 +25,12 @@ class BungiesearchManager(Manager):
from bungiesearch import Bungiesearch
return Bungiesearch(raw_results=True).index(index).doc_type(doc_type)
- def __init__(self, **kwargs):
- super(BungiesearchManager, self).__init__(**kwargs)
+ def contribute_to_class(self, cls, name):
+ '''
+ Sets up the signal processor. Since self.model is not available
+ in the constructor, we perform this operation here.
+ '''
+ super(BungiesearchManager, self).contribute_to_class(cls, name)
from . import Bungiesearch
from .signals import get_signal_processor | Add contribute_to_class in place of __init__ in manage.py | ChristopherRabotin_bungiesearch | train | py |
0a6f63ff37d860d117072b0981c31db4fee397d3 | diff --git a/ruby/server/lib/roma/command/mh_command_receiver.rb b/ruby/server/lib/roma/command/mh_command_receiver.rb
index <HASH>..<HASH> 100644
--- a/ruby/server/lib/roma/command/mh_command_receiver.rb
+++ b/ruby/server/lib/roma/command/mh_command_receiver.rb
@@ -48,7 +48,7 @@ module Roma
return "SERVER_ERROR #{hname} already exists."
end
st = Roma::Config::STORAGE_CLASS.new
- st.storage_path = "#{@stats.ap_str}/#{hname}"
+ st.storage_path = "#{Roma::Config::STORAGE_PATH}/#{@stats.ap_str}/#{hname}"
st.vn_list = @rttable.vnodes
st.divnum = Roma::Config::STORAGE_DIVNUM
st.option = Roma::Config::STORAGE_OPTION
@@ -91,7 +91,7 @@ module Roma
st = @storages[hname]
@storages.delete(hname)
st.closedb
- rm_rf("#{@stats.ap_str}/#{hname}")
+ rm_rf("#{Roma::Config::STORAGE_PATH}/#{@stats.ap_str}/#{hname}")
@log.info("deletehash #{hname}")
return "DELETED"
rescue =>e | bugfix:multihash concerning the file path was corrected. | roma_roma | train | rb |
12defe0e3d3a843b9ec3ed7d902aea66681e5e48 | diff --git a/teams/admin.py b/teams/admin.py
index <HASH>..<HASH> 100644
--- a/teams/admin.py
+++ b/teams/admin.py
@@ -5,14 +5,30 @@ import reversion
from .models import Team, Membership
+def members_count(obj):
+ return obj.memberships.count()
+members_count.short_description = "Members Count"
+
+
admin.site.register(
Team,
- list_display=["name", "member_access", "manager_access", "creator"],
+ list_display=["name", "member_access", "manager_access", members_count, "creator"],
+ fields=[
+ "name",
+ "slug",
+ "avatar",
+ "description",
+ "member_access",
+ "manager_access",
+ "creator"
+ ],
prepopulated_fields={"slug": ("name",)},
+ raw_id_fields=["creator"]
)
class MembershipAdmin(reversion.VersionAdmin):
+ raw_id_fields = ["user"]
list_display = ["team", "user", "state", "role"]
list_filter = ["team"]
search_fields = ["user__username"] | Fix up admin to be a bit more useful | pinax_pinax-teams | train | py |
3179f0ad4eba1cf4811fb11c82695d9ed5442309 | diff --git a/translation_server/admin.py b/translation_server/admin.py
index <HASH>..<HASH> 100644
--- a/translation_server/admin.py
+++ b/translation_server/admin.py
@@ -37,7 +37,11 @@ class CustomModelAdminMixin(object):
@admin.register(TranslationType)
class TranslationTypeAdmin(CustomModelAdminMixin, TabbedTranslationAdmin):
- pass
+ def get_queryset(self, request):
+ qs = super(TranslationTypeAdmin, self).get_queryset(request)
+ if request.user.is_superuser:
+ return qs
+ return qs.exclude(tag__startswith='DTS')
@admin.register(Translation)
@@ -63,3 +67,9 @@ class TranslationAdmin(CustomModelAdminMixin, TabbedTranslationAdmin):
js_dir + '/admin-translation.js',
)
+ def get_queryset(self, request):
+ qs = super(TranslationAdmin, self).get_queryset(request)
+ # if request.user.is_superuser:
+ # return qs
+ return qs.exclude(tag__startswith='DTS')
+ | Removed "DTS" translations and translation tags from admin | gdelnegro_django-translation-server | train | py |
53eddcae3c6034414ffc10bb7f786e7cf5d01d05 | diff --git a/src/sap.ui.integration/src/sap/ui/integration/util/ManifestResolver.js b/src/sap.ui.integration/src/sap/ui/integration/util/ManifestResolver.js
index <HASH>..<HASH> 100644
--- a/src/sap.ui.integration/src/sap/ui/integration/util/ManifestResolver.js
+++ b/src/sap.ui.integration/src/sap/ui/integration/util/ManifestResolver.js
@@ -91,7 +91,6 @@ sap.ui.define([
Utils.setNestedPropertyValue(oManifest, sManifestPath, oSubConfig);
});
- oCard.destroy();
return JSON.stringify(oManifest);
}; | [INTERNAL] Integration Cards: Do not destroy card during manifest resolving
- card shouldn't be destroyed during manifest resolving
since it must be destroyed by the one who created it
Change-Id: I3a3f<I>f2db<I>ac<I>f<I>bf<I>d<I>f0 | SAP_openui5 | train | js |
327dc8017ac26dcdb915a5505ad1c7de9ebbb6f9 | diff --git a/Kwf/Model/Proxy/Rowset.php b/Kwf/Model/Proxy/Rowset.php
index <HASH>..<HASH> 100644
--- a/Kwf/Model/Proxy/Rowset.php
+++ b/Kwf/Model/Proxy/Rowset.php
@@ -48,6 +48,7 @@ class Kwf_Model_Proxy_Rowset implements Kwf_Model_Rowset_Interface
{
return $this->_rowset->count();
}
+
public function seek($position)
{
$this->_rowset->seek($position);
@@ -61,7 +62,8 @@ class Kwf_Model_Proxy_Rowset implements Kwf_Model_Rowset_Interface
public function offsetGet($offset)
{
- return $this->_rowset->offsetGet($offset);
+ $row = $this->_rowset->offsetGet($offset);
+ return $this->_model->getRowByProxiedRow($row);
}
public function offsetSet($offset, $value) | Fix problem with index-access to specific row in rowset
If accessing a row via index it didn't returned the proxied row
resulting in missing expected functionality. | koala-framework_koala-framework | train | php |
779949c688f7f8c2692c64b76aecf26b32b2b08d | diff --git a/server_test.go b/server_test.go
index <HASH>..<HASH> 100644
--- a/server_test.go
+++ b/server_test.go
@@ -107,6 +107,7 @@ func generateConfig(forwardAddr string) Config {
TraceAddress: fmt.Sprintf("127.0.0.1:%d", tracePort),
TraceAPIAddress: forwardAddr,
TraceMaxLengthBytes: 4096,
+ SsfBufferSize: 32,
}
} | Set SsfBufferSize to <I> in tests | stripe_veneur | train | go |
93229da6a7050d45afda7b7d7068a4c5eaa00f40 | diff --git a/sdl/render.go b/sdl/render.go
index <HASH>..<HASH> 100644
--- a/sdl/render.go
+++ b/sdl/render.go
@@ -191,6 +191,9 @@ static int SDLCALL SDL_GetTextureScaleMode(SDL_Texture * texture, SDL_ScaleMode
}
static int SDL_LockTextureToSurface(SDL_Texture *texture, const SDL_Rect *rect, SDL_Surface **surface)
+{
+ return -1;
+}
#endif | sdl/render: fix broken build on older SDL2 | veandco_go-sdl2 | train | go |
2b8588c1d526a99c229d01483081f6099dc4d324 | diff --git a/datajoint/schema.py b/datajoint/schema.py
index <HASH>..<HASH> 100644
--- a/datajoint/schema.py
+++ b/datajoint/schema.py
@@ -193,7 +193,7 @@ class Schema:
# add table definition to the doc string
if isinstance(table_class.definition, str):
- table_class.__doc__ = ((table_class.__doc__ or "") + "\n\nTable definition:\n"
+ table_class.__doc__ = ((table_class.__doc__ or "") + "\nTable definition:\n\n"
+ table_class.describe(printout=False, context=context))
# fill values in Lookup tables from their contents property | minor improvement in display of table doc strings | datajoint_datajoint-python | train | py |
b58ef6412d1d7399dcb900cf9842e4fc977ecdd2 | diff --git a/FlowCal/mef.py b/FlowCal/mef.py
index <HASH>..<HASH> 100644
--- a/FlowCal/mef.py
+++ b/FlowCal/mef.py
@@ -333,9 +333,9 @@ def fit_beads_autofluorescence(fl_rfi, fl_mef):
-----
The following model is used to describe bead fluorescence::
- m*log(fl_mef[i]) + b = log(fl_mef_auto + fl_mef[i])
+ m*log(fl_rfi[i]) + b = log(fl_mef_auto + fl_mef[i])
- where ``fl_mef[i]`` is the fluorescence of bead subpopulation ``i`` in
+ where ``fl_rfi[i]`` is the fluorescence of bead subpopulation ``i`` in
RFI units and ``fl_mef[i]`` is the corresponding fluorescence in MEF
units. The model includes 3 parameters: ``m`` (slope), ``b``
(intercept), and ``fl_mef_auto`` (bead autofluorescence). The last term | Corrected typo in mef.fit_beads_autofluorescence's docstring. | taborlab_FlowCal | train | py |
e1f2869396e8f3ee0d62df63aa97cd5f7cfc1dd2 | diff --git a/src/gl/texture.js b/src/gl/texture.js
index <HASH>..<HASH> 100644
--- a/src/gl/texture.js
+++ b/src/gl/texture.js
@@ -137,7 +137,19 @@ export default class Texture {
let image = new Image();
image.onload = () => {
try {
- this.setElement(image, options);
+ // For data URL images, first draw the image to a separate canvas element. Workaround for
+ // obscure bug seen with small (<28px) SVG images encoded as data URLs in Chrome and Safari.
+ if (this.url.slice(0, 5) === 'data:') {
+ const canvas = document.createElement('canvas');
+ const ctx = canvas.getContext('2d');
+ canvas.width = image.width;
+ canvas.height = image.height;
+ ctx.drawImage(image, 0, 0);
+ this.setElement(canvas, options);
+ }
+ else {
+ this.setElement(image, options);
+ }
}
catch (e) {
this.loaded = false; | workaround for obscure bug seen with small (<<I>px) SVG images encoded as data URLs in Chrome and Safari | tangrams_tangram | train | js |
be39d1cfbc71774423900943fd94453739af79a8 | diff --git a/can/io/sqlite.py b/can/io/sqlite.py
index <HASH>..<HASH> 100644
--- a/can/io/sqlite.py
+++ b/can/io/sqlite.py
@@ -25,7 +25,7 @@ if sys.version_info > (3,):
buffer = memoryview
-@deprecated(version='2.1', reason="Use the name SqliteReader instead")
+@deprecated(reason="Use the name SqliteReader instead. (Replaced in v2.1)")
class SqlReader:
"""
Reads recorded CAN messages from a simple SQL database. | removed the version attribute from the deprecated decorator | hardbyte_python-can | train | py |
1d139749ea96f42cf8647d69ca428bb7c5350774 | diff --git a/lib/mangopay/client.rb b/lib/mangopay/client.rb
index <HASH>..<HASH> 100644
--- a/lib/mangopay/client.rb
+++ b/lib/mangopay/client.rb
@@ -1,5 +1,17 @@
module MangoPay
class Client < Resource
- include MangoPay::HTTPCalls::Create
+ def self.create(params)
+ uri = URI(MangoPay.configuration.root_url + '/api/clients/')
+ res = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
+ puts uri.request_uri
+ request = Net::HTTP::Post.new(uri.request_uri, {
+ 'user_agent' => "MangoPay V1 RubyBindings/#{MangoPay::VERSION}",
+ 'Content-Type' => 'application/json'
+ })
+ request.body = MangoPay::JSON.dump(params)
+ http.request request
+ end
+ MangoPay::JSON.load(res.body)
+ end
end
end | Now use an outside method to create the client account | Mangopay_mangopay2-ruby-sdk | train | rb |
6596903b060164b5e77a8e4c077257266cf0ce6f | diff --git a/demo/app.js b/demo/app.js
index <HASH>..<HASH> 100644
--- a/demo/app.js
+++ b/demo/app.js
@@ -46,7 +46,7 @@ Abba.InputsView.prototype = {
return {
label: $row.find('.label-input').val(),
numSuccesses: parseInt($row.find('.num-successes-input').val()),
- numSamples: parseInt($row.find('.num-samples-input').val()),
+ numSamples: parseInt($row.find('.num-samples-input').val())
};
},
@@ -154,7 +154,7 @@ Abba.Presenter.prototype = {
var baseline = variations.shift();
return {
baseline: baseline,
- variations: variations,
+ variations: variations
};
},
@@ -188,4 +188,4 @@ Abba.Presenter.prototype = {
};
return Abba;
-}(Abba || {}, jQuery, Hash));
\ No newline at end of file
+}(Abba || {}, jQuery, Hash)); | Fix some trailing commas for IE | thii_abbajs | train | js |
d9b934a5d3c5ef5bade2c9c51dc474783e33a8b4 | diff --git a/salt/config.py b/salt/config.py
index <HASH>..<HASH> 100644
--- a/salt/config.py
+++ b/salt/config.py
@@ -61,6 +61,7 @@ VALID_OPTS = {
'master_sign_key_name': str,
'master_sign_pubkey': bool,
'verify_master_pubkey_sign': bool,
+ 'always_verify_signature': bool,
'master_pubkey_signature': str,
'master_use_pubkey_signature': bool,
'syndic_finger': str,
@@ -260,6 +261,7 @@ DEFAULT_MINION_OPTS = {
'master_shuffle': False,
'master_alive_interval': 0,
'verify_master_pubkey_sign': False,
+ 'always_verify_signature': False,
'master_sign_key_name': 'master_sign',
'syndic_finger': '',
'user': 'root', | add switch to always verify the masters pubkey
for the paranoid among us, this makes it possible to always verify
the masters auth-replies, even if the public key has not changed | saltstack_salt | train | py |
c36df2f04523a38b2edda2445a899bc63d79448d | diff --git a/drivers/overlay/overlay.go b/drivers/overlay/overlay.go
index <HASH>..<HASH> 100644
--- a/drivers/overlay/overlay.go
+++ b/drivers/overlay/overlay.go
@@ -620,7 +620,7 @@ func supportsOverlay(home string, homeMagic graphdriver.FsMagic, rootUID, rootGI
if len(flags) < unix.Getpagesize() {
err := unix.Mount("overlay", mergedDir, "overlay", 0, flags)
if err == nil {
- logrus.Errorf("overlay test mount with multiple lowers failed, but succeeded with a single lower")
+ logrus.StandardLogger().Logf(logLevel, "overlay test mount with multiple lowers failed, but succeeded with a single lower")
return supportsDType, errors.Wrap(graphdriver.ErrNotSupported, "kernel too old to provide multiple lowers feature for overlay")
}
logrus.Debugf("overlay test mount with a single lower failed %v", err) | Log expected rootless overlay mount failures as debug level
Most linux kernels do not support overlay mounts in rootless mode,
we should not be reporting this as an error, but drop it to debug
level.
Fixes: <URL> | containers_storage | train | go |
bb4b68e6ffe8a8b57f3847e8b88bc3a3f75f6447 | diff --git a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/CachingHiveMetastore.java b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/CachingHiveMetastore.java
index <HASH>..<HASH> 100644
--- a/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/CachingHiveMetastore.java
+++ b/presto-hive-metastore/src/main/java/com/facebook/presto/hive/metastore/CachingHiveMetastore.java
@@ -670,7 +670,11 @@ public class CachingHiveMetastore
HivePartitionName hivePartitionName = hivePartitionName(databaseName, tableName, partitionNameWithVersion.getPartitionName());
KeyAndContext<HivePartitionName> partitionNameKey = getCachingKey(metastoreContext, hivePartitionName);
Optional<Partition> partition = partitionCache.getIfPresent(partitionNameKey);
- if (partition != null && partition.isPresent()) {
+ if (partition == null || !partition.isPresent()) {
+ partitionCache.invalidate(partitionNameKey);
+ partitionStatisticsCache.invalidate(partitionNameKey);
+ }
+ else {
Optional<Long> partitionVersion = partition.get().getPartitionVersion();
if (!partitionVersion.isPresent() || !partitionVersion.equals(partitionNameWithVersion.getPartitionVersion())) {
partitionCache.invalidate(partitionNameKey); | Invalidate cache if partition is not present
Invalidating the partition cache if partition object is not present. | prestodb_presto | train | java |
13a3aa3aa49fe59aeaf4957cef0ef328a9553990 | diff --git a/test/test_client.py b/test/test_client.py
index <HASH>..<HASH> 100644
--- a/test/test_client.py
+++ b/test/test_client.py
@@ -157,12 +157,6 @@ class TestClient(IntegrationTest, TestRequestMixin):
# No error.
connected(MongoClient())
- def assertIsInstance(self, obj, cls, msg=None):
- """Backport from Python 2.7."""
- if not isinstance(obj, cls):
- standardMsg = '%r is not an instance of %r' % (obj, cls)
- self.fail(self._formatMessage(msg, standardMsg))
-
def test_init_disconnected(self):
c = rs_or_single_client(connect=False) | Remove "assertIsInstance" backport.
No longer required since we use unittest2 on Python <I>, and the method is
in the standard library for Python <I>+. | mongodb_mongo-python-driver | train | py |
ddb5f7bb014b35e3c456f2dc8c2880a368b88a3d | diff --git a/test/glimpse/views/git_test.rb b/test/glimpse/views/git_test.rb
index <HASH>..<HASH> 100644
--- a/test/glimpse/views/git_test.rb
+++ b/test/glimpse/views/git_test.rb
@@ -1,13 +1,33 @@
require 'test_helper'
describe Glimpse::Views::Git do
- before do
- @git = Glimpse::Views::Git.new(:nwo => 'github/test', :sha => '123')
- end
-
describe "compare url" do
+ before do
+ @git = Glimpse::Views::Git.new(:nwo => 'github/test', :sha => '123')
+ end
+
it "should return the full url" do
assert_equal 'https://github.com/github/test/compare/master...123', @git.compare_url
end
end
+
+ describe "sha" do
+ before do
+ @git = Glimpse::Views::Git.new(:sha => '123')
+ end
+
+ it "should return correct sha" do
+ assert_equal '123', @git.sha
+ end
+ end
+
+ describe "branch name" do
+ before do
+ @git = Glimpse::Views::Git.new(:sha => '123', :branch_name => 'glimpse')
+ end
+
+ it "should return correct branch name" do
+ assert_equal 'glimpse', @git.branch_name
+ end
+ end
end | Add some Glimpse::Views::Git tests | peek_peek | train | rb |
665f60e4af7e621471564a651e185e6401ff28ca | diff --git a/client/webpack.config.js b/client/webpack.config.js
index <HASH>..<HASH> 100644
--- a/client/webpack.config.js
+++ b/client/webpack.config.js
@@ -386,6 +386,10 @@ const webpackConfig = {
release: `calypso_${ process.env.COMMIT_SHA }`,
include: filePaths.path,
urlPrefix: `~${ filePaths.publicPath }`,
+ errorHandler: ( err, invokeErr, compilation ) => {
+ // Sentry should _never_ fail the webpack build, so only emit warnings here:
+ compilation.warnings.push( 'Sentry CLI Plugin: ' + err.message );
+ },
} ),
].filter( Boolean ),
externals: [ 'keytar' ], | Never fail webpack build from sentry (#<I>) | Automattic_wp-calypso | train | js |
60704cb85f4e512e0acd9b144d6599c3b3763820 | diff --git a/testing/test_detail_page.py b/testing/test_detail_page.py
index <HASH>..<HASH> 100644
--- a/testing/test_detail_page.py
+++ b/testing/test_detail_page.py
@@ -14,7 +14,7 @@ from k2catalogue import detail_object
])
def test_detail_url(input, expected):
epic_object = mock.Mock(epic_id=input)
- url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{}'
+ url_root = 'http://deneb.astro.warwick.ac.uk/phrlbj/k2varcat/objects/{0}'
assert detail_object.DetailObject(epic_object).url == url_root.format(
expected) | Update format placeholder to be <I> compatible | mindriot101_k2catalogue | train | py |
542bbae3fc40edb30536d92dca2e1eef25c3254f | diff --git a/tests/Redisearch/IndexTest.php b/tests/Redisearch/IndexTest.php
index <HASH>..<HASH> 100644
--- a/tests/Redisearch/IndexTest.php
+++ b/tests/Redisearch/IndexTest.php
@@ -301,7 +301,7 @@ class ClientTest extends TestCase
$this->assertEquals($expectedDocumentCount, count($result->getDocuments()));
}
- private function makeDocuments($count = 30000): array
+ private function makeDocuments($count = 3000): array
{
$documents = [];
foreach (range(1, $count) as $id) { | Decrease number of docs for batch indexing tests | ethanhann_redisearch-php | train | php |
d07e6622d618a0004f42bbbb268966264a4e8013 | diff --git a/rollup.config.js b/rollup.config.js
index <HASH>..<HASH> 100644
--- a/rollup.config.js
+++ b/rollup.config.js
@@ -1,6 +1,7 @@
import vue from 'rollup-plugin-vue';
import babel from 'rollup-plugin-babel';
import resolve from 'rollup-plugin-node-resolve';
+import uglify from 'rollup-plugin-uglify';
export default {
entry: './src/index.js',
@@ -8,7 +9,8 @@ export default {
plugins: [
resolve(),
vue({compileTemplate: true}),
- babel()
+ babel(),
+ uglify()
],
format: 'umd',
moduleName: 'vueYandexMaps' | minify main dist js | PNKBizz_vue-yandex-map | train | js |
aa8ca1202e40c6bbeaf03519dd6d74821cf0e6d5 | diff --git a/src/org/zaproxy/zap/extension/alert/AlertTreeModel.java b/src/org/zaproxy/zap/extension/alert/AlertTreeModel.java
index <HASH>..<HASH> 100644
--- a/src/org/zaproxy/zap/extension/alert/AlertTreeModel.java
+++ b/src/org/zaproxy/zap/extension/alert/AlertTreeModel.java
@@ -219,7 +219,9 @@ class AlertTreeModel extends DefaultTreeModel {
// Parent has no other children, remove it also
this.removeNodeFromParent(parent);
nodeStructureChanged((AlertNode) this.getRoot());
- }
+ } else if (parent.getUserObject() == node.getUserObject()) {
+ parent.setUserObject(parent.getChildAt(0).getUserObject());
+ }
}
} | Issue <I> - NullPointerException while selecting a node in the "Alerts" tab after deleting a message
Changed AlertTreeModel to set another alert to parent's leaf if it contains the alert of the deleted leaf. | zaproxy_zaproxy | train | java |
ef0826acd621a65be8ccb4cb1977b83d1b615e4d | diff --git a/commands/command.go b/commands/command.go
index <HASH>..<HASH> 100644
--- a/commands/command.go
+++ b/commands/command.go
@@ -103,11 +103,12 @@ func (c *Command) Call(req Request) Response {
if err != nil {
// if returned error is a commands.Error, use its error code
// otherwise, just default the code to ErrNormal
- var e Error
- e, ok := err.(Error)
- if ok {
+ switch e := err.(type) {
+ case *Error:
res.SetError(e, e.Code)
- } else {
+ case Error:
+ res.SetError(e, e.Code)
+ default:
res.SetError(err, ErrNormal)
}
return res | fix(commands/err)
I didn't know there were dragons here.
When casting errors we've gotta be careful. Apparently both values and
pointers satisfy the error interface. Type checking for one doesn't
catch the other.
cc @whyrusleeping @mappum @jbenet
License: MIT | ipfs_go-ipfs | train | go |
6a607abbb88da00301c50af38e144ad24314eee7 | diff --git a/pandoc_tablenos.py b/pandoc_tablenos.py
index <HASH>..<HASH> 100755
--- a/pandoc_tablenos.py
+++ b/pandoc_tablenos.py
@@ -112,8 +112,11 @@ def attach_attrs_table(key, value, fmt, meta):
else:
assert len(value) == 6
assert value[1]['t'] == 'Caption'
- assert value[1]['c'][1][0]['t'] == 'Plain'
- caption = value[1]['c'][1][0]['c']
+ if value[1]['c'][1]:
+ assert value[1]['c'][1][0]['t'] == 'Plain'
+ caption = value[1]['c'][1][0]['c']
+ else:
+ return # There is no caption
# Set n to the index where the attributes start
n = 0
@@ -158,7 +161,10 @@ def _process_table(value, fmt):
if version(PANDOCVERSION) < version('2.10'):
table['caption'] = value[1]
else:
- table['caption'] = value[1]['c'][1][0]['c']
+ if value[1]['c'][1]:
+ table['caption'] = value[1]['c'][1][0]['c']
+ else:
+ table['caption'] = []
# Bail out if the label does not conform to expectations
if not LABEL_PATTERN.match(attrs.id): | Fixed processing of uncaptioned tables with pandoc <I>. (pandoc-fignos Issue #<I>) | tomduck_pandoc-tablenos | train | py |
bc1bacfcd4a4ecd4f87df34ee15c3aa0b74dfc8d | diff --git a/debug/init.js b/debug/init.js
index <HASH>..<HASH> 100644
--- a/debug/init.js
+++ b/debug/init.js
@@ -197,6 +197,7 @@ $(function(){
});
});
+/*
$container2.cy({
elements: {
nodes: [ { data: { id: 'n0' } }, { data: { id: 'n1' } } ],
@@ -207,7 +208,7 @@ $(function(){
window.cy2 = this;
}
});
-
+*/
$("#remove-elements-button").click(function(){
var n = number("nodes");
var e = number("edges"); | disable the container 2
yue can reenable this on his local copy for debugging | cytoscape_cytoscape.js | train | js |
7eab821a982c8201a584f3dea5afe173bde70cd5 | diff --git a/stellar_base/operation.py b/stellar_base/operation.py
index <HASH>..<HASH> 100644
--- a/stellar_base/operation.py
+++ b/stellar_base/operation.py
@@ -544,6 +544,7 @@ class AllowTrust(Operation):
raise NotImplementedError(
"Operation of asset_type={} is not implemented"
".".format(asset_type.type))
+ asset_code = asset_code.rstrip('\x00')
return cls(
source=source, | fix(Operation.AllowTrust): AllowTrust.from_xdr_object should return asset_code properly. | StellarCN_py-stellar-base | train | py |
3242c98f964559b0a1b0d6c695fe21008c4f67c0 | diff --git a/wallace/custom.py b/wallace/custom.py
index <HASH>..<HASH> 100644
--- a/wallace/custom.py
+++ b/wallace/custom.py
@@ -114,11 +114,11 @@ def api_agent_create():
# Generate the right kind of newcomer.
try:
assert(issubclass(exp.agent_type_generator, models.Node))
- agent_type_generator = lambda: exp.agent_type_generator
+ agent_type_generator = lambda network=net: exp.agent_type_generator
except:
- agent_type_generator = agent_type_generator
+ agent_type_generator = exp.agent_type_generator
- newcomer_type = agent_type_generator()
+ newcomer_type = agent_type_generator(network=net)
newcomer = newcomer_type(participant_uuid=participant_uuid)
session.add(newcomer)
session.commit()
@@ -153,6 +153,7 @@ def api_agent_create():
def api_transmission(transmission_uuid):
exp = experiment(session)
+ session.commit()
if request.method == 'GET': | Update custom.py to work with Rogers | berkeley-cocosci_Wallace | train | py |
1f02e1671bd2ee61a358dc1a5efa3cfacc751822 | diff --git a/tests/unit/states/cron_test.py b/tests/unit/states/cron_test.py
index <HASH>..<HASH> 100644
--- a/tests/unit/states/cron_test.py
+++ b/tests/unit/states/cron_test.py
@@ -214,7 +214,7 @@ class CronTestCase(TestCase):
cron.present(
name='foo',
hour='1',
- comment='Second crontab\nmulti-line-comment\n',
+ comment='Second crontab\nmulti-line comment\n',
identifier='2',
user='root')
self.assertEqual( | cron: fix a typo in the tests | saltstack_salt | train | py |
ff017636266cae9171b9a693e92c0e35910cd9c3 | diff --git a/ImageCacheProvider.js b/ImageCacheProvider.js
index <HASH>..<HASH> 100644
--- a/ImageCacheProvider.js
+++ b/ImageCacheProvider.js
@@ -36,11 +36,18 @@ function getQueryForCacheKey(url, useQueryParamsInCacheKey) {
function generateCacheKey(url, options) {
const parsedUrl = new URL(url, null, true);
- const parts = parsedUrl.pathname.split('.');
+
+ const pathParts = parsedUrl.pathname.split('/');
+
+ // last path part is the file name
+ const fileName = pathParts.pop();
+ const filePath = pathParts.join('/');
+
+ const parts = fileName.split('.');
// TODO - try to figure out the file type or let the user provide it, for now use jpg as default
const type = parts.length > 1 ? parts.pop() : 'jpg';
- const pathname = parts.join('.');
- const cacheable = pathname + getQueryForCacheKey(parsedUrl, options.useQueryParamsInCacheKey);
+
+ const cacheable = filePath + fileName + type + getQueryForCacheKey(parsedUrl, options.useQueryParamsInCacheKey);
return SHA1(cacheable) + '.' + type;
} | better resolve type of files from url without typename and with dots in them | kfiroo_react-native-cached-image | train | js |
10a0015b8797c8b4ba84f513887205b8cfd08c80 | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -61,7 +61,7 @@ ElasticsearchStream.prototype._write = function (entry, encoding, callback) {
};
var self = this;
- client.create(options, function (err, resp) {
+ client.index(options, function (err, resp) {
if (err) {
self.emit('error', err);
} | Use index instead of create, for ES 5 | simianhacker_bunyan-elasticsearch | train | js |
c8fb4e913e5a06fa214a885b6a8637c15df70493 | diff --git a/pygsp/filters/filter.py b/pygsp/filters/filter.py
index <HASH>..<HASH> 100644
--- a/pygsp/filters/filter.py
+++ b/pygsp/filters/filter.py
@@ -1,5 +1,7 @@
# -*- coding: utf-8 -*-
+from __future__ import division
+
from math import log
from copy import deepcopy | filters: float division for python 2 | epfl-lts2_pygsp | train | py |
bf0807134f12e4970586f5a9db019a6a94965bc4 | diff --git a/lib/perpetuity/mapper.rb b/lib/perpetuity/mapper.rb
index <HASH>..<HASH> 100644
--- a/lib/perpetuity/mapper.rb
+++ b/lib/perpetuity/mapper.rb
@@ -60,7 +60,6 @@ module Perpetuity
end
def insert
- raise "#{object} is invalid and cannot be persisted." if object.respond_to?(:valid?) and !object.valid?
raise "#{object} is invalid and cannot be persisted." unless validations.valid?(object)
serializable_attributes = {}
serializable_attributes[:id] = object.instance_eval(&self.class.id) unless self.class.id.nil?
diff --git a/spec/mapper_spec.rb b/spec/mapper_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/mapper_spec.rb
+++ b/spec/mapper_spec.rb
@@ -56,12 +56,6 @@ describe Perpetuity::Mapper do
BookMapper.insert book
BookMapper.first.id.should == 'my-title'
end
-
- it "checks for object validity before persisting" do
- invalid_article = Article.new(title=nil)
- invalid_article.stub(valid?: nil)
- expect { ArticleMapper.insert(invalid_article) }.to raise_error
- end
end
describe "deletion" do | Remove check for an object's `valid?` method
Having an object tell the ORM whether or not it is valid places
persistence concerns within the business objects, which is not what we
want to do. | jgaskins_perpetuity | train | rb,rb |
4d6a0526324c3dc24b0b3f5902ab2dc65f6dd2c2 | diff --git a/src/main/java/org/fit/layout/tools/ParamsPanel.java b/src/main/java/org/fit/layout/tools/ParamsPanel.java
index <HASH>..<HASH> 100644
--- a/src/main/java/org/fit/layout/tools/ParamsPanel.java
+++ b/src/main/java/org/fit/layout/tools/ParamsPanel.java
@@ -197,11 +197,14 @@ public class ParamsPanel extends JPanel implements ChangeListener, DocumentListe
public void reloadParams()
{
- boolean a = autosave;
- autosave = false;
- this.params = ServiceManager.getServiceParams(op);
- setParams(this.params);
- autosave = a;
+ if (op != null)
+ {
+ boolean a = autosave;
+ autosave = false;
+ this.params = ServiceManager.getServiceParams(op);
+ setParams(this.params);
+ autosave = a;
+ }
}
//====================================================================================== | Fix param panel reloading when no operation is bound | FitLayout_tools | train | java |
8cd4395deaf894c69c6b46cac34c0de872e1caa4 | diff --git a/bcbio/srna/group.py b/bcbio/srna/group.py
index <HASH>..<HASH> 100644
--- a/bcbio/srna/group.py
+++ b/bcbio/srna/group.py
@@ -5,10 +5,13 @@ import shutil
from collections import namedtuple
import pysam
-from seqcluster import prepare_data as prepare
-from seqcluster import make_clusters as main_cluster
-from seqcluster.libs.inputs import parse_ma_file
-from seqcluster.libs import parse
+try:
+ from seqcluster import prepare_data as prepare
+ from seqcluster import make_clusters as main_cluster
+ from seqcluster.libs.inputs import parse_ma_file
+ from seqcluster.libs import parse
+except ImportError:
+ pass
from bcbio.utils import file_exists, safe_makedir
from bcbio.provenance import do
diff --git a/bcbio/srna/sample.py b/bcbio/srna/sample.py
index <HASH>..<HASH> 100644
--- a/bcbio/srna/sample.py
+++ b/bcbio/srna/sample.py
@@ -3,7 +3,10 @@ import sys
import os.path as op
import shutil
from collections import Counter
-from seqcluster.libs.fastq import collapse, write_output
+try:
+ from seqcluster.libs.fastq import collapse, write_output
+except ImportError:
+ pass
from bcbio.utils import (splitext_plus, file_exists, append_stem, replace_directory)
from bcbio.provenance import do | Make seqcluster requirement soft (cc @lpantano)
- Do not require seqcluster if not running the small RNA pipeline.
- Preparation for adding seqcluster as a soft dependency of bcbio
and checking issues with circular requirements. | bcbio_bcbio-nextgen | train | py,py |
b68c43e3e393c8f3a9262a4a39c556a095e676f1 | diff --git a/python_modules/libraries/dagster-pyspark/dagster_pyspark/__init__.py b/python_modules/libraries/dagster-pyspark/dagster_pyspark/__init__.py
index <HASH>..<HASH> 100644
--- a/python_modules/libraries/dagster-pyspark/dagster_pyspark/__init__.py
+++ b/python_modules/libraries/dagster-pyspark/dagster_pyspark/__init__.py
@@ -5,3 +5,8 @@ from .types import DataFrame
from .version import __version__
check_dagster_package_version('dagster-pyspark', __version__)
+
+__all__ = [
+ 'DataFrame',
+ 'pyspark_resource',
+] | set __all__ for dagster-pyspark
Test Plan: bk
Reviewers: nate
Reviewed By: nate
Differential Revision: <URL> | dagster-io_dagster | train | py |
6b49eb8b72f9670656f9e01c29c49dbc62cea023 | diff --git a/mod/data/lib.php b/mod/data/lib.php
index <HASH>..<HASH> 100755
--- a/mod/data/lib.php
+++ b/mod/data/lib.php
@@ -1453,7 +1453,7 @@ function data_print_comments($data, $record, $page=0, $mform=false) {
if (!$mform and !$editor) {
echo '<div class="newcomment" style="text-align:center">';
- echo '<a href="view.php?d='.$data->id.'&page='.$page.'&mode=single&addcomment=1">'.get_string('addcomment', 'data').'</a>';
+ echo '<a href="view.php?d='.$data->id.'&rid='.$record->id.'&mode=single&addcomment=1">'.get_string('addcomment', 'data').'</a>';
echo '</div>';
} else {
if (!$mform) { | "DATA/MDL-<I>, use rid instead page to comment a record, merged from <I>" | moodle_moodle | train | php |
d81fee0912313d4b1817affd98a16c795a2b6bd4 | diff --git a/tests/test_gnupg.py b/tests/test_gnupg.py
index <HASH>..<HASH> 100644
--- a/tests/test_gnupg.py
+++ b/tests/test_gnupg.py
@@ -201,7 +201,6 @@ class GPGTestCase(unittest.TestCase):
else:
log.warn("Can't delete homedir: '%s' not a directory"
% self.homedir)
- log.warn("%s%s%s" % (os.linesep, str("=" * 70), os.linesep))
def test_parsers_fix_unsafe(self):
"""Test that unsafe inputs are quoted out and then ignored.""" | Remove the log.warn line that printed dividers between unittest runs. | isislovecruft_python-gnupg | train | py |
ee8ab7b85eac23d5a0d77f0309aa7d65897768fb | diff --git a/test/spec/modules/aduptechBidAdapter_spec.js b/test/spec/modules/aduptechBidAdapter_spec.js
index <HASH>..<HASH> 100644
--- a/test/spec/modules/aduptechBidAdapter_spec.js
+++ b/test/spec/modules/aduptechBidAdapter_spec.js
@@ -532,8 +532,8 @@ describe('AduptechBidAdapter', () => {
const bidderRequest = {
auctionId: 'auctionId123',
refererInfo: {
- canonicalUrl: 'http://crazy.canonical.url',
- referer: 'http://crazy.referer.url'
+ page: 'http://crazy.canonical.url',
+ ref: 'http://crazy.referer.url'
},
gdprConsent: {
consentString: 'consentString123',
@@ -572,8 +572,8 @@ describe('AduptechBidAdapter', () => {
method: ENDPOINT_METHOD,
data: {
auctionId: bidderRequest.auctionId,
- pageUrl: bidderRequest.refererInfo.canonicalUrl,
- referrer: bidderRequest.refererInfo.referer,
+ pageUrl: bidderRequest.refererInfo.page,
+ referrer: bidderRequest.refererInfo.ref,
gdpr: {
consentString: bidderRequest.gdprConsent.consentString,
consentRequired: bidderRequest.gdprConsent.gdprApplies | Aduptech bid adapter: fix failing test (#<I>) | prebid_Prebid.js | train | js |
9a3a59eaaefd175379963a3a6048bdb5b3950fb8 | diff --git a/actionview/test/template/sanitize_helper_test.rb b/actionview/test/template/sanitize_helper_test.rb
index <HASH>..<HASH> 100644
--- a/actionview/test/template/sanitize_helper_test.rb
+++ b/actionview/test/template/sanitize_helper_test.rb
@@ -1,7 +1,7 @@
require 'abstract_unit'
-# The exhaustive tests are in test/template/html-scanner/sanitizer_test.rb
-# This tests the that the helpers hook up correctly to the sanitizer classes.
+# The exhaustive tests are in test/controller/html/sanitizer_test.rb.
+# This tests that the helpers hook up correctly to the sanitizer classes.
class SanitizeHelperTest < ActionView::TestCase
tests ActionView::Helpers::SanitizeHelper
@@ -49,7 +49,7 @@ class SanitizeHelperTest < ActionView::TestCase
stripped = strip_tags(blank)
assert_equal blank, stripped
end
-
+
# Actual: "something "
assert_equal "something <img onerror=alert(1337)", ERB::Util.html_escape(strip_tags("something <img onerror=alert(1337)"))
end | Fixed: spelling mistake in SanitizeHelperTest. | rails_rails | train | rb |
60679cde34d1bc43d7229a65b28e8f60dfb7487f | diff --git a/js/cbrowser.js b/js/cbrowser.js
index <HASH>..<HASH> 100644
--- a/js/cbrowser.js
+++ b/js/cbrowser.js
@@ -1733,7 +1733,7 @@ Browser.prototype.addViewListener = function(handler, opts) {
Browser.prototype.notifyLocation = function() {
var nvs = Math.max(1, this.viewStart|0);
var nve = this.viewEnd|0;
- if (this.currentSeqMax && nve > this.currentSeqMax)
+ if (this.currentSeqMax > 0 && nve > this.currentSeqMax)
nve = this.currentSeqMax;
for (var lli = 0; lli < this.viewListeners.length; ++lli) { | Fix reported coordinaes when currentSeqMax isn't set. | dasmoth_dalliance | train | js |
8c3463b877e5046dfce27881c7bcab63c20f6107 | diff --git a/releaf-i18n/app/controllers/releaf/translations_controller.rb b/releaf-i18n/app/controllers/releaf/translations_controller.rb
index <HASH>..<HASH> 100644
--- a/releaf-i18n/app/controllers/releaf/translations_controller.rb
+++ b/releaf-i18n/app/controllers/releaf/translations_controller.rb
@@ -84,7 +84,7 @@ module Releaf
relation = relation.joins(sql % ([locale] * 4))
end
- relation.select(columns_for_select)
+ relation.select(columns_for_select).order(:key)
end
# overwrite leaf base class | TranslationsController: order translations by key | cubesystems_releaf | train | rb |
ae075ebe448b71bc875a538078dc5183e4a6e017 | diff --git a/config/test/ConfigPanel.php b/config/test/ConfigPanel.php
index <HASH>..<HASH> 100644
--- a/config/test/ConfigPanel.php
+++ b/config/test/ConfigPanel.php
@@ -1,7 +1,7 @@
<?php
-$config['db']['dbname'] = 'framework_test';
-$config['db']['user'] = 'thulium_1';
-$config['db']['pass'] = 'a';
+$config['db']['dbname'] = 'ouzo_test';
+$config['db']['user'] = 'postgres';
+$config['db']['pass'] = '';
$config['db']['driver'] = 'pgsql';
$config['db']['host'] = '127.0.0.1';
$config['db']['port'] = '5432'; | Updated ConfigPanel.php to be compatibile with travis settings. | letsdrink_ouzo | train | php |
e6e2f62a7ad97cc7b55568376d72b0dcad4b6592 | diff --git a/src/router-configuration.js b/src/router-configuration.js
index <HASH>..<HASH> 100644
--- a/src/router-configuration.js
+++ b/src/router-configuration.js
@@ -30,6 +30,9 @@ export class RouterConfiguration {
* @chainable
*/
addPipelineStep(name: string, step: Function|PipelineStep): RouterConfiguration {
+ if (step === null || step === undefined) {
+ throw new Error('Pipeline step cannot be null or undefined.');
+ }
this.pipelineSteps.push({name, step});
return this;
} | fix(router-configuration): throw early on invalid pipeline steps | aurelia_router | train | js |
16c01436c1d1c49162c6fb258776a69920b9b0eb | diff --git a/lib/rollbar/exception_reporter.rb b/lib/rollbar/exception_reporter.rb
index <HASH>..<HASH> 100644
--- a/lib/rollbar/exception_reporter.rb
+++ b/lib/rollbar/exception_reporter.rb
@@ -1,7 +1,8 @@
module Rollbar
module ExceptionReporter
def report_exception_to_rollbar(env, exception)
- Rollbar.log_debug "[Rollbar] Reporting exception: #{exception.try(:message)}"
+ exception_message = exception.respond_to?(:message) ? exception.message : 'No Exception Message'
+ Rollbar.log_debug "[Rollbar] Reporting exception: #{exception_message}"
exception_data = Rollbar.log(Rollbar.configuration.uncaught_exception_level, exception) | replace usage of #try with #respond_to? | rollbar_rollbar-gem | train | rb |
a91347107a1aad72f186b338b82704f2b5f88c4f | diff --git a/src/rasterstats/io.py b/src/rasterstats/io.py
index <HASH>..<HASH> 100644
--- a/src/rasterstats/io.py
+++ b/src/rasterstats/io.py
@@ -183,7 +183,8 @@ def boundless_array(arr, window, nodata, masked=False):
window_shape = (wr_stop - wr_start, wc_stop - wc_start)
# create an array of nodata values
- out = np.ones(shape=window_shape) * nodata
+ out = np.empty(shape=window_shape)
+ out[:] = nodata
# Fill with data where overlapping
nr_start = olr_start - wr_start | Do not create array with ones, and then multiply value by nodata. Instead, create un-initialized array and set array values to nodata. This cuts the amount of memory-copy operations in half and thus, halves the operation time | perrygeo_python-rasterstats | train | py |
58259f1f5b970a19f6df59144acf353a7be10b90 | diff --git a/lib/bel/completion_rule.rb b/lib/bel/completion_rule.rb
index <HASH>..<HASH> 100644
--- a/lib/bel/completion_rule.rb
+++ b/lib/bel/completion_rule.rb
@@ -69,6 +69,7 @@ module BEL
})
end
+ # add the active_token length if we do not need to delete it
if active_token and actions.empty?
position_start += active_token.value.length
end | comment why we add length to start when no delete | OpenBEL_bel.rb | train | rb |
9f3f33e2cc01bbd50d37bebb8e0a1e8c20ca321c | diff --git a/lib/webrtc/call.js b/lib/webrtc/call.js
index <HASH>..<HASH> 100644
--- a/lib/webrtc/call.js
+++ b/lib/webrtc/call.js
@@ -128,7 +128,7 @@ MatrixCall.prototype._initWithInvite = function(event) {
this.state = 'ringing';
this.direction = 'inbound';
- // firefox and Safari's RTCPeerConnection doesn't add streams until it
+ // firefox and OpenWebRTC's RTCPeerConnection doesn't add streams until it
// starts getting media on them so we need to figure out whether a video
// channel has been offered by ourselves.
if (this.msg.offer.sdp.indexOf('m=video') > -1) { | s/Safari/OpenWebRTC/ | matrix-org_matrix-js-sdk | train | js |
5a514193f1f56c0e6764e13163b5835a3184dbb7 | diff --git a/core/src/main/java/org/mapfish/print/processor/jasper/MergeDataSourceProcessor.java b/core/src/main/java/org/mapfish/print/processor/jasper/MergeDataSourceProcessor.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/org/mapfish/print/processor/jasper/MergeDataSourceProcessor.java
+++ b/core/src/main/java/org/mapfish/print/processor/jasper/MergeDataSourceProcessor.java
@@ -19,6 +19,7 @@
package org.mapfish.print.processor.jasper;
+import com.google.common.annotations.Beta;
import com.google.common.collect.BiMap;
import com.google.common.collect.Lists;
import com.google.common.collect.Maps;
@@ -61,6 +62,7 @@ import javax.annotation.Nullable;
*
* @author Jesse on 9/6/2014.
*/
+@Beta
public final class MergeDataSourceProcessor extends AbstractProcessor<MergeDataSourceProcessor.In, MergeDataSourceProcessor.Out>
implements CustomDependencies {
private List<Source> sources = Lists.newArrayList(); | Mark MergeDataSourceProcessor as @Beta because I am not sure it is needed, it might be a work around for my ignorance of Jasper Reports | mapfish_mapfish-print | train | java |
1ceb2594014b52bf8c0e940c7c3b15b9bf089621 | diff --git a/packages/grpc-native-core/test/channel_test.js b/packages/grpc-native-core/test/channel_test.js
index <HASH>..<HASH> 100644
--- a/packages/grpc-native-core/test/channel_test.js
+++ b/packages/grpc-native-core/test/channel_test.js
@@ -132,7 +132,8 @@ describe('channel', function() {
grpc.connectivityState.IDLE);
});
});
- describe('watchConnectivityState', function() {
+ // This suite test appears to be triggering grpc/grpc#12932; skipping for now
+ describe.skip('watchConnectivityState', function() {
var channel;
beforeEach(function() {
channel = new grpc.Channel('localhost', insecureCreds, {}); | Skip a test suite that appears to be triggering a core assertion failure | grpc_grpc-node | train | js |
ad08c3f2f9ccc42aa51cce4de9c12cacce681987 | diff --git a/lib/clone-wars/version.rb b/lib/clone-wars/version.rb
index <HASH>..<HASH> 100644
--- a/lib/clone-wars/version.rb
+++ b/lib/clone-wars/version.rb
@@ -1,5 +1,5 @@
# encoding: utf-8
module CloneWars # :nodoc:
- VERSION = "0.0.3"
+ VERSION = "0.1.0"
end | Version bumping minor to a <I> | caleon_quickening | train | rb |
8a519366489734dadd5bd993c0fda01f63de6ac3 | diff --git a/pyramid_webassets/tests/test_webassets.py b/pyramid_webassets/tests/test_webassets.py
index <HASH>..<HASH> 100644
--- a/pyramid_webassets/tests/test_webassets.py
+++ b/pyramid_webassets/tests/test_webassets.py
@@ -183,7 +183,7 @@ class TestWebAssets(unittest.TestCase):
with self.assertRaises(Exception) as cm:
get_webassets_env_from_settings(settings, prefix='webassets')
- assert cm.exception.message == "You need to provide webassets.base_dir in your configuration"
+ assert cm.exception.message == "You need to provide webassets.base_dir in your configuration"
def test_includeme(self):
from pyramid_webassets import includeme
@@ -395,7 +395,7 @@ class TestAssetSpecs(TempDirHelper, unittest.TestCase):
with self.assertRaises(BundleError) as cm:
bundle.urls(self.env)
- assert cm.exception.args[0].message == 'No module named rabbits'
+ assert cm.exception.args[0].message == 'No module named rabbits'
def test_asset_spec_no_static_view(self):
from webassets import Bundle | Fixed two asserts in test when raising an exception
They were inside the with, so they were never executed. (Thanks to the cov plugin for this one) | sontek_pyramid_webassets | train | py |
cc35d3e4acbb8ade014776bfc55d5df5a0692cb5 | diff --git a/buildprocess/configureWebpack.js b/buildprocess/configureWebpack.js
index <HASH>..<HASH> 100644
--- a/buildprocess/configureWebpack.js
+++ b/buildprocess/configureWebpack.js
@@ -104,7 +104,7 @@ function configureWebpack(terriaJSBasePath, config, devMode, hot, MiniCssExtract
loader: 'babel-loader',
options: {
cacheDirectory: true,
- sourceMaps: !!devMode,
+ sourceMaps: true,
presets: [
[
'@babel/preset-env', | babel-loader to always generate source-maps | TerriaJS_terriajs | train | js |
b2e0880c92f95ac1833b3aac9cf092ebf1730c84 | diff --git a/bit/wallet.py b/bit/wallet.py
index <HASH>..<HASH> 100644
--- a/bit/wallet.py
+++ b/bit/wallet.py
@@ -958,7 +958,7 @@ class MultiSig:
else:
return unspent.script == script
- def sign(self, data):
+ def sign(self, data): # pragma: no cover
"""Signs some data which can be verified later by others using
the public key.
@@ -1261,7 +1261,7 @@ class MultiSigTestnet:
else:
return unspent.script == script
- def sign(self, data):
+ def sign(self, data): # pragma: no cover
"""Signs some data which can be verified later by others using
the public key. | Removes code coverage for function `~MultiSig.sign()` | ofek_bit | train | py |
8482c6c2e6489020f134d16085e284cf29f59459 | diff --git a/hcl2template/types.packer_config.go b/hcl2template/types.packer_config.go
index <HASH>..<HASH> 100644
--- a/hcl2template/types.packer_config.go
+++ b/hcl2template/types.packer_config.go
@@ -155,6 +155,7 @@ func (c *PackerConfig) evaluateLocalVariable(local *Local) hcl.Diagnostics {
return diags
}
c.LocalVariables[local.Name] = &Variable{
+ Name: local.Name,
DefaultValue: value,
Type: value.Type(),
} | evaluateLocalVariable: also pass the variable name | hashicorp_packer | train | go |
be729b262fb4c1be77532a363d19eca36b8b3725 | diff --git a/Module.php b/Module.php
index <HASH>..<HASH> 100644
--- a/Module.php
+++ b/Module.php
@@ -17,6 +17,7 @@
*/
namespace Rcm;
+
use Zend\Mvc\MvcEvent;
use Zend\Mvc\ResponseSender\SendResponseEvent;
use Zend\Console\Request as ConsoleRequest;
@@ -55,6 +56,13 @@ class Module
return;
}
+ $siteInfo = $serviceManager->get('Rcm\Service\SiteManager')
+ ->getCurrentSiteInfo();
+ setlocale(
+ LC_ALL,
+ $siteInfo['language']['iso639_1'] . '_' . $siteInfo['country']['iso2']
+ );
+
//Add Domain Checker
$routeListener = $serviceManager->get('Rcm\EventListener\RouteListener'); | Set php locale based on site country and language | reliv_Rcm | train | php |
6cb42db7873fdd48f0b1c0140289d85773dea3b8 | diff --git a/tinymce/widgets.py b/tinymce/widgets.py
index <HASH>..<HASH> 100644
--- a/tinymce/widgets.py
+++ b/tinymce/widgets.py
@@ -34,6 +34,14 @@ logger.setLevel(20)
def language_file_exists(language_code):
+ """
+ Check if TinyMCE has a language file for the specified lang code
+
+ :param language_code: language code
+ :type language_code: str
+ :return: check result
+ :rtype: bool
+ """
filename = '{0}.js'.format(language_code)
path = os.path.join('tinymce', 'js', 'tinymce', 'langs', filename)
return finders.find(path) is not None | Add docstring for language_file_exists | romanvm_django-tinymce4-lite | train | py |
30d2ec5f6e3cb7857db381ac57c47033ed31501c | diff --git a/lib/mobility/arel/nodes.rb b/lib/mobility/arel/nodes.rb
index <HASH>..<HASH> 100644
--- a/lib/mobility/arel/nodes.rb
+++ b/lib/mobility/arel/nodes.rb
@@ -5,10 +5,8 @@ module Mobility
class Unary < ::Arel::Nodes::Unary; end
class Binary < ::Arel::Nodes::Binary; end
class Grouping < ::Arel::Nodes::Grouping; end
- class Equality < ::Arel::Nodes::Equality; end
::Arel::Visitors::ToSql.class_eval do
- alias :visit_Mobility_Arel_Nodes_Equality :visit_Arel_Nodes_Equality
alias :visit_Mobility_Arel_Nodes_Grouping :visit_Arel_Nodes_Grouping
end
end
diff --git a/lib/mobility/arel/nodes/pg_ops.rb b/lib/mobility/arel/nodes/pg_ops.rb
index <HASH>..<HASH> 100644
--- a/lib/mobility/arel/nodes/pg_ops.rb
+++ b/lib/mobility/arel/nodes/pg_ops.rb
@@ -19,10 +19,6 @@ module Mobility
include ::Arel::OrderPredications
include ::Arel::AliasPredication
- def eq other
- Equality.new self, quoted_node(other)
- end
-
def lower
super self
end | Remove unused Mobility::Arel::Nodes::Equality | shioyama_mobility | train | rb,rb |
61452567a67b7b1aea36efa4a6fdb22527c6c893 | diff --git a/lib/xcodeproj/constants.rb b/lib/xcodeproj/constants.rb
index <HASH>..<HASH> 100644
--- a/lib/xcodeproj/constants.rb
+++ b/lib/xcodeproj/constants.rb
@@ -6,7 +6,7 @@ module Xcodeproj
# @return [String] The last known iOS SDK (stable).
#
- LAST_KNOWN_IOS_SDK = '6.1'
+ LAST_KNOWN_IOS_SDK = '7.0'
# @return [String] The last known OS X SDK (stable).
# | [Constants] Bump last known iOS version | CocoaPods_Xcodeproj | train | rb |
6c423d2b92ee8fbbabd3a97ca3682624741422bc | diff --git a/aiogram/utils/parts.py b/aiogram/utils/parts.py
index <HASH>..<HASH> 100644
--- a/aiogram/utils/parts.py
+++ b/aiogram/utils/parts.py
@@ -15,12 +15,13 @@ def split_text(text: str, length: int = MAX_MESSAGE_LENGTH) -> typing.List[str]:
return [text[i:i + length] for i in range(0, len(text), length)]
-def safe_split_text(text: str, length: int = MAX_MESSAGE_LENGTH) -> typing.List[str]:
+def safe_split_text(text: str, length: int = MAX_MESSAGE_LENGTH, split_separator: str = ' ') -> typing.List[str]:
"""
Split long text
:param text:
:param length:
+ :param split_separator
:return:
"""
# TODO: More informative description
@@ -30,7 +31,7 @@ def safe_split_text(text: str, length: int = MAX_MESSAGE_LENGTH) -> typing.List[
while temp_text:
if len(temp_text) > length:
try:
- split_pos = temp_text[:length].rindex(' ')
+ split_pos = temp_text[:length].rindex(split_separator)
except ValueError:
split_pos = length
if split_pos < length // 4 * 3: | Update safe_split_text function, added split_separator param (#<I>) | aiogram_aiogram | train | py |
1a8a7abfe3922847f6e7e7a33e207094f76bdb20 | diff --git a/Model/View/Asset/Image.php b/Model/View/Asset/Image.php
index <HASH>..<HASH> 100644
--- a/Model/View/Asset/Image.php
+++ b/Model/View/Asset/Image.php
@@ -99,7 +99,7 @@ class Image extends ImageModel
ScopeConfigInterface $scopeConfig,
ImageHelper $imageHelper,
StoreManagerInterface $storeManager,
- $filePath,
+ string $filePath,
array $miscParams
) {
$this->scopeConfig = $scopeConfig; | resolves compilation error caused by <I>aff<I>d<I>dda<I>d2d8bbfcbaccee<I>
specify type in phpdoc resolves error during compilation | fastly_fastly-magento2 | train | php |
7ebba89753520687da4dc5c1277cb8622da03bc2 | diff --git a/src/function/abstract.js b/src/function/abstract.js
index <HASH>..<HASH> 100644
--- a/src/function/abstract.js
+++ b/src/function/abstract.js
@@ -33,7 +33,7 @@ define(
};
if (invocation) {
errorDetails.signature = invocation.signature;
- errorDetails.nonMatchingSignatures = invocation.nonMatchingImplementationSignatures;
+ errorDetails.nonMatchingSignatures = invocation.nonMatchingSignatures;
invocation.reset();
}
if (!errorDetails.signature) { | fixing reference to invocation.nonMatchingImplementationSignatures property to nonMatchingSignatures | bob-gray_solv | train | js |
30ac6933b94a2bcefaf288c738a0d2662c5e54a4 | diff --git a/app/transitions/explode.js b/app/transitions/explode.js
index <HASH>..<HASH> 100644
--- a/app/transitions/explode.js
+++ b/app/transitions/explode.js
@@ -6,17 +6,22 @@ import { Promise } from "liquid-fire";
// animations.
export default function explode(...pieces) {
- return Promise.all(pieces.map((piece) => {
+ var result = Promise.all(pieces.map((piece) => {
if (piece.matchBy) {
return matchAndExplode(this, piece);
} else {
return explodePiece(this, piece);
}
- })).then(() => {
- // The default transition guarantees that we didn't leave our
- // original new element invisible
- this.lookup('default').apply(this);
- });
+ }));
+
+ if (this.newElement) {
+ this.newElement.css({visibility: ''});
+ }
+ if (this.oldElement) {
+ this.oldElement.css({visibility: 'hidden'});
+ }
+
+ return result;
}
function explodePiece(context, piece) { | show new and hide old immediately after giving each exploded piece a
chance to animate | ember-animation_liquid-fire | train | js |
48c6d27bdd42f5adbddd02aabd2c20608d9e73dc | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100644
--- a/setup.py
+++ b/setup.py
@@ -17,9 +17,6 @@
import re
import io
-import textwrap
-import sys
-import shutil
from setuptools import setup
@@ -63,12 +60,3 @@ setup(
'Programming Language :: Python'
]
)
-
-# Check that the pandoc-eqnos script is on the PATH
-if not shutil.which('pandoc-eqnos'):
- msg = """
- ERROR: `pandoc-eqnos` script not found. This will need to
- be corrected. If you need help, please file an Issue at
- https://github.com/tomduck/pandoc-eqnos/issues.\n"""
- print(textwrap.dedent(msg))
- sys.exit(-1) | Removed script check that was causing a developer pip error. | tomduck_pandoc-eqnos | train | py |
b4adb2779d9e739263eaf905d809d2c505c75fa3 | diff --git a/lib/core/Docs.js b/lib/core/Docs.js
index <HASH>..<HASH> 100644
--- a/lib/core/Docs.js
+++ b/lib/core/Docs.js
@@ -78,10 +78,12 @@ class Docs {
const previous = this.find(metadata.previous);
if (previous) {
metadata.previous_title = previous.title || 'Previous';
+ metadata.previous_sidebar_label = previous.sidebar_label || 'Previous';
}
const next = this.find(metadata.next);
if (next) {
metadata.next_title = next.title || 'Next';
+ metadata.next_sidebar_label = next.sidebar_label || 'Next';
}
});
} | prev | next sidebar_label | richardzcode_Dochameleon | train | js |
c5ab4c09804d9f21987d3e8506217db296f9048c | diff --git a/examples/14-recurring-first-payment.php b/examples/14-recurring-first-payment.php
index <HASH>..<HASH> 100644
--- a/examples/14-recurring-first-payment.php
+++ b/examples/14-recurring-first-payment.php
@@ -8,7 +8,7 @@ try
/*
* Initialize the Mollie API library with your API key or OAuth access token.
*/
- include "initialize.php";
+ require "initialize.php";
/*
* Retrieve the last created customer for this example. | Update <I>-recurring-first-payment.php | mollie_mollie-api-php | train | php |
f0bf55d4a869598d3af565440001ab1720cf73e0 | diff --git a/lib/utils/result-summary.js b/lib/utils/result-summary.js
index <HASH>..<HASH> 100644
--- a/lib/utils/result-summary.js
+++ b/lib/utils/result-summary.js
@@ -2,7 +2,7 @@
const CoreObject = require('core-object');
const chalk = require('chalk');
-const Table = require('cli-table2');
+const Table = require('cli-table3');
module.exports = CoreObject.extend({
print() { | Update require to cli-table3 | ember-cli_ember-try | train | js |
0e7ddbccf3beab3bb3c92c55bcbd2ec2ca6f732a | diff --git a/tests/integration/test_install_twists.py b/tests/integration/test_install_twists.py
index <HASH>..<HASH> 100644
--- a/tests/integration/test_install_twists.py
+++ b/tests/integration/test_install_twists.py
@@ -92,7 +92,8 @@ setup(
assert "version" in pipenv_instance.lockfile["default"]["test-private-dependency"]
assert "0.1" in pipenv_instance.lockfile["default"]["test-private-dependency"]["version"]
- with PipenvInstance(pypi=pypi, chdir=True) as p:
+ with temp_environ(), PipenvInstance(pypi=pypi, chdir=True) as p:
+ os.environ['PIP_PROCESS_DEPENDENCY_LINKS'] = '1'
test_deplink(p, 'git+https://github.com/atzannes/[email protected]#egg=test-private-dependency-v0.1')
# with PipenvInstance(pypi=pypi, chdir=True) as p: | Enable dependency link processing
- Use `PIP_PROCESS_DEPENDENCY_LINKS` to toggle the processing of dependency links | pypa_pipenv | train | py |
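Wrapping the change in temp_environ() keeps the variable from leaking into other tests. A generic version of that pattern, in case no such helper is at hand (this sketch is not pipenv's own temp_environ):

import os
from contextlib import contextmanager

@contextmanager
def temp_environ():
    """Restore os.environ to its previous state when the block exits."""
    saved = os.environ.copy()
    try:
        yield
    finally:
        os.environ.clear()
        os.environ.update(saved)

with temp_environ():
    os.environ["PIP_PROCESS_DEPENDENCY_LINKS"] = "1"
    # ... run the install that needs dependency-link processing ...
# The variable is restored (here: removed) once the block ends.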
b1a4a626de7488bad77f6abb7bb0a90fa7033a2b | diff --git a/src/Services/BaseFileService.php b/src/Services/BaseFileService.php
index <HASH>..<HASH> 100644
--- a/src/Services/BaseFileService.php
+++ b/src/Services/BaseFileService.php
@@ -767,11 +767,12 @@ abstract class BaseFileService extends BaseRestService implements FileServiceInt
);
$out[$key] = $tmp;
} else {
- $err[] = $name;
+ $err[$name] = $error;
}
}
if (!empty($err)) {
- $msg = 'Failed to upload the following files to folder ' . $this->folderPath . ': ' . implode(', ', $err);
+ $msg = 'Failed to upload the following files to folder ' . $this->folderPath . ': ' . implode(', ', array_keys($err)) .
+ '. Error codes are: ' . implode(', ', array_values($err)) . '. See https://www.php.net/manual/en/features.file-upload.errors.php';
throw new InternalServerErrorException($msg);
} | DP-<I> Windows IIS install File Upload does not work
- Show error codes for files uploading | dreamfactorysoftware_df-file | train | php |
6e28b242e1a7323358ec5bcf1f37a5ae4c65e681 | diff --git a/loadConn.go b/loadConn.go
index <HASH>..<HASH> 100644
--- a/loadConn.go
+++ b/loadConn.go
@@ -38,6 +38,7 @@ type ChildSAConf struct {
CloseAction string `json:"close_action"`
ReqID string `json:"reqid"`
RekeyTime string `json:"rekey_time"`
+ ReplayWindow string `json:"replay_window,omitempty"`
Mode string `json:"mode"`
InstallPolicy string `json:"policies"`
UpDown string `json:"updown,omitempty"` | Adding replay_window to struct | bronze1man_goStrongswanVici | train | go |
f149c4a51946eb6722489af62e9a747d391574af | diff --git a/tej/main.py b/tej/main.py
index <HASH>..<HASH> 100644
--- a/tej/main.py
+++ b/tej/main.py
@@ -36,8 +36,9 @@ def _setup(args):
def _submit(args):
- RemoteQueue(args.destination, args.queue).submit(
- args.id, args.directory, args.script)
+ job_id = RemoteQueue(args.destination, args.queue).submit(
+ args.id, args.directory, args.script)
+ print(job_id)
def _status(args):
diff --git a/tej/submission.py b/tej/submission.py
index <HASH>..<HASH> 100644
--- a/tej/submission.py
+++ b/tej/submission.py
@@ -413,6 +413,7 @@ class RemoteQueue(object):
job_id, target,
script))
logger.info("Submitted job %s", job_id)
+ return job_id
def status(self, job_id):
"""Gets the status of a previously-submitted job. | Makes 'submit' return the job identifier
This is both for the API (RemoteQueue#submit()) and the CLI
('tej submit'). | VisTrails_tej | train | py,py |
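After this change the identifier is available both to API callers and on stdout of `tej submit`. A usage sketch with placeholder arguments (the destination, queue path, directory and script names are made up; the parameter order follows the diff):

from tej.submission import RemoteQueue

queue = RemoteQueue("user@cluster", "~/.tej-queue")
job_id = queue.submit("job-001", "experiment_dir", "run.sh")
print(job_id)  # same value that `tej submit` now prints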
2f499114acada6814a45e379c710d5835d6a20a4 | diff --git a/gcsproxy/mutable_object.go b/gcsproxy/mutable_object.go
index <HASH>..<HASH> 100644
--- a/gcsproxy/mutable_object.go
+++ b/gcsproxy/mutable_object.go
@@ -48,6 +48,8 @@ type MutableObject struct {
// Mutable state
/////////////////////////
+ destroyed bool
+
// A record for the specific generation of the object from which our local
// state is branched.
src gcs.Object
@@ -139,6 +141,10 @@ func (mo *MutableObject) SourceGeneration() int64 {
// at appropriate times to help debug weirdness. Consider using
// syncutil.InvariantMutex to automate the process.
func (mo *MutableObject) CheckInvariants() {
+ if mo.destroyed {
+ return
+ }
+
// INVARIANT: atomic.LoadInt64(&sourceGeneration) == src.Generation
{
g := atomic.LoadInt64(&mo.sourceGeneration)
@@ -166,6 +172,8 @@ func (mo *MutableObject) CheckInvariants() {
// state. The MutableObject must not be used after calling this method,
// regardless of outcome.
func (mo *MutableObject) Destroy() (err error) {
+ mo.destroyed = true
+
// If we have no read/write lease, there's nothing to do.
if mo.readWriteLease == nil {
return | Don't check invariants for destroyed objects. | jacobsa_timeutil | train | go |
feccc5ef755d72d37a9b23f7aaba4d884fb68593 | diff --git a/Kwf/Component/Events/ViewCache.php b/Kwf/Component/Events/ViewCache.php
index <HASH>..<HASH> 100644
--- a/Kwf/Component/Events/ViewCache.php
+++ b/Kwf/Component/Events/ViewCache.php
@@ -206,7 +206,7 @@ class Kwf_Component_Events_ViewCache extends Kwf_Component_Events
}
// namechanged and filnamechanged-events
- public function onPageChanged(Kwf_Component_Event_Page_ContentChanged $event)
+ public function onPageChanged(Kwf_Component_Event_Component_Abstract $event)
{
$this->_updates[] = array(
'type' => 'componentLink', | fix test: (File)Name_Cachend's parent class was changed | koala-framework_koala-framework | train | php |
6ce7b3de05e515e2ea4cb66b92f90336b14a26ae | diff --git a/py/testdir_single_jvm_fvec/test_exec2_cbind_like_R.py b/py/testdir_single_jvm_fvec/test_exec2_cbind_like_R.py
index <HASH>..<HASH> 100644
--- a/py/testdir_single_jvm_fvec/test_exec2_cbind_like_R.py
+++ b/py/testdir_single_jvm_fvec/test_exec2_cbind_like_R.py
@@ -86,16 +86,6 @@ class Basic(unittest.TestCase):
execResult, result = h2e.exec_expr(h2o.nodes[0], execExpr, resultKey=None, timeoutSecs=300)
execTime = time.time() - start
print 'exec took', execTime, 'seconds'
- c = h2o.nodes[0].get_cloud()
- c = c['nodes']
-
- # print (h2o.dump_json(c))
- k = [i['num_keys'] for i in c]
- v = [i['value_size_bytes'] for i in c]
-
-
- print "keys: %s" % " ".join(map(str,k))
- print "value_size_bytes: %s" % " ".join(map(str,v))
h2o.check_sandbox_for_errors() | get rid of the get_cloud probes | h2oai_h2o-2 | train | py |
ac99217e92c43066af7ec96554054d75532565d7 | diff --git a/tests/test_modeling_common.py b/tests/test_modeling_common.py
index <HASH>..<HASH> 100644
--- a/tests/test_modeling_common.py
+++ b/tests/test_modeling_common.py
@@ -67,6 +67,8 @@ class ModelTesterMixin:
if model_class in MODEL_FOR_MULTIPLE_CHOICE_MAPPING.values():
return {
k: v.unsqueeze(1).expand(-1, self.model_tester.num_choices, -1).contiguous()
+ if isinstance(v, torch.Tensor) and v.ndim != 0
+ else v
for k, v in inputs_dict.items()
}
return inputs_dict
@@ -157,7 +159,7 @@ class ModelTesterMixin:
model.to(torch_device)
model.eval()
with torch.no_grad():
- outputs = model(**inputs_dict)
+ outputs = model(**self._prepare_for_class(inputs_dict, model_class))
attentions = outputs[-1]
self.assertEqual(model.config.output_hidden_states, False)
self.assertEqual(len(attentions), self.model_tester.num_hidden_layers) | Fix the CI (#<I>)
* Fix CI | huggingface_pytorch-pretrained-BERT | train | py |
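The added guard matters because the inputs dict can mix real tensors with scalars or flags, and only non-scalar tensors can be expanded for multiple-choice models. A standalone illustration of the comprehension (the keys and shapes are invented):

import torch

inputs = {"input_ids": torch.zeros(2, 5, dtype=torch.long), "return_dict": True}
num_choices = 3

expanded = {
    k: v.unsqueeze(1).expand(-1, num_choices, -1).contiguous()
    if isinstance(v, torch.Tensor) and v.ndim != 0
    else v
    for k, v in inputs.items()
}

assert expanded["input_ids"].shape == (2, 3, 5)  # expanded per choice
assert expanded["return_dict"] is True           # passed through untouched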
d1563d1b34a52740712da3d70771576386576fc5 | diff --git a/neuropythy/__init__.py b/neuropythy/__init__.py
index <HASH>..<HASH> 100644
--- a/neuropythy/__init__.py
+++ b/neuropythy/__init__.py
@@ -79,7 +79,7 @@ try:
except: pass
# Version information...
-__version__ = '0.8.1'
+__version__ = '0.8.2'
diff --git a/neuropythy/util/core.py b/neuropythy/util/core.py
index <HASH>..<HASH> 100644
--- a/neuropythy/util/core.py
+++ b/neuropythy/util/core.py
@@ -493,7 +493,7 @@ class CurveSpline(ObjectWithMetaData):
'''
return CurveSpline(
np.flip(self.coordinates, axis=1),
- distances=(None if self.distances is None else np.flip(self.distances)),
+ distances=(None if self.distances is None else np.flip(self.distances, axis=0)),
order=self.order, weights=self.weights, smoothing=self.smoothing,
periodic=self.periodic, meta_data=self.meta_data)
def subcurve(self, t0, t1): | minor compatibility with numpy <I> issue | noahbenson_neuropythy | train | py,py |
f8a21b4a9f7e9f15b7378feb2962cc2e5d51caeb | diff --git a/src/experimentalcode/remigius/Visualizers/DotVisualizer.java b/src/experimentalcode/remigius/Visualizers/DotVisualizer.java
index <HASH>..<HASH> 100644
--- a/src/experimentalcode/remigius/Visualizers/DotVisualizer.java
+++ b/src/experimentalcode/remigius/Visualizers/DotVisualizer.java
@@ -34,7 +34,9 @@ public class DotVisualizer<NV extends NumberVector<NV, ?>> extends Projection2DV
@Override
public Element visualize(SVGPlot svgp) {
Element layer = super.visualize(svgp);
+ //MarkerLibrary ml = context.getMarkerLibrary();
for(int id : database) {
+ //Element dot = ml.useMarker(svgp, layer, getProjected(id, 0), getProjected(id, 1), 0, 0.01);
Element dot = ShapeLibrary.createMarkerDot(svgp.getDocument(), getProjected(id, 0), getProjected(id, 1));
// setting ID for efficient use of ToolTips.
dot.setAttribute("id", ShapeLibrary.createID(ShapeLibrary.MARKER, id)); | Add code (still commented) for calling MarkerLibrary | elki-project_elki | train | java |
d73e1768d6b4703acaff316c9f9432281f405832 | diff --git a/core-bundle/src/Resources/contao/controllers/BackendInstall.php b/core-bundle/src/Resources/contao/controllers/BackendInstall.php
index <HASH>..<HASH> 100644
--- a/core-bundle/src/Resources/contao/controllers/BackendInstall.php
+++ b/core-bundle/src/Resources/contao/controllers/BackendInstall.php
@@ -454,6 +454,12 @@ class BackendInstall extends \Backend
continue;
}
+ // The port number must not be empty (see #7950)
+ if ($strKey == 'dbPort' && \Input::post($strKey, true) == '')
+ {
+ \Input::setPost($strKey, 3306);
+ }
+
\Config::persist($strKey, \Input::post($strKey, true));
} | [Core] Ensure that the database port is not empty (see #<I>). | contao_contao | train | php |
81dbb5d8ebdf72c897a240bfe3d8d2b37511e378 | diff --git a/lib/codemirror.js b/lib/codemirror.js
index <HASH>..<HASH> 100644
--- a/lib/codemirror.js
+++ b/lib/codemirror.js
@@ -700,7 +700,7 @@ window.CodeMirror = (function() {
// parse correctly.
function findStartLine(doc, n) {
var minindent, minline;
- for (var search = n, lim = n - 40; search > lim; --search) {
+ for (var search = n, lim = n - 100; search > lim; --search) {
if (search == 0) return 0;
var line = getLine(doc, search-1);
if (line.stateAfter) return search; | Increase getStateBefore's scan limit | codemirror_CodeMirror | train | js |
146b6b24cc78b2101208f9fade75ac39cc92a794 | diff --git a/pyiso.py b/pyiso.py
index <HASH>..<HASH> 100644
--- a/pyiso.py
+++ b/pyiso.py
@@ -678,10 +678,10 @@ class PrimaryVolumeDescriptor(object):
self.path_tbl_size = 10
# By default the Little Endian Path Table record starts at extent 19
# (right after the Volume Terminator).
- self.path_table_location_le = 19*self.log_block_size
+ self.path_table_location_le = 19
# By default the Big Endian Path Table record starts at extent 21
# (two extents after the Little Endian Path Table Record).
- self.path_table_location_be = 21*self.log_block_size
+ self.path_table_location_be = 21
# FIXME: we don't support the optional path table location right now
self.optional_path_table_location_le = 0
self.optional_path_table_location_be = 0 | Store the path table location as extents.
That's what they get written out as anyway. | clalancette_pycdlib | train | py |
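Keeping the locations as extent numbers matches how they are written to the descriptor; the byte offset is derived only when seeking. A small sketch of the relationship (variable names mirror the diff; the 2048-byte block size is the usual ISO 9660 value):

log_block_size = 2048
path_table_location_le = 19   # extent right after the Volume Terminator
path_table_location_be = 21   # two extents later

le_byte_offset = path_table_location_le * log_block_size  # 38912
be_byte_offset = path_table_location_be * log_block_size  # 43008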
1e486a58441a3ce300939fb320812578d7884555 | diff --git a/pandas/io/tests/test_parsers.py b/pandas/io/tests/test_parsers.py
index <HASH>..<HASH> 100644
--- a/pandas/io/tests/test_parsers.py
+++ b/pandas/io/tests/test_parsers.py
@@ -812,6 +812,8 @@ bar,foo,foo"""
@slow
def test_file(self):
# FILE
+ if sys.version_info[:2] < (2, 6):
+ raise nose.SkipTest("file:// not supported with Python < 2.6")
dirpath = curpath()
localtable = os.path.join(dirpath, 'salary.table')
local_table = read_table(localtable) | ENH: skip test_file test with python <I> (not supported AFAIK) | pandas-dev_pandas | train | py |
c586aefe6751c5918a999dd6e8203e5365872f85 | diff --git a/package-testing/spec/package/update_module_spec.rb b/package-testing/spec/package/update_module_spec.rb
index <HASH>..<HASH> 100644
--- a/package-testing/spec/package/update_module_spec.rb
+++ b/package-testing/spec/package/update_module_spec.rb
@@ -25,7 +25,7 @@ describe 'Updating an existing module' do
create_remote_file(get_working_node, File.join(module_dir, 'metadata.json'), metadata.to_json)
sync_yaml = YAML.safe_load(open("https://raw.githubusercontent.com/#{mod}/master/.sync.yml").read)
- sync_yaml['Gemfile']['required'][':system_tests'] << { 'gem' => 'nokogiri', 'version' => '1.8.2' }
+ sync_yaml['Gemfile']['required'][':system_tests'] << { 'gem' => 'nokogiri', 'version' => '1.8.5' }
create_remote_file(get_working_node, File.join(module_dir, '.sync.yml'), sync_yaml.to_yaml)
end
let(:cwd) { repo_dir } | (maint) Fix pin for nokogiri in package tests | puppetlabs_pdk | train | rb |
b7afa2a27e25c99a10b831c76995305d6d2d75b8 | diff --git a/base/app/models/relation/custom.rb b/base/app/models/relation/custom.rb
index <HASH>..<HASH> 100644
--- a/base/app/models/relation/custom.rb
+++ b/base/app/models/relation/custom.rb
@@ -99,7 +99,7 @@ class Relation::Custom < Relation
# JSON compatible with SocialCheesecake
def to_cheesecake_hash(options = {})
- { :name => name }.tap do |hash|
+ {:id => id, :name => name}.tap do |hash|
if options[:subsector]
hash[:actors] = ties.map{ |t| [t.contact.receiver_id, t.contact.receiver.name] }.uniq
else | Adding ids to cheesecake_hash for relation custom | ging_social_stream | train | rb |