hash             stringlengths   40-40
diff             stringlengths   131-26.7k
message          stringlengths   7-694
project          stringlengths   5-67
split            stringclasses   1 value
diff_languages   stringlengths   2-24
f910c34cce10a99411be304521e63afed397865b
diff --git a/test/download-dht-torrent.js b/test/download-dht-torrent.js index <HASH>..<HASH> 100644 --- a/test/download-dht-torrent.js +++ b/test/download-dht-torrent.js @@ -77,13 +77,22 @@ test('Download using DHT (via .torrent file)', function (t) { file.getBuffer(function (err, buf) { if (err) throw err t.deepEqual(buf, leavesFile, 'downloaded correct content') + gotBuffer = true + maybeDone() }) }) torrent.once('done', function () { t.pass('client2 downloaded torrent from client1') - cb(null, client2) + torrentDone = true + maybeDone() }) + + var torrentDone = false + var gotBuffer = false + function maybeDone () { + if (torrentDone && gotBuffer) cb(null, client2) + } }) }] }, function (err, r) {
test: don't assume event ordering
webtorrent_webtorrent
train
js
27574eed1461bf5e244b9341778508344c4587a6
diff --git a/jsoncfg/__init__.py b/jsoncfg/__init__.py index <HASH>..<HASH> 100644 --- a/jsoncfg/__init__.py +++ b/jsoncfg/__init__.py @@ -40,7 +40,7 @@ __all__ = [ # case increase only version_info[2]. # version_info[2]: Increase in case of bugfixes. Also use this if you added new features # without modifying the behavior of the previously existing ones. -version_info = (0, 3, 4) +version_info = (0, 4, 0) __version__ = '.'.join(str(n) for n in version_info) __author__ = 'István Pásztor' __license__ = 'MIT'
bumping version to <I>
pasztorpisti_json-cfg
train
py
725c6e1ef90878f725160ebc0519fbef4d5dd67f
diff --git a/cas/middleware.py b/cas/middleware.py index <HASH>..<HASH> 100644 --- a/cas/middleware.py +++ b/cas/middleware.py @@ -12,6 +12,10 @@ from django.contrib.auth.views import login, logout from django.core.urlresolvers import reverse from django.http import HttpResponseRedirect, HttpResponseForbidden from django.core.exceptions import ImproperlyConfigured +try: + from django.utils.deprecation import MiddlewareMixin +except ImportError: + MiddlewareMixin = object from cas.exceptions import CasTicketException from cas.views import login as cas_login, logout as cas_logout @@ -19,7 +23,7 @@ from cas.views import login as cas_login, logout as cas_logout __all__ = ['CASMiddleware'] -class CASMiddleware(object): +class CASMiddleware(MiddlewareMixin): """ Middleware that allows CAS authentication on admin pages """ @@ -81,7 +85,7 @@ class CASMiddleware(object): return None -class ProxyMiddleware(object): +class ProxyMiddleware(MiddlewareMixin): # Middleware used to "fake" the django app that it lives at the Proxy Domain def process_request(self, request):
Compatibility with Django <I> Middleware
kstateome_django-cas
train
py
6be3b94de0b5d67b89e2d13612867c76eed60ff2
diff --git a/api/v1/api.go b/api/v1/api.go index <HASH>..<HASH> 100644 --- a/api/v1/api.go +++ b/api/v1/api.go @@ -63,7 +63,11 @@ type getIsiVolumeAttributesResp struct { // Isi PAPI export path JSON struct type ExportPathList struct { - Paths []string `json:"paths"` + Paths []string `json:"paths"` + MapAll struct { + User string `json:"user"` + Groups []string `json:"groups,omitempty"` + } `json:"map_all"` } // Isi PAPI export clients JSON struct @@ -305,6 +309,10 @@ func (papi *PapiConnection) Export(path string) (err error) { } var data = &ExportPathList{Paths: []string{path}} + data.MapAll.User = papi.username + if papi.group != "" { + data.MapAll.Groups = append(data.MapAll.Groups, papi.group) + } headers := map[string]string{"Content-Type": "application/json"} var resp *postIsiExportResp
Fix for volume chown issue. This fixes the issue where ownership of directories created on a mounted volume could not be changed. Added the user in the config file to the map_all list in the export parameters for the volume.
thecodeteam_goisilon
train
go
e83aed2235f20d02224bad5fda02674801406b89
diff --git a/pythran/typing.py b/pythran/typing.py index <HASH>..<HASH> 100644 --- a/pythran/typing.py +++ b/pythran/typing.py @@ -4,6 +4,15 @@ import operator from tables import type_to_str, operator_to_lambda, modules from passes import global_declarations, constant_value +if not "has_path" in nx.__dict__: + def has_path(G,source,target): + try: + sp = nx.shortest_path(G, source, target) + except nx.NetworkXNoPath: + return False + return True + nx.has_path=has_path + class Reorder(ast.NodeVisitor): def __init__(self, typedeps): self.typedeps=typedeps
define has_path in networkx if it is not already defined
serge-sans-paille_pythran
train
py
2073d47b49330d735f726987dceec19cec2631d2
diff --git a/lib/markaby/builder.rb b/lib/markaby/builder.rb index <HASH>..<HASH> 100644 --- a/lib/markaby/builder.rb +++ b/lib/markaby/builder.rb @@ -79,7 +79,7 @@ module Markaby @streams = [[]] @assigns = assigns.dup @_helper = helper - @elements = {} + @used_ids = {} @@options.each do |k, v| instance_variable_set("@#{k}", @assigns.delete(k) || v) @@ -149,7 +149,7 @@ module Markaby end if atname == :id ele_id = v.to_s - if @elements.has_key? ele_id + if @used_ids.has_key? ele_id raise InvalidXhtmlError, "id `#{ele_id}' already used (id's must be unique)." end end @@ -163,7 +163,7 @@ module Markaby end f = fragment { @builder.method_missing(tag, *args, &block) } - @elements[ele_id] = f if ele_id + @used_ids[ele_id] = f if ele_id f end
Rename @elements to used_ids
markaby_markaby
train
rb
c9668dd0ab9abe09c38cd5053d8edbb2808ed1a6
diff --git a/test/candidates_test.rb b/test/candidates_test.rb index <HASH>..<HASH> 100644 --- a/test/candidates_test.rb +++ b/test/candidates_test.rb @@ -114,4 +114,30 @@ class CandidatesTest < TestCaseClass end end + test "allows to iterate through candidates without passing block" do + klass = Class.new model_class do + def slug_candidates + :name + end + end + with_instances_of klass do |_, city| + candidates = FriendlyId::Candidates.new(city, city.slug_candidates) + assert_equal candidates.each, ['new-york'] + end + end + + test "iterates through candidates with passed block" do + klass = Class.new model_class do + def slug_candidates + :name + end + end + with_instances_of klass do |_, city| + collected_candidates = [] + candidates = FriendlyId::Candidates.new(city, city.slug_candidates) + candidates.each { |candidate| collected_candidates << candidate } + assert_equal collected_candidates, ['new-york'] + end + end + end
Tests for Candidates#each.
norman_friendly_id
train
rb
8bf0317c7a25fe1d7c585079935233ecebe740ff
diff --git a/sukhoi.py b/sukhoi.py index <HASH>..<HASH> 100644 --- a/sukhoi.py +++ b/sukhoi.py @@ -20,7 +20,7 @@ class Miner(list): task.start() def __init__(self, url, headers=default_headers, args={}, - method='get', payload={}, auth=(), attempts=5): + method='get', payload=None, auth=None, attempts=5): """ Resource Param: url
Fixing to work with new websnake version.
untwisted_sukhoi
train
py
bc5711a3092dc446bcd1e25c8fc5490617338bfb
diff --git a/dipper/sources/FlyBase.py b/dipper/sources/FlyBase.py index <HASH>..<HASH> 100644 --- a/dipper/sources/FlyBase.py +++ b/dipper/sources/FlyBase.py @@ -416,7 +416,11 @@ class FlyBase(PostgreSQLSource): col = self.files[src_key]['columns'] - with gzip.open(raw, 'rt') as tsvfile: + # JR - I've set encoding to latin-1 to fix the UnicodeDecodeError that happens + # when the default encoding (utf-8) is used. This possibly will break if/when + # the encoding of this file upstream at Flybase is changed to utf-8. If so, + # trying setting encoding='utf-8' below + with gzip.open(raw, 'rt', encoding='latin-1') as tsvfile: reader = csv.reader(tsvfile, delimiter='\t') # skip first four lines for _ in range(0, 2):
Cherry picked commits from dataset PR to fix Flybase ingest (set encoding to latin-1 in _fyref_to_pmid() to fix UnicodeDecodeError)
monarch-initiative_dipper
train
py
b3c2567d40524dbee439053dc77a2a190e4ba07d
diff --git a/src/RecordsTransformer.php b/src/RecordsTransformer.php index <HASH>..<HASH> 100644 --- a/src/RecordsTransformer.php +++ b/src/RecordsTransformer.php @@ -155,6 +155,10 @@ class RecordsTransformer $datatableParameters['DT_RowClass'] = $record->laratablesRowClass(); } + if (method_exists($this->model, 'laratablesRowData')) { + $datatableParameters['DT_RowData'] = $record->laratablesRowData(); + } + return $datatableParameters; } }
Allow setting data attributes for each row of the table
freshbitsweb_laratables
train
php
925697fab5b7f33088ca8269997865607f156477
diff --git a/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java b/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java index <HASH>..<HASH> 100644 --- a/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java +++ b/core-test/src/test/java/com/microsoft/windowsazure/RunCukesTest.java @@ -23,6 +23,6 @@ import org.junit.runner.RunWith; @RunWith(Cucumber.class) @CucumberOptions( format = {"html:target/cucumber-html-report", "json:target/cucumber-json-report.json"}, -tags = {"~@ignore"}) +tags = {"@livetest"}) public class RunCukesTest { } \ No newline at end of file
Change cucumber test tag to not run by default
Azure_azure-sdk-for-java
train
java
ca67d02e6bf23f03f5bcca2f620d6e3451b7e30b
diff --git a/environs/tools/tools.go b/environs/tools/tools.go index <HASH>..<HASH> 100644 --- a/environs/tools/tools.go +++ b/environs/tools/tools.go @@ -121,14 +121,7 @@ func FindTools(env environs.BootstrapEnviron, majorVersion, minorVersion int, st if err != nil { return nil, err } - list, err := FindToolsForCloud(sources, cloudSpec, streams, majorVersion, minorVersion, filter) - if err != nil { - return nil, err - } - for _, tool := range list { - logger.Debugf("Located tool version %s at %s", tool.Version.String(), tool.URL) - } - return list, nil + return FindToolsForCloud(sources, cloudSpec, streams, majorVersion, minorVersion, filter) } // FindToolsForCloud returns a List containing all tools in the given streams, with a given
Remove the debug log. Instead of this debug log, favour the one in bootstrap.
juju_juju
train
go
607f9bdb5a78e6a7a677962903fe6a47eefee578
diff --git a/Gruntfile.js b/Gruntfile.js index <HASH>..<HASH> 100644 --- a/Gruntfile.js +++ b/Gruntfile.js @@ -12,6 +12,9 @@ module.exports = function (grunt) { } }, watch: { + options: { + interval: 1000 + }, css: { files: '**/*.scss', tasks: ['sass']
Increase the file watch interval to give my CPU a break.
johnbillion_query-monitor
train
js
fc6f22fd6b637a0d7c68829d22f410dc41061017
diff --git a/edisgo/grid/network.py b/edisgo/grid/network.py index <HASH>..<HASH> 100644 --- a/edisgo/grid/network.py +++ b/edisgo/grid/network.py @@ -859,6 +859,38 @@ class Results: self._pfa_v_mag_pu = pypsa @property + def i_res(self): + """ + Current results from power flow analysis in A. + + Holds power flow analysis results for current for the last + iteration step. Index of the DataFrame is a DatetimeIndex indicating + the time period the power flow analysis was conducted for; columns + of the DataFrame are the edges as well as stations of the grid + topology. + ToDo: add unit + + Parameters + ---------- + pypsa: `pandas.DataFrame<dataframe>` + Results time series of current in A from the + `PyPSA network <https://www.pypsa.org/doc/components.html#network>`_ + + Provide this if you want to set values. For retrieval of data do + not pass an argument + + Returns + ------- + :pandas:`pandas.DataFrame<dataframe>` + Current results from power flow analysis + """ + return self._i_res + + @i_res.setter + def i_res(self, pypsa): + self._i_res = pypsa + + @property def equipment_changes(self): """ Tracks changes in the equipment (e.g. replaced or added cable, etc.)
Add getter and setter for i_res
openego_eDisGo
train
py
46c22b72a85083698eeba1bb566726f70efd3848
diff --git a/lib/origami/graphics/xobject.rb b/lib/origami/graphics/xobject.rb index <HASH>..<HASH> 100644 --- a/lib/origami/graphics/xobject.rb +++ b/lib/origami/graphics/xobject.rb @@ -659,22 +659,22 @@ module Origami data = fd.read else data = File.binread(File.expand_path(path)) - format ||= File.extname(path) + format ||= File.extname(path)[1..-1] end image = ImageXObject.new raise ArgumentError, "Missing file format" if format.nil? case format.downcase - when '.jpg', 'jpeg', '.jpe', '.jif', '.jfif', '.jfi' + when 'jpg', 'jpeg', 'jpe', 'jif', 'jfif', 'jfi' image.setFilter :DCTDecode image.encoded_data = data - when '.jp2','.jpx','.j2k','.jpf','.jpm','.mj2' + when 'jp2','jpx','j2k','jpf','jpm','mj2' image.setFilter :JPXDecode image.encoded_data = data - when '.jb2', '.jbig', '.jbig2' + when '.b2', 'jbig', 'jbig2' image.setFilter :JBIG2Decode image.encoded_data = data else
graphics/xobject: fix regression in from_image_file
gdelugre_origami
train
rb
d48742d33c9dc0baf19aa964768b54275d254f78
diff --git a/lib/PaymentMethod.php b/lib/PaymentMethod.php index <HASH>..<HASH> 100644 --- a/lib/PaymentMethod.php +++ b/lib/PaymentMethod.php @@ -30,6 +30,7 @@ namespace Stripe; * @property \Stripe\StripeObject $eps * @property \Stripe\StripeObject $fpx * @property \Stripe\StripeObject $giropay + * @property \Stripe\StripeObject $grabpay * @property \Stripe\StripeObject $ideal * @property \Stripe\StripeObject $interac_present * @property bool $livemode Has the value <code>true</code> if the object exists in live mode or the value <code>false</code> if the object exists in test mode.
Codegen for openapi bb9e<I>d
stripe_stripe-php
train
php
5e342c7e7c9811de253659331adb1d442feda6e8
diff --git a/core/src/main/java/hudson/security/LDAPSecurityRealm.java b/core/src/main/java/hudson/security/LDAPSecurityRealm.java index <HASH>..<HASH> 100644 --- a/core/src/main/java/hudson/security/LDAPSecurityRealm.java +++ b/core/src/main/java/hudson/security/LDAPSecurityRealm.java @@ -226,9 +226,9 @@ public class LDAPSecurityRealm extends SecurityRealm { /** * Query to locate an entry that identifies the user, given the user name string. * - * Normally something like "uid={0}" + * Normally "uid={0}" * - * @see FilterBasedLdapUserSearch#searchFilter + * @see FilterBasedLdapUserSearch */ public final String userSearch; @@ -282,7 +282,7 @@ public class LDAPSecurityRealm extends SecurityRealm { this.rootDN = rootDN.trim(); this.userSearchBase = fixNull(userSearchBase).trim(); userSearch = fixEmptyAndTrim(userSearch); - this.userSearch = userSearch!=null ? userSearch : "(| (uid={0}) (mail={0}) (cn={0}))"; + this.userSearch = userSearch!=null ? userSearch : "uid={0}"; this.groupSearchBase = fixEmptyAndTrim(groupSearchBase); }
rolling back the LDAP change based on the discussion: <URL>
jenkinsci_jenkins
train
java
2807384d8ff28deb35fd7f3b80c4d138de087946
diff --git a/coconut/command/util.py b/coconut/command/util.py index <HASH>..<HASH> 100644 --- a/coconut/command/util.py +++ b/coconut/command/util.py @@ -239,7 +239,9 @@ def run_cmd(cmd, show_output=True, raise_errs=True, **kwargs): elif show_output: return subprocess.call(cmd, **kwargs) else: - stdout, stderr, _ = call_output(cmd, **kwargs) + stdout, stderr, retcode = call_output(cmd, **kwargs) + if retcode and raise_errs: + raise subprocess.CalledProcessError(retcode, cmd, stdout, stderr) return "".join(stdout + stderr) diff --git a/tests/main_test.py b/tests/main_test.py index <HASH>..<HASH> 100644 --- a/tests/main_test.py +++ b/tests/main_test.py @@ -315,6 +315,7 @@ class TestShell(unittest.TestCase): def test_jupyter(self): call(["coconut", "--jupyter"], assert_output="Coconut: Successfully installed Coconut Jupyter kernel.") + call(["jupyter", "kernelspec", "list"], assert_output="coconut") class TestCompilation(unittest.TestCase):
Fix jupyter installation error. Resolves #<I>.
evhub_coconut
train
py,py
bbe4f62c2086103fdea3b7cc9c243a7feea89511
diff --git a/pyathena/cursor.py b/pyathena/cursor.py index <HASH>..<HASH> 100644 --- a/pyathena/cursor.py +++ b/pyathena/cursor.py @@ -191,8 +191,7 @@ class Cursor(object): raise DatabaseError('KeyError `Rows`') processed_rows = [] if len(rows) > 0: - offset = 1 if not self._next_token and \ - self._is_first_row_column_labels(rows) else 0 + offset = 1 if not self._next_token and self._is_first_row_column_labels(rows) else 0 processed_rows = [ tuple([self._converter.convert(meta.get('Type', None), row.get('VarCharValue', None))
Fix E<I> continuation line over-indented for hanging indent
laughingman7743_PyAthena
train
py
52df644b67c2de4b5c7a273efeb6520e71aad19c
diff --git a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py index <HASH>..<HASH> 100644 --- a/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py +++ b/datadog_checks_dev/datadog_checks/dev/tooling/commands/meta/manifest.py @@ -2,6 +2,7 @@ # All rights reserved # Licensed under Simplified BSD License (see LICENSE) import json +import uuid import click @@ -154,6 +155,10 @@ def migrate(ctx, integration, to_version): # Explicitly set the manifest_version first so it appears at the top of the manifest migrated_manifest.set_path("/manifest_version", "2.0.0") + # Generate and introduce a uuid + app_uuid = str(uuid.uuid4()) + migrated_manifest.set_path("/app_uuid", app_uuid) + for key, val in V2_TO_V1_MAP.items(): if val == SKIP_IF_FOUND: continue
Add app_uuid to manifest migrator (#<I>)
DataDog_integrations-core
train
py
514d6370d4047f8efe354db90c79ae28ac2d0159
diff --git a/resources/lang/ko-KR/dashboard.php b/resources/lang/ko-KR/dashboard.php index <HASH>..<HASH> 100644 --- a/resources/lang/ko-KR/dashboard.php +++ b/resources/lang/ko-KR/dashboard.php @@ -18,7 +18,7 @@ return [ 'incidents' => [ 'title' => '문제 및 예정', 'incidents' => '문제', - 'logged' => '{0} There are no incidents, good work.|[1]You have logged one incident.|[2,*]You have reported <strong>:count</strong> incidents.', + 'logged' => '{0}There are no incidents, good work.|[1]You have logged one incident.|[2,*]You have reported <strong>:count</strong> incidents.', 'incident-create-template' => '템플릿 생성', 'incident-templates' => '문제 템플릿', 'updates' => [
New translations dashboard.php (Korean)
CachetHQ_Cachet
train
php
30a6d311eb66c671992a78e354ad56ec78a94f68
diff --git a/lib/progressPlugBrowser.js b/lib/progressPlugBrowser.js index <HASH>..<HASH> 100644 --- a/lib/progressPlugBrowser.js +++ b/lib/progressPlugBrowser.js @@ -1,4 +1,4 @@ -import { Plug } from './plug.js'; +import { Plug } from '../plug.js'; function _doXhr({ xhr, body, progressInfo }) { return new Promise((resolve, reject) => { xhr.onreadystatechange = function(e) { diff --git a/lib/progressPlugNode.js b/lib/progressPlugNode.js index <HASH>..<HASH> 100644 --- a/lib/progressPlugNode.js +++ b/lib/progressPlugNode.js @@ -1,2 +1,2 @@ -import { Plug } from './plug.js'; +import { Plug } from '../plug.js'; export class ProgressPlugNode extends Plug {}
Fix import paths in Plug subclasses
MindTouch_mindtouch-http.js
train
js,js
045161eec93c1abbb33b3012f8cd3cbfbf375e0c
diff --git a/src/lib/Tables/InventorySelling.php b/src/lib/Tables/InventorySelling.php index <HASH>..<HASH> 100644 --- a/src/lib/Tables/InventorySelling.php +++ b/src/lib/Tables/InventorySelling.php @@ -37,7 +37,7 @@ class InventorySelling extends Table 'Name' => 'name', 'Duration' => 'duration', 'Details' => null, - 'Time left' => 'time_ended', + 'Time left' => 'end_date', 'Options' => null, ]; @@ -58,7 +58,7 @@ class InventorySelling extends Table * * @var string */ - public $defaultSortKey = 'time_ended'; + public $defaultSortKey = 'end_date'; /** * Default sort order. @@ -73,14 +73,4 @@ class InventorySelling extends Table * @var string */ public $presenter = FoundationFivePresenter::class; - - /** - * Sort by time since item ended. - * - * @return void - */ - public function sortTimeEnded($sortOrder) - { - $this->db->orderBy(DB::raw('unix_timestamp() - cast(`end_date` as signed)'), $sortOrder); - } }
Fixed date sorting in inventory selling table
hamjoint_mustard
train
php
a91d672f60ae5a5480448c6019f9b277799f21f0
diff --git a/neovim/util.py b/neovim/util.py index <HASH>..<HASH> 100644 --- a/neovim/util.py +++ b/neovim/util.py @@ -47,6 +47,9 @@ class RemoteMap(object): class Current(object): + def __init__(self, vim): + self._vim = vim + @property def line(self): return self._vim.get_current_line()
Add missing initializer for `Current`
neovim_pynvim
train
py
c4a602baacd4837b6b2e06526e369b30754e9e6f
diff --git a/pysis/commands.py b/pysis/commands.py index <HASH>..<HASH> 100644 --- a/pysis/commands.py +++ b/pysis/commands.py @@ -120,3 +120,9 @@ class IsisPool(Isis): self.close() self.join() + def __enter__(self): + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close_and_wait() +
Added enter and exit methods for using an isis pool with a `with` statement.
wtolson_pysis
train
py
f4e992119e7ce90aa1254d0defc0697c9421d58f
diff --git a/src/Mustache/Compiler.php b/src/Mustache/Compiler.php index <HASH>..<HASH> 100644 --- a/src/Mustache/Compiler.php +++ b/src/Mustache/Compiler.php @@ -211,9 +211,9 @@ class Mustache_Compiler } const BLOCK_VAR = ' - $value = $this->resolveValue($context->%s(%s), $context, $indent); + $value = $this->resolveValue($context->findInBlock(%s), $context, $indent); if($value && !is_array($value) && !is_object($value)) { - $buffer .= %s; + $buffer .= $value; } else { %s } @@ -221,11 +221,9 @@ class Mustache_Compiler private function blockVar($nodes, $id, $start, $end, $otag, $ctag, $level) { - $method = 'findInBlock'; $id_str = var_export($id, true); - $value = $this->getEscape(); - return sprintf($this->prepare(self::BLOCK_VAR, $level), $method, $id_str, $value, $this->walk($nodes, 2)); + return sprintf($this->prepare(self::BLOCK_VAR, $level), $id_str, $this->walk($nodes, 2)); } const BLOCK_ARG = '
Don't escape html from blocks, removing unneeded variable assigns
bobthecow_mustache.php
train
php
9c7f273162c9986e3f3a2cc5a43a77b5910f2b5e
diff --git a/pinax/teams/admin.py b/pinax/teams/admin.py index <HASH>..<HASH> 100644 --- a/pinax/teams/admin.py +++ b/pinax/teams/admin.py @@ -9,7 +9,9 @@ from .hooks import hookset def members_count(obj): return obj.memberships.count() -members_count.short_description = _("Members Count") # noqa + + +members_count.short_description = _("Members Count") admin.site.register(
Go ahead and fix lint error
pinax_pinax-teams
train
py
ed41d719828c046bf4bc8e852b30e9bbe05e5f06
diff --git a/modules/reports.php b/modules/reports.php index <HASH>..<HASH> 100755 --- a/modules/reports.php +++ b/modules/reports.php @@ -89,7 +89,7 @@ if ( ! class_exists('Reports') ) { <p><?php _e('These monthly reports are generated from the site\'s HTTP access logs. They show every HTTP request of the site, including traffic from both humans and bots. Requests blocked at the firewall level (for example during a DDOS attack) are not logged. Log files can be accessed also directly on the server at <code>/data/slog/html/goaccess-*.html</code>.', 'seravo'); ?></p> </div> <div class="http-requests_info_loading" style="padding: 0px;"> - <table class="widefat fixed striped" style="width: 100%; border: none;"> + <table class="widefat striped" style="width: 100%; border: none;"> <thead> <tr> <th style="width: 25%;"><?php _e('Month', 'seravo'); ?></th>
Prevent overflow of HTTP stats table by removing fixed table layout
Seravo_seravo-plugin
train
php
d6db7f6b84380d7a74bdcc4f6fb50e5c7ef04e23
diff --git a/spec/request_pattern_spec.rb b/spec/request_pattern_spec.rb index <HASH>..<HASH> 100644 --- a/spec/request_pattern_spec.rb +++ b/spec/request_pattern_spec.rb @@ -32,7 +32,7 @@ describe RequestPattern do end - class RequestPattern + class WebMock::RequestPattern def match(request_signature) self.matches?(request_signature) end
Fixed RequestPattern spec to work with Ruby <I>
bblimke_webmock
train
rb
aa2887f71c779448b22e4de67ae68dbaf218b7b9
diff --git a/sos/report/plugins/rhui.py b/sos/report/plugins/rhui.py index <HASH>..<HASH> 100644 --- a/sos/report/plugins/rhui.py +++ b/sos/report/plugins/rhui.py @@ -27,6 +27,7 @@ class Rhui(Plugin, RedHatPlugin): "/var/log/rhui-subscription-sync.log", "/var/cache/rhui/*", "/root/.rhui/*", + "/var/log/rhui/*", ]) # skip collecting certificate keys self.add_forbidden_path("/etc/pki/rhui/**/*.key", recursive=True)
[rhui] New log folder. Included new log folder per Bugzilla <I>
sosreport_sos
train
py
e91b3b165e137022a43aad83ee8bbf34c40e875c
diff --git a/spyder/plugins/plots/widgets/figurebrowser.py b/spyder/plugins/plots/widgets/figurebrowser.py index <HASH>..<HASH> 100644 --- a/spyder/plugins/plots/widgets/figurebrowser.py +++ b/spyder/plugins/plots/widgets/figurebrowser.py @@ -536,7 +536,7 @@ class FigureViewer(QScrollArea): def get_scaling(self): """Get the current scaling of the figure in percent.""" - return self._scalestep**self._scalefactor*100 + return self.figcanvas.size().width() / self.figcanvas.fwidth * 100 def reset_original_image(self): """Reset the image to its original size."""
Calculate scaling from canvas size instead. This is to be able to update the value displayed correctly when "Fits plots to window" is checked.
spyder-ide_spyder
train
py
8e5556d759d755b0a4de1f27dfa03cf8a5290c5c
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ #!/usr/bin/env python -from distutils.core import setup +from setuptools import setup import scoop @@ -12,9 +12,9 @@ setup(name='scoop', author_email='[email protected]', url='http://scoop.googlecode.com', download_url='http://code.google.com/p/scoop/downloads/list', - requires=['greenlet (>=0.3.4)', - 'pyzmq (>=2.2.0)', - 'argparse (>=1.1)'], + install_requires=['greenlet>=0.3.4', + 'pyzmq>=2.2.0', + 'argparse>=1.1'], packages=['scoop'], platforms=['any'], keywords=['distributed algorithms', 'parallel programming'],
Changed distutils for distributed and added dependency resolve
soravux_scoop
train
py
25d0c02b715b8474607d2fd9bf372ce48711c5dd
diff --git a/mustache.js b/mustache.js index <HASH>..<HASH> 100644 --- a/mustache.js +++ b/mustache.js @@ -138,7 +138,7 @@ var Mustache = function() { translation_mode = { _mode: context['_mode'] }; } - return that.render(_(content, translation_mode), context, partials, true); + return _(content, translation_mode); }); },
don't render during i<I>n step, just do replacements
janl_mustache.js
train
js
8f08bacf47c8b70b147abad9fcd5ba5e7fad6a6a
diff --git a/spec/lib/insert-spec.js b/spec/lib/insert-spec.js index <HASH>..<HASH> 100644 --- a/spec/lib/insert-spec.js +++ b/spec/lib/insert-spec.js @@ -12,6 +12,7 @@ describe('Insert', function() { mongoat.MongoClient.connect('mongodb://localhost:27017/test') .then(function (db) { + db.dropDatabase(); _this.testDb = db; _this.testCol = db.collection('Person'); done();
Drop db on beforeAll tests
dial-once_node-mongoat
train
js
171b588d62094a0fe85d1dac3e1c28debe8e0b62
diff --git a/tests/test_loading.py b/tests/test_loading.py index <HASH>..<HASH> 100644 --- a/tests/test_loading.py +++ b/tests/test_loading.py @@ -3,6 +3,7 @@ from django.test.utils import override_settings from django_core.utils.loading import get_class_from_settings from django_core.utils.loading import get_class_from_settings_from_apps from django_core.utils.loading import get_class_from_settings_full_path +from django_core.utils.loading import get_function_from_settings from django_core.utils.loading import get_model_from_settings from tests.test_objects.models import TestModel @@ -31,3 +32,7 @@ class LoadingTestCase(TestCase): model = get_class_from_settings_from_apps(settings_key='MY_MODEL_SETTING') self.assertEqual(model, TestModel) + @override_settings(MY_SETTING='django_core.utils.loading.get_function_from_settings') + def test_get_function_from_settings(self): + func = get_function_from_settings(settings_key='MY_SETTING') + self.assertEqual(func, get_function_from_settings)
added test for loading function from settings string.
InfoAgeTech_django-core
train
py
f03851a73e66919e6036614caf2fedf8656383b4
diff --git a/test/test_fft.py b/test/test_fft.py index <HASH>..<HASH> 100644 --- a/test/test_fft.py +++ b/test/test_fft.py @@ -448,7 +448,7 @@ class _BaseTestFFTClass(unittest.TestCase): # output arrays; just reuse inarr and outexp (values won't # matter, we're just checking exceptions). output_args = {"delta_f": self.delta, "epoch": self.epoch} - _test_raise_excep_ifft(self,inarr,outexp) + _test_raise_excep_ifft(self,inarr,outexp,output_args) def test_rev_real_fs(self): for rev_dtype in [float32,float64]: @@ -473,7 +473,7 @@ class _BaseTestFFTClass(unittest.TestCase): # output arrays; just reuse inarr and outexp (values won't # matter, we're just checking exceptions). output_args = {"delta_t": self.delta, "epoch": self.epoch} - _test_raise_excep_ifft(self,inarr,outexp) + _test_raise_excep_ifft(self,inarr,outexp,output_args) def test_fwd_complex_arr(self): for fwd_dtype in [complex64,complex128]:
Add missing 'output_args' to fn call in test_fft
gwastro_pycbc
train
py
6c3513c0775e1e1e69a7c9e5c86255804fa3fb36
diff --git a/p2p/server.go b/p2p/server.go index <HASH>..<HASH> 100644 --- a/p2p/server.go +++ b/p2p/server.go @@ -943,9 +943,8 @@ func (srv *Server) setupConn(c *conn, flags connFlag, dialDest *enode.Node) erro } // If dialing, figure out the remote public key. - var dialPubkey *ecdsa.PublicKey if dialDest != nil { - dialPubkey = new(ecdsa.PublicKey) + dialPubkey := new(ecdsa.PublicKey) if err := dialDest.Load((*enode.Secp256k1)(dialPubkey)); err != nil { err = errors.New("dial destination doesn't have a secp256k1 public key") srv.log.Trace("Setting up connection failed", "addr", c.fd.RemoteAddr(), "conn", c.flags, "err", err)
p2p: reduce the scope of variable dialPubkey (#<I>). dialPubkey isn't used anywhere else after dialDest.Load, so it should be safe to restrict its scope to the if clause.
ethereum_go-ethereum
train
go
d86e51bba61c2144f6dba68159f7f1dcdcdd5a70
diff --git a/lib/sprout/remote_file_target.rb b/lib/sprout/remote_file_target.rb index <HASH>..<HASH> 100644 --- a/lib/sprout/remote_file_target.rb +++ b/lib/sprout/remote_file_target.rb @@ -49,10 +49,16 @@ module Sprout def load_unpack_or_ignore_archive if(!unpacked_files_exist?) if(!File.exists?(downloaded_file)) - write_archive download_archive + bytes = download_archive + write_archive bytes end - bytes = File.read downloaded_file + # If we *just* downloaded the file, + # use the bytes directly, otherwise + # read them off disk from a previous + # download attempt: + bytes ||= File.open(downloaded_file, 'r').read + if should_unpack?(bytes, md5) unpack_archive end
Sped up File read code for Windows
lukebayes_project-sprouts
train
rb
e113d6dc52df6f8017d8c7fb77004a0fb85c4763
diff --git a/lib/components/map/route-viewer-overlay.js b/lib/components/map/route-viewer-overlay.js index <HASH>..<HASH> 100644 --- a/lib/components/map/route-viewer-overlay.js +++ b/lib/components/map/route-viewer-overlay.js @@ -4,6 +4,16 @@ import { FeatureGroup, MapLayer, Polyline } from 'react-leaflet' import polyline from '@mapbox/polyline' +// helper fn to check if geometry has been populated for all patterns in route +const isGeomComplete = routeData => { + return ( + routeData && + routeData.patterns && + Object.values(routeData.patterns) + .every(ptn => typeof ptn.geometry !== 'undefined') + ) +} + class RouteViewerOverlay extends MapLayer { static propTypes = {} @@ -13,18 +23,6 @@ class RouteViewerOverlay extends MapLayer { componentWillUnmount () {} componentWillReceiveProps (nextProps) { - // helper fn to check if geometry has been populated for all patterns in route - const isGeomComplete = routeData => { - return ( - routeData && - routeData.patterns && - Object.values(routeData.patterns).reduce( - (acc, ptn) => acc && typeof ptn.geometry !== 'undefined', - true - ) - ) - } - // if pattern geometry just finished populating, update the map points if ( !isGeomComplete(this.props.routeData) &&
refactor(route-viewer-overlay): use Array.every for geom complete check
opentripplanner_otp-react-redux
train
js
b60b5e4105a14ed1f93bbb21bfebe9aa4438d9ae
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -27,7 +27,7 @@ module.exports = timeout /** * Create a new timeout middleware. * - * @param {number|string} time The timeout as a number of milliseconds or a string for `ms` + * @param {number|string} [time=5000] The timeout as a number of milliseconds or a string for `ms` * @param {object} [options] Additional options for middleware * @param {boolean} [options.respond=true] Automatically emit error when timeout reached * @return {function} middleware
docs: fix jsdoc that time is optional
expressjs_timeout
train
js
aee5b34f8f23508b478683ded48b068311ad7647
diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index <HASH>..<HASH> 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -103,10 +103,15 @@ def remove_task_app(project_directory): def remove_pycharm_dir(project_directory): - """Removes the .idea directory if PyCharm isn't going to be used""" + """ + Removes directories related to PyCharm + if it isn't going to be used + """ idea_dir_location = os.path.join(PROJECT_DIRECTORY, '.idea/') shutil.rmtree(idea_dir_location) + docs_dir_location = os.path.join(PROJECT_DIRECTORY, 'docs/pycharm/') + shutil.rmtree(docs_dir_location) # IN PROGRESS # def copy_doc_files(project_directory):
post_get hook removes docs for pycharm if it isn't used
pydanny_cookiecutter-django
train
py
0c4b08eb85e3e479bc3ea76e8cc01e0bd0772f66
diff --git a/hatenablog.rb b/hatenablog.rb index <HASH>..<HASH> 100644 --- a/hatenablog.rb +++ b/hatenablog.rb @@ -159,7 +159,7 @@ XML categories_tag = categories.inject('') do |s, c| s + "<category term=\"#{c}\" />\n" end - xml % [title, @user_id, content, categories_tag, draft] + xml % [title, author_name, content, categories_tag, draft] end end end
Fix the entry XML generating method
kymmt90_hatenablog
train
rb
2d063818242210334194c31c50816b421abca87c
diff --git a/builtin/providers/aws/resource_aws_directory_service_directory.go b/builtin/providers/aws/resource_aws_directory_service_directory.go index <HASH>..<HASH> 100644 --- a/builtin/providers/aws/resource_aws_directory_service_directory.go +++ b/builtin/providers/aws/resource_aws_directory_service_directory.go @@ -232,7 +232,7 @@ func resourceAwsDirectoryServiceDirectoryCreate(d *schema.ResourceData, meta int d.Id(), *ds.Stage) return ds, *ds.Stage, nil }, - Timeout: 10 * time.Minute, + Timeout: 30 * time.Minute, } if _, err := stateConf.WaitForState(); err != nil { return fmt.Errorf( @@ -355,7 +355,7 @@ func resourceAwsDirectoryServiceDirectoryDelete(d *schema.ResourceData, meta int d.Id(), *ds.Stage) return ds, *ds.Stage, nil }, - Timeout: 10 * time.Minute, + Timeout: 30 * time.Minute, } if _, err := stateConf.WaitForState(); err != nil { return fmt.Errorf(
Increase aws_directory_service_directory timeouts. According to the AWS docs, creating a MS directory could take up to <I> minutes.
hashicorp_terraform
train
go
3578c0faee107dc0faef164a4acf02b916caa2be
diff --git a/py/selenium/webdriver/chrome/options.py b/py/selenium/webdriver/chrome/options.py index <HASH>..<HASH> 100644 --- a/py/selenium/webdriver/chrome/options.py +++ b/py/selenium/webdriver/chrome/options.py @@ -166,6 +166,6 @@ class Options(object): if self.debugger_address: chrome_options["debuggerAddress"] = self.debugger_address - chrome["chromeOptions"] = chrome_options + chrome["goog:chromeOptions"] = chrome_options return chrome
Update python chromeOptions key for capabilities (#<I>)
SeleniumHQ_selenium
train
py
e48069bf123094a864d8ce124ae153ef4438d2f0
diff --git a/scripts/server.js b/scripts/server.js index <HASH>..<HASH> 100644 --- a/scripts/server.js +++ b/scripts/server.js @@ -25,6 +25,7 @@ const responseHeaderObj = { // object containing the name:hex pairs for nearestColor() const rgbColorsArr = []; +// prepare color array colors.forEach((c) => { const rgb = lib.hexToRgb(c.hex); // populates array needed for ClosestVector() @@ -68,6 +69,8 @@ const nameColors = (colorArr) => { distance: closestColor.distance, }; }); + + // closest.clearCache() }; /**
style($server): adds a few comments in server.js
meodai_color-names
train
js
d51c858939c76ede1b1c23890c406bab750f7aeb
diff --git a/scanpy/tests/test_datasets.py b/scanpy/tests/test_datasets.py index <HASH>..<HASH> 100644 --- a/scanpy/tests/test_datasets.py +++ b/scanpy/tests/test_datasets.py @@ -44,6 +44,16 @@ def test_pbmc3k(tmp_dataset_dir): @pytest.mark.internet +def test_pbmc3k_processed(tmp_dataset_dir): + with pytest.warns(None) as records: + adata = sc.datasets.pbmc3k_processed() + assert adata.shape == (2638, 1838) + assert adata.raw.shape == (2638, 13714) + + assert len(records) == 0 + + [email protected] def test_ebi_expression_atlas(tmp_dataset_dir): adata = sc.datasets.ebi_expression_atlas("E-MTAB-4888") assert adata.shape == (2315, 23852) @@ -70,7 +80,9 @@ def test_toggleswitch(): def test_pbmc68k_reduced(): - sc.datasets.pbmc68k_reduced() + with pytest.warns(None) as records: + sc.datasets.pbmc68k_reduced() + assert len(records) == 0 # Test that loading a dataset does not warn @pytest.mark.internet
Added tests for warnings from datasets
theislab_scanpy
train
py
eee841f3d9ab2ceb2e725f013a7e670537fc7d46
diff --git a/corelib/class.rb b/corelib/class.rb index <HASH>..<HASH> 100644 --- a/corelib/class.rb +++ b/corelib/class.rb @@ -20,8 +20,7 @@ class Class def new(*args, &block) obj = allocate() - #obj.initialize *args, &block - obj.initialize *args + obj.initialize *args, &block obj end diff --git a/lib/opal/parser/processor.rb b/lib/opal/parser/processor.rb index <HASH>..<HASH> 100644 --- a/lib/opal/parser/processor.rb +++ b/lib/opal/parser/processor.rb @@ -601,7 +601,7 @@ module Opal if @scope.uses_block? scope_name = (@scope.name ||= unique_temp) blk = "var $yield = #{scope_name}.proc || $noproc, $yself = $yield.$S, " - blk += "#{block_name} = #{scope_name}.proc, " if block_name + blk += "#{block_name} = #{scope_name}.proc || nil, " if block_name blk += "$break = $bjump; #{scope_name}.proc = null;" code = blk + code
Re-add block passing to Class#new
opal_opal
train
rb,rb
e6c6b5a15cc7b8ed831562b9bcae114a4ced3755
diff --git a/components/apps.js b/components/apps.js index <HASH>..<HASH> 100644 --- a/components/apps.js +++ b/components/apps.js @@ -499,7 +499,8 @@ SteamUser.prototype.redeemKey = function(key, callback) { var recipeDetails = BinaryKVParser.parse(body.purchase_receipt_info).MessageObject; if (recipeDetails.LineItemCount > 0) { recipeDetails.lineitems.forEach(function(pkg) { - packageList[pkg.PackageID] = pkg.ItemDescription; + var packageID = pkg.PackageID || pkg.packageID || pkg.packageid; + packageList[packageID] = pkg.ItemDescription; }); }
Fix packageid being undefined sometimes when redeeming keys (fixes #<I>)
DoctorMcKay_node-steam-user
train
js
48deacc83dbf3343061a2afa48a399c51dc37f19
diff --git a/landsat/landsat.py b/landsat/landsat.py index <HASH>..<HASH> 100755 --- a/landsat/landsat.py +++ b/landsat/landsat.py @@ -254,7 +254,7 @@ def main(args): def exit(message): print(message) - sys.exit() + sys.exit(0) def package_installed(package):
Added exit code 0 for successful operations
developmentseed_landsat-util
train
py
cc980f2d32f414c1ad6aa49d38d14b932c89d3eb
diff --git a/python/dllib/src/bigdl/dllib/keras/optimizers.py b/python/dllib/src/bigdl/dllib/keras/optimizers.py index <HASH>..<HASH> 100644 --- a/python/dllib/src/bigdl/dllib/keras/optimizers.py +++ b/python/dllib/src/bigdl/dllib/keras/optimizers.py @@ -105,3 +105,20 @@ class AdamWeightDecay(OptimMethod, ZooKerasCreator): epsilon, weight_decay) self.bigdl_type = bigdl_type + + +class PolyEpochDecay(ZooKerasCreator): + """ + A learning rate decay policy, where the effective learning rate + follows a polynomial decay, to be zero by the max_epochs. + Calculation: init_lr * (1 - epoch/max_iteration) ^ (power) + + + :param power: The coefficient of decay. + :param max_epochs: The maximum number of epochs when lr becomes zero. + + >>> poly = PolyEpochDecay(0.5, 5) + creating: createZooKerasPolyEpochDecay + """ + def __init__(self, power, max_epochs, bigdl_type="float"): + JavaValue.__init__(self, None, bigdl_type, power, max_epochs)
Add Polynomial Decay based on epochs (#<I>) * poly on epoch * doc and ut * style
intel-analytics_BigDL
train
py
e222b61bd6a9ad88448e900aedde3ea805e98d9d
diff --git a/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java b/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java +++ b/src/main/java/com/amashchenko/maven/plugin/gitflow/GitFlowReleaseFinishMojo.java @@ -195,7 +195,7 @@ public class GitFlowReleaseFinishMojo extends AbstractGitFlowMojo { gitCheckout(gitFlowConfig.getDevelopmentBranch()); gitMerge(releaseBranch, releaseRebase, releaseMergeNoFF, - releaseMergeFFOnly); + false); } // get next snapshot version
releaseMergeFFOnly is not used when merging release to develop. Typically the develop branch moves on while the release is being prepared. Therefore it does not make sense to require the use of fast-forward when merging to the develop branch. On the master branch, however, it is useful to be able to require the use of fast-forward to make sure that the final commit on the release branch ends up in the master branch unmodified.
aleksandr-m_gitflow-maven-plugin
train
java
a8f08e765b5c8c827407794bd0d5ead7c0e38bf3
diff --git a/src/runners/CollectionRunner.js b/src/runners/CollectionRunner.js index <HASH>..<HASH> 100644 --- a/src/runners/CollectionRunner.js +++ b/src/runners/CollectionRunner.js @@ -44,6 +44,7 @@ var CollectionRunner = jsface.Class([AbstractRunner, Options, EventEmitter], { }, this); // Start the runner + RequestRunner.resetIndex(); RequestRunner.setDelay(this.opts.delay); if (!isNaN(this.opts.requestTimeout) && this.opts.requestTimeout % 1 === 0) { diff --git a/src/runners/RequestRunner.js b/src/runners/RequestRunner.js index <HASH>..<HASH> 100644 --- a/src/runners/RequestRunner.js +++ b/src/runners/RequestRunner.js @@ -70,6 +70,10 @@ var RequestRunner = jsface.Class([Queue, EventEmitter], { this.addToQueue(request); }, + resetIndex: function() { + this._currentIndex = -1; + }, + /** * Starts the RequestRunner going to each request in the queue. * @memberOf RequestRunner
Multiple iterations working with setNextRequest
postmanlabs_newman
train
js,js
361dce1ffc2773c6aac8512fd8dc636d4eda7166
diff --git a/pipeline/compressors/__init__.py b/pipeline/compressors/__init__.py index <HASH>..<HASH> 100644 --- a/pipeline/compressors/__init__.py +++ b/pipeline/compressors/__init__.py @@ -64,6 +64,8 @@ class Compressor(object): def compile_templates(self, paths): compiled = "" + if not paths: + return compiled namespace = settings.PIPELINE_TEMPLATE_NAMESPACE base_path = self.base_path(paths) for path in paths:
don't output templates when there are no templates
jazzband_django-pipeline
train
py
9f547fe6a30d544f52f8201ba89cff36f79d792d
diff --git a/setuptools/tests/test_virtualenv.py b/setuptools/tests/test_virtualenv.py index <HASH>..<HASH> 100644 --- a/setuptools/tests/test_virtualenv.py +++ b/setuptools/tests/test_virtualenv.py @@ -1,3 +1,4 @@ +import distutils.command import glob import os import sys @@ -134,3 +135,14 @@ def test_test_command_install_requirements(bare_virtualenv, tmpdir): 'python setup.py test -s test', )).format(tmpdir=tmpdir)) assert tmpdir.join('success').check() + + +def test_no_missing_dependencies(bare_virtualenv): + """ + Quick and dirty test to ensure all external dependencies are vendored. + """ + for command in ('upload',):#sorted(distutils.command.__all__): + bare_virtualenv.run(' && '.join(( + 'cd {source}', + 'python setup.py {command} -h', + )).format(command=command, source=SOURCE_DIR))
add a test to catch unvendored dependencies
pypa_setuptools
train
py
eb85bc7f887c70ea4ff66db978e37e3ed46b7ba2
diff --git a/transport_plugins/jlink/iotile_transport_jlink/jlink.py b/transport_plugins/jlink/iotile_transport_jlink/jlink.py index <HASH>..<HASH> 100644 --- a/transport_plugins/jlink/iotile_transport_jlink/jlink.py +++ b/transport_plugins/jlink/iotile_transport_jlink/jlink.py @@ -40,7 +40,8 @@ class JLinkAdapter(DeviceAdapter): self._mux_func = None self._channel = None self._control_thread = None - self.jlink = pylink.JLink() + self.jlink = None + self._parse_port(port) if on_scan is not None: @@ -130,6 +131,7 @@ class JLinkAdapter(DeviceAdapter): raise ArgumentError("Missing device name or alias, specify using device=name in port string or -c device=name in connect_direct or debug command", known_devices=[x for x in viewkeys(DEVICE_ALIASES)]) try: + self.jlink = pylink.JLink() self.jlink.open(serial_no=self._jlink_serial) self.jlink.set_tif(pylink.enums.JLinkInterfaces.SWD) self.jlink.connect(self._device_info.jlink_name)
move pylink declaration for easier test running
iotile_coretools
train
py
cdd077b75820b542a69b08ee3bf9c652f481c9f0
diff --git a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java index <HASH>..<HASH> 100644 --- a/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java +++ b/example/src/main/java/com/alibaba/otter/canal/example/kafka/CanalKafkaClientFlatMessageExample.java @@ -117,7 +117,7 @@ public class CanalKafkaClientFlatMessageExample { } for (FlatMessage message : messages) { long batchId = message.getId(); - int size = message.getData().size(); + int size = message.getData() == null ? 0 : message.getData().size(); if (batchId == -1 || size == 0) { // try { // Thread.sleep(1000);
FIX NPE When ddl event (#<I>)
alibaba_canal
train
java
dfdf5642fe0a7328d6e627dfee189c80e67ea3d9
diff --git a/provider/ec2/environ.go b/provider/ec2/environ.go index <HASH>..<HASH> 100644 --- a/provider/ec2/environ.go +++ b/provider/ec2/environ.go @@ -834,6 +834,17 @@ func (e *environ) Subnets(_ instance.Id) ([]network.SubnetInfo, error) { } results := make([]network.SubnetInfo, len(resp.Subnets), len(resp.Subnets)) + for i, subnet := range resp.Subnets { + // No VLANTag available + cidr := subnet.CIDRBlock + allocatableLow := network.DecimalToIP(network.IPToDecimal(start) + 4) + info := network.SubnetInfo{ + CIDR: cidr, + ProviderID: subnet.Id, + AllocatableIPLow: allocatableLow, + } + results[i] = info + } return results, nil }
Calculate AllocatableIPLow for SubnetInfo
juju_juju
train
go
f2b3b356404c934f1da10f8a70d85151f0696a53
diff --git a/internal/config/config.go b/internal/config/config.go index <HASH>..<HASH> 100644 --- a/internal/config/config.go +++ b/internal/config/config.go @@ -33,7 +33,7 @@ import ( var ( // Default sections - sectionDefaults = []string{"agent", "global_tags", "outputs", + sectionDefaults = []string{"global_tags", "agent", "outputs", "processors", "aggregators", "inputs"} // Default input plugins @@ -536,13 +536,13 @@ func printFilteredOutputs(outputFilters []string, commented bool) { } func printFilteredGlobalSections(sectionFilters []string) { - if sliceContains("agent", sectionFilters) { - fmt.Printf(agentConfig) - } - if sliceContains("global_tags", sectionFilters) { fmt.Printf(globalTagsConfig) } + + if sliceContains("agent", sectionFilters) { + fmt.Printf(agentConfig) + } } type printer interface {
Print global_tags first in sample configuration
influxdata_telegraf
train
go
2cbcfe29d1323b6e5a622d00d8d11da77dc38884
diff --git a/lib/coral/machine/fog.rb b/lib/coral/machine/fog.rb index <HASH>..<HASH> 100644 --- a/lib/coral/machine/fog.rb +++ b/lib/coral/machine/fog.rb @@ -236,10 +236,6 @@ class Fog < Plugin::Machine if ssh_results ssh_results.each do |result| - ui.info(result.stdout, { :prefix => false }) - ui.warn(result.stderr, { :prefix => false }) - ui.success(result.status) - results << { :status => result.status, :result => result.stdout.strip,
Removing debug printouts in the fog machine provider.
coralnexus_corl
train
rb
8ba8fc48219918274e22d3a4f81659c0e0a77041
diff --git a/salt/states/mdadm.py b/salt/states/mdadm.py index <HASH>..<HASH> 100644 --- a/salt/states/mdadm.py +++ b/salt/states/mdadm.py @@ -93,8 +93,8 @@ def present(name, can_assemble[dev] = __salt__['cmd.retcode'](cmd) == 0 if True in can_assemble.values() and False in can_assemble.values(): - in_raid = [x[0] for x in devices.items() if x[1]] - not_in_raid = [x[0] for x in devices.items() if not x[1]] + in_raid = sorted([x[0] for x in can_assemble.items() if x[1]]) + not_in_raid = sorted([x[0] for x in can_assemble.items() if not x[1]]) ret['comment'] = 'Devices are a mix of RAID constituents ({0}) and '\ 'non-RAID-constituents({1}).'.format(in_raid, not_in_raid) ret['result'] = False
Fixed a name reference in raid.assemble. Also, raid.assemble sorts the devices alphabetically if it needs to report that the RAID and non-RAID devices are mixed
saltstack_salt
train
py
afa31827ea5b4468b2f923ddb32ad1f8a5ef60fc
diff --git a/app/config/bootstrap.php b/app/config/bootstrap.php index <HASH>..<HASH> 100644 --- a/app/config/bootstrap.php +++ b/app/config/bootstrap.php @@ -131,9 +131,17 @@ Libraries::add('app'); // use lithium\g11n\Catalog; // // Catalog::config(array( -// 'runtime' => array('adapter' => 'Memory'), -// 'app' => array('adapter' => 'Gettext', 'path' => LITHIUM_APP_PATH . '/resources/po'), -// 'lithium' => array('adapter' => 'Gettext', 'path' => LITHIUM_LIBRARY_PATH . '/lithium/resources/po') +// 'runtime' => array( +// 'adapter' => 'Memory' +// ), +// 'app' => array( +// 'adapter' => 'Gettext', +// 'path' => LITHIUM_APP_PATH . '/extensions/g11n/data' +// ), +// 'lithium' => array( +// 'adapter' => 'Gettext', +// 'path' => LITHIUM_LIBRARY_PATH . '/lithium/g11n/data' +// ) // )); /**
Updating locations for g<I>n data in app bootstrap.
UnionOfRAD_framework
train
php
4bc394e799e0853c035e83f1d81edaac1f2d79e2
diff --git a/python/phonenumbers/__init__.py b/python/phonenumbers/__init__.py index <HASH>..<HASH> 100644 --- a/python/phonenumbers/__init__.py +++ b/python/phonenumbers/__init__.py @@ -143,7 +143,7 @@ from .phonenumbermatcher import PhoneNumberMatch, PhoneNumberMatcher, Leniency # Version number is taken from the upstream libphonenumber version # together with an indication of the version of the Python-specific code. -__version__ = "8.5.0" +__version__ = "8.5.1" __all__ = ['PhoneNumber', 'CountryCodeSource', 'FrozenPhoneNumber', 'REGION_CODE_FOR_NON_GEO_ENTITY', 'NumberFormat', 'PhoneNumberDesc', 'PhoneMetadata',
Prep for <I> release
daviddrysdale_python-phonenumbers
train
py
050454c90fa1eeeedb5c59a7dd147ca70a645645
diff --git a/gqltesting/testing.go b/gqltesting/testing.go index <HASH>..<HASH> 100644 --- a/gqltesting/testing.go +++ b/gqltesting/testing.go @@ -43,6 +43,9 @@ func RunTest(t *testing.T, test *Test) { test.Context = context.Background() } result := test.Schema.Exec(test.Context, test.Query, test.OperationName, test.Variables) + + checkErrors(t, test.ExpectedErrors, result.Errors) + // Verify JSON to avoid red herring errors. got, err := formatJSON(result.Data) if err != nil { @@ -53,8 +56,6 @@ func RunTest(t *testing.T, test *Test) { t.Fatalf("want: invalid JSON: %s", err) } - checkErrors(t, test.ExpectedErrors, result.Errors) - if !bytes.Equal(got, want) { t.Logf("got: %s", got) t.Logf("want: %s", want)
Update testing.go: check for errors before expected response
graph-gophers_graphql-go
train
go
f61a34231143f302e1c05ed295adbe6cb6e0aac9
diff --git a/command/format/state.go b/command/format/state.go index <HASH>..<HASH> 100644 --- a/command/format/state.go +++ b/command/format/state.go @@ -74,7 +74,11 @@ func State(opts *StateOpts) string { for _, k := range ks { v := m.OutputValues[k] p.buf.WriteString(fmt.Sprintf("%s = ", k)) - p.writeValue(v.Value, plans.NoOp, 0) + if v.Sensitive { + p.buf.WriteString("(sensitive value)") + } else { + p.writeValue(v.Value, plans.NoOp, 0) + } p.buf.WriteString("\n") } } diff --git a/command/format/state_test.go b/command/format/state_test.go index <HASH>..<HASH> 100644 --- a/command/format/state_test.go +++ b/command/format/state_test.go @@ -219,7 +219,7 @@ map_var = { "first" = "foo" "second" = "bar" } -sensitive_var = "secret!!!" +sensitive_var = (sensitive value) string_var = "string value"` func basicState(t *testing.T) *states.State {
Hide sensitive outputs in terraform show
hashicorp_terraform
train
go,go
730389c4460445bf59c7b6958ea6a54c165044ca
diff --git a/openquake/server/tests/functional_test.py b/openquake/server/tests/functional_test.py index <HASH>..<HASH> 100644 --- a/openquake/server/tests/functional_test.py +++ b/openquake/server/tests/functional_test.py @@ -117,8 +117,8 @@ class EngineServerTestCase(unittest.TestCase): tmpdb = '%s:%s' % (cls.tmpdb, cls.dbserverport) cls.fd, cls.errfname = tempfile.mkstemp() cls.dbs = subprocess.Popen( - [sys.executable, '-m', 'openquake.server.dbserver', tmpdb], - env=env, stderr=cls.fd) # redirect the server errors + [sys.executable, '-m', 'openquake.server.dbserver', + tmpdb, cls.errfname], env=env, stderr=cls.fd) cls.proc = subprocess.Popen( [sys.executable, '-m', 'openquake.server.manage', 'runserver', cls.hostport, '--noreload', '--nothreading', 'tmpdb=' + tmpdb],
Passed the log file to the dbserver in the functional tests. Former-commit-id: 1dcd5d<I>b<I>f<I>b<I>a<I>ed [formerly 3cab<I>c5d<I>ee<I>af9f<I>aea<I>e<I>b] Former-commit-id: <I>b0bbc<I>fbeba<I>c4f<I>e<I>
gem_oq-engine
train
py
1d14825f81e2ec191b85110d14fdd5604209345b
diff --git a/telemetry/telemetry/core/timeline/model.py b/telemetry/telemetry/core/timeline/model.py index <HASH>..<HASH> 100644 --- a/telemetry/telemetry/core/timeline/model.py +++ b/telemetry/telemetry/core/timeline/model.py @@ -43,10 +43,11 @@ class TimelineModel(object): if self._frozen: raise Exception("Cannot add events once recording is done") self._root_events.append(event) - self._all_events.extend( - event.GetAllChildrenRecursive(include_self=True)) def DidFinishRecording(self): + for event in self._root_events: + self._all_events.extend( + event.GetAllChildrenRecursive(include_self=True)) self._frozen = True def ImportTraces(self, traces, shift_world_to_zero=True):
Fixed generation of TimelineModel._all_events. - Events were added recursively to _all_events during AddEvent for root events - Asynchronous events that were added later on by the importer were not included. - Fixed by moving the recursive population of _all_events to DidFinishRecording BUG=<I> Review URL: <URL>
catapult-project_catapult
train
py
ec934f58d2f73492075c545d8e678ca04f7e61d8
diff --git a/src/check-types.js b/src/check-types.js index <HASH>..<HASH> 100644 --- a/src/check-types.js +++ b/src/check-types.js @@ -290,7 +290,7 @@ * */ function error (data) { - return Object.prototype.toString.call(data) === '[object Error]'; + return data instanceof Error || Object.prototype.toString.call(data) === '[object Error]'; } /** diff --git a/test/check-types.js b/test/check-types.js index <HASH>..<HASH> 100644 --- a/test/check-types.js +++ b/test/check-types.js @@ -366,6 +366,15 @@ assert.isTrue(check.error(new Error())); }); + test('error with derived error returns true', function () { + function DerivedError () { + Error.call(this); + } + DerivedError.prototype = new Error(); + DerivedError.prototype.constructor = DerivedError; + assert.isTrue(check.error(new DerivedError())); + }); + test('error with object returns false', function () { assert.isFalse(check.error({})); });
Recognise derived error objects.
philbooth_check-types.js
train
js,js
113eaeed73e0b1611154bad01d9b5f38910b1eaa
diff --git a/lib/ruboto/util/xml_element.rb b/lib/ruboto/util/xml_element.rb index <HASH>..<HASH> 100644 --- a/lib/ruboto/util/xml_element.rb +++ b/lib/ruboto/util/xml_element.rb @@ -212,7 +212,7 @@ module Ruboto if_else( "scriptInfo.getCallbackProcs() != null && scriptInfo.getCallbackProcs()[#{constant_string}] != null", [super_string] + ruby_call, - ['String rubyClassName = Script.toCamelCase(scriptInfo.getScriptName());'] + + ['String rubyClassName = scriptInfo.getRubyClassName();'] + if_else( # TODO(uwe): Remove defined?(rubyClassName) if we remove non-class-based class definitions "(Boolean)JRubyAdapter.runScriptlet(\"defined?(\" + rubyClassName + \") == 'constant' && \" + rubyClassName + \".instance_methods(false).any?{|m| m.to_sym == :#{snake_case_attribute}}\")",
Switch to get rubyClassName from scriptInfo
ruboto_ruboto
train
rb
68a3c485d3fa2f007e306db098865c967342633e
diff --git a/cherrypy/test/test_states.py b/cherrypy/test/test_states.py index <HASH>..<HASH> 100644 --- a/cherrypy/test/test_states.py +++ b/cherrypy/test/test_states.py @@ -101,7 +101,6 @@ class ServerStateTests(helper.CPWebCase): self.assertEqual(len(db_connection.threads), 0) # Test server start - cherrypy.server.quickstart(self.server_class) engine.start() self.assertEqual(engine.state, engine.states.STARTED)
Removing the last call to server.quickstart (which is deprecated).
cherrypy_cheroot
train
py
8941e36c484f6e228cb4dccd95c70455fac45335
diff --git a/src/lib/context.js b/src/lib/context.js index <HASH>..<HASH> 100644 --- a/src/lib/context.js +++ b/src/lib/context.js @@ -1,6 +1,8 @@ import { L10nError } from './errors'; import { format } from './resolver'; +const IntlObjects = new WeakMap(); + export class Context { constructor(env, langs, resIds) { this.langs = langs; @@ -105,7 +107,15 @@ export class Context { } _memoizeIntlObject(ctor, {code}, opts) { - return new ctor(code, opts); + const cache = IntlObjects.get(ctor) || {}; + const id = code + JSON.stringify(opts); + + if (!cache[id]) { + cache[id] = new ctor(code, opts); + IntlObjects.set(ctor, cache); + } + + return cache[id]; } }
Naive memoization of Intl formatters. This is a very naive implementation of memoization of Intl formatters which assumes opts are always passed in the same order. Doesn't affect node's performance, but I'm seeing an improvement on jsshell: format: mean: <I> (-<I>%), stdev: <I>, sample: <I>
l20n_l20n.js
train
js
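The commit above caches constructed Intl formatters per constructor, locale code, and serialized options, and its message notes the caveat that serialization assumes a stable option order. A rough Python sketch of the same memoization idea (the original is JavaScript; the names here are hypothetical, and sorting keys sidesteps the ordering caveat):

import json

_formatter_cache = {}

def memoized_formatter(ctor, code, opts):
    # Key the cache on the constructor, the locale code and the options.
    key = (ctor, code, json.dumps(opts, sort_keys=True))
    if key not in _formatter_cache:
        _formatter_cache[key] = ctor(code, **opts)
    return _formatter_cache[key]

class DummyFormatter:
    def __init__(self, code, minimumFractionDigits=0):
        self.code = code
        self.minimumFractionDigits = minimumFractionDigits

a = memoized_formatter(DummyFormatter, "en-US", {"minimumFractionDigits": 2})
b = memoized_formatter(DummyFormatter, "en-US", {"minimumFractionDigits": 2})
assert a is b   # the second call reuses the cached instance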
0578e2c5bf1186000b58a2f8411723550199a7dd
diff --git a/lib/easy_upnp/control_point/argument_validator.rb b/lib/easy_upnp/control_point/argument_validator.rb index <HASH>..<HASH> 100644 --- a/lib/easy_upnp/control_point/argument_validator.rb +++ b/lib/easy_upnp/control_point/argument_validator.rb @@ -58,6 +58,7 @@ module EasyUpnp class TypeValidator # Valid UPnP types for each ruby class RUBY_TYPE_TO_UPNP_TYPE = { + Array: %w{list}, Float: %w{r4 r8 number fixed.14.4 float}, Integer: %w{ui1 ui2 ui4 i1 i2 i4 int}, String: %w{char string bin.base64 bin.hex uri uuid}, diff --git a/lib/easy_upnp/control_point/client_wrapper.rb b/lib/easy_upnp/control_point/client_wrapper.rb index <HASH>..<HASH> 100644 --- a/lib/easy_upnp/control_point/client_wrapper.rb +++ b/lib/easy_upnp/control_point/client_wrapper.rb @@ -42,13 +42,13 @@ module EasyUpnp :'xmlns:u' => @urn }, }.merge(@call_options) - + if [email protected]? attrs = attrs.merge( cookies: HTTPI::Cookie.new(@cookies) ) end - + advanced_typecasting = @advanced_typecasting response = @client.call(action_name, attrs) do
Add list as an available UPnP type
sidoh_easy_upnp
train
rb,rb
da9b0f905c1337433f5c78255943d93032595d17
diff --git a/indra/tools/reading/wait_for_complete.py b/indra/tools/reading/wait_for_complete.py index <HASH>..<HASH> 100644 --- a/indra/tools/reading/wait_for_complete.py +++ b/indra/tools/reading/wait_for_complete.py @@ -63,7 +63,7 @@ if __name__ == '__main__': ) args = parser.parse_args() - from submit_reading_pipeline import wait_for_complete + from indra.tools.reading.submit_reading_pipeline import wait_for_complete job_list = None if args.job_list is not None:
Clarify path to wait_for_complete.
sorgerlab_indra
train
py
91d737e37c521d3d4a7c82c37a95d231af92a67c
diff --git a/angr/analyses/cfg/cfg_fast.py b/angr/analyses/cfg/cfg_fast.py index <HASH>..<HASH> 100644 --- a/angr/analyses/cfg/cfg_fast.py +++ b/angr/analyses/cfg/cfg_fast.py @@ -1462,6 +1462,12 @@ class CFGFast(ForwardAnalysis, CFGBase): # pylint: disable=abstract-method None ) + if not procedure.NO_RET: + # it returns + cfg_node.has_return = True + self._function_exits[current_function_addr].add(addr) + self._function_add_return_site(addr, current_function_addr) + return entries def _scan_irsb(self, addr, current_function_addr, previous_jumpkind, previous_src_node, previous_src_ins_addr,
CFGFast: Add return endpoints for SimProcedure functions.
angr_angr
train
py
3b26402c362e4864589b74bfc8a71f2e8c819608
diff --git a/src/ResolveIteratorCapableTrait.php b/src/ResolveIteratorCapableTrait.php index <HASH>..<HASH> 100644 --- a/src/ResolveIteratorCapableTrait.php +++ b/src/ResolveIteratorCapableTrait.php @@ -59,7 +59,10 @@ trait ResolveIteratorCapableTrait $i = 0; while (!$test($iterator) && $i < $limit) { - /* @var $iterator IteratorAggregate */ + if (!($iterator instanceof IteratorAggregate)) { + break; + } + $_it = $iterator->getIterator(); if ($iterator === $_it) { throw $this->_createOutOfRangeException(
Added test for `IteratorAggregate`. This is because the test function is not guaranteed to test for the `Iterator` interface.
Dhii_iterator-helper-base
train
php
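A loose Python analogue of the guard added above: stop unwrapping as soon as the current object can no longer be unwrapped, instead of assuming every intermediate value exposes getIterator(). Names are hypothetical, not the Dhii API:

def resolve_iterator(obj, test, limit=100):
    i = 0
    while not test(obj) and i < limit:
        get_inner = getattr(obj, "get_iterator", None)
        if get_inner is None:
            # Counterpart of the instanceof IteratorAggregate check: the
            # current value cannot be unwrapped any further, so stop here.
            break
        inner = get_inner()
        if inner is obj:
            raise ValueError("object resolves to itself")
        obj = inner
        i += 1
    return obj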
36379c6085cc098512d5d8366bb6bd4ddc518ead
diff --git a/spyderlib/widgets/projectexplorer.py b/spyderlib/widgets/projectexplorer.py index <HASH>..<HASH> 100644 --- a/spyderlib/widgets/projectexplorer.py +++ b/spyderlib/widgets/projectexplorer.py @@ -321,6 +321,8 @@ class Project(object): item_preceding, tree, include, exclude, show_all) parent_dirname = abspardir(dirname) parent = dirs[parent_dirname] + if item_preceding is None: + item_preceding = parent item_preceding = self.create_dir_item(dirname, parent, item_preceding, tree, include, exclude, show_all)
Project explorer: when refreshing the tree to show a new entry preceding the first item of a branch, this entry was added as the last item of the branch instead of the first one.
spyder-ide_spyder
train
py
0cd50e99affa6e325afd41bc75d243fe6fb4ece1
diff --git a/function/function.go b/function/function.go index <HASH>..<HASH> 100644 --- a/function/function.go +++ b/function/function.go @@ -456,7 +456,10 @@ func (f *Function) Build() (io.Reader, error) { if err := zip.AddFile(path, file); err != nil { return nil, err } - defer file.Close() + + if err := file.Close(); err != nil { + return nil, err + } } if err := zip.Close(); err != nil {
Remove deferring of file Close() for builds; it could quickly lead to exhausting file descriptors.
apex_apex
train
go
8aaa956a298d1cee30d11c1ad536919eb4213236
diff --git a/src/math.js b/src/math.js index <HASH>..<HASH> 100644 --- a/src/math.js +++ b/src/math.js @@ -83,10 +83,10 @@ math.bound = function( min, val, max ){ math.makeBoundingBox = function( bb ){ if( bb == null ){ return { - x1: 0, - y1: 0, - x2: 0, - y2: 0, + x1: Infinity, + y1: Infinity, + x2: -Infinity, + y2: -Infinity, w: 0, h: 0 };
fixes bb calcs for layers #<I> with negative x1, y1
cytoscape_cytoscape.js
train
js
ae2cf125c71b92bfa9a96c5fad6ee14ddcd57335
diff --git a/tests/test.py b/tests/test.py index <HASH>..<HASH> 100644 --- a/tests/test.py +++ b/tests/test.py @@ -88,7 +88,6 @@ class UidGidTest(unittest.TestCase): if os.getuid() != 0: return True - os.chown(self.pidfile, NOBODY_UID, NOBODY_GID) os.chown(self.logfile, NOBODY_UID, NOBODY_GID) os.system("python tests/daemon_uid_gid.py %s %s" % (self.pidfile, self.logfile))
Do not chown pidfile in tests
thesharp_daemonize
train
py
9ba0192d9acf8d3c3736d758911b03fcf24e6e65
diff --git a/test/functional/test_auth.rb b/test/functional/test_auth.rb index <HASH>..<HASH> 100755 --- a/test/functional/test_auth.rb +++ b/test/functional/test_auth.rb @@ -369,7 +369,7 @@ module LitleOnline 'orderSource'=>'ecommerce', 'card'=>{ 'type'=>'VI', - 'number' =>'4100800000000000', + 'number' =>'4100300000100000', 'expDate' =>'1210' }, }
Fixed test_auth so that card numbers asking for special responses follow sandbox documentation
Vantiv_litle-sdk-for-ruby
train
rb
901d18a3278311655eb4089612bbeac07d4d17a4
diff --git a/src/toil/batchSystems/mesos/batchSystem.py b/src/toil/batchSystems/mesos/batchSystem.py index <HASH>..<HASH> 100644 --- a/src/toil/batchSystems/mesos/batchSystem.py +++ b/src/toil/batchSystems/mesos/batchSystem.py @@ -355,7 +355,7 @@ class MesosBatchSystem(BatchSystemSupport, task.task_id.value, offer.id.value) remainingCores -= jobType.cores remainingMemory -= toMiB(jobType.memory) - remainingDisk -= jobType.disk + remainingDisk -= toMiB(jobType.disk) nextToLaunchIndex += 1 if self.jobQueues[jobType] and not runnableTasksOfType: log.debug('Offer %(offer)s not suitable to run the tasks with requirements '
Fix Mesos disk allocation (resolves #<I>)
DataBiosphere_toil
train
py
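The bug above is a unit mismatch: the offered disk is tracked in MiB while the job's disk requirement is in bytes, so the requirement has to be converted before subtracting, just as memory already was. A small illustration with hypothetical values:

def to_mib(num_bytes):
    return num_bytes / (1024 * 1024)

remaining_disk_mib = 2048.0
job_disk_bytes = 1 * 1024 ** 3      # a 1 GiB requirement

# Wrong: mixes bytes with MiB and wildly over-subtracts.
# remaining_disk_mib -= job_disk_bytes

# Right: convert first, as the patch does for disk (and as memory already did).
remaining_disk_mib -= to_mib(job_disk_bytes)
assert remaining_disk_mib == 1024.0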
4f809fda40488827c5c7d0e0394386c3f490af86
diff --git a/lib/dm-core/spec/shared/adapter_spec.rb b/lib/dm-core/spec/shared/adapter_spec.rb index <HASH>..<HASH> 100644 --- a/lib/dm-core/spec/shared/adapter_spec.rb +++ b/lib/dm-core/spec/shared/adapter_spec.rb @@ -51,7 +51,7 @@ share_examples_for 'An Adapter' do end after :all do - Object.send(:remove_const,'Heffalump') + Object.send(:remove_const,'Heffalump') if defined? ::Heffalump end if adapter_supports?(:create)
Be more defensive when removing Heffalump const in shared adapter specs
datamapper_dm-core
train
rb
61bbeb300ecf2d2e662ac5ffe7bf83e306a4bc89
diff --git a/marshmallow_jsonapi/fields.py b/marshmallow_jsonapi/fields.py index <HASH>..<HASH> 100644 --- a/marshmallow_jsonapi/fields.py +++ b/marshmallow_jsonapi/fields.py @@ -85,16 +85,20 @@ class Relationship(BaseRelationship): return None def add_resource_linkage(self, value): + def stringify(value): + if value is not None: + return str(value) + return value + if self.many: - included_data = [ - {'type': self.type_, - 'id': get_value_or_raise(self.id_field, each)} - for each in value - ] + included_data = [{ + 'type': self.type_, + 'id': stringify(get_value_or_raise(self.id_field, each)) + } for each in value] else: included_data = { 'type': self.type_, - 'id': get_value_or_raise(self.id_field, value) + 'id': stringify(get_value_or_raise(self.id_field, value)) } return included_data
The value of id members must always be strings unless empty
marshmallow-code_marshmallow-jsonapi
train
py
f470e2fc061dd68fdbde658274777f444c4a794d
diff --git a/site/kotlin-example-loader.js b/site/kotlin-example-loader.js index <HASH>..<HASH> 100644 --- a/site/kotlin-example-loader.js +++ b/site/kotlin-example-loader.js @@ -41,9 +41,9 @@ module.exports = function kotlinExampleLoader(source) { then(() => { fs.readFile(TMP_FILE_NAME, (e, buffer) => { handleError(e); - fs.readFile(TMP_SOURCE_MAP_FILE_NAME, (e, sourceMapBuffer) => { + fs.readFile(TMP_SOURCE_MAP_FILE_NAME, 'utf8', (e, sourceMap) => { handleError(e); - callback(null, buffer, sourceMapBuffer); + callback(null, buffer, sourceMap); cleanup(); }); });
RING-UI-CR-<I> decode sourcemap to string before passing it to webpack
JetBrains_ring-ui
train
js
d5bdb823d84c38ec6daad86fe524a7342c0b0858
diff --git a/examples/sampleserver.py b/examples/sampleserver.py index <HASH>..<HASH> 100644 --- a/examples/sampleserver.py +++ b/examples/sampleserver.py @@ -195,8 +195,10 @@ class ServerHandler(util.HTTPHandler): self.wfile.write(response.body) except: + import sys + sys.excepthook(*sys.exc_info()) self._headers(500) - raise KeyboardInterrupt + raise SystemExit def user(self): try:
[project @ Added debug info when the sample server gets an exception in handleOpenIDRequest]
openid_python-openid
train
py
8ff97e6b7c279334e417dbdb65e64d0de2656986
diff --git a/lib/shared.js b/lib/shared.js index <HASH>..<HASH> 100644 --- a/lib/shared.js +++ b/lib/shared.js @@ -12,7 +12,7 @@ const sha256File = require('sha256-file'); * @param {Object} serverless * @return {undefined} */ -function checkForAndDeleteMaxCacheVersions(options, serverless) { +function checkForAndDeleteMaxCacheVersions({ serverless, options, log }) { // If we're using the static cache, and we have static cache max versions enabled if ( options.useStaticCache && @@ -42,10 +42,17 @@ function checkForAndDeleteMaxCacheVersions(options, serverless) { rimraf.sync(files[i]); items++; } + // Log the number of cache files flushed - serverless.cli.log( - `Removed ${items} items from cache because of staticCacheMaxVersions` - ); + if (log) { + log.info( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } else { + serverless.cli.log( + `Removed ${items} items from cache because of staticCacheMaxVersions` + ); + } } } }
refactor: Adapt `shared` to modern logs
UnitedIncome_serverless-python-requirements
train
js
7a8388cc75677ec26f9fb6991be10fe0cd2410b2
diff --git a/app/presenters/hyrax/work_show_presenter.rb b/app/presenters/hyrax/work_show_presenter.rb index <HASH>..<HASH> 100644 --- a/app/presenters/hyrax/work_show_presenter.rb +++ b/app/presenters/hyrax/work_show_presenter.rb @@ -116,7 +116,7 @@ module Hyrax end def link_name - current_ability.can?(:read, id) ? to_s : 'File' + current_ability.can?(:read, id) ? to_s : 'Private' end def export_as_nt
Relabel the links for private child works from 'File'. "File" isn't an accurate description of a work. When a child work isn't readable, just list it as "Private" instead.
samvera_hyrax
train
rb
e8e76476b996bd4cd2e23ad6d2259b7f6400a7a3
diff --git a/test/specs/grapesjs/index.js b/test/specs/grapesjs/index.js index <HASH>..<HASH> 100644 --- a/test/specs/grapesjs/index.js +++ b/test/specs/grapesjs/index.js @@ -376,5 +376,16 @@ describe('GrapesJS', () => { editor = obj.init(config); expect(editor.Commands.get('export-template').test).toEqual(1); }); + + it('Dump unused css classes/selectors', () => { + config.fromElement = 1; + config.storageManager = { type: 0 }; + fixture.innerHTML = documentEl; + const editor = obj.init(config); + const css = editor.getCss({ dumpUnusedSelectors: 1 }); + const protCss = editor.getConfig().protectedCss; + expect(editor.getStyle().length).toEqual(2); + expect(css).toEqual(`${protCss}.test2{color:red;}.test3{color:blue;}`); + }); }); });
Added a unit test for dumping unused CSS selectors.
artf_grapesjs
train
js
48d9f5d20b6265930a9ddf9cad05285098952f46
diff --git a/umap/plot.py b/umap/plot.py index <HASH>..<HASH> 100644 --- a/umap/plot.py +++ b/umap/plot.py @@ -20,16 +20,16 @@ except ImportError: warn( """The umap.plot package requires extra plotting libraries to be installed. You can install these via pip using - + pip install umap-learn[plot] - + or via conda using - - conda install seaborn datashader bokeh holoviews + + conda install datashader bokeh holoviews colorcet """ ) raise ImportError( - "umap.plot requires matplotlib, seaborn, datashader and holoviews to be " + "umap.plot requires matplotlib, datashader, colorcet and holoviews to be " "installed" ) from None
Update import notes in plot.py
lmcinnes_umap
train
py
684ca55dbf78b657eee385665a51b430b395f805
diff --git a/aws/aws_test.go b/aws/aws_test.go index <HASH>..<HASH> 100644 --- a/aws/aws_test.go +++ b/aws/aws_test.go @@ -346,6 +346,9 @@ func Test_Examples(t *testing.T) { "@pulumi/cloud", "@pulumi/cloud-aws", }, + // #494: lambda tests are unexpectedly proposing and performing code changes + AllowEmptyPreviewChanges: true, + AllowEmptyUpdateChanges: true, ExtraRuntimeValidation: containersRuntimeValidator(fargateRegion), }, {
Allow changes in empty preview/update for long-running tests. (#<I>) The `containers` test uses a lambda that appears to suffer from the same unexpected no-op changes as the others in this repo. Investigating this is tracked by #<I>.
pulumi_pulumi-cloud
train
go
835966955fda2dd876e7f9c8dd1e5e79e4a166a8
diff --git a/openstack/resource_openstack_networking_port_v2.go b/openstack/resource_openstack_networking_port_v2.go index <HASH>..<HASH> 100644 --- a/openstack/resource_openstack_networking_port_v2.go +++ b/openstack/resource_openstack_networking_port_v2.go @@ -137,9 +137,8 @@ func resourceNetworkingPortV2() *schema.Resource { Elem: &schema.Resource{ Schema: map[string]*schema.Schema{ "ip_address": { - Type: schema.TypeString, - Required: true, - ValidateFunc: validation.SingleIP(), + Type: schema.TypeString, + Required: true, }, "mac_address": { Type: schema.TypeString,
Networking v2: Fix Allowed Address Pairs validation (#<I>) This change reverts a previous modification which restricted an allowed address pair's IP address to be a single IP address rather than a CIDR.
terraform-providers_terraform-provider-openstack
train
go
e3b136d5ae49e2117b8f7a5ccfc6d104a5df9554
diff --git a/objectrocket/auth.py b/objectrocket/auth.py index <HASH>..<HASH> 100644 --- a/objectrocket/auth.py +++ b/objectrocket/auth.py @@ -129,7 +129,7 @@ class Auth(bases.BaseAuthLayer): :rtype: dict """ # Attempt to authenticate. - url = '{}{}'.format(self._url, 'verify') + url = '{}{}/'.format(self._url, 'verify') resp = requests.post( url, json={'token': token}, diff --git a/tests/test_auth.py b/tests/test_auth.py index <HASH>..<HASH> 100644 --- a/tests/test_auth.py +++ b/tests/test_auth.py @@ -175,3 +175,21 @@ def test_auth_username_setter(client): client.auth._username = testval assert client.auth._username is testval assert orig_val is not testval + + [email protected] +def test_auth_verify_makes_expected_call(client): + token = 'testing' + expected_url = 'https://sjc-api.objectrocket.com/v2/tokens/verify/' + expected_user_data = {'testing': 'testing'} + responses.add( + responses.POST, + expected_url, + status=200, + body=json.dumps({'data': expected_user_data}), + content_type="application/json" + ) + + output = client.auth._verify(token) + + assert output == expected_user_data
Add test to ensure _verify functionality.
objectrocket_python-client
train
py,py
4e8043ef74d440d6ece751341664ab1bb3bddebd
diff --git a/scripts/ci/extract-changelog.py b/scripts/ci/extract-changelog.py index <HASH>..<HASH> 100755 --- a/scripts/ci/extract-changelog.py +++ b/scripts/ci/extract-changelog.py @@ -23,4 +23,4 @@ for i, line in enumerate(changelog): break if start != 0: - print ''.join(changelog[start+1:end]).strip() + print(''.join(changelog[start+1:end]).strip())
Fix script to extract changelog for Python 3
skydive-project_skydive
train
py
0106755ae459273e63d707dfb5649ab81b7af612
diff --git a/ghost/admin/Gruntfile.js b/ghost/admin/Gruntfile.js index <HASH>..<HASH> 100644 --- a/ghost/admin/Gruntfile.js +++ b/ghost/admin/Gruntfile.js @@ -34,7 +34,7 @@ module.exports = function(grunt) { shell: { 'npm-install': { - command: 'npm install' + command: 'yarn install' }, 'bower-install': {
🛠 switch to using yarn in our Grunt tasks (#<I>) no issue - use `yarn install` instead of `npm install` in our `grunt init` task (used in Ghost's `grunt init` via subgrunt)
TryGhost_Ghost
train
js
c6ca4c1f73186cc36796d52ec0027f9c2c1bb1c3
diff --git a/lib/waistband/result.rb b/lib/waistband/result.rb index <HASH>..<HASH> 100644 --- a/lib/waistband/result.rb +++ b/lib/waistband/result.rb @@ -13,6 +13,7 @@ module Waistband def respond_to_missing?(method_name, include_private = false) return true if @result_hash.keys.include?(method_name.to_s) + return true if @result_hash['_source'] && @result_hash['_source'].keys.include?(method_name.to_s) super end
better respond_to_missing in result
taskrabbit_waistband
train
rb
ad32c3821bb3f25e52cc527932c0af7eac0c6aac
diff --git a/src/Mover.php b/src/Mover.php index <HASH>..<HASH> 100644 --- a/src/Mover.php +++ b/src/Mover.php @@ -2,12 +2,14 @@ namespace CoenJacobs\Mozart; +use CoenJacobs\Mozart\Composer\Autoload\Autoloader; use CoenJacobs\Mozart\Composer\Autoload\Classmap; use CoenJacobs\Mozart\Composer\Autoload\NamespaceAutoloader; use CoenJacobs\Mozart\Composer\Package; use League\Flysystem\Adapter\Local; use League\Flysystem\Filesystem; use Symfony\Component\Finder\Finder; +use Symfony\Component\Finder\SplFileInfo; class Mover { @@ -95,10 +97,10 @@ class Mover /** * @param Package $package - * @param $autoloader - * @param $file - * @param $path - * @return mixed + * @param Autoloader $autoloader + * @param SplFileInfo $file + * @param string $path + * @return string */ public function moveFile(Package $package, $autoloader, $file, $path = '') {
Add types to method @params
coenjacobs_mozart
train
php
ad69e057709cfc9428393584e33937e03dd1aff4
diff --git a/src/server/pps/server/monitor.go b/src/server/pps/server/monitor.go index <HASH>..<HASH> 100644 --- a/src/server/pps/server/monitor.go +++ b/src/server/pps/server/monitor.go @@ -283,7 +283,7 @@ func (a *apiServer) monitorPipeline(pachClient *client.APIClient, pipelineInfo * return err } case <-pachClient.Ctx().Done(): - return context.DeadlineExceeded + return pachClient.Ctx().Err() } } }, backoff.NewInfiniteBackOff(), func(err error, d time.Duration) error {
Return correct error from monitorPipeline
pachyderm_pachyderm
train
go
b84b286ffb78ce5758ff9bfbf7288671fe65c6ca
diff --git a/examples/flashing_doge.py b/examples/flashing_doge.py index <HASH>..<HASH> 100644 --- a/examples/flashing_doge.py +++ b/examples/flashing_doge.py @@ -1,15 +1,16 @@ import pyfakewebcam -import scipy.misc as misc import numpy as np import time import timeit -cam = pyfakewebcam.FakeWebcam('/dev/video1', 640, 512) +from PIL import Image + +cam = pyfakewebcam.FakeWebcam('/dev/video1', 1280, 720) cam.print_capabilities() -im0 = misc.imread("doge1.jpg") -im1 = np.zeros((512,640,3), dtype=np.uint8) +im0 = np.array( Image.open("doge1.jpg") ) +im1 = np.zeros((720,1280,3), dtype=np.uint8) while True:
flashing_doge.py: changed resolution to <I>p
jremmons_pyfakewebcam
train
py
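The diff above swaps the removed scipy.misc.imread for Pillow plus NumPy; that replacement generalizes beyond this script. A self-contained version (the file name is just an example):

import numpy as np
from PIL import Image

def load_rgb(path):
    # Pillow + NumPy replacement for the removed scipy.misc.imread.
    with Image.open(path) as im:
        return np.array(im.convert("RGB"))   # H x W x 3, dtype uint8

frame = load_rgb("doge1.jpg")
print(frame.shape, frame.dtype)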
3f33e24bf851dc088081b64a80859458d54da312
diff --git a/src/ui/axis/src/orientedAxis.js b/src/ui/axis/src/orientedAxis.js index <HASH>..<HASH> 100644 --- a/src/ui/axis/src/orientedAxis.js +++ b/src/ui/axis/src/orientedAxis.js @@ -55,9 +55,9 @@ export default function orientAxis(AxisComponent, orientation) { const axisProps = omit(this.props, ['scale', 'scales']); return ( <AxisComponent + {...axisProps} tickValues={this.state.tickValues} scale={this.props.scale || getValue(this.props, AXIS_TYPE_TO_SCALE_PATH[orientation])} - {...axisProps} /> ); }
props provided by OrientedAxis to its wrapped Axis component should override any user-defined props
ihmeuw_ihme-ui
train
js
3c846dd689b40e21445c950a80606fdd3bd57edd
diff --git a/DependencyInjection/Configuration/Configuration.php b/DependencyInjection/Configuration/Configuration.php index <HASH>..<HASH> 100644 --- a/DependencyInjection/Configuration/Configuration.php +++ b/DependencyInjection/Configuration/Configuration.php @@ -82,7 +82,6 @@ class Configuration implements ConfigurationInterface ->fixXmlConfig('client') ->children() ->arrayNode('clients') - ->requiresAtLeastOneElement() ->useAttributeAsKey('alias', false) ->prototype('array') ->fixXmlConfig('dsn')
Remove requirement that at least one client be provided
snc_SncRedisBundle
train
php
10e289c26e311e2109e9a7204f4f88b1dda0faca
diff --git a/tests/test_layers.py b/tests/test_layers.py index <HASH>..<HASH> 100644 --- a/tests/test_layers.py +++ b/tests/test_layers.py @@ -240,7 +240,15 @@ def test_torch_layer(): assert not torch.allclose(model1(input), model2(input)) model2.load_state_dict(pickle.loads(pickle.dumps(model1.state_dict()))) assert torch.allclose(model1(input), model2(input)) - # TODO jittization + + # tracing (freezing) + model3 = torch.jit.trace(input)(model2) + assert torch.allclose(model1(input), model3(input)) + assert torch.allclose(model1(input + 1), model3(input + 1)) + + model4 = torch.jit.trace(input, optimize=True)(model2) + assert torch.allclose(model1(input), model4(input)) + assert torch.allclose(model1(input + 1), model4(input + 1)) def test_keras_layer():
Add testing of model freezing for pytorch
arogozhnikov_einops
train
py
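The new test above freezes a model with torch.jit.trace and checks that the traced module matches the eager one; the diff uses an older call style of torch.jit.trace. A sketch of the same check against the current trace(model, example_inputs) signature, using a toy model rather than anything from einops:

import torch
import torch.nn as nn

model = nn.Sequential(nn.Linear(4, 8), nn.ReLU(), nn.Linear(8, 2))
example = torch.randn(3, 4)

traced = torch.jit.trace(model, example)

assert torch.allclose(model(example), traced(example))
assert torch.allclose(model(example + 1), traced(example + 1))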