Columns:
hash: string, 40 characters
diff: string, 131 characters to 114k
message: string, 7 to 980 characters
project: string, 5 to 67 characters
split: string class, 1 distinct value
01ae025a094642cf65b858828afec4e5a18e727d
diff --git a/jbpm-services/jbpm-executor/src/main/java/org/jbpm/executor/impl/AbstractAvailableJobsExecutor.java b/jbpm-services/jbpm-executor/src/main/java/org/jbpm/executor/impl/AbstractAvailableJobsExecutor.java index <HASH>..<HASH> 100644 --- a/jbpm-services/jbpm-executor/src/main/java/org/jbpm/executor/impl/AbstractAvailableJobsExecutor.java +++ b/jbpm-services/jbpm-executor/src/main/java/org/jbpm/executor/impl/AbstractAvailableJobsExecutor.java @@ -207,10 +207,10 @@ public abstract class AbstractAvailableJobsExecutor { retryAdd = retryDelay.get(retryDelay.size()-1); } - request.setTime(new Date(System.currentTimeMillis() + retryAdd)); - request.setExecutions(request.getExecutions() + 1); + request.setTime(new Date(System.currentTimeMillis() + retryAdd)); logger.info("Retrying request ( with id {}) - delay configured, next retry at {}", request.getId(), request.getTime()); } + request.setExecutions(request.getExecutions() + 1); logger.debug("Retrying ({}) still available!", request.getRetries()); diff --git a/jbpm-services/jbpm-executor/src/test/java/org/jbpm/executor/BasicExecutorBaseTest.java b/jbpm-services/jbpm-executor/src/test/java/org/jbpm/executor/BasicExecutorBaseTest.java index <HASH>..<HASH> 100644 --- a/jbpm-services/jbpm-executor/src/test/java/org/jbpm/executor/BasicExecutorBaseTest.java +++ b/jbpm-services/jbpm-executor/src/test/java/org/jbpm/executor/BasicExecutorBaseTest.java @@ -252,6 +252,9 @@ public abstract class BasicExecutorBaseTest { List<RequestInfo> inErrorRequests = executorService.getInErrorRequests(new QueryContext()); assertEquals(1, inErrorRequests.size()); + + RequestInfo failedJob = inErrorRequests.get(0); + assertEquals(4, failedJob.getExecutions()); List<ErrorInfo> errors = executorService.getAllErrors(new QueryContext()); logger.info("Errors: {}", errors); @@ -408,6 +411,9 @@ public abstract class BasicExecutorBaseTest { List<RequestInfo> inErrorRequests = executorService.getInErrorRequests(new QueryContext()); assertEquals(1, inErrorRequests.size()); + + RequestInfo failedJob = inErrorRequests.get(0); + assertEquals(3, failedJob.getExecutions()); List<ErrorInfo> errors = executorService.getAllErrors(new QueryContext()); // Three retries means 4 executions in total 1(regular) + 2(retries)
RHBPMS-<I> - Number of job executions is not increased (#<I>)
kiegroup_jbpm
train
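The fix above moves the executions counter out of the delay-only branch so that every retry attempt is counted, whether or not a retry delay is configured. A minimal Python sketch of that control flow; the `Request` class and field names are illustrative stand-ins, not jBPM's API:

```python
import time


class Request:
    """Hypothetical stand-in for the executor's request record."""

    def __init__(self):
        self.executions = 1   # the original run
        self.retry_time = None


def schedule_retry(request, retry_delays_ms):
    # Compute the next run time only when retry delays are configured...
    if retry_delays_ms:
        delay = retry_delays_ms[min(len(retry_delays_ms) - 1, request.executions - 1)]
        request.retry_time = time.time() + delay / 1000.0
    # ...but count the execution unconditionally. Before the fix this
    # increment sat inside the `if`, so undelayed retries were not counted.
    request.executions += 1


req = Request()
schedule_retry(req, [])   # no delay configured
print(req.executions)     # 2 -- the retry is still counted
```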
7327c99529be9a23cce50106c0341db9ea1f26a1
diff --git a/tests/ci_depends.php b/tests/ci_depends.php index <HASH>..<HASH> 100644 --- a/tests/ci_depends.php +++ b/tests/ci_depends.php @@ -59,14 +59,6 @@ class PhpExtensions { ); /** - * After instantiation holds the current PHP version. - * - * @see http://php.net/phpversion - * @var string Current PHP version. - */ - protected $_phpVersion; - - /** * After instantiation holds the path to loaded PHP configuration file. * * @see http://php.net/php_ini_loaded_file @@ -80,7 +72,6 @@ class PhpExtensions { * @return void */ public function __construct() { - $this->_phpVersion = phpversion(); $this->_iniPath = php_ini_loaded_file(); } @@ -91,9 +82,10 @@ class PhpExtensions { if (isset($extension['require']['php'])) { $version = $extension['require']['php']; - if (!version_compare($this->_phpVersion, $version[1], $version[0])) { + + if (!version_compare(PHP_VERSION, $version[1], $version[0])) { $message = " => not installed, requires a PHP version %s %s (%s installed)\n"; - printf($message, $version[0], $version[1], $this->_phpVersion); + printf($message, $version[0], $version[1], PHP_VERSION); return; } }
Use PHP_VERSION instead of `phpversion()` in CI depends script.
UnionOfRAD_lithium
train
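The commit swaps a cached `phpversion()` call for the `PHP_VERSION` constant. The same idea in a Python sketch: read the interpreter's version directly instead of caching it on the object, and compare it against an `(operator, version)` requirement pair in the style of the CI script (the helper name and requirement format are our assumptions):

```python
import operator
import platform

OPS = {">": operator.gt, ">=": operator.ge, "<": operator.lt,
       "<=": operator.le, "==": operator.eq}


def version_satisfied(requirement):
    """requirement is an (op, version) pair, e.g. ('>=', '3.8')."""
    op, wanted = requirement
    current = tuple(int(part) for part in platform.python_version().split("."))
    return OPS[op](current, tuple(int(part) for part in wanted.split(".")))


if not version_satisfied((">=", "3.8")):
    print("=> not installed, requires a Python version >= 3.8")
```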
08f03553db25c94f511a194f7003447dcb85f84b
diff --git a/src/Support/helpers.php b/src/Support/helpers.php index <HASH>..<HASH> 100644 --- a/src/Support/helpers.php +++ b/src/Support/helpers.php @@ -119,7 +119,7 @@ if (! function_exists('dd')) { function inline($assetPath) { - preg_match('/^\/assets\/build\/(css|js)\/main\.(css|js)/', $assetPath, $matches); + preg_match('/^\/assets\/build\/(css|js)\/.*\.(css|js)/', $assetPath, $matches); if (!count($matches)) { throw new InvalidArgumentException("Given asset path is not valid: {$assetPath}");
Accept any asset filename in inline(), not just 'main'
tightenco_jigsaw
train
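The widened pattern replaces the literal `main` with `.*`, so any built CSS/JS asset passes the check. A quick Python sketch of the before and after, using the paths from the diff:

```python
import re

OLD = re.compile(r"^/assets/build/(css|js)/main\.(css|js)")
NEW = re.compile(r"^/assets/build/(css|js)/.*\.(css|js)")

path = "/assets/build/css/docs.css"
print(bool(OLD.match(path)))  # False -- only 'main.*' was accepted before
print(bool(NEW.match(path)))  # True  -- any asset name is accepted now
```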
3d86563be2725db6e928c3d176f3e8fdba27b906
diff --git a/src/toil/jobStores/azureJobStore.py b/src/toil/jobStores/azureJobStore.py index <HASH>..<HASH> 100644 --- a/src/toil/jobStores/azureJobStore.py +++ b/src/toil/jobStores/azureJobStore.py @@ -478,9 +478,9 @@ class AzureJobStore(AbstractJobStore): def _newFileID(self, sharedFileName=None): if sharedFileName is None: - ret = bytes(uuid.uuid4()) + ret = str(uuid.uuid4()) else: - ret = bytes(uuid.uuid5(self.sharedFileJobID, bytes(sharedFileName))) + ret = str(uuid.uuid5(self.sharedFileJobID, sharedFileName)) return ret.replace('-', '_') def _associateFileWithJob(self, jobStoreFileID, jobStoreID=None): diff --git a/src/toil/provisioners/gceProvisioner.py b/src/toil/provisioners/gceProvisioner.py index <HASH>..<HASH> 100644 --- a/src/toil/provisioners/gceProvisioner.py +++ b/src/toil/provisioners/gceProvisioner.py @@ -146,7 +146,7 @@ class GCEProvisioner(AbstractProvisioner): 'diskSizeGb' : leaderStorage } disk.update({'boot': True, 'autoDelete': True }) - name= 'l' + bytes(uuid.uuid4()) + name= 'l' + str(uuid.uuid4()) leader = self._gceDriver.create_node(name, leaderNodeType, imageType, location=self._zone, ex_service_accounts=sa_scopes, @@ -448,7 +448,7 @@ class GCEProvisioner(AbstractProvisioner): for i in range(number): name = 'wp' if ex_preemptible else 'wn' - name += bytes(uuid.uuid4()) #'%s-%03d' % (base_name, i) + name += str(uuid.uuid4()) #'%s-%03d' % (base_name, i) status = {'name': name, 'node_response': None, 'node': None} status_list.append(status) diff --git a/src/toil/test/jobStores/jobStoreTest.py b/src/toil/test/jobStores/jobStoreTest.py index <HASH>..<HASH> 100644 --- a/src/toil/test/jobStores/jobStoreTest.py +++ b/src/toil/test/jobStores/jobStoreTest.py @@ -937,7 +937,7 @@ class GoogleJobStoreTest(AbstractJobStoreTest.Test): @googleRetry def _createExternalStore(self): from google.cloud import storage - bucketName = b"import-export-test-" + bytes(uuid.uuid4()) + bucketName = ("import-export-test-" + str(uuid.uuid4())).encode('utf-8') storageClient = storage.Client() return storageClient.create_bucket(bucketName) diff --git a/src/toil/test/provisioners/gceProvisionerTest.py b/src/toil/test/provisioners/gceProvisionerTest.py index <HASH>..<HASH> 100644 --- a/src/toil/test/provisioners/gceProvisionerTest.py +++ b/src/toil/test/provisioners/gceProvisionerTest.py @@ -176,7 +176,7 @@ class GCEAutoscaleTest(AbstractGCEAutoscaleTest): def __init__(self, name): super(GCEAutoscaleTest, self).__init__(name) - self.clusterName = 'provisioner-test-' + bytes(uuid4()) + self.clusterName = 'provisioner-test-' + str(uuid4()) self.requestedLeaderStorage = 80 def setUp(self): @@ -264,7 +264,7 @@ class GCEAutoscaleTestMultipleNodeTypes(AbstractGCEAutoscaleTest): def __init__(self, name): super(GCEAutoscaleTestMultipleNodeTypes, self).__init__(name) - self.clusterName = 'provisioner-test-' + bytes(uuid4()) + self.clusterName = 'provisioner-test-' + str(uuid4()) def setUp(self): super(GCEAutoscaleTestMultipleNodeTypes, self).setUp() @@ -303,7 +303,7 @@ class GCERestartTest(AbstractGCEAutoscaleTest): def __init__(self, name): super(GCERestartTest, self).__init__(name) - self.clusterName = 'restart-test-' + bytes(uuid4()) + self.clusterName = 'restart-test-' + str(uuid4()) def setUp(self): super(GCERestartTest, self).setUp()
Don't make UUIDs into bytes directly
DataBiosphere_toil
train
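On Python 2, `bytes` was an alias for `str`, so `bytes(uuid.uuid4())` yielded the hyphenated text form; on Python 3 it raises `TypeError`. The fix uses `str(...)` and encodes explicitly only where raw bytes are needed, as in this sketch:

```python
import uuid

u = uuid.uuid4()

name = "provisioner-test-" + str(u)                        # portable text form
bucket = ("import-export-test-" + str(u)).encode("utf-8")  # bytes on demand

try:
    bytes(u)  # not portable: Python 3 cannot convert a UUID to bytes
except TypeError as exc:
    print("bytes(uuid) failed:", exc)
```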
232b204693351257ceb8d1fe542838067083fdb0
diff --git a/examples/mithril/js/app.js b/examples/mithril/js/app.js index <HASH>..<HASH> 100644 --- a/examples/mithril/js/app.js +++ b/examples/mithril/js/app.js @@ -4,12 +4,18 @@ import login from './models/login' window.m = m -if (!login.user) { - if (window.location.pathname !== '/login') - login.redirect = window.location.pathname + window.location.search - window.history.pushState(null, null, '/login') +let checkLogin = () => { + if (!login.user) { + console.warn('Unauthorized') + if (window.location.pathname !== '/login') + login.redirect = window.location.pathname + window.location.search + window.history.pushState(null, null, '/login') + } } +window.onhashchange = () => checkLogin() + window.onload = () => { m.route(document.body, '/404', routes) + checkLogin() }
add window.onhashchange event (#9)
porsager_wright
train
c21b9a5eb979b8935be0c8e8540cd1599cd11b52
diff --git a/sciunit/models/base.py b/sciunit/models/base.py index <HASH>..<HASH> 100644 --- a/sciunit/models/base.py +++ b/sciunit/models/base.py @@ -16,8 +16,6 @@ class Model(SciUnit): name = self.__class__.__name__ self.name = name self.params = params - if params is None: - params = {} super(Model, self).__init__() self.check_params()
Remove the if statement in Model constructor
scidash_sciunit
train
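The removed `if` was dead code: it ran after `self.params = params`, so it rebound only the local name and the attribute could still be `None`. A sketch of the pitfall and the conventional ordering; the class names are illustrative, not sciunit's:

```python
class Broken:
    def __init__(self, params=None):
        self.params = params      # attribute is already set...
        if params is None:
            params = {}           # ...so this rebinds only the local name


class Fixed:
    def __init__(self, params=None):
        if params is None:
            params = {}           # default first,
        self.params = params      # then assign


print(Broken().params)  # None
print(Fixed().params)   # {}
```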
1705d1f46c4433f785e4e33446b19ce5c4135f12
diff --git a/lib/jss/api_object/patch_source.rb b/lib/jss/api_object/patch_source.rb index <HASH>..<HASH> 100644 --- a/lib/jss/api_object/patch_source.rb +++ b/lib/jss/api_object/patch_source.rb @@ -304,7 +304,8 @@ module JSS super - @enabled = @init_data[:enabled] ? @init_data[:enabled] : DFT_ENABLED + @enabled = @init_data[:enabled].to_s.jss_to_bool + @enabled ||= false # derive the data not provided for this source type if @init_data[:endpoint] @@ -315,34 +316,35 @@ module JSS @ssl_enabled = url.scheme == HTTPS else @host_name = @init_data[:host_name] - @port = @init_data[:port] + @port = @init_data[:port].to_i @port ||= ssl_enabled? ? DFT_SSL_PORT : DFT_NO_SSL_PORT - @ssl_enabled = @init_data[:ssl_enabled].nil? ? DFT_SSL : @init_data[:ssl_enabled] + @ssl_enabled = @init_data[:ssl_enabled].to_s.jss_to_bool + @ssl_enabled ||= false @endpoint = "#{ssl_enabled ? HTTPS : HTTP}://#{host_name}:#{port}/" end end # init # Get a list of patch titles available from this Patch Source - # - # @return [Array<Hash{Symbol:String}>] One hash for each available title, with - # these keys: - # :name_id String - # :current_version String - # :publisher String - # :last_modified Time - # :app_name String + # @see JSS::PatchSource.available_titles # def available_titles self.class.available_titles id, api: api end + # Get a list of available name_id's for this patch source + # @see JSS::PatchSource.available_name_ids + # + def available_name_ids + self.class.available_name_ids id, api: api + end + # Delete this instance # This method is needed to override APIObject#delete def delete - case self.class - when JSS::PatchExternalSource + case self.class.name + when 'JSS::PatchExternalSource' super - when JSS::PatchInternalSource + when 'JSS::PatchInternalSource' raise JSS::UnsupportedError, 'PatchInteralSources cannot be deleted.' end end
PatchSource#available_name_ids, and better boolean init
PixarAnimationStudios_ruby-jss
train
52e0a45c3edc45a904bb96099b4835dc6fc20545
diff --git a/lib/websearchadminlib.py b/lib/websearchadminlib.py index <HASH>..<HASH> 100644 --- a/lib/websearchadminlib.py +++ b/lib/websearchadminlib.py @@ -2468,13 +2468,15 @@ def perform_checkcollectionstatus(colID, ln, confirm=0, callback='yes'): colID = int(colID) col_dict = dict(get_def_name('', "collection")) - collections = run_sql("SELECT id, name, dbquery FROM collection ORDER BY id") + collections = run_sql("SELECT id, name, dbquery, nbrecs FROM collection " + "ORDER BY id") - header = ['ID', 'Name', 'Query', 'Subcollections', 'Restricted', 'Hosted', 'I18N', 'Status'] + header = ['ID', 'Name','Query', 'Subcollections', 'Restricted', 'Hosted', + 'I18N', 'Status', 'Number of records'] rnk_list = get_def_name('', "rnkMETHOD") actions = [] - for (id, name, dbquery) in collections: + for (id, name, dbquery, nbrecs) in collections: reg_sons = len(get_col_tree(id, 'r')) vir_sons = len(get_col_tree(id, 'v')) status = "" @@ -2519,7 +2521,7 @@ def perform_checkcollectionstatus(colID, ln, confirm=0, callback='yes'): if status == "": status = """<b><span class="info">OK</span></b>""" - actions.append([id, """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&amp;ln=%s">%s</a>""" % (CFG_SITE_URL, id, ln, name), dbquery, subs, restricted, hosted, i8n, status]) + actions.append([id, """<a href="%s/admin/websearch/websearchadmin.py/editcollection?colID=%s&amp;ln=%s">%s</a>""" % (CFG_SITE_URL, id, ln, name), dbquery, subs, restricted, hosted, i8n, status, nbrecs]) output += tupletotable(header=header, tuple=actions)
WebSearch: collection status page and number of records * Display number of records on the collection status page in the WebSearch Admin interface.
inveniosoftware_invenio-records
train
b9f5ec49e0492dd1cd04f28ac935e27e3badd39c
diff --git a/creds.go b/creds.go index <HASH>..<HASH> 100644 --- a/creds.go +++ b/creds.go @@ -1,6 +1,7 @@ package tcclient import ( + "context" "crypto/hmac" "crypto/sha256" "encoding/base64" @@ -59,6 +60,8 @@ type Client struct { // HTTPClient is a ReducedHTTPClient to be used for the http call instead of // the DefaultHTTPClient. HTTPClient ReducedHTTPClient + // Context that aborts all requests with this client + Context context.Context } // Certificate represents the certificate used in Temporary Credentials. See diff --git a/http.go b/http.go index <HASH>..<HASH> 100644 --- a/http.go +++ b/http.go @@ -104,12 +104,20 @@ func (client *Client) Request(rawPayload []byte, method, route string, query url return nil, nil, err } } + // Set context if one is given + if client.Context != nil { + callSummary.HTTPRequest = callSummary.HTTPRequest.WithContext(client.Context) + } var resp *http.Response if client.HTTPClient != nil { resp, err = client.HTTPClient.Do(callSummary.HTTPRequest) } else { resp, err = defaultHTTPClient.Do(callSummary.HTTPRequest) } + // return cancelled error, if context was cancelled + if client.Context != nil && client.Context.Err() != nil { + return nil, nil, client.Context.Err() + } // b, e := httputil.DumpResponse(resp, true) // if e == nil { // fmt.Println(string(b))
Add support for aborting requests with context.Context
taskcluster_taskcluster-client-go
train
778d8d7b3ad8053992bb29315bac7689ec84e522
diff --git a/belpy/rdf_to_pysb.py b/belpy/rdf_to_pysb.py index <HASH>..<HASH> 100644 --- a/belpy/rdf_to_pysb.py +++ b/belpy/rdf_to_pysb.py @@ -118,8 +118,11 @@ class BelProcessor(object): """ % statement.format() res_evidence = self.g.query(q_evidence) for stmt in res_evidence: - evidence = stmt[0].format() - citation = stmt[1].format() + try: + evidence = stmt[0].format() + citation = stmt[1].format() + except KeyError: + warnings.warn('Problem converting evidence/citation string') # Query for all annotations of the statement q_annotations = prefixes + """
Catch unicode formatting errors in evidence query
sorgerlab_indra
train
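`str.format` treats any `{...}` in the text as a replacement field, so literals containing braces raise `KeyError` (unbalanced braces raise `ValueError` instead, which this patch does not catch). A sketch of the guarded conversion; the evidence string is a made-up example:

```python
import warnings

evidence = "complex of RAS and {GTP}"   # braces inside the source literal

try:
    text = evidence.format()            # raises KeyError: 'GTP'
except KeyError:
    warnings.warn("Problem converting evidence/citation string")
    text = None

print(text)  # None -- the error path was taken
```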
41468230274945e35f23093345fac01d21f88299
diff --git a/lib/graphql/query/arguments.rb b/lib/graphql/query/arguments.rb index <HASH>..<HASH> 100644 --- a/lib/graphql/query/arguments.rb +++ b/lib/graphql/query/arguments.rb @@ -19,6 +19,10 @@ module GraphQL argument_definitions.each do |key, _value| expose_as = argument_definitions[key].expose_as + + # Don't define a helper method if it would override something. + next if self.respond_to?(expose_as) + define_singleton_method expose_as do self[expose_as] end
Don't define a helper method if it would override an existing method
rmosolgo_graphql-ruby
train
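The Ruby guard uses `respond_to?` before `define_singleton_method`; the Python equivalent is a `hasattr` check before `setattr`. A sketch transposing the idea (the class is illustrative, not graphql-ruby's API):

```python
class Arguments:
    def __init__(self, values):
        self._values = values
        for key, value in values.items():
            # Don't define a helper if it would override something existing.
            if hasattr(self, key):
                continue
            setattr(self, key, value)

    def to_h(self):
        return dict(self._values)


args = Arguments({"first": 10, "to_h": "collides"})
print(args.first)   # 10 -- generated accessor
print(args.to_h())  # {'first': 10, 'to_h': 'collides'} -- method preserved
```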
f77a1b2e337d8b371c9708fd0c8d37a82e206813
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -80,14 +80,6 @@ var createServer = function(e, index) { return server; }; -//--------------------------------------------------------------- -//exit on ctrl-c -process.on('SIGINT', function() { - console.log("\n[ QUIT ]--> Caught interrupt signal on SERVER module"); - process.exit(); -}); -//--------------------------------------------------------------- - module.exports = function(torrent, opts) { if (!opts) opts = {}; if (opts.blocklist) {
Removed SIGINT handling; not required as it is the default behaviour
mafintosh_peerflix
train
f53ebd67ebf67160cca48b4c7b96ba8619d7e7fb
diff --git a/TYPO3.Neos/Tests/Functional/TypoScript/RenderingTest.php b/TYPO3.Neos/Tests/Functional/TypoScript/RenderingTest.php index <HASH>..<HASH> 100644 --- a/TYPO3.Neos/Tests/Functional/TypoScript/RenderingTest.php +++ b/TYPO3.Neos/Tests/Functional/TypoScript/RenderingTest.php @@ -238,9 +238,14 @@ class RenderingTest extends AbstractNodeTest { if ($debugMode) { $typoScriptRuntime->injectSettings(array('debugMode' => TRUE, 'rendering' => array('exceptionHandler' => 'TYPO3\TypoScript\Core\ExceptionHandlers\ThrowingHandler'))); } + $contentContext = $this->node->getContext(); + if (!$contentContext instanceof \TYPO3\Neos\Domain\Service\ContentContext) { + $this->fail('Node context must be of type ContentContext'); + } $typoScriptRuntime->pushContextArray(array( 'node' => $this->node, 'documentNode' => $this->node, + 'site' => $contentContext->getCurrentSiteNode(), 'fixturesDirectory' => __DIR__ . '/Fixtures' )); $output = $typoScriptRuntime->render('page1');
[TASK] Add site node to content context for rendering tests Functional tests might give wrong exceptions on failure because the exception handler expects a site context variable. Change-Id: I<I>c3c0d<I>e8e<I>c7e9eefec Releases: master, <I> Original-Commit-Hash: da6b<I>dd<I>c<I>bbb<I>ca<I>f7b<I>f2ef3fa2
neos_neos-development-collection
train
4a2e32790a782facec0a42ed3a2a948e6ec5611e
diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -362,3 +362,14 @@ current_release = subprocess.check_output(["git", "describe", "--abbrev=0"]).dec current_release_date = subprocess.check_output(["git", "log", "-1", "--tags", "--format=%ad", "--date=format:%e %b %Y"]).decode("ascii")[:-1] html_context = {'current_release': current_release, 'current_release_date': current_release_date} + + +def skip(app, what, name, obj, skip, options): + # Ensure constructors are documented + if name == "__init__": + return False + return skip + + +def setup(app): + app.connect('autodoc-skip-member', skip) diff --git a/instaloader.py b/instaloader.py index <HASH>..<HASH> 100755 --- a/instaloader.py +++ b/instaloader.py @@ -427,6 +427,9 @@ class Profile: """ An Instagram Profile. + Provides methods for accessing profile properties, as well as :meth:`Profile.get_posts` and for own profile + :meth:`Profile.get_saved_posts`. + Instances are linked to an :class:`Instaloader` instance. This class implements == and is hashable. """ def __init__(self, instaloader: 'Instaloader', identifier: Union[str, int]): @@ -603,6 +606,7 @@ class Instaloader: download_comments: Tristate = Tristate.no_extra_query, save_metadata: Tristate = Tristate.never, max_connection_attempts: int = 3): + """Instaloader.""" # configuration parameters self.user_agent = user_agent if user_agent is not None else default_user_agent()
Minor documentation improvements - List all __init__ methods in documentation - Further describe purpose of Profile in its docstring
instaloader_instaloader
train
ac3a696721cffc71e6be9899abe4caba3f741280
diff --git a/lib/Parser/DocumentParser.php b/lib/Parser/DocumentParser.php index <HASH>..<HASH> 100644 --- a/lib/Parser/DocumentParser.php +++ b/lib/Parser/DocumentParser.php @@ -17,6 +17,7 @@ use Doctrine\RST\Nodes\Node; use Doctrine\RST\Nodes\TableNode; use Doctrine\RST\Parser; use Doctrine\RST\Parser\Directive as ParserDirective; +use function chr; use function explode; use function sprintf; use function str_replace; @@ -169,6 +170,9 @@ class DocumentParser // Removing UTF-8 BOM $document = str_replace("\xef\xbb\xbf", '', $document); + // Replace \u00a0 with " " + $document = str_replace(chr(194) . chr(160), ' ', $document); + return $document; } diff --git a/lib/Span/SpanToken.php b/lib/Span/SpanToken.php index <HASH>..<HASH> 100644 --- a/lib/Span/SpanToken.php +++ b/lib/Span/SpanToken.php @@ -24,7 +24,6 @@ class SpanToken */ public function __construct(string $type, string $id, array $token) { - dump($type); $this->type = $type; $this->id = $id; $this->token = $token; diff --git a/tests/LiteralNestedInDirective/input/index.rst b/tests/LiteralNestedInDirective/input/index.rst index <HASH>..<HASH> 100644 --- a/tests/LiteralNestedInDirective/input/index.rst +++ b/tests/LiteralNestedInDirective/input/index.rst @@ -7,4 +7,3 @@ /** @var ClassLoader $loader */ $loader = require __DIR__.'/../vendor/autoload.php'; -
Replace \u<I>a0 with " "
doctrine_rst-parser
train
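`chr(194) . chr(160)` is the two-byte UTF-8 encoding of U+00A0 (no-break space). In Python, where strings are already Unicode, the same normalization alongside the BOM strip is a pair of replaces:

```python
document = "\ufeffTitle\u00a0with no-break spaces\u00a0inside"

document = document.replace("\ufeff", "")   # drop the UTF-8 BOM
document = document.replace("\u00a0", " ")  # normalize no-break spaces

print(document)  # 'Title with no-break spaces inside'
```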
3c79776eb9c8eef8344483c62e423e88f92a03e8
diff --git a/raiden/storage/sqlite.py b/raiden/storage/sqlite.py index <HASH>..<HASH> 100644 --- a/raiden/storage/sqlite.py +++ b/raiden/storage/sqlite.py @@ -67,17 +67,17 @@ class SnapshotRecord(NamedTuple): data: Any -def assert_sqlite_version() -> bool: +def assert_sqlite_version() -> bool: # pragma: no unittest if sqlite3.sqlite_version_info < SQLITE_MIN_REQUIRED_VERSION: return False return True def _sanitize_limit_and_offset(limit: int = None, offset: int = None) -> Tuple[int, int]: - if limit is not None and (not isinstance(limit, int) or limit < 0): + if limit is not None and (not isinstance(limit, int) or limit < 0): # pragma: no unittest raise InvalidNumberInput("limit must be a positive integer") - if offset is not None and (not isinstance(offset, int) or offset < 0): + if offset is not None and (not isinstance(offset, int) or offset < 0): # pragma: no unittest raise InvalidNumberInput("offset must be a positive integer") limit = -1 if limit is None else limit @@ -304,7 +304,9 @@ class SQLiteStorage: ) -> Optional[SnapshotRecord]: """ Get snapshots earlier than state_change with provided ID. """ - if not (state_change_identifier == "latest" or isinstance(state_change_identifier, int)): + if not ( + state_change_identifier == "latest" or isinstance(state_change_identifier, int) + ): # pragma: no unittest raise ValueError("from_identifier must be an integer or 'latest'") cursor = self.conn.cursor() @@ -480,7 +482,9 @@ class SQLiteStorage: if not (from_identifier == "latest" or isinstance(from_identifier, T_StateChangeID)): raise ValueError("from_identifier must be an integer or 'latest'") - if not (to_identifier == "latest" or isinstance(to_identifier, T_StateChangeID)): + if not ( + to_identifier == "latest" or isinstance(to_identifier, T_StateChangeID) + ): # pragma: no unittest raise ValueError("to_identifier must be an integer or 'latest'") cursor = self.conn.cursor() @@ -651,7 +655,7 @@ class SerializedSQLiteStorage: self.database = SQLiteStorage(database_path) self.serializer = serializer - def update_version(self) -> None: + def update_version(self) -> None: # pragma: no unittest self.database.update_version() def count_state_changes(self) -> int: diff --git a/raiden/tasks.py b/raiden/tasks.py index <HASH>..<HASH> 100644 --- a/raiden/tasks.py +++ b/raiden/tasks.py @@ -54,7 +54,7 @@ def _do_check_version(current_version: Tuple[str, ...]): return True -def check_version(current_version: str): +def check_version(current_version: str): # pragma: no unittest """ Check periodically for a new release """ app_version = parse_version(current_version) while True: @@ -95,7 +95,7 @@ def check_gas_reserve(raiden): # pragma: no unittest gevent.sleep(CHECK_GAS_RESERVE_INTERVAL) -def check_rdn_deposits(raiden, user_deposit_proxy: UserDeposit): +def check_rdn_deposits(raiden, user_deposit_proxy: UserDeposit): # pragma: no unittest """ Check periodically for RDN deposits in the user-deposits contract """ while True: rei_balance = user_deposit_proxy.effective_balance(raiden.address, "latest")
Mark some code to be ignored in coverage measurement - wrapper functions - missed typechecks
raiden-network_raiden
train
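`# pragma: no unittest` is a custom exclusion marker; coverage.py only honors markers that are registered under `exclude_lines` in its configuration, which this project is assumed to do. A minimal Python sketch of the marking side, with an illustrative version threshold:

```python
import sqlite3

SQLITE_MIN_REQUIRED_VERSION = (3, 9, 0)  # illustrative threshold


def assert_sqlite_version() -> bool:  # pragma: no unittest
    # Environment-dependent guard, excluded from unit-test coverage
    # because it only triggers on old SQLite builds.
    return sqlite3.sqlite_version_info >= SQLITE_MIN_REQUIRED_VERSION


print(assert_sqlite_version())
```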
1775e1cdc692ae08cfad7668b02807cea3bd4ed3
diff --git a/plugins/inputs/elasticsearch/elasticsearch.go b/plugins/inputs/elasticsearch/elasticsearch.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/elasticsearch/elasticsearch.go +++ b/plugins/inputs/elasticsearch/elasticsearch.go @@ -50,6 +50,7 @@ type clusterHealth struct { RelocatingShards int `json:"relocating_shards"` InitializingShards int `json:"initializing_shards"` UnassignedShards int `json:"unassigned_shards"` + DelayedUnassignedShards int `json:"delayed_unassigned_shards"` NumberOfPendingTasks int `json:"number_of_pending_tasks"` TaskMaxWaitingInQueueMillis int `json:"task_max_waiting_in_queue_millis"` ActiveShardsPercentAsNumber float64 `json:"active_shards_percent_as_number"` @@ -340,6 +341,7 @@ func (e *Elasticsearch) gatherClusterHealth(url string, acc telegraf.Accumulator "relocating_shards": healthStats.RelocatingShards, "initializing_shards": healthStats.InitializingShards, "unassigned_shards": healthStats.UnassignedShards, + "delayed_unassigned_shards": healthStats.DelayedUnassignedShards, "number_of_pending_tasks": healthStats.NumberOfPendingTasks, "task_max_waiting_in_queue_millis": healthStats.TaskMaxWaitingInQueueMillis, "active_shards_percent_as_number": healthStats.ActiveShardsPercentAsNumber, @@ -366,7 +368,7 @@ func (e *Elasticsearch) gatherClusterHealth(url string, acc telegraf.Accumulator acc.AddFields( "elasticsearch_indices", indexFields, - map[string]string{"index": name}, + map[string]string{"index": name, "name": healthStats.ClusterName}, measurementTime, ) } diff --git a/plugins/inputs/elasticsearch/elasticsearch_test.go b/plugins/inputs/elasticsearch/elasticsearch_test.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/elasticsearch/elasticsearch_test.go +++ b/plugins/inputs/elasticsearch/elasticsearch_test.go @@ -190,11 +190,11 @@ func TestGatherClusterHealthAlsoIndicesHealth(t *testing.T) { acc.AssertContainsTaggedFields(t, "elasticsearch_indices", v1IndexExpected, - map[string]string{"index": "v1"}) + map[string]string{"index": "v1", "name": "elasticsearch_telegraf"}) acc.AssertContainsTaggedFields(t, "elasticsearch_indices", v2IndexExpected, - map[string]string{"index": "v2"}) + map[string]string{"index": "v2", "name": "elasticsearch_telegraf"}) } func TestGatherClusterStatsMaster(t *testing.T) { diff --git a/plugins/inputs/elasticsearch/testdata_test.go b/plugins/inputs/elasticsearch/testdata_test.go index <HASH>..<HASH> 100644 --- a/plugins/inputs/elasticsearch/testdata_test.go +++ b/plugins/inputs/elasticsearch/testdata_test.go @@ -12,6 +12,7 @@ const clusterHealthResponse = ` "relocating_shards": 0, "initializing_shards": 0, "unassigned_shards": 0, + "delayed_unassigned_shards": 0, "number_of_pending_tasks": 0, "task_max_waiting_in_queue_millis": 0, "active_shards_percent_as_number": 100.0 @@ -30,6 +31,7 @@ const clusterHealthResponseWithIndices = ` "relocating_shards": 0, "initializing_shards": 0, "unassigned_shards": 0, + "delayed_unassigned_shards": 0, "number_of_pending_tasks": 0, "task_max_waiting_in_queue_millis": 0, "active_shards_percent_as_number": 100.0, @@ -69,6 +71,7 @@ var clusterHealthExpected = map[string]interface{}{ "relocating_shards": 0, "initializing_shards": 0, "unassigned_shards": 0, + "delayed_unassigned_shards": 0, "number_of_pending_tasks": 0, "task_max_waiting_in_queue_millis": 0, "active_shards_percent_as_number": 100.0,
Add cluster name tag to elasticsearch indices (#<I>)
influxdata_telegraf
train
df8d8f06c3b66ff7e44922fdc05f26d81f379128
diff --git a/lib/pubsub.js b/lib/pubsub.js index <HASH>..<HASH> 100644 --- a/lib/pubsub.js +++ b/lib/pubsub.js @@ -495,9 +495,9 @@ PubSub.prototype.getSubscriptions = function(data, callback) { .getChild('subscriptions') .children.forEach(function(subscription) { var sub = { - jid: self._getJid(subscription.attrs.jid), subscription: subscription.attrs.subscription, - } + } + if (subscription.attrs.jid) sub.jid = self._getJid(subscription.attrs.jid) if (subscription.attrs.node) sub.node = subscription.attrs.node if (subscription.attrs.subid) sub.id = subscription.attrs.subid subscriptions.push(sub)
Attempt to add JID if it exists as attribute
xmpp-ftw_xmpp-ftw-pubsub
train
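The fix builds the subscription record field by field, attaching `jid` only when the attribute is actually present. The same pattern in Python; the attribute dict mimics one parsed `<subscription/>` element:

```python
def parse_subscription(attrs):
    sub = {"subscription": attrs["subscription"]}
    # Attempt to add the JID only if it exists as an attribute.
    if attrs.get("jid"):
        sub["jid"] = attrs["jid"]
    if attrs.get("node"):
        sub["node"] = attrs["node"]
    if attrs.get("subid"):
        sub["id"] = attrs["subid"]
    return sub


print(parse_subscription({"subscription": "subscribed", "node": "news"}))
# {'subscription': 'subscribed', 'node': 'news'} -- no spurious 'jid' key
```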
750dae5f26b64e50f02e8b100b4c6e9f1a4c65d3
diff --git a/scratchapi.js b/scratchapi.js index <HASH>..<HASH> 100644 --- a/scratchapi.js +++ b/scratchapi.js @@ -240,7 +240,8 @@ Scratch.CloudSession.prototype._connect = function(cb) { this.connection = new WebSocket('wss://' + CLOUD_SERVER + '/', [], { headers: { - Cookie: 'scratchsessionsid=' + this.user.sessionId + ';' + cookie: 'scratchsessionsid=' + this.user.sessionId + ';', + origin: 'https://scratch.mit.edu' } } );
Fix cloud connections being rejected because of missing origin header
trumank_scratch-api
train
e48443a4a7ae68bd4e92a11be607beff7dd48dcc
diff --git a/modeshape-jcr/src/main/java/org/modeshape/jcr/JcrI18n.java b/modeshape-jcr/src/main/java/org/modeshape/jcr/JcrI18n.java index <HASH>..<HASH> 100644 --- a/modeshape-jcr/src/main/java/org/modeshape/jcr/JcrI18n.java +++ b/modeshape-jcr/src/main/java/org/modeshape/jcr/JcrI18n.java @@ -458,7 +458,8 @@ public final class JcrI18n { public static I18n federationNodeKeyDoesNotBelongToSource; public static I18n invalidProjectionPath; - + public static I18n invalidProjectionExpression; + static { try { I18n.initialize(JcrI18n.class); diff --git a/modeshape-jcr/src/main/java/org/modeshape/jcr/RepositoryConfiguration.java b/modeshape-jcr/src/main/java/org/modeshape/jcr/RepositoryConfiguration.java index <HASH>..<HASH> 100644 --- a/modeshape-jcr/src/main/java/org/modeshape/jcr/RepositoryConfiguration.java +++ b/modeshape-jcr/src/main/java/org/modeshape/jcr/RepositoryConfiguration.java @@ -2020,7 +2020,7 @@ public class RepositoryConfiguration { } externalPath = expressionMatcher.group(7); }else{ - throw new IllegalStateException("No match found!"); + throw new IllegalStateException(JcrI18n.invalidProjectionExpression.text(pathExpression)); } } diff --git a/modeshape-jcr/src/main/resources/org/modeshape/jcr/JcrI18n.properties b/modeshape-jcr/src/main/resources/org/modeshape/jcr/JcrI18n.properties index <HASH>..<HASH> 100644 --- a/modeshape-jcr/src/main/resources/org/modeshape/jcr/JcrI18n.properties +++ b/modeshape-jcr/src/main/resources/org/modeshape/jcr/JcrI18n.properties @@ -441,3 +441,5 @@ repositoryNotFound = Could not load or find a ModeShape repository named '{0}' u federationNodeKeyDoesNotBelongToSource = The node key '{0}' does not belong to the '{1}' source. Only nodes from that source are allowed. invalidProjectionPath = The path '{0}' is not a valid projection path +invalidProjectionExpression = The projection expression '{0}' does not match the expected format +
MODE-<I> Exception message was internationalized
ModeShape_modeshape
train
c46a07f8fcd9f92fe28c976e71a66e7d8b93f3c5
diff --git a/xchange-binance/src/main/java/org/knowm/xchange/binance/service/BinanceAccountService.java b/xchange-binance/src/main/java/org/knowm/xchange/binance/service/BinanceAccountService.java index <HASH>..<HASH> 100644 --- a/xchange-binance/src/main/java/org/knowm/xchange/binance/service/BinanceAccountService.java +++ b/xchange-binance/src/main/java/org/knowm/xchange/binance/service/BinanceAccountService.java @@ -6,6 +6,7 @@ import java.util.ArrayList; import java.util.Date; import java.util.List; import java.util.stream.Collectors; + import org.knowm.xchange.Exchange; import org.knowm.xchange.binance.BinanceErrorAdapter; import org.knowm.xchange.binance.dto.BinanceException; @@ -74,7 +75,7 @@ public class BinanceAccountService extends BinanceAccountServiceRaw implements A acc.balances.stream() .map(b -> new Balance(b.getCurrency(), b.getTotal(), b.getAvailable())) .collect(Collectors.toList()); - return new AccountInfo(new Wallet(balances)); + return new AccountInfo(new Date(acc.updateTime), new Wallet(balances)); } catch (BinanceException e) { throw BinanceErrorAdapter.adapt(e); } diff --git a/xchange-core/src/main/java/org/knowm/xchange/dto/account/AccountInfo.java b/xchange-core/src/main/java/org/knowm/xchange/dto/account/AccountInfo.java index <HASH>..<HASH> 100644 --- a/xchange-core/src/main/java/org/knowm/xchange/dto/account/AccountInfo.java +++ b/xchange-core/src/main/java/org/knowm/xchange/dto/account/AccountInfo.java @@ -5,9 +5,12 @@ import java.math.BigDecimal; import java.util.Arrays; import java.util.Collection; import java.util.Collections; +import java.util.Date; import java.util.HashMap; import java.util.Map; +import javax.annotation.Nullable; + /** * DTO representing account information * @@ -27,6 +30,11 @@ public final class AccountInfo implements Serializable { /** The wallets owned by this account */ private final Map<String, Wallet> wallets; + /** The timestamp at which this account information was generated. May be + * null if not provided by the exchange. */ + @Nullable + private final Date timestamp; + /** @see #AccountInfo(String, BigDecimal, Collection) */ public AccountInfo(Wallet... wallets) { @@ -35,6 +43,14 @@ public final class AccountInfo implements Serializable { this(null, null, wallets); } + /** @see #AccountInfo(String, BigDecimal, Collection, Date) */ + public AccountInfo(Date timestamp, Wallet... wallets) { + + // TODO when refactoring for separate feature interfaces, change this constructor to require at + // least two wallets + this(null, null, Arrays.asList(wallets), timestamp); + } + /** @see #AccountInfo(String, BigDecimal, Collection) */ public AccountInfo(Collection<Wallet> wallets) { @@ -61,9 +77,23 @@ public final class AccountInfo implements Serializable { * @param wallets the user's wallets */ public AccountInfo(String username, BigDecimal tradingFee, Collection<Wallet> wallets) { + this(username, tradingFee, wallets, null); + } + + + /** + * Constructs an {@link AccountInfo}. + * + * @param username the user name. + * @param tradingFee the trading fee. + * @param wallets the user's wallets + * @param timestamp the timestamp for the account snapshot. + */ + public AccountInfo(String username, BigDecimal tradingFee, Collection<Wallet> wallets, Date timestamp) { this.username = username; this.tradingFee = tradingFee; + this.timestamp = timestamp; if (wallets.size() == 0) { this.wallets = Collections.emptyMap(); @@ -125,6 +155,14 @@ public final class AccountInfo implements Serializable { return tradingFee; } + /** + * @return The timestamp at which this account information was generated. May be null if + * not provided by the exchange. + */ + public Date getTimestamp() { return timestamp; } + @Override public String toString() {
Add the account info timestamp to the generic API.
knowm_XChange
train
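The Java change threads an optional snapshot timestamp through the DTO, defaulting to null. A Python dataclass sketch of the same shape; field names follow the diff, while the `Wallet` stub is our simplification:

```python
from dataclasses import dataclass, field
from datetime import datetime
from typing import Dict, Optional


@dataclass(frozen=True)
class Wallet:
    balances: Dict[str, float] = field(default_factory=dict)


@dataclass(frozen=True)
class AccountInfo:
    username: Optional[str] = None
    trading_fee: Optional[float] = None
    wallets: Dict[str, Wallet] = field(default_factory=dict)
    # Timestamp at which this snapshot was generated; None when the
    # exchange does not provide one.
    timestamp: Optional[datetime] = None


info = AccountInfo(wallets={"spot": Wallet({"BTC": 0.5})},
                   timestamp=datetime.fromtimestamp(1_650_000_000))
print(info.timestamp)
```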
8055d4de1f13686473d91eb54688ffe1a1eb703b
diff --git a/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensor.java b/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensor.java index <HASH>..<HASH> 100644 --- a/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensor.java +++ b/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensor.java @@ -4,7 +4,6 @@ import com.google.common.primitives.Ints; import io.improbable.keanu.tensor.TensorShape; import io.improbable.keanu.tensor.bool.BooleanTensor; import io.improbable.keanu.tensor.intgr.IntegerTensor; -import lombok.AllArgsConstructor; import org.apache.commons.lang3.ArrayUtils; import org.apache.commons.math3.analysis.function.Sigmoid; import org.apache.commons.math3.linear.BlockRealMatrix; @@ -84,6 +83,38 @@ public class JVMDoubleTensor extends DoubleTensor { return new JVMDoubleTensor(buffer, new long[]{n, n}); } + public static JVMDoubleTensor arange(double start, double end) { + return arange(start, end, 1.0); + } + + public static JVMDoubleTensor arange(double start, double end, double stepSize) { + int steps = (int) Math.ceil((end - start) / stepSize); + double[] buffer = new double[steps]; + + double position = start; + for (int i = 0; i < buffer.length; i++) { + buffer[i] = position; + position += stepSize; + } + + return new JVMDoubleTensor(buffer, new long[]{buffer.length}); + } + + public static JVMDoubleTensor linspace(double start, double end, int numberOfPoints) { + + double stepSize = (end - start) / numberOfPoints; + + double[] buffer = new double[numberOfPoints]; + + double position = start; + for (int i = 0; i < buffer.length; i++) { + buffer[i] = position; + position += stepSize; + } + + return new JVMDoubleTensor(buffer, new long[]{buffer.length}); + } + private double[] newBuffer() { return new double[buffer.length]; } @@ -753,15 +784,7 @@ public class JVMDoubleTensor extends DoubleTensor { @Override public DoubleTensor clamp(DoubleTensor min, DoubleTensor max) { - double[] newBuffer = newBuffer(); - double[] minBuffer = min.asFlatDoubleArray(); - double[] maxBuffer = max.asFlatDoubleArray(); - - for (int i = 0; i < buffer.length; i++) { - newBuffer[i] = Math.max(minBuffer[i], Math.min(maxBuffer[i], buffer[i])); - } - - return new JVMDoubleTensor(newBuffer, shapeCopy()); + return duplicate().clampInPlace(min, max); } @Override diff --git a/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensorFactory.java b/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensorFactory.java index <HASH>..<HASH> 100644 --- a/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensorFactory.java +++ b/keanu-project/src/main/java/io/improbable/keanu/tensor/dbl/JVMDoubleTensorFactory.java @@ -34,17 +34,17 @@ public class JVMDoubleTensorFactory implements DoubleTensorFactory { @Override public DoubleTensor linspace(double start, double end, int numberOfPoints) { - return null; + return JVMDoubleTensor.linspace(start, end, numberOfPoints); } @Override public DoubleTensor arange(double start, double end) { - return null; + return JVMDoubleTensor.arange(start, end); } @Override public DoubleTensor arange(double start, double end, double stepSize) { - return null; + return JVMDoubleTensor.arange(start, end, stepSize); } @Override
Add arange and linspace to JVMDoubleTensor
improbable-research_keanu
train
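Pure-Python equivalents of the two factory methods in the diff. Note that the Java `linspace` spaces points by `(end - start) / numberOfPoints`, so unlike NumPy's default the endpoint is excluded; this sketch mirrors that choice:

```python
import math


def arange(start, end, step=1.0):
    steps = math.ceil((end - start) / step)
    out, position = [], start
    for _ in range(steps):
        out.append(position)
        position += step
    return out


def linspace(start, end, num_points):
    step = (end - start) / num_points   # endpoint-exclusive, as in the diff
    out, position = [], start
    for _ in range(num_points):
        out.append(position)
        position += step
    return out


print(arange(0.0, 5.0, 2.0))   # [0.0, 2.0, 4.0]
print(linspace(0.0, 1.0, 4))   # [0.0, 0.25, 0.5, 0.75]
```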
9e0035e08de16c217f0dd7a4e44d92d6395bd06f
diff --git a/jsonschema/tests/test_jsonschema_test_suite.py b/jsonschema/tests/test_jsonschema_test_suite.py index <HASH>..<HASH> 100644 --- a/jsonschema/tests/test_jsonschema_test_suite.py +++ b/jsonschema/tests/test_jsonschema_test_suite.py @@ -93,23 +93,6 @@ else: return -if sys.version_info < (3, 7): - message = "datetime.date.fromisoformat is new in 3.7+" - - def missing_date_fromisoformat(test): - return skip( - message=message, - subject="date", - description="invalidates non-padded month dates", - )(test) or skip( - message=message, - subject="date", - description="invalidates non-padded day dates", - )(test) -else: - def missing_date_fromisoformat(test): - return - if sys.version_info < (3, 9): message = "Rejecting leading zeros is 3.9+" allowed_leading_zeros = skip( @@ -171,7 +154,6 @@ TestDraft3 = DRAFT3.to_unittest_testcase( format_checker=draft3_format_checker, skip=lambda test: ( narrow_unicode_build(test) - or missing_date_fromisoformat(test) or missing_format(draft3_format_checker)(test) or complex_email_validation(test) or skip( @@ -200,7 +182,6 @@ TestDraft4 = DRAFT4.to_unittest_testcase( format_checker=draft4_format_checker, skip=lambda test: ( narrow_unicode_build(test) - or missing_date_fromisoformat(test) or allowed_leading_zeros(test) or missing_format(draft4_format_checker)(test) or complex_email_validation(test) @@ -266,7 +247,6 @@ TestDraft6 = DRAFT6.to_unittest_testcase( format_checker=draft6_format_checker, skip=lambda test: ( narrow_unicode_build(test) - or missing_date_fromisoformat(test) or allowed_leading_zeros(test) or missing_format(draft6_format_checker)(test) or complex_email_validation(test) @@ -311,7 +291,6 @@ TestDraft7 = DRAFT7.to_unittest_testcase( format_checker=draft7_format_checker, skip=lambda test: ( narrow_unicode_build(test) - or missing_date_fromisoformat(test) or allowed_leading_zeros(test) or leap_second(test) or missing_format(draft7_format_checker)(test) @@ -394,7 +373,6 @@ TestDraft201909Format = DRAFT201909.to_unittest_testcase( format_checker=draft201909_format_checker, skip=lambda test: ( complex_email_validation(test) - or missing_date_fromisoformat(test) or allowed_leading_zeros(test) or leap_second(test) or missing_format(draft201909_format_checker)(test) @@ -431,7 +409,6 @@ TestDraft202012Format = DRAFT202012.to_unittest_testcase( format_checker=draft202012_format_checker, skip=lambda test: ( complex_email_validation(test) - or missing_date_fromisoformat(test) or allowed_leading_zeros(test) or leap_second(test) or missing_format(draft202012_format_checker)(test)
Remove <I>-related code.
Julian_jsonschema
train
5ffdd47979f3e82bc79330661d43155f761d06f7
diff --git a/lib/simple_navigation/core/configuration.rb b/lib/simple_navigation/core/configuration.rb index <HASH>..<HASH> 100644 --- a/lib/simple_navigation/core/configuration.rb +++ b/lib/simple_navigation/core/configuration.rb @@ -34,7 +34,7 @@ module SimpleNavigation @auto_highlight = true @consider_item_names_as_safe = true if defined?(ActiveSupport::Deprecation) - ActiveSupport::Deprecation.warn "simple-navigation: consider_item_names_as_safe will be set to false by default in 3.13.0 release, hence item names will be considered unsafe by default", caller + ActiveSupport::Deprecation.warn "simple-navigation: consider_item_names_as_safe will be set to false by default in 3.13.0 release, hence item names will be considered unsafe by default. See https://github.com/andi/simple-navigation/wiki", caller end end
+ Added link to deprecation warning
codeplant_simple-navigation
train
e9b55edc84e7e7f1661cb68a03a4fcc9b8b9b607
diff --git a/internal/service/elasticache/replication_group_test.go b/internal/service/elasticache/replication_group_test.go index <HASH>..<HASH> 100644 --- a/internal/service/elasticache/replication_group_test.go +++ b/internal/service/elasticache/replication_group_test.go @@ -1168,6 +1168,7 @@ func TestAccElastiCacheReplicationGroup_enableAuthTokenTransitEncryption(t *test } var rg elasticache.ReplicationGroup + rName := sdkacctest.RandomWithPrefix(acctest.ResourcePrefix) resourceName := "aws_elasticache_replication_group.test" resource.ParallelTest(t, resource.TestCase{ @@ -1177,7 +1178,7 @@ func TestAccElastiCacheReplicationGroup_enableAuthTokenTransitEncryption(t *test CheckDestroy: testAccCheckReplicationDestroy, Steps: []resource.TestStep{ { - Config: testAccReplicationGroup_EnableAuthTokenTransitEncryptionConfig(sdkacctest.RandString(10), sdkacctest.RandString(16)), + Config: testAccReplicationGroup_EnableAuthTokenTransitEncryptionConfig(rName, sdkacctest.RandString(16)), Check: resource.ComposeTestCheckFunc( testAccCheckReplicationGroupExists(resourceName, &rg), resource.TestCheckResourceAttr(resourceName, "transit_encryption_enabled", "true"),
Fix 'Error: invalid value for replication_group_id (must begin with a letter' in acceptance test.
terraform-providers_terraform-provider-aws
train
edff0226b09d3af3ac118a30939c6a7300dafa2f
diff --git a/domain/example/pom.xml b/domain/example/pom.xml index <HASH>..<HASH> 100644 --- a/domain/example/pom.xml +++ b/domain/example/pom.xml @@ -127,6 +127,17 @@ </execution> </executions> </plugin> + <plugin> + <groupId>org.apache.felix</groupId> + <artifactId>maven-bundle-plugin</artifactId> + <extensions>true</extensions> + <configuration> + <instructions> + <Import-Package>org.openengsb.core.api.model, *</Import-Package> + <Delegation-Annotations>true</Delegation-Annotations> + </instructions> + </configuration> + </plugin> </plugins> </build> @@ -143,6 +154,11 @@ <version>3.0.0-SNAPSHOT</version> <scope>provided</scope> </dependency> + <dependency> + <groupId>org.openengsb.labs.delegation</groupId> + <artifactId>org.openengsb.labs.delegation.service</artifactId> + <scope>provided</scope> + </dependency> </dependencies> </project> diff --git a/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleRequestModel.java b/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleRequestModel.java index <HASH>..<HASH> 100644 --- a/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleRequestModel.java +++ b/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleRequestModel.java @@ -18,7 +18,9 @@ package org.openengsb.domain.example.model; import org.openengsb.core.api.ekb.annotations.Model; +import org.openengsb.labs.delegation.service.Provide; +@Provide @Model public class ExampleRequestModel { private Integer id; diff --git a/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleResponseModel.java b/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleResponseModel.java index <HASH>..<HASH> 100644 --- a/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleResponseModel.java +++ b/domain/example/src/main/java/org/openengsb/domain/example/model/ExampleResponseModel.java @@ -18,7 +18,9 @@ package org.openengsb.domain.example.model; import org.openengsb.core.api.ekb.annotations.Model; +import org.openengsb.labs.delegation.service.Provide; +@Provide @Model public class ExampleResponseModel { private String result;
[OPENENGSB-<I>] fixed the NoClassDefFound exception of the example domain
openengsb_openengsb
train
09c9d506e3d58453d494c31de5790bea8fa7c0fb
diff --git a/master/buildbot/reporters/mail.py b/master/buildbot/reporters/mail.py index <HASH>..<HASH> 100644 --- a/master/buildbot/reporters/mail.py +++ b/master/buildbot/reporters/mail.py @@ -169,8 +169,7 @@ class MailNotifier(ReporterBase): return a @defer.inlineCallbacks - def createEmail(self, msgdict, builderName, title, results, builds=None, - patches=None, logs=None): + def createEmail(self, msgdict, title, results, builds=None, patches=None, logs=None): text = msgdict['body'] type = msgdict['type'] subject = msgdict['subject'] @@ -242,7 +241,6 @@ class MailNotifier(ReporterBase): body = merge_reports_prop(reports, 'body') subject = merge_reports_prop_take_first(reports, 'subject') type = merge_reports_prop_take_first(reports, 'type') - builderName = merge_reports_prop_take_first(reports, 'builder_name') results = merge_reports_prop(reports, 'results') builds = merge_reports_prop(reports, 'builds') users = merge_reports_prop(reports, 'users') @@ -257,8 +255,8 @@ class MailNotifier(ReporterBase): if not body.endswith(b"\n\n"): msgdict['body'] = body + b'\n\n' - m = yield self.createEmail(msgdict, builderName, self.master.config.title, - results, builds, patches, logs) + m = yield self.createEmail(msgdict, self.master.config.title, results, builds, + patches, logs) # now, who is this message going to? if worker is None: diff --git a/master/buildbot/test/unit/reporters/test_mail.py b/master/buildbot/test/unit/reporters/test_mail.py index <HASH>..<HASH> 100644 --- a/master/buildbot/test/unit/reporters/test_mail.py +++ b/master/buildbot/test/unit/reporters/test_mail.py @@ -68,7 +68,7 @@ class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, msgdict = create_msgdict(funnyChars) mn = yield self.setupMailNotifier('[email protected]') - m = yield mn.createEmail(msgdict, 'builder-name', 'project-name', SUCCESS, [build]) + m = yield mn.createEmail(msgdict, 'project-name', SUCCESS, [build]) cte_lines = [l for l in m.as_string().split("\n") if l.startswith('Content-Transfer-Encoding:')] @@ -114,8 +114,7 @@ class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, build = yield self.insert_build_finished(SUCCESS) msgdict = create_msgdict() mn = yield self.setupMailNotifier('[email protected]') - m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', - SUCCESS, [build]) + m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build]) try: m.as_string() @@ -130,8 +129,7 @@ class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, mn = yield self.setupMailNotifier('[email protected]', extraHeaders=dict(hhh=properties.Property('hhh'))) # add some Unicode to detect encoding problems - m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', - SUCCESS, [build]) + m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build]) txt = m.as_string() # note that the headers *are* rendered @@ -146,8 +144,7 @@ class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, builds[1]['builder']['name'] = 'builder2' msgdict = create_msgdict() mn = yield self.setupMailNotifier('[email protected]', extraHeaders=dict(hhh='vvv')) - m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', 'project-n\u00E5me', - SUCCESS, builds) + m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, builds) txt = m.as_string() # note that the headers are *not* rendered @@ -166,9 +163,7 @@ class TestMailNotifier(ConfigErrorsMixin, TestReactorMixin, mn = yield self.setupMailNotifier('[email protected]', generators=[BuildStatusGenerator(add_logs=True)]) - m = yield mn.createEmail(msgdict, 'builder-n\u00E5me', - 'project-n\u00E5me', SUCCESS, - [build], patches, logs) + m = yield mn.createEmail(msgdict, 'project-n\u00E5me', SUCCESS, [build], patches, logs) try: s = m.as_string()
reporters: Remove unused builder name argument from MailNotifier
buildbot_buildbot
train
fcdb943d7ed9033ca15016e4accc86374927e3f3
diff --git a/lib/bud/lattice-lib.rb b/lib/bud/lattice-lib.rb index <HASH>..<HASH> 100644 --- a/lib/bud/lattice-lib.rb +++ b/lib/bud/lattice-lib.rb @@ -343,7 +343,7 @@ class Bud::BagLattice < Bud::Lattice wrap_unsafe(rv) end - morph :mult do |k| + morph :card do |k| rv = @v[k] rv ||= 0 Bud::MaxLattice.new(rv) diff --git a/test/tc_lattice.rb b/test/tc_lattice.rb index <HASH>..<HASH> 100644 --- a/test/tc_lattice.rb +++ b/test/tc_lattice.rb @@ -1126,7 +1126,7 @@ class SimpleBag b_sum <= b1 + b2 b_sum <= b2 + b1 has_foo <= b_sum.contains?("foo") - done <= b_intersect.mult("foo").gt(2) + done <= b_intersect.card("foo").gt(2) end end
Rename lbag#mult to lbag#card, for consistency with SoCC draft.
bloom-lang_bud
train
221c673f31a553b5b889e7ae262c77143b24d65b
diff --git a/test/e2e/storage/volume_provisioning.go b/test/e2e/storage/volume_provisioning.go index <HASH>..<HASH> 100644 --- a/test/e2e/storage/volume_provisioning.go +++ b/test/e2e/storage/volume_provisioning.go @@ -818,6 +818,7 @@ var _ = utils.SIGDescribe("Dynamic Provisioning", func() { framework.KubeDescribe("GlusterDynamicProvisioner", func() { It("should create and delete persistent volumes [fast]", func() { + framework.SkipIfProviderIs("gke") By("creating a Gluster DP server Pod") pod := startGlusterDpServerPod(c, ns) serverUrl := "http://" + pod.Status.PodIP + ":8081"
Skip GlusterDynamicProvisioner test in GKE The GlusterDynamicProvisioner test will not work on GKE because the master node is in a different project and cannot talk to the pod running on the node that is used for the Gluster provisioner. So add the code to skip the test on GKE.
kubernetes_kubernetes
train
f1ab75e7c46e2ba784459c75b8ae6a0b1025ed7a
diff --git a/Classes/Handler/ErrorHandler.php b/Classes/Handler/ErrorHandler.php index <HASH>..<HASH> 100755 --- a/Classes/Handler/ErrorHandler.php +++ b/Classes/Handler/ErrorHandler.php @@ -64,7 +64,7 @@ class ErrorHandler public function injectSettings(array $settings): void { $this->settings = $settings; - $this->dsn = $settings['dsn'] ?? ''; + $this->dsn = $settings['dsn']; $this->transportClass = $settings['transportClass'] ?? ''; } @@ -77,7 +77,7 @@ class ErrorHandler return; } - $release = $this->settings['release'] ?? ''; + $release = $this->settings['release']; if (empty($release)) { $release = $this->getReleaseFromReleaseFile(); } @@ -85,12 +85,12 @@ class ErrorHandler $clientBuilder = ClientBuilder::create( [ 'dsn' => $this->dsn, - 'environment' => $this->settings['environment'] ?? '', + 'environment' => $this->settings['environment'], 'release' => $release, 'project_root' => FLOW_PATH_ROOT, - 'http_proxy' => $this->settings['http_proxy'] ?? '', + 'http_proxy' => $this->settings['http_proxy'], 'prefixes' => [FLOW_PATH_ROOT], - 'sample_rate' => $this->settings['sample_rate'] ?? 1, + 'sample_rate' => $this->settings['sample_rate'], 'in_app_exclude' => [ FLOW_PATH_ROOT . '/Packages/Application/PunktDe.Sentry.Flow/Classes/', FLOW_PATH_ROOT . '/Packages/Framework/Neos.Flow/Classes/Aop/', @@ -98,8 +98,8 @@ class ErrorHandler FLOW_PATH_ROOT . '/Packages/Framework/Neos.Flow/Classes/Log/', FLOW_PATH_ROOT . '/Packages/Libraries/neos/flow-log/' ], - 'default_integrations' => $this->settings['default_integrations'] ?? true, - 'attach_stacktrace' => $this->settings['attach_stacktrace'] ?? true, + 'default_integrations' => $this->settings['default_integrations'], + 'attach_stacktrace' => $this->settings['attach_stacktrace'], ] );
TASK: Remove unnecessary null coalescing operators
punktDe_sentry-flow
train
05aceafd33a5ea6160dc7347ee63b670c0a0fa09
diff --git a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/AggregationRenderer.java b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/AggregationRenderer.java index <HASH>..<HASH> 100644 --- a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/AggregationRenderer.java +++ b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/AggregationRenderer.java @@ -7,7 +7,6 @@ package org.hibernate.ogm.datastore.mongodb.query.parsing.impl; import java.util.Arrays; -import java.util.LinkedList; import java.util.List; import java.util.Locale; @@ -53,8 +52,8 @@ public class AggregationRenderer { } public List<Document> asDocumentPipeline() { - if ( aggregationType == Type.COUNT_DISTINCT && propertyPath != null ) { - return getDistinctCount(); + if ( aggregationType == Type.COUNT_DISTINCT ) { + return getDistinctCountGroup(); } else if ( aggregationType == Type.COUNT ) { return Arrays.asList( getCount() ); @@ -68,16 +67,31 @@ public class AggregationRenderer { Document groupOptions = new Document(); groupOptions.append( "_id", null ); groupOptions.append( PROJECTION_FIELD, new Document().append( aggregationTypeOperator, propertyPath ) ); + Document group = new Document(); group.append( "$group", groupOptions ); return group; } - private List<Document> getDistinctCount() { - List<Document> distinctCount = new LinkedList<>(); - distinctCount.add( new Document().append( "$group", new Document().append( "_id", propertyPath ) ) ); - distinctCount.add( getCount() ); - return distinctCount; + /* + * A distinct query on the id looks something like: + * + * db.Author.aggregate([{ $group : { _id: "$_id" }}, {$group : {_id:null, n: {$sum: 1}}}, {$project: {n: 1, _id:0}}]) + * + * This method returns the "groups" part. + */ + private List<Document> getDistinctCountGroup() { // propertyPath is null when the query is a select on an entity // In this case we can make the distinct on the id of the document Object groupColumn = propertyPath == null ? "$_id" : propertyPath; + Document group = new Document(); + group.append( "$group", new Document( "_id", groupColumn ) ); + + Document groupSum = new Document(); + groupSum.append( "$group", new Document() .append( "_id", propertyPath ) .append( PROJECTION_FIELD, new Document( "$sum", 1 ) ) ); + return Arrays.asList( group, groupSum ); } private Document getCount() { diff --git a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBQueryRendererDelegate.java b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBQueryRendererDelegate.java index <HASH>..<HASH> 100644 --- a/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBQueryRendererDelegate.java +++ b/mongodb/src/main/java/org/hibernate/ogm/datastore/mongodb/query/parsing/impl/MongoDBQueryRendererDelegate.java @@ -17,6 +17,7 @@ import java.util.Set; import org.bson.Document; import org.hibernate.engine.spi.SessionFactoryImplementor; import org.hibernate.hql.ast.origin.hql.resolve.path.AggregationPropertyPath; +import org.hibernate.hql.ast.origin.hql.resolve.path.AggregationPropertyPath.Type; import org.hibernate.hql.ast.origin.hql.resolve.path.PropertyPath; import org.hibernate.hql.ast.spi.EntityNamesResolver; import org.hibernate.hql.ast.spi.SingleEntityQueryBuilder; @@ -183,7 +184,7 @@ public class MongoDBQueryRendererDelegate extends SingleEntityQueryRendererDeleg @Override public void activateAggregation(AggregationPropertyPath.Type aggregationType) { - if ( aggregationType == AggregationPropertyPath.Type.COUNT ) { + if ( aggregationType == Type.COUNT || aggregationType == Type.COUNT_DISTINCT ) { this.aggregation = new AggregationRenderer( aggregationType ); projections.add( aggregation.getAggregationProjection() ); }
OGM-<I> Support distinct on entity Example: SELECT(DISTINCT a) FROM Author a
hibernate_hibernate-ogm
train
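The rendered pipeline counts distinct values with two `$group` stages: collapse to one bucket per distinct key, then sum the buckets. With pymongo, the equivalent of the commit's `db.Author.aggregate(...)` example looks like this; a reachable local MongoDB with an `Author` collection is assumed:

```python
from pymongo import MongoClient

client = MongoClient()                # assumes a local mongod
authors = client["ogm_test"]["Author"]

# COUNT(DISTINCT a) on the entity itself: distinct on the document id.
pipeline = [
    {"$group": {"_id": "$_id"}},                  # one bucket per distinct value
    {"$group": {"_id": None, "n": {"$sum": 1}}},  # count the buckets
    {"$project": {"n": 1, "_id": 0}},
]
print(list(authors.aggregate(pipeline)))          # e.g. [{'n': 42}]
```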
85051d8af7abf010911ff01539bd0ae132f3c96e
diff --git a/src/main/java/com/jayway/maven/plugins/android/phase09package/ApklibMojo.java b/src/main/java/com/jayway/maven/plugins/android/phase09package/ApklibMojo.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/jayway/maven/plugins/android/phase09package/ApklibMojo.java +++ b/src/main/java/com/jayway/maven/plugins/android/phase09package/ApklibMojo.java @@ -66,6 +66,13 @@ public class ApklibMojo extends AbstractAndroidMojo private File ndkOutputDirectory; /** + * <p>Classifier to add to the artifact generated. If given, the artifact will be an attachment instead.</p> + * + * @parameter + */ + private String classifier; + + /** * Defines the architecture for the NDK build * * @parameter expression="${android.ndk.build.architecture}" default-value="armeabi" @@ -87,8 +94,16 @@ public class ApklibMojo extends AbstractAndroidMojo File outputFile = createApkLibraryFile(); - // Set the generated .apklib file as the main artifact (because the pom states <packaging>apklib</packaging>) - project.getArtifact().setFile( outputFile ); + if ( classifier == null ) + { + // Set the generated file as the main artifact (because the pom states <packaging>apklib</packaging>) + project.getArtifact().setFile( outputFile ); + } + else + { + // If there is a classifier specified, attach the artifact using that + projectHelper.attachArtifact( project, outputFile, classifier ); + } } /**
Added "classifier" parameter to the apklib goal to provide classifier support as is implemented for the apk goal.
simpligility_android-maven-plugin
train
4af252cce5f6d06f251e230d2c86becbdbddcb1c
diff --git a/shadowapi/src/main/java/org/robolectric/util/ReflectionHelpers.java b/shadowapi/src/main/java/org/robolectric/util/ReflectionHelpers.java index <HASH>..<HASH> 100644 --- a/shadowapi/src/main/java/org/robolectric/util/ReflectionHelpers.java +++ b/shadowapi/src/main/java/org/robolectric/util/ReflectionHelpers.java @@ -115,14 +115,17 @@ public class ReflectionHelpers { @SuppressWarnings("unchecked") public static <R> R getField(final Object object, final String fieldName) { try { - return traverseClassHierarchy(object.getClass(), NoSuchFieldException.class, new InsideTraversal<R>() { - @Override - public R run(Class<?> traversalClass) throws Exception { - Field field = traversalClass.getDeclaredField(fieldName); - field.setAccessible(true); - return (R) field.get(object); - } - }); + return traverseClassHierarchy( + object.getClass(), + NoSuchFieldException.class, + new InsideTraversal<R>() { + @Override + public R run(Class<?> traversalClass) throws Exception { + Field field = getDeclaredField(traversalClass, fieldName); + field.setAccessible(true); + return (R) field.get(object); + } + }); } catch (Exception e) { throw new RuntimeException(e); } @@ -137,15 +140,18 @@ public class ReflectionHelpers { */ public static void setField(final Object object, final String fieldName, final Object fieldNewValue) { try { - traverseClassHierarchy(object.getClass(), NoSuchFieldException.class, new InsideTraversal<Void>() { - @Override - public Void run(Class<?> traversalClass) throws Exception { - Field field = traversalClass.getDeclaredField(fieldName); - field.setAccessible(true); - field.set(object, fieldNewValue); - return null; - } - }); + traverseClassHierarchy( + object.getClass(), + NoSuchFieldException.class, + new InsideTraversal<Void>() { + @Override + public Void run(Class<?> traversalClass) throws Exception { + Field field = getDeclaredField(traversalClass, fieldName); + field.setAccessible(true); + field.set(object, fieldNewValue); + return null; + } + }); } catch (Exception e) { throw new RuntimeException(e); } @@ -161,7 +167,7 @@ public class ReflectionHelpers { */ public static void setField(Class<?> type, final Object object, final String fieldName, final Object fieldNewValue) { try { - Field field = type.getDeclaredField(fieldName); + Field field = getDeclaredField(type, fieldName); field.setAccessible(true); field.set(object, fieldNewValue); } catch (Exception e) { @@ -196,7 +202,7 @@ public class ReflectionHelpers { */ public static <R> R getStaticField(Class<?> clazz, String fieldName) { try { - return getStaticField(clazz.getDeclaredField(fieldName)); + return getStaticField(getDeclaredField(clazz, fieldName)); } catch (Exception e) { throw new RuntimeException(e); } @@ -226,7 +232,7 @@ public class ReflectionHelpers { */ public static void setStaticField(Class<?> clazz, String fieldName, Object fieldNewValue) { try { - setStaticField(clazz.getDeclaredField(fieldName), fieldNewValue); + setStaticField(getDeclaredField(clazz, fieldName), fieldNewValue); } catch (Exception e) { throw new RuntimeException(e); }
Use the new `getDeclaredField` helper in more parts of ReflectionHelpers This is more compatible with JDK <I>+. PiperOrigin-RevId: <I>
robolectric_robolectric
train
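The `traverseClassHierarchy` pattern above — try each class in turn until the field is declared somewhere — has a compact analogue in Python's MRO. A hedged sketch of the idea, not the Java API:

```python
# Walk the class hierarchy looking for a class-declared attribute,
# the moral equivalent of calling getDeclaredField per class.
def get_field(obj, name):
    for cls in type(obj).__mro__:
        if name in vars(cls):       # like Class.getDeclaredField on one class
            return getattr(obj, name)
    raise AttributeError(f"{name} not declared anywhere in the hierarchy")

class Base:
    answer = 42

class Child(Base):
    pass

print(get_field(Child(), "answer"))  # -> 42, found on Base, not Child
```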
2d393585d9983079736414ca98fd6ca295ce882e
diff --git a/cloudvolume/datasource/precomputed/spatial_index.py b/cloudvolume/datasource/precomputed/spatial_index.py index <HASH>..<HASH> 100644 --- a/cloudvolume/datasource/precomputed/spatial_index.py +++ b/cloudvolume/datasource/precomputed/spatial_index.py @@ -187,7 +187,7 @@ class SpatialIndex(object): # optimization is important for querying # entire datasets, which is contemplated # for shard generation. - if not bbox.contains_bbox(self.bounds): + if bbox.contains_bbox(self.bounds): labels.update( (int(label) for label in res.keys()) ) # fast path: 16% CPU else: for label, label_bbx in res.items():
fix: fast path check had a leftover `not` modifier in front of it from debugging
seung-lab_cloud-volume
train
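The one-character fix above flips a negated containment test. A tiny Python sketch of the intended fast path, with a stand-in Bbox class rather than cloud-volume's real one:

```python
# If the query box contains the index bounds entirely, every label in the
# index matches and the per-label intersection checks can be skipped.
class Bbox:
    def __init__(self, lo, hi):
        self.lo, self.hi = lo, hi
    def contains_bbox(self, other):
        return all(a <= b for a, b in zip(self.lo, other.lo)) and \
               all(a >= b for a, b in zip(self.hi, other.hi))

index_bounds = Bbox((0, 0, 0), (10, 10, 10))
query = Bbox((-5, -5, -5), (20, 20, 20))

if query.contains_bbox(index_bounds):   # note: no `not` in front
    print("fast path: take every label without per-label checks")
```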
5a1a8e83f43350620b2f4922d111e1553117086e
diff --git a/public/js/editors/panel.js b/public/js/editors/panel.js index <HASH>..<HASH> 100644 --- a/public/js/editors/panel.js +++ b/public/js/editors/panel.js @@ -185,27 +185,11 @@ var Panel = function (name, settings) { panel.focus(); }); - // Remove emmet keymaps from javascript panel + // Restore keymaps taken by emmet but that we need for other functionalities if (name === 'javascript') { - for (var k in CodeMirror.keyMap.default) { - if (CodeMirror.keyMap.default.hasOwnProperty(k)) { - if (CodeMirror.keyMap.default[k].indexOf('emmet') !== -1) { - var o = {}; - o[k] = function(cm) {}; - panel.editor.addKeyMap(o); - } - } - } - // Restore the keymaps that we need - panel.editor.addKeyMap({ - 'Tab': 'autocomplete' - }); - panel.editor.addKeyMap({ - 'Enter': 'newlineAndIndent' - }); - panel.editor.addKeyMap({ - 'Cmd-D': 'deleteLine' - }); + // panel.editor.addKeyMap({ + // 'Cmd-D': 'deleteLine' + // }); panel.editor.addKeyMap({ 'Cmd-/': function(cm) { CodeMirror.commands.toggleComment(cm); } });
Stopped overwriting emmet keymaps, except for the ones we need.
jsbin_jsbin
train
d9ae20a2b9c9fed6b348e6a6bcba1cf92b168874
diff --git a/controller-client/src/main/java/org/jboss/as/controller/client/helpers/domain/impl/DomainClientImpl.java b/controller-client/src/main/java/org/jboss/as/controller/client/helpers/domain/impl/DomainClientImpl.java index <HASH>..<HASH> 100644 --- a/controller-client/src/main/java/org/jboss/as/controller/client/helpers/domain/impl/DomainClientImpl.java +++ b/controller-client/src/main/java/org/jboss/as/controller/client/helpers/domain/impl/DomainClientImpl.java @@ -162,7 +162,7 @@ public class DomainClientImpl implements DomainClient { op.get("operation").set("start"); ModelNode address = op.get("address"); address.add("host", hostControllerName); - address.add("server", serverName); + address.add("server-config", serverName); ModelNode result = executeForResult(OperationBuilder.Factory.create(op).build()); String status = result.asString(); return Enum.valueOf(ServerStatus.class, status); @@ -176,7 +176,7 @@ public class DomainClientImpl implements DomainClient { op.get("operation").set("stop"); ModelNode address = op.get("address"); address.add("host", hostControllerName); - address.add("server", serverName); + address.add("server-config", serverName); // FIXME add graceful shutdown ModelNode result = executeForResult(OperationBuilder.Factory.create(op).build()); String status = result.asString(); @@ -191,7 +191,7 @@ public class DomainClientImpl implements DomainClient { op.get("operation").set("restart"); ModelNode address = op.get("address"); address.add("host", hostControllerName); - address.add("server", serverName); + address.add("server-config", serverName); // FIXME add graceful shutdown ModelNode result = executeForResult(OperationBuilder.Factory.create(op).build()); String status = result.asString();
Use correct address for server lifecycle ops
wildfly_wildfly
train
a2d3470a24076299ff8f7b098c4a8cd13fd5569f
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1,6 +1,8 @@ +const SpriteLoaderPlugin = require("svg-sprite-loader/plugin"); + const defaults = { inline: { svgo: false }, // Options for vue-svg-loader - sprite: {}, // Options for svg-sprite-loader + sprite: { extract: true }, // Options for svg-sprite-loader data: {}, // Options for url-loader external: {} // Options for file-loader }; @@ -23,9 +25,17 @@ function handler(api, options) { */ function setup(config, options) { const rule = config.module.rule("svg"); // Find the svg rule - options.external = { ...rule.use("file-loader").get("options"), ...options.external }; // Make sure we save the file loader options + + const fileLoaderOptions = rule.use("file-loader").get("options"); // Get the file loader options + options.external = { ...fileLoaderOptions, ...options.external }; // Make sure we save the file loader options + options.sprite = { spriteFilename: fileLoaderOptions.name, ...options.sprite }; // Use file loader options for sprite name + rule.uses.clear(); // Clear out existing uses of the svg rule + config + .plugin("sprite") + .use(SpriteLoaderPlugin); + rule .oneOf("inline").resourceQuery(/inline/).use("vue-svg-loader").loader("vue-svg-loader").options(options.inline).end().end() .oneOf("sprite").resourceQuery(/sprite/).use("svg-sprite-loader").loader("svg-sprite-loader").options(options.sprite).end().end()
Add reference implementation of sprite loader
sam3d_vue-svg
train
4430a55eed6c03daf7bab02483e984bf53bcfdfb
diff --git a/server/monitor.go b/server/monitor.go index <HASH>..<HASH> 100644 --- a/server/monitor.go +++ b/server/monitor.go @@ -1101,11 +1101,18 @@ type LeafNodeOptsVarz struct { Remotes []RemoteLeafOptsVarz `json:"remotes,omitempty"` } +// Contains lists of subjects not allowed to be imported/exported +type DenyRules struct { + Exports []string `json:"exports,omitempty"` + Imports []string `json:"imports,omitempty"` +} + // RemoteLeafOptsVarz contains monitoring remote leaf node information type RemoteLeafOptsVarz struct { - LocalAccount string `json:"local_account,omitempty"` - TLSTimeout float64 `json:"tls_timeout,omitempty"` - URLs []string `json:"urls,omitempty"` + LocalAccount string `json:"local_account,omitempty"` + TLSTimeout float64 `json:"tls_timeout,omitempty"` + URLs []string `json:"urls,omitempty"` + Deny *DenyRules `json:"deny,omitempty"` } // VarzOptions are the options passed to Varz(). @@ -1278,10 +1285,18 @@ func (s *Server) createVarz(pcpu float64, rss int64) *Varz { if l := len(ln.Remotes); l > 0 { rlna := make([]RemoteLeafOptsVarz, l) for i, r := range ln.Remotes { + var deny *DenyRules + if len(r.DenyImports) > 0 || len(r.DenyExports) > 0 { + deny = &DenyRules{ + Imports: r.DenyImports, + Exports: r.DenyExports, + } + } rlna[i] = RemoteLeafOptsVarz{ LocalAccount: r.LocalAccount, URLs: urlsToStrings(r.URLs), TLSTimeout: r.TLSTimeout, + Deny: deny, } } varz.LeafNode.Remotes = rlna diff --git a/server/monitor_test.go b/server/monitor_test.go index <HASH>..<HASH> 100644 --- a/server/monitor_test.go +++ b/server/monitor_test.go @@ -2821,7 +2821,7 @@ func TestMonitorLeafNode(t *testing.T) { opts.LeafNode.TLSConfig != nil, []RemoteLeafOptsVarz{ { - "acc", 1, []string{"localhost:1234"}, + "acc", 1, []string{"localhost:1234"}, nil, }, }, } @@ -2840,7 +2840,7 @@ func TestMonitorLeafNode(t *testing.T) { // Having this here to make sure that if fields are added in ClusterOptsVarz, // we make sure to update this test (compiler will report an error if we don't) - _ = LeafNodeOptsVarz{"", 0, 0, 0, false, false, []RemoteLeafOptsVarz{{"", 0, nil}}} + _ = LeafNodeOptsVarz{"", 0, 0, 0, false, false, []RemoteLeafOptsVarz{{"", 0, nil, nil}}} // Alter the fields to make sure that we have a proper deep copy // of what may be stored in the server. Anything we change here
[added] leaf deny exports/imports to varz monitoring (#<I>) * [added] leaf deny exports/imports to varz monitoring
nats-io_gnatsd
train
4864bf67602d589d6cf904866a20183f148de986
diff --git a/lib/plugins/module-paths.js b/lib/plugins/module-paths.js index <HASH>..<HASH> 100644 --- a/lib/plugins/module-paths.js +++ b/lib/plugins/module-paths.js @@ -35,7 +35,7 @@ if (config.noGlobalPlugins) { var env = process.env || {}; var execPath = process.execPath; var isWin = process.platform === 'win32'; -var paths = addon.concat(module.paths.map(formatPath)); +var paths = module.paths.map(formatPath); var globalDir = formatPath(getGlobalDir()); var appDataDir = formatPath(env.APPDATA, 'npm'); var pluginsPath = formatPath(config.baseDir); @@ -58,6 +58,7 @@ if (pluginsPath && paths.indexOf(pluginsPath) == -1) { } paths.unshift(config.CUSTOM_PLUGIN_PATH, config.systemPluginPath); +paths = addon.concat(paths); addDebugPaths(paths); var nvmBin = env.NVM_BIN;
refactor: refine custom plugin path
avwo_whistle
train
c1bbd85b688758bb943822743d5ad75bf3761ab1
diff --git a/modules/page/html/render/private.js b/modules/page/html/render/private.js index <HASH>..<HASH> 100644 --- a/modules/page/html/render/private.js +++ b/modules/page/html/render/private.js @@ -22,9 +22,15 @@ exports.create = function (api) { meta: {type: 'post'}, isPrivate: true, prepublish: function (msg) { - msg.recps = [id].concat(msg.mentions).filter(function (e) { - return ref.isFeed(typeof e === 'string' ? e : e.link) + msg.recps = [id] + + msg.mentions.forEach(mention => { + mention = typeof mention === 'string' ? mention : mention.link + if (ref.isFeed(mention) && !msg.recps.includes(mention)) { + msg.recps.push(mention) + } }) + return msg }, placeholder: i18n('Write a private message')
only add a person to msg.recps once
ssbc_patchwork
train
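The patch is an ordered de-duplication: start from the author and append each mentioned feed only once. The same logic in Python, with a crude stand-in for `ref.isFeed`:

```python
def build_recps(author_id, mentions):
    recps = [author_id]
    for mention in mentions:
        link = mention if isinstance(mention, str) else mention["link"]
        # "@"-prefix check is a stand-in for ref.isFeed in the real code
        if link.startswith("@") and link not in recps:
            recps.append(link)
    return recps

print(build_recps("@alice", ["@bob", {"link": "@bob"}, "@carol"]))
# -> ['@alice', '@bob', '@carol']  (each person appears once, order kept)
```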
d2ded05cb137348090c38fa9c4d8b422c06ecfea
diff --git a/expression/builtin_miscellaneous_vec.go b/expression/builtin_miscellaneous_vec.go index <HASH>..<HASH> 100644 --- a/expression/builtin_miscellaneous_vec.go +++ b/expression/builtin_miscellaneous_vec.go @@ -536,9 +536,18 @@ func (b *builtinNameConstRealSig) vecEvalReal(input *chunk.Chunk, result *chunk. } func (b *builtinReleaseLockSig) vectorized() bool { - return false + return true } +// See https://dev.mysql.com/doc/refman/5.7/en/miscellaneous-functions.html#function_release-lock +// The release lock function will do nothing. +// Warning: release_lock() function is parsed but ignored. func (b *builtinReleaseLockSig) vecEvalInt(input *chunk.Chunk, result *chunk.Column) error { - return errors.Errorf("not implemented") + n := input.NumRows() + result.ResizeInt64(n, false) + i64s := result.Int64s() + for i := range i64s { + i64s[i] = 1 + } + return nil }
expression: implement vectorized evaluation for builtinReleaseLockSig (#<I>)
pingcap_tidb
train
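The shape of a vectorized builtin — resize the output column once, then fill every slot — sketched in Python with a plain list standing in for `chunk.Column`:

```python
# release_lock() is parsed but ignored, so the vectorized form just
# returns a column of 1s, one per input row, in a single pass.
def vec_eval_release_lock(num_rows):
    result = [0] * num_rows      # analogous to ResizeInt64(n, false)
    for i in range(num_rows):
        result[i] = 1
    return result

print(vec_eval_release_lock(5))  # -> [1, 1, 1, 1, 1]
```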
e11e4a161dca124ab63a9aedf30300707bbb55bd
diff --git a/timestring/Date.py b/timestring/Date.py index <HASH>..<HASH> 100644 --- a/timestring/Date.py +++ b/timestring/Date.py @@ -163,6 +163,11 @@ class Date: if minute: new_date = new_date.replace(minute=int(max(minute))) + #second + seconds = date.get('seconds', 0) + if seconds: + new_date = new_date.replace(second=int(seconds)) + self.date = new_date elif type(date) in (types.IntType, types.LongType, types.FloatType) and re.match('^\d{10}$', str(date)): diff --git a/timestring/tests.py b/timestring/tests.py index <HASH>..<HASH> 100644 --- a/timestring/tests.py +++ b/timestring/tests.py @@ -64,6 +64,14 @@ class timestringTests(unittest.TestCase): self.assertEqual(_range[1].hour, 6) self.assertEqual(_range[1].minute, 41) + date = Date('2013-09-10T10:45:50') + self.assertEqual(date.year, 2013) + self.assertEqual(date.month, 9) + self.assertEqual(date.day, 10) + self.assertEqual(date.hour, 10) + self.assertEqual(date.minute, 45) + self.assertEqual(date.second, 50) + def test_singles(self): now = datetime.now() # diff --git a/timestring/timestring_re.py b/timestring/timestring_re.py index <HASH>..<HASH> 100644 --- a/timestring/timestring_re.py +++ b/timestring/timestring_re.py @@ -34,9 +34,9 @@ TIMESTRING_RE = re.compile(re.sub('[\t\n\s]', '', re.sub('(\(\?\#[^\)]+\))', '', | - (?# =-=-=-= Matches "2012/12/11", "5/23/2012", "05/2012", "2012" =-=-=-= ) + (?# =-=-=-= Matches "2012/12/11", "2013-09-10T", "5/23/2012", "05/2012", "2012" =-=-=-= ) ( - ((?P<year_3>[12][089]\d{2})[/-](?P<month_3>[01]?\d)([/-](?P<date_3>[0-3]?\d))?) + ((?P<year_3>[12][089]\d{2})[/-](?P<month_3>[01]?\d)([/-](?P<date_3>[0-3]?\d))?)T? | ((?P<month_2>[01]?\d)[/-](?P<date_2>[0-3]?\d)[/-](?P<year_2>(([12][089]\d{2})|(\d{2})))) | @@ -51,7 +51,7 @@ TIMESTRING_RE = re.compile(re.sub('[\t\n\s]', '', re.sub('(\(\?\#[^\)]+\))', '', ( ((?P<hour>[012]?[0-9]):(?P<minute>[0-5]\d)\s?(?P<am>am|pm|p|a)) | - ((?P<hour_2>[012]?[0-9]):(?P<minute_2>[0-5]\d)) + ((?P<hour_2>[012]?[0-9]):(?P<minute_2>[0-5]\d)(:(?P<seconds>[0-5]\d))?) | ((?P<hour_3>[012]?[0-9])\s?(?P<am_1>am|pm|p|a|o'?clock)) |
support for matching YYYY-MM-DDTHH:II:SS
stevepeak_timestring
train
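The two regex changes — tolerate a trailing "T" after the date, and capture an optional ":SS" seconds group after HH:MM — can be reproduced with a simplified pattern. This is not the library's full TIMESTRING_RE, just the relevant fragment:

```python
import re

TS = re.compile(
    r"(?P<year>[12][089]\d{2})[/-](?P<month>[01]?\d)[/-](?P<date>[0-3]?\d)T?"
    r"(?:(?P<hour>[012]?\d):(?P<minute>[0-5]\d)(?::(?P<seconds>[0-5]\d))?)?"
)

m = TS.match("2013-09-10T10:45:50")
print(m.group("date"), m.group("seconds"))   # -> 10 50
```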
b4b17af44c41a807b81d7613eb37625f76492554
diff --git a/gflex/f1d.py b/gflex/f1d.py index <HASH>..<HASH> 100644 --- a/gflex/f1d.py +++ b/gflex/f1d.py @@ -11,6 +11,7 @@ class F1D(Flexure): def run(self): self.bc_check() + self.solver_start_time = time.time() if self.method == 'FD': # Finite difference super(F1D, self).FD() @@ -32,8 +33,11 @@ class F1D(Flexure): sys.exit('Error: method must be "FD", "FFT", "SAS", or "SAS_NG"') if self.Verbose: print 'F1D run' - self.method_func () - # self.plot() # in here temporarily + self.method_func() + + self.time_to_solve = time.time() - self.solver_start_time + if self.Quiet == False: + print 'Time to solve [s]:', self.time_to_solve def finalize(self): if self.Verbose: print 'F1D finalized' @@ -661,8 +665,6 @@ class F1D(Flexure): self.calc_max_flexural_wavelength() print 'maxFlexuralWavelength_ncells', self.maxFlexuralWavelength_ncells - self.solver_start_time = time.time() - if self.solver == "iterative" or self.solver == "Iterative": if self.Debug: print "Using generalized minimal residual method for iterative solution" @@ -685,10 +687,6 @@ class F1D(Flexure): # (i.e. material removed) self.w = spsolve(self.coeff_matrix, -self.qs, use_umfpack=True) - self.time_to_solve = time.time() - self.solver_start_time - # Always print this! - print 'Time to solve [s]:', self.time_to_solve - if self.Debug: print "w.shape:" print self.w.shape
Universal solution timer based on f2d.py
awickert_gFlex
train
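The commit hoists the timing out of one solver branch into `run()`, so every method (FD, FFT, SAS, ...) gets measured by the same clock. The pattern in Python, with a toy workload:

```python
import time

class Flexure:
    quiet = False
    def run(self, method):
        start = time.time()
        method()                       # whichever solver was dispatched
        self.time_to_solve = time.time() - start
        if not self.quiet:
            print("Time to solve [s]:", self.time_to_solve)

Flexure().run(lambda: sum(range(1_000_000)))
```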
bf151e4aa8f27fe44ac2cf6a881c6ce80a2a0f60
diff --git a/lib/jrubyfx/application.rb b/lib/jrubyfx/application.rb index <HASH>..<HASH> 100644 --- a/lib/jrubyfx/application.rb +++ b/lib/jrubyfx/application.rb @@ -39,14 +39,4 @@ class JRubyFX::Application < Java.javafx.application.Application #call our custom launcher to avoid a java shim JavaFXImpl::Launcher.launch_app(self, *args) end - - ## - # call-seq: - # launch_object(app) - # - # When called on a subclass, this is effectively our main method. - def self.launch_object(obj, *args) - #call our custom launcher to avoid a java shim - JavaFXImpl::Launcher.launch_app_with_object(obj, *args) - end end diff --git a/lib/jrubyfx/java_fx_impl.rb b/lib/jrubyfx/java_fx_impl.rb index <HASH>..<HASH> 100644 --- a/lib/jrubyfx/java_fx_impl.rb +++ b/lib/jrubyfx/java_fx_impl.rb @@ -50,10 +50,6 @@ module JavaFXImpl #:nodoc: all @@launchCalled = AtomicBoolean.new(false) # Atomic boolean go boom on bikini def self.launch_app(application_class, *args) - launch_app_with_object(application_class.new, *args) - end - - def self.launch_app_with_object(application, *args) #prevent multiple! if @@launchCalled.getAndSet(true) throw IllegalStateException.new "Application launch must not be called more than once" @@ -64,7 +60,7 @@ module JavaFXImpl #:nodoc: all count_down_latch = CountDownLatch.new(1) thread = Java.java.lang.Thread.new do begin - launch_app_from_thread(application, args) + launch_app_from_thread(application_class, args) rescue => ex puts "Exception starting app:" p ex @@ -82,7 +78,7 @@ module JavaFXImpl #:nodoc: all end end - def self.launch_app_from_thread(application, args) + def self.launch_app_from_thread(application_class, args) #platformImpl startup? CountDownLatch.new(1).tap do |latch| PlatformImpl.startup { latch.countDown } @@ -90,7 +86,7 @@ module JavaFXImpl #:nodoc: all end begin - launch_app_after_platform(application, args) + launch_app_after_platform(application_class, args) rescue => ex puts "Error running Application:" p ex @@ -100,7 +96,7 @@ module JavaFXImpl #:nodoc: all PlatformImpl.tkExit # kill the toolkit and exit end - def self.launch_app_after_platform(application, args) + def self.launch_app_after_platform(application_class, args) #listeners - for the end finished_latch = CountDownLatch.new(1) @@ -110,11 +106,12 @@ module JavaFXImpl #:nodoc: all finished_latch.countDown }) + application = application_class.new + unless application.is_a? Java::javafx.application.Application raise "Invalid type: cannot launch non-Application" end - # Correct answer: yes ParametersImpl.registerParameters(application, ParametersImpl.new(args)) application.init diff --git a/samples/javafx/analog_clock.rb b/samples/javafx/analog_clock.rb index <HASH>..<HASH> 100755 --- a/samples/javafx/analog_clock.rb +++ b/samples/javafx/analog_clock.rb @@ -2,13 +2,9 @@ require 'jrubyfx' class AnalogClock < JRubyFX::Application - def initialize(size) - super() - @size = size - end - def start(stage) @stage = stage + @size = parameters.raw[0].to_i height, width, = @size, @size with(stage, init_style: :transparent, width: width+5, height: height+25, @@ -74,5 +70,5 @@ class AnalogClock < JRubyFX::Application end end -size = ARGV.shift || 300 -AnalogClock.launch_object(AnalogClock.new(size.to_i)) +size = ARGV.shift || 300.to_s +AnalogClock.launch(size)
Revert and kill launch_with_object
jruby_jrubyfx
train
1a3939659b2c2506fb50ca3ee21e84c642cb5f64
diff --git a/src/data.js b/src/data.js index <HASH>..<HASH> 100644 --- a/src/data.js +++ b/src/data.js @@ -49,7 +49,7 @@ Data.prototype = { descriptor[ this.expando ] = { value: unlock }; Object.defineProperties( owner, descriptor ); - // Support: Android<4 + // Support: Android < 4 // Fallback to a less secure definition } catch ( e ) { descriptor[ this.expando ] = unlock; @@ -92,8 +92,6 @@ Data.prototype = { } } } - - return this; }, get: function( owner, key ) { // Either a valid cache is found, or will be created. @@ -136,11 +134,12 @@ Data.prototype = { }, remove: function( owner, key ) { var i, name, - unlock = this.key( owner ), - cache = this.cache[ unlock ]; + unlock = this.key( owner ), + cache = this.cache[ unlock ]; if ( key === undefined ) { this.cache[ unlock ] = {}; + } else { // Support array or space separated string of keys if ( jQuery.isArray( key ) ) { @@ -166,7 +165,7 @@ Data.prototype = { i = name.length; while ( i-- ) { - delete cache[ name[i] ]; + delete cache[ name[ i ] ]; } } }, @@ -210,7 +209,7 @@ jQuery.extend({ }, removeData: function( elem, name ) { - return data_user.remove( elem, name ); + data_user.remove( elem, name ); }, // TODO: Replace all calls to _data and _removeData with direct @@ -225,14 +224,14 @@ jQuery.extend({ }, _removeData: function( elem, name ) { - return data_priv.remove( elem, name ); + data_priv.remove( elem, name ); } }); jQuery.fn.extend({ data: function( key, value ) { var attrs, name, - elem = this[0], + elem = this[ 0 ], i = 0, data = null; @@ -244,7 +243,7 @@ jQuery.fn.extend({ if ( elem.nodeType === 1 && !data_priv.get( elem, "hasDataAttrs" ) ) { attrs = elem.attributes; for ( ; i < attrs.length; i++ ) { - name = attrs[i].name; + name = attrs[ i ].name; if ( name.indexOf( "data-" ) === 0 ) { name = jQuery.camelCase( name.substring(5) ); @@ -267,12 +266,12 @@ jQuery.fn.extend({ return jQuery.access( this, function( value ) { var data, - camelKey = jQuery.camelCase( key ); + camelKey = jQuery.camelCase( key ); // The calling jQuery object (element matches) is not empty - // (and therefore has an element appears at this[0]) and the + // (and therefore has an element appears at this[ 0 ]) and the // `value` parameter was not undefined. An empty jQuery object - // will result in `undefined` for elem = this[0] which will + // will result in `undefined` for elem = this[ 0 ] which will // throw an exception if an attempt to read a data cache is made. if ( elem && value === undefined ) { // Attempt to get data from the cache @@ -297,7 +296,7 @@ jQuery.fn.extend({ } // We tried really hard, but the data doesn't exist. - return undefined; + return; } // Set the data... @@ -311,10 +310,10 @@ jQuery.fn.extend({ // This might not apply to all properties...* data_user.set( this, camelKey, value ); - // *... In the case of properties that might ACTUALLY + // *... In the case of properties that might _actually_ // have dashes, we need to also store a copy of that // unchanged property. - if ( /-/.test( key ) && data !== undefined ) { + if ( key.indexOf("-") !== -1 && data !== undefined ) { data_user.set( this, key, value ); } }); @@ -334,7 +333,6 @@ function dataAttr( elem, key, data ) { // If nothing was found internally, try to fetch any // data from the HTML5 data-* attribute if ( data === undefined && elem.nodeType === 1 ) { - name = "data-" + key.replace( rmultiDash, "-$1" ).toLowerCase(); data = elem.getAttribute( name );
No ticket. Simplifies data methods and updates comments. Closes gh-<I>
jquery_jquery
train
4b657f14011a27b45aed1d8c3be3482f9216846f
diff --git a/TestsExample/App.js b/TestsExample/App.js index <HASH>..<HASH> 100644 --- a/TestsExample/App.js +++ b/TestsExample/App.js @@ -33,6 +33,7 @@ import Test791 from './src/Test791'; import Test817 from './src/Test817'; import Test831 from './src/Test831'; import Test844 from './src/Test844'; +import Test852 from './src/Test852'; import Test861 from './src/Test861'; import Test865 from './src/Test865';
fix: add missing import in App.js (#<I>) Added the missing import statement in App.js for Test<I> in the TestsExample project.
kmagiera_react-native-screens
train
6df2bae8ac1827e5b09f61e8f2cbdd81e3e3af7f
diff --git a/param/parameterized.py b/param/parameterized.py index <HASH>..<HASH> 100644 --- a/param/parameterized.py +++ b/param/parameterized.py @@ -2,7 +2,6 @@ Generic support for objects with full-featured Parameters and messaging. """ -__version__='$Revision$' import copy import re
Removed unused __version__ attribute from parameterized.py (I think it's usual to have __version__ only in __init__.py).
pyviz_param
train
0154fe5e255fd1cf5bfe184d9c5bda4ff9f330ec
diff --git a/web/src/main/java/uk/ac/ebi/atlas/solr/index/IndexingProgress.java b/web/src/main/java/uk/ac/ebi/atlas/solr/index/IndexingProgress.java index <HASH>..<HASH> 100644 --- a/web/src/main/java/uk/ac/ebi/atlas/solr/index/IndexingProgress.java +++ b/web/src/main/java/uk/ac/ebi/atlas/solr/index/IndexingProgress.java @@ -34,13 +34,16 @@ import java.nio.file.Files; import java.nio.file.Path; import java.util.Iterator; import java.util.List; +import java.util.concurrent.CopyOnWriteArrayList; @Named @Scope("singleton") public class IndexingProgress implements Iterable<IndexingProgress.ProcessedFile>{ private static final Logger LOGGER = Logger.getLogger(IndexingProgress.class); - private List<ProcessedFile> processedFiles = Lists.newArrayList(); + // This list may be iterated from a different thread than the index build thread, generating a + // ConcurrentModificationException if we didn't use a thread safe implementation like CopyOnWriteArrayList + private List<ProcessedFile> processedFiles = new CopyOnWriteArrayList<>(); private long processedDiskSpace; private long totalTimeTaken; diff --git a/web/src/test/java/uk/ac/ebi/atlas/acceptance/rest/tests/BioentityIndexControllerIT.java b/web/src/test/java/uk/ac/ebi/atlas/acceptance/rest/tests/BioentityIndexControllerIT.java index <HASH>..<HASH> 100644 --- a/web/src/test/java/uk/ac/ebi/atlas/acceptance/rest/tests/BioentityIndexControllerIT.java +++ b/web/src/test/java/uk/ac/ebi/atlas/acceptance/rest/tests/BioentityIndexControllerIT.java @@ -32,10 +32,10 @@ import static org.hamcrest.Matchers.is; public class BioentityIndexControllerIT { private static final String STATUS_SERVICE_END_POINT_URL = "/gxa/buildIndex/status"; - private static final String BUILD_INDEX_SERVICE_END_POINT_URL = "/gxa/buildIndex"; +// private static final String BUILD_INDEX_SERVICE_END_POINT_URL = "/gxa/buildIndex"; private EndPoint statusServiceEndPoint = new EndPoint(STATUS_SERVICE_END_POINT_URL); - private EndPoint buildIndexServiceEndPoint = new EndPoint(BUILD_INDEX_SERVICE_END_POINT_URL); +// private EndPoint buildIndexServiceEndPoint = new EndPoint(BUILD_INDEX_SERVICE_END_POINT_URL); @Test public void statusShouldBeInitialized() {
IndexingProgress is now a thread-safe Iterable
ebi-gene-expression-group_atlas
train
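Why `CopyOnWriteArrayList` fixes the ConcurrentModificationException: writers swap in a copy under a lock, so a reader iterates an immutable snapshot. A minimal Python imitation of the semantics (Java's class does this natively):

```python
import threading

class CopyOnWriteList:
    def __init__(self):
        self._items = []
        self._lock = threading.Lock()
    def append(self, item):
        with self._lock:
            self._items = self._items + [item]   # copy, then swap reference
    def __iter__(self):
        return iter(self._items)                  # snapshot at call time

files = CopyOnWriteList()
files.append("file1.dat")
for f in files:
    files.append("file2.dat")   # safe: the loop walks the old snapshot
print(list(files))              # -> ['file1.dat', 'file2.dat']
```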
c83af4091d536e58b01636c52ee678adf82deaff
diff --git a/src/controls/VideoDisplay.js b/src/controls/VideoDisplay.js
index <HASH>..<HASH> 100644
--- a/src/controls/VideoDisplay.js
+++ b/src/controls/VideoDisplay.js
@@ -1,4 +1,4 @@
-import React, { PureComponent } from 'react';
+import React, { Component } from 'react';
 import PropTypes from 'prop-types';
 import ResizeObserver from 'resize-observer-polyfill';
 
@@ -27,7 +27,7 @@ import { logWarning } from '../utils/console';
  * but are adjusted so the canvas maximally fills the container area.
  */
 
-class VideoDisplay extends PureComponent {
+class VideoDisplay extends Component {
   constructor(props) {
     super(props);
     this.state = {
@@ -48,15 +48,18 @@ class VideoDisplay extends PureComponent {
     this.checkForBadStuff();
     const { displayWidth, displayHeight } = this.getDeviceDisplayDimensions();
-    const { endStream, setCanvasSize } = this.props.pipeVideoStreamToCanvas(
-      this.canvas,
-      ctx => {
-        this.handleFrameUpdate(ctx);
-      }
-    );
+    const {
+      endStream,
+      setCanvasSize,
+      setPlaceholderImage
+    } = this.props.pipeVideoStreamToCanvas(this.canvas, ctx => {
+      this.handleFrameUpdate(ctx);
+    });
     setCanvasSize(displayWidth, displayHeight);
+    this.getPlaceholderImage(setPlaceholderImage);
     this.endStream = endStream;
     this.setCanvasSize = setCanvasSize;
+    this.setPlaceholderImage = setPlaceholderImage;
 
     this.updateContainerDimensions();
     this.containerResizeObserver = new ResizeObserver(
@@ -69,6 +72,7 @@
     this.checkForBadStuff();
     const { displayWidth, displayHeight } = this.getDeviceDisplayDimensions();
     this.setCanvasSize(displayWidth, displayHeight);
+    this.getPlaceholderImage(this.setPlaceholderImage);
     this.updateContainerDimensions();
   }
 
@@ -122,6 +126,19 @@
     };
   }
 
+  getPlaceholderImage(callback) {
+    const { playlist, activeTrackIndex } = this.props;
+    if (playlist[activeTrackIndex] && playlist[activeTrackIndex].artwork) {
+      const img = new Image();
+      img.crossOrigin = 'anonymous';
+      img.onload = () => callback(img);
+      img.onerror = () => callback();
+      img.src = playlist[activeTrackIndex].artwork[0].src;
+    } else {
+      callback();
+    }
+  }
+
   handleFrameUpdate(canvasContext) {
     const { width, height } = this.canvas;
     if (width && height) {
@@ -244,4 +261,8 @@ VideoDisplay.defaultProps = {
   background: '#000'
 };
 
-export default playerContextFilter(VideoDisplay, ['pipeVideoStreamToCanvas']);
+export default playerContextFilter(VideoDisplay, [
+  'pipeVideoStreamToCanvas',
+  'playlist',
+  'activeTrackIndex'
+]);
diff --git a/src/utils/streamVideoElementToCanvas.js b/src/utils/streamVideoElementToCanvas.js
index <HASH>..<HASH> 100644
--- a/src/utils/streamVideoElementToCanvas.js
+++ b/src/utils/streamVideoElementToCanvas.js
@@ -3,6 +3,7 @@ function streamVideoElementToCanvas(videoElement, canvas, callback) {
   let requestId = null;
   let widthSet = null;
   let heightSet = null;
+  let placeholderImage = null;
 
   requestId = requestAnimationFrame(streamToCanvas);
 
@@ -13,31 +14,58 @@ function streamVideoElementToCanvas(videoElement, canvas, callback) {
     setCanvasSize(width, height) {
      widthSet = width || null;
       heightSet = height || null;
+    },
+    setPlaceholderImage(img) {
+      placeholderImage = img || null;
     }
   };
 
   function streamToCanvas() {
     const { videoWidth, videoHeight } = videoElement;
+
+    // we want to draw the current frame image from the video element
+    let imageElement = videoElement;
+    let imageWidth = videoWidth;
+    let imageHeight = videoHeight;
     let targetWidth = videoWidth;
     let targetHeight = videoHeight;
+
+    // however if there's no video to display (usually means we're playing
+    // audio) then we want to display a placeholder image, if available
+    if (!(targetWidth && targetHeight) && placeholderImage) {
+      imageElement = placeholderImage;
+      imageWidth = placeholderImage.width;
+      imageHeight = placeholderImage.height;
+      targetWidth = placeholderImage.width;
+      targetHeight = placeholderImage.height;
+    }
+
+    // figure out what resolution the drawn image should be
     if (widthSet && heightSet) {
       targetWidth = widthSet;
       targetHeight = heightSet;
     } else if (widthSet) {
       targetWidth = widthSet;
-      targetHeight = (widthSet / videoWidth) * videoHeight;
+      targetHeight = (widthSet / imageWidth) * imageHeight;
     } else if (heightSet) {
       targetHeight = heightSet;
-      targetWidth = (heightSet / videoHeight) * videoWidth;
+      targetWidth = (heightSet / imageHeight) * imageWidth;
    }
+
+    // resize the canvas to the draw resolution if it doesn't already match
    if (canvas.width !== targetWidth || canvas.height !== targetHeight) {
      canvas.width = targetWidth;
      canvas.height = targetHeight;
    }
-    ctx.drawImage(videoElement, 0, 0, targetWidth, targetHeight);
+
+    // draw
+    ctx.drawImage(imageElement, 0, 0, targetWidth, targetHeight);
+
+    // let the callback handle any post-processing
    if (callback) {
      callback(ctx);
    }
+
    requestId = requestAnimationFrame(streamToCanvas);
  }
}
Display placeholder image (without flicker!) when video is absent.
benwiley4000_cassette
train
daa53493c207920accc3263babac3cae5e6d2dee
diff --git a/app/deploy.go b/app/deploy.go index <HASH>..<HASH> 100644 --- a/app/deploy.go +++ b/app/deploy.go @@ -179,6 +179,9 @@ func GetDeploy(id string, u *auth.User) (*DeployData, error) { return nil, err } defer conn.Close() + if !bson.IsObjectIdHex(id) { + return nil, fmt.Errorf("id parameter is not ObjectId: %s", id) + } if err := conn.Deploys().FindId(bson.ObjectIdHex(id)).One(&dep); err != nil { return nil, err } diff --git a/app/deploy_test.go b/app/deploy_test.go index <HASH>..<HASH> 100644 --- a/app/deploy_test.go +++ b/app/deploy_test.go @@ -404,6 +404,13 @@ func (s *S) TestGetDeployNotFound(c *check.C) { c.Assert(deploy, check.IsNil) } +func (s *S) TestGetDeployInvalidHex(c *check.C) { + lastDeploy, err := GetDeploy("abc123", nil) + c.Assert(err, check.NotNil) + c.Assert(err, check.ErrorMatches, "id parameter is not ObjectId: abc123") + c.Assert(lastDeploy, check.IsNil) +} + func (s *S) TestGetDiffInDeploys(c *check.C) { s.conn.Deploys().RemoveAll(nil) myDeploy := DeployData{
app/deploy: verify that the id is a valid ObjectId in GetDeploy. The check is needed because bson.ObjectIdHex panics when it tries to convert an invalid ObjectId.
tsuru_tsuru
train
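The guard, approximated in Python: a BSON ObjectId is exactly 24 hex characters, and converting anything else panics (in Go) or raises (here):

```python
import re

OBJECT_ID = re.compile(r"^[0-9a-fA-F]{24}$")

def get_deploy(deploy_id):
    if not OBJECT_ID.match(deploy_id):
        raise ValueError(f"id parameter is not ObjectId: {deploy_id}")
    return {"_id": deploy_id}       # stand-in for the real MongoDB lookup

print(get_deploy("5f0c2a9b8d3e4f0011223344"))
try:
    get_deploy("abc123")
except ValueError as e:
    print(e)                        # -> id parameter is not ObjectId: abc123
```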
7c9a79f9a6a443d47529bb4b1054c6a9bb4ef47f
diff --git a/src/Edge.php b/src/Edge.php
index <HASH>..<HASH> 100644
--- a/src/Edge.php
+++ b/src/Edge.php
@@ -13,12 +13,12 @@ use Innmind\Immutable\Map;
 
 final class Edge
 {
-    private Node $from;
+    private Node\Name $from;
     private Node $to;
     /** @var Map<string, string> */
     private Map $attributes;
 
-    private function __construct(Node $from, Node $to)
+    private function __construct(Node\Name $from, Node $to)
     {
         $this->from = $from;
         $this->to = $to;
@@ -26,12 +26,12 @@ final class Edge
         $this->attributes = Map::of();
     }
 
-    public static function between(Node $from, Node $to): self
+    public static function between(Node\Name $from, Node $to): self
     {
         return new self($from, $to);
     }
 
-    public function from(): Node
+    public function from(): Node\Name
     {
         return $this->from;
     }
diff --git a/src/Layout/Dot.php b/src/Layout/Dot.php
index <HASH>..<HASH> 100644
--- a/src/Layout/Dot.php
+++ b/src/Layout/Dot.php
@@ -182,7 +182,7 @@ final class Dot
 
         return Str::of(\sprintf(
             '    %s %s %s',
-            $edge->from()->name()->toString(),
+            $edge->from()->toString(),
             $type,
             $edge->to()->name()->toString(),
         ))
diff --git a/src/Node.php b/src/Node.php
index <HASH>..<HASH> 100644
--- a/src/Node.php
+++ b/src/Node.php
@@ -62,7 +62,7 @@ final class Node
 
     public function linkedTo(self $node): Edge
     {
-        $edge = Edge::between($this, $node);
+        $edge = Edge::between($this->name, $node);
         $this->edges = ($this->edges)($edge);
 
         return $edge;
diff --git a/tests/EdgeTest.php b/tests/EdgeTest.php
index <HASH>..<HASH> 100644
--- a/tests/EdgeTest.php
+++ b/tests/EdgeTest.php
@@ -18,7 +18,7 @@ class EdgeTest extends TestCase
     public function testInterface()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
         $this->assertTrue($edge->attributes()->empty());
@@ -28,7 +28,7 @@ class EdgeTest extends TestCase
     public function testNodes()
     {
         $edge = Edge::between(
-            $from = Node::named('a'),
+            $from = Node\Name::of('a'),
             $to = Node::named('b'),
         );
 
@@ -39,7 +39,7 @@ class EdgeTest extends TestCase
     public function testAsBidirectional()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -55,7 +55,7 @@ class EdgeTest extends TestCase
     public function testWithoutDirection()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -71,7 +71,7 @@ class EdgeTest extends TestCase
     public function testShaped()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -94,7 +94,7 @@ class EdgeTest extends TestCase
     public function testShapedWhenBidirectional()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
         $edge->asBidirectional();
@@ -120,7 +120,7 @@ class EdgeTest extends TestCase
     public function testDisplayAs()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -136,7 +136,7 @@ class EdgeTest extends TestCase
     public function testUseColor()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -152,7 +152,7 @@ class EdgeTest extends TestCase
     public function testTarget()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -168,7 +168,7 @@ class EdgeTest extends TestCase
     public function testDotted()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
            Node::named('b'),
         );
 
@@ -184,7 +184,7 @@ class EdgeTest extends TestCase
     public function testBold()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
@@ -200,7 +200,7 @@ class EdgeTest extends TestCase
 
     public function testFilled()
     {
         $edge = Edge::between(
-            Node::named('a'),
+            Node\Name::of('a'),
             Node::named('b'),
         );
 
diff --git a/tests/NodeTest.php b/tests/NodeTest.php
index <HASH>..<HASH> 100644
--- a/tests/NodeTest.php
+++ b/tests/NodeTest.php
@@ -47,7 +47,7 @@ class NodeTest extends TestCase
         $edge = $node->linkedTo($to);
 
         $this->assertInstanceOf(Edge::class, $edge);
-        $this->assertSame($node, $edge->from());
+        $this->assertSame($node->name(), $edge->from());
         $this->assertSame($to, $edge->to());
         $this->assertCount(1, $node->edges());
         $this->assertCount(0, $to->edges());
only keep the source name in the edge
Innmind_Graphviz
train
d7db4780b654d522199b057779dbd144edfa6d19
diff --git a/synapse/models/inet.py b/synapse/models/inet.py index <HASH>..<HASH> 100644 --- a/synapse/models/inet.py +++ b/synapse/models/inet.py @@ -248,14 +248,30 @@ class UrlType(DataType): port = None proto = proto.lower() - hostpart = resloc.lower() + hostpart = resloc.lower().replace('[.]','.') subs['proto'] = proto + host = hostpart if hostpart.find(':') != -1: host, portstr = hostpart.rsplit(':', 1) port = self.tlib.getTypeParse('inet:port', portstr)[0] + # use of exception handling logic here is fastest way to both + # validate and convert the data... normally wouldnt use.... + + ipv4 = None + try: + + ipv4 = ipv4int(host) + subs['ipv4'] = ipv4 + + except BadTypeValu as e: + pass + + if ipv4 is None and fqdnre.match(host): + subs['fqdn'] = host + # try for a default iana protocol lookup if port is None: port = s_l_iana.services.get(proto) @@ -665,3 +681,25 @@ class InetMod(CoreModule): } name = 'inet' return ((name, modl), ) + + @modelrev('inet',201706121318) + def _revModl201706121318(self): + + # account for the updated sub-property extraction for inet:url nodes + adds = [] + rows = self.core.getRowsByProp('inet:url') + + for i,p,v,t in rows: + norm,subs = self.core.getTypeNorm('inet:url', v) + + fqdn = subs.get('fqdn') + if fqdn is not None: + adds.append((i, 'inet:url:fqdn', fqdn, t)) + + ipv4 = subs.get('ipv4') + if ipv4 is not None: + adds.append((i, 'inet:url:ipv4', ipv4, t)) + + if adds: + self.core.addRows(adds) + diff --git a/synapse/tests/test_model_inet.py b/synapse/tests/test_model_inet.py index <HASH>..<HASH> 100644 --- a/synapse/tests/test_model_inet.py +++ b/synapse/tests/test_model_inet.py @@ -229,10 +229,14 @@ class InetModelTest(SynTest): self.eq(node[1].get('inet:url:port'), 9999) self.eq(node[1].get('inet:url:user'), 'visi') self.eq(node[1].get('inet:url:passwd'), 'hehe') + self.eq(node[1].get('inet:url:fqdn'), 'www.vertex.link') node = core.formTufoByProp('inet:url', 'HTTP://www.vertex.link/') self.eq(node[1].get('inet:url:port'), 80) + node = core.formTufoByProp('inet:url', 'HTTP://1.2.3.4/') + self.eq(node[1].get('inet:url:ipv4'), 0x01020304) + def test_model_inet_netpost(self): with s_cortex.openurl('sqlite:///:memory:') as core: @@ -336,6 +340,7 @@ class InetModelTest(SynTest): def test_model_fqdn_punycode(self): with s_cortex.openurl('ram:///') as core: + core.setConfOpt('enforce', 1) node = core.formTufoByProp('inet:fqdn', 'www.xn--heilpdagogik-wiki-uqb.de') @@ -346,3 +351,20 @@ class InetModelTest(SynTest): self.eq(core.getTypeRepr('inet:fqdn', fqdn), 'www.heilpädagogik-wiki.de') self.raises(BadTypeValu, core.getTypeNorm, 'inet:fqdn', '!@#$%') + + def test_model_inet_201706121318(self): + + with s_cortex.openurl('ram:///') as core: + + # create fake old-style url nodes by rows alone... + tick = now() + iden0 = guid() + iden1 = guid() + + rows = ( + (iden0, 'tufo:form', 'inet:url', tick), + (iden0, 'inet:url', 'http://www.woot.com/', tick), + + (iden1, 'tufo:form', 'inet:url', tick), + (iden1, 'inet:url', 'http://1.2.3.4/', tick), + )
fix for ipv4/fqdn subprop from url
vertexproject_synapse
train
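A rough Python sketch of the new sub-property extraction: normalize the host out of the URL, then record it as either an ipv4 or an fqdn sub. The `[.]` defang replace mirrors the diff; `fqdnre` is crudely approximated by a dot check:

```python
import ipaddress
from urllib.parse import urlsplit

def url_subs(url):
    subs = {"proto": urlsplit(url).scheme.lower()}
    host = urlsplit(url).hostname or ""
    host = host.replace("[.]", ".")          # undo defanging
    try:
        subs["ipv4"] = int(ipaddress.IPv4Address(host))
    except ipaddress.AddressValueError:
        if "." in host:                      # crude stand-in for fqdnre
            subs["fqdn"] = host
    return subs

print(url_subs("http://www.vertex.link/"))  # -> {'proto': 'http', 'fqdn': 'www.vertex.link'}
print(url_subs("HTTP://1.2.3.4/"))          # -> {'proto': 'http', 'ipv4': 16909060}
```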
2acce0e6e4606fec2506208f2486f06065f7b0cc
diff --git a/molgenis-app/src/main/java/org/molgenis/app/WebAppPermissionRegistry.java b/molgenis-app/src/main/java/org/molgenis/app/WebAppPermissionRegistry.java index <HASH>..<HASH> 100644 --- a/molgenis-app/src/main/java/org/molgenis/app/WebAppPermissionRegistry.java +++ b/molgenis-app/src/main/java/org/molgenis/app/WebAppPermissionRegistry.java @@ -10,6 +10,7 @@ import org.molgenis.core.ui.controller.FeedbackController; import org.molgenis.core.ui.controller.RedirectController; import org.molgenis.core.ui.data.importer.wizard.ImportWizardController; import org.molgenis.core.ui.jobs.JobsController; +import org.molgenis.data.system.model.RootSystemPackage; import org.molgenis.dataexplorer.controller.DataExplorerController; import org.molgenis.dataexplorer.negotiator.NegotiatorController; import org.molgenis.datarowedit.controller.DataRowEditController; @@ -18,6 +19,7 @@ import org.molgenis.ontology.sorta.controller.SortaController; import org.molgenis.questionnaires.controller.QuestionnaireController; import org.molgenis.searchall.controller.SearchAllPluginController; import org.molgenis.security.core.PermissionSet; +import org.molgenis.securityui.controller.SecurityUiController; import org.molgenis.semanticmapper.controller.MappingServiceController; import org.molgenis.util.Pair; import org.springframework.security.acls.domain.ObjectIdentityImpl; @@ -116,6 +118,8 @@ public class WebAppPermissionRegistry implements PermissionRegistry register(PLUGIN, ONE_CLICK_IMPORTER, editor, READ); register(ENTITY_TYPE, JOB_EXECUTION, editor, COUNT); register(ENTITY_TYPE, ONE_CLICK_IMPORT_JOB_EXECUTION, editor, WRITE); + register(PACKAGE, RootSystemPackage.PACKAGE_SYSTEM, user, COUNT); + register(PLUGIN, SecurityUiController.ID, manager, READ); } @Override diff --git a/molgenis-app/src/test/java/org/molgenis/app/WebAppPermissionRegistryTest.java b/molgenis-app/src/test/java/org/molgenis/app/WebAppPermissionRegistryTest.java index <HASH>..<HASH> 100644 --- a/molgenis-app/src/test/java/org/molgenis/app/WebAppPermissionRegistryTest.java +++ b/molgenis-app/src/test/java/org/molgenis/app/WebAppPermissionRegistryTest.java @@ -24,7 +24,7 @@ public class WebAppPermissionRegistryTest public void testGetPermissions() { Multimap<ObjectIdentity, Pair<PermissionSet, Sid>> permissions = new WebAppPermissionRegistry().getPermissions(); - assertEquals(permissions.size(), 44); + assertEquals(permissions.size(), 46); Collection<Pair<PermissionSet, Sid>> pairs = permissions.get(new PluginIdentity(HomeController.ID)); assertEquals(copyOf(pairs), ImmutableSet.of(new Pair<>(READ, new GrantedAuthoritySid("ROLE_ANONYMOUS")), new Pair<>(READ, new GrantedAuthoritySid("ROLE_USER"))));
fix (app): Grant COUNT on sys package to USER role and READ on Security plugin to MANAGER role.
molgenis_molgenis
train
12e06ff42bf3cc755d69c61c03819c62fccfcec3
diff --git a/core/src/test/java/org/infinispan/test/TestingUtil.java b/core/src/test/java/org/infinispan/test/TestingUtil.java index <HASH>..<HASH> 100644 --- a/core/src/test/java/org/infinispan/test/TestingUtil.java +++ b/core/src/test/java/org/infinispan/test/TestingUtil.java @@ -173,6 +173,24 @@ public class TestingUtil { log.trace("Node " + rpcManager.getAddress() + " finished rehash task."); } } + + public static void waitForRehashToComplete(Cache cache, int groupSize) { + LockSupport.parkNanos(TimeUnit.SECONDS.toNanos(1)); + int gracetime = 30000; // 30 seconds? + long giveup = System.currentTimeMillis() + gracetime; + CacheViewsManager cacheViewsManager = TestingUtil.extractGlobalComponent(cache.getCacheManager(), CacheViewsManager.class); + RpcManager rpcManager = TestingUtil.extractComponent(cache, RpcManager.class); + while (cacheViewsManager.getCommittedView(cache.getName()).getMembers().size() != groupSize) { + if (System.currentTimeMillis() > giveup) { + String message = String.format("Timed out waiting for rehash to complete on node %s, expected member count %s, current member count is %s!", + rpcManager.getAddress(), groupSize, cacheViewsManager.getCommittedView(cache.getName())); + log.error(message); + throw new RuntimeException(message); + } + LockSupport.parkNanos(TimeUnit.MILLISECONDS.toNanos(100)); + } + log.trace("Node " + rpcManager.getAddress() + " finished rehash task."); + } public static void waitForRehashToComplete(Collection<? extends Cache> caches) { waitForRehashToComplete(caches.toArray(new Cache[caches.size()]));
Add another variation of waitForRehashToComplete to TestingUtil
infinispan_infinispan
train
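The helper is a poll-with-deadline loop: keep checking the condition, sleep briefly between checks, and raise once the grace period runs out. Its generic shape in Python:

```python
import time

def wait_until(condition, grace_seconds=30, poll_seconds=0.1):
    giveup = time.time() + grace_seconds
    while not condition():
        if time.time() > giveup:
            raise TimeoutError("Timed out waiting for rehash to complete")
        time.sleep(poll_seconds)

members = ["node1", "node2"]
wait_until(lambda: len(members) == 2)   # returns immediately here
print("rehash complete")
```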
127278c7a642f7d90f5970b53e390eb209dcd9e3
diff --git a/src/com/inet/lib/less/CssFormatter.java b/src/com/inet/lib/less/CssFormatter.java index <HASH>..<HASH> 100644 --- a/src/com/inet/lib/less/CssFormatter.java +++ b/src/com/inet/lib/less/CssFormatter.java @@ -26,6 +26,7 @@ */ package com.inet.lib.less; +import java.net.URI; import java.net.URL; import java.text.DecimalFormat; import java.text.DecimalFormatSymbols; @@ -358,7 +359,12 @@ public class CssFormatter implements Cloneable { case 1: return url.startsWith( "." ); case 2: - return !url.startsWith( "data:" ); // data: URL never rewrite + if (url.startsWith("/")) return false; // never rewrite root urls + try { + return new URI(url).getScheme() == null; // nver rewrite urls with a scheme (data, http, https) + } catch (Exception e) { + return false; + } } }
in "all" mode don't rewrite when it starts already with root (/) and when it has a schem
i-net-software_jlessc
train
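The rewrite rule, restated with urllib (the Java code uses java.net.URI, so `urlparse` here is only an approximation of the same scheme check):

```python
from urllib.parse import urlparse

def should_rewrite(url):
    if url.startswith("/"):
        return False                        # never rewrite root URLs
    try:
        return urlparse(url).scheme == ""   # no scheme -> relative -> rewrite
    except ValueError:
        return False

for u in ("img/a.png", "/img/a.png", "data:image/png;base64,AAAA", "https://x/y.png"):
    print(u, "->", should_rewrite(u))
```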
fd57ef509d75fcba3c4a26594a7c26838888ad2f
diff --git a/samcli/__init__.py b/samcli/__init__.py index <HASH>..<HASH> 100644 --- a/samcli/__init__.py +++ b/samcli/__init__.py @@ -2,4 +2,4 @@ SAM CLI version """ -__version__ = '0.14.0' +__version__ = '0.14.1'
chore: Version bump SAM CLI to <I> (#<I>)
awslabs_aws-sam-cli
train
d1570e1793adcca75a0990f6d613928c880fcb9a
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -22,7 +22,7 @@ A Note on Installing lxml ------------------------- While lxml is not a requirement, I have had a hard time installing lxml in the -past. I have found this set of commands to work perfectly: +past. I have found this set of commands to work perfectly:: STATIC_DEPS=true easy_install 'lxml>=2.2beta4' STATIC_DEPS=true sudo easy_install 'lxml>=2.2beta4' @@ -56,18 +56,18 @@ Using the NZB Parser ==================== If you're using a specific parser, like the ``ETreeNZBParser``, you will first -have to instantiate it: +have to instantiate it:: nzb_parser = ETreeNZBParser() -Otherwise, you can just import the default parser for your system: +Otherwise, you can just import the default parser for your system:: from pynzb import nzb_parser Then, simply call the ``parse`` method, giving it the xml string as the only -argument: +argument:: files = nzb_parser.parse('<?xml ... my nzb file here ... </nzb>') @@ -120,7 +120,7 @@ Example -------- In this example, we will grab an Ubuntu NZB and parse the file, printing out -some information about each file and its segments. +some information about each file and its segments:: from pynzb import nzb_parser from urllib2 import urlopen
Update setup.py with the new README.rst contents.
ericflo_pynzb
train
c1182966e51b4f78e707a7e72b51a260aa4d455a
diff --git a/lib/wunderlist/api.rb b/lib/wunderlist/api.rb index <HASH>..<HASH> 100644 --- a/lib/wunderlist/api.rb +++ b/lib/wunderlist/api.rb @@ -34,9 +34,9 @@ module Wunderlist list end - def list(list_name) - list_name = [list_name] - list_id = get_list_ids(list_name)[0] + def list(list_name_or_id) + list_id = get_list_ids(list_name_or_id).first + res_list = self.request :get, "api/v1/lists/#{list_id}" list = Wunderlist::List.new(res_list) list.api = self @@ -58,8 +58,8 @@ module Wunderlist end - def webhooks(list_name) - list_id = get_list_ids([list_name]).first + def webhooks(list_name_or_id) + list_id = get_list_ids(list_name_or_id).first res_webhooks = self.request :get, 'api/v1/webhooks', { :list_id => list_id } res_webhooks.reduce([]) do |webhooks, webhook| @@ -69,8 +69,8 @@ module Wunderlist end end - def tasks(list_names = [], completed = false) - list_ids = get_list_ids(list_names) + def tasks(list_names_or_ids = [], completed = false) + list_ids = get_list_ids(list_names_or_ids) tasks = [] list_ids.each do |list_id| res_tasks = self.request :get, 'api/v1/tasks', {:list_id => list_id, :completed => completed} @@ -108,10 +108,9 @@ module Wunderlist end - def new_webhook(list_name, attrs = {}) + def new_webhook(list_name_or_id, attrs = {}) + list_id = get_list_ids(list_name_or_id).first attrs.stringify_keys - list_name = [list_name] - list_id = get_list_ids(list_name)[0] attrs['list_id'] = list_id task = Wunderlist::Webhook.new(attrs) task.api = self @@ -199,6 +198,10 @@ module Wunderlist def get_list_ids(list_names = []) + return list_names if list_names.all? {|i| i.is_a?(Integer) } + return [list_names] if list_names.is_a? Integer + list_names = [list_names] if list_names.is_a? String + lists = self.lists if !list_names.empty? lists = lists.select{|elm| list_names.include?(elm.title)}
allow passing either a list name or a list id to the API
sh8_wunderlist-api
train
a033b19b21a79bdea326e701f79fa5a440926626
diff --git a/envconfig.go b/envconfig.go index <HASH>..<HASH> 100644 --- a/envconfig.go +++ b/envconfig.go @@ -39,6 +39,9 @@ func Process(prefix string, spec interface{}) error { fieldName := typeOfSpec.Field(i).Name key := fmt.Sprintf("%s_%s", prefix, fieldName) value := os.Getenv(strings.ToUpper(key)) + if value == "" { + continue + } switch f.Kind() { case reflect.String: f.SetString(value) @@ -65,7 +68,11 @@ func Process(prefix string, spec interface{}) error { case reflect.Float32: floatValue, err := strconv.ParseFloat(value, f.Type().Bits()) if err != nil { - return err + return &ParseError{ + FieldName: fieldName, + TypeName: f.Kind().String(), + Value: value, + } } f.SetFloat(floatValue) } diff --git a/envconfig_test.go b/envconfig_test.go index <HASH>..<HASH> 100644 --- a/envconfig_test.go +++ b/envconfig_test.go @@ -17,6 +17,7 @@ type Specification struct { func TestProcess(t *testing.T) { var s Specification + os.Clearenv() os.Setenv("ENV_CONFIG_DEBUG", "true") os.Setenv("ENV_CONFIG_PORT", "8080") os.Setenv("ENV_CONFIG_RATE", "0.5") @@ -39,13 +40,55 @@ func TestProcess(t *testing.T) { } } +func TestParseErrorBool(t *testing.T) { + var s Specification + os.Clearenv() + os.Setenv("ENV_CONFIG_DEBUG", "string") + err := Process("env_config", &s) + v, ok := err.(*ParseError) + if !ok { + t.Errorf("expected ParseError, got %v", v) + } + if v.FieldName != "Debug" { + t.Errorf("expected %s, got %v", "Debug", v.FieldName) + } + if s.Debug != false { + t.Errorf("expected %v, got %v", false, s.Debug) + } +} + +func TestParseErrorFloat32(t *testing.T) { + var s Specification + os.Clearenv() + os.Setenv("ENV_CONFIG_RATE", "string") + err := Process("env_config", &s) + v, ok := err.(*ParseError) + if !ok { + t.Errorf("expected ParseError, got %v", v) + } + if v.FieldName != "Rate" { + t.Errorf("expected %s, got %v", "Rate", v.FieldName) + } + if s.Rate != 0 { + t.Errorf("expected %v, got %v", 0, s.Rate) + } +} + func TestParseErrorInt(t *testing.T) { var s Specification + os.Clearenv() os.Setenv("ENV_CONFIG_PORT", "string") err := Process("env_config", &s) - if v, ok := err.(*ParseError); !ok { + v, ok := err.(*ParseError) + if !ok { t.Errorf("expected ParseError, got %v", v) } + if v.FieldName != "Port" { + t.Errorf("expected %s, got %v", "Port", v.FieldName) + } + if s.Port != 0 { + t.Errorf("expected %v, got %v", 0, s.Port) + } } func TestInvalidSpecificationError(t *testing.T) {
Add more tests around bool specs
kelseyhightower_envconfig
train
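A compact Python analogue of the behavior under test: empty environment variables are skipped so the field keeps its zero value, and an unparsable value raises a ParseError that names the field. The prefix scheme and error shape loosely mirror the Go library:

```python
import os

class ParseError(Exception):
    def __init__(self, field, value):
        super().__init__(f"cannot parse {field} from {value!r}")
        self.field_name = field

def process(prefix, spec):
    for field, typ in (("Debug", bool), ("Port", int), ("Rate", float)):
        value = os.environ.get(f"{prefix}_{field}".upper(), "")
        if value == "":
            continue                      # leave the zero value in place
        try:
            setattr(spec, field,
                    value.lower() in ("1", "true") if typ is bool else typ(value))
        except ValueError:
            raise ParseError(field, value)

class Spec:
    Debug, Port, Rate = False, 0, 0.0

os.environ["ENV_CONFIG_PORT"] = "8080"
s = Spec(); process("env_config", s)
print(s.Port, s.Debug)   # -> 8080 False
```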
e0ea7ae3ef24072dc075d051d846239b3e4bb86d
diff --git a/src/Eloquent/PostgisTrait.php b/src/Eloquent/PostgisTrait.php index <HASH>..<HASH> 100644 --- a/src/Eloquent/PostgisTrait.php +++ b/src/Eloquent/PostgisTrait.php @@ -52,7 +52,7 @@ trait PostgisTrait } } - parent::setRawAttributes($attributes, $sync); + return parent::setRawAttributes($attributes, $sync); } public function getPostgisFields()
Fix bug that causes other packages not to work. The Eloquent Model's `setRawAttributes` method returns `this`, which allows it to be chained with other methods. This package currently does not return anything from the overridden method, which causes other packages (such as [spatie/laravel-activitylog](<URL>)) to break. This fix returns the result of `parent::setRawAttributes` to resolve this issue.
njbarrett_laravel-postgis
train
0603b300569542516231da533dc3b89c64edd880
diff --git a/p2p/host/basic/basic_host.go b/p2p/host/basic/basic_host.go index <HASH>..<HASH> 100644 --- a/p2p/host/basic/basic_host.go +++ b/p2p/host/basic/basic_host.go @@ -258,7 +258,7 @@ func (h *BasicHost) newStreamHandler(s network.Stream) { } lzc, protoID, handle, err := h.Mux().NegotiateLazy(s) - took := time.Now().Sub(before) + took := time.Since(before) if err != nil { if err == io.EOF { logf := log.Debugf diff --git a/p2p/net/mock/mock_stream.go b/p2p/net/mock/mock_stream.go index <HASH>..<HASH> 100644 --- a/p2p/net/mock/mock_stream.go +++ b/p2p/net/mock/mock_stream.go @@ -191,7 +191,7 @@ func (s *stream) transport() { default: } } - delay := o.arrivalTime.Sub(time.Now()) + delay := time.Until(o.arrivalTime) if delay >= 0 { timer.Reset(delay) } else {
Make use of time.Until & time.Since (#<I>) No need to manually subtract timestamps.
libp2p_go-libp2p
train
fc0369374b514f4320b0df16e5bbf52fdce7cf2c
diff --git a/mpop/plugin_base.py b/mpop/plugin_base.py index <HASH>..<HASH> 100644 --- a/mpop/plugin_base.py +++ b/mpop/plugin_base.py @@ -120,7 +120,7 @@ def load_plugins(directory): LOG.info("Imported plugin file "+ os.path.join(directory, name)+ ".") - except ImportError, exc: + except Exception, exc: LOG.warning("Could not read plugin file "+ os.path.join(directory, name)+ ": "+str(exc))
Bugfix: loading plugins should fail on any exception.
pytroll_satpy
train
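The defensive shape of the fix — catch any exception per plugin, log a warning, and keep loading the rest — in runnable Python:

```python
import importlib
import logging

LOG = logging.getLogger("plugins")

def load_plugins(names):
    loaded = []
    for name in names:
        try:
            loaded.append(importlib.import_module(name))
            LOG.info("Imported plugin %s.", name)
        except Exception as exc:           # not just ImportError
            LOG.warning("Could not read plugin %s: %s", name, exc)
    return loaded

print([m.__name__ for m in load_plugins(["json", "definitely_missing_plugin"])])
# -> ['json']  (the broken plugin is skipped with a warning)
```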
fd3684903989b9dc725afda4755fb5bfc4c993e6
diff --git a/src/net/sf/mpxj/mpp/MPP12Reader.java b/src/net/sf/mpxj/mpp/MPP12Reader.java index <HASH>..<HASH> 100644 --- a/src/net/sf/mpxj/mpp/MPP12Reader.java +++ b/src/net/sf/mpxj/mpp/MPP12Reader.java @@ -1597,7 +1597,7 @@ final class MPP12Reader implements MPPVariantReader task.setLateStart(MPPUtility.getTimestamp(data, 12)); task.setLevelAssignments((metaData[13] & 0x04) != 0); task.setLevelingCanSplit((metaData[13] & 0x02) != 0); - task.setLevelingDelay(MPPUtility.getDuration(((double) MPPUtility.getInt(data, 82)) / 3, MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 86)))); + task.setLevelingDelay(MPPUtility.getAdjustedDuration(m_file, MPPUtility.getInt(data, 82), MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 86)))); //task.setLinkedFields(); // Calculated value task.setMarked((metaData[9] & 0x40) != 0); task.setMilestone((metaData[8] & 0x20) != 0); diff --git a/src/net/sf/mpxj/mpp/MPP14Reader.java b/src/net/sf/mpxj/mpp/MPP14Reader.java index <HASH>..<HASH> 100644 --- a/src/net/sf/mpxj/mpp/MPP14Reader.java +++ b/src/net/sf/mpxj/mpp/MPP14Reader.java @@ -1597,7 +1597,7 @@ final class MPP14Reader implements MPPVariantReader task.setLateStart(MPPUtility.getTimestamp(data, 12)); task.setLevelAssignments((metaData[13] & 0x04) != 0); task.setLevelingCanSplit((metaData[13] & 0x02) != 0); - task.setLevelingDelay(MPPUtility.getDuration(((double) MPPUtility.getInt(data, 58)) / 3, MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 62)))); + task.setLevelingDelay(MPPUtility.getAdjustedDuration(m_file, MPPUtility.getInt(data, 58), MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 62)))); //task.setLinkedFields(); // Calculated value task.setMarked((metaData[9] & 0x40) != 0); task.setMilestone((metaData[8] & 0x20) != 0); diff --git a/src/net/sf/mpxj/mpp/MPP9Reader.java b/src/net/sf/mpxj/mpp/MPP9Reader.java index <HASH>..<HASH> 100644 --- a/src/net/sf/mpxj/mpp/MPP9Reader.java +++ b/src/net/sf/mpxj/mpp/MPP9Reader.java @@ -1826,7 +1826,7 @@ final class MPP9Reader implements MPPVariantReader task.setLateStart(MPPUtility.getTimestamp(data, 12)); task.setLevelAssignments((metaData[13] & 0x04) != 0); task.setLevelingCanSplit((metaData[13] & 0x02) != 0); - task.setLevelingDelay(MPPUtility.getDuration(((double) MPPUtility.getInt(data, 82)) / 3, MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 86)))); + task.setLevelingDelay(MPPUtility.getAdjustedDuration(m_file, MPPUtility.getInt(data, 82), MPPUtility.getDurationTimeUnits(MPPUtility.getShort(data, 86)))); //task.setLinkedFields(); // Calculated value task.setMarked((metaData[9] & 0x40) != 0); task.setMilestone((metaData[8] & 0x20) != 0); diff --git a/xdocs/changes.xml b/xdocs/changes.xml index <HASH>..<HASH> 100644 --- a/xdocs/changes.xml +++ b/xdocs/changes.xml @@ -22,6 +22,7 @@ <action dev="joniles" type="fix">Updated to fix incorrect month duration assumption (contributed by Frank Illenberger).</action> <action dev="joniles" type="fix">Updated to fix incorrect number format in MSPDI file in non-English locales (contributed by Frank Illenberger).</action> <action dev="joniles" type="fix">Updated to fix incorrect resource assignment actual work attribute for MPP14 files.</action> + <action dev="joniles" type="fix">Updated to fix incorrect task leveling delay attribute for MPP9, MPP12, and MPP14 files.</action> </release> <release version="4.0.0" date="25/05/2010">
Updated to fix leveling delay read from MPP files.
joniles_mpxj
train
7e64d1d3054eeb86ffc104996f79d88c1f1b5130
diff --git a/state-log/src/main/java/net/kuujo/copycat/StateLogConfig.java b/state-log/src/main/java/net/kuujo/copycat/StateLogConfig.java index <HASH>..<HASH> 100644 --- a/state-log/src/main/java/net/kuujo/copycat/StateLogConfig.java +++ b/state-log/src/main/java/net/kuujo/copycat/StateLogConfig.java @@ -289,4 +289,15 @@ public class StateLogConfig implements Copyable<StateLogConfig> { return flushInterval; } + /** + * Sets the log flush interval, returning the configuration for method chaining. + * + * @param flushInterval The log flush interval. + * @return The log configuration. + */ + public StateLogConfig withFlushInterval(long flushInterval) { + setFlushInterval(flushInterval); + return this; + } + }
Add missing fluent method in StateLogConfig.
atomix_atomix
train
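The new withFlushInterval method is the standard fluent-setter idiom: delegate to the plain setter, then return the configuration object so calls can be chained. A minimal Python sketch of the same idiom (class and member names are illustrative, not the Atomix API):

class StateLogConfig:
    def set_flush_interval(self, flush_interval):
        if flush_interval <= 0:
            raise ValueError("flush interval must be positive")
        self.flush_interval = flush_interval

    def with_flush_interval(self, flush_interval):
        # delegate to the plain setter, then return self for chaining
        self.set_flush_interval(flush_interval)
        return self

# chaining works because every with_* method hands back the config itself
config = StateLogConfig().with_flush_interval(1000)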
cc787208e4a6eace5283537b602cf6d7aa44d814
diff --git a/emannotationschemas/models.py b/emannotationschemas/models.py index <HASH>..<HASH> 100644 --- a/emannotationschemas/models.py +++ b/emannotationschemas/models.py @@ -11,6 +11,8 @@ Base = declarative_base() annotation_models = {} +root_id_model_name = "CellSegment" + class InvalidSchemaField(Exception): '''Exception raised if a schema can't be translated to a model''' @@ -53,7 +55,7 @@ field_column_map = { } -def add_column(attrd, k, field): +def add_column(attrd, k, field, dataset): field_type = type(field) do_index = field.metadata.get('index', False) if field_type in field_column_map: @@ -70,8 +72,12 @@ def add_column(attrd, k, field): attrd[k + "_" + sub_k] = Column(Geometry(postgis_geom, dimension=3)) else: + dyn_args = [field_column_map[type(sub_field)]] + if sub_k == 'root_id': + fk = dataset + "_" + root_id_model_name + ".id" + dyn_args.append(ForeignKey(fk)) attrd[k + "_" + - sub_k] = Column(field_column_map[type(sub_field)], + sub_k] = Column(*dyn_args, index=do_sub_index) else: raise InvalidSchemaField( @@ -81,11 +87,11 @@ def add_column(attrd, k, field): def make_cell_segment_model(dataset): attr_dict = { - '__tablename__': dataset + '_' + "CellSegment" + '__tablename__': dataset + '_' + root_id_model_name } - model_name = dataset.capitalize() + "CellSegment" + model_name = dataset.capitalize() + root_id_model_name if model_name not in annotation_models: - annotation_models[model_name] = type(model_name, (TSBase,), attrd) + annotation_models[model_name] = type(model_name, (TSBase,), attr_dict) return annotation_models[model_name] @@ -103,7 +109,7 @@ def make_annotation_model_from_schema(dataset, annotation_type, Schema): } for k, field in Schema._declared_fields.items(): if (not field.metadata.get('drop_column', False)): - attrd = add_column(attrd, k, field) + attrd = add_column(attrd, k, field, dataset) if issubclass(Schema, ReferenceAnnotation): target_field = Schema._declared_fields['target_id'] reference_type = target_field.metadata['reference_type']
adding foreign key relationships to CellSegments
seung-lab_EMAnnotationSchemas
train
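The patch points each dynamically built root_id column at a '<dataset>_CellSegment.id' target assembled at runtime. A hedged SQLAlchemy sketch of that wiring (the helper itself is hypothetical; the naming convention is taken from the diff):

from sqlalchemy import Column, ForeignKey, Integer

ROOT_ID_MODEL_NAME = "CellSegment"

def root_id_column(dataset, do_index=False):
    # reference the dataset's root-segment table, e.g. "mydata_CellSegment.id"
    fk = "%s_%s.id" % (dataset, ROOT_ID_MODEL_NAME)
    return Column(Integer, ForeignKey(fk), index=do_index)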
abab903fe8a1a98807e1910a9bde6e8a8a3e18c9
diff --git a/watchdog_reader.go b/watchdog_reader.go index <HASH>..<HASH> 100644 --- a/watchdog_reader.go +++ b/watchdog_reader.go @@ -24,17 +24,29 @@ func newWatchdogReader(reader io.Reader, timeout time.Duration, timer *time.Time } // Read reads up to len(p) bytes into p -func (t *watchdogReader) Read(p []byte) (n int, err error) { - //never read more bytes than watchdogChunkSize - readTarget := p - if len(p) > watchdogChunkSize { - readTarget = p[0:watchdogChunkSize] +func (t *watchdogReader) Read(p []byte) (int, error) { + //read from underlying reader in chunks not larger than watchdogChunkSize + //while resetting the watchdog timer before every read; the small chunk + //size ensures that the timer does not fire when reading a large amount of + //data from a slow connection + start := 0 + end := len(p) + for start < end { + length := end - start + if length > watchdogChunkSize { + length = watchdogChunkSize + } + + resetTimer(t.timer, t.timeout) + n, err := t.reader.Read(p[start:length]) + start += n + if n == 0 || err != nil { + return start, err + } } resetTimer(t.timer, t.timeout) - n, err = t.reader.Read(readTarget) - resetTimer(t.timer, t.timeout) - return + return start, nil } // Check it satisfies the interface
read as much as possible in one call
ncw_swift
train
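The rewritten Read loop bounds each underlying read to a small chunk and re-arms the watchdog before every one, so a large transfer over a slow link keeps resetting the timer instead of tripping it. A minimal Python sketch of the same pattern, assuming a caller-supplied reset_timer callable:

CHUNK = 64 * 1024  # illustrative bound, analogous to watchdogChunkSize

def read_with_watchdog(sock, n, reset_timer):
    # reset the watchdog before each bounded read so slow but steady
    # progress never lets the timeout fire mid-transfer
    buf = bytearray()
    while len(buf) < n:
        reset_timer()
        chunk = sock.recv(min(CHUNK, n - len(buf)))
        if not chunk:  # EOF or closed connection
            break
        buf.extend(chunk)
    reset_timer()
    return bytes(buf)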
7fb75e081975450ea6aeab1238b6651252ea97ea
diff --git a/copy/copy.go b/copy/copy.go index <HASH>..<HASH> 100644 --- a/copy/copy.go +++ b/copy/copy.go @@ -25,14 +25,16 @@ import ( ) type digestingReader struct { - source io.Reader - digester digest.Digester - expectedDigest digest.Digest - validationFailed bool + source io.Reader + digester digest.Digester + expectedDigest digest.Digest + validationFailed bool + validationSucceeded bool } // newDigestingReader returns an io.Reader implementation with contents of source, which will eventually return a non-EOF error -// and set validationFailed to true if the source stream does not match expectedDigest. +// or set validationSucceeded/validationFailed to true if the source stream does/does not match expectedDigest. +// (neither is set if EOF is never reached). func newDigestingReader(source io.Reader, expectedDigest digest.Digest) (*digestingReader, error) { if err := expectedDigest.Validate(); err != nil { return nil, errors.Errorf("Invalid digest specification %s", expectedDigest) @@ -65,6 +67,7 @@ func (d *digestingReader) Read(p []byte) (int, error) { d.validationFailed = true return 0, errors.Errorf("Digest did not match, expected %s, got %s", d.expectedDigest, actualDigest) } + d.validationSucceeded = true } return n, err } diff --git a/copy/copy_test.go b/copy/copy_test.go index <HASH>..<HASH> 100644 --- a/copy/copy_test.go +++ b/copy/copy_test.go @@ -51,6 +51,7 @@ func TestDigestingReaderRead(t *testing.T) { assert.Equal(t, int64(len(c.input)), n, c.digest.String()) assert.Equal(t, c.input, dest.Bytes(), c.digest.String()) assert.False(t, reader.validationFailed, c.digest.String()) + assert.True(t, reader.validationSucceeded, c.digest.String()) } // Modified input for _, c := range cases { @@ -60,7 +61,23 @@ func TestDigestingReaderRead(t *testing.T) { dest := bytes.Buffer{} _, err = io.Copy(&dest, reader) assert.Error(t, err, c.digest.String()) - assert.True(t, reader.validationFailed) + assert.True(t, reader.validationFailed, c.digest.String()) + assert.False(t, reader.validationSucceeded, c.digest.String()) + } + // Truncated input + for _, c := range cases { + source := bytes.NewReader(c.input) + reader, err := newDigestingReader(source, c.digest) + require.NoError(t, err, c.digest.String()) + if len(c.input) != 0 { + dest := bytes.Buffer{} + truncatedLen := int64(len(c.input) - 1) + n, err := io.CopyN(&dest, reader, truncatedLen) + assert.NoError(t, err, c.digest.String()) + assert.Equal(t, truncatedLen, n, c.digest.String()) + } + assert.False(t, reader.validationFailed, c.digest.String()) + assert.False(t, reader.validationSucceeded, c.digest.String()) } }
Also compute validationSucceeded in digestingReader. This will allow us to reliably detect that we have processed the full input (i.e. that PutBlob did not get the data from elsewhere and ignore it). The value is not used yet, so it does not change behavior.
containers_image
train
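The point of the new flag is that success can only be declared once EOF is actually reached; a consumer that stops early leaves both flags unset. A rough Python analogue of the wrapper using hashlib (names and stream semantics are illustrative; the source is assumed to be a binary stream):

import hashlib

class DigestingReader:
    def __init__(self, source, expected_hex, algo="sha256"):
        self.source = source
        self.expected_hex = expected_hex
        self.hasher = hashlib.new(algo)
        self.validation_failed = False
        self.validation_succeeded = False

    def read(self, size=-1):
        data = self.source.read(size)
        self.hasher.update(data)
        if data == b"":  # EOF: only now can the digest be judged
            if self.hasher.hexdigest() != self.expected_hex:
                self.validation_failed = True
                raise ValueError("digest did not match")
            self.validation_succeeded = True
        return data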
f1651489c22b5a095d4e2017f7d371e0cd8a1cba
diff --git a/lib/Doctrine/Mapper/Joined.php b/lib/Doctrine/Mapper/Joined.php index <HASH>..<HASH> 100644 --- a/lib/Doctrine/Mapper/Joined.php +++ b/lib/Doctrine/Mapper/Joined.php @@ -106,8 +106,8 @@ class Doctrine_Mapper_Joined extends Doctrine_Mapper_Abstract $record->state(Doctrine_Record::STATE_TDIRTY); - foreach ($table->getOption('joinedParents') as $parent) { - $parentTable = $conn->getTable($parent); + foreach ($table->getParentClasses() as $parent) { + $parentTable = $conn->getClassMetadata($parent); $conn->delete($parentTable, $record->identifier()); }
more experiments with the new testsuite.
doctrine_orm
train
59062804a940174b027f7d590a0ba821d0c832a9
diff --git a/src/audio/audio.js b/src/audio/audio.js index <HASH>..<HASH> 100644 --- a/src/audio/audio.js +++ b/src/audio/audio.js @@ -11,31 +11,31 @@ import loader from "./../loader/loader.js"; * audio channel list * @ignore */ -var audioTracks = {}; +let audioTracks = {}; /** * current active track * @ignore */ -var current_track_id = null; +let current_track_id = null; /** * error retry counter * @ignore */ -var retry_counter = 0; +let retry_counter = 0; /** * list of active audio formats * @ignore */ -var audioExts = []; +let audioExts = []; /** * event listener callback on load error * @ignore */ -var soundLoadError = function (sound_name, onerror_cb) { +let soundLoadError = function (sound_name, onerror_cb) { // check the retry counter if (retry_counter++ > 3) { // something went wrong
change var to let (es6) in audio.js
melonjs_melonJS
train
14dc02a696c62fafe7973f69f9b0ceb4028eeff6
diff --git a/lib/has_meta.rb b/lib/has_meta.rb index <HASH>..<HASH> 100644 --- a/lib/has_meta.rb +++ b/lib/has_meta.rb @@ -67,7 +67,7 @@ module HasMeta define_singleton_method :"find_by_#{attribute}_id" do |value| data_type = Helper.get_type value data_type = :int if data_type == :integer - self.find_by_id(Metadata.where(table_type: self.name, key: "#{attribute}_id", "#{data_type.to_s}_value": value ).first.try(:table_row_id)) + self.find_by_id(Metadata.where(meta_model_type: self.arel_table.name, key: "#{attribute}_id", "#{data_type.to_s}_value": value ).first.try(:meta_model_id)) end else @@ -82,7 +82,7 @@ module HasMeta define_singleton_method :"find_by_#{attribute}" do |value| data_type = Helper.get_type value data_type = :int if data_type == :integer - self.find_by_id(Metadata.where(table_type: self.name, key: attribute, "#{data_type.to_s}_value": value ).first.try(:table_row_id)) + self.find_by_id(Metadata.where(meta_model_type: self.arel_table.name, key: attribute, "#{data_type.to_s}_value": value ).first.try(:meta_model_id)) end end @@ -98,14 +98,14 @@ module HasMeta return false unless self.persisted? case val when {} - meta = Metadata.where(:key => key, :table_type => self.class.base_class.name, :table_row_id => self.id) + meta = Metadata.where(:key => key, :meta_model_type => self.arel_table.name, :meta_model_id => self.id) return meta.present? ? meta.last.value : nil when nil, '' - return Metadata.where(:key => key, :table_type => self.class.base_class.name, :table_row_id => self.id).destroy_all + return Metadata.where(:key => key, :meta_model_type => self.arel_table.name, :meta_model_id => self.id).destroy_all end #we are setting a value - meta = Metadata.where(:key => key, :table_type => self.class.base_class.name, :table_row_id => self.id).first_or_create + meta = Metadata.where(:key => key, :meta_model_type => self.arel_table.name, :meta_model_id => self.id).first_or_create data_type = Helper.get_type val if data_type == :integer and !val.is_a? Integer @@ -118,11 +118,11 @@ module HasMeta end #ends def meta def list_meta_keys - meta = Metadata.where(:table_type => self.class.base_class.name, :table_row_id => self.id).pluck(:key) + meta = Metadata.where(:meta_model_type => self.arel_table.name, :meta_model_id => self.id).pluck(:key) end def remove_meta(key) - Metadata.where(:key => key, :table_type => self.class.base_class.name, :table_row_id => self.id).destroy_all + Metadata.where(:key => key, :meta_model_type => self.arel_table.name, :meta_model_id => self.id).destroy_all end end #ends module InstanceMethods
update column names in has_meta to reflect belongs_to relationship
ProTrainings_has-meta
train
e311e31772705a6b65cf43323a4c3dc1f6b9282d
diff --git a/user_messages/models.py b/user_messages/models.py index <HASH>..<HASH> 100644 --- a/user_messages/models.py +++ b/user_messages/models.py @@ -13,6 +13,10 @@ class Thread(models.Model): objects = ThreadManager() + @models.permalink + def get_absolute_url(self): + return ('messages_message_lightbox', (self.id,)) + @property @cached_attribute def latest_message(self): @@ -42,4 +46,4 @@ class Message(models.Model): @models.permalink def get_absolute_url(self): - return ('thread_detail', (), {'thread_id': self.thread_id}) + return ('messages_message_lightbox', (self.thread_id,)) diff --git a/user_messages/views.py b/user_messages/views.py index <HASH>..<HASH> 100644 --- a/user_messages/views.py +++ b/user_messages/views.py @@ -26,7 +26,7 @@ def thread_detail(request, thread_id, form = MessageReplyForm(request.POST, user=request.user, thread=thread) if form.is_valid(): form.save() - return HttpResponseRedirect(reverse(inbox)) + return HttpResponseRedirect(reverse('messages_inbox')) else: form = MessageReplyForm(user=request.user, thread=thread) thread.userthread_set.filter(user=request.user).update(unread=False) @@ -60,4 +60,4 @@ def thread_delete(request, thread_id): qs = Thread.objects.filter(userthread__user=request.user) thread = get_object_or_404(qs, pk=thread_id) thread.userthread_set.filter(user=request.user).update(deleted=True) - return HttpResponseRedirect(reverse(inbox)) + return HttpResponseRedirect(reverse('messages_inbox'))
Make some changes to URL reversing.
pinax_pinax-messages
train
71a77879ff9b4c8069d7cceb2a81b3d0a32a616f
diff --git a/lib/cuke_slicer/version.rb b/lib/cuke_slicer/version.rb index <HASH>..<HASH> 100644 --- a/lib/cuke_slicer/version.rb +++ b/lib/cuke_slicer/version.rb @@ -1,4 +1,4 @@ module CukeSlicer # The current version for the gem. - VERSION = "2.1.0" + VERSION = "2.2.0" end
Bump gem version Updating gem version to '<I>' for the upcoming release.
grange-insurance_cuke_slicer
train
b8e5f04e322620494daf4b4723d11f24d660f30d
diff --git a/CHANGELOG.rst b/CHANGELOG.rst index <HASH>..<HASH> 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -1,5 +1,10 @@ # master +## Version 2.1.1 (25 February 2018) + +* switch to sdist +* better ABI mode fallback behaviour + ## Version 2.1.0 (17 November 2017) * support cffi API mode as well: much faster startup, about 20% faster on the diff --git a/README.rst b/README.rst index <HASH>..<HASH> 100644 --- a/README.rst +++ b/README.rst @@ -197,7 +197,7 @@ Update version number:: Update pypi package:: - $ python setup.py bdist_wheel + $ python setup.py sdist $ twine upload dist/* diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -66,7 +66,7 @@ author = u'John Cupitt' # The short X.Y version. version = u'2.1' # The full version, including alpha/beta/rc tags. -release = u'2.1.0' +release = u'2.1.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/pyvips/version.py b/pyvips/version.py index <HASH>..<HASH> 100644 --- a/pyvips/version.py +++ b/pyvips/version.py @@ -1,4 +1,4 @@ # this is execfile()d into setup.py imported into __init__.py -__version__ = '2.1.0' +__version__ = '2.1.1' __all__ = ['__version__'] diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -12,14 +12,6 @@ from os import path from setuptools import setup, find_packages -# normally we attempt to setup in API mode (with a C extension), but when making -# a wheel, we want to pretend to be a pure Python wheel ... use --abi to go into -# pure Python mode -ABI_mode = False -if "--abi" in sys.argv: - ABI_mode = True - sys.argv.remove("--abi") - here = path.abspath(path.dirname(__file__)) info = {} @@ -73,7 +65,7 @@ pyvips_packages = find_packages(exclude=['docs', 'tests', 'examples']) sys.path.append(path.join(here, 'pyvips')) -def API_setup(): +def setup_API(): setup( name='pyvips', version=info['__version__'], @@ -97,7 +89,7 @@ def API_setup(): zip_safe=False, ) -def ABI_setup(): +def setup_ABI(): setup( name='pyvips', version=info['__version__'], @@ -117,16 +109,12 @@ def ABI_setup(): extras_require=extras, ) +# try to install in API mode first, then if that fails, fall back to ABI -# we try to install twice: first, in API mode (which will try to build some C -# against the libvips headers), then if that fails, in ABI mode, which just -# needs the shared library - -if not ABI_mode: - try: - API_setup() - except Exception: - ABI_mode = True +# API mode requires a working C compiler plus all the libvips headers whereas +# ABI only needs the libvips shared library to be on the system -if ABI_mode: - ABI_setup() +try: + setup_API() +except Exception: + setup_ABI()
another attempt at ABI fallback package as sdist, try setup twice
libvips_pyvips
train
56fd098ee2f21631b42725283d0fbb43793cb52d
diff --git a/src/main/java/redis/clients/jedis/Connection.java b/src/main/java/redis/clients/jedis/Connection.java index <HASH>..<HASH> 100644 --- a/src/main/java/redis/clients/jedis/Connection.java +++ b/src/main/java/redis/clients/jedis/Connection.java @@ -61,24 +61,6 @@ public class Connection { } } - protected Object read() { - try { - return protocol.read(inputStream); - } catch (JedisConnectionException e) { - disconnect(); - throw new JedisConnectionException(e); - } - } - - protected void sendProtocolCommand(final Command cmd, final byte[]... args) { - try { - protocol.sendCommand(outputStream, cmd, args); - } catch (JedisConnectionException e) { - disconnect(); - throw new JedisConnectionException(e); - } - } - protected Connection sendCommand(final Command cmd, final String... args) { final byte[][] bargs = new byte[args.length][]; for (int i = 0; i < args.length; i++) { @@ -89,14 +71,14 @@ public class Connection { protected Connection sendCommand(final Command cmd, final byte[]... args) { connect(); - sendProtocolCommand(cmd, args); + protocol.sendCommand(outputStream, cmd, args); pipelinedCommands++; return this; } protected Connection sendCommand(final Command cmd) { connect(); - sendProtocolCommand(cmd, new byte[0][]); + protocol.sendCommand(outputStream, cmd, new byte[0][]); pipelinedCommands++; return this; } @@ -163,7 +145,7 @@ public class Connection { protected String getStatusCodeReply() { flush(); pipelinedCommands--; - final byte[] resp = (byte[]) read(); + final byte[] resp = (byte[]) protocol.read(inputStream); if (null == resp) { return null; } else { @@ -183,13 +165,13 @@ public class Connection { public byte[] getBinaryBulkReply() { flush(); pipelinedCommands--; - return (byte[]) read(); + return (byte[]) protocol.read(inputStream); } public Long getIntegerReply() { flush(); pipelinedCommands--; - return (Long) read(); + return (Long) protocol.read(inputStream); } public List<String> getMultiBulkReply() { @@ -200,14 +182,14 @@ public class Connection { public List<byte[]> getBinaryMultiBulkReply() { flush(); pipelinedCommands--; - return (List<byte[]>) read(); + return (List<byte[]>) protocol.read(inputStream); } @SuppressWarnings("unchecked") public List<Object> getObjectMultiBulkReply() { flush(); pipelinedCommands--; - return (List<Object>) read(); + return (List<Object>) protocol.read(inputStream); } public List<Object> getAll() { @@ -218,7 +200,7 @@ public class Connection { List<Object> all = new ArrayList<Object>(); flush(); while (pipelinedCommands > except) { - all.add(read()); + all.add(protocol.read(inputStream)); pipelinedCommands--; } return all; @@ -227,6 +209,6 @@ public class Connection { public Object getOne() { flush(); pipelinedCommands--; - return read(); + return protocol.read(inputStream); } -} +} \ No newline at end of file
keep old connection file. DO NOT CROSS BRANCHES AGAIN
xetorthio_jedis
train
03c58d9d2dc481e1f36bd08652cf0231505f220f
diff --git a/src/Propel/Generator/Behavior/Delegate/DelegateBehavior.php b/src/Propel/Generator/Behavior/Delegate/DelegateBehavior.php index <HASH>..<HASH> 100644 --- a/src/Propel/Generator/Behavior/Delegate/DelegateBehavior.php +++ b/src/Propel/Generator/Behavior/Delegate/DelegateBehavior.php @@ -130,7 +130,8 @@ class DelegateBehavior extends Behavior } $script .= " if (is_callable(array('$ARFQCN', \$name))) { - if (!\$delegate = \$this->get$relationName()) { + \$delegate = \$this->get$relationName(); + if (!\$delegate) { \$delegate = new $ARClassName(); \$this->set$relationName(\$delegate); }
Fix error of inline assignment creating false positives with ! negation.
propelorm_Propel2
train
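The fix above replaces the inline-assignment-plus-negation construct, which was producing false positives, with an explicit assignment followed by the test. Python 3.8's walrus operator forces that grouping to be written out; a tiny sketch of the safe shape (the helper names are invented for illustration):

def resolve_delegate(get_relation, make_delegate):
    # assign first, then negate: the parentheses make the order unambiguous
    if not (delegate := get_relation()):
        delegate = make_delegate()
    return delegate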
2f848e02a1e73e1079b681c2b1e6bca8eb05139f
diff --git a/lib/remi/dataset.rb b/lib/remi/dataset.rb index <HASH>..<HASH> 100644 --- a/lib/remi/dataset.rb +++ b/lib/remi/dataset.rb @@ -27,9 +27,6 @@ module Remi raise "datastep called, no block given" if not block_given? - # All this needs to do is open and close the dataset - - logger.debug "Starting datastep #{dataset}" dataset.each do |ds| @@ -42,10 +39,23 @@ module Remi ds.close_and_write_header end + end + + + def read(dataset) + + include Log + + logger.debug "Reading dataset #{dataset.name}" + + dataset.open_for_read + yield dataset + dataset.close end + class Dataset include Log @@ -177,6 +187,20 @@ module Remi end + def readline(mapto_ds) + + + # hmmm... need to figure out how to just read one row here + + mapto_ds.vars.each_with_values do |name,obj,value| + + @vars[name] = value + + end + + + end + def to_s diff --git a/scratch/dev-dataset.rb b/scratch/dev-dataset.rb index <HASH>..<HASH> 100644 --- a/scratch/dev-dataset.rb +++ b/scratch/dev-dataset.rb @@ -145,15 +145,21 @@ def test_writeread end -# read work.have do |di| -# end +# I wonder if this should be a "datastep" method as well +# then I would just have datasteps that could be nested to +# read or write depending on context + read work.have do |di| + d1[:mofo] = rand() + d1.readline(di) + d1[:russel] = "ALPHABET" + d1.output - end + end + end -# work.have.open_for_read =begin
Need to figure out how to read one row
inside-track_remi
train
b23c468b12f81531367a3e98a92f0e7fa46042de
diff --git a/.pylintrc b/.pylintrc index <HASH>..<HASH> 100644 --- a/.pylintrc +++ b/.pylintrc @@ -2,4 +2,4 @@ command=/home/oadams/venv3/bin/pylint disable=locally-disabled -limit=0.0 +limit=10.0 diff --git a/src/datasets/timit.py b/src/datasets/timit.py index <HASH>..<HASH> 100644 --- a/src/datasets/timit.py +++ b/src/datasets/timit.py @@ -136,20 +136,22 @@ def get_valid_prefixes(feat_type, target_type): if path.split("/")[-1].startswith("m")] female_valid_speakers = [path for path in valid_speakers if path.split("/")[-1].startswith("f")] - # Select the first two male speakers. + # Select the first two male speakers and first female speaker chosen_paths.extend(male_valid_speakers[:2]) - # Randomly select one female speakers. chosen_paths.extend(female_valid_speakers[:1]) - valid_prefixes = [] + valid_input_paths = [] + valid_target_paths = [] for speaker_path in chosen_paths: fns = os.listdir(speaker_path) for filename in fns: if filename.endswith(feat_type + ".npy") and not filename.startswith("sa"): - valid_prefixes.append( - os.path.join(speaker_path, filename.split()[0])) + valid_input_paths.append(os.path.join(speaker_path, filename)) + target_fn = "%s.%s" % (filename.split(".")[0], target_type) + assert target_fn in fns + valid_target_paths.append(os.path.join(speaker_path, target_fn)) - return valid_prefixes + return valid_input_paths, valid_target_paths class Corpus(corpus.AbstractCorpus): """ Class to interface with the TIMIT corpus.""" @@ -159,9 +161,8 @@ class Corpus(corpus.AbstractCorpus): def __init__(self, feat_type, target_type): super().__init__(feat_type, target_type) - if target_type != "phonemes": + if target_type != "phn": raise Exception("target_type %s not implemented." % target_type) - timit_dir = os.path.join(config.TGT_DIR, "timit") def prepare(self): """ Preprocesses the TIMIT data. """ @@ -190,7 +191,7 @@ class Corpus(corpus.AbstractCorpus): if not os.path.basename(prefix).startswith("sa")] def get_valid_prefixes(self): - get_valid_prefixes(self.feat_type, self.target_type) + return get_valid_prefixes(self.feat_type, self.target_type) def get_test_prefixes(self): raise Exception("Not implemented.") diff --git a/src/run.py b/src/run.py index <HASH>..<HASH> 100644 --- a/src/run.py +++ b/src/run.py @@ -38,7 +38,7 @@ def run(): exp_dir = prep_exp_dir() corpus = datasets.timit.Corpus(feat_type="log_mel_filterbank", - target_type="phonemes") + target_type="phn") corpus_reader = CorpusReader(corpus, num_train=2**i, max_samples=1000) model = rnn_ctc.Model(exp_dir, corpus_reader) model.train()
Changing the shape of getting valid prefixes.
persephone-tools_persephone
train
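The reshaped helper now returns parallel lists of feature and transcription paths instead of bare prefixes, asserting that every feature file has a matching target. A condensed sketch of that pairing for one speaker directory (layout and extensions are taken from the patch; the rest of the corpus handling is omitted):

import os

def paired_paths(speaker_dir, feat_type="log_mel_filterbank", target_type="phn"):
    fns = os.listdir(speaker_dir)
    inputs, targets = [], []
    for fn in fns:
        if fn.endswith(feat_type + ".npy") and not fn.startswith("sa"):
            target_fn = "%s.%s" % (fn.split(".")[0], target_type)
            assert target_fn in fns, "missing transcription for " + fn
            inputs.append(os.path.join(speaker_dir, fn))
            targets.append(os.path.join(speaker_dir, target_fn))
    return inputs, targets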
929c77a02a55049e0eb19425f4f822f490af82da
diff --git a/cli/help.js b/cli/help.js index <HASH>..<HASH> 100644 --- a/cli/help.js +++ b/cli/help.js @@ -15,9 +15,9 @@ function rewriteExitLine(line) { ' -i, --interactive open an interactive debugger (chromium dev tools)' ].join('\n') } - +console.log(process.argv) new Cucumber.Cli({ - argv: ['node', 'cucumber-electron', '--help'], + argv: process.argv, cwd: process.cwd(), stdout: process.stdout }).run()
Pass argv to Cucumber CLI
cucumber_cucumber-electron
train
eba1dd002526e9db68db63840b9f6d2df5b10c19
diff --git a/builtin/credential/approle/path_login.go b/builtin/credential/approle/path_login.go index <HASH>..<HASH> 100644 --- a/builtin/credential/approle/path_login.go +++ b/builtin/credential/approle/path_login.go @@ -287,7 +287,8 @@ func (b *backend) pathLoginUpdate(ctx context.Context, req *logical.Request, dat }, Metadata: metadata, Alias: &logical.Alias{ - Name: role.RoleID, + Name: role.RoleID, + Metadata: metadata, }, } role.PopulateTokenAuth(auth) diff --git a/builtin/credential/approle/path_login_test.go b/builtin/credential/approle/path_login_test.go index <HASH>..<HASH> 100644 --- a/builtin/credential/approle/path_login_test.go +++ b/builtin/credential/approle/path_login_test.go @@ -171,6 +171,22 @@ func TestAppRole_RoleLogin(t *testing.T) { t.Fatalf("expected a non-nil auth object in the response") } + if loginResp.Auth.Metadata == nil { + t.Fatalf("expected a non-nil metadata object in the response") + } + + if val := loginResp.Auth.Metadata["role_name"]; val != "role1" { + t.Fatalf("expected metadata.role_name to equal 'role1', got: %v", val) + } + + if loginResp.Auth.Alias.Metadata == nil { + t.Fatalf("expected a non-nil alias metadata object in the response") + } + + if val := loginResp.Auth.Alias.Metadata["role_name"]; val != "role1" { + t.Fatalf("expected metadata.alias.role_name to equal 'role1', got: %v", val) + } + // Test renewal renewReq := generateRenewRequest(storage, loginResp.Auth)
approle: Include role_name in alias metadata (#<I>) This change allows people who are using templated policies to use the role_name in their templates through {{ identity.entity.aliases.approle.metadata.role_name }}.
hashicorp_vault
train
364af50e9faa127324995f52c56e8ba0875d0dd2
diff --git a/rqalpha/mod/rqalpha_mod_sys_booking/api_booking.py b/rqalpha/mod/rqalpha_mod_sys_booking/api_booking.py index <HASH>..<HASH> 100644 --- a/rqalpha/mod/rqalpha_mod_sys_booking/api_booking.py +++ b/rqalpha/mod/rqalpha_mod_sys_booking/api_booking.py @@ -32,12 +32,6 @@ from . import mod_name @export_as_api [email protected]_phase(EXECUTION_PHASE.ON_INIT, - EXECUTION_PHASE.BEFORE_TRADING, - EXECUTION_PHASE.ON_BAR, - EXECUTION_PHASE.ON_TICK, - EXECUTION_PHASE.AFTER_TRADING, - EXECUTION_PHASE.SCHEDULED) def get_positions(booking=None): env = Environment.get_instance() mod = env.mod_dict[mod_name] @@ -46,12 +40,6 @@ def get_positions(booking=None): @export_as_api [email protected]_phase(EXECUTION_PHASE.ON_INIT, - EXECUTION_PHASE.BEFORE_TRADING, - EXECUTION_PHASE.ON_BAR, - EXECUTION_PHASE.ON_TICK, - EXECUTION_PHASE.AFTER_TRADING, - EXECUTION_PHASE.SCHEDULED) @apply_rules(verify_that('direction').is_in([POSITION_DIRECTION.LONG, POSITION_DIRECTION.SHORT])) def get_position(order_book_id, direction, booking=None): env = Environment.get_instance() @@ -60,18 +48,17 @@ def get_position(order_book_id, direction, booking=None): return booking_account.get_position(order_book_id, direction, booking) +@export_as_api @apply_rules( verify_that('id_or_ins').is_valid_future(), verify_that('amount').is_number().is_greater_or_equal_than(0), verify_that('side').is_in([SIDE.BUY, SIDE.SELL]), - verify_that('position_effect').is_in([POSITION_EFFECT.OPEN, POSITION_EFFECT.CLOSE]), + verify_that('position_effect').is_in([POSITION_EFFECT.OPEN, POSITION_EFFECT.CLOSE, POSITION_EFFECT.CLOSE_TODAY]), verify_that('style').is_instance_of((LimitOrder, MarketOrder, type(None)))) def send_order(order_book_id, amount, side, position_effect, style): env = Environment.get_instance() - order = None - - order = Order.__from_create__(order_book_id, amount, side, style, POSITION_EFFECT.CLOSE) + order = Order.__from_create__(order_book_id, amount, side, style, position_effect) env.broker.submit_order(order)
remove phase check in api_booking
ricequant_rqalpha
train
71344b478282222132104081833dfcce687692f2
diff --git a/lib/rscons/builders/program.rb b/lib/rscons/builders/program.rb index <HASH>..<HASH> 100644 --- a/lib/rscons/builders/program.rb +++ b/lib/rscons/builders/program.rb @@ -17,24 +17,22 @@ module Rscons def run(target, sources, cache, env, vars = {}) # build sources to linkable objects objects = env.build_sources(sources, [env['OBJSUFFIX'], env['LIBSUFFIX']].flatten, cache, vars) - if objects - ld = if env["LD"] - env["LD"] - elsif sources.find {|s| s.has_suffix?(env["DSUFFIX"])} - env["DC"] - elsif sources.find {|s| s.has_suffix?(env["CXXSUFFIX"])} - env["CXX"] - else - env["CC"] - end - vars = vars.merge({ - '_TARGET' => target, - '_SOURCES' => objects, - 'LD' => ld, - }) - command = env.build_command(env['LDCOM'], vars) - standard_build("LD #{target}", target, command, objects, env, cache) - end + ld = if env["LD"] + env["LD"] + elsif sources.find {|s| s.has_suffix?(env["DSUFFIX"])} + env["DC"] + elsif sources.find {|s| s.has_suffix?(env["CXXSUFFIX"])} + env["CXX"] + else + env["CC"] + end + vars = vars.merge({ + '_TARGET' => target, + '_SOURCES' => objects, + 'LD' => ld, + }) + command = env.build_command(env['LDCOM'], vars) + standard_build("LD #{target}", target, command, objects, env, cache) end end end
Program: do not check env.build_sources() return value (it will raise an exception if something was wrong)
holtrop_rscons
train
b7930a11ab0068661dbad602c208f9fe78ff7681
diff --git a/mempool/estimatefee.go b/mempool/estimatefee.go index <HASH>..<HASH> 100644 --- a/mempool/estimatefee.go +++ b/mempool/estimatefee.go @@ -45,7 +45,7 @@ const ( // it will provide fee estimations. DefaultEstimateFeeMinRegisteredBlocks = 3 - bytePerKb = 1024 + bytePerKb = 1000 btcPerSatoshi = 1E-8 )
mempool/estimatefee: make 1 kB = <I> bytes. This commit changes the value of bytePerKb to <I> from <I>. This is done to ensure consistency between the fee estimator and the mempool, where the feeRate is set to fee * <I> / serializedSize
btcsuite_btcd
train
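With the constant redefined, the estimator's fee-per-kilobyte figures line up with the mempool formula quoted in the message. A quick arithmetic check (the example numbers are invented; 1 kB = 1000 bytes as set in the diff):

def fee_rate_per_kb(fee_satoshis, serialized_size, byte_per_kb=1000):
    # mirrors feeRate = fee * bytePerKb / serializedSize
    return fee_satoshis * byte_per_kb / serialized_size

# a 250-byte transaction paying 5000 satoshis comes out at 20000 satoshis/kB
assert fee_rate_per_kb(5000, 250) == 20000.0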
6da446bfa9998f9ecfcbdf272072301ec695382d
diff --git a/lib/okcomputer/built_in_checks/size_threshold_check.rb b/lib/okcomputer/built_in_checks/size_threshold_check.rb
index <HASH>..<HASH> 100644
--- a/lib/okcomputer/built_in_checks/size_threshold_check.rb
+++ b/lib/okcomputer/built_in_checks/size_threshold_check.rb
@@ -2,6 +2,7 @@ module OKComputer
   class SizeThresholdCheck < Check
     attr_accessor :size_proc
     attr_accessor :threshold
+    attr_accessor :name

     # Public: Initialize a check for a backed-up Resque queue
     #
diff --git a/lib/okcomputer/check.rb b/lib/okcomputer/check.rb
index <HASH>..<HASH> 100644
--- a/lib/okcomputer/check.rb
+++ b/lib/okcomputer/check.rb
@@ -3,7 +3,7 @@ module OKComputer
     CALL_DEPRECATION_MESSAGE = "Deprecation warning: Please define #check rather than defining #call"

     # to be set by Registry upon registration
-    attr_accessor :name
+    attr_accessor :registrant_name
     # nil by default, only set to true if the check deems itself failed
     attr_accessor :failure_occurred
     # nil by default, set by #check to control the output
@@ -34,7 +34,7 @@ module OKComputer
     #
     # Returns a String
     def to_text
-      "#{name}: #{message}"
+      "#{registrant_name}: #{message}"
     end

     # Public: The JSON output of performing the check
@@ -43,7 +43,7 @@ module OKComputer
     def to_json(*args)
       # NOTE swallowing the arguments that Rails passes by default since we don't care. This may prove to be a bad idea
       # Rails passes stuff like this: {:prefixes=>["ok_computer", "application"], :template=>"show", :layout=>#<Proc>}]
-      {name => message}.to_json
+      {registrant_name => message}.to_json
     end

     # Public: Whether the check passed
diff --git a/lib/okcomputer/registry.rb b/lib/okcomputer/registry.rb
index <HASH>..<HASH> 100644
--- a/lib/okcomputer/registry.rb
+++ b/lib/okcomputer/registry.rb
@@ -27,7 +27,7 @@ module OKComputer
   # check_name - The name of the check to retrieve
   # check_object - Instance of Checker to register
   def self.register(check_name, check_object)
-    check_object.name = check_name
+    check_object.registrant_name = check_name
     registry[check_name] = check_object
   end

diff --git a/spec/okcomputer/check_spec.rb b/spec/okcomputer/check_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/okcomputer/check_spec.rb
+++ b/spec/okcomputer/check_spec.rb
@@ -5,7 +5,7 @@ module OKComputer
     let(:message) { "message" }

     it "has a name attribute which it does not set" do
-      subject.name.should be_nil
+      subject.registrant_name.should be_nil
     end

     context "#check" do
@@ -53,20 +53,20 @@ module OKComputer

     context "displaying the message captured by #check" do
       before do
-        subject.name = "foo"
+        subject.registrant_name = "foo"
        subject.should_not_receive(:call)
         subject.message = message
       end

       context "#to_text" do
-        it "combines the name and message" do
-          subject.to_text.should == "#{subject.name}: #{subject.message}"
+        it "combines the registrant_name and message" do
+          subject.to_text.should == "#{subject.registrant_name}: #{subject.message}"
         end
       end

       context "#to_json" do
-        it "returns JSON with the name as the key and message as the value" do
-          subject.to_json.should == {subject.name => subject.message}.to_json
+        it "returns JSON with the registrant_name as the key and message as the value" do
+          subject.to_json.should == {subject.registrant_name => subject.message}.to_json
        end
       end
     end
diff --git a/spec/okcomputer/registry_spec.rb b/spec/okcomputer/registry_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/okcomputer/registry_spec.rb
+++ b/spec/okcomputer/registry_spec.rb
@@ -2,7 +2,7 @@ require "spec_helper"

 module OKComputer
   describe Registry do
-    let(:check_object) { stub(:checker, :name= => nil) }
+    let(:check_object) { stub(:checker, :registrant_name= => nil) }

     context ".registry" do
       let(:some_hash) { stub(:hash) }
@@ -52,7 +52,7 @@ module OKComputer

     context ".register(check_name, check_object)" do
       let(:check_name) { "foo" }
-    let(:second_check_object) { stub(:checker, :name= => nil) }
+    let(:second_check_object) { stub(:checker, :registrant_name= => nil) }

     before do
       # make sure it isn't there yet
@@ -60,7 +60,7 @@ module OKComputer
     end

     it "assigns the given name to the checker" do
-      check_object.should_receive(:name=).with(check_name)
+      check_object.should_receive(:registrant_name=).with(check_name)
       Registry.register(check_name, check_object)
     end
Switch all uses of name outside of size threshold check to use registrant_name meaning the name a check is registered under
sportngin_okcomputer
train
a1aa0ed9d47dc9b34cf560c1489b2df243082988
diff --git a/lib/i18n/backend/fallbacks.rb b/lib/i18n/backend/fallbacks.rb index <HASH>..<HASH> 100644 --- a/lib/i18n/backend/fallbacks.rb +++ b/lib/i18n/backend/fallbacks.rb @@ -35,11 +35,11 @@ module I18n # usual. # # The default option takes precedence over fallback locales - # only when it's not a String. When default contains a String it - # is evaluated after fallback locales. + # only when it's a Symbol. When the default contains a String or a Proc + # it is evaluated last after all the fallback locales have been tried. def translate(locale, key, options = {}) return super if options[:fallback] - string_default = extract_string_default!(options) if options[:default] + default = extract_string_or_lambda_default!(options) if options[:default] options[:fallback] = true I18n.fallbacks[locale].each do |fallback| @@ -51,20 +51,20 @@ module I18n end options.delete(:fallback) - return super(locale, nil, options.merge(:default => string_default)) if string_default + return super(locale, nil, options.merge(:default => default)) if default raise(I18n::MissingTranslationData.new(locale, key, options)) end - def extract_string_default!(options) + def extract_string_or_lambda_default!(options) defaults = Array(options[:default]) - if index = find_first_string_default(defaults) + if index = find_first_string_or_lambda_default(defaults) options[:default] = defaults[0, index] defaults[index] end end - def find_first_string_default(defaults) - defaults.each_index { |ix| return ix if String === defaults[ix] } + def find_first_string_or_lambda_default(defaults) + defaults.each_with_index { |default, ix| return ix if String === default || Proc === default } nil end end diff --git a/test/backend/fallbacks_test.rb b/test/backend/fallbacks_test.rb index <HASH>..<HASH> 100644 --- a/test/backend/fallbacks_test.rb +++ b/test/backend/fallbacks_test.rb @@ -45,12 +45,18 @@ class I18nBackendFallbacksTranslateTest < Test::Unit::TestCase assert_equal "Baz in :de-DE", I18n.t(:bar, :locale => :'de-DE', :default => [:baz]) end + test "returns the :de translation for a missing :'de-DE' when :default is a Proc" do + assert_equal 'Bar in :de', I18n.t(:bar, :locale => :'de-DE', :default => lambda{"Default Bar"}) + assert_equal "Default Bar", I18n.t(:missing_bar, :locale => :'de-DE', :default => lambda{"Default Bar"}) + end + test "returns the :'de-DE' default :baz translation for a missing :'de-DE' when defaults contains Symbol" do assert_equal 'Baz in :de-DE', I18n.t(:missing_foo, :locale => :'de-DE', :default => [:baz, "Default Bar"]) end - test "returns the defaults translation for a missing :'de-DE' when defaults a contains String before Symbol" do + test "returns the defaults translation for a missing :'de-DE' when defaults contains a String or Proc before Symbol" do assert_equal "Default Bar", I18n.t(:missing_foo, :locale => :'de-DE', :default => [:missing_bar, "Default Bar", :baz]) + assert_equal "Default Bar", I18n.t(:missing_foo, :locale => :'de-DE', :default => [:missing_bar, lambda{"Default Bar"}, :baz]) end test "returns the default translation for a missing :'de-DE' and existing :de when default is a Hash" do
The default option takes precedence over fallback locales only when it's a Symbol. When it's a String or a Proc it is evaluated last after all the fallback locales have been tried.
ruby-i18n_i18n
train
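The precedence rule amounts to this: alternate-key defaults join the per-locale search, while the first literal or callable default is held back until every fallback locale has been tried. A loose Python rendering (the Key marker class stands in for Ruby Symbols and is an invention of this sketch):

class Key(str):
    """Marks a default as an alternate lookup key, like a Ruby Symbol."""

def resolve(key, locales, table, defaults=()):
    # defaults up to the first literal/callable stay in the locale search;
    # that first literal or callable is deferred to the very end
    idx = next((i for i, d in enumerate(defaults)
                if not isinstance(d, Key)), len(defaults))
    deferred = defaults[idx] if idx < len(defaults) else None
    keys = [key] + list(defaults[:idx])
    for loc in locales:  # e.g. ["de-DE", "de", "en"]
        for k in keys:
            if k in table.get(loc, {}):
                return table[loc][k]
    return deferred() if callable(deferred) else deferred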
e4696bb47adc1334b3e862642019dafbfb8a2ebe
diff --git a/src/CRUDlex/CRUDMySQLData.php b/src/CRUDlex/CRUDMySQLData.php index <HASH>..<HASH> 100644 --- a/src/CRUDlex/CRUDMySQLData.php +++ b/src/CRUDlex/CRUDMySQLData.php @@ -136,8 +136,6 @@ class CRUDMySQLData extends CRUDData { if ($result) { $entity->set($field, array('id' => $entity->get($field), 'name' => $result[$nameField])); - } else { - $entity->set($field, null); } } }
not getting the reference when fetching won't happen due to DB constraints
philiplb_CRUDlex
train
0b45b84bb375fbbf2ba75bfba5e605940ce46e3a
diff --git a/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DefaultDmnTransform.java b/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DefaultDmnTransform.java index <HASH>..<HASH> 100644 --- a/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DefaultDmnTransform.java +++ b/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DefaultDmnTransform.java @@ -140,11 +140,16 @@ public class DefaultDmnTransform implements DmnTransform, DmnElementTransformCon protected DmnDecision transformDecision(Decision decision) { Expression expression = decision.getExpression(); + if (expression == null) { + LOG.decisionWithoutExpression(decision); + return null; + } + if (expression instanceof DecisionTable) { return transformDecisionTable((DecisionTable) expression); } else { - LOG.decisionTypeNotSupported(decision); + LOG.decisionTypeNotSupported(expression, decision); return null; } } diff --git a/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DmnTransformLogger.java b/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DmnTransformLogger.java index <HASH>..<HASH> 100644 --- a/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DmnTransformLogger.java +++ b/engine/src/main/java/org/camunda/bpm/dmn/engine/impl/transform/DmnTransformLogger.java @@ -24,13 +24,14 @@ import org.camunda.bpm.dmn.engine.impl.DmnLogger; import org.camunda.bpm.model.dmn.BuiltinAggregator; import org.camunda.bpm.model.dmn.HitPolicy; import org.camunda.bpm.model.dmn.instance.Decision; +import org.camunda.bpm.model.dmn.instance.Expression; public class DmnTransformLogger extends DmnLogger { - public void decisionTypeNotSupported(Decision decision) { + public void decisionTypeNotSupported(Expression expression, Decision decision) { logInfo( "001", - "The expression type '{}' of the decision '{}' is not supported.", decision.getClass().getSimpleName(), decision.getName() + "The expression type '{}' of the decision '{}' is not supported. The decision will be ignored.", expression.getClass().getSimpleName(), decision.getName() ); } @@ -129,4 +130,11 @@ public class DmnTransformLogger extends DmnLogger { ); } + public void decisionWithoutExpression(Decision decision) { + logInfo( + "014", + "The decision '{}' has no expression and will be ignored.", decision.getName() + ); + } + }
chore(engine): improve logging on unsupported decision tables
camunda_camunda-engine-dmn
train
8fdd34d43021688bfee97cf3e0c132bd21c7967d
diff --git a/salt/crypt.py b/salt/crypt.py index <HASH>..<HASH> 100644 --- a/salt/crypt.py +++ b/salt/crypt.py @@ -206,8 +206,8 @@ def get_rsa_pub_key(path): ''' log.debug('salt.crypt.get_rsa_pub_key: Loading public key') if HAS_M2: - with salt.utils.files.fopen(path) as f: - data = salt.utils.stringutils.to_bytes(f.read()).replace(b'RSA ', b'') + with salt.utils.files.fopen(path, 'rb') as f: + data = f.read().replace(b'RSA ', b'') bio = BIO.MemoryBuffer(data) key = RSA.load_pub_key_bio(bio) else: diff --git a/tests/unit/test_crypt.py b/tests/unit/test_crypt.py index <HASH>..<HASH> 100644 --- a/tests/unit/test_crypt.py +++ b/tests/unit/test_crypt.py @@ -215,7 +215,7 @@ class M2CryptTestCase(TestCase): self.assertEqual(SIG, crypt.sign_message('/keydir/keyname.pem', MSG, passphrase='password')) def test_verify_signature(self): - with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)): + with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))): self.assertTrue(crypt.verify_signature('/keydir/keyname.pub', MSG, SIG)) def test_encrypt_decrypt_bin(self): @@ -289,13 +289,13 @@ class TestM2CryptoRegression47124(TestCase): @skipIf(not HAS_M2, "Skip when m2crypto is not installed") def test_m2crypto_verify_bytes(self): message = salt.utils.stringutils.to_unicode('meh') - with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)): + with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))): salt.crypt.verify_signature('/keydir/keyname.pub', message, self.SIGNATURE) @skipIf(not HAS_M2, "Skip when m2crypto is not installed") def test_m2crypto_verify_unicode(self): message = salt.utils.stringutils.to_bytes('meh') - with patch('salt.utils.files.fopen', mock_open(read_data=PUBKEY_DATA)): + with patch('salt.utils.files.fopen', mock_open(read_data=six.b(PUBKEY_DATA))): salt.crypt.verify_signature('/keydir/keyname.pub', message, self.SIGNATURE) @skipIf(not HAS_M2, "Skip when m2crypto is not installed")
m2crypto open file in rb mode for pub key
saltstack_salt
train
f73c6217bb18d2bba7e0869f1feed702f79dd7ae
diff --git a/src/main/java/pl/nort/GitConfigurationService.java b/src/main/java/pl/nort/GitConfigurationService.java index <HASH>..<HASH> 100644 --- a/src/main/java/pl/nort/GitConfigurationService.java +++ b/src/main/java/pl/nort/GitConfigurationService.java @@ -11,6 +11,13 @@ public class GitConfigurationService implements ConfigurationService { private static final String LOCAL_REPOSITORY_PATH_IN_TEMP = "nort-config-git-config-repository"; + /** + * Read configuration from the remote repository residing at {@code repositoryURI}. Keeps a local + * clone of the repository in the system tmp directory. + * + * @param repositoryURI + * @throws GitAPIException + */ public GitConfigurationService(String repositoryURI) throws GitAPIException { this(repositoryURI, System.getProperty("java.io.tmpdir"), LOCAL_REPOSITORY_PATH_IN_TEMP); } @@ -21,14 +28,22 @@ public class GitConfigurationService implements ConfigurationService { try { clonedRepoPath = File.createTempFile(localRepositoryPathInTemp, "", new File(tmpPath)); + // This folder can't exist of JGit will throw NPE on clone + clonedRepoPath.delete(); } catch (IOException e) { throw new GitConfigurationServiceException("Unable to create local clone directory: " + localRepositoryPathInTemp, e); } - Git.cloneRepository() + Git clonedRepo = Git.cloneRepository() .setURI(repositoryURI) .setDirectory(clonedRepoPath) .call(); + + try { + System.out.println(clonedRepo.getRepository().getDirectory()); + } finally { + clonedRepo.close(); + } } @Override diff --git a/src/test/java/pl/nort/GitConfigurationServiceIntegrationTest.java b/src/test/java/pl/nort/GitConfigurationServiceIntegrationTest.java index <HASH>..<HASH> 100644 --- a/src/test/java/pl/nort/GitConfigurationServiceIntegrationTest.java +++ b/src/test/java/pl/nort/GitConfigurationServiceIntegrationTest.java @@ -19,7 +19,7 @@ public class GitConfigurationServiceIntegrationTest { @Test public void shouldReadConfigFromRemoteRepository() throws Exception { - String repoCoordinates = "[email protected]:nort/config-git-sample-config.git"; + String repoCoordinates = "https://github.com/nort/config-git-sample-config.git"; GitConfigurationService gitConfigurationService = new GitConfigurationService(repoCoordinates); assertThat(gitConfigurationService.getConfiguration()).isNotEmpty();
remove tmp directory before cloning repo
cfg4j_cfg4j
train
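The crux of the fix is that the clone target must not exist when JGit runs, while File.createTempFile actually creates the file, hence the delete() before cloning. A hedged Python equivalent of the reserve-then-remove dance, using the git CLI rather than JGit:

import shutil
import subprocess
import tempfile

def clone_to_temp(repository_uri):
    # reserve a unique path, then remove it: the clone target must not exist
    path = tempfile.mkdtemp(prefix="config-git-clone-")
    shutil.rmtree(path)
    subprocess.check_call(["git", "clone", repository_uri, path])
    return path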
20970a52677e7fd3a1b72f14225d8d24d381a625
diff --git a/server/webapp/WEB-INF/rails.new/spec/javascripts/value_stream_map_renderer_spec.js b/server/webapp/WEB-INF/rails.new/spec/javascripts/value_stream_map_renderer_spec.js index <HASH>..<HASH> 100644 --- a/server/webapp/WEB-INF/rails.new/spec/javascripts/value_stream_map_renderer_spec.js +++ b/server/webapp/WEB-INF/rails.new/spec/javascripts/value_stream_map_renderer_spec.js @@ -441,7 +441,6 @@ describe("value_stream_map_renderer", function () { assertEquals("stage details for pipeline instances are not populated correctly.", "/go/pipelines/current/1/defaultStage/1", jQuery("#vsm-container #current ul ul li.stage_bar.Passed a").attr("href")); assertEquals("stage hover message is not correctly populated", "defaultStage (took 1m 57.0s)", jQuery("#vsm-container #current ul ul li.stage_bar.Passed").attr('title')); - assertEquals("stage details for pipeline instances are not populated correctly.", 1, jQuery("#vsm-container #current ul ul li.stage_bar.Unknown > span").length); });
Fix test: Do not assert on the non-existent span for stage name.
gocd_gocd
train
c1f3f8e725132499d1161a59b9dc1645b5c80504
diff --git a/phat-server/src/main/java/phat/server/microphone/TCPAudioMicroServer.java b/phat-server/src/main/java/phat/server/microphone/TCPAudioMicroServer.java index <HASH>..<HASH> 100644 --- a/phat-server/src/main/java/phat/server/microphone/TCPAudioMicroServer.java +++ b/phat-server/src/main/java/phat/server/microphone/TCPAudioMicroServer.java @@ -29,8 +29,10 @@ import java.net.ServerSocket; import java.net.Socket; import java.net.SocketException; import java.net.UnknownHostException; +import java.util.Random; import java.util.logging.Level; import java.util.logging.Logger; +import javax.sound.sampled.AudioFormat; import phat.mobile.servicemanager.server.ServiceManagerServer; import phat.sensors.microphone.MicrophoneControl; import phat.sensors.microphone.MicrophoneData; @@ -77,7 +79,7 @@ public class TCPAudioMicroServer implements SensorListener, TCPSensorServer { System.out.println("Nuevo Cliente: " + socket); upClient(socket); } catch (SocketException ex) { - if(!endServer) { + if (!endServer) { Logger.getLogger(TCPAudioMicroServer.class.getName()).log(Level.SEVERE, null, ex); } } @@ -104,7 +106,7 @@ public class TCPAudioMicroServer implements SensorListener, TCPSensorServer { } } - public boolean send(byte[] buffer, int offset, int numSamples, int numReadings) { + /*public boolean send(byte[] buffer, int offset, int numSamples, int numReadings) { if (socket != null && socket.isConnected() && oos != null) { try { int totalSize = numSamples * numReadings; @@ -123,7 +125,7 @@ public class TCPAudioMicroServer implements SensorListener, TCPSensorServer { return true; } return false; - } + }*/ @Override public void stop() { @@ -144,6 +146,9 @@ public class TCPAudioMicroServer implements SensorListener, TCPSensorServer { Logger.getLogger(getClass().getSimpleName()).log(Level.SEVERE, null, e1); } } + Random random = new Random(); + //byte[] newRated = new byte[266]; // 8000 Hz + byte[] newRated = new byte[532]; // 16000 Hz @Override public void update(Sensor source, SensorData sd) { @@ -155,10 +160,32 @@ public class TCPAudioMicroServer implements SensorListener, TCPSensorServer { if (socket != null && socket.isConnected() && oos != null) { try { int totalSize = microphoneData.getData().length; - int numSamples = microphoneData.getAudioFormat().getSampleSizeInBits(); + int frameSize = microphoneData.getAudioFormat().getFrameSize(); byte[] data = new byte[totalSize]; System.arraycopy(microphoneData.getData(), 0, data, 0, totalSize); - AudioStreamDataPacket asdp = new AudioStreamDataPacket(data, numSamples, totalSize / numSamples); + + //int i = 0, j = 0, c = 0; + for (int i = 0, j = 0, c = 0; i < newRated.length - 1 && j < data.length - 1; i += 2, j += 6/*10*/) { + newRated[i] = data[j]; + newRated[i + 1] = data[j + 1]; + if (c > 2) { + //j += 2; + j -= 2; + c = 0; + } else { + c++; + } + } + + //new AudioFormat(8000, 16, 1, true, false); + //System.out.println(microphoneData.getAudioFormat()); + for (int i = 1; i < newRated.length; i += 2) { + if (random.nextBoolean()) { + newRated[i]++; + } + } + //System.out.println(totalSize+":"+newRated.length+":"+microphoneData.getAudioFormat().getSampleRate()); + AudioStreamDataPacket asdp = new AudioStreamDataPacket(newRated, newRated.length / frameSize, frameSize); oos.writeObject(asdp); oos.flush(); oos.reset();
Send audio at <I> Hz with a little added noise
Grasia_phatsim
train
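The new update loop is a hand-rolled sample-rate conversion: it keeps a subset of the 16-bit frames and randomly bumps one byte per frame by a single LSB to dither the result. An approximate Python sketch (the ratio is illustrative, and the noisy byte is the high byte only under the little-endian frame layout the patch suggests):

import random

def downsample_16bit_le(data, keep_every=3):
    # keep one 2-byte frame out of every keep_every frames, with 1-LSB dither
    out = bytearray()
    for i in range(0, len(data) - 1, 2 * keep_every):
        lo, hi = data[i], data[i + 1]
        if random.random() < 0.5:
            hi = (hi + 1) & 0xFF  # tiny random bump on the high byte
        out += bytes((lo, hi))
    return bytes(out)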
fa971e497bf40594235662bf1acc5728d1a67f4a
diff --git a/doc/conf.py b/doc/conf.py index <HASH>..<HASH> 100644 --- a/doc/conf.py +++ b/doc/conf.py @@ -60,7 +60,7 @@ copyright = '2014-2015, Benjamin Hodgson' # The short X.Y version. version = '0.11' # The full version, including alpha/beta/rc tags. -release = '0.11.0' +release = '0.11.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -32,7 +32,7 @@ builtin_plugins = [ setup( name="Contexts", - version="0.11.0", + version="0.11.1", author="Benjamin Hodgson", author_email="[email protected]", url="https://github.com/benjamin-hodgson/Contexts",
Increment version to <I>
benjamin-hodgson_Contexts
train
33dc483f728b97624f1a7489af2a7c52a7798617
diff --git a/src/main/java/net/openhft/chronicle/map/TcpReplicator.java b/src/main/java/net/openhft/chronicle/map/TcpReplicator.java
index <HASH>..<HASH> 100644
--- a/src/main/java/net/openhft/chronicle/map/TcpReplicator.java
+++ b/src/main/java/net/openhft/chronicle/map/TcpReplicator.java
@@ -847,7 +847,7 @@ class TcpReplicator extends AbstractChannelReplicator implements Closeable {
     private class TcpSocketChannelEntryWriter {

         private final ByteBuffer out;
-        private final ByteBufferBytes in;
+        final ByteBufferBytes in;
         private final EntryCallback entryCallback;
         private long lastSentTime;

@@ -869,12 +869,6 @@ class TcpReplicator extends AbstractChannelReplicator implements Closeable {
             uncompletedWork = null;
         }

-        /**
-         * @return the buffer messages can be written to
-         */
-        Bytes buffer() {
-            return in;
-        }

        /**
         * writes the timestamp into the buffer
@@ -1023,21 +1017,48 @@ class TcpReplicator extends AbstractChannelReplicator implements Closeable {
    /**
     * Reads map entries from a socket, this could be a client or server socket
     */
-    private class TcpSocketChannelEntryReader {
-        private final ByteBuffer in;
-        private final ByteBufferBytes out;
+    class TcpSocketChannelEntryReader {
+        ByteBuffer in;
+        ByteBufferBytes out;
         public long lastHeartBeatReceived = System.currentTimeMillis();

         private int sizeInBytes;
         private byte state;

+
         private TcpSocketChannelEntryReader() {
             in = ByteBuffer.allocateDirect(replicationConfig.packetSize() + maxEntrySizeBytes);
-            out = new ByteBufferBytes(in);
+            out = new ByteBufferBytes(in.slice());
             out.limit(0);
             in.clear();
         }

+
+        void resizeBuffer(int size) {
+            if (size < in.capacity())
+                throw new IllegalStateException("it not possible to resize the buffer smaller");
+
+            ByteBuffer buffer = ByteBuffer.allocateDirect(size);
+            buffer.clear();
+
+            long outPosition = out.position();
+            long outLimit = out.limit();
+
+            int inPosition = in.position();
+            int inLimit = in.limit();
+
+            in.position(0);
+            for (int i = 0; i < inLimit; i++) {
+                buffer.put(in.get());
+            }
+            buffer.position(inPosition);
+            buffer.limit(inLimit);
+            in = buffer;
+            out = new ByteBufferBytes(in.slice());
+            out.position(outPosition);
+            out.limit(outLimit);
+        }
+
        /**
         * reads from the socket and writes them to the buffer
         *
@@ -1104,7 +1125,7 @@ class TcpReplicator extends AbstractChannelReplicator implements Closeable {
                 } else {

                     final Work futureWork = statelessServerConnector.processStatelessEvent(state,
-                            out, attached.entryWriter.buffer());
+                            attached.entryWriter.in, attached.entryReader);

                     // in some cases it may not be possible to send out all the data before we
                     // fill out the write buffer, so this data will be send when the buffer
@@ -1112,7 +1133,7 @@ class TcpReplicator extends AbstractChannelReplicator implements Closeable {
                     if (futureWork != null) {

                         // we will complete what we can now
-                        boolean isComplete = futureWork.doWork(attached.entryWriter.buffer());
+                        boolean isComplete = futureWork.doWork(attached.entryWriter.in);

                         if (!isComplete)
                             attached.entryWriter.uncompletedWork = futureWork;
@@ -1205,8 +1226,10 @@ class StatelessServerConnector<K, V> {
     }

     Work processStatelessEvent(byte eventId,
-                               @NotNull Bytes in,
-                               @NotNull Bytes out) {
+                               @NotNull Bytes out,
+                               TcpReplicator.TcpSocketChannelEntryReader reader) {
+
+        ByteBufferBytes in = reader.out;

         final StatelessChronicleMap.EventId event = StatelessChronicleMap.EventId.values()[eventId];

@@ -1643,7 +1666,6 @@ class StatelessServerConnector<K, V> {
         out.writeObject(e);
     }

-
     private Map<K, V> readEntries(Bytes in) {

         long size = in.readStopBit();

@@ -1654,6 +1676,8 @@ class StatelessServerConnector<K, V> {
         }
         return result;
     }
+
+
 }

 class KeyValueSerializer<K, V> {
HCOLL-<I> added resizeBuffer()
OpenHFT_Chronicle-Map
train
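resizeBuffer() grows the backing store, copies the bytes already buffered, and carries the reader's position and limit across unchanged, refusing to shrink. The bookkeeping reduces to a few lines in Python with a bytearray standing in for the direct ByteBuffer (a sketch, not the Chronicle-Map API):

def resize_buffer(buf, position, limit, new_size):
    if new_size < len(buf):
        raise ValueError("not possible to resize the buffer smaller")
    new_buf = bytearray(new_size)
    new_buf[:limit] = buf[:limit]  # preserve everything buffered so far
    # position and limit are carried over unchanged, as in the diff
    return new_buf, position, limit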
7a4e41e077915242b8a072d6807d3c67cd3c0a7a
diff --git a/templates/vue/application/src/router.js b/templates/vue/application/src/router.js index <HASH>..<HASH> 100644 --- a/templates/vue/application/src/router.js +++ b/templates/vue/application/src/router.js @@ -75,10 +75,15 @@ const router = new Router({ }); router.beforeEach((to, from, next) => { + + if (to.name === "login-form" && auth.athenticated()) { + next({ name: "home" }); + } + if (to.matched.some(record => record.meta.requiresAuth)) { if (!auth.athenticated()) { next({ - path: "/login-form", + name: "login-form", query: { redirect: to.fullPath } }); } else {
Add redirect from login-form to home view for authenticated user (#<I>)
DevExpress_devextreme-cli
train
d80bf0e3556cc384733bd8b73926f77aa375c9d3
diff --git a/openquake/calculators/ebrisk.py b/openquake/calculators/ebrisk.py index <HASH>..<HASH> 100644 --- a/openquake/calculators/ebrisk.py +++ b/openquake/calculators/ebrisk.py @@ -216,7 +216,7 @@ class EbriskCalculator(event_based.EventBasedCalculator): def check_number_loss_curves(self): """ - Raise an error if generating too many loss curves (>max_num_loss_curves) + Raise an error for too many loss curves (> max_num_loss_curves) """ shp = self.assetcol.tagcol.agg_shape( (self.L,), aggregate_by=self.oqparam.aggregate_by)
Improved docstring [skip CI]
gem_oq-engine
train
c7f4424cdf17d47b3eeca92619b5cf8cde573217
diff --git a/ceph_deploy/conf/cephdeploy.py b/ceph_deploy/conf/cephdeploy.py index <HASH>..<HASH> 100644 --- a/ceph_deploy/conf/cephdeploy.py +++ b/ceph_deploy/conf/cephdeploy.py @@ -22,6 +22,7 @@ cd_conf_template = """ # Repositories section # +# yum repos: # [myrepo] # baseurl = https://user:[email protected]/rhel6 # gpgurl = https://example.org/keys/release.asc @@ -35,6 +36,17 @@ cd_conf_template = """ # gpgcheck=1 # type=rpm-md # gpgkey=https://ceph.com/git/?p=ceph.git;a=blob_plain;f=keys/autobuild.asc + +# apt repos: +# [myrepo] +# baseurl = https://user:[email protected]/ +# gpgurl = https://example.org/keys/release.asc +# default = True +# extra-repos = cephrepo # will install the cephrepo file too +# +# [cephrepo] +# baseurl=http://ceph.com/rpm-emperor/el6/noarch +# gpgkey=https://ceph.com/git/?p=ceph.git;a=blob_plain;f=keys/autobuild.asc """
add apt repo examples to conf stub
ceph_ceph-deploy
train
5c7d776ed8c93498832f81d24fdf7c1782d96717
diff --git a/views/js/preview/actionBarHook.js b/views/js/preview/actionBarHook.js
index <HASH>..<HASH> 100644
--- a/views/js/preview/actionBarHook.js
+++ b/views/js/preview/actionBarHook.js
@@ -67,6 +67,7 @@ define([
 
             }
         });
     });
+    $(".preview-container").remove();
 });
 
added method to remove iframe from DOM
oat-sa_extension-tao-item
train
7c57814949ec51b59cc2d1c31ea5401e7a3323c9
diff --git a/gifi/feature.py b/gifi/feature.py
index <HASH>..<HASH> 100644
--- a/gifi/feature.py
+++ b/gifi/feature.py
@@ -104,7 +104,7 @@ def _finish():
         raise CommandException('Rebase finished with an error, please fix it manually and then feature-finish once again.')
     _push_working_branch(config, repo)
     repo.git.push(config.target_remote, 'HEAD:%s' % config.target_branch)
-    _discard(repo)
+    _discard()
    pull_requests = _get_pull_requests(repo)
    if len(pull_requests) > 0:
        slack.notify('%s merged.' % ', '.join(pull_requests))
@@ -114,7 +114,7 @@ def _get_pull_requests(repo):
     return get_from_last_commit_message(repo, PULL_REQUEST_COMMIT_TAG)
 
 
-def _discard(repo=None):
+def _discard():
     repo = get_repo(repo)
     config = configuration(repo)
     _fetch(repo, config)
Do not pass repo to discard command as it does not work from CLI
kokosing_git-gifi
train
68145aa91a8ab3545aaeb32ff94768e060d0a8c4
diff --git a/lionengine-game-platform/src/main/java/com/b3dgs/lionengine/game/platform/CameraPlatform.java b/lionengine-game-platform/src/main/java/com/b3dgs/lionengine/game/platform/CameraPlatform.java
index <HASH>..<HASH> 100644
--- a/lionengine-game-platform/src/main/java/com/b3dgs/lionengine/game/platform/CameraPlatform.java
+++ b/lionengine-game-platform/src/main/java/com/b3dgs/lionengine/game/platform/CameraPlatform.java
@@ -158,7 +158,7 @@ public class CameraPlatform
     private void checkHorizontalLimit(double vx)
     {
         // Middle
-        if (offset.getLocationIntX() == -intervalHorizontal || offset.getLocationIntX() == intervalHorizontal)
+        if (offset.getLocationX() == -intervalHorizontal || offset.getLocationX() == intervalHorizontal)
         {
             location.moveLocation(1, vx, 0);
         }
@@ -210,12 +210,12 @@ public class CameraPlatform
         offset.moveLocation(1, vx, 0);
 
-        if (location.getLocationIntX() == getLimitMapLeft() && offset.getLocationX() >= intervalHorizontal)
+        if (location.getLocationX() == getLimitMapLeft() && offset.getLocationX() >= intervalHorizontal)
         {
             offset.setLocationX(intervalHorizontal);
             location.moveLocation(1, vx, 0);
         }
-        if (location.getLocationIntX() == getLimitMapRight() && offset.getLocationX() <= -intervalHorizontal)
+        if (location.getLocationX() == getLimitMapRight() && offset.getLocationX() <= -intervalHorizontal)
         {
             offset.setLocationX(-intervalHorizontal);
             location.moveLocation(1, vx, 0);
         }
diff --git a/lionengine-game/src/main/java/com/b3dgs/lionengine/game/CameraGame.java b/lionengine-game/src/main/java/com/b3dgs/lionengine/game/CameraGame.java
index <HASH>..<HASH> 100644
--- a/lionengine-game/src/main/java/com/b3dgs/lionengine/game/CameraGame.java
+++ b/lionengine-game/src/main/java/com/b3dgs/lionengine/game/CameraGame.java
@@ -299,36 +299,36 @@ public class CameraGame
         double my = 0.0;
         // Horizontal move
         // Can scroll only on offset interval
-        if (offset.getLocationIntX() <= -intervalHorizontal || offset.getLocationIntX() >= intervalHorizontal)
+        if (offset.getLocationX() <= -intervalHorizontal || offset.getLocationX() >= intervalHorizontal)
         {
             mx = vx;
         }
         offset.moveLocation(extrp, vx, 0);
 
         // Block offset on its limits
-        if (offset.getLocationIntX() < -intervalHorizontal)
+        if (offset.getLocationX() < -intervalHorizontal)
         {
             offset.setLocationX(-intervalHorizontal);
         }
-        if (offset.getLocationIntX() > intervalHorizontal)
+        if (offset.getLocationX() > intervalHorizontal)
        {
             offset.setLocationX(intervalHorizontal);
         }
 
         // Vertical move
         // Can scroll only on offset interval
-        if (offset.getLocationIntY() <= -intervalVertical || offset.getLocationIntX() >= intervalVertical)
+        if (offset.getLocationIntY() <= -intervalVertical || offset.getLocationY() >= intervalVertical)
         {
             my = vy;
         }
         offset.moveLocation(extrp, 0, vy);
 
         // Block offset on its limits
-        if (offset.getLocationIntY() < -intervalVertical)
+        if (offset.getLocationY() < -intervalVertical)
         {
             offset.setLocationY(-intervalVertical);
         }
-        if (offset.getLocationIntY() > intervalVertical)
+        if (offset.getLocationY() > intervalVertical)
         {
             offset.setLocationY(intervalVertical);
         }
#<I>: Glitch fixed.
b3dgs_lionengine
train
991620375669d2bcae20e1aa44bb07435aa0852c
diff --git a/src/dotenvy/cli.py b/src/dotenvy/cli.py
index <HASH>..<HASH> 100755
--- a/src/dotenvy/cli.py
+++ b/src/dotenvy/cli.py
@@ -8,6 +8,7 @@ from future import standard_library
 standard_library.install_aliases()
 
 import argparse
+import os
 import subprocess
 import sys
 
@@ -26,7 +27,8 @@ def main(stdout=sys.stdout):
     args = parser.parse_args()
 
     try:
-        env = dotenvy.read_file(args.file)
+        env = os.environ.copy()
+        env.update(dotenvy.read_file(args.file))
     except IOError as e:
         sys.exit('Cannot load dotenv: %s\n%s' % (args.file, str(e)))
 
Fix cli tool doesn't use full env
chickenzord_dotenvy
train
05eb4425fa3bffca94a9fed380d265316d582b66
diff --git a/spec/helper.rb b/spec/helper.rb
index <HASH>..<HASH> 100644
--- a/spec/helper.rb
+++ b/spec/helper.rb
@@ -1,6 +1,8 @@
 unless ENV['CI']
   require 'simplecov'
-  SimpleCov.start
+  SimpleCov.start do
+    add_filter 'spec'
+  end
 end
 require 'base64'
 require 'mtgox'
Don't include specs in code coverage report
sferik_mtgox
train