Columns:
  hash     string, length 40
  diff     string, 131 to 114k characters
  message  string, 7 to 980 characters
  project  string, 5 to 67 characters
  split    string, 1 class
846ca4b52b11e68a121252390144bb6ea4f2a172
diff --git a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java index <HASH>..<HASH> 100644 --- a/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java +++ b/molgenis-data-mapper/src/main/java/org/molgenis/data/mapper/service/impl/MappingServiceImpl.java @@ -1,6 +1,5 @@ package org.molgenis.data.mapper.service.impl; -import org.elasticsearch.common.collect.Lists; import org.molgenis.MolgenisFieldTypes.AttributeType; import org.molgenis.auth.MolgenisUser; import org.molgenis.data.*; @@ -21,26 +20,25 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Autowired; import org.springframework.security.access.prepost.PreAuthorize; -import org.springframework.security.core.context.SecurityContextHolder; import org.springframework.transaction.annotation.Transactional; -import java.util.Collections; -import java.util.Iterator; import java.util.List; -import java.util.Objects; -import java.util.stream.Collectors; +import static java.util.Collections.singletonList; import static java.util.Objects.requireNonNull; import static org.molgenis.MolgenisFieldTypes.AttributeType.*; import static org.molgenis.data.mapper.meta.MappingProjectMetaData.NAME; import static org.molgenis.data.meta.model.EntityMetaData.AttributeCopyMode.DEEP_COPY_ATTRS; import static org.molgenis.security.core.runas.RunAsSystemProxy.runAsSystem; +import static org.molgenis.util.DependencyResolver.hasSelfReferences; +import static org.springframework.security.core.context.SecurityContextHolder.getContext; public class MappingServiceImpl implements MappingService { private static final Logger LOG = LoggerFactory.getLogger(MappingServiceImpl.class); private static final int BATCH_SIZE = 1000; + public static final String SOURCE = "source"; private final DataService dataService; private final AlgorithmService algorithmService; @@ -202,6 +200,11 @@ public class MappingServiceImpl implements MappingService { LOG.info("Applying mappings to repository [" + targetMetaData.getName() + "]"); applyMappingsToRepositories(mappingTarget, targetRepo); + if (hasSelfReferences(targetRepo.getEntityMetaData())) + { + LOG.info("Self reference found, applying the mapping for a second time to set references"); + applyMappingsToRepositories(mappingTarget, targetRepo); + } LOG.info("Done applying mappings to repository [" + targetMetaData.getName() + "]"); return targetMetaData.getName(); }
Check if there are self referencing attributes and apply mapping twice
molgenis_molgenis
train
983d36b7eed36fe43bfb9536f81f245e12aee562
diff --git a/src/Forms/FormRequestHandler.php b/src/Forms/FormRequestHandler.php index <HASH>..<HASH> 100644 --- a/src/Forms/FormRequestHandler.php +++ b/src/Forms/FormRequestHandler.php @@ -260,7 +260,7 @@ class FormRequestHandler extends RequestHandler ); } - return $this->httpError(404); + return $this->httpError(404, "Could not find a suitable form-action callback function"); } /**
FIX: Better message when form action handler not found. Fixes #<I> to some extent, although this fix will be most useful when <URL>
silverstripe_silverstripe-framework
train
25a9fa7925a74581ebabe5ef74cabdea82b053ba
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -1,7 +1,7 @@ var fs = require('fs'); var path = require('path'); -var potentialFilenames = ['LICENSE', 'README', 'README.md', 'README.markdown','license.txt']; +var potentialFilenames = ['LICENSE', 'LICENSE.md', 'README', 'README.md', 'README.markdown', 'license.txt']; var licenseFromString = function(str){ if (str.indexOf('MIT') > -1) { @@ -9,7 +9,7 @@ var licenseFromString = function(str){ } else if (str.indexOf('BSD') > -1) { return 'BSD*'; } else if (str.indexOf('Apache License') > -1) { - return 'Apache*'; + return 'Apache*'; } else if (str.indexOf('Mozilla') > -1) { return 'Mozilla*'; } else if (str.indexOf('LGPL') > -1) { @@ -24,7 +24,7 @@ var licenseFromString = function(str){ return 'Artistic*'; } else if (str.indexOf('DO WHAT THE FUCK YOU WANT TO PUBLIC LICENSE') > -1) { return 'WTF*'; - } + } } module.exports = function(packagePath){
Some repos have license.md listed so GitHub lists it nicely.
AceMetrix_package-license
train
5523c37b65f3225ee0b55386c75598f43975c8ac
diff --git a/lib/mongodb/commands/base_command.js b/lib/mongodb/commands/base_command.js index <HASH>..<HASH> 100644 --- a/lib/mongodb/commands/base_command.js +++ b/lib/mongodb/commands/base_command.js @@ -1,15 +1,17 @@ /** Base object used for common functionality **/ -var BaseCommand = exports.BaseCommand = function() { +var BaseCommand = exports.BaseCommand = function BaseCommand() { }; var id = 1; -BaseCommand.prototype.getRequestId = function() { +BaseCommand.prototype.getRequestId = function getRequestId() { if (!this.requestId) this.requestId = id++; return this.requestId; }; +BaseCommand.prototype.setMongosReadPreference = function setMongosReadPreference(readPreference, tags) {} + BaseCommand.prototype.updateRequestId = function() { this.requestId = id++; return this.requestId;
Added setMongosReadPreference on BaseCommand
mongodb_node-mongodb-native
train
80588a0d787bcc955e027ed13e9b6f30eaa7c41b
diff --git a/main.go b/main.go index <HASH>..<HASH> 100644 --- a/main.go +++ b/main.go @@ -15,7 +15,7 @@ import ( //"encoding/base64" ) -var plat string +var plat, appPath, version, genDir string type current struct { Version string @@ -58,21 +58,14 @@ func newGzReader(r io.ReadCloser) io.ReadCloser { return g } -func main() { - plat = os.Getenv("GOOS") + "-" + os.Getenv("GOARCH") - - appPath := os.Args[1] - version := os.Args[2] - genDir := "public" - os.MkdirAll(genDir, 0755) - - c := current{Version: version, Sha256: generateSha256(appPath)} +func createUpdate(path string, platform string) { + c := current{Version: version, Sha256: generateSha256(path)} b, err := json.MarshalIndent(c, "", " ") if err != nil { fmt.Println("error:", err) } - err = ioutil.WriteFile(filepath.Join(genDir, plat+".json"), b, 0755) + err = ioutil.WriteFile(filepath.Join(genDir, platform+".json"), b, 0755) if err != nil { panic(err) } @@ -81,13 +74,13 @@ func main() { var buf bytes.Buffer w := gzip.NewWriter(&buf) - f, err := ioutil.ReadFile(appPath) + f, err := ioutil.ReadFile(path) if err != nil { panic(err) } w.Write(f) w.Close() // You must close this first to flush the bytes to the buffer. - err = ioutil.WriteFile(filepath.Join(genDir, version, plat+".gz"), buf.Bytes(), 0755) + err = ioutil.WriteFile(filepath.Join(genDir, version, platform+".gz"), buf.Bytes(), 0755) files, err := ioutil.ReadDir(genDir) if err != nil { @@ -104,14 +97,14 @@ func main() { os.Mkdir(filepath.Join(genDir, file.Name(), version), 0755) - fName := filepath.Join(genDir, file.Name(), plat+".gz") + fName := filepath.Join(genDir, file.Name(), platform+".gz") old, err := os.Open(fName) if err != nil { - // Don't have an old release for this os/arch, continue on - continue + // Don't have an old release for this os/arch, continue on + continue } - fName = filepath.Join(genDir, version, plat+".gz") + fName = filepath.Join(genDir, version, platform+".gz") newF, err := os.Open(fName) if err != nil { fmt.Fprintf(os.Stderr, "Can't open %s: error: %s\n", fName, err) @@ -126,7 +119,26 @@ func main() { if err := binarydist.Diff(ar, br, patch); err != nil { panic(err) } - ioutil.WriteFile(filepath.Join(genDir, file.Name(), version, plat), patch.Bytes(), 0755) + ioutil.WriteFile(filepath.Join(genDir, file.Name(), version, platform), patch.Bytes(), 0755) + } +} + +func main() { + plat = os.Getenv("GOOS") + "-" + os.Getenv("GOARCH") + + appPath = os.Args[1] + version = os.Args[2] + genDir = "public" + os.MkdirAll(genDir, 0755) + + // If dir is given create update for each file + files, err := ioutil.ReadDir(appPath) + if err == nil { + for _, file := range files { + createUpdate(filepath.Join(appPath, file.Name()), file.Name()) + } + os.Exit(0) } + createUpdate(appPath, plat) }
Added ability to specify a directory of binaries for cross compiling
sqs_go-selfupdate
train
5b084007671dd4ee10e601f4fda68faa6bbaf31c
diff --git a/src/components/Overlay.js b/src/components/Overlay.js index <HASH>..<HASH> 100644 --- a/src/components/Overlay.js +++ b/src/components/Overlay.js @@ -22,6 +22,10 @@ class Overlay extends React.Component { updateOverlay(overlay) { const overlayOptions = this.props.pickOverlayOptions(this.props); + if (this.props.map !== this.overlay.getMap()) { + overlayOptions.map = this.props.map; + } + overlay.setOptions(overlayOptions); }
#<I> children in componentDidMount issue
zeakd_react-naver-maps
train
65a46facc102c75323d65e5857172e8ac328d146
diff --git a/baton/_baton_mappers.py b/baton/_baton_mappers.py index <HASH>..<HASH> 100644 --- a/baton/_baton_mappers.py +++ b/baton/_baton_mappers.py @@ -15,20 +15,20 @@ from baton.mappers import DataObjectMapper, CollectionMapper, IrodsEntityMapper, from baton.models import DataObject, Collection, PreparedSpecificQuery, SpecificQuery, SearchCriterion -class IrodsMetadataMapper(BatonRunner, IrodsMetadataMapper): +class BatonIrodsMetadataMapper(BatonRunner, IrodsMetadataMapper): """ iRODS metadata mapper, implemented using baton. """ - def get_all(self, path: str) -> Sequence[IrodsMetadata]: + def get_all(self, path: str) -> IrodsMetadata: pass - def add(self, path: str, metadata: Union[IrodsMetadata, Union[IrodsMetadata, Iterable[IrodsMetadata]]]): + def set(self, path: str, metadata: IrodsMetadata): pass - def set(self, path: str, metadata: Union[IrodsMetadata, Iterable[IrodsMetadata]]): + def add(self, path: str, metadata: IrodsMetadata): pass - def remove(self, path: str, metadata: Union[IrodsMetadata, Iterable[IrodsMetadata]]): + def remove(self, path: str, metadata: IrodsMetadata): pass diff --git a/baton/mappers.py b/baton/mappers.py index <HASH>..<HASH> 100644 --- a/baton/mappers.py +++ b/baton/mappers.py @@ -12,40 +12,48 @@ class IrodsMetadataMapper(Generic[EntityType], metaclass=ABCMeta): iRODS metadata mapper. """ @abstractmethod - def get_all(self, path: str) -> Sequence[IrodsMetadata]: + def get_all(self, path: str) -> IrodsMetadata: """ Gets all of the metadata for the given entity. - :param path: the path of the entity to getr the metadata for + + A `ValueError` will be raised will be raised if the path does not correspond to a valid entity. + :param path: the path of the entity to get the metadata for :return: metadata for the given entity """ @abstractmethod - def add(self, path: str, metadata: Union[IrodsMetadata, Union[IrodsMetadata, Iterable[IrodsMetadata]]]): + def add(self, path: str, metadata: IrodsMetadata): """ - Adds the given metadata or collection of metadata to the given iRODS entity. + Adds the given metadata to the given iRODS entity. + + A `ValueError` will be raised will be raised if the path does not correspond to a valid entity. :param path: the path of the entity to add the metadata to :param metadata: the metadata to write """ @abstractmethod - def set(self, path: str, metadata: Union[IrodsMetadata, Iterable[IrodsMetadata]]): + def set(self, path: str, metadata: IrodsMetadata): """ - Sets the given metadata or collection of metadata on the given iRODS entity. + Sets the given metadata on the given iRODS entity. Similar to `add` although pre-existing metadata with matching keys will be overwritten. + + A `ValueError` will be raised will be raised if the path does not correspond to a valid entity. :param path: the path of the entity to set the metadata for :param metadata: the metadata to set """ @abstractmethod - def remove(self, path: str, metadata: Union[IrodsMetadata, Iterable[IrodsMetadata]]): + def remove(self, path: str, metadata: IrodsMetadata): """ - Removes the given metadata or collection of metadata from the given iRODS entity. + Removes the given metadata from the given iRODS entity. - An exception will be raised if the entity does not have metadata with the given key and value. If this exception + A `KeyError` will be raised if the entity does not have metadata with the given key and value. 
If this exception is raised part-way through the removal of multiple pieces of metadata, a rollback will not occur - it would be necessary to get the metadata for the entity to determine what metadata in the collection was removed successfully. + + A `ValueError` will be raised will be raised if the path does not correspond to a valid entity. :param path: the path of the entity to remove metadata from :param metadata: the metadata to remove """
Fixed issue with interface for #<I>.
wtsi-hgi_python-baton-wrapper
train
adc3761bfc868f0349e238769871070b8ca18ab2
diff --git a/plugin/pkg/scheduler/factory/plugins.go b/plugin/pkg/scheduler/factory/plugins.go index <HASH>..<HASH> 100644 --- a/plugin/pkg/scheduler/factory/plugins.go +++ b/plugin/pkg/scheduler/factory/plugins.go @@ -19,6 +19,7 @@ package factory import ( "fmt" "regexp" + "sort" "strings" "sync" @@ -317,5 +318,6 @@ func ListAlgorithmProviders() string { for name := range algorithmProviderMap { availableAlgorithmProviders = append(availableAlgorithmProviders, name) } + sort.Strings(availableAlgorithmProviders) return strings.Join(availableAlgorithmProviders, " | ") }
Doc page for scheduler is not stable (for man pages) Need to sort maps
kubernetes_kubernetes
train
02c2d2468842ef6802f41c39bb5b609e0c8ccc20
diff --git a/buildbot/master.py b/buildbot/master.py index <HASH>..<HASH> 100644 --- a/buildbot/master.py +++ b/buildbot/master.py @@ -217,7 +217,7 @@ class BotMaster(service.MultiService): if t2 is None: return -1 return cmp(t1, t2) - builders.sort(cmp=_sortfunc) + builders.sort(_sortfunc) try: for b in builders: b.maybeStartBuild() diff --git a/buildbot/status/web/feeds.py b/buildbot/status/web/feeds.py index <HASH>..<HASH> 100644 --- a/buildbot/status/web/feeds.py +++ b/buildbot/status/web/feeds.py @@ -139,16 +139,12 @@ class FeedResource(XmlResource): break # Sort build list by date, youngest first. - if sys.version_info[:3] >= (2,4,0): - builds.sort(key=lambda build: build.getTimes(), reverse=True) - else: - # If you need compatibility with python < 2.4, use this for - # sorting instead: - # We apply Decorate-Sort-Undecorate - deco = [(build.getTimes(), build) for build in builds] - deco.sort() - deco.reverse() - builds = [build for (b1, build) in deco] + # To keep compatibility with python < 2.4, use this for sorting instead: + # We apply Decorate-Sort-Undecorate + deco = [(build.getTimes(), build) for build in builds] + deco.sort() + deco.reverse() + builds = [build for (b1, build) in deco] if builds: builds = builds[:min(len(builds), maxFeeds)] diff --git a/buildbot/util.py b/buildbot/util.py index <HASH>..<HASH> 100644 --- a/buildbot/util.py +++ b/buildbot/util.py @@ -18,7 +18,10 @@ def naturalSort(l): return s def key_func(item): return [try_int(s) for s in re.split('(\d+)', item)] - l.sort(key=key_func) + # prepend integer keys to each element, sort them, then strip the keys + keyed_l = [ (key_func(i), i) for i in l ] + keyed_l.sort() + l = [ i[1] for i in keyed_l ] return l def now():
(closes #<I>) don't use keyword arguments to List.sort
buildbot_buildbot
train
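The buildbot commit above drops `list.sort(key=...)` in favour of the decorate-sort-undecorate idiom so the code keeps working on Python releases older than 2.4. A minimal standalone sketch of that idiom follows; the data and variable names are invented for illustration and are not taken from buildbot.
```
# Decorate-sort-undecorate: pair each item with its sort key, sort the
# pairs, then strip the keys off again. It needs no key= argument, so it
# runs on very old Pythons as well as current ones.
builds = [("job-c", 30), ("job-a", 10), ("job-b", 20)]

# Decorate: (key, item) tuples, sorting by the second field of each item.
decorated = [(item[1], item) for item in builds]
decorated.sort()
decorated.reverse()  # largest key first, mirroring the feed code above

# Undecorate: keep only the original items.
builds = [item for (_key, item) in decorated]
print(builds)  # [('job-c', 30), ('job-b', 20), ('job-a', 10)]
```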
a9c29bbd259990168cdc6c4163a5e18ab2df005c
diff --git a/rtv/content.py b/rtv/content.py index <HASH>..<HASH> 100644 --- a/rtv/content.py +++ b/rtv/content.py @@ -6,7 +6,7 @@ from datetime import datetime import six import praw -from praw.errors import InvalidSubreddit, NotFound +from praw.errors import InvalidSubreddit from kitchen.text.display import wrap from . import exceptions @@ -376,7 +376,7 @@ class SubredditContent(Content): # Strip leading and trailing backslashes name = name.strip(' /').split('/') if name[0] in ['r', 'u', 'user', 'domain']: - listing, *name = name + listing, name = name[0], name[1:] if len(name) > 1: name, name_order = name order = order or name_order
Remove use of python 3 specific tuple unpacking
michael-lazar_rtv
train
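The rtv commit above replaces Python 3's extended iterable unpacking (`listing, *name = name`) with an index-and-slice form that Python 2 also accepts. A short, self-contained sketch of the two spellings, using a made-up list rather than rtv's subreddit names:
```
parts = ["r", "python", "top"]

# Python 3 only -- a SyntaxError on Python 2:
# head, *rest = parts

# Portable form used by the commit: take the head and slice off the rest.
head, rest = parts[0], parts[1:]
print(head)  # 'r'
print(rest)  # ['python', 'top']
```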
c6b48fdb96a6c21b3e5e374b532f903032082877
diff --git a/singularity/build/google.py b/singularity/build/google.py index <HASH>..<HASH> 100644 --- a/singularity/build/google.py +++ b/singularity/build/google.py @@ -282,7 +282,7 @@ def run_build(logfile=None): "metadata": json.dumps(metadata)} # Did the user specify a specific log file? - logfile = get_build_metadata(key='logfile', logfile) + logfile = get_build_metadata('logfile') if logfile is not None: response['logfile'] = logfile
modified: singularity/build/google.py
singularityhub_singularity-python
train
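The singularity-python commit above fixes a call written as `get_build_metadata(key='logfile', logfile)`, which Python refuses to compile because a positional argument may not follow a keyword argument. A tiny reproduction of that rule; the function body and signature here are invented stand-ins, not the project's real helper:
```
def get_build_metadata(key, logfile=None):
    # Invented stand-in: just echo what was passed.
    return {"key": key, "logfile": logfile}

# SyntaxError: positional argument follows keyword argument
# get_build_metadata(key='logfile', logfile)

# Valid orderings: positional arguments first, keyword arguments after.
print(get_build_metadata('logfile'))
print(get_build_metadata('logfile', logfile='/tmp/build.log'))
```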
007df950fc9e9fad255167db74517095a8326f25
diff --git a/src/java/org/apache/cassandra/db/CommitLog.java b/src/java/org/apache/cassandra/db/CommitLog.java index <HASH>..<HASH> 100644 --- a/src/java/org/apache/cassandra/db/CommitLog.java +++ b/src/java/org/apache/cassandra/db/CommitLog.java @@ -24,6 +24,7 @@ import org.apache.cassandra.io.DataInputBuffer; import org.apache.cassandra.io.DataOutputBuffer; import org.apache.cassandra.utils.FBUtilities; import org.apache.cassandra.utils.FileUtils; +import org.apache.commons.lang.StringUtils; import org.apache.log4j.Logger; import java.io.*; @@ -198,14 +199,17 @@ public class CommitLog { public void run() { - executor.submit(syncer); - try - { - Thread.sleep(DatabaseDescriptor.getCommitLogSyncPeriod()); - } - catch (InterruptedException e) + while (true) { - throw new RuntimeException(e); + executor.submit(syncer); + try + { + Thread.sleep(DatabaseDescriptor.getCommitLogSyncPeriod()); + } + catch (InterruptedException e) + { + throw new RuntimeException(e); + } } } }).start();
add missing while loop on periodic commitlog sync thread. patch by jbellis; reviewed by Chris Goffinet for CASSANDRA-<I> git-svn-id: <URL>
Stratio_stratio-cassandra
train
bab7a8450b8e390811bd62525182a7e609c56dbf
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -118,19 +118,23 @@ Prompt.prototype._run = function(callback) { var keyUps = events.keypress.filter(function(evt) { return evt.key.name === 'up'; - }).share(); + }).share(); + + var keyDowns = events.keypress.filter(function(evt) { + return evt.key.name === 'down'; + }).share(); - var keyDowns = events.keypress.filter(function(evt) { - return evt.key.name === 'down'; - }).share(); + var keySlash = events.keypress.filter(function(evt) { + return evt.value === '/' && !self.searchMode; + }).share(); - var keySlash = events.keypress.filter(function(evt) { - return evt.value === '/' && !self.searchMode; - }).share(); + var keyMinus = events.keypress.filter(function(evt) { + return evt.value === '-' && !self.searchMode; + }).share(); - var keyMinus = events.keypress.filter(function(evt) { - return evt.value === '-' && !self.searchMode; - }).share(); + var dotKey = events.keypress.filter(function(evt) { + return evt.value === '.' && !self.searchMode; + }).share(); var alphaNumeric = events.keypress.filter(function(evt) { return evt.key.name === 'backspace' || alphaNumericRegex.test(evt.value); @@ -167,6 +171,7 @@ Prompt.prototype._run = function(callback) { keyUps.takeUntil(outcome.done).forEach(this.onUpKey.bind(this)); keyDowns.takeUntil(outcome.done).forEach(this.onDownKey.bind(this)); keyMinus.takeUntil(outcome.done).forEach(this.handleBack.bind(this)); + dotKey.takeUntil(outcome.done).forEach(this.onSubmit.bind(this)); events.keypress.takeUntil(outcome.done).forEach(this.hideKeyPress.bind(this)); searchTerm.takeUntil(outcome.done).forEach(this.onKeyPress.bind(this)); outcome.done.forEach(this.onSubmit.bind(this));
Add missing `.` shortcut (select the current directory)
KamiKillertO_inquirer-select-directory
train
a138dba91a3b39aa6d5598fbdab0ee2d25dc9365
diff --git a/src/Toolbar/Toolbar.react.js b/src/Toolbar/Toolbar.react.js index <HASH>..<HASH> 100644 --- a/src/Toolbar/Toolbar.react.js +++ b/src/Toolbar/Toolbar.react.js @@ -86,47 +86,54 @@ const contextTypes = { uiTheme: PropTypes.object.isRequired, }; -function getStyles(props, context) { - const { toolbar } = context.uiTheme; +function getStyles(props, context, state) { + const { toolbar, toolbarSearchActive } = context.uiTheme; - const container = {}; + const local = {}; + const isSearchActive = state.isSearchActive; if (props.translucent) { - container.position = 'absolute'; - container.elevation = 0; - container.top = 0; - container.left = 0; - container.right = 0; + local.container = { + ...StyleSheet.absoluteFillObject, + elevation: 0, + }; } return { container: [ toolbar.container, - container, + local.container, + isSearchActive && toolbarSearchActive.container, props.style.container, ], leftElementContainer: [ toolbar.leftElementContainer, + isSearchActive && toolbarSearchActive.leftElementContainer, props.style.leftElementContainer, ], leftElement: [ toolbar.leftElement, + isSearchActive && toolbarSearchActive.leftElement, props.style.leftElement, ], centerElementContainer: [ toolbar.centerElementContainer, + isSearchActive && toolbarSearchActive.centerElementContainer, props.style.centerElementContainer, ], titleText: [ toolbar.titleText, + isSearchActive && toolbarSearchActive.titleText, props.style.titleText, ], rightElementContainer: [ toolbar.rightElementContainer, + isSearchActive && toolbarSearchActive.rightElementContainer, props.style.rightElementContainer, ], rightElement: [ toolbar.rightElement, + isSearchActive && toolbarSearchActive.rightElement, props.style.rightElement, ], }; @@ -378,7 +385,7 @@ class Toolbar extends Component { ); } render() { - const styles = getStyles(this.props, this.context); + const styles = getStyles(this.props, this.context, this.state); return ( <Animated.View style={styles.container}> diff --git a/src/styles/getTheme.js b/src/styles/getTheme.js index <HASH>..<HASH> 100644 --- a/src/styles/getTheme.js +++ b/src/styles/getTheme.js @@ -121,7 +121,23 @@ export default function getTheme(theme, ...more) { margin: 16, color: palette.alternateTextColor, }, - // searchInput: TYPO.paperFontTitle, + }), + toolbarSearchActive: StyleSheet.create({ + container: { + backgroundColor: palette.canvasColor, + }, + leftElement: { + color: palette.secondaryTextColor, + }, + centerElementContainer: { }, + titleText: { + flex: 1, + marginLeft: 16, + color: palette.primaryTextColor, + }, + rightElement: { + color: palette.secondaryTextColor, + }, }), }, theme);
Fix style of toolbar while searching
xotahal_react-native-material-ui
train
9d8b1a40b14ccc012ce688874a655a387091e8b7
diff --git a/mrivis/__init__.py b/mrivis/__init__.py index <HASH>..<HASH> 100644 --- a/mrivis/__init__.py +++ b/mrivis/__init__.py @@ -5,11 +5,13 @@ __author__ = """Pradeep Reddy Raamana""" __email__ = '[email protected]' -__all__ = ['checkerboard', 'color_mix', 'voxelwise_diff', 'collage', 'aseg_on_mri', 'color_maps'] +__all__ = ['checkerboard', 'color_mix', 'voxelwise_diff', 'collage', 'aseg_on_mri', + 'Collage', 'SlicePicker', 'color_maps'] from sys import version_info if version_info.major > 2: from mrivis.workflow import checkerboard, color_mix, voxelwise_diff, collage, aseg_on_mri + from mrivis.base import Collage, SlicePicker from mrivis import color_maps else: # from .mrivis import checkerboard
exposing the new classes at the top level [skip ci]
raamana_mrivis
train
7c7d5801d9f8dfa99790f9103c3f570c05ba152b
diff --git a/framework/Commands/Migrate.php b/framework/Commands/Migrate.php index <HASH>..<HASH> 100644 --- a/framework/Commands/Migrate.php +++ b/framework/Commands/Migrate.php @@ -183,17 +183,6 @@ class {$className} public function down() { } - /* - // Use transactionalUp/transactionalDown to do migration with transaction - // Attention! Some Mysql commands (aka ALTER, DROP etc) cause implict commit! See more at https://dev.mysql.com/doc/refman/5.7/en/implicit-commit.html - public function transactionalUp() - { - } - - public function transactionalDown() - { - } - */ } FILE; diff --git a/framework/Orm/Migration.php b/framework/Orm/Migration.php index <HASH>..<HASH> 100644 --- a/framework/Orm/Migration.php +++ b/framework/Orm/Migration.php @@ -42,44 +42,21 @@ abstract class Migration return (int)$m[1]; } - public function up() - { - $this->db->beginTransaction(); - try - { - $this->transactionalUp(); - $this->db->commit(); - } - catch(\Exception $e) - { - $this->db->rollback(); - throw $e; - } + final public function beginTransaction() { + return $this->db->beginTransaction(); } - public function down() - { - $this->db->beginTransaction(); - try - { - $this->transactionalDown(); - $this->db->commit(); - } - catch(\Exception $e) - { - $this->db->rollback(); - throw $e; - } + final public function rollback() { + return $this->db->rollback(); } - public function transactionalUp() - { - + final public function commit() { + return $this->db->commit(); } - public function transactionalDown() - { - } + abstract public function up(); + + abstract public function down(); final protected function createTable($tableName, $columns = [], $indexes = [], $extensions = []) {
- remove transactionalUp/transactionalDown methods from Migration class - add direct methods for transactions(begin,rollback,commit) to Migration class
pr-of-it_t4
train
a66e1507d4784d9f777cfac031ff74dd8efe246c
diff --git a/web/concrete/src/Localization/Service/LanguageList.php b/web/concrete/src/Localization/Service/LanguageList.php index <HASH>..<HASH> 100644 --- a/web/concrete/src/Localization/Service/LanguageList.php +++ b/web/concrete/src/Localization/Service/LanguageList.php @@ -15,7 +15,7 @@ class LanguageList */ public function getLanguageList() { - $languages = Language::getAll(false, false, Localization::activeLocale()); + $languages = Language::getAll(true, true); return $languages; }
Remove country and script specifications from languages A language is only 'en', a locale is 'en_US': remove country codes from getLanguageList so that we have only languages and not locales. Furthermore we don't need to tell Punic which is the active locale: Localization::changeLocale already sets the Punic default locale. Former-commit-id: e5ab2be<I>faeb2edf<I>eb<I>bedc<I>f0
concrete5_concrete5
train
cb79b60ebc32ed762a2373bee43a0cabe5406b4f
diff --git a/worker/hostkeyreporter/shim.go b/worker/hostkeyreporter/shim.go index <HASH>..<HASH> 100644 --- a/worker/hostkeyreporter/shim.go +++ b/worker/hostkeyreporter/shim.go @@ -5,6 +5,7 @@ package hostkeyreporter import ( "github.com/juju/errors" + "github.com/juju/juju/api/base" apihostkeyreporter "github.com/juju/juju/api/hostkeyreporter" "github.com/juju/juju/worker" diff --git a/worker/hostkeyreporter/worker.go b/worker/hostkeyreporter/worker.go index <HASH>..<HASH> 100644 --- a/worker/hostkeyreporter/worker.go +++ b/worker/hostkeyreporter/worker.go @@ -5,6 +5,7 @@ package hostkeyreporter import ( "io/ioutil" + "os" "path/filepath" "github.com/juju/errors" @@ -87,7 +88,15 @@ func (w *hostkeyreporter) run() error { } func (w *hostkeyreporter) readSSHKeys() ([]string, error) { - filenames, err := filepath.Glob(w.sshDir() + "/ssh_host_*_key.pub") + sshDir := w.sshDir() + if _, err := os.Stat(sshDir); os.IsNotExist(err) { + logger.Warningf("%s doesn't exist - giving up", sshDir) + return nil, dependency.ErrUninstall + } else if err != nil { + return nil, errors.Trace(err) + } + + filenames, err := filepath.Glob(sshDir + "/ssh_host_*_key.pub") if err != nil { return nil, errors.Trace(err) } diff --git a/worker/hostkeyreporter/worker_test.go b/worker/hostkeyreporter/worker_test.go index <HASH>..<HASH> 100644 --- a/worker/hostkeyreporter/worker_test.go +++ b/worker/hostkeyreporter/worker_test.go @@ -4,12 +4,12 @@ package hostkeyreporter_test import ( - "errors" "fmt" "io/ioutil" "os" "path/filepath" + "github.com/juju/errors" jujutesting "github.com/juju/testing" jc "github.com/juju/testing/checkers" gc "gopkg.in/check.v1" @@ -64,14 +64,25 @@ func (s *Suite) TestInvalidConfig(c *gc.C) { c.Check(s.stub.Calls(), gc.HasLen, 0) } -func (s *Suite) TestNoKeys(c *gc.C) { - // Pass an empty directory so the keys created in setup won't be - // there. +func (s *Suite) TestNoSSHDir(c *gc.C) { + // No /etc/ssh at all s.config.RootDir = c.MkDir() w, err := hostkeyreporter.New(s.config) c.Assert(err, jc.ErrorIsNil) err = workertest.CheckKilled(c, w) + c.Check(errors.Cause(err), gc.Equals, dependency.ErrUninstall) +} + +func (s *Suite) TestNoKeys(c *gc.C) { + // Pass an empty /etc/ssh + dir := c.MkDir() + c.Assert(os.MkdirAll(filepath.Join(dir, "etc", "ssh"), 0777), jc.ErrorIsNil) + s.config.RootDir = dir + + w, err := hostkeyreporter.New(s.config) + c.Assert(err, jc.ErrorIsNil) + err = workertest.CheckKilled(c, w) c.Check(err, gc.ErrorMatches, "no SSH host keys found") }
worker/hostkeyreporter: Give up if /etc/ssh doesn't exist This situation most likely means that the agent is running in a test situation with a fake root directory. We don't want the worker being continually restarted in this case.
juju_juju
train
1b481f45f23dd94cb40840b813d7d603874a0049
diff --git a/src/rules/media-feature-range-operator-space-after/__tests__/index.js b/src/rules/media-feature-range-operator-space-after/__tests__/index.js index <HASH>..<HASH> 100644 --- a/src/rules/media-feature-range-operator-space-after/__tests__/index.js +++ b/src/rules/media-feature-range-operator-space-after/__tests__/index.js @@ -13,13 +13,41 @@ testRule("always", tr => { tr.ok("@media (max-width > 600px) {}") tr.ok("@media (max-width>= 600px) and (min-width<= 3em) {}") - tr.notOk("@media (max-width<600px) {}", messages.expectedAfter()) - tr.notOk("@media (max-width<= 600px) {}", messages.expectedAfter()) - tr.notOk("@media (max-width=\t600px) {}", messages.expectedAfter()) - tr.notOk("@media (max-width>\n600px) {}", messages.expectedAfter()) - tr.notOk("@media (max-width>\r\n600px) {}", messages.expectedAfter(), "CRLF") - tr.notOk("@media (max-width>=600px) and (min-width< 3em) {}", messages.expectedAfter()) - tr.notOk("@media (max-width> 600px) and (min-width=3em) {}", messages.expectedAfter()) + tr.notOk("@media (max-width<600px) {}", { + message: messages.expectedAfter(), + line: 1, + column: 19, + }) + tr.notOk("@media (max-width<= 600px) {}", { + message: messages.expectedAfter(), + line: 1, + column: 20, + }) + tr.notOk("@media (max-width=\t600px) {}", { + message: messages.expectedAfter(), + line: 1, + column: 19, + }) + tr.notOk("@media (max-width>\n600px) {}", { + message: messages.expectedAfter(), + line: 1, + column: 19, + }) + tr.notOk("@media (max-width>\r\n600px) {}", { + message: messages.expectedAfter(), + line: 1, + column: 19, + }, "CRLF") + tr.notOk("@media (max-width>=600px) and (min-width< 3em) {}", { + message: messages.expectedAfter(), + line: 1, + column: 20, + }) + tr.notOk("@media (max-width> 600px) and (min-width=3em) {}", { + message: messages.expectedAfter(), + line: 1, + column: 42, + }) }) testRule("never", tr => { @@ -29,11 +57,39 @@ testRule("never", tr => { tr.ok("@media (max-width>600px) {}") tr.ok("@media (max-width >=600px) and (min-width <=3em) {}") - tr.notOk("@media (max-width < 600px) {}", messages.rejectedAfter()) - tr.notOk("@media (max-width <= 600px) {}", messages.rejectedAfter()) - tr.notOk("@media (max-width =\t600px) {}", messages.rejectedAfter()) - tr.notOk("@media (max-width >\n600px) {}", messages.rejectedAfter()) - tr.notOk("@media (max-width >\r\n600px) {}", messages.rejectedAfter(), "CRLF") - tr.notOk("@media (max-width >= 600px) and (min-width <3em) {}", messages.rejectedAfter()) - tr.notOk("@media (max-width >600px) and (min-width = 3em) {}", messages.rejectedAfter()) + tr.notOk("@media (max-width < 600px) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 20, + }) + tr.notOk("@media (max-width <= 600px) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 21, + }) + tr.notOk("@media (max-width =\t600px) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 20, + }) + tr.notOk("@media (max-width >\n600px) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 20, + }) + tr.notOk("@media (max-width >\r\n600px) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 20, + }, "CRLF") + tr.notOk("@media (max-width >= 600px) and (min-width <3em) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 21, + }) + tr.notOk("@media (max-width >600px) and (min-width = 3em) {}", { + message: messages.rejectedAfter(), + line: 1, + column: 43, + }) }) diff --git a/src/rules/media-feature-range-operator-space-after/index.js 
b/src/rules/media-feature-range-operator-space-after/index.js index <HASH>..<HASH> 100644 --- a/src/rules/media-feature-range-operator-space-after/index.js +++ b/src/rules/media-feature-range-operator-space-after/index.js @@ -31,13 +31,15 @@ export default function (expectation) { }) function checkAfterOperator(match, params, node) { + const endIndex = match.index + match[1].length checker.after({ source: params, - index: match.index + match[1].length, + index: endIndex, err: m => { report({ message: m, - node: node, + node, + index: endIndex + node.name.length + node.raws.afterName.length + 2, result, ruleName, })
Add accuracy for media-feature-range-operator-space-after
stylelint_stylelint
train
6f9382cce1d07c9eab5813cfbf60e020942c92a4
diff --git a/go/libkb/constants.go b/go/libkb/constants.go index <HASH>..<HASH> 100644 --- a/go/libkb/constants.go +++ b/go/libkb/constants.go @@ -250,6 +250,7 @@ const ( SCChatDuplicateMessage = int(keybase1.StatusCode_SCChatDuplicateMessage) SCChatClientError = int(keybase1.StatusCode_SCChatClientError) SCAccountReset = int(keybase1.StatusCode_SCAccountReset) + SCTeamReadError = int(keybase1.StatusCode_SCTeamReadError) ) const ( diff --git a/go/systests/rpc_test.go b/go/systests/rpc_test.go index <HASH>..<HASH> 100644 --- a/go/systests/rpc_test.go +++ b/go/systests/rpc_test.go @@ -45,6 +45,7 @@ func TestRPCs(t *testing.T) { testCheckDevicesForUser(t, tc2.G) testIdentify2(t, tc2.G) testMerkle(t, tc2.G) + testIdentifyLite(t) if err := client.CtlServiceStop(tc2.G); err != nil { t.Fatal(err) @@ -280,3 +281,69 @@ func testMerkle(t *testing.T, g *libkb.GlobalContext) { t.Fatalf("Failed basic sanity check") } } + +func testIdentifyLite(t *testing.T) { + + tt := newTeamTester(t) + defer tt.cleanup() + + tt.addUser("abc") + teamName := tt.users[0].createTeam() + g := tt.users[0].tc.G + + team, err := GetTeamForTestByStringName(context.Background(), g, teamName) + if err != nil { + t.Fatal(err) + } + + cli, err := client.GetIdentifyClient(g) + if err != nil { + t.Fatalf("failed to get new identifyclient: %v", err) + } + + // test ok assertions + var assertions = []string{"team:" + teamName, "tid:" + team.ID.String(), "t_alice"} + for _, assertion := range assertions { + _, err := cli.IdentifyLite(context.Background(), keybase1.IdentifyLiteArg{Assertion: assertion}) + if err != nil { + t.Fatalf("IdentifyLite (%s) failed: %v\n", assertion, err) + } + } + + // test identify by assertion and id + assertions = []string{"team:" + teamName, "tid:" + team.ID.String()} + for _, assertion := range assertions { + _, err := cli.IdentifyLite(context.Background(), keybase1.IdentifyLiteArg{Id: team.ID.AsUserOrTeam(), Assertion: assertion}) + if err != nil { + t.Fatalf("IdentifyLite by assertion and id (%s) failed: %v\n", assertion, err) + } + } + + // test identify by id only + _, err = cli.IdentifyLite(context.Background(), keybase1.IdentifyLiteArg{Id: team.ID.AsUserOrTeam()}) + if err != nil { + t.Fatalf("IdentifyLite id only failed: %v\n", err) + } + + // test team read error + assertions = []string{"team:jwkj22111z"} + for _, assertion := range assertions { + _, err := cli.IdentifyLite(context.Background(), keybase1.IdentifyLiteArg{Assertion: assertion}) + aerr, ok := err.(libkb.AppStatusError) + if !ok { + t.Fatalf("Expected an AppStatusError for %s, but got: %v (%T)", assertion, err, err) + } + if aerr.Code != libkb.SCTeamReadError { + t.Fatalf("app status code: %d, expected %d", aerr.Code, libkb.SCTeamReadError) + } + } + + // test not found assertions + assertions = []string{"t_weriojweroi"} + for _, assertion := range assertions { + _, err := cli.IdentifyLite(context.Background(), keybase1.IdentifyLiteArg{Assertion: assertion}) + if _, ok := err.(libkb.NotFoundError); !ok { + t.Fatalf("assertion %s, error: %s (%T), expected libkb.NotFoundError", assertion, err, err) + } + } +}
Add systest of IdentifyLite
keybase_client
train
285b70b77c37b3ff4d9e6166a8d5c9896095148f
diff --git a/django_auth_ldap/backend.py b/django_auth_ldap/backend.py index <HASH>..<HASH> 100644 --- a/django_auth_ldap/backend.py +++ b/django_auth_ldap/backend.py @@ -143,6 +143,9 @@ class LDAPBackend: # def authenticate(self, request, username=None, password=None, **kwargs): + if username is None: + return None + if password or self.settings.PERMIT_EMPTY_PASSWORD: ldap_user = _LDAPUser(self, username=username.strip(), request=request) user = self.authenticate_ldap_user(ldap_user, password) diff --git a/tests/tests.py b/tests/tests.py index <HASH>..<HASH> 100644 --- a/tests/tests.py +++ b/tests/tests.py @@ -260,6 +260,11 @@ class LDAPTest(TestCase): user = authenticate(username="invalid", password="i_do_not_exist") self.assertIsNone(user) + def test_username_none(self): + self._init_settings() + user = authenticate(username=None, password="password") + self.assertIsNone(user) + @spy_ldap("simple_bind_s") def test_simple_bind_escaped(self, mock): """ Bind with a username that requires escaping. """
Reject requests with no username When a request comes in with no username, an exception is thrown: ``` AttributeError at / 'NoneType' object has no attribute 'strip' ``` A correct LDAP user always needs a username, so we can reject any request that has no username.
django-auth-ldap_django-auth-ldap
train
440c994f47738110bff3107e917fba9428c4831f
diff --git a/tenacity-core/src/main/java/com/yammer/tenacity/core/properties/ArchaiusPropertyRegister.java b/tenacity-core/src/main/java/com/yammer/tenacity/core/properties/ArchaiusPropertyRegister.java index <HASH>..<HASH> 100644 --- a/tenacity-core/src/main/java/com/yammer/tenacity/core/properties/ArchaiusPropertyRegister.java +++ b/tenacity-core/src/main/java/com/yammer/tenacity/core/properties/ArchaiusPropertyRegister.java @@ -30,7 +30,7 @@ public class ArchaiusPropertyRegister { public void register(BreakerboxConfiguration breakerboxConfiguration) { ConfigurationManager.install( new DynamicConfiguration( - new URLConfigurationSource(breakerboxConfiguration.getUrls()), + new URLConfigurationSource(breakerboxConfiguration.getUrls().split(",")), new TenacityPollingScheduler( breakerboxConfiguration.getInitialDelay(), breakerboxConfiguration.getDelay(),
split on , for archaius register
yammer_tenacity
train
589bd433ca0d5f9396032e7a5c23ba8281d5d9e2
diff --git a/lib/rest-core/event_source.rb b/lib/rest-core/event_source.rb index <HASH>..<HASH> 100644 --- a/lib/rest-core/event_source.rb +++ b/lib/rest-core/event_source.rb @@ -15,6 +15,7 @@ class RestCore::EventSource < Struct.new(:client, :path, :query, :opts, o = {REQUEST_HEADERS => {'Accept' => 'text/event-stream'}, HIJACK => true}.merge(opts) client.get(path, query, o){ |sock| onopen(sock) } + self end def closed? @@ -29,6 +30,7 @@ class RestCore::EventSource < Struct.new(:client, :path, :query, :opts, def wait raise RC::Error.new("Not yet started for: #{self}") unless mutex mutex.synchronize{ condv.wait(mutex) until closed? } unless closed? + self end def onopen sock=nil, &cb @@ -38,6 +40,7 @@ class RestCore::EventSource < Struct.new(:client, :path, :query, :opts, @onopen.call(sock) if @onopen onmessage_for(sock) end + self rescue Exception => e begin # close the socket since we're going to stop anyway sock.close # if we don't close it, client might wait forever @@ -53,6 +56,7 @@ class RestCore::EventSource < Struct.new(:client, :path, :query, :opts, elsif @onmessage @onmessage.call(event, sock) end + self end # would also be called upon closing, would always be called at least once @@ -66,6 +70,7 @@ class RestCore::EventSource < Struct.new(:client, :path, :query, :opts, condv.signal # should never deadlock someone end end + self end protected diff --git a/test/test_event_source.rb b/test/test_event_source.rb index <HASH>..<HASH> 100644 --- a/test/test_event_source.rb +++ b/test/test_event_source.rb @@ -39,25 +39,21 @@ SSE sock.should.kind_of IO flag.should.eq 0 flag += 1 - end - - es.onmessage do |event, sock| + end. + onmessage do |event, sock| event.should.eq(m.shift) sock.should.kind_of IO sock.should.not.closed? flag += 1 - end - - es.onerror do |error, sock| + end. + onerror do |error, sock| error.should.kind_of EOFError m.should.eq [] sock.should.closed? flag.should.eq 3 flag += 1 - end + end.start.wait - es.start - es.wait flag.should.eq 4 t.join end @@ -68,9 +64,8 @@ SSE es.onmessage do es.close flag += 1 - end - es.start - es.wait + end.start.wait + flag.should.eq 1 t.join end
return self so we can chain them...
godfat_rest-core
train
480c7bcd4f2411f1efe8e896da860cf0d7ec1960
diff --git a/spec/pivotal-ui-react/expander/expander_spec.js b/spec/pivotal-ui-react/expander/expander_spec.js index <HASH>..<HASH> 100644 --- a/spec/pivotal-ui-react/expander/expander_spec.js +++ b/spec/pivotal-ui-react/expander/expander_spec.js @@ -91,7 +91,7 @@ describe('ExpanderTrigger', function() { var ExpanderTrigger = require('../../../src/pivotal-ui-react/expander/expander').ExpanderTrigger; return React.render( (<ExpanderTrigger> - <div>Click here to trigger</div> + <button>Click here to trigger</button> </ExpanderTrigger>), root ); diff --git a/src/pivotal-ui-react/expander/expander.js b/src/pivotal-ui-react/expander/expander.js index <HASH>..<HASH> 100644 --- a/src/pivotal-ui-react/expander/expander.js +++ b/src/pivotal-ui-react/expander/expander.js @@ -16,7 +16,7 @@ var Collapse = require('react-bootstrap/lib/Collapse'); * render() { * return ( * <article> - * <ExpanderTrigger ref="trigger">Click to Toggle Content</ExpanderTrigger> + * <ExpanderTrigger ref="trigger"><button>Click to Toggle Content</button></ExpanderTrigger> * <ExpanderContent ref="content">Content to be toggled</ExpanderContent> * </article> * ); @@ -35,7 +35,8 @@ var ExpanderTrigger = React.createClass({ this.setState({target}); }, - toggleExpander() { + toggleExpander(event) { + event.preventDefault(); if (this.state.target) { this.state.target.toggle(); } else { @@ -44,7 +45,7 @@ var ExpanderTrigger = React.createClass({ }, render() { - return <div onClick={this.toggleExpander}>{this.props.children}</div>; + return React.cloneElement(this.props.children, {onClick: this.toggleExpander}); } }); @@ -99,6 +100,7 @@ Require the subcomponent: ``` var ExpanderContent = require('pui-react-expander').ExpanderContent; +var ExpanderTrigger = require('pui-react-expander').ExpanderTrigger; ``` @@ -108,7 +110,7 @@ else within the DOM. The expander pattern requires two components -- the ExpanderContent and the ExpanderTrigger. You will need to implement a component which handles the communication between these two components so the Trigger knows which Content to toggle. This is done -through the setTarget method exposed on the ExpanderTrigger. +through the setTarget method exposed on the ExpanderTrigger. *Note that the contents of the ExpanderTrigger component must be a button or link.* See the example below for how to use these components in your own application.
feat(expander): ExpanderTrigger no longer wraps the given child This avoids some accessibility problems we had before. Also updates the documentation to clarify that the trigger must be a link or button. [Finishes #<I>] BREAKING CHANGE: The DOM for ExpanderTrigger has changed. There is no longer a wrapping div.
pivotal-cf_pivotal-ui
train
6022fea9e24930a40049024755d6301784a4f441
diff --git a/src/main/java/org/jmxtrans/embedded/EmbeddedJmxTrans.java b/src/main/java/org/jmxtrans/embedded/EmbeddedJmxTrans.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/jmxtrans/embedded/EmbeddedJmxTrans.java +++ b/src/main/java/org/jmxtrans/embedded/EmbeddedJmxTrans.java @@ -132,7 +132,7 @@ public class EmbeddedJmxTrans implements EmbeddedJmxTransMBean { private final Logger logger = LoggerFactory.getLogger(getClass()); - enum State {STOPPED, STARTING, STARTED, STOPPING} + enum State {STOPPED, STARTED, ERROR} private State state = State.STOPPED; @@ -177,7 +177,6 @@ public class EmbeddedJmxTrans implements EmbeddedJmxTransMBean { return; } logger.info("Start..."); - state = State.STARTING; for (Query query : queries) { query.start(); @@ -240,7 +239,8 @@ public class EmbeddedJmxTrans implements EmbeddedJmxTransMBean { shutdownHook.registerToRuntime(); state = State.STARTED; logger.info("EmbeddedJmxTrans started"); - } catch (Exception e) { + } catch (RuntimeException e) { + this.state = State.ERROR; if (logger.isDebugEnabled()) { // to troubleshoot JMX call errors or equivalent, it may be useful to log and rethrow logger.warn("Exception starting EmbeddedJmxTrans", e); @@ -281,9 +281,6 @@ public class EmbeddedJmxTrans implements EmbeddedJmxTransMBean { logger.warn("Ignore failure collecting and exporting metrics during stop", e); } - state = State.STOPPING; - logger.info("Set state to {}", state); - // queries and outputwriters can be stopped even if exports threads are running thanks to the lifecycleLock logger.info("Stop queries..."); for (Query query : queries) { @@ -306,6 +303,7 @@ public class EmbeddedJmxTrans implements EmbeddedJmxTransMBean { state = State.STOPPED; logger.info("Set state to {}", state); } catch (RuntimeException e) { + state = State.ERROR; if (logger.isDebugEnabled()) { // to troubleshoot JMX call errors or equivalent, it may be useful to log and rethrow logger.warn("Exception stopping EmbeddedJmxTrans", e);
Remove `State.STARTING` and `State.STOPPING` as suggested by @YannRobert. Introduce a `State.ERROR` when `start()` or `stop()` fail.
jmxtrans_embedded-jmxtrans
train
b939ed17971786a10de4a00633f616944a8a2e2c
diff --git a/spec/lib/capistrano/version_validator_spec.rb b/spec/lib/capistrano/version_validator_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/capistrano/version_validator_spec.rb +++ b/spec/lib/capistrano/version_validator_spec.rb @@ -89,6 +89,29 @@ module Capistrano end end end + + context "with multiple versions" do + let(:current_version) { "3.5.9" } + + context "valid" do + let(:version) { [">= 3.5.0", "< 3.5.10"] } + it { is_expected.to be_truthy } + end + + context "invalid" do + let(:version) { [">= 3.5.0", "< 3.5.8"] } + it "fails" do + expect { subject }.to raise_error(RuntimeError) + end + end + + context "invalid" do + let(:version) { ["> 3.5.9", "< 3.5.13"] } + it "fails" do + expect { subject }.to raise_error(RuntimeError) + end + end + end end end end
Add a couple of tests for multiple version definitions Since I'm about to document this.
capistrano_capistrano
train
400b8f43db207a3dae5f73bf89291870191a368a
diff --git a/py3status/modules/frame.py b/py3status/modules/frame.py index <HASH>..<HASH> 100644 --- a/py3status/modules/frame.py +++ b/py3status/modules/frame.py @@ -103,6 +103,10 @@ class Py3status: out += [{'full_text': self.format_separator}] output += out + # Remove last separator + if self.format_separator: + output = output[:-1] + if '{button}' in self.format: if self.open: format_control = self.format_button_open
Remove additional separator at the end of frames
ultrabug_py3status
train
37a4c6675cc74ace2040eb08d5be77781fcc811b
diff --git a/AlphaTwirl/Counter/KeyValueComposer.py b/AlphaTwirl/Counter/KeyValueComposer.py index <HASH>..<HASH> 100755 --- a/AlphaTwirl/Counter/KeyValueComposer.py +++ b/AlphaTwirl/Counter/KeyValueComposer.py @@ -63,7 +63,6 @@ class KeyValueComposer(object): # ] if not self._use_backref: - keys = self._apply_binnings(self.binnings, keys) return self._fast_path_without_backref(keys, vals) # e.g., @@ -228,14 +227,12 @@ class KeyValueComposer(object): return ret def _fast_path_without_backref(self, keys_list, vals_list): - for keys in keys_list: - keys[:] = [k for k in keys if k is not None] - for vals in vals_list: - vals[:] = [v for v in vals if v is not None] prod = tuple(itertools.product(*(keys_list + vals_list))) - keys = tuple(e[0:len(keys_list)] for e in prod) if keys_list else None - vals = tuple(e[len(keys_list):] for e in prod) if vals_list else None - return keys, vals + key = tuple(e[0:len(keys_list)] for e in prod) if keys_list else None + val = tuple(e[len(keys_list):] for e in prod) if vals_list else None + key = self._apply_binnings_2(self.binnings, key) + key, val = self._remove_None(key, val) + return key, val def _build_uniq_ref_idxs(self, keys, vals, backref_idxs): uniq_idxs, ref_idxs = self._build_uniq_ref_idxs_sub(keys + vals, backref_idxs)
apply binnings later in _fast_path_without_backref()
alphatwirl_alphatwirl
train
34d2533ac3bb63365587ed33910ee8e42e5d1258
diff --git a/common/digestauth.py b/common/digestauth.py index <HASH>..<HASH> 100644 --- a/common/digestauth.py +++ b/common/digestauth.py @@ -72,7 +72,7 @@ class auth(object): return self.send401UnauthorizedResponse() if requestHeader[0:7] != "Digest ": # client has attempted to use something other than Digest authenication; deny - if self.printdebug: self.printdebug("AUTH DEBUG badrequest") + if self.printdebug: self.printdebug("AUTH DEBUG badrequest: no digest auth used") return self.denyBadRequest() reqHeaderDict = parseAuthHeader(requestHeader) if not self.directiveProper(reqHeaderDict, web.ctx.fullpath): @@ -85,6 +85,7 @@ class auth(object): return self.send401UnauthorizedResponse() else: # Their header had a more fundamental problem. Something is fishy. Deny access. + if self.printdebug: self.printdebug("AUTH DEBUG bad request: not conforming to RFC 2617 ") return self.denyBadRequest("Authorization Request Header does not conform to RFC 2617 section 3.2.2") # if user sent a "logout" nonce, make them type in the password again if len(reqHeaderDict.nonce) != 34: @@ -148,7 +149,9 @@ class auth(object): if self.printdebug: self.printdebug( "DEBUG directiveProper: missing " + variable) return False # IE doesn't send "opaque" and does not include GET parameters in the Digest field - standardsUncompliant = self.tolerateIE and ("MSIE" in web.ctx.environ.get('HTTP_USER_AGENT',"")) + #standardsUncompliant = self.tolerateIE and ("MSIE" in web.ctx.environ.get('HTTP_USER_AGENT',"")) + standardsUncompliant = True #Support crappy (Microsoft) software by default, regardless of uyser agent. + if reqHeaderDict['realm'] != self.realm: if self.printdebug: self.printdebug( "DEBUG directiveProper: realm not matching got '" + reqHeaderDict['realm'] + "' expected '" + self.realm + "'")
allow standards-uncompliant clients (usually Microsoft junk) by default
proycon_clam
train
738fb56dd6f3412a95141995058a632677219e7a
diff --git a/safe/messaging/item/numbered_list.py b/safe/messaging/item/numbered_list.py index <HASH>..<HASH> 100644 --- a/safe/messaging/item/numbered_list.py +++ b/safe/messaging/item/numbered_list.py @@ -76,6 +76,6 @@ class NumberedList(AbstractList): else: text = '' for i, item in enumerate(self.items): - text += ' %s. %s\n' % (i, item.to_text()) + text += ' %s. %s\n' % (i+1, item.to_text()) return text \ No newline at end of file diff --git a/safe/messaging/test_messaging.py b/safe/messaging/test_messaging.py index <HASH>..<HASH> 100644 --- a/safe/messaging/test_messaging.py +++ b/safe/messaging/test_messaging.py @@ -98,24 +98,24 @@ class MessagingTest(unittest.TestCase): res = p.to_html() self.assertEqual(expected_res, res) - def test_item_list(self): + def test_list(self): """Tests complex messages are rendered correctly in plain text/html """ l1 = NumberedList(Text('FOO'), ImportantText('BAR'), 'dsds') expected_res = ( - ' 0. FOO\n' - ' 1. *BAR*\n' - ' 2. dsds\n') + ' 1. FOO\n' + ' 2. *BAR*\n' + ' 3. dsds\n') res = l1.to_text() self.assertEqual(expected_res, res) expected_res = ( - '<ol>\n' - '<li>FOO</li>\n' - '<li><strong>BAR</strong></li>\n' - '<li>dsds</li>\n' - '</ol>') + '<ol>\n' + '<li>FOO</li>\n' + '<li><strong>BAR</strong></li>\n' + '<li>dsds</li>\n' + '</ol>') res = l1.to_html() self.assertEqual(expected_res, res)
made NumberedList start from 1
inasafe_inasafe
train
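The inasafe commit above makes NumberedList count from 1 by printing `i + 1` inside an `enumerate` loop. For context, `enumerate` can also be given a start value directly; the snippet below is an illustrative sketch with throwaway data, not the project's code, and shows that both spellings produce the same text:
```
items = ["FOO", "BAR", "dsds"]

# Offsetting the zero-based index by hand, as the commit does:
text = ""
for i, item in enumerate(items):
    text += ' %s. %s\n' % (i + 1, item)

# Same result using enumerate's start argument:
text2 = ""
for i, item in enumerate(items, start=1):
    text2 += ' %s. %s\n' % (i, item)

assert text == text2
print(text)
#  1. FOO
#  2. BAR
#  3. dsds
```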
b260be151c6280f8a8f7f799876077e4fe3fec0d
diff --git a/lib/active_scaffold/data_structures/association/active_mongoid.rb b/lib/active_scaffold/data_structures/association/active_mongoid.rb index <HASH>..<HASH> 100644 --- a/lib/active_scaffold/data_structures/association/active_mongoid.rb +++ b/lib/active_scaffold/data_structures/association/active_mongoid.rb @@ -32,7 +32,7 @@ module ActiveScaffold::DataStructures::Association protected def reflect_on_association(name) - @association.klass.reflect_on_am_association(reverse_name) + @association.klass.reflect_on_am_association(name) end end end
fix reverse association for active_mongoid
activescaffold_active_scaffold
train
a5d0aa6340443611a8c64bc7b9286f96bcd3d8bc
diff --git a/mockito/invocation.py b/mockito/invocation.py index <HASH>..<HASH> 100644 --- a/mockito/invocation.py +++ b/mockito/invocation.py @@ -117,8 +117,9 @@ class MatchingInvocation(Invocation): return False for key, p1 in sorted( - self.named_params.iteritems(), - key=lambda (k, v): 1 if k is matchers.KWARGS_SENTINEL else 0): + self.named_params.iteritems(), + key=lambda k_v: 1 if k_v[0] is matchers.KWARGS_SENTINEL else 0 + ): if key is matchers.KWARGS_SENTINEL: break
Do not unpack tuple in lambda arg position
kaste_mockito-python
train
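The mockito-python commit above rewrites `lambda (k, v): ...` as `lambda k_v: ...` because tuple parameter unpacking in function signatures was removed in Python 3 (PEP 3113). A self-contained sketch of the before and after shapes, using throwaway data instead of mockito's matcher internals:
```
pairs = {"a": 1, "kwargs_sentinel": 2}

# Python 2 only -- a SyntaxError on Python 3:
# ordered = sorted(pairs.items(),
#                  key=lambda (k, v): 1 if k == "kwargs_sentinel" else 0)

# Portable form: accept the pair as a single argument and index into it.
ordered = sorted(pairs.items(),
                 key=lambda k_v: 1 if k_v[0] == "kwargs_sentinel" else 0)
print(ordered)  # [('a', 1), ('kwargs_sentinel', 2)]
```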
6210928940129aa063955e238011d73546b9023c
diff --git a/js/test/beautify-tests.js b/js/test/beautify-tests.js index <HASH>..<HASH> 100755 --- a/js/test/beautify-tests.js +++ b/js/test/beautify-tests.js @@ -509,7 +509,8 @@ function run_beautifier_tests(test_obj, Urlencoded, js_beautify) bt('var a = new function() {};'); bt('var a = new function a()\n {};'); test_fragment('new function'); - + bt("foo({\n 'a': 1\n},\n10);", + "foo(\n {\n 'a': 1\n },\n 10);"); opts.brace_style = 'collapse'; @@ -546,6 +547,8 @@ function run_beautifier_tests(test_obj, Urlencoded, js_beautify) bt('var a = new function() {};'); bt('var a = new function a() {};'); test_fragment('new function'); + bt("foo({\n 'a': 1\n},\n10);", + "foo({\n 'a': 1\n },\n 10);"); opts.brace_style = "end-expand"; @@ -582,6 +585,8 @@ function run_beautifier_tests(test_obj, Urlencoded, js_beautify) bt('var a = new function() {};'); bt('var a = new function a() {};'); test_fragment('new function'); + bt("foo({\n 'a': 1\n},\n10);", + "foo({\n 'a': 1\n },\n 10);"); opts.brace_style = 'collapse'; diff --git a/python/jsbeautifier/tests/testjsbeautifier.py b/python/jsbeautifier/tests/testjsbeautifier.py index <HASH>..<HASH> 100644 --- a/python/jsbeautifier/tests/testjsbeautifier.py +++ b/python/jsbeautifier/tests/testjsbeautifier.py @@ -463,7 +463,8 @@ class TestJSBeautifier(unittest.TestCase): bt('var a = new function() {};'); bt('var a = new function a()\n {};'); test_fragment('new function'); - + bt("foo({\n 'a': 1\n},\n10);", + "foo(\n {\n 'a': 1\n },\n 10);"); self.options.brace_style = 'collapse'; @@ -500,6 +501,8 @@ class TestJSBeautifier(unittest.TestCase): bt('var a = new function() {};'); bt('var a = new function a() {};'); test_fragment('new function'); + bt("foo({\n 'a': 1\n},\n10);", + "foo({\n 'a': 1\n },\n 10);"); self.options.brace_style = "end-expand"; @@ -536,6 +539,8 @@ class TestJSBeautifier(unittest.TestCase): bt('var a = new function() {};'); bt('var a = new function a() {};'); test_fragment('new function'); + bt("foo({\n 'a': 1\n},\n10);", + "foo({\n 'a': 1\n },\n 10);"); self.options.brace_style = 'collapse';
Test for another brace indenting case Closes #<I>
beautify-web_js-beautify
train
80e36b17776e6c472c2bf547c890ac5f9971dff4
diff --git a/spec/integrations/shared_examples.rb b/spec/integrations/shared_examples.rb index <HASH>..<HASH> 100644 --- a/spec/integrations/shared_examples.rb +++ b/spec/integrations/shared_examples.rb @@ -17,10 +17,10 @@ shared_examples_for 'Firehose::Rack::App' do end before(:each) { WebMock.disable! } - after(:each) { WebMock.enable! } + after(:each) { WebMock.enable! } let(:app) { Firehose::Rack::App.new } - let(:messages) { (1..2000).map{|n| "msg-#{n}" } } + let(:messages) { (1..200).map{|n| "msg-#{n}" } } let(:channel) { "/firehose/integration/#{Time.now.to_i}" } let(:http_url) { "http://#{uri.host}:#{uri.port}#{channel}" } let(:ws_url) { "ws://#{uri.host}:#{uri.port}#{channel}" }
Integrations pump <I> messages instead of 2,<I> for faster run times.
firehoseio_firehose
train
6589de3f28461e137ba7fa9c94c598530425c4c9
diff --git a/lib/scene_toolkit/cli/verify.rb b/lib/scene_toolkit/cli/verify.rb index <HASH>..<HASH> 100644 --- a/lib/scene_toolkit/cli/verify.rb +++ b/lib/scene_toolkit/cli/verify.rb @@ -6,9 +6,9 @@ module SceneToolkit class CLI < Optitron::CLI desc "Verify library or release. Executes all validations if none specified" SceneToolkit::Release.available_validations.each { |name, desc| opt name, desc } - opt "hide_valid", "Do not display valid releases" - opt "move_invalid_to", "Move INVALID releases to specified folder", :type => :string - opt "move_valid_to", "Move VALID releases to specified folder", :type => :string + opt "hide-valid", "Do not display valid releases results" + opt "move-invalid-to", "Move INVALID releases to specified folder", :type => :string + opt "move-valid-to", "Move VALID releases to specified folder", :type => :string def verify(directory_string) validations_to_exec = [] @@ -20,12 +20,12 @@ module SceneToolkit validations_to_exec = SceneToolkit::Release::available_validations.keys end - invalid_target_directory = params.delete("move_invalid_to") + invalid_target_directory = params.delete("move-invalid-to") unless invalid_target_directory.nil? raise ArgumentError.new("#{invalid_target_directory} does not exist") unless File.directory?(invalid_target_directory) end - valid_target_directory = params.delete("move_valid_to") + valid_target_directory = params.delete("move-valid-to") unless valid_target_directory.nil? raise ArgumentError.new("#{invalid_target_directory} does not exist") unless File.directory?(valid_target_directory) end @@ -38,7 +38,7 @@ module SceneToolkit release_count += 1 if release.valid?(validations_to_exec) valid_releases += 1 - if not params["hide_valid"] or not valid_target_directory.nil? + if not params["hide-valid"] or not valid_target_directory.nil? heading(release, :green) do print_errors(release) print_warnings(release)
Use dashes instead of underscores for command params
knoopx_scene-toolkit
train
81cb0141e5dab05caae4148dfc464854f1f1218a
diff --git a/salt/transport/road/raet/stacking.py b/salt/transport/road/raet/stacking.py index <HASH>..<HASH> 100644 --- a/salt/transport/road/raet/stacking.py +++ b/salt/transport/road/raet/stacking.py @@ -96,7 +96,7 @@ class StackUdp(object): main=main, ha=ha) self.estate.stack = self - self.server = aiding.SocketUdpNb(ha=self.estate.ha, bufsize=raeting.MAX_PACKET_SIZE) + self.server = aiding.SocketUdpNb(ha=self.estate.ha, bufsize=raeting.MAX_PACKET_SIZE * 2) self.server.reopen() # open socket self.estate.ha = self.server.ha # update estate host address after open self.dumpLocal() # save local estate data diff --git a/salt/transport/road/raet/test/test_stackBootstrap0.py b/salt/transport/road/raet/test/test_stackBootstrap0.py index <HASH>..<HASH> 100644 --- a/salt/transport/road/raet/test/test_stackBootstrap0.py +++ b/salt/transport/road/raet/test/test_stackBootstrap0.py @@ -241,7 +241,7 @@ def test(): # segmented packets stuff = [] - for i in range(300): + for i in range(600): stuff.append(str(i).rjust(4, " ")) stuff = "".join(stuff)
started fixing raet header format
saltstack_salt
train
a77374e37ed1171e588e67958334772e0432739d
diff --git a/index.js b/index.js index <HASH>..<HASH> 100644 --- a/index.js +++ b/index.js @@ -3,10 +3,10 @@ var request = require('request'); var CouchDBClient = function (options) { this.options = {}; - Object.assign(this.options, options, { + Object.assign(this.options, { host: '127.0.0.1', port: '5984' - }); + }, options); this.place = 'http://' + this.options.host + ':' + this.options.port; }; function d(name, obj) {
fixed CouchDBClient constructor so that user-defined host and port can be used without being overwritten by default values
legodude17_couchdb-client
train
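The couchdb-client fix above is a general merge-order pitfall. A minimal sketch, written in Python rather than the project's JavaScript and using made-up values:

```python
# Defaults must be applied first so that caller-supplied options win the merge.
defaults = {"host": "127.0.0.1", "port": "5984"}
options = {"host": "db.internal"}            # hypothetical user option

wrong = {**options, **defaults}   # {'host': '127.0.0.1', 'port': '5984'} -- defaults clobber the caller
right = {**defaults, **options}   # {'host': 'db.internal', 'port': '5984'}
```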
d247ea1df12d41c735407b4015d98fb1470a2d5b
diff --git a/alot/commands/globals.py b/alot/commands/globals.py index <HASH>..<HASH> 100644 --- a/alot/commands/globals.py +++ b/alot/commands/globals.py @@ -244,11 +244,8 @@ class EditCommand(ExternalCommand): :param thread: run asynchronously, don't block alot :type thread: bool """ - self.spawn = settings.get('editor_spawn') or spawn - if thread != None: - self.thread = thread - else: - self.thread = settings.get('editor_in_thread') + self.spawn = spawn or settings.get('editor_spawn') + self.thread = thread or settings.get('editor_in_thread') editor_cmdstring = None if os.path.isfile('/usr/bin/editor'):
make EditCommand use spawn/thread params directly This makes the command use its spawn and thread parameters and fall back to the values given in the config
pazz_alot
train
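The alot change above swaps an explicit None check for an `or` fallback. A hypothetical Python sketch of that pattern, including its one caveat:

```python
# `explicit or configured` falls back whenever `explicit` is falsy (None, False, 0, ""),
# which is broader than the previous `!= None` test.
def resolve(explicit, configured):
    return explicit or configured

resolve(True, False)    # True  -- explicit value wins
resolve(None, True)     # True  -- falls back to the configured value
resolve(False, True)    # True  -- also falls back, unlike an `is not None` check
```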
0a28712551388ae8ba5107a982941acc94fb74ad
diff --git a/examples/map.py b/examples/map.py index <HASH>..<HASH> 100755 --- a/examples/map.py +++ b/examples/map.py @@ -23,7 +23,7 @@ try: from PIL import Image except ImportError: # PIL not in search path. Let's see if it can be found in the parent folder - sys.stderr.write("Module PIL/Image not found. PIL can be found at http://www.pythonware.com/library/pil/") + sys.stderr.write("Module PIL/Image not found. Pillow (a PIL fork) can be found at http://python-imaging.github.io/") sys.exit(70) # EX_SOFTWARE def get_heightmap_image(chunk, buffer=False, gmin=False, gmax=False):
Recommend Pillow instead of PIL library
twoolie_NBT
train
b2f7134bdce86d99dd5fccdeeab959b5b97d673e
diff --git a/packages/generator-nitro/generators/app/index.js b/packages/generator-nitro/generators/app/index.js index <HASH>..<HASH> 100644 --- a/packages/generator-nitro/generators/app/index.js +++ b/packages/generator-nitro/generators/app/index.js @@ -98,6 +98,7 @@ module.exports = class extends Generator { }, ]).then((answers) => { this._update = answers.update; + this.options.skipInstall = true; if (!this._update) { return; @@ -428,8 +429,14 @@ module.exports = class extends Generator { this.log(chalk.red(e.message)); } - this.log(yosay( - `All done – run \`npm start\` to start ${chalk.cyan('Nitro')} in development mode` - )); + if (this._update) { + this.log(yosay( + `All done – Check local changes and then\nrun \`npm install\` to update your project.` + )); + } else { + this.log(yosay( + `All done –\nrun \`npm start\` to start ${chalk.cyan('Nitro')} in development mode.` + )); + } } };
fix(generator): Do not install on project update
namics_generator-nitro
train
2bd15efbda56fe4ca3d5d7cd27b6cd5aac98f651
diff --git a/lib/savon/request.rb b/lib/savon/request.rb index <HASH>..<HASH> 100644 --- a/lib/savon/request.rb +++ b/lib/savon/request.rb @@ -55,12 +55,19 @@ module Savon def build configure_proxy configure_timeouts + configure_headers configure_ssl configure_auth configure_redirect_handling @http_request end + + private + + def configure_headers + @http_request.headers = @globals[:headers] if @globals.include? :headers + end end class SOAPRequest < HTTPRequest diff --git a/spec/savon/request_spec.rb b/spec/savon/request_spec.rb index <HASH>..<HASH> 100644 --- a/spec/savon/request_spec.rb +++ b/spec/savon/request_spec.rb @@ -16,6 +16,20 @@ describe Savon::WSDLRequest do expect(wsdl_request.build).to be_an(HTTPI::Request) end + describe "headers" do + it "are set when specified" do + globals.headers("Proxy-Authorization" => "Basic auth") + configured_http_request = new_wsdl_request.build + + expect(configured_http_request.headers["Proxy-Authorization"]).to eq("Basic auth") + end + + it "are not set otherwise" do + configured_http_request = new_wsdl_request.build + expect(configured_http_request.headers).to_not include("Proxy-Authorization") + end + end + describe "proxy" do it "is set when specified" do globals.proxy("http://proxy.example.com")
Add headers configuration to WSDLRequest#build (#<I>) e.g. some proxy servers may require ```ruby { "Proxy-Authorization" => "Basic base<I>EncodedAuthHash" } ``` as part of the headers. In particular we ran into issues with using the Proximo Heroku Add-On. This fixes #<I> and may be considered as a late addition to #<I>.
savonrb_savon
train
b34db0dfdea7981ef0c60dff1fd7e8e95fae5f58
diff --git a/tensorflow_datasets/translate/wmt.py b/tensorflow_datasets/translate/wmt.py index <HASH>..<HASH> 100644 --- a/tensorflow_datasets/translate/wmt.py +++ b/tensorflow_datasets/translate/wmt.py @@ -789,7 +789,7 @@ def _parse_parallel_sentences(f1, f2): if split_path[-1] == "gz": lang = split_path[-2] - with tf.io.gfile.GFile(path) as f, gzip.GzipFile(fileobj=f) as g: + with tf.io.gfile.GFile(path, "rb") as f, gzip.GzipFile(fileobj=f) as g: return g.read().split("\n"), lang if split_path[-1] == "txt":
Another utf-8 fix for WMT dataset. This time, we should make sure to treat the gz files as binary. PiperOrigin-RevId: <I>
tensorflow_datasets
train
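The WMT fix above hinges on opening the gzip archive in binary mode. A standalone sketch, using plain `open()` and a hypothetical filename in place of `tf.io.gfile.GFile`:

```python
import gzip

# GzipFile expects a bytes stream; a text-mode handle breaks decompression.
with open("corpus.de.gz", "rb") as f, gzip.GzipFile(fileobj=f) as g:
    sentences = g.read().decode("utf-8").split("\n")
```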
cac7815a249d8a79b89c2fc5c60b6dc65d230a8c
diff --git a/packages/components/bolt-video/plugins/cue-points.js b/packages/components/bolt-video/plugins/cue-points.js index <HASH>..<HASH> 100644 --- a/packages/components/bolt-video/plugins/cue-points.js +++ b/packages/components/bolt-video/plugins/cue-points.js @@ -26,7 +26,10 @@ export function cuePointsPlugin(player, elem) { player.tt.activeCues[0].startTime, ); - _render(player.allCuePointData[0]); + // only render cue points CTA if the overlay element exists to be rendered into + if (elem.overlayElement){ + _render(player.allCuePointData[0]); + } } };
chore: confirm the video player overlay exists before attempting to render a CTA inside of it
bolt-design-system_bolt
train
1a1123c7dabbe480aff345b695e13d3a735c9c8e
diff --git a/lib/ProMotion/table/table.rb b/lib/ProMotion/table/table.rb index <HASH>..<HASH> 100644 --- a/lib/ProMotion/table/table.rb +++ b/lib/ProMotion/table/table.rb @@ -125,12 +125,16 @@ module ProMotion table_cell end - def update_table_data(index_paths = nil) - if index_paths - index_paths = [index_paths] unless index_paths.is_a?(Array) + def update_table_data(args = {}) + # Try and detect if the args param is a NSIndexPath or an array of them + args = { index_paths: args } if args.is_a?(NSIndexPath) || (args.is_a?(Array) && args.first.is_a?(NSIndexPath)) + + if args[:index_paths] + args[:animation] ||= UITableViewRowAnimationNone + index_paths = [args[:index_paths]] unless args[:index_paths].is_a?(Array) table_view.beginUpdates - table_view.reloadRowsAtIndexPaths(index_path, withRowAnimation:UITableViewRowAnimationNone) + table_view.reloadRowsAtIndexPaths(index_paths, withRowAnimation:args[:animation]) table_view.endUpdates else self.update_table_view_data(self.table_data)
Allow args to be just an NSIndexPath or an array of them.
infinitered_ProMotion
train
1cc79351eee9763cc592f7d0678e290cf8887109
diff --git a/Vps/Exception.php b/Vps/Exception.php index <HASH>..<HASH> 100644 --- a/Vps/Exception.php +++ b/Vps/Exception.php @@ -38,7 +38,8 @@ class Vps_Exception extends Vps_Exception_NoLog $user = "guest"; try { if ($u = Zend_Registry::get('userModel')->getAuthedUser()) { - $user = "$u, id $u->id, $u->role"; + $userName = $u->__toString(); + $user = "$userName, id $u->id, $u->role"; } } catch (Exception $e) { $user = "error getting user";
if user::__toString throws an exception we get a fatal error here and we can't catch that. How great is PHP
koala-framework_koala-framework
train
4037098eba7661297ef4eb93155e1f15d1b85cc2
diff --git a/admin/settings/appearance.php b/admin/settings/appearance.php index <HASH>..<HASH> 100644 --- a/admin/settings/appearance.php +++ b/admin/settings/appearance.php @@ -102,6 +102,7 @@ if ($hassiteconfig) { // speedup for non-admins, add all caps used on this page $temp->add(new admin_setting_configcheckbox('navlinkcoursesections', new lang_string('navlinkcoursesections', 'admin'), new lang_string('navlinkcoursesections_help', 'admin'), 0)); $temp->add(new admin_setting_configcheckbox('usesitenameforsitepages', new lang_string('usesitenameforsitepages', 'admin'), new lang_string('configusesitenameforsitepages', 'admin'), 0)); $temp->add(new admin_setting_configcheckbox('linkadmincategories', new lang_string('linkadmincategories', 'admin'), new lang_string('linkadmincategories_help', 'admin'), 0)); + $temp->add(new admin_setting_configcheckbox('navshowfrontpagemods', new lang_string('navshowfrontpagemods', 'admin'), new lang_string('navshowfrontpagemods_help', 'admin'), 1)); $temp->add(new admin_setting_configcheckbox('navadduserpostslinks', new lang_string('navadduserpostslinks', 'admin'), new lang_string('navadduserpostslinks_help', 'admin'), 1)); $ADMIN->add('appearance', $temp); diff --git a/lang/en/admin.php b/lang/en/admin.php index <HASH>..<HASH> 100644 --- a/lang/en/admin.php +++ b/lang/en/admin.php @@ -693,6 +693,8 @@ $string['navigationupgrade'] = 'This upgrade introduces two new navigation block $string['navcourselimit'] = 'Course limit'; $string['navlinkcoursesections'] = 'Link course sections'; $string['navlinkcoursesections_help'] = 'If enabled course sections will be shown as links within the navigation.'; +$string['navshowfrontpagemods'] = 'Show front page activities in the navigation'; +$string['navshowfrontpagemods_help'] = 'If enabled front page activities will be shown on the navigation under site pages.'; $string['navshowallcourses'] = 'Show all courses'; $string['navshowcategories'] = 'Show course categories'; $string['neverdeleteruns'] = 'Never delete runs'; diff --git a/lib/navigationlib.php b/lib/navigationlib.php index <HASH>..<HASH> 100644 --- a/lib/navigationlib.php +++ b/lib/navigationlib.php @@ -1608,6 +1608,7 @@ class global_navigation extends navigation_node { continue; } $activity = new stdClass; + $activity->course = $course->id; $activity->section = $section->section; $activity->name = $cm->name; $activity->icon = $cm->icon; @@ -1714,7 +1715,8 @@ class global_navigation extends navigation_node { * @param course_modinfo $modinfo Object returned from {@see get_fast_modinfo()} * @return array Array of activity nodes */ - protected function load_section_activities(navigation_node $sectionnode, $sectionnumber, $activities) { + protected function load_section_activities(navigation_node $sectionnode, $sectionnumber, $activities, $course = null) { + global $CFG; // A static counter for JS function naming static $legacyonclickcounter = 0; @@ -1724,6 +1726,18 @@ class global_navigation extends navigation_node { } $activitynodes = array(); + if (empty($activities)) { + return $activitynodes; + } + + if (!is_object($course)) { + $activity = reset($activities); + $courseid = $activity->course; + } else { + $courseid = $course->id; + } + $showactivities = ($courseid != SITEID || !empty($CFG->navshowfrontpagemods)); + foreach ($activities as $activity) { if ($activity->section != $sectionnumber) { continue; @@ -1763,7 +1777,7 @@ class global_navigation extends navigation_node { $activitynode = $sectionnode->add($activityname, $action, 
navigation_node::TYPE_ACTIVITY, null, $activity->id, $icon); $activitynode->title(get_string('modulename', $activity->modname)); $activitynode->hidden = $activity->hidden; - $activitynode->display = $activity->display; + $activitynode->display = $showactivities && $activity->display; $activitynode->nodetype = $activity->nodetype; $activitynodes[$activity->id] = $activitynode; }
MDL-<I> navigation: Added setting to control display of front page activities on the navigation
moodle_moodle
train
97e8e8346cb3463670cea51e88ef26bc186d811f
diff --git a/i3pystatus/reddit.py b/i3pystatus/reddit.py index <HASH>..<HASH> 100644 --- a/i3pystatus/reddit.py +++ b/i3pystatus/reddit.py @@ -62,7 +62,6 @@ class Reddit(IntervalModule): } on_leftclick = "open_permalink" - on_click = "open_link" _permalink = "" _url = "" @@ -136,6 +135,9 @@ class Reddit(IntervalModule): "color": color, } + def open_mail(self): + user_open('https://www.reddit.com/message/unread/') + def open_permalink(self): user_open(self._permalink)
Added method to open the user's mailbox.
enkore_i3pystatus
train
63057c6fc2f89fb326d4d280123d7c1d2de863ab
diff --git a/packages/vaex-jupyter/vaex/jupyter/_version.py b/packages/vaex-jupyter/vaex/jupyter/_version.py index <HASH>..<HASH> 100644 --- a/packages/vaex-jupyter/vaex/jupyter/_version.py +++ b/packages/vaex-jupyter/vaex/jupyter/_version.py @@ -1,2 +1,2 @@ -__version_tuple__ = (0, 7, 0) -__version__ = '0.7.0' +__version_tuple__ = (0, 8, 0) +__version__ = '0.8.0' diff --git a/packages/vaex-meta/setup.py b/packages/vaex-meta/setup.py index <HASH>..<HASH> 100644 --- a/packages/vaex-meta/setup.py +++ b/packages/vaex-meta/setup.py @@ -21,7 +21,7 @@ install_requires = [ 'vaex-hdf5>=0.12.1,<0.13', 'vaex-viz>=0.5.2,<0.6', 'vaex-server>=0.8.1,<0.9', - 'vaex-jupyter>=0.7.0,<0.8', + 'vaex-jupyter>=0.8.0,<0.8', 'vaex-ml>=0.17.0,<0.18', # vaex-graphql is not on conda-forge yet ]
🔖 vaex-jupyter <I> released
vaexio_vaex
train
fc460248be5f079bca23c17e962a764a6aa0831a
diff --git a/generators/crud/Generator.php b/generators/crud/Generator.php index <HASH>..<HASH> 100644 --- a/generators/crud/Generator.php +++ b/generators/crud/Generator.php @@ -67,6 +67,7 @@ class Generator extends \yii\gii\Generator [['modelClass'], 'validateClass', 'params' => ['extends' => BaseActiveRecord::className()]], [['baseControllerClass'], 'validateClass', 'params' => ['extends' => Controller::className()]], [['controllerClass'], 'match', 'pattern' => '/Controller$/', 'message' => 'Controller class name must be suffixed with "Controller".'], + [['controllerClass'], 'match', 'pattern' => '/[A-Z0-9][^\\]+Controller$/', 'message' => 'Controller class name must start with an uppercase letter.'], [['controllerClass', 'searchModelClass'], 'validateNewClass'], [['indexWidgetType'], 'in', 'range' => ['grid', 'list']], [['modelClass'], 'validateModelClass'], @@ -100,7 +101,8 @@ class Generator extends \yii\gii\Generator 'modelClass' => 'This is the ActiveRecord class associated with the table that CRUD will be built upon. You should provide a fully qualified class name, e.g., <code>app\models\Post</code>.', 'controllerClass' => 'This is the name of the controller class to be generated. You should - provide a fully qualified namespaced class, .e.g, <code>app\controllers\PostController</code>.', + provide a fully qualified namespaced class, .e.g, <code>app\controllers\PostController</code>. + The controller class name should follow the CamelCase scheme with an uppercase first letter', 'baseControllerClass' => 'This is the class that the new CRUD controller class will extend from. You should provide a fully qualified class name, e.g., <code>yii\web\Controller</code>.', 'moduleID' => 'This is the ID of the module that the generated controller will belong to.
Fix #<I> Adding an additional validation rule in the gii-crud generator to enforce the expected format.
yiisoft_yii2-gii
train
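The gii rule above validates controller class names with a regex. A comparable check sketched in Python (a hypothetical helper, not the Yii rule itself):

```python
import re

# The short class name must start with an uppercase letter and end in "Controller".
def looks_like_controller_class(fqcn: str) -> bool:
    short_name = fqcn.rsplit("\\", 1)[-1]
    return re.fullmatch(r"[A-Z][A-Za-z0-9]*Controller", short_name) is not None

looks_like_controller_class("app\\controllers\\PostController")   # True
looks_like_controller_class("app\\controllers\\postController")   # False
```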
fe9e282f42bb7304dc849d6deffac9c7085ace0a
diff --git a/riak/tests/test_btypes.py b/riak/tests/test_btypes.py index <HASH>..<HASH> 100644 --- a/riak/tests/test_btypes.py +++ b/riak/tests/test_btypes.py @@ -88,3 +88,19 @@ class BucketTypeTests(object): buckets.extend(nested_buckets) self.assertIn(bucket, buckets) + + @unittest.skipIf(SKIP_BTYPES == '1', "SKIP_BTYPES is set") + def test_btype_list_keys(self): + btype = self.client.bucket_type("pytest") + bucket = btype.bucket(self.bucket_name) + + obj = bucket.new(self.key_name) + obj.data = [1,2,3] + obj.store() + + self.assertIn(self.key_name, bucket.get_keys()) + keys = [] + for keylist in bucket.stream_keys(): + keys.extend(keylist) + + self.assertIn(self.key_name, keys) diff --git a/riak/transports/http/resources.py b/riak/transports/http/resources.py index <HASH>..<HASH> 100644 --- a/riak/transports/http/resources.py +++ b/riak/transports/http/resources.py @@ -69,7 +69,7 @@ class RiakHttpResources(object): query.update(options) if self.riak_kv_wm_bucket_type and bucket_type: return mkpath("/types", quote_plus(bucket_type), "buckets", - quote_plus(bucket), "keys", **options) + quote_plus(bucket), "keys", **query) if self.riak_kv_wm_buckets: return mkpath("/buckets", quote_plus(bucket), "keys", **query)
Add bucket-type test for list-keys, fix bug in resources.py
basho_riak-python-client
train
a19c9dad4dd9f81b5d4b59aecf682138c0f36e84
diff --git a/netmiko/cisco/cisco_ios.py b/netmiko/cisco/cisco_ios.py index <HASH>..<HASH> 100644 --- a/netmiko/cisco/cisco_ios.py +++ b/netmiko/cisco/cisco_ios.py @@ -20,6 +20,27 @@ class CiscoIosBase(CiscoBaseConnection): self.disable_paging() self.set_base_prompt() + def set_base_prompt( + self, + pri_prompt_terminator: str = "#", + alt_prompt_terminator: str = ">", + delay_factor: float = 1.0, + pattern: Optional[str] = None, + ) -> str: + """ + Cisco IOS/IOS-XE abbreviates the prompt at 20-chars in config mode. + + Consequently, abbreviate the base_prompt + """ + base_prompt = super().set_base_prompt( + pri_prompt_terminator=pri_prompt_terminator, + alt_prompt_terminator=alt_prompt_terminator, + delay_factor=delay_factor, + pattern=pattern, + ) + self.base_prompt = base_prompt[:16] + return self.base_prompt + def check_config_mode(self, check_string: str = ")#", pattern: str = r"#") -> bool: """ Checks if the device is in configuration mode or not. diff --git a/netmiko/cisco/cisco_xr.py b/netmiko/cisco/cisco_xr.py index <HASH>..<HASH> 100644 --- a/netmiko/cisco/cisco_xr.py +++ b/netmiko/cisco/cisco_xr.py @@ -20,6 +20,27 @@ class CiscoXrBase(CiscoBaseConnection): self._test_channel_read(pattern=r"[>#]") self.set_base_prompt() + def set_base_prompt( + self, + pri_prompt_terminator: str = "#", + alt_prompt_terminator: str = ">", + delay_factor: float = 1.0, + pattern: Optional[str] = None, + ) -> str: + """ + Cisco IOS-XR abbreviates the prompt at 31-chars in config mode. + + Consequently, abbreviate the base_prompt + """ + base_prompt = super().set_base_prompt( + pri_prompt_terminator=pri_prompt_terminator, + alt_prompt_terminator=alt_prompt_terminator, + delay_factor=delay_factor, + pattern=pattern, + ) + self.base_prompt = base_prompt[:16] + return self.base_prompt + def send_config_set( self, config_commands: Union[str, Sequence[str], TextIO, None] = None, diff --git a/tests/conftest.py b/tests/conftest.py index <HASH>..<HASH> 100755 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -35,6 +35,13 @@ def net_connect(request): device = test_devices[device_under_test] device["verbose"] = False conn = ConnectHandler(**device) + # Temporarily set the hostname + if device_under_test == "cisco3_long_name": + conn.send_config_set("hostname cisco3-with-a-very-long-hostname") + elif device_under_test == "cisco_xr_long_name": + conn.send_config_set("hostname iosxr3-with-very-long-hostname-plus") + conn.commit() + conn.exit_config_mode() return conn diff --git a/tests/test_netmiko_config.py b/tests/test_netmiko_config.py index <HASH>..<HASH> 100755 --- a/tests/test_netmiko_config.py +++ b/tests/test_netmiko_config.py @@ -220,4 +220,8 @@ def test_disconnect(net_connect, commands, expected_responses): """ Terminate the SSH session """ + if net_connect.host == "cisco3.lasthop.io": + hostname = net_connect.send_command("show run | inc hostname") + if "long-hostname" in hostname: + net_connect.send_config_set("hostname cisco3") net_connect.disconnect() diff --git a/tests/test_suite_alt.sh b/tests/test_suite_alt.sh index <HASH>..<HASH> 100755 --- a/tests/test_suite_alt.sh +++ b/tests/test_suite_alt.sh @@ -12,6 +12,10 @@ echo "Starting tests...good luck:" \ && $PYTEST test_netmiko_config.py --test_device cisco3 \ && $PYTEST test_netmiko_config_acl.py --test_device cisco3 \ \ +&& echo "Cisco IOS-XE and IOS-XR Long Name Test" \ +&& $PYTEST test_netmiko_config.py --test_device cisco3_long_name +&& $PYTEST test_netmiko_config.py --test_device cisco_xr_long_name +\ && echo "Exception and Timeout Tests" \ && 
$PYTEST test_netmiko_exceptions.py \ \
Fix Cisco IOS, IOS-XE, and IOS-XR abbreviating prompt in config mode issue (#<I>)
ktbyers_netmiko
train
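The netmiko fix above keeps only the first 16 characters of the detected prompt, since these platforms abbreviate long hostnames in config mode. A hypothetical Python sketch of the override pattern (the hostname literal stands in for real prompt detection):

```python
class BaseDriver:
    def set_base_prompt(self) -> str:
        self.base_prompt = "cisco3-with-a-very-long-hostname"  # stand-in for real detection
        return self.base_prompt

class IosLikeDriver(BaseDriver):
    def set_base_prompt(self) -> str:
        prompt = super().set_base_prompt()
        self.base_prompt = prompt[:16]     # abbreviate to match the config-mode prompt
        return self.base_prompt

IosLikeDriver().set_base_prompt()          # 'cisco3-with-a-ve'
```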
4a9265be11c07f12d95da90c3545462d606f645d
diff --git a/go/vt/schemamanager/schemamanager.go b/go/vt/schemamanager/schemamanager.go index <HASH>..<HASH> 100644 --- a/go/vt/schemamanager/schemamanager.go +++ b/go/vt/schemamanager/schemamanager.go @@ -88,8 +88,10 @@ func Run(ctx context.Context, controller Controller, executor Executor) error { controller.OnReadFail(ctx, err) return err } - controller.OnReadSuccess(ctx) + if len(sqls) == 0 { + return nil + } keyspace := controller.Keyspace() if err := executor.Open(ctx, keyspace); err != nil { log.Errorf("failed to open executor: %v", err)
make schema manager stop early if there is no schema change
vitessio_vitess
train
50c5eb508d297f8514438e3c65366c2cfda45e71
diff --git a/addon/-task-group.js b/addon/-task-group.js index <HASH>..<HASH> 100644 --- a/addon/-task-group.js +++ b/addon/-task-group.js @@ -1,4 +1,5 @@ import Ember from 'ember'; +const { computed } = Ember; import { objectAssign, _ComputedProperty } from './utils'; import TaskStateMixin from './-task-state-mixin'; import { propertyModifiers, resolveScheduler } from './-property-modifiers-mixin'; @@ -9,9 +10,9 @@ export const TaskGroup = Ember.Object.extend(TaskStateMixin, { return `<TaskGroup:${this._propertyName}>`; }, - // FIXME: this is hacky and perhaps wrong - isRunning: Ember.computed.or('numRunning', 'numQueued'), - isQueued: false, + _numRunningOrNumQueued: computed.or('numRunning', 'numQueued'), + isRunning: computed.bool('_numRunningOrNumQueued'), + isQueued: false }); export function TaskGroupProperty(...decorators) { @@ -34,4 +35,3 @@ TaskGroupProperty.prototype = Object.create(_ComputedProperty.prototype); objectAssign(TaskGroupProperty.prototype, propertyModifiers, { constructor: TaskGroupProperty, }); - diff --git a/tests/unit/task-groups-test.js b/tests/unit/task-groups-test.js index <HASH>..<HASH> 100644 --- a/tests/unit/task-groups-test.js +++ b/tests/unit/task-groups-test.js @@ -122,3 +122,26 @@ test("task groups can be cancelled", function(assert) { assertStates(assert, taskB, false, false, true, suffix); }); +test("task groups return a boolean for isRunning", function(assert) { + assert.expect(3); + + let contextResolve; + let defer = Ember.RSVP.defer() + + let Obj = Ember.Object.extend({ + tg: taskGroup().enqueue(), + + myTask: task(function * () { + yield defer.promise; + }).group('tg') + }); + + let obj = Obj.create(); + let tg = obj.get('tg'); + let myTask = obj.get('myTask'); + assert.strictEqual(tg.get('isRunning'), false); + Em.run(() => myTask.perform()); + assert.strictEqual(tg.get('isRunning'), true); + Ember.run(defer, defer.resolve); + assert.strictEqual(tg.get('isRunning'), false); +});
Make TaskGroup.isRunning also return a boolean
machty_ember-concurrency
train
52fce0ae30304d48b803fa526852884f682c11db
diff --git a/src/acdhOeaw/schema/Object.php b/src/acdhOeaw/schema/Object.php index <HASH>..<HASH> 100644 --- a/src/acdhOeaw/schema/Object.php +++ b/src/acdhOeaw/schema/Object.php @@ -64,6 +64,15 @@ abstract class Object { private $id; /** + * Allows to keep track of the corresponding repository resource state: + * - null - unknown + * - true - recent call to updateRms() created the repository resource + * - false - repository resource already existed uppon last updateRms() call + * @var bool + */ + protected $created; + + /** * Fedora connection object. * @var \acdhOeaw\fedora\Fedora */ @@ -72,8 +81,8 @@ abstract class Object { /** * Creates an object representing a real-world entity. * - * @param Fedora $fedora - * @param string $id + * @param Fedora $fedora repository connection object + * @param string $id entity identifier (derived class-specific) */ public function __construct(Fedora $fedora, string $id) { $this->fedora = $fedora; @@ -153,12 +162,14 @@ abstract class Object { */ public function updateRms(bool $create = true, bool $uploadBinary = true, string $path = '/'): FedoraResource { - $created = $this->findResource($create, $uploadBinary, $path); + $this->created = $this->findResource($create, $uploadBinary, $path); // if it has just been created it would be a waste of time to update it - if (!$created) { - $meta = $this->mergeMetadata($this->res->getMetadata(), $this->getMetadata()); - $this->res->setMetadata($meta, true); + if (!$this->created) { + $meta = $this->getMetadata(); + $this->fedora->fixMetadataReferences($meta); + $meta = $this->mergeMetadata($this->res->getMetadata(), $meta); + $this->res->setMetadata($meta); $this->res->updateMetadata(); $binaryContent = $this->getBinaryData(); @@ -171,6 +182,19 @@ abstract class Object { } /** + * Informs about the corresponding repository resource state uppon last call + * to the `updateRms()` method: + * - null - the updateRms() was not called yet + * - true - repository resource was created by last call to the updateRms() + * - false - repository resource already existed uppoin last call to the + * updateRms() + * @return bool + */ + public function getCreated(): bool { + return $this->created; + } + + /** * Tries to find a repository resource representing a given object. * * @param bool $create should repository resource be created if it was not
schema\Object class provides information about corresponding repository resource state
acdh-oeaw_repo-php-util
train
a84a2edfdd57c1428a74410548b3afdd88aaf928
diff --git a/src/Illuminate/Database/Eloquent/Relations/Relation.php b/src/Illuminate/Database/Eloquent/Relations/Relation.php index <HASH>..<HASH> 100755 --- a/src/Illuminate/Database/Eloquent/Relations/Relation.php +++ b/src/Illuminate/Database/Eloquent/Relations/Relation.php @@ -478,13 +478,7 @@ abstract class Relation return $this->macroCall($method, $parameters); } - $result = $this->forwardCallTo($this->query, $method, $parameters); - - if ($result === $this->query) { - return $this; - } - - return $result; + return $this->forwardDecoratedCallTo($this->query, $method, $parameters); } /** diff --git a/src/Illuminate/Support/Traits/ForwardsCalls.php b/src/Illuminate/Support/Traits/ForwardsCalls.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Support/Traits/ForwardsCalls.php +++ b/src/Illuminate/Support/Traits/ForwardsCalls.php @@ -38,6 +38,27 @@ trait ForwardsCalls } /** + * Forward a method call to the given object, returning $this if the forwarded call returned itself. + * + * @param mixed $object + * @param string $method + * @param array $parameters + * @return mixed + * + * @throws \BadMethodCallException + */ + protected function forwardDecoratedCallTo($object, $method, $parameters) + { + $result = $this->forwardCallTo($object, $method, $parameters); + + if ($result === $object) { + return $this; + } + + return $result; + } + + /** * Throw a bad method call exception for the given method. * * @param string $method
[8.x] Add forwardDecoratedCallTo (#<I>) * [8.x] Add forwardDecoratedCallTo * Code style * Update ForwardsCalls.php
laravel_framework
train
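The Laravel helper above preserves fluent chaining through a decorator. A language-neutral sketch of the same idea in Python (hypothetical classes, not Laravel's API):

```python
class Decorator:
    def __init__(self, inner):
        self._inner = inner

    def __getattr__(self, name):
        def forward(*args, **kwargs):
            result = getattr(self._inner, name)(*args, **kwargs)
            # If the wrapped object returned itself, hand back the wrapper
            # so the chain keeps flowing through the decorator.
            return self if result is self._inner else result
        return forward

class Query:
    def where(self, *args): return self    # fluent
    def count(self): return 42

Decorator(Query()).where("id", 1).count()  # 42; where() came back as the Decorator
```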
1ce693048156c01af607dd2fbebeb421deca0093
diff --git a/lib/kickscraper/client.rb b/lib/kickscraper/client.rb index <HASH>..<HASH> 100644 --- a/lib/kickscraper/client.rb +++ b/lib/kickscraper/client.rb @@ -26,8 +26,8 @@ module Kickscraper self::process_api_call "project", id_or_slug.to_s end - def search_projects(q, page = nil) - self::process_api_call "projects", "search", "q=" + URI.escape(q), page + def search_projects(query, page = nil) + self::process_api_call "projects", "search", query, page end def ending_soon_projects(deadline_timestamp = nil) @@ -68,14 +68,14 @@ module Kickscraper end - def process_api_call(request_for, additional_path, query_string = "", cursor = nil) + def process_api_call(request_for, additional_path, query = "", cursor = nil) # create the path to the API resource we want - api_path = self::create_api_path(request_for, additional_path, query_string, cursor) + url_and_params = self::create_api_path(request_for, additional_path, query, cursor) # make the api call - response = connection.get(api_path) + response = connection.get(url_and_params[:url], url_and_params[:params]) # handle the response, returning an object with the results @@ -168,39 +168,36 @@ module Kickscraper end - def create_api_path(request_for, additional_path, query_string = "", cursor = nil) + def create_api_path(request_for, additional_path = "", query_string = "", cursor = nil) # start with the base path base_path = "/v1" - full_uri = base_path + url = base_path # set a specific sub path for users and projects case request_for.downcase when "user" - full_uri += "/users" + url += "/users" when "project", "projects" - full_uri += "/projects" + url += "/projects" when "category", "categories" - full_uri += "/categories" + url += "/categories" end # add the additional path if we have it - full_uri += "/" + URI.escape(additional_path) unless additional_path.empty? + url += "/" + CGI.escape(additional_path) unless additional_path.empty? - # add the cursor to the query string if we have it - cursor = cursor.to_i - if cursor > 0 then query_string = query_string.empty? ? "cursor=#{cursor}" : "#{query_string}&cursor=#{cursor}" end + # create the params hash and add the params we want + params = {} + params[:q] = query_string unless query_string.empty? + params[:cursor] = cursor unless cursor.nil? - # add the query string if we have it - full_uri += "?" + query_string unless query_string.empty? - - - # return the final uri - full_uri + # return the url and params + {url: url, params: params} end end end \ No newline at end of file diff --git a/lib/kickscraper/connection.rb b/lib/kickscraper/connection.rb index <HASH>..<HASH> 100644 --- a/lib/kickscraper/connection.rb +++ b/lib/kickscraper/connection.rb @@ -8,7 +8,16 @@ class KSToken < Faraday::Middleware end def call(env) + # replace '+' symbols in the query params with their original spaces, because there + # seems to be a bug in the way some versions of Fararay escape parameters with spaces + env[:url].query_params.each { |key, value| + env[:url].query_params[key] = value.tr('+', ' ') + } + + # add the oauth_token to all requests once we have it env[:url].query_params['oauth_token'] = Kickscraper.token unless Kickscraper.token.nil? 
+ + # make the call @app.call(env) end end diff --git a/spec/client_spec.rb b/spec/client_spec.rb index <HASH>..<HASH> 100644 --- a/spec/client_spec.rb +++ b/spec/client_spec.rb @@ -47,7 +47,7 @@ describe Kickscraper::Client do end it "handles searching for projects with special characters" do - projects = client.search_projects %q{angels & demons with special "characters" and punctuation's n^ight%mare} + projects = client.search_projects %q{"angels" & demons !@#$'%^&*()} projects.length.should be > 0 end
Fixing a bug where Faraday double escaped spaces in the search query (closes #<I>)
markolson_kickscraper
train
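The kickscraper fix above works around query values being escaped twice. A Python illustration of that failure mode (not the Ruby fix itself):

```python
from urllib.parse import quote, quote_plus

# Once a space is encoded as '+', escaping the string again turns it into a literal plus.
q = "angels & demons"
once = quote_plus(q)    # 'angels+%26+demons'
twice = quote(once)     # 'angels%2B%2526%2Bdemons' -- the spaces are now unrecoverable
```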
edbf428ad5752a18e865bc42d7154387a1ea9b28
diff --git a/plotnine/utils.py b/plotnine/utils.py index <HASH>..<HASH> 100644 --- a/plotnine/utils.py +++ b/plotnine/utils.py @@ -623,6 +623,9 @@ def groupby_apply(df, cols, func, *args, **kwargs): as it calls fn twice on the first dataframe. If the nested code also does the same thing, it can be very expensive """ + if df.empty: + return df.copy() + try: axis = kwargs.pop('axis') except KeyError:
Make groupby_apply handle empty data Empty data leads to pd.concat([]) and pd.concat cannot handle an empty list. We have to bail early to avoid that failure.
has2k1_plotnine
train
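The plotnine guard above exists because `pd.concat` cannot take an empty list. A minimal sketch of the shape of the fix (not plotnine's full implementation):

```python
import pandas as pd

def groupby_apply(df, cols, func):
    if df.empty:
        return df.copy()                    # bail early; pd.concat([]) would raise
    pieces = [func(group) for _, group in df.groupby(cols)]
    return pd.concat(pieces, ignore_index=True)
```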
ecb14178120021d96da14c08836b805489c06672
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -372,6 +372,10 @@ function morphdom(fromNode, toNode, options) { delete fromNodesLookup[toElKey]; } + if (toNode.isSameNode(fromNode)) { + return; + } + if (!childrenOnly) { if (onBeforeElUpdated(fromEl, toEl) === false) { return; @@ -399,6 +403,10 @@ function morphdom(fromNode, toNode, options) { curToNodeKey = getNodeKey(curToNodeChild); while (curFromNodeChild) { + if (curToNodeChild.isSameNode(curFromNodeChild)) { + return; + } + curFromNodeKey = getNodeKey(curFromNodeChild); fromNextSibling = curFromNodeChild.nextSibling; diff --git a/test/browser/test.js b/test/browser/test.js index <HASH>..<HASH> 100644 --- a/test/browser/test.js +++ b/test/browser/test.js @@ -726,6 +726,23 @@ function addTests() { expect(el1.querySelector('#skipMeChild') != null).to.equal(true); }); + it('should use isSameNode to allow reference proxies', function() { + var el1 = document.createElement('div'); + el1.innerHTML = 'stay gold'; + var el2 = document.createElement('div'); + el2.innerHTML = 'ponyboy'; + el2.isSameNode = function (el) {return el.isSameNode(el1)}; + morphdom(el1, el2); + expect(el1.innerHTML).to.equal('stay gold'); + + var containEl1 = document.createElement('div'); + containEl1.appendChild(el1); + var containEl2 = document.createElement('div'); + containEl2.appendChild(el2); + morphdom(containEl1, containEl2); + expect(el1.innerHTML).to.equal('stay gold'); + }); + // xit('should reuse DOM element with matching ID and class name (2)', function() { // // NOTE: This test is currently failing. We need to improve the special case code // // for handling incompatible root nodes.
provides a simple mechanism to support using cached nodes and nodes that get augmented outside of the morphdom render cycle
patrick-steele-idem_morphdom
train
12df5879545c337cfd11441d3190a9cbb023b80e
diff --git a/reference.go b/reference.go index <HASH>..<HASH> 100644 --- a/reference.go +++ b/reference.go @@ -106,8 +106,7 @@ func (r *JsonReference) parse(jsonReferenceString string) error { } r.HasFileScheme = r.referenceUrl.Scheme == "file" - - r.HasFullFilePath = strings.HasPrefix(r.GetUrl().Path, "//") + r.HasFullFilePath = strings.HasPrefix(r.GetUrl().Path, "/") r.referencePointer, err = gojsonpointer.NewJsonPointer(r.referenceUrl.Fragment) if err != nil {
fixed bad path on URL parsing (go <I> to go <I> issue)
xeipuuv_gojsonreference
train
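The gojsonreference fix above changes which prefix marks a rooted file path. The same check, illustrated in Python with a made-up path:

```python
path = "/home/user/schema.json"
path.startswith("//")   # False -- the old test missed ordinary absolute paths
path.startswith("/")    # True  -- the corrected test
```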
179f798d18b5f1c4a3dc660ef34a8eb2f1a9dbc9
diff --git a/inc/sass-get-var.js b/inc/sass-get-var.js index <HASH>..<HASH> 100644 --- a/inc/sass-get-var.js +++ b/inc/sass-get-var.js @@ -24,7 +24,7 @@ function gather (variable, node) { module.exports = (variable, node) => { if (!variable || typeof variable !== 'string' || !node || typeof node !== 'object') return null - if (variable.indexOf('$') === 0) variable = variable.substring(1) + if (variable.indexOf('$') === 0) variable = variable.substring(1).split(' ')[0] let value = null let values = gather(variable, node) if (!values) return null diff --git a/inc/sass-has-var.js b/inc/sass-has-var.js index <HASH>..<HASH> 100644 --- a/inc/sass-has-var.js +++ b/inc/sass-has-var.js @@ -8,14 +8,22 @@ // export plugin module.exports = (variable, node) => { - if (!variable || typeof variable !== 'string') { return null; } - if (variable.indexOf('$') === 0) { variable = variable.substring(1); } - let parent = (node.parent) ? node.parent : null, - v = false; - if (parent) { - parent.walkDecls(`$${variable}`, decl => { v = true; }); - if (v) { return true; } - return module.exports(variable, parent); + if (!variable || typeof variable !== "string") { + return null; + } + if (variable.indexOf("$") === 0) { + variable = variable.substring(1).split(" ")[0]; + } + let parent = node.parent ? node.parent : null, + v = false; + if (parent) { + parent.walkDecls(`$${variable}`, decl => { + v = true; + }); + if (v) { + return true; } - return false; -}; \ No newline at end of file + return module.exports(variable, parent); + } + return false; +}; diff --git a/package.json b/package.json index <HASH>..<HASH> 100644 --- a/package.json +++ b/package.json @@ -7,7 +7,7 @@ "postcss", "framework" ], - "version": "2.2.4", + "version": "2.2.5", "license": "MIT", "main": "index.js", "author": {
added better compatibility with sass variables
arpadHegedus_postcss-plugin-utilities
train
0ba8ae05a2c9dba86975bd5b10789acdabc1461d
diff --git a/lib/heroku/command/apps.rb b/lib/heroku/command/apps.rb index <HASH>..<HASH> 100644 --- a/lib/heroku/command/apps.rb +++ b/lib/heroku/command/apps.rb @@ -181,18 +181,18 @@ class Heroku::Command::Apps < Heroku::Command::Base validate_arguments! info = api.post_app({ "name" => name, "stack" => options[:stack] }).body - hprint("Creating #{info["name"]}...") begin - if info["create_status"] == "creating" - Timeout::timeout(options[:timeout].to_i) do - loop do - break if heroku.create_complete?(info["name"]) - hprint(".") - sleep 1 + action("Creating #{info['name']}") do + if info['create_status'] == 'creating' + Timeout::timeout(options[:timeout].to_i) do + loop do + break if heroku.create_complete?(info["name"]) + sleep 1 + end end end + status("stack is #{info['stack']}") end - hputs(" done, stack is #{info["stack"]}") (options[:addons] || "").split(",").each do |addon| addon.strip!
update apps:create to use action helper
heroku_legacy-cli
train
7305fe70cbafb83c960222107074b70ad0ad0310
diff --git a/HISTORY.rst b/HISTORY.rst index <HASH>..<HASH> 100644 --- a/HISTORY.rst +++ b/HISTORY.rst @@ -3,6 +3,12 @@ History ------- +3.2.0 (2014-03-11) +++++++++++++++++++ + +* support for addons + + 3.1.0 (2014-03-03) ++++++++++++++++++ diff --git a/bootstrap3/__init__.py b/bootstrap3/__init__.py index <HASH>..<HASH> 100644 --- a/bootstrap3/__init__.py +++ b/bootstrap3/__init__.py @@ -1 +1 @@ -__version__ = '3.1.0' +__version__ = '3.2.0' diff --git a/bootstrap3/forms.py b/bootstrap3/forms.py index <HASH>..<HASH> 100644 --- a/bootstrap3/forms.py +++ b/bootstrap3/forms.py @@ -112,11 +112,18 @@ def render_field(field, layout='', form_group_class=FORM_GROUP_CLASS, elif isinstance(widget, ClearableFileInput): after_render = fix_clearable_file_input - if (addon_after or addon_before) and (isinstance(widget, TextInput) or isinstance(widget, DateInput) or isinstance(widget, Select)): + # Handle addons + if (addon_before or addon_after) and is_widget_with_addon_support(widget): + if not wrapper: + wrapper = '{content}' before = '<span class="input-group-addon">{addon}</span>'.format(addon=addon_before) if addon_before else '' after = '<span class="input-group-addon">{addon}</span>'.format(addon=addon_after) if addon_after else '' - wrapper = '<div class="input-group">{before}{content}{after}</div>'.format(before=before, after=after, - content='{content}') + content = '<div class="input-group">{before}{content}{after}</div>'.format( + before=before, + after=after, + content='{content}', + ) + wrapper = wrapper.format(content=content) # Get help text field_help = force_text(field.help_text) if show_help and field.help_text else '' @@ -261,6 +268,13 @@ def is_widget_with_placeholder(widget): return isinstance(widget, (TextInput, Textarea)) +def is_widget_with_addon_support(widget): + """ + Is this a widget that supports addons? + """ + return isinstance(widget, (TextInput, DateInput, Select)) + + def list_to_class(klass): def fixer(html): mapping = [
<I>, adds support for addons
dyve_django-bootstrap3
train
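The django-bootstrap3 addon support above nests templates by formatting a wrapper around a `{content}` placeholder that is itself filled in later. A small Python sketch of that trick with simplified markup:

```python
wrapper = '<div class="form-group">{content}</div>'
addon_group = '<div class="input-group"><span class="input-group-addon">@</span>{content}</div>'

# Braces inside a replacement value are inserted literally, so the inner
# '{content}' slot survives the first substitution.
wrapper = wrapper.format(content=addon_group)
html = wrapper.format(content='<input type="text">')
```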
bdd97480f22ad51d3b4b9b08c7600f92c904f408
diff --git a/CHANGES.md b/CHANGES.md index <HASH>..<HASH> 100644 --- a/CHANGES.md +++ b/CHANGES.md @@ -1,4 +1,5 @@ #### [unreleased] +* add `github_updater_unset_auth_header` filter to return array of URL host parts to remove Basic Auth headers #### 9.3.0 / 2020-02-06 * remove GitHub deprecation notice diff --git a/github-updater.php b/github-updater.php index <HASH>..<HASH> 100644 --- a/github-updater.php +++ b/github-updater.php @@ -12,7 +12,7 @@ * Plugin Name: GitHub Updater * Plugin URI: https://github.com/afragen/github-updater * Description: A plugin to automatically update GitHub, Bitbucket, GitLab, or Gitea hosted plugins, themes, and language packs. It also allows for remote installation of plugins or themes into WordPress. - * Version: 9.3.0 + * Version: 9.3.0.1 * Author: Andy Fragen * License: GNU General Public License v2 * License URI: http://www.gnu.org/licenses/gpl-2.0.html diff --git a/src/GitHub_Updater/Traits/Basic_Auth_Loader.php b/src/GitHub_Updater/Traits/Basic_Auth_Loader.php index <HASH>..<HASH> 100644 --- a/src/GitHub_Updater/Traits/Basic_Auth_Loader.php +++ b/src/GitHub_Updater/Traits/Basic_Auth_Loader.php @@ -310,10 +310,18 @@ trait Basic_Auth_Loader { * @return array $args */ public function http_release_asset_auth( $args, $url ) { + $unset_header = false; $arr_url = parse_url( $url ); $aws_host = false !== strpos( $arr_url['host'], 's3.amazonaws.com' ); $github_releases = false !== strpos( $arr_url['path'], 'releases/download' ); - if ( $aws_host || $github_releases ) { + + $unsets = apply_filters( 'github_updater_unset_auth_header', [] ); + foreach ( (array) $unsets as $unset ) { + if ( false !== strpos( $url, $unset ) ) { + $unset_header = true; + } + } + if ( $aws_host || $github_releases || $unset_header ) { unset( $args['headers']['Authorization'] ); } remove_filter( 'http_request_args', [ $this, 'http_release_asset_auth' ] );
use filter to unset user-added URL host
afragen_github-updater
train
35714414a2ab18e7833b39b7e006121fe567ddb9
diff --git a/core/frontier.js b/core/frontier.js index <HASH>..<HASH> 100644 --- a/core/frontier.js +++ b/core/frontier.js @@ -119,11 +119,7 @@ exports.icFrontier = function (config) { // get one level deeper for scalra system servers (monitor, entry...) if (owner === 'scalra') { owner = words[words.length-4]; - - // also obtain proper project name if not for 'demo' project - // TODO: hard-coded now, find a better approach? - //if (project !== 'demo') - // project = words[words.length-3]; + project = words[words.length-3]; } LOG.warn('extracting server info from path... \n[owner]: ' + owner + '\n[project]: ' + project + '\n[name]: ' + name, l_name);
fix: cannot find proper scalra path when deployed on dev server
imonology_scalra
train
fa95270c37a52241d9a54bac5f2a4796d62ae371
diff --git a/contrib/demo1.py b/contrib/demo1.py index <HASH>..<HASH> 100644 --- a/contrib/demo1.py +++ b/contrib/demo1.py @@ -149,10 +149,8 @@ latline_online.printAllElements() # get configuration of 'Q1' print latline_online.getAllConfig(fmt='dict')['Q1'] - eleb1.printConfig() - eleQ1all = latline_online.getElementsByName('Q1') #map(lambda x: x.setStyle(fc='orange'), eleQ1all) @@ -253,8 +251,7 @@ plt.plot(thetaArray, dxArray, 'r') # #### Lattice layout visualization # generate lattice drawing plotting objects -#ptches, xr, yr = latline_online.draw(mode='plain', showfig=False) -ptches, xr, yr = latline_online.draw(mode='fancy', showfig=False) +ptches, anotes, xr, yr = latline_online.draw(mode='fancy', showfig=False) # show drawing at the ax3t = ax3.twinx()
draw() method returns patches, anotes, xyrange
archman_beamline
train
45d02338a694922ed3009b8703b74abcc59dafe1
diff --git a/intake/catalog/entry.py b/intake/catalog/entry.py index <HASH>..<HASH> 100644 --- a/intake/catalog/entry.py +++ b/intake/catalog/entry.py @@ -129,14 +129,14 @@ class CatalogEntry(DictSerialiseMixin): def _ipython_display_(self): """Display the entry as a rich object in an IPython session.""" contents, warning = self._display_content() - display({ + display({ # noqa: F821 'application/json': contents, 'text/plain': pretty_describe(contents) }, metadata={ 'application/json': { 'root': self.name } }, raw=True) if warning: - display(warning) + display(warning) # noqa: F821 def _display_content(self): """Create a dictionary with content to display in reprs."""
Annotate flake8 expected failures.
intake_intake
train
19da4cf8f323a31eec1eb0538a1c0e6a141bd3fd
diff --git a/spock-core/src/main/java/org/spockframework/runtime/extension/builtin/AutoCleanupExtension.java b/spock-core/src/main/java/org/spockframework/runtime/extension/builtin/AutoCleanupExtension.java index <HASH>..<HASH> 100644 --- a/spock-core/src/main/java/org/spockframework/runtime/extension/builtin/AutoCleanupExtension.java +++ b/spock-core/src/main/java/org/spockframework/runtime/extension/builtin/AutoCleanupExtension.java @@ -35,7 +35,7 @@ public class AutoCleanupExtension extends AbstractAnnotationDrivenExtension<Auto @Override public void visitSpec(SpecInfo spec) { - sharedFieldInterceptor.install(spec.getTopSpec().getCleanupSpecMethod()); - instanceFieldInterceptor.install(spec.getTopSpec().getCleanupMethod()); + sharedFieldInterceptor.install(spec.getBottomSpec().getCleanupSpecMethod()); + instanceFieldInterceptor.install(spec.getBottomSpec().getCleanupMethod()); } }
fields now @AutoCleanup'ed after cleanup(Spec) of subclass (rather than base class) has run
spockframework_spock
train
391ffcf108e556f6426c4753a5309c8cdab4b3ad
diff --git a/test/test.js b/test/test.js index <HASH>..<HASH> 100644 --- a/test/test.js +++ b/test/test.js @@ -48,12 +48,39 @@ test('single entry chunk', async t => { const appDistJs = readFileSync(join(out, 'app-dist.js'), 'utf8'); t.regex(mainDistHtml, /^<!DOCTYPE html>/, 'no prelude'); + t.notRegex(mainDistHtml, /;/, 'no semicolons'); t.regex(mainDistHtml, /<script src="app-dist\.js"><\/script>/, 'references app-dist.js'); t.regex(appDistJs, /\bfunction __webpack_require__\b/, 'has prelude'); t.regex(appDistJs, /module\.exports = 'this should not be imported';/, 'has exports'); }); +test('named single entry', async t => { + const out = randomPath(); + + await runWebpack({ + entry: { + other: join(__dirname, 'src/other.html') + }, + output: { + path: out, + filename: '[name]-dist.html' + }, + module: { + rules: [ + { test: /\.html$/, use: ['extricate-loader', 'html-loader'] }, + { test: /\.jpg$/, use: 'file-loader?name=[name]-dist.[ext]' } + ] + }, + }); + + const otherDistHtml = readFileSync(join(out, 'other-dist.html'), 'utf8'); + + t.regex(otherDistHtml, /^<!DOCTYPE html>/, 'no prelude'); + t.notRegex(otherDistHtml, /;/, 'no semicolons'); + t.regex(otherDistHtml, /<img src="hi-dist\.jpg"\/>/, 'references hi-dist.jpg'); +}); + test('multiple entry chunks', async t => { const out = randomPath(); @@ -81,9 +108,11 @@ test('multiple entry chunks', async t => { const appDistJs = readFileSync(join(out, 'app-dist.js'), 'utf8'); t.regex(oneDistHtml, /^<!DOCTYPE html>/, 'no prelude'); + t.notRegex(oneDistHtml, /;/, 'no semicolons'); t.regex(oneDistHtml, /<script src="app-dist\.js"><\/script>/, 'references app-dist.js'); t.regex(twoDistHtml, /^<!DOCTYPE html>/, 'no prelude'); + t.notRegex(twoDistHtml, /;/, 'no semicolons'); t.regex(twoDistHtml, /<img src="hi-dist\.jpg"\/>/, 'references hi-dist.jpg'); t.truthy(hiDistJpg, 'non-empty'); @@ -113,35 +142,13 @@ test('single entry chunk though function', async t => { const appDistJs = readFileSync(join(out, 'app-dist.js'), 'utf8'); t.regex(mainDistHtml, /^<!DOCTYPE html>/, 'no prelude'); + t.notRegex(mainDistHtml, /;/, 'no semicolons'); t.regex(mainDistHtml, /<script src="app-dist\.js"><\/script>/, 'references app-dist.js'); t.regex(appDistJs, /\bfunction __webpack_require__\b/, 'has prelude'); t.regex(appDistJs, /module\.exports = 'this should not be imported';/, 'has exports'); }); -test('substituting [name] instead of [chunkname]', async t => { - const out = randomPath(); - - await runWebpack({ - entry: join(__dirname, 'src/other.html'), - output: { - path: out, - filename: '[name]-dist.html' - }, - module: { - rules: [ - { test: /\.html$/, use: ['extricate-loader', 'html-loader'] }, - { test: /\.jpg$/, use: 'file-loader?name=[name]-dist.[ext]' } - ] - }, - }); - - const otherDistHtml = readFileSync(join(out, 'other-dist.html'), 'utf8'); - - t.regex(otherDistHtml, /^<!DOCTYPE html>/, 'no prelude'); - t.regex(otherDistHtml, /<img src="hi-dist\.jpg"\/>/, 'references hi-dist.jpg'); -}); - test.after(t => { rimraf.sync(join(__dirname, 'dist')); });
tests: check for no semicolons, move now-redundant "use [name]" test
erikdesjardins_inert-entry-webpack-plugin
train
156b7888ba0004c8df05e288fe3049805265b282
diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/Button.java b/src/main/java/com/redhat/darcy/ui/api/elements/Button.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/Button.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/Button.java @@ -19,5 +19,5 @@ package com.redhat.darcy.ui.api.elements; -public interface Button extends Clickable { +public interface Button extends Clickable, Element { } diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/Disableable.java b/src/main/java/com/redhat/darcy/ui/api/elements/Disableable.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/Disableable.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/Disableable.java @@ -1,5 +1,5 @@ package com.redhat.darcy.ui.api.elements; -public interface Disableable extends Element { +public interface Disableable { boolean isEnabled(); } diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/FileSelect.java b/src/main/java/com/redhat/darcy/ui/api/elements/FileSelect.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/FileSelect.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/FileSelect.java @@ -24,7 +24,7 @@ package com.redhat.darcy.ui.api.elements; * specific dialog. Specifying a FileSelect element tells the underlying automation library to * anticipate that behavior. */ -public interface FileSelect extends HasValue, Disableable { +public interface FileSelect extends HasValue, Disableable, Element { void setFilePath(String path); void clear(); } diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/HasValue.java b/src/main/java/com/redhat/darcy/ui/api/elements/HasValue.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/HasValue.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/HasValue.java @@ -19,6 +19,6 @@ package com.redhat.darcy.ui.api.elements; -public interface HasValue extends Element { +public interface HasValue { String readValue(); } diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/Link.java b/src/main/java/com/redhat/darcy/ui/api/elements/Link.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/Link.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/Link.java @@ -19,6 +19,6 @@ package com.redhat.darcy.ui.api.elements; -public interface Link extends Clickable { +public interface Link extends Clickable, Element { String getLinkText(); } diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/Select.java b/src/main/java/com/redhat/darcy/ui/api/elements/Select.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/Select.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/Select.java @@ -24,7 +24,7 @@ import com.redhat.darcy.ui.api.Locator; import java.util.List; import java.util.Optional; -public interface Select extends Disableable { +public interface Select extends Disableable, Element { void select(Locator locator); List<SelectOption> getOptions(); Optional<SelectOption> getCurrentlySelectedOption(); diff --git a/src/main/java/com/redhat/darcy/ui/api/elements/TextInput.java b/src/main/java/com/redhat/darcy/ui/api/elements/TextInput.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/redhat/darcy/ui/api/elements/TextInput.java +++ b/src/main/java/com/redhat/darcy/ui/api/elements/TextInput.java @@ -19,7 +19,7 @@ package com.redhat.darcy.ui.api.elements; -public interface TextInput extends Clickable, HasValue { 
+public interface TextInput extends Clickable, HasValue, Element { void clearAndType(String stringToType); void sendKeys(CharSequence... keysToSend); void clear();
Don't extend Element on purely role interfaces "HasValue" is not really an "Element," and making it not an Element means implementations are no longer encouraged to handle clients asking for an element of type "HasValue" or any of the other mere role interfaces like Disableable or Clickable.
darcy-framework_darcy-ui
train
484cfd6f0d060c1e3d3e67072f7f15821032d838
diff --git a/indra/db/database_manager.py b/indra/db/database_manager.py index <HASH>..<HASH> 100644 --- a/indra/db/database_manager.py +++ b/indra/db/database_manager.py @@ -371,8 +371,10 @@ class DatabaseManager(object): # 1. fast_raw_pa_link # 2. evidence_counts # 3. pa_meta - # The view, reading_ref_link, may be built at any point, as it has no - # relation to the above views. + # The following can be built at any time and in any order: + # - reading_ref_link + # Note that the order of views below is determined not by the above + # order but by constraints imposed by use-case. self.m_views = {}
Tweak the documentation a bit more.
sorgerlab_indra
train
91295546c20222ba2e204e7b07e2051a592e3412
diff --git a/Neos.Flow/Classes/Mvc/Routing/Dto/UriConstraints.php b/Neos.Flow/Classes/Mvc/Routing/Dto/UriConstraints.php index <HASH>..<HASH> 100644 --- a/Neos.Flow/Classes/Mvc/Routing/Dto/UriConstraints.php +++ b/Neos.Flow/Classes/Mvc/Routing/Dto/UriConstraints.php @@ -236,13 +236,13 @@ final class UriConstraints $originalHost = $host = !empty($uri->getHost()) ? $uri->getHost() : $templateUri->getHost(); $prefix = $this->constraints[self::CONSTRAINT_HOST_PREFIX]['prefix']; $replacePrefixes = $this->constraints[self::CONSTRAINT_HOST_PREFIX]['replacePrefixes']; - foreach ($replacePrefixes as $replacePrefix) { - if ($this->stringStartsWith($host, $replacePrefix)) { - $host = substr($host, strlen($replacePrefix)); - break; - } + + if ($replacePrefixes === []) { + $host = $prefix . $host; + } else { + $regex = '/^(' . implode('|', array_map('preg_quote', $replacePrefixes)) . ')/'; + $host = preg_replace($regex, $prefix, $host); } - $host = $prefix . $host; if ($host !== $originalHost) { $forceAbsoluteUri = true; $uri = $uri->withHost($host); @@ -256,12 +256,8 @@ final class UriConstraints if ($replaceSuffixes === []) { $host .= $suffix; } else { - foreach ($replaceSuffixes as $replaceSuffix) { - if ($this->stringEndsWith($host, $replaceSuffix)) { - $host = preg_replace(sprintf('/%s$/i', $replaceSuffix), $suffix, $host); - break; - } - } + $regex = '/(' . implode('|', array_map('preg_quote', $replaceSuffixes)) . ')$/'; + $host = preg_replace($regex, $suffix, $host); } if ($host !== $originalHost) { $forceAbsoluteUri = true; @@ -297,28 +293,4 @@ final class UriConstraints return $uri; } - - /** - * Whether the given $string starts with the specified $prefix - * - * @param string $string - * @param string $prefix - * @return bool - */ - private function stringStartsWith(string $string, string $prefix): bool - { - return substr($string, 0, strlen($prefix)) === $prefix; - } - - /** - * Whether the given $string ends with the specified $suffix - * - * @param string $string - * @param string $suffix - * @return bool - */ - private function stringEndsWith(string $string, string $suffix): bool - { - return substr($string, -strlen($suffix)) === $suffix; - } }
TASK: Adjust replacement use, adjust prefix handling Takes a (fixed) version of the replacement use suggested by @aberl, adjusts the prefix replacement the same way as the suffix replacement and removes methods that are no longer needed.
neos_flow-development-collection
train
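The Flow change above replaces a startswith/endswith loop with a single anchored alternation built from the candidate prefixes. A rough Python equivalent (hypothetical helper, not the Flow API):

```python
import re

def apply_host_prefix(host, prefix, replace_prefixes):
    if not replace_prefixes:
        return prefix + host
    pattern = "^(" + "|".join(re.escape(p) for p in replace_prefixes) + ")"
    return re.sub(pattern, prefix, host)

apply_host_prefix("www.example.com", "staging.", ["www.", "live."])   # 'staging.example.com'
apply_host_prefix("example.com", "staging.", [])                      # 'staging.example.com'
```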
536b67c24b2f56879f8e9c25e22a2a8055ac6a0e
diff --git a/django_core/models.py b/django_core/models.py index <HASH>..<HASH> 100644 --- a/django_core/models.py +++ b/django_core/models.py @@ -29,7 +29,13 @@ class AbstractBaseModel(models.Model): class Meta: abstract = True - ordering = ['-created_dttm'] + # Default ordering is by id instead of created_dttm for 2 reasons: + # 1) id is indexed (primary key) + # 2) it's safer than created_dttm since created_dttm could be the same + # value which would lead to inconsistent ording in responses from + # queries. This works because id is an integer field that's auto + # incremented. + ordering = ('-id',) def __unicode__(self): return unicode(self.id) @@ -40,8 +46,8 @@ class AbstractBaseModel(models.Model): def save(self, *args, **kwargs): """Optional kwargs: - - id_length: the length of characters to use for the id. Default - is 10. + * id_length: the length of characters to use for the id. Default + is 10. """ self.__class__.save_prep(self) return super(AbstractBaseModel, self).save(*args, **kwargs)
added doc and changed the default ordering to be by -id.
InfoAgeTech_django-core
train
34e9c7ef9667f2a6aef02e74a6657dd1503af993
diff --git a/Library/Compiler.php b/Library/Compiler.php index <HASH>..<HASH> 100755 --- a/Library/Compiler.php +++ b/Library/Compiler.php @@ -457,13 +457,14 @@ class Compiler */ $needConfigure = $this->createConfigFiles($namespace); $needConfigure |= $this->createProjectFiles($namespace); + $needConfigure |= $this->checkIfPhpized($namespace); /** * Round 5. Generate the concatenation cubrid_error_code(oid) */ $this->_stringManager->genConcatCode(); - return $needConfigure; + return $needConfigure; } /** @@ -481,6 +482,7 @@ class Compiler $needConfigure = $this->generate($command); if ($needConfigure) { + exec('cd ext && make clean', $output, $exit); $this->_logger->output('Preparing for PHP compilation...'); @@ -897,6 +899,11 @@ class Compiler return $needConfigure; } + public function checkIfPhpized($namespace) + { + return !file_exists('ext/Makefile'); + } + /** * Returns a short path *
Checking if project needs to be phpized [ci skip]
phalcon_zephir
train
7aeb715bcc831aafd4ca49c82f0ec26471eeed21
diff --git a/packages/material-ui/src/Dialog/Dialog.js b/packages/material-ui/src/Dialog/Dialog.js index <HASH>..<HASH> 100644 --- a/packages/material-ui/src/Dialog/Dialog.js +++ b/packages/material-ui/src/Dialog/Dialog.js @@ -95,7 +95,7 @@ const DialogPaper = styled(Paper, { return [ styles.paper, styles[`scrollPaper${capitalize(styleProps.scroll)}`], - styles[`paperWidth${capitalize(String(styleProps.maxWidth))})`], + styles[`paperWidth${capitalize(String(styleProps.maxWidth))}`], styleProps.fullWidth && styles.paperFullWidth, styleProps.fullScreen && styles.paperFullScreen, ];
[Dialog] Fix override paper styles (#<I>)
mui-org_material-ui
train
2be2dc993b95905b97465100892accde82885ab8
diff --git a/docs/change_log.rst b/docs/change_log.rst index <HASH>..<HASH> 100644 --- a/docs/change_log.rst +++ b/docs/change_log.rst @@ -1,6 +1,13 @@ 更新记录 =========================== +2018.6.6 v1.0.14 +--------------------------- + +* issue ID use directly on github +* 36, common, edit function :meth:`fish_common.check_str()`, optimize, doc and unittest; + + 2018.5.30 v1.0.13 --------------------------- @@ -13,7 +20,6 @@ * 19043, common, edit function :meth:`fish_common.sorted_list_from_dict()`, optimize, doc and unittest; * 19044, file, remove ``auto_add_file_ext()``; * 19045, file, remove ``get_abs_filename_with_sub_path_module()``; -* 19046, common, edit function :meth:`fish_common.check_str()`, optimize, doc and unittest; 2018.5.21 v1.0.12 --------------------------- diff --git a/docs/conf.py b/docs/conf.py index <HASH>..<HASH> 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -57,9 +57,9 @@ author = 'David Yi' # built documents. # # The short X.Y version. -version = '1.0.12' +version = '1.0.14' # The full version, including alpha/beta/rc tags. -release = '1.0.12' +release = '1.0.14' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. diff --git a/fishbase/fish_common.py b/fishbase/fish_common.py index <HASH>..<HASH> 100644 --- a/fishbase/fish_common.py +++ b/fishbase/fish_common.py @@ -491,7 +491,7 @@ def sorted_list_from_dict(p_dict, order=odASC): return o_list[::-1] -# v1.0.13 #19046, edit by David Yi, edit by Hu Jun +# v1.0.13 #36, edit by David Yi, edit by Hu Jun def check_str(p_str, check_style=charChinese): """ 检查字符串是否含有指定类型字符 diff --git a/fishbase/fish_file.py b/fishbase/fish_file.py index <HASH>..<HASH> 100644 --- a/fishbase/fish_file.py +++ b/fishbase/fish_file.py @@ -33,8 +33,8 @@ def get_abs_filename_with_sub_path(sub_path, filename): * abs_filename: (string) 指定 filename 的包含路径的长文件名 举例如下:: - - print('get_abs_filename_with_sub_path') + + print('--- get_abs_filename_with_sub_path demo ---') # define sub dir path_name = 'sub_dir' # define not exists file @@ -49,12 +49,15 @@ def get_abs_filename_with_sub_path(sub_path, filename): abs_filename = get_abs_filename_with_sub_path(path_name, filename) # return True and abs filename print(abs_filename) + print('---') 输出结果:: - + + --- get_abs_filename_with_sub_path demo --- (False, '/Users/****/Documents/dev_python/fishbase/demo/sub_dir/test_file.txt') (True, '/Users/****/Documents/dev_python/fishbase/demo/sub_dir/demo.txt') - + --- + """ try: @@ -99,18 +102,23 @@ def check_sub_path_create(sub_path): * False: 路径不存在,True: 创建成功 举例如下:: - + + print('--- check_sub_path_create demo ---') # 定义子路径名称 sub_path = 'demo_sub_dir' # 检查当前路径下的一个子路径是否存在,不存在则创建 print('check sub path:', sub_path) result = check_sub_path_create(sub_path) print(result) + print('---') 输出结果:: - + + --- check_sub_path_create demo --- check sub path: demo_sub_dir (True, False) + --- + """ # 获得当前路径
<I> <I> update issue ID change to use github, version number in the document change to <I>
chinapnr_fishbase
train
7647c3bc340f0058fb437daacdccb1089486a6f0
diff --git a/tests/SystemTest.php b/tests/SystemTest.php index <HASH>..<HASH> 100644 --- a/tests/SystemTest.php +++ b/tests/SystemTest.php @@ -24,7 +24,7 @@ class SystemTest extends PHPUnit_Framework_TestCase public function testTheSystem() { // Don't run system tests on Travis - if (getenv('TRAVIS') === true) { + if (getenv('TRAVIS') == "true") { $this->markTestSkipped('This test should not run if on Travis.'); } @@ -103,7 +103,7 @@ class SystemTest extends PHPUnit_Framework_TestCase // Test the counts $counts = $profile->getCounts(); $this->assertEquals(1, $counts['habbo']); - $this->assertEquals(14, $counts['badges']); + $this->assertEquals(15, $counts['badges']); $this->assertEquals(1, $counts['friends']); $this->assertEquals(1, $counts['groups']); $this->assertEquals(2, $counts['rooms']);
fixes SystemTest and skipping feature for Travis
gerbenjacobs_HabboAPI
train
8db1470818c3ffbf16bed89fbf4be46a84f37576
diff --git a/core/interpreter/src/main/java/org/overture/interpreter/runtime/Context.java b/core/interpreter/src/main/java/org/overture/interpreter/runtime/Context.java index <HASH>..<HASH> 100644 --- a/core/interpreter/src/main/java/org/overture/interpreter/runtime/Context.java +++ b/core/interpreter/src/main/java/org/overture/interpreter/runtime/Context.java @@ -74,6 +74,7 @@ public class Context extends LexNameTokenMap<Value> public Context(IInterpreterAssistantFactory af,ILexLocation location, String title, Context outer) { + super(af.getLexNameTokenAssistant()); this.assistantFactory = af; this.location = location; this.outer = outer;
Thread LexNameToken Assistant into LexNameMap
overturetool_overture
train
1f3ebc854118b7f9210a2c60275695468f54c91a
diff --git a/PyFunceble/abstracts/infrastructure.py b/PyFunceble/abstracts/infrastructure.py index <HASH>..<HASH> 100644 --- a/PyFunceble/abstracts/infrastructure.py +++ b/PyFunceble/abstracts/infrastructure.py @@ -118,3 +118,10 @@ class Infrastructure: :type: str """ + + REPO_LINK = "https://git.io/vpZoI" + """ + Sets the link to the repository. + + :type: str + """ diff --git a/PyFunceble/abstracts/package.py b/PyFunceble/abstracts/package.py index <HASH>..<HASH> 100644 --- a/PyFunceble/abstracts/package.py +++ b/PyFunceble/abstracts/package.py @@ -73,7 +73,7 @@ class Package: :type: str """ - VERSION = "3.0.11.dev (Teal Blauwbok)" + VERSION = "3.0.12.dev (Teal Blauwbok)" """ Sets the package version. diff --git a/PyFunceble/output/prints.py b/PyFunceble/output/prints.py index <HASH>..<HASH> 100644 --- a/PyFunceble/output/prints.py +++ b/PyFunceble/output/prints.py @@ -216,8 +216,10 @@ class Prints: # * The given output does not exist. # We initiate the information about what generated the file. - link = "# File generated by {0} (v{1}) / {2}\n".format( - PyFunceble.NAME, PyFunceble.VERSION.split()[0], PyFunceble.LINKS.repo + link = "# Generated by {0} (v{1}) / {2}\n".format( + PyFunceble.NAME, + PyFunceble.VERSION.split()[0], + PyFunceble.abstracts.Infrastructure.REPO_LINK, ) # We initiate the information about the generation date of this file. diff --git a/tests/test_output_prints.py b/tests/test_output_prints.py index <HASH>..<HASH> 100644 --- a/tests/test_output_prints.py +++ b/tests/test_output_prints.py @@ -126,7 +126,7 @@ class TestPrints(StdoutBase): patch("PyFunceble.output.prints.datetime", new=datetime_patch).start() # pylint: disable=line-too-long - expected = f"""# File generated by {PyFunceble.NAME} (v{PyFunceble.VERSION.split()[0]}) / {PyFunceble.LINKS.repo} + expected = f"""# Generated by {PyFunceble.NAME} (v{PyFunceble.VERSION.split()[0]}) / {PyFunceble.abstracts.Infrastructure.REPO_LINK} # Date of generation: {datetime_patch.now().isoformat()} """ @@ -145,7 +145,7 @@ class TestPrints(StdoutBase): self.assertEqual(expected, actual) # pylint: disable=line-too-long - expected = f"""# File generated by {PyFunceble.NAME} (v{PyFunceble.VERSION.split()[0]}) / {PyFunceble.LINKS.repo} + expected = f"""# Generated by {PyFunceble.NAME} (v{PyFunceble.VERSION.split()[0]}) / {PyFunceble.abstracts.Infrastructure.REPO_LINK} # Date of generation: {datetime_patch.now().isoformat()} Hello World diff --git a/version.yaml b/version.yaml index <HASH>..<HASH> 100644 --- a/version.yaml +++ b/version.yaml @@ -1,4 +1,4 @@ -current_version: 3.0.11.dev (Teal Blauwbok) +current_version: 3.0.12.dev (Teal Blauwbok) deprecated: - 1.0.0 - 1.2.1
Reduce length of the first line in all the outputed files. Contributors: * @spirillen
funilrys_PyFunceble
train
6055bbedaa4b7b4bb2377ac87147196eebb2edc1
diff --git a/activerecord/lib/active_record/associations/association_collection.rb b/activerecord/lib/active_record/associations/association_collection.rb index <HASH>..<HASH> 100644 --- a/activerecord/lib/active_record/associations/association_collection.rb +++ b/activerecord/lib/active_record/associations/association_collection.rb @@ -314,7 +314,11 @@ module ActiveRecord transaction do delete(@target - other_array) - concat(other_array - @target) + + unless concat(other_array - @target) + raise RecordNotSaved, "Failed to replace #{@reflection.name} because one or more of the " + "new records could not be saved." + end end end diff --git a/activerecord/test/cases/associations/has_many_associations_test.rb b/activerecord/test/cases/associations/has_many_associations_test.rb index <HASH>..<HASH> 100644 --- a/activerecord/test/cases/associations/has_many_associations_test.rb +++ b/activerecord/test/cases/associations/has_many_associations_test.rb @@ -975,6 +975,19 @@ class HasManyAssociationsTest < ActiveRecord::TestCase assert !firm.clients.include?(:first_client) end + def test_replace_failure + firm = companies(:first_firm) + account = Account.new + orig_accounts = firm.accounts.to_a + + assert !account.valid? + assert !orig_accounts.empty? + assert_raise ActiveRecord::RecordNotSaved do + firm.accounts = [account] + end + assert_equal orig_accounts, firm.accounts + end + def test_get_ids assert_equal [companies(:first_client).id, companies(:second_client).id], companies(:first_firm).client_ids end
Raise ActiveRecord::RecordNotSaved if an AssociationCollection fails to be replaced
rails_rails
train
1888e48a34e9c5d62d73ffbf89f45489a239af7d
diff --git a/src/utils.js b/src/utils.js index <HASH>..<HASH> 100644 --- a/src/utils.js +++ b/src/utils.js @@ -23,7 +23,7 @@ module.exports = { return _.startCase(str); }, - slugify: str => _.kebabCase(_.deburr(str)).toLowerCase(), + slugify: str => _.deburr(str).replace(/\s+/g, '-').toLowerCase(), toJSON(item) { const obj = {};
fix(components): prevent underscores in component names getting converted to dashes closes #<I>
frctl_fractal
train
e46c560064021840c811ba0ab3713a7229f0a544
diff --git a/para-server/src/main/java/com/erudika/para/Para.java b/para-server/src/main/java/com/erudika/para/Para.java
index <HASH>..<HASH> 100644
--- a/para-server/src/main/java/com/erudika/para/Para.java
+++ b/para-server/src/main/java/com/erudika/para/Para.java
@@ -137,8 +137,7 @@ public final class Para {
 				exec.awaitTermination(60, TimeUnit.SECONDS);
 			}
 			if (!execAt.isShutdown()) {
-				execAt.shutdown();
-				execAt.awaitTermination(60, TimeUnit.SECONDS);
+				execAt.shutdownNow();
 			}
 		} catch (Exception e) {
 			logger.error(null, e);
diff --git a/para-server/src/main/java/com/erudika/para/search/ElasticSearch.java b/para-server/src/main/java/com/erudika/para/search/ElasticSearch.java
index <HASH>..<HASH> 100644
--- a/para-server/src/main/java/com/erudika/para/search/ElasticSearch.java
+++ b/para-server/src/main/java/com/erudika/para/search/ElasticSearch.java
@@ -17,6 +17,7 @@
  */
 package com.erudika.para.search;
 
+import com.erudika.para.Para;
 import com.erudika.para.core.Address;
 import com.erudika.para.core.ParaObject;
 import com.erudika.para.core.utils.ParaObjectUtils;
@@ -352,7 +353,6 @@ public class ElasticSearch implements Search {
 			return Collections.emptyList();
 		}
 		QueryBuilder qb = QueryBuilders.wildcardQuery("tag", keyword.concat("*"));
-//		SortBuilder sb = SortBuilders.fieldSort("count").order(SortOrder.DESC);
 		return searchQuery(appid, Utils.type(Tag.class), qb, pager);
 	}
 
@@ -404,13 +404,14 @@ public class ElasticSearch implements Search {
 	 * @param hits the search results from a query
 	 * @return the list of object found
 	 */
-	private <P extends ParaObject> List<P> searchQuery(String appid, SearchHits hits) {
+	private <P extends ParaObject> List<P> searchQuery(final String appid, SearchHits hits) {
 		if (hits == null) {
 			return Collections.emptyList();
 		}
 		ArrayList<P> results = new ArrayList<P>(hits.getHits().length);
 		ArrayList<String> keys = new ArrayList<String>(hits.getHits().length);
-		boolean readFromIndex = Config.getConfigBoolean("read_from_index", false);
+		boolean readFromIndex = Config.getConfigBoolean("read_from_index", Config.ENVIRONMENT.equals("embedded"));
+		boolean restoreFromIndex = Config.getConfigBoolean("restore_from_index", false);
 		try {
 			for (SearchHit hit : hits) {
 				keys.add(hit.getId());
@@ -421,25 +422,39 @@ public class ElasticSearch implements Search {
 			}
 
 			if (!readFromIndex && !keys.isEmpty()) {
+				final ArrayList<P> restoreUs = new ArrayList<P>(results.size());
+				ArrayList<String> nullz = new ArrayList<String>(results.size());
 				Map<String, P> fromDB = dao.readAll(appid, keys, true);
-				if (!fromDB.isEmpty()) {
-					results.addAll(fromDB.values());
+				int i = 0;
+				for (Map.Entry<String, P> row : fromDB.entrySet()) {
+					String key = row.getKey();
+					P pobj = row.getValue();
+					if (pobj == null) {
+						// object is still in index but not in DB
+						nullz.add(key);
+						if (restoreFromIndex && hits.getAt(i) != null) {
+							pobj = ParaObjectUtils.setAnnotatedFields(hits.getAt(i).getSource());
+							restoreUs.add(pobj);
+						}
+					} else {
+						results.add(pobj);
+					}
+					i++;
 				}
-			}
 
-			int sizeBefore = results.size();
-			ArrayList<String> nullz = new ArrayList<String>();
-			for (int i = 0; i < results.size(); i++) {
-				if (results.get(i) == null) {
-					nullz.add(keys.get(i));
+				if (!nullz.isEmpty()) {
+					logger.warn("Found {} objects that are indexed but no longer exist in the database. Ids: {}",
+						nullz.size(), nullz);
 				}
-			}
-			results.removeAll(Collections.singleton(null));
-			int sizeAfter = results.size();
-			if (sizeBefore > (sizeAfter + 1)) {
-				logger.warn("Found {} objects that are indexed but no longer exist in the database. Ids: {}",
-						sizeBefore - sizeAfter, nullz);
+				if (!restoreUs.isEmpty()) {
+					Para.asyncExecute(new Runnable() {
+						public void run() {
+							dao.createAll(appid, restoreUs);
+							logger.info("Restored {} objects from index to the database.", restoreUs.size());
+						}
+					});
+				}
 			}
 			logger.debug("Search.searchQuery() {}", results.size());
 		} catch (Exception e) {
rewrote searchQuery() to detect and restore objects from index
Erudika_para
train
c2d2a294c55edb549592b961734f91c28b8aef22
diff --git a/src/Sonata/OrderBundle/Entity/BaseOrder.php b/src/Sonata/OrderBundle/Entity/BaseOrder.php index <HASH>..<HASH> 100644 --- a/src/Sonata/OrderBundle/Entity/BaseOrder.php +++ b/src/Sonata/OrderBundle/Entity/BaseOrder.php @@ -206,7 +206,7 @@ abstract class BaseOrder implements OrderInterface */ public function __toString() { - return $this->getReference(); + return $this->getReference() ?: 'n/a'; } /** diff --git a/src/Sonata/OrderBundle/Entity/BaseOrderElement.php b/src/Sonata/OrderBundle/Entity/BaseOrderElement.php index <HASH>..<HASH> 100644 --- a/src/Sonata/OrderBundle/Entity/BaseOrderElement.php +++ b/src/Sonata/OrderBundle/Entity/BaseOrderElement.php @@ -504,7 +504,7 @@ abstract class BaseOrderElement implements OrderElementInterface */ public function __toString() { - return $this->getDesignation(); + return $this->getDesignation() ?: 'n/a'; } /**
fix Order __toString method
sonata-project_ecommerce
train
7c77db8d3c0ed1de6777a484f52e1477d8c8113b
diff --git a/panicwrap.go b/panicwrap.go index <HASH>..<HASH> 100644 --- a/panicwrap.go +++ b/panicwrap.go @@ -3,9 +3,8 @@ package bugsnag import ( + "github.com/ConradIrwin/panicwrap" "github.com/bugsnag/bugsnag-go/errors" - "github.com/mitchellh/panicwrap" - "os" ) // NOTE: this function does not return when you call it, instead it @@ -13,28 +12,16 @@ import ( func defaultPanicHandler() { defer defaultNotifier.dontPanic() - exitStatus, err := panicwrap.Wrap(&panicwrap.WrapConfig{ - CookieKey: "bugsnag_wrapped", - CookieValue: "bugsnag_wrapped", - Handler: func(output string) { + err := panicwrap.BasicMonitor(func(output string) { + toNotify, err := errors.ParsePanic(output) - toNotify, err := errors.ParsePanic(output) - - if err != nil { - defaultNotifier.Config.log("bugsnag.handleUncaughtPanic: %v", err) - } - Notify(toNotify, SeverityError, Configuration{Synchronous: true}) - }, + if err != nil { + defaultNotifier.Config.log("bugsnag.handleUncaughtPanic: %v", err) + } + Notify(toNotify, SeverityError, Configuration{Synchronous: true}) }) if err != nil { defaultNotifier.Config.log("bugsnag.handleUncaughtPanic: %v", err) - return - } - - if exitStatus >= 0 { - os.Exit(exitStatus) - } else { - return } } diff --git a/panicwrap_test.go b/panicwrap_test.go index <HASH>..<HASH> 100644 --- a/panicwrap_test.go +++ b/panicwrap_test.go @@ -22,13 +22,7 @@ func TestPanicHandler(t *testing.T) { // Use the same trick as panicwrap() to re-run ourselves. // In the init() block below, we will then panic. cmd := exec.Command(exePath, os.Args[1:]...) - cmd.Env = append(os.Environ(), "BUGSNAG_API_KEY="+testAPIKey, "BUGSNAG_ENDPOINT="+testEndpoint) - - for i := range cmd.Env { - if cmd.Env[i] == "bugsnag_wrapped=bugsnag_wrapped" { - cmd.Env[i] = "please_panic=please_panic" - } - } + cmd.Env = append(os.Environ(), "BUGSNAG_API_KEY="+testAPIKey, "BUGSNAG_ENDPOINT="+testEndpoint, "please_panic=please_panic") if err = cmd.Start(); err != nil { t.Fatal(err)
Upgrade panicwrap to use Monitor where possible This lets signal handling continue to work correctly, which is a significant advantage for running golang processes under supervision.
bugsnag_bugsnag-go
train
daaca3e80d5a2a1d61168fda5fe7ed21b05a0e4f
diff --git a/src/Interfaces/Uri.php b/src/Interfaces/Uri.php index <HASH>..<HASH> 100644 --- a/src/Interfaces/Uri.php +++ b/src/Interfaces/Uri.php @@ -44,16 +44,6 @@ interface Uri public function getScheme(); /** - * Retrieve the scheme specific part of the URI. - * - * If no specific part information is present, this method MUST return an empty - * string. - * - * @return string The URI authority, in "[user-info@]host[:port]" format. - */ - public function getSchemeSpecificPart(); - - /** * Retrieve the authority component of the URI. * * If no authority information is present, this method MUST return an empty diff --git a/src/Schemes/Generic/AbstractUri.php b/src/Schemes/Generic/AbstractUri.php index <HASH>..<HASH> 100644 --- a/src/Schemes/Generic/AbstractUri.php +++ b/src/Schemes/Generic/AbstractUri.php @@ -281,11 +281,15 @@ abstract class AbstractUri return $this->userInfo->getUriComponent().$this->host->getUriComponent().$port; } - /** - * {@inheritdoc} + * Retrieve the scheme specific part of the URI. + * + * If no specific part information is present, this method MUST return an empty + * string. + * + * @return string The URI authority, in "[user-info@]host[:port]" format. */ - public function getSchemeSpecificPart() + protected function getSchemeSpecificPart() { $auth = $this->getAuthority(); if (!empty($auth)) { diff --git a/src/Schemes/Generic/AuthorityValidatorTrait.php b/src/Schemes/Generic/AuthorityValidatorTrait.php index <HASH>..<HASH> 100644 --- a/src/Schemes/Generic/AuthorityValidatorTrait.php +++ b/src/Schemes/Generic/AuthorityValidatorTrait.php @@ -25,7 +25,7 @@ trait AuthorityValidatorTrait /** * {@inheritdoc} */ - abstract public function getSchemeSpecificPart(); + abstract protected function getSchemeSpecificPart(); /** * {@inheritdoc} diff --git a/test/Schemes/DataTest.php b/test/Schemes/DataTest.php index <HASH>..<HASH> 100644 --- a/test/Schemes/DataTest.php +++ b/test/Schemes/DataTest.php @@ -26,7 +26,6 @@ class DataTest extends PHPUnit_Framework_TestCase $this->assertSame($mediatype, $uri->path->getMediatype()); $this->assertSame($data, $uri->path->getData()); $this->assertSame($isBinaryData, $uri->path->isBinaryData()); - $this->assertSame($uri->getPath(), $uri->getSchemeSpecificPart()); $this->assertInstanceOf('League\Uri\Interfaces\Scheme', $uri->scheme); $this->assertInstanceOf('League\Uri\Interfaces\DataPath', $uri->path); }
Simplify League Uri Interface Uri::getSchemeSpecificPart is now protected. The League\Uri\Interfaces\Uri now exposes the same interface as PSR-7 UriInterface but the wording are different for the Scheme part which requires a separate interface
thephpleague_uri-manipulations
train
2417031b4690f5df1057421d1b3c3bd33c31d008
diff --git a/src/main/java/org/efaps/ui/wicket/behaviors/AjaxFieldUpdateBehavior.java b/src/main/java/org/efaps/ui/wicket/behaviors/AjaxFieldUpdateBehavior.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/efaps/ui/wicket/behaviors/AjaxFieldUpdateBehavior.java +++ b/src/main/java/org/efaps/ui/wicket/behaviors/AjaxFieldUpdateBehavior.java @@ -215,12 +215,13 @@ public class AjaxFieldUpdateBehavior return super.getCallbackScript(getComponent()); } + @Override protected CharSequence getCallbackScript(final Component _component) { CharSequence ret; if (isDojoCall()) { - ret= ""; + ret = ""; } else { ret = super.getCallbackScript(_component); } diff --git a/src/main/java/org/efaps/ui/wicket/behaviors/dojo/AutoCompleteBehavior.java b/src/main/java/org/efaps/ui/wicket/behaviors/dojo/AutoCompleteBehavior.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/efaps/ui/wicket/behaviors/dojo/AutoCompleteBehavior.java +++ b/src/main/java/org/efaps/ui/wicket/behaviors/dojo/AutoCompleteBehavior.java @@ -163,12 +163,12 @@ public class AutoCompleteBehavior js.append("searchAttr: \"name\"}, \"").append(_component.getMarkupId()).append("\");\n"); - js.append("on(").append(comboBoxId).append(", 'change', function() {") + js.append("on(").append(comboBoxId).append(", 'change', function() {\n") .append("var label=").append(comboBoxId).append(".item.label;") .append("if (!(label === undefined || label === null)) {") .append(comboBoxId).append(".item.name=label;") .append(comboBoxId).append(".set(\"item\",").append(comboBoxId).append(".item);") - .append("}"); + .append("}\n"); if (this.fieldUpdate != null) { js.append(this.fieldUpdate.getCallbackScript4Dojo()); } diff --git a/src/main/java/org/efaps/ui/wicket/components/autocomplete/AutoCompleteComboBox.java b/src/main/java/org/efaps/ui/wicket/components/autocomplete/AutoCompleteComboBox.java index <HASH>..<HASH> 100644 --- a/src/main/java/org/efaps/ui/wicket/components/autocomplete/AutoCompleteComboBox.java +++ b/src/main/java/org/efaps/ui/wicket/components/autocomplete/AutoCompleteComboBox.java @@ -85,17 +85,7 @@ public class AutoCompleteComboBox this.add(new SetSelectedRowBehavior(fieldName)); } if (uiAbstractCell.isFieldUpdate()) { - final AjaxFieldUpdateBehavior fieldUpdate = new AjaxFieldUpdateBehavior("onchange", _model) { - - /** Needed for serialization. */ - private static final long serialVersionUID = 1L; - - @Override - protected String getComponentMarkupId() - { - return getMarkupId() + "_hidden"; - } - }; + final AjaxFieldUpdateBehavior fieldUpdate = new AjaxFieldUpdateBehavior("domready", _model); fieldUpdate.setDojoCall(true); this.add(fieldUpdate); autocomplete.addFieldUpdate(fieldUpdate);
- webapp: fieldupdate event was executed multiple times git-svn-id: <URL>
eFaps_eFaps-WebApp
train
a9f995b28353d97e7a754174695df1e36966f802
diff --git a/src/main/org/codehaus/groovy/classgen/asm/sc/StaticTypesBinaryExpressionMultiTypeDispatcher.java b/src/main/org/codehaus/groovy/classgen/asm/sc/StaticTypesBinaryExpressionMultiTypeDispatcher.java index <HASH>..<HASH> 100644 --- a/src/main/org/codehaus/groovy/classgen/asm/sc/StaticTypesBinaryExpressionMultiTypeDispatcher.java +++ b/src/main/org/codehaus/groovy/classgen/asm/sc/StaticTypesBinaryExpressionMultiTypeDispatcher.java @@ -412,6 +412,7 @@ public class StaticTypesBinaryExpressionMultiTypeDispatcher extends BinaryExpres "putAt", ae ); + mce.setSourcePosition(parent); visitor.visitMethodCallExpression(mce); mce.visit(controller.getAcg()); // return value of assignment diff --git a/src/main/org/codehaus/groovy/transform/stc/StaticTypeCheckingVisitor.java b/src/main/org/codehaus/groovy/transform/stc/StaticTypeCheckingVisitor.java index <HASH>..<HASH> 100644 --- a/src/main/org/codehaus/groovy/transform/stc/StaticTypeCheckingVisitor.java +++ b/src/main/org/codehaus/groovy/transform/stc/StaticTypeCheckingVisitor.java @@ -487,6 +487,8 @@ public class StaticTypeCheckingVisitor extends ClassCodeVisitorSupport { List<MethodNode> nodes = findMethod(lType.redirect(), "putAt", arguments); if (nodes.size() == 1) { typeCheckMethodsWithGenerics(lType, arguments, nodes.get(0), expression); + } else if (nodes.isEmpty()) { + addNoMatchingMethodError(lType, "putAt", arguments, enclosingBinaryExpression); } } boolean isEmptyDeclaration = expression instanceof DeclarationExpression && rightExpression instanceof EmptyExpression; diff --git a/src/test/groovy/transform/stc/ArraysAndCollectionsSTCTest.groovy b/src/test/groovy/transform/stc/ArraysAndCollectionsSTCTest.groovy index <HASH>..<HASH> 100644 --- a/src/test/groovy/transform/stc/ArraysAndCollectionsSTCTest.groovy +++ b/src/test/groovy/transform/stc/ArraysAndCollectionsSTCTest.groovy @@ -490,5 +490,18 @@ class ArraysAndCollectionsSTCTest extends StaticTypeCheckingTestCase { } ''', 'into array of type' } + + // GROOVY-6131 + void testArraySetShouldGenerateBytecode() { + shouldFailWithMessages ''' + void addToCollection(Collection coll, int index, val) { + coll[index] = val + } + def list = ['a'] + addToCollection(list, 0, 'b') + assert list == ['b'] + ''', 'Cannot find matching method java.util.Collection#putAt(int, java.lang.Object)' + } + }
GROOVY-<I>: Collection#putAt should be refused by the type checker
groovy_groovy-core
train
04caa85b56aaf463af0166dd48926b56ac501446
diff --git a/lib/ghee/api/labels.rb b/lib/ghee/api/labels.rb index <HASH>..<HASH> 100644 --- a/lib/ghee/api/labels.rb +++ b/lib/ghee/api/labels.rb @@ -12,7 +12,6 @@ class Ghee module Labels class Proxy < ::Ghee::ResourceProxy - include Ghee::CUD end end
tighting up repo#labels
huboard_ghee
train
04f698f4623bfb5a5246e8efa1dd0f3c100621c3
diff --git a/test/same.js b/test/same.js index <HASH>..<HASH> 100644 --- a/test/same.js +++ b/test/same.js @@ -1419,21 +1419,3 @@ test("Test that must be done at the end because they extend some primitive's pro // between RegExp and Function constructor because typeof on a RegExpt instance is "function" equals(QUnit.equiv(function () {}, re), false, "Same conversely, but ensures that function and regexp are distinct because their constructor are different"); }); - -module("testers"); - -test("raises", function() { - function thrower1() { - throw 'Errored!'; - } - function thrower2() { - throw new TypeError("Type!"); - } - function thrower3() { - var e = {message:"Custom!"}; - throw e; - } - raises(thrower1, 'Errored!', 'throwing string'); - raises(thrower2, 'Type!', 'throwing TypeError instance'); - raises(thrower3, 'Custom!', 'throwing custom object'); -});
Cleaning up rubble from the previous commit.
JamesMGreene_qunit-assert-html
train
5d655ea3320a8ebd7d07e63359e48b21c0e9f908
diff --git a/rpcserver.go b/rpcserver.go index <HASH>..<HASH> 100644 --- a/rpcserver.go +++ b/rpcserver.go @@ -945,6 +945,11 @@ func (r *rpcServer) AddInvoice(ctx context.Context, "(maxsize=%v)", len(invoice.Receipt), channeldb.MaxReceiptSize) } + // Finally, the value of an invoice MUST NOT be zero. + if invoice.Value == 0 { + return nil, fmt.Errorf("zero value invoices are disallowed") + } + i := &channeldb.Invoice{ CreationDate: time.Now(), Memo: []byte(invoice.Memo),
rpcserver: disallow zero valued invoices
lightningnetwork_lnd
train
dac172fc73c449068f19e401a2f08dff031bbf23
diff --git a/core/Db.php b/core/Db.php index <HASH>..<HASH> 100644 --- a/core/Db.php +++ b/core/Db.php @@ -786,7 +786,7 @@ class Db { if (is_null(self::$lockPrivilegeGranted)) { try { - Db::lockTables(Common::prefixTable('log_visit')); + Db::lockTables(Common::prefixTable('site_url')); Db::unlockAllTables(); self::$lockPrivilegeGranted = true;
Don't lock log_visit table to check if privilege is granted (#<I>) Instead use a table we write to less frequently. Otherwise, whenever we over privacy settings or when it is trying to delete unused actions it will lock the log_visit table for a short time that could cause random tracking issues (eg heap of requests piling up trying to write to the log_visit table). It's only a read lock but according to MySQL it still blocks other sessions from writing from <URL>
matomo-org_matomo
train
54f209f556cb9cf1c63c0938a825a7ef15eb073d
diff --git a/extra/influx-copy/src/main/java/com/github/groupon/monsoon/history/influx/FileConvert.java b/extra/influx-copy/src/main/java/com/github/groupon/monsoon/history/influx/FileConvert.java index <HASH>..<HASH> 100644 --- a/extra/influx-copy/src/main/java/com/github/groupon/monsoon/history/influx/FileConvert.java +++ b/extra/influx-copy/src/main/java/com/github/groupon/monsoon/history/influx/FileConvert.java @@ -62,8 +62,10 @@ public class FileConvert { } // If verbose mode is requested, dial up the log spam. - if (verbose) + if (verbose) { Logger.getLogger("com.groupon.lex").setLevel(Level.INFO); + Logger.getLogger("com.github.groupon.monsoon").setLevel(Level.INFO); + } // If there are no files, comlain with a non-zero exit code. if (srcdir == null) @@ -77,7 +79,7 @@ public class FileConvert { try { final CollectHistory dst = new InfluxHistory(InfluxDBFactory.connect(influxDst), database); try { - src.stream().forEach(dst::add); + copy(src, dst); } finally { if (dst instanceof AutoCloseable) ((AutoCloseable) dst).close(); @@ -91,6 +93,7 @@ public class FileConvert { public static void main(String[] args) throws Exception { // Dial down the log spam. Logger.getLogger("com.groupon.lex").setLevel(Level.WARNING); + Logger.getLogger("com.github.groupon.monsoon").setLevel(Level.WARNING); try { new FileConvert(args).run(); @@ -99,4 +102,8 @@ public class FileConvert { System.exit(EX_TEMPFAIL); } } + + private static void copy(CollectHistory src, CollectHistory dst) { + dst.addAll(src.stream().iterator()); + } }
Use src iteration during copy.
groupon_monsoon
train
dc3d707141a39bb4dedee3f8e86d6b7ad39f2591
diff --git a/packages/ember-metal/lib/mixin.js b/packages/ember-metal/lib/mixin.js index <HASH>..<HASH> 100644 --- a/packages/ember-metal/lib/mixin.js +++ b/packages/ember-metal/lib/mixin.js @@ -167,6 +167,7 @@ function giveMethodSuper(obj, key, method, values, descs) { if (hasSuper === undefined) { hasSuper = method.toString().indexOf('_super'); + method.__hasSuper = hasSuper; } if (hasSuper) {
Prevent extra method.toString checks. It appears that the intent here was to only check the `method.toString().indexOf('_super')` once, and store the result so that we do not need to check the same method twice, but we were never actual storing `__hasSuper` on the method so it was just always going to check the `toString`.
emberjs_ember.js
train
7129af7ed75f0e5130aed7c85a250d7edfdd41a9
diff --git a/src/change-observer.js b/src/change-observer.js index <HASH>..<HASH> 100644 --- a/src/change-observer.js +++ b/src/change-observer.js @@ -27,7 +27,10 @@ class ChangeObserver { if (this.__observers.length > 0) { var currentValues = Immutable.Map() - this.__observers.forEach(entry => { + this.__observers.slice(0).forEach(entry => { + if (entry.unwatched) { + return + } var getter = entry.getter var code = hashCode(getter) var prevState = this.__prevState @@ -65,6 +68,7 @@ class ChangeObserver { var entry = { getter: getter, handler: handler, + unwatched: false, } this.__observers.push(entry) // return unwatch function @@ -72,6 +76,7 @@ class ChangeObserver { // TODO: untrack from change emitter var ind = this.__observers.indexOf(entry) if (ind > -1) { + entry.unwatched = true this.__observers.splice(ind, 1) } } diff --git a/tests/change-observer-tests.js b/tests/change-observer-tests.js index <HASH>..<HASH> 100644 --- a/tests/change-observer-tests.js +++ b/tests/change-observer-tests.js @@ -78,6 +78,46 @@ describe('ChangeObserver', () => { expect(mockFn2.calls.count()).toBe(1) }) }) + + it('should not skip observers when handler causes unobserve', () => { + var getter = ['foo', 'bar'] + var mockFn = jasmine.createSpy() + var unreg = observer.onChange(getter, () => unreg()) + observer.onChange(getter, mockFn) + + observer.notifyObservers(initialState.updateIn(getter, x => 2)) + + expect(mockFn.calls.count()).toBe(1) + }) + + it('should not call unwatched observers when removed during notify', () => { + var getter = ['foo', 'bar'] + var mockFn1 = jasmine.createSpy() + var mockFn2 = jasmine.createSpy() + observer.onChange(getter, () => { + mockFn1() + unreg() + }) + var unreg = observer.onChange(getter, mockFn2) + + observer.notifyObservers(initialState.updateIn(getter, x => 2)) + + expect(mockFn1.calls.count()).toBe(1) + expect(mockFn2.calls.count()).toBe(0) + }) + + it('should not call new observers when handlers attach them', () => { + var getter = ['foo', 'bar'] + var mockFn1 = jasmine.createSpy() + var mockFn2 = jasmine.createSpy() + observer.onChange(getter, mockFn1) + observer.onChange(getter, () => observer.onChange(getter, mockFn2)) + + observer.notifyObservers(initialState.updateIn(getter, x => 2)) + + expect(mockFn1.calls.count()).toBe(1) + expect(mockFn2.calls.count()).toBe(0) + }) }) // TODO: test the prevValues and registering an observable })
Iterate over copy of observers for notifyObservers [Fixes #<I>]
optimizely_nuclear-js
train
381304726faeca3450b83dc7ccf8363222f4c902
diff --git a/lib/instrument.js b/lib/instrument.js index <HASH>..<HASH> 100644 --- a/lib/instrument.js +++ b/lib/instrument.js @@ -50,7 +50,7 @@ Instrument.prototype.timeAsyncFunction = function(name, func) { var self = this; return function() { - var work = self.work(name) + var work = self.work(name), args = Array.prototype.slice.call(arguments), callback = args.pop();
fix(instrument): Add a missing comma so that args and callback are scoped properly.
jirwin_node-zither
train
a024ea2146257ec9ff127c3919ebcc74114c6090
diff --git a/cloudvolume/skeletonservice.py b/cloudvolume/skeletonservice.py index <HASH>..<HASH> 100644 --- a/cloudvolume/skeletonservice.py +++ b/cloudvolume/skeletonservice.py @@ -438,11 +438,11 @@ class PrecomputedSkeleton(object): if skel.edges.size == 0: return skel, [] - index = defaultdict(list) + index = defaultdict(set) visited = defaultdict(bool) for e1, e2 in skel.edges: - index[e1].append(e2) - index[e2].append(e1) + index[e1].add(e2) + index[e2].add(e1) def extract_component(start): tree = set() @@ -450,6 +450,9 @@ class PrecomputedSkeleton(object): while stack: node = int(stack.pop(0)) + if visited[node]: + continue + visited[node] = True tree.add(node) for child in index[node]:
fix(skeletonservice): _compute_components wasn't treating loops correctly Added check for visited.
seung-lab_cloud-volume
train
565ff76422c132250a88995b43ec387717e88e0f
diff --git a/edeposit/amqp/settings.py b/edeposit/amqp/settings.py index <HASH>..<HASH> 100644 --- a/edeposit/amqp/settings.py +++ b/edeposit/amqp/settings.py @@ -30,18 +30,22 @@ RABBITMQ_PORT = '5672' #: RABBITMQ_USER_NAME = 'guest' #: RABBITMQ_USER_PASSWORD = 'guest' #: +# aleph's settings RABBITMQ_ALEPH_VIRTUALHOST = "aleph" #: RABBITMQ_ALEPH_DAEMON_QUEUE = "daemon" #: RABBITMQ_ALEPH_PLONE_QUEUE = "plone" #: RABBITMQ_ALEPH_EXCHANGE = "search" #: RABBITMQ_ALEPH_DAEMON_KEY = "request" #: + RABBITMQ_ALEPH_PLONE_KEY = "result" #: RABBITMQ_ALEPH_EXCEPTION_KEY = "exception" #: +# calibre's settings RABBITMQ_CALIBRE_VIRTUALHOST = "calibre" #: RABBITMQ_CALIBRE_DAEMON_QUEUE = "daemon" #: RABBITMQ_CALIBRE_PLONE_QUEUE = "plone" #: RABBITMQ_CALIBRE_EXCHANGE = "convert" #: + RABBITMQ_CALIBRE_DAEMON_KEY = "request" #: RABBITMQ_CALIBRE_PLONE_KEY = "result" #:
Added comments to settings.py.
edeposit_edeposit.amqp
train
4cd5222ee077f8cc1016c3338ea8ff7676322b72
diff --git a/core/client/app/routes/posts.js b/core/client/app/routes/posts.js index <HASH>..<HASH> 100644 --- a/core/client/app/routes/posts.js +++ b/core/client/app/routes/posts.js @@ -9,7 +9,8 @@ export default AuthenticatedRoute.extend(ShortcutsRoute, PaginationRouteMixin, { paginationModel: 'post', paginationSettings: { status: 'all', - staticPages: 'all' + staticPages: 'all', + filter: '' }, model: function () { @@ -18,7 +19,7 @@ export default AuthenticatedRoute.extend(ShortcutsRoute, PaginationRouteMixin, { return this.get('session.user').then(function (user) { if (user.get('isAuthor')) { - paginationSettings.author = user.get('slug'); + paginationSettings.filter += `+author:${user.get('slug')}`; } return self.loadFirstPage().then(function () {
Update author query to use filter refs #<I>
TryGhost_Ghost
train
3a4c322b9d7eb2b9654f01061571a9be10b9d221
diff --git a/mutable_tree.go b/mutable_tree.go index <HASH>..<HASH> 100644 --- a/mutable_tree.go +++ b/mutable_tree.go @@ -207,10 +207,11 @@ func (tree *MutableTree) recursiveRemove(node *Node, key []byte, orphans *[]*Nod if len(*orphans) == 0 { return node.hash, node, nil, value - } else if newLeftHash == nil && newLeftNode == nil { // left node held value, was removed - return node.rightHash, node.rightNode, node.key, value } *orphans = append(*orphans, node) + if newLeftHash == nil && newLeftNode == nil { // left node held value, was removed + return node.rightHash, node.rightNode, node.key, value + } newNode := node.clone(version) newNode.leftHash, newNode.leftNode = newLeftHash, newLeftNode @@ -223,10 +224,11 @@ func (tree *MutableTree) recursiveRemove(node *Node, key []byte, orphans *[]*Nod if len(*orphans) == 0 { return node.hash, node, nil, value - } else if newRightHash == nil && newRightNode == nil { // right node held value, was removed - return node.leftHash, node.leftNode, nil, value } *orphans = append(*orphans, node) + if newRightHash == nil && newRightNode == nil { // right node held value, was removed + return node.leftHash, node.leftNode, nil, value + } newNode := node.clone(version) newNode.rightHash, newNode.rightNode = newRightHash, newRightNode diff --git a/tree_test.go b/tree_test.go index <HASH>..<HASH> 100644 --- a/tree_test.go +++ b/tree_test.go @@ -360,7 +360,7 @@ func TestVersionedTree(t *testing.T) { nodes3 := tree.ndb.leafNodes() require.Len(nodes3, 6, "wrong number of nodes") - require.Len(tree.ndb.orphans(), 6, "wrong number of orphans") + require.Len(tree.ndb.orphans(), 7, "wrong number of orphans") hash4, _, _ := tree.SaveVersion() require.EqualValues(hash3, hash4)
bugfix: orphans are not totally collected when Remove (#<I>)
tendermint_iavl
train
13614f24b22e2f4da348b118b222cc7f308685d1
diff --git a/lib-dempsycore/src/main/java/com/nokia/dempsy/monitoring/StatsCollector.java b/lib-dempsycore/src/main/java/com/nokia/dempsy/monitoring/StatsCollector.java index <HASH>..<HASH> 100644 --- a/lib-dempsycore/src/main/java/com/nokia/dempsy/monitoring/StatsCollector.java +++ b/lib-dempsycore/src/main/java/com/nokia/dempsy/monitoring/StatsCollector.java @@ -56,7 +56,7 @@ public interface StatsCollector { /** * The dispatcher calls this method in its <code>onMessage</code> handler - * when it discards a message. + * when it discards a message.statCollector */ void messageDiscarded(Object message); @@ -70,7 +70,7 @@ public interface StatsCollector { * The instance manager calls this method when it deletes a message processor * instance. */ - void messageProcessorDeleted(); + void messageProcessorDeleted(Object key); /** * Some stats collectors need to be stopped. diff --git a/lib-dempsyimpl/src/main/java/com/nokia/dempsy/container/MpContainer.java b/lib-dempsyimpl/src/main/java/com/nokia/dempsy/container/MpContainer.java index <HASH>..<HASH> 100644 --- a/lib-dempsyimpl/src/main/java/com/nokia/dempsy/container/MpContainer.java +++ b/lib-dempsyimpl/src/main/java/com/nokia/dempsy/container/MpContainer.java @@ -424,6 +424,7 @@ public class MpContainer implements Listener, OutputInvoker prototype.passivate(wrapper.getInstance()); wrapper.markPassivated(); instances.remove(key); + statCollector.messageProcessorDeleted(key); } } catch (InvocationTargetException e) diff --git a/lib-dempsyimpl/src/main/java/com/nokia/dempsy/monitoring/coda/StatsCollectorCoda.java b/lib-dempsyimpl/src/main/java/com/nokia/dempsy/monitoring/coda/StatsCollectorCoda.java index <HASH>..<HASH> 100644 --- a/lib-dempsyimpl/src/main/java/com/nokia/dempsy/monitoring/coda/StatsCollectorCoda.java +++ b/lib-dempsyimpl/src/main/java/com/nokia/dempsy/monitoring/coda/StatsCollectorCoda.java @@ -194,7 +194,7 @@ public class StatsCollectorCoda implements StatsCollector { } @Override - public void messageProcessorDeleted() { + public void messageProcessorDeleted(Object key) { mpsDeleted.mark(); numberOfMPs.decrementAndGet(); } diff --git a/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/basic/BasicStatsCollector.java b/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/basic/BasicStatsCollector.java index <HASH>..<HASH> 100644 --- a/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/basic/BasicStatsCollector.java +++ b/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/basic/BasicStatsCollector.java @@ -122,7 +122,7 @@ public class BasicStatsCollector implements StatsCollector } @Override - public void messageProcessorDeleted() + public void messageProcessorDeleted(Object key) { mpsDeleted.incrementAndGet(); numberOfMPs.decrementAndGet(); diff --git a/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/coda/TestStatsCollectorCoda.java b/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/coda/TestStatsCollectorCoda.java index <HASH>..<HASH> 100644 --- a/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/coda/TestStatsCollectorCoda.java +++ b/lib-dempsyimpl/src/test/java/com/nokia/dempsy/monitoring/coda/TestStatsCollectorCoda.java @@ -100,7 +100,7 @@ public class TestStatsCollectorCoda { @Test public void testMessageProcessorDeleted() { assertEquals("none yet", 0L, getStatValue(stats, StatsCollectorCoda.MN_MP_DELETE)); - stats.messageProcessorDeleted(); + stats.messageProcessorDeleted("abc"); assertEquals("del one", 1L, getStatValue(stats, StatsCollectorCoda.MN_MP_DELETE)); }
Register Eviction Counts with Stats Collector
Dempsy_dempsy
train
3f5fc52279413edb0151f51ccdd746612c669f75
diff --git a/ped_parser/parser.py b/ped_parser/parser.py index <HASH>..<HASH> 100755 --- a/ped_parser/parser.py +++ b/ped_parser/parser.py @@ -40,12 +40,14 @@ class FamilyParser(object): super(FamilyParser, self).__init__() self.family_type = family_type self.families = {} - self.header = ['Family_ID', 'Sample_ID', 'Father', 'Mother', 'Sex', 'Phenotype'] + self.header = ['FamilyID', 'SampleID', 'Father', 'Mother', 'Sex', 'Phenotype'] with open(infile, 'r') as f: line_count = 0 for line in f: individual_line = line.rstrip() - if not individual_line.startswith('#') and not all(c in whitespace for c in individual_line): + if individual_line.startswith('#'): + self.header = line[1:].split() + elif not all(c in whitespace for c in individual_line): if family_type in ['cmms', 'mip']: self.cmms_parser(individual_line, self.header, family_type) elif family_type == 'fam': @@ -54,8 +56,6 @@ class FamilyParser(object): self.ped_parser(individual_line) # elif family_type == 'broad': # self.broad_parser(individual_line, line_count) - else: - self.header = line[1:].split() def ped_parser(self, individual_line): """Parse a .ped ped file.""" @@ -109,7 +109,7 @@ class FamilyParser(object): info[header[i]] = line[i] fam_id = info.get('FamilyID', '0') - + if fam_id not in self.families: self.families[fam_id] = family.Family(fam_id) @@ -124,7 +124,6 @@ class FamilyParser(object): sex == '0' if phenotype not in ['1', '2']: phenotype == '0' - # If cmms type we can check the sample names if family_type == 'cmms': affection_status = ind.split('-')[-1][-1] # This in A (=affected) or U (=unaffected) @@ -135,7 +134,7 @@ class FamilyParser(object): raise SyntaxError('Gender code in id disagrees with sex:\n %s' % individual_line) models_of_inheritance = info.get('Inheritance_model', ['NA']) - + correct_model_names = [] for model in models_of_inheritance: if model in ['AR', 'AR_hom']: diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100755 --- a/setup.py +++ b/setup.py @@ -9,7 +9,7 @@ with open('README.txt') as file: long_description = file.read() setup(name="ped_parser", - version="0.3.5", + version="0.3.6", author="Mans Magnusson", author_email="[email protected]", license='BSD',
Fixed problem with non existing header and bumoed versio to <I>
moonso_ped_parser
train