hash | diff | message | project | split |
---|---|---|---|---|
28860f095ffe86a033b7c1cc6f86500e4765c43d | diff --git a/lib/searchkick/relation_indexer.rb b/lib/searchkick/relation_indexer.rb
index <HASH>..<HASH> 100644
--- a/lib/searchkick/relation_indexer.rb
+++ b/lib/searchkick/relation_indexer.rb
@@ -63,9 +63,8 @@ module Searchkick
if relation.respond_to?(:primary_key)
# use total docs instead of max id since there's not a great way
# to get the max _id without scripting since it's a string
-
- # TODO use primary key and prefix with table name
- relation = relation.where("id > ?", index.total_docs)
+ where = relation.arel_table[relation.primary_key].gt(index.total_docs)
+ relation = relation.where(where)
else
raise Error, "Resume not supported for Mongoid"
end | Added support for non-id primary keys to resume [skip ci] | ankane_searchkick | train |
38caab951a9ff39f0e61294a1dbaaa074a39228b | diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -32,6 +32,7 @@ Bug Fixes:
- If --skipPackages is used and all packages are skipped, Ginkgo should exit 0.
- Fix tempfile leak when running in parallel
- Fix incorrect failure message when a panic occurs during a parallel test run
+- Fixed an issue where a pending test within a focused context (or a focused test within a pending context) would skip all other tests.
## 1.1.0 (8/2/2014)
diff --git a/internal/spec/specs.go b/internal/spec/specs.go
index <HASH>..<HASH> 100644
--- a/internal/spec/specs.go
+++ b/internal/spec/specs.go
@@ -52,7 +52,7 @@ func (e *Specs) ApplyFocus(description string, focusString string, skipString st
func (e *Specs) applyProgrammaticFocus() {
e.hasProgrammaticFocus = false
for _, spec := range e.specs {
- if spec.Focused() {
+ if spec.Focused() && !spec.Pending() {
e.hasProgrammaticFocus = true
break
}
diff --git a/internal/spec/specs_test.go b/internal/spec/specs_test.go
index <HASH>..<HASH> 100644
--- a/internal/spec/specs_test.go
+++ b/internal/spec/specs_test.go
@@ -231,6 +231,36 @@ var _ = Describe("Specs", func() {
})
})
+ Describe("With a focused spec within a pending context and a pending spec within a focused context", func() {
+ BeforeEach(func() {
+ pendingInFocused := New(
+ leafnodes.NewItNode("PendingInFocused", func() {}, pendingFlag, codelocation.New(0), 0, nil, 0),
+ []*containernode.ContainerNode{
+ containernode.New("", focusedFlag, codelocation.New(0)),
+ }, false)
+
+ focusedInPending := New(
+ leafnodes.NewItNode("FocusedInPending", func() {}, focusedFlag, codelocation.New(0), 0, nil, 0),
+ []*containernode.ContainerNode{
+ containernode.New("", pendingFlag, codelocation.New(0)),
+ }, false)
+
+ specs = NewSpecs([]*Spec{
+ newSpec("A", noneFlag),
+ newSpec("B", noneFlag),
+ pendingInFocused,
+ focusedInPending,
+ })
+ specs.ApplyFocus("", "", "")
+ })
+
+ It("should not have a programmatic focus and should run all tests", func() {
+ Ω(willRunTexts(specs)).Should(Equal([]string{"A", "B"}))
+ Ω(skippedTexts(specs)).Should(BeEmpty())
+ Ω(pendingTexts(specs)).Should(ConsistOf(ContainSubstring("PendingInFocused"), ContainSubstring("FocusedInPending")))
+ })
+ })
+
Describe("skipping measurements", func() {
BeforeEach(func() {
specs = NewSpecs([]*Spec{ | fix issue when nesting focused tests within pending containers | onsi_ginkgo | train |
add9e8d5bde3806a880be5864e24e19bc486d5a2 | diff --git a/src/ns.view.js b/src/ns.view.js
index <HASH>..<HASH> 100644
--- a/src/ns.view.js
+++ b/src/ns.view.js
@@ -1031,20 +1031,15 @@ var _ctors = {};
* @return {Function} Созданный View.
*/
ns.View.define = function(id, info, base) {
- if (id in _infos) {
- throw new Error("[ns.View] Can't redefine '" + id + "'");
- }
+ ns.assert(!(id in _infos), 'ns.View', "Can't redefine '%s'", id);
info = info || {};
var baseClass = ns.View;
if (typeof base === 'string') {
// если указана строка, то берем декларацию ns.View
- if (_ctors[base]) {
- baseClass = _ctors[base];
- } else {
- throw new Error("[ns.View] Can't find '" + base + "' to extend '" + id + "'");
- }
+ baseClass = _ctors[base];
+ ns.assert(baseClass, 'ns.View', "Can't find '%s' to extend '%s'", base, id);
} else if (typeof base === 'function') {
baseClass = base; | more ns.assert usages | yandex-ui_noscript | train |
1dfa529941643b486ceadb7f38cfd8f2b1f3d0eb | diff --git a/pkg/services/provisioning/dashboards/file_reader.go b/pkg/services/provisioning/dashboards/file_reader.go
index <HASH>..<HASH> 100644
--- a/pkg/services/provisioning/dashboards/file_reader.go
+++ b/pkg/services/provisioning/dashboards/file_reader.go
@@ -64,16 +64,24 @@ func (fr *fileReader) getCache(key string) (*dashboards.SaveDashboardItem, bool)
}
func (fr *fileReader) ReadAndListen(ctx context.Context) error {
- ticker := time.NewTicker(time.Second * 5)
+ ticker := time.NewTicker(time.Second * 3)
if err := fr.walkFolder(); err != nil {
fr.log.Error("failed to search for dashboards", "error", err)
}
+ running := false
+
for {
select {
case <-ticker.C:
- fr.walkFolder()
+ if !running { // avoid walking the filesystem in parallel, in case the fs is very slow.
+ running = true
+ go func() {
+ fr.walkFolder()
+ running = false
+ }()
+ }
case <-ctx.Done():
return nil
} | dashboards as cfg: avoid walking fs in parallel | grafana_grafana | train |
7d83bf85e3d7b41d8e58217028cdcfc24029b8d1 | diff --git a/indra/preassembler/__init__.py b/indra/preassembler/__init__.py
index <HASH>..<HASH> 100644
--- a/indra/preassembler/__init__.py
+++ b/indra/preassembler/__init__.py
@@ -251,7 +251,12 @@ class Preassembler(object):
a stmts_by_hash dict and a stmts_to_compare dict as its input and
returns a dict of possible refinements where the keys are
statement hashes and the values are sets of statement hashes that
- the key statement possibly refines.
+ the key statement possibly refines. If not provided, a built-in
ontology-based pre-filter is applied. Note that if a list of filter
+ functions is provided, the built-in ontology-based pre-filter is not
+ automatically appended to the list of filters. In this case,
+ consider adding the `ontology_refinement_filter` function from this
+ module to the filters list.
Returns
-------
@@ -426,6 +431,7 @@ class Preassembler(object):
maps = []
# We again iterate over statements
ts = time.time()
+ # Given the possible refinements in stmts_to_compare, we confirm each
for stmt_hash, possible_refined_hashes in stmts_to_compare.items():
# We use the previously constructed set of statements that this one
# can possibly refine
@@ -445,6 +451,12 @@ class Preassembler(object):
stmts_by_hash[stmt_hash],
stmts_by_hash[possible_refined_hash],
ontology=self.ontology,
+ # NOTE: here we assume that the entities at this point
+ # are definitely refined due to the use of an
+ # ontology-based pre-filter. If this is not the case
+ # for some reason then it is the responsibility of the
+ # user-supplied self.refinement_fun to disregard the
+ # entities_refined argument.
entities_refined=True)
if ref:
maps.append((stmt_hash, possible_refined_hash))
@@ -822,7 +834,7 @@ def flatten_evidence(stmts, collect_from=None):
def _flatten_evidence_for_stmt(stmt, collect_from):
supp_stmts = (stmt.supports if collect_from == 'supports'
- else stmt.supported_by)
+ else stmt.supported_by)
total_evidence = set(stmt.evidence)
for supp_stmt in supp_stmts:
child_evidence = _flatten_evidence_for_stmt(supp_stmt, collect_from)
@@ -952,6 +964,9 @@ def ontology_refinement_filter_by_stmt_type(stmts_by_hash, stmts_to_compare,
stmts_by_hash : dict
A dict whose keys are statement hashes that point to the
(deduplicated) statement with that hash as a value.
+ stmts_to_compare : dict or None
+ A dict of existing statements to compare that will be further
+ filtered down in this function and then returned.
ontology : indra.ontology.IndraOntology
An IndraOntology instance with respect to which this
filter is applied.
@@ -1047,6 +1062,7 @@ def ontology_refinement_filter_by_stmt_type(stmts_by_hash, stmts_to_compare,
def bio_ontology_refinement_filter(stmts_by_hash, stmts_to_compare):
+ """An ontology refinement filter that works with the INDRA BioOntology."""
from indra.ontology.bio import bio_ontology
return ontology_refinement_filter(stmts_by_hash, stmts_to_compare,
ontology=bio_ontology) | Improve some docstrings and comments | sorgerlab_indra | train |
0be8ae9ba36ec9a7a5e84d8e267181a4f87ed375 | diff --git a/tweepy/binder.py b/tweepy/binder.py
index <HASH>..<HASH> 100644
--- a/tweepy/binder.py
+++ b/tweepy/binder.py
@@ -7,7 +7,7 @@ from __future__ import print_function
import time
import re
-from six.moves.urllib.parse import quote
+from six.moves.urllib.parse import quote, urlencode
import requests
import logging
@@ -132,7 +132,7 @@ def bind_api(**config):
# Query the cache if one is available
# and this request uses a GET method.
if self.use_cache and self.api.cache and self.method == 'GET':
- cache_result = self.api.cache.get(url)
+ cache_result = self.api.cache.get('%s?%s' % (url, urlencode(self.session.params)))
# if cache result found and not expired, return it
if cache_result:
# must restore api reference
@@ -233,7 +233,7 @@ def bind_api(**config):
# Store result into cache if one is available.
if self.use_cache and self.api.cache and self.method == 'GET' and result:
- self.api.cache.store(url, result)
+ self.api.cache.store('%s?%s' % (url, urlencode(self.session.params)), result)
return result | Fix cache
Fixing the cache key for both get and store methods | tweepy_tweepy | train |
d64d6dcb54d69cc5c10e022eaa0f57e7338c638e | diff --git a/platform/disk/sfdisk_partitioner.go b/platform/disk/sfdisk_partitioner.go
index <HASH>..<HASH> 100644
--- a/platform/disk/sfdisk_partitioner.go
+++ b/platform/disk/sfdisk_partitioner.go
@@ -178,6 +178,9 @@ func (p sfdiskPartitioner) getPartitions(devicePath string) (partitions []Partit
partition := Partition{Type: partitionType}
if partition.Type != PartitionTypeEmpty {
+ if strings.Contains(partitionPath, "/dev/mapper/") {
+ partitionPath = partitionPath[0:len(partitionPath)-1] + "-part1"
+ }
size, err := p.GetDeviceSizeInBytes(partitionPath)
if err == nil {
partition.SizeInBytes = size
diff --git a/platform/disk/sfdisk_partitioner_test.go b/platform/disk/sfdisk_partitioner_test.go
index <HASH>..<HASH> 100644
--- a/platform/disk/sfdisk_partitioner_test.go
+++ b/platform/disk/sfdisk_partitioner_test.go
@@ -44,6 +44,15 @@ unit: sectors
/dev/sda4 : start= 0, size= 0, Id= 0
`
+const devMapperSfdiskDumpOnePartition = `# partition table of /dev/mapper/xxxxxx
+unit: sectors
+
+/dev/mapper/xxxxxx1 : start= 1, size= xxxx , Id=83
+/dev/mapper/xxxxxx2 : start= 0, size= 0, Id= 0
+/dev/mapper/xxxxxx3 : start= 0, size= 0, Id= 0
+/dev/mapper/xxxxxx4 : start= 0, size= 0, Id= 0
+`
+
const expectedDmSetupLs = `
xxxxxx-part1 (252:1)
xxxxxx (252:0)
@@ -136,6 +145,20 @@ var _ = Describe("sfdiskPartitioner", func() {
Expect(len(runner.RunCommandsWithInput)).To(Equal(0))
})
+ It("sfdisk partition when partitions already match for multipath", func() {
+ runner.AddCmdResult("sfdisk -d /dev/mapper/xxxxxx", fakesys.FakeCmdResult{Stdout: devMapperSfdiskDumpOnePartition})
+ runner.AddCmdResult("sfdisk -s /dev/mapper/xxxxxx", fakesys.FakeCmdResult{Stdout: fmt.Sprintf("%d\n", 1024*1024+7000)})
+ runner.AddCmdResult("sfdisk -s /dev/mapper/xxxxxx-part1", fakesys.FakeCmdResult{Stdout: fmt.Sprintf("%d\n", 1024*1024)})
+
+ partitions := []Partition{
+ {Type: PartitionTypeLinux, SizeInBytes: 1024 * 1024 * 1024},
+ }
+
+ partitioner.Partition("/dev/mapper/xxxxxx", partitions)
+
+ Expect(len(runner.RunCommandsWithInput)).To(Equal(0))
+ })
+
It("sfdisk partition with last partition not matching size", func() {
runner.AddCmdResult("sfdisk -d /dev/sda", fakesys.FakeCmdResult{Stdout: devSdaSfdiskDumpOnePartition})
runner.AddCmdResult("sfdisk -s /dev/sda", fakesys.FakeCmdResult{Stdout: fmt.Sprintf("%d\n", 2048*1024)}) | Add -part1 for multipath device | cloudfoundry_bosh-agent | train |
f7e418134cd47166f2e6c7c7d9f3e9a181ed760f | diff --git a/lib/schemas/vital.js b/lib/schemas/vital.js
index <HASH>..<HASH> 100644
--- a/lib/schemas/vital.js
+++ b/lib/schemas/vital.js
@@ -27,6 +27,9 @@ module.exports = {
"value": {
"type": "number"
},
+ "text": {
+ "type": "string"
+ },
"unit": {
"type": "string"
} | allow text values on vitals to support references | amida-tech_blue-button-model | train |
79d9327c3c9ffff343907d64a1272a91328716e4 | diff --git a/gwpy/io/ligolw.py b/gwpy/io/ligolw.py
index <HASH>..<HASH> 100644
--- a/gwpy/io/ligolw.py
+++ b/gwpy/io/ligolw.py
@@ -104,7 +104,10 @@ def table_from_file(f, tablename, columns=None, filt=None,
if filt:
if verbose:
gprint('filtering rows ...', end=' ')
- out_ = table.new_from_template(out)
+ try:
+ out_ = out.copy()
+ except AttributeError:
+ out_ = table.new_from_template(out)
out_.extend(filter(filt, out))
out = out_
if verbose: | io.ligolw: fix Kipp's DeprecationWarning | gwpy_gwpy | train |
3dc82705d5b01d096e5a21741a2e3c782cff620a | diff --git a/src/router/varz.go b/src/router/varz.go
index <HASH>..<HASH> 100644
--- a/src/router/varz.go
+++ b/src/router/varz.go
@@ -2,6 +2,7 @@ package router
import (
"encoding/json"
+ "fmt"
metrics "github.com/rcrowley/go-metrics"
"net/http"
"router/stats"
@@ -93,11 +94,13 @@ func (x *HttpMetric) MarshalJSON() ([]byte, error) {
y.Responses5xx = x.Responses5xx.Count()
y.ResponsesXxx = x.ResponsesXxx.Count()
- z := x.Latency.Percentiles([]float64{0.5, 0.75, 0.99})
+ p := []float64{0.50, 0.75, 0.90, 0.95, 0.99}
+ z := x.Latency.Percentiles(p)
+
y.Latency = make(map[string]float64)
- y.Latency["50"] = z[0]
- y.Latency["75"] = z[1]
- y.Latency["99"] = z[2]
+ for i, e := range p {
+ y.Latency[fmt.Sprintf("%d", int(e*100))] = z[i] / float64(time.Second)
+ }
return json.Marshal(y)
}
diff --git a/src/router/varz_test.go b/src/router/varz_test.go
index <HASH>..<HASH> 100644
--- a/src/router/varz_test.go
+++ b/src/router/varz_test.go
@@ -199,3 +199,20 @@ func (s *VarzSuite) TestUpdateResponseWithTags(c *C) {
c.Assert(s.f("tags", "framework", "rails", "responses_2xx"), Equals, float64(0))
c.Assert(s.f("tags", "framework", "rails", "responses_4xx"), Equals, float64(2))
}
+
+func (s *VarzSuite) TestUpdateResponseLatency(c *C) {
+ var b Backend
+ var d = 1 * time.Millisecond
+
+ r := &http.Response{
+ StatusCode: http.StatusOK,
+ }
+
+ s.CaptureBackendResponse(b, r, d)
+
+ c.Assert(s.f("all", "latency", "50").(float64), Equals, float64(d)/float64(time.Second))
+ c.Assert(s.f("all", "latency", "75").(float64), Equals, float64(d)/float64(time.Second))
+ c.Assert(s.f("all", "latency", "90").(float64), Equals, float64(d)/float64(time.Second))
+ c.Assert(s.f("all", "latency", "95").(float64), Equals, float64(d)/float64(time.Second))
+ c.Assert(s.f("all", "latency", "99").(float64), Equals, float64(d)/float64(time.Second))
+} | Export latency metrics in seconds
Change-Id: Iaf<I>a<I>b<I>f<I>b6e9da<I>cc3fddeea0 | cloudfoundry_gorouter | train |
4bb8a37e75a206fc1290472f9055789b07143477 | diff --git a/structr-ui/src/main/resources/structr/js/schema.js b/structr-ui/src/main/resources/structr/js/schema.js
index <HASH>..<HASH> 100644
--- a/structr-ui/src/main/resources/structr/js/schema.js
+++ b/structr-ui/src/main/resources/structr/js/schema.js
@@ -885,7 +885,7 @@ var _Schema = {
var selectRelationshipOptions = function (rel) {
$('#source-type-name').text(nodes[rel.sourceId].name);
$('#source-multiplicity-selector').val(rel.sourceMultiplicity || '*');
- $('#relationship-type-name').val(rel.relationshipType === initialRelType ? ' ' : rel.relationshipType);
+ $('#relationship-type-name').val(rel.relationshipType === initialRelType ? '' : rel.relationshipType);
$('#target-multiplicity-selector').val(rel.targetMultiplicity || '*');
$('#target-type-name').text(nodes[rel.targetId].name);
@@ -922,7 +922,6 @@ var _Schema = {
sourceMultiplicity: $('#source-multiplicity-selector').val(),
relationshipType: $('#relationship-type-name').val(),
targetMultiplicity: $('#target-multiplicity-selector').val(),
-
cascadingDeleteFlag: parseInt($('#cascading-delete-selector').val()),
autocreationFlag: parseInt($('#autocreate-selector').val()),
permissionPropagation: $('#propagation-selector').val(),
@@ -934,8 +933,14 @@ var _Schema = {
};
Object.keys(newData).forEach(function (key) {
- if ( (entity[key] === newData[key]) || (key === 'cascadingDeleteFlag' && !(entity[key]) && newData[key] === 0) ||
- (key === 'autocreationFlag' && !(entity[key]) && newData[key] === 0) || (key === 'propertyMask' && !(entity[key]) && newData[key].trim() === '') ) {
+ if (key === 'relationshipType' && newData[key].trim() === '') {
+ newData[key] = initialRelType;
+ }
+ if ( (entity[key] === newData[key])
+ || (key === 'cascadingDeleteFlag' && !(entity[key]) && newData[key] === 0)
+ || (key === 'autocreationFlag' && !(entity[key]) && newData[key] === 0)
+ || (key === 'propertyMask' && !(entity[key]) && newData[key].trim() === '')
+ ) {
delete newData[key];
}
}); | remove superfluous ` ` in schema relationship edit dialog. Also added functionality to reset the relationship type to the initial type if none is given | structr_structr | train |
15a48f0ea75962f6e82c9032f387b8c91cdc02de | diff --git a/lib/DB/dsql.php b/lib/DB/dsql.php
index <HASH>..<HASH> 100644
--- a/lib/DB/dsql.php
+++ b/lib/DB/dsql.php
@@ -455,6 +455,7 @@ class DB_dsql extends AbstractModel implements Iterator
* $q->table(array('user','salary'));
* $q->table(array('user','salary'),'user');
* $q->table(array('u'=>'user','s'=>'salary'));
+ * $q->table($q2->table('user')->where('active',1), 'active_users');
*
* If you specify multiple tables, you still need to make sure to add
* proper "where" conditions. All the above examples return $q (for chaining)
@@ -467,8 +468,8 @@ class DB_dsql extends AbstractModel implements Iterator
* Please avoid using table() without arguments as more tables may be
* dynamically added later.
*
- * @param string $table Specify table to use
- * @param string $alias Specify alias for the table
+ * @param string|DB_dsql $table Specify table to use or DSQL to use as derived table
+ * @param string $alias Specify alias for the table, if $table is DSQL, then alias is mandatory
*
* @return $this|string
*/
@@ -496,6 +497,11 @@ class DB_dsql extends AbstractModel implements Iterator
$this->main_table = false; // query from multiple tables
}
+ // if $table is DSQL, then alias is mandatory
+ if ($table instanceof DB_dsql && ($alias === UNDEFINED || !$alias)) {
+ throw $this->exception('If table is passed as DSQL, then table alias is mandatory!');
+ }
+
$this->args['table'][] = array($table, $alias);
return $this;
@@ -516,7 +522,22 @@ class DB_dsql extends AbstractModel implements Iterator
foreach ($this->args['table'] as $row) {
list($table, $alias) = $row;
- $table = $this->bt($table);
+ if (is_string($table)) {
+ // table name passed as string
+ $table = $this->bt($table);
+
+ } elseif ($table instanceof DB_dsql) {
+ // table passed as DSQL expression
+
+ // remove SQL_CALC_FOUND_ROWS from subquery
+ $i = @array_search('SQL_CALC_FOUND_ROWS', $table->args['options']);
+ if ($i !== false) {
+ unset($table->args['options'][$i]);
+ }
+
+ // consume subquery
+ $table = $this->consume($table);
+ }
if ($alias !== UNDEFINED && $alias) {
$table .= ' '.$this->bt($alias); | Feature: allow DSQL->table() to set table as sub-select (dsql object)
In <I> this was implemented a long time ago: <URL> | atk4_atk4 | train |
d12d11e79790199f1df64173e804dd2e89d7e64c | diff --git a/moment-timezone.js b/moment-timezone.js
index <HASH>..<HASH> 100644
--- a/moment-timezone.js
+++ b/moment-timezone.js
@@ -330,7 +330,7 @@
// use Intl API when available and returning valid time zone
try {
var intlName = Intl.DateTimeFormat().resolvedOptions().timeZone;
- if (intlName){
+ if (intlName && intlName.length > 3) {
var name = names[normalizeName(intlName)];
if (name) {
return name; | Ensure Intl response is valid
Fixes #<I> | moment_moment-timezone | train |
9e787db1b108941edab18209a7468e6c555002ce | diff --git a/cgroups/systemd/apply_systemd.go b/cgroups/systemd/apply_systemd.go
index <HASH>..<HASH> 100644
--- a/cgroups/systemd/apply_systemd.go
+++ b/cgroups/systemd/apply_systemd.go
@@ -43,6 +43,10 @@ var subsystems = map[string]subsystem{
"freezer": &fs.FreezerGroup{},
}
+const (
+ testScopeWait = 4
+)
+
var (
connLock sync.Mutex
theConn *systemd.Conn
@@ -86,16 +90,41 @@ func UseSystemd() bool {
}
}
+ // Ensure the scope name we use doesn't exist. Use the Pid to
+ // avoid collisions between multiple libcontainer users on a
+ // single host.
+ scope := fmt.Sprintf("libcontainer-%d-systemd-test-default-dependencies.scope", os.Getpid())
+ testScopeExists := true
+ for i := 0; i <= testScopeWait; i++ {
+ if _, err := theConn.StopUnit(scope, "replace"); err != nil {
+ if dbusError, ok := err.(dbus.Error); ok {
+ if strings.Contains(dbusError.Name, "org.freedesktop.systemd1.NoSuchUnit") {
+ testScopeExists = false
+ break
+ }
+ }
+ }
+ time.Sleep(time.Millisecond)
+ }
+
+ // Bail out if we can't kill this scope without testing for DefaultDependencies
+ if testScopeExists {
+ return hasStartTransientUnit
+ }
+
// Assume StartTransientUnit on a scope allows DefaultDependencies
hasTransientDefaultDependencies = true
ddf := newProp("DefaultDependencies", false)
- if _, err := theConn.StartTransientUnit("docker-systemd-test-default-dependencies.scope", "replace", ddf); err != nil {
+ if _, err := theConn.StartTransientUnit(scope, "replace", ddf); err != nil {
if dbusError, ok := err.(dbus.Error); ok {
if strings.Contains(dbusError.Name, "org.freedesktop.DBus.Error.PropertyReadOnly") {
hasTransientDefaultDependencies = false
}
}
}
+
+ // Not critical because of the stop unit logic above.
+ theConn.StopUnit(scope, "replace")
}
return hasStartTransientUnit
} | cgroups: systemd: attempt to stop test scope, if any
As reported in #<I> the test scope may not be cleaned up between runs.
In order to fix this we must be polite and remove the scope after we
have done our test and attempt to remove an existing scope if it exists.
This way we can guarantee our test will run. | opencontainers_runc | train |
4775c4233dd35d4ec667bcdede319ea0b72fc560 | diff --git a/format_checkers.go b/format_checkers.go
index <HASH>..<HASH> 100644
--- a/format_checkers.go
+++ b/format_checkers.go
@@ -154,7 +154,6 @@ func (c *FormatCheckerChain) Has(name string) bool {
// to see if it is the correct format
func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool {
f, ok := c.formatters[name]
-
if !ok {
return false
}
@@ -163,22 +162,19 @@ func (c *FormatCheckerChain) IsFormat(name string, input interface{}) bool {
}
func (f EmailFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
_, err := mail.ParseAddress(asString)
-
return err == nil
}
// Credit: https://github.com/asaskevich/govalidator
func (f IPV4FormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -188,9 +184,8 @@ func (f IPV4FormatChecker) IsFormat(input interface{}) bool {
// Credit: https://github.com/asaskevich/govalidator
func (f IPV6FormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -199,9 +194,8 @@ func (f IPV6FormatChecker) IsFormat(input interface{}) bool {
}
func (f DateTimeFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -224,7 +218,7 @@ func (f DateTimeFormatChecker) IsFormat(input interface{}) bool {
func (f DateFormatChecker) IsFormat(input interface{}) bool {
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
_, err := time.Parse("2006-01-02", asString)
@@ -233,7 +227,7 @@ func (f DateFormatChecker) IsFormat(input interface{}) bool {
func (f TimeFormatChecker) IsFormat(input interface{}) bool {
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -246,9 +240,8 @@ func (f TimeFormatChecker) IsFormat(input interface{}) bool {
}
func (f URIFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -262,9 +255,8 @@ func (f URIFormatChecker) IsFormat(input interface{}) bool {
}
func (f URIReferenceFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -274,7 +266,7 @@ func (f URIReferenceFormatChecker) IsFormat(input interface{}) bool {
func (f URITemplateFormatChecker) IsFormat(input interface{}) bool {
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -287,9 +279,8 @@ func (f URITemplateFormatChecker) IsFormat(input interface{}) bool {
}
func (f HostnameFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -297,9 +288,8 @@ func (f HostnameFormatChecker) IsFormat(input interface{}) bool {
}
func (f UUIDFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -308,9 +298,8 @@ func (f UUIDFormatChecker) IsFormat(input interface{}) bool {
// IsFormat implements FormatChecker interface.
func (f RegexFormatChecker) IsFormat(input interface{}) bool {
-
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -318,15 +307,12 @@ func (f RegexFormatChecker) IsFormat(input interface{}) bool {
return true
}
_, err := regexp.Compile(asString)
- if err != nil {
- return false
- }
- return true
+ return err == nil
}
func (f JSONPointerFormatChecker) IsFormat(input interface{}) bool {
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
}
@@ -335,7 +321,7 @@ func (f JSONPointerFormatChecker) IsFormat(input interface{}) bool {
func (f RelativeJSONPointerFormatChecker) IsFormat(input interface{}) bool {
asString, ok := input.(string)
- if ok == false {
+ if !ok {
return false
} | More idiomatic checks, and some minor reformatting | xeipuuv_gojsonschema | train |
1c57bb6604e50d19bdd9a5cab219aa0c770a1277 | diff --git a/library/Controller/Page.php b/library/Controller/Page.php
index <HASH>..<HASH> 100644
--- a/library/Controller/Page.php
+++ b/library/Controller/Page.php
@@ -2,24 +2,9 @@
namespace Municipio\Controller;
-class Page extends \Municipio\Controller\BaseController
+class Page extends \Municipio\Controller\Singular
{
public function init()
{
- global $post;
- $this->data['comments'] = get_comments(array(
- 'post_id' => $post->ID,
- 'order' => get_option('comment_order')
- ));
- $this->data['replyArgs'] = array(
- 'add_below' => 'comment',
- 'respond_id' => 'respond',
- 'reply_text' => __('Reply'),
- 'login_text' => __('Log in to Reply'),
- 'depth' => 1,
- 'before' => '',
- 'after' => '',
- 'max_depth' => get_option('thread_comments_depth')
- );
}
}
diff --git a/library/Controller/Singular.php b/library/Controller/Singular.php
index <HASH>..<HASH> 100644
--- a/library/Controller/Singular.php
+++ b/library/Controller/Singular.php
@@ -6,11 +6,17 @@ class Singular extends \Municipio\Controller\BaseController
{
public function init()
{
- global $post;
+ //Get post data
+ $this->data['post'] = get_post();
+ $this->data['post']->permalink = get_permalink($this->data['post']);
+
+ //Comments
$this->data['comments'] = get_comments(array(
- 'post_id' => $post->ID,
+ 'post_id' => $this->data['post']->ID,
'order' => get_option('comment_order')
));
+
+ //Replies
$this->data['replyArgs'] = array(
'add_below' => 'comment',
'respond_id' => 'respond',
@@ -21,11 +27,12 @@ class Singular extends \Municipio\Controller\BaseController
'after' => '',
'max_depth' => get_option('thread_comments_depth')
);
- $this->data['settingItems'] = apply_filters('Municipio/blog/post_settings', array(), $post);
- if (defined('MUNICIPIO_BLOCK_AUTHOR_PAGES') && ! MUNICIPIO_BLOCK_AUTHOR_PAGES) {
- $this->data['authorPages'] = true;
- }
+ //Post settings
+ $this->data['settingItems'] = apply_filters_deprecated('Municipio/blog/post_settings', array($this->data['post']), '3.0', 'Municipio/blog/postSettings');
+
+ //Should link author page
+ $this->data['authorPages'] = apply_filters('Municipio/author/hasAuthorPage', false);
}
/**
diff --git a/views/v3/templates/single.blade.php b/views/v3/templates/single.blade.php
index <HASH>..<HASH> 100644
--- a/views/v3/templates/single.blade.php
+++ b/views/v3/templates/single.blade.php
@@ -23,7 +23,7 @@
@while(have_posts())
{!! the_post() !!}
@section('loop')
- @include('partials.article')
+ @include('partials.article', $post)
@show
@endwhile | Update singular controller to embed post object.
Removes global var $post | helsingborg-stad_Municipio | train |
c60915c4c7936e267ea80bcee0701e46654a21a6 | diff --git a/session.go b/session.go
index <HASH>..<HASH> 100644
--- a/session.go
+++ b/session.go
@@ -161,6 +161,9 @@ func (q *Query) Iter() *Iter {
// were selected, ErrNotFound is returned.
func (q *Query) Scan(dest ...interface{}) error {
iter := q.Iter()
+ if len(iter.rows) == 0 {
+ return ErrNotFound
+ }
iter.Scan(dest...)
return iter.Close()
} | Return ErrNotFound when no row matches the query | gocql_gocql | train |
603aa61b89890d2d8bfcd71db089e5d7d8070bbc | diff --git a/sharding-core/src/main/java/io/shardingsphere/api/config/KeyGeneratorConfiguration.java b/sharding-core/src/main/java/io/shardingsphere/api/config/KeyGeneratorConfiguration.java
index <HASH>..<HASH> 100644
--- a/sharding-core/src/main/java/io/shardingsphere/api/config/KeyGeneratorConfiguration.java
+++ b/sharding-core/src/main/java/io/shardingsphere/api/config/KeyGeneratorConfiguration.java
@@ -17,9 +17,7 @@
package io.shardingsphere.api.config;
-import io.shardingsphere.core.exception.ShardingConfigurationException;
import io.shardingsphere.core.keygen.KeyGeneratorFactory;
-import io.shardingsphere.core.keygen.KeyGeneratorType;
import io.shardingsphere.core.keygen.generator.KeyGenerator;
import lombok.AllArgsConstructor;
import lombok.Getter;
@@ -55,17 +53,4 @@ public final class KeyGeneratorConfiguration {
result.setProperties(props);
return result;
}
-
- private String getKeyGeneratorClassName() {
- if (type.equalsIgnoreCase(KeyGeneratorType.SNOWFLAKE.name())) {
- return KeyGeneratorType.SNOWFLAKE.getKeyGeneratorClassName();
- }
- if (type.equalsIgnoreCase(KeyGeneratorType.UUID.name())) {
- return KeyGeneratorType.UUID.getKeyGeneratorClassName();
- }
- if (type.equalsIgnoreCase(KeyGeneratorType.LEAF.name())) {
- return KeyGeneratorType.LEAF.getKeyGeneratorClassName();
- }
- throw new ShardingConfigurationException("Invalid key generator type.");
- }
} | delete getKeyGeneratorClassName() | apache_incubator-shardingsphere | train |
8ab677002ac9649a67501d66f010d8af8dbcba2d | diff --git a/cogen/core/pollers.py b/cogen/core/pollers.py
index <HASH>..<HASH> 100644
--- a/cogen/core/pollers.py
+++ b/cogen/core/pollers.py
@@ -416,23 +416,27 @@ class IOCPPoller(Poller):
for op in self.registered_ops:
if self.registered_ops[op].object[1] is testcoro:
return op
-
+
+ def remove(self, op, coro):
+ #~ warnings.warn('Removing op', stacklevel=3)
+ if op in self.registered_ops:
+ win32file.CloseHandle(self.registered_ops[op].hEvent)
+ del self.registered_ops[op]
+
def run(self, timeout = 0):
# same resolution as epoll
ptimeout = int(timeout.microseconds/1000+timeout.seconds*1000
if timeout else (self.mRESOLUTION if timeout is None else 0))
if self.registered_ops:
- rc, nbytes, key, overlap = win32file.GetQueuedCompletionStatus(
- self.iocp,
- #win32event.INFINITE
- ptimeout
- )
-
+ try:
+ rc, nbytes, key, overlap = win32file.GetQueuedCompletionStatus(
+ self.iocp,
+ ptimeout
+ )
+ except Exception, e:
+ warnings.warn(e)
if overlap:
- #~ print '---', rc, nbytes, key, overlap
-
op, coro = overlap.object
- #~ del ... self.op.sock._fd.fileno()
op.iocp_done(rc, nbytes, key, overlap)
if rc == 0:
prev_op = op
@@ -456,7 +460,21 @@ class IOCPPoller(Poller):
prev_op.iocp(overlap)
del overlap
else:
+ #~ if rc==64: # ERROR_NETNAME_DELETED, need to reopen the accept sock ?!
+ #~ warnings.warn("ERROR_NETNAME_DELETED", stacklevel=3)
+ #~ return
+ del self.registered_ops[op]
+ del overlap
+
warnings.warn("%s on %s/%s" % (ctypes.FormatError(rc), op, coro))
+ self.scheduler.active.append((
+ events.CoroutineException((
+ events.ConnectionError, events.ConnectionError(
+ "%s:%s on %s" % (rc, ctypes.FormatError(rc), op)
+ )
+ )),
+ coro
+ ))
else:
time.sleep(self.RESOLUTION)
diff --git a/cogen/core/sockets.py b/cogen/core/sockets.py
index <HASH>..<HASH> 100644
--- a/cogen/core/sockets.py
+++ b/cogen/core/sockets.py
@@ -196,7 +196,7 @@ class ReadOperation(SocketOperation):
self.iocp_buff = win32file.AllocateReadBuffer(
self.len-self.sock._rl_list_sz
)
- rc, sz = win32file.WSARecv(self.sock, self.iocp_buff, overlap, 0)
+ rc, sz = win32file.WSARecv(self.sock._fd, self.iocp_buff, overlap, 0)
def iocp_done(self, rc, nbytes, key, overlap):
self.temp_buff = self.iocp_buff[:nbytes]
@@ -204,7 +204,7 @@ class ReadOperation(SocketOperation):
class WriteOperation(SocketOperation):
__slots__ = ['sent']
def iocp(self, overlap):
- rc, sz = win32file.WSASend(self.sock, self.buff, overlap, 0)
+ rc, sz = win32file.WSASend(self.sock._fd, self.buff, overlap, 0)
def iocp_done(self, rc, nbytes, key, overlap):
self.sent += nbytes
@@ -254,15 +254,39 @@ class SendFile(WriteOperation):
self.file_handle.seek(offset)
sent = self.sock._fd.send(self.file_handle.read(length))
return sent
+
+ def iocp_send(self, offset, length, overlap):
+ self.file_handle.seek(offset)
+ win32file.WSASend(self.sock._fd, self.file_handle.read(length), overlap, 0)
+
+ def iocp(self, overlap):
+ if self.length:
+ if self.blocksize:
+ self.iocp_send(
+ self.offset + self.sent,
+ min(self.length-self.sent, self.blocksize),
+ overlap
+ )
+ else:
+ self.iocp_send(self.offset+self.sent, self.length-self.sent, overlap)
+ else:
+ self.iocp_send(self.offset+self.sent, self.blocksize, overlap)
+
+ def iocp_done(self, rc, nbytes, key, overlap):
+ self.sent += nbytes
+
def run(self, reactor=True):
+ assert self.sent <= self.length
+ if self.sent == self.length:
+ return self
+
if self.length:
if self.blocksize:
self.sent += self.send(
self.offset + self.sent,
- min(self.length, self.blocksize)
+ min(self.length-self.sent, self.blocksize)
)
else:
- self.sent += self.send(self.offset+self.sent, self.length)
+ self.sent += self.send(self.offset+self.sent, self.length-self.sent)
if self.sent == self.length:
return self
else:
@@ -270,9 +294,12 @@ class SendFile(WriteOperation):
sent = self.send(self.offset+self.sent, self.blocksize)
else:
sent = self.send(self.offset+self.sent, self.blocksize)
+ # we would use self.length but we don't have any,
+ # and we don't know the file's length
self.sent += sent
if not sent:
return self
+
def __repr__(self):
return "<%s at 0x%X %s fh:%s offset:%r len:%s bsz:%s to:%s>" % (
self.__class__.__name__, | made sendfile wrapper work with iocp;
made iocp poller more robust | ionelmc_python-cogen | train |
201ebdd19035028ac66f60318354291fc8554a09 | diff --git a/lib/centurion/docker_registry.rb b/lib/centurion/docker_registry.rb
index <HASH>..<HASH> 100644
--- a/lib/centurion/docker_registry.rb
+++ b/lib/centurion/docker_registry.rb
@@ -39,15 +39,16 @@ class Centurion::DockerRegistry
uri = uri_for_repository_path(repository, path)
$stderr.puts "GET: #{uri.inspect}"
- options = {}
+
+ # Need to work around a bug in Docker Hub by not passing the default port in the Host header
+ options = { omit_default_port: true }
+
if @user
options[:user] = @user
options[:password] = @password
end
- response = Excon.get(
- uri,
- options
- )
+
+ response = Excon.get(uri, options)
raise response.inspect unless response.status == 200
tags = JSON.load(response.body) | Manually re-merge conflicted changes. | newrelic_centurion | train |
39797da0dfdf1dde97c190d154c57797bb76fcd2 | diff --git a/lib/ast/binary_send.rb b/lib/ast/binary_send.rb
index <HASH>..<HASH> 100644
--- a/lib/ast/binary_send.rb
+++ b/lib/ast/binary_send.rb
@@ -30,11 +30,13 @@ module Atomy
def bytecode(g)
pos(g)
@lhs.compile(g)
+ g.push_literal message_name.to_sym unless @namespace == "_"
@rhs.compile(g)
if @namespace == "_"
g.send @operator.to_sym, 1
else
- g.call_custom method_name.to_sym, 1
+ g.send :atomy_send, 2
+ #g.call_custom method_name.to_sym, 1
end
end
end
diff --git a/lib/ast/send.rb b/lib/ast/send.rb
index <HASH>..<HASH> 100644
--- a/lib/ast/send.rb
+++ b/lib/ast/send.rb
@@ -75,8 +75,8 @@ module Atomy
if @namespace == "_"
g.send_with_splat @method_name.to_sym, args, @private
else
- #g.call_custom_with_splat message_name.to_sym, args
g.send_with_splat :atomy_send, args + 1
+ #g.call_custom_with_splat message_name.to_sym, args
end
elsif block
block.compile(g)
diff --git a/lib/ast/unary.rb b/lib/ast/unary.rb
index <HASH>..<HASH> 100644
--- a/lib/ast/unary.rb
+++ b/lib/ast/unary.rb
@@ -30,7 +30,9 @@ module Atomy
if @namespace == "_"
g.send @operator.to_sym, 0
else
- g.call_custom method_name.to_sym, 0
+ g.push_literal message_name.to_sym
+ g.send :atomy_send, 1
+ #g.call_custom method_name.to_sym, 0
end
end
diff --git a/lib/ast/variable.rb b/lib/ast/variable.rb
index <HASH>..<HASH> 100644
--- a/lib/ast/variable.rb
+++ b/lib/ast/variable.rb
@@ -30,7 +30,9 @@ module Atomy
var.get_bytecode(g)
else
g.push_self
- g.call_custom message_name.to_sym, 0
+ g.push_literal message_name.to_sym
+ g.send :atomy_send, 1
+ #g.call_custom message_name.to_sym, 0
end
end | replace the rest of the call_custom usage | vito_atomy | train |
bf14f464003391977116a2fd4f1b91dca4a317ce | diff --git a/girc/client.py b/girc/client.py
index <HASH>..<HASH> 100755
--- a/girc/client.py
+++ b/girc/client.py
@@ -119,7 +119,7 @@ class ServerConnection(asyncio.Protocol):
self._imaps.append(new_string)
return new_string
- def ilist(self, in_list={}):
+ def ilist(self, in_list=[]):
new_list = IList(in_list)
new_list.set_std(self.features.get('casemapping'))
if not self._casemap_set:
diff --git a/girc/imapping.py b/girc/imapping.py
index <HASH>..<HASH> 100644
--- a/girc/imapping.py
+++ b/girc/imapping.py
@@ -76,10 +76,10 @@ class IDict(collections.MutableMapping, IMap):
class IList(collections.MutableSequence, IMap):
"""Case-insensitive IRC list, based on IRC casemapping standards."""
- def __init__(self, data=[], *args, **kwargs):
+ def __init__(self, data=[], *args):
self.store = list()
- self.update(data)
- self.update(dict(*args, **kwargs)) # use the free update to set keys
+ self.extend(data)
+ self.extend(dict(*args))
@property
def json(self):
@@ -92,13 +92,22 @@ class IList(collections.MutableSequence, IMap):
# XXX - could also simply make them IStrings
# or do some more complex processing on them below...
if isinstance(value, str) and self._lower_trans is not None:
- value = value.translate(self._lower_trans)
- return value.lower()
+ value = value.translate(self._lower_trans).lower()
+ return value
+
+ def __getitem__(self, index):
+ return self.store[index]
def __setitem__(self, index, value):
value = self.__valuetransform__(value)
self.store[index] = value
+ def __delitem__(self, index):
+ del self.store[index]
+
+ def __len__(self):
+ return len(self.store)
+
def append(self, value):
value = self.__valuetransform__(value)
self.store.append(value)
diff --git a/girc/info.py b/girc/info.py
index <HASH>..<HASH> 100644
--- a/girc/info.py
+++ b/girc/info.py
@@ -19,11 +19,17 @@ class Info:
def update_info(self, info):
if info['verb'] == 'join':
user = NickMask(info['source'])
+ channels = info['params'][0].split(',')
self.create_user(info['source'])
- self.create_channels(*info['params'][0].split(','))
+ self.create_channels(*channels)
- self.users[user.nick]['channels']
+ for chan in channels:
+ if chan not in self.users[user.nick]['channels']:
+ self.users[user.nick]['channels'].append(chan)
+
+ if user.nick not in self.channels[chan]['users']:
+ self.channels[chan]['users'][user.nick] = {}
elif info['verb'] in ['privmsg', 'pubmsg']:
from pprint import pprint
pprint(self.json)
@@ -33,7 +39,7 @@ class Info:
if user.nick not in self.users:
self.users[user.nick] = {
- 'channels': [], # ilist?
+ 'channels': self.s.ilist(),
'modes': {},
}
@@ -48,7 +54,9 @@ class Info:
def create_channels(self, *channels):
for channel in channels:
if channel not in self.channels:
- self.channels[channel] = {}
+ self.channels[channel] = {
+ 'users': self.s.idict(),
+ }
@property
def json(self): | Fix errors with IList and IMaps | goshuirc_irc | train |
e094d9c4be602b9c5794b947b037c6c081f4d4d1 | diff --git a/examples/src/test/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java b/examples/src/test/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java
index <HASH>..<HASH> 100644
--- a/examples/src/test/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java
+++ b/examples/src/test/java/com/carrotsearch/examples/randomizedrunner/Test001SimpleUseCase.java
@@ -19,8 +19,8 @@ import com.carrotsearch.randomizedtesting.RandomizedRunner;
*/
@RunWith(RandomizedRunner.class)
public class Test001SimpleUseCase {
- @Before
- public void before() {
+ @Before @SuppressWarnings("unused")
+ private void before() {
// Ha! This won't work under the default JUnit runner.
} | Corrected the example with a private hook method. | randomizedtesting_randomizedtesting | train |
a7294bd3c94ecf3d97bb639e20f75ff29003ca14 | diff --git a/src/results.py b/src/results.py
index <HASH>..<HASH> 100644
--- a/src/results.py
+++ b/src/results.py
@@ -43,6 +43,10 @@ def format(exp_paths,
for item in zip([128,256,512,1024,2048], test_ters):
print("(%d, %f)" % item)
+def filter_labels(sent, labels):
+ """ Returns only the tokens present in the sentence that are in labels."""
+ return [tok for tok in sent if tok in labels]
+
def test_results(exp_path, phones, tones):
""" Gets results of the model on the test set. """
@@ -51,53 +55,26 @@ def test_results(exp_path, phones, tones):
line = test_f.readlines()[0]
test_ler = float(line.split()[2].strip(","))
- test_per = phones_only_error_rate(os.path.join(test_path, "hyps"),
+ test_per = filtered_error_rate(os.path.join(test_path, "hyps"),
os.path.join(test_path, "refs"),
phones)
- test_ter = tones_only_error_rate(os.path.join(test_path, "hyps"),
+ test_ter = filtered_error_rate(os.path.join(test_path, "hyps"),
os.path.join(test_path, "refs"),
tones)
return test_ler, test_per, test_ter
-def phones_only_error_rate(hyps_path, refs_path, phones):
-
- def phones_only(sent):
- """ Returns only the Na phones present in the sentence."""
- return [phone for phone in sent if phone in phones]
-
- with open(hyps_path) as hyps_f:
- lines = hyps_f.readlines()
- hyps = [phones_only(line.split()) for line in lines]
- with open(refs_path) as refs_f:
- lines = refs_f.readlines()
- refs = [phones_only(line.split()) for line in lines]
-
- # For the case where there are no tones (the experiment was phones only).
- only_empty = True
- for entry in hyps:
- if entry != []:
- only_empty = False
- if only_empty:
- return -1
-
- return utils.batch_per(hyps, refs)
-
-def tones_only_error_rate(hyps_path, refs_path, tones):
-
- def tones_only(sent):
- """ Returns only the Na tones present in the sentence."""
- return [tone for tone in sent if tone in tones]
+def filtered_error_rate(hyps_path, refs_path, labels):
with open(hyps_path) as hyps_f:
lines = hyps_f.readlines()
- hyps = [tones_only(line.split()) for line in lines]
+ hyps = [filter_labels(line.split(), labels) for line in lines]
with open(refs_path) as refs_f:
lines = refs_f.readlines()
- refs = [tones_only(line.split()) for line in lines]
+ refs = [filter_labels(line.split(), labels) for line in lines]
- # For the case where there are no tones (the experiment was phones only).
+ # For the case where there are no tokens left after filtering.
only_empty = True
for entry in hyps:
if entry != []: | Refactored results.py | persephone-tools_persephone | train |
b478428d37fc4da42f5f771acbb3376010a351ec | diff --git a/lib/vcr/request_matcher_registry.rb b/lib/vcr/request_matcher_registry.rb
index <HASH>..<HASH> 100644
--- a/lib/vcr/request_matcher_registry.rb
+++ b/lib/vcr/request_matcher_registry.rb
@@ -16,7 +16,7 @@ module VCR
end
def register(name, &block)
- if @registry[name]
+ if @registry.has_key?(name)
warn "WARNING: There is already a VCR request matcher registered for #{name.inspect}. Overriding it."
end | We really only care if the registry has the key, not what it is. | vcr_vcr | train |
abc051d009d49af88b34adecea4009e9ec4cb225 | diff --git a/CHANGELOG.md b/CHANGELOG.md
index <HASH>..<HASH> 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -18,6 +18,7 @@ This project adheres to [Semantic Versioning](http://semver.org/).
- QueryProxy `where` will now look for declared properties matching hash keys. When found, it will send the value through that property's type converter if the type matches the property's unconverted state.
- Improved handling of unpersisted nodes with associations. You can now use `<<` to create associations between unpersisted nodes. A `save` will cascade through unpersisted objects, creating nodes and rels along the way. See https://github.com/neo4jrb/neo4j/pull/871
- Support formatted cypher queries for easy reading by humans via the `pretty_logged_cypher_queries` configuration variable
+- Ability to query for just IDs on associations
## [5.0.5] - 2015-07-19
diff --git a/lib/neo4j/active_node/has_n.rb b/lib/neo4j/active_node/has_n.rb
index <HASH>..<HASH> 100644
--- a/lib/neo4j/active_node/has_n.rb
+++ b/lib/neo4j/active_node/has_n.rb
@@ -292,6 +292,8 @@ module Neo4j::ActiveNode
define_has_many_setter(name)
+ define_has_many_id_methods(name)
+
define_class_method(name) do |node = nil, rel = nil, options = {}|
association_proxy(name, {node: node, rel: rel, labels: options[:labels]}.merge!(options))
end
@@ -305,16 +307,38 @@ module Neo4j::ActiveNode
end
end
+ def define_has_many_id_methods(name)
+ define_method("#{name.to_s.singularize}_ids") do
+ association_proxy(name).pluck(:uuid)
+ end
+
+ define_method("#{name.to_s.singularize}_neo_ids") do
+ association_proxy(name).pluck(:neo_id)
+ end
+ end
+
def define_has_one_methods(name)
define_has_one_getter(name)
define_has_one_setter(name)
+ define_has_one_id_methods(name)
+
define_class_method(name) do |node = nil, rel = nil, options = {}|
association_proxy(name, {node: node, rel: rel, labels: options[:labels]}.merge!(options))
end
end
+ def define_has_one_id_methods(name)
+ define_method("#{name}_id") do
+ association_proxy(name).pluck(:uuid).first
+ end
+
+ define_method("#{name}_neo_id") do
+ association_proxy(name).pluck(:neo_id).first
+ end
+ end
+
def define_has_one_getter(name)
define_method(name) do |node = nil, rel = nil, options = {}|
return nil unless self._persisted_obj
diff --git a/spec/e2e/has_many_spec.rb b/spec/e2e/has_many_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/e2e/has_many_spec.rb
+++ b/spec/e2e/has_many_spec.rb
@@ -539,4 +539,24 @@ describe 'has_many' do
end
end
end
+ describe 'id methods' do
+ before(:each) do
+ stub_active_node_class('Post') do
+ has_many :in, :comments, type: :COMMENTS_ON
+ end
+
+ stub_active_node_class('Comment') do
+ has_one :out, :post, type: :COMMENTS_ON
+ end
+ end
+
+ let(:post) { Post.create }
+ let(:comment) { Comment.create }
+ before(:each) { comment.post = post }
+
+ it 'returns various IDs for associations' do
+ expect(post.comment_ids).to eq([comment.id])
+ expect(post.comment_neo_ids).to eq([comment.neo_id])
+ end
+ end
end
diff --git a/spec/e2e/has_one_spec.rb b/spec/e2e/has_one_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/e2e/has_one_spec.rb
+++ b/spec/e2e/has_one_spec.rb
@@ -200,4 +200,25 @@ describe 'has_one' do
expect(node1.failing_assoc).to be_nil
end
end
+
+ describe 'id methods' do
+ before(:each) do
+ stub_active_node_class('Post') do
+ has_many :in, :comments, type: :COMMENTS_ON
+ end
+
+ stub_active_node_class('Comment') do
+ has_one :out, :post, type: :COMMENTS_ON
+ end
+ end
+
+ let(:post) { Post.create }
+ let(:comment) { Comment.create }
+ before(:each) { comment.post = post }
+
+ it 'returns various IDs for associations' do
+ expect(comment.post_id).to eq(post.id)
+ expect(comment.post_neo_id).to eq(post.neo_id)
+ end
+ end
end | Add methods to make it easy to get IDs for associations. Might be good if these eventually supported checking to see if the data is already loaded so that we don't need to query again. | neo4jrb_neo4j | train |
f5a9939bc95acd4da5077889154f8ada73ecb8a2 | diff --git a/src/Http/InternalRequest.php b/src/Http/InternalRequest.php
index <HASH>..<HASH> 100644
--- a/src/Http/InternalRequest.php
+++ b/src/Http/InternalRequest.php
@@ -4,5 +4,14 @@ namespace Dingo\Api\Http;
class InternalRequest extends Request
{
- //
+ public function __construct(array $query = [], array $request = [], array $attributes = [], array $cookies = [], array $files = [], array $server = [], $content = null)
+ {
+ parent::__construct($query, $request, $attributes, $cookies, $files, $server, $content);
+
+ // Pass parameters inside internal request into Laravel's JSON ParameterBag,
+ // so that they can be accessed using $request->input()
+ if ($this->isJson() && isset($this->request)) {
+ $this->setJson($this->request);
+ }
+ }
} | Fixes a problem whereby parameters passed through internal requests are not available in Laravel's $request->input() | laravie_api | train |
74abdccae55657502c8a6d11709289a4ed4c7de3 | diff --git a/psamm/datasource/native.py b/psamm/datasource/native.py
index <HASH>..<HASH> 100644
--- a/psamm/datasource/native.py
+++ b/psamm/datasource/native.py
@@ -301,11 +301,18 @@ class NativeModel(object):
if reaction.equation is not None:
database.set_reaction(reaction.id, reaction.equation)
+ # Warn about undefined compartments
+ compartments = set()
+ compartments_iter, boundaries = self.parse_compartments()
+ for compartment in compartments_iter:
+ compartments.add(compartment.id)
+
# Warn about undefined compounds
compounds = set()
for compound in self.parse_compounds():
compounds.add(compound.id)
+ undefined_compartments = set()
undefined_compounds = set()
extracellular_compounds = set()
extracellular = self.extracellular_compartment
@@ -315,6 +322,13 @@ class NativeModel(object):
undefined_compounds.add(compound.name)
if compound.compartment == extracellular:
extracellular_compounds.add(compound.name)
+ if compound.compartment not in compartments:
+ undefined_compartments.add(compound.compartment)
+
+ for compartment in sorted(undefined_compartments):
+ logger.warning(
+ 'The compartment {} was not defined in the list'
+ ' of compartments'.format(compartment))
for compound in sorted(undefined_compounds):
logger.warning( | native: Warn about undefined compartments
When creating a metabolic model, warn about compounds in
compartments that have not been specified in the model. | zhanglab_psamm | train |
048ef9326d713b54e5e1dc014bac071a98aee186 | diff --git a/fastlane/lib/fastlane/actions/appium.rb b/fastlane/lib/fastlane/actions/appium.rb
index <HASH>..<HASH> 100644
--- a/fastlane/lib/fastlane/actions/appium.rb
+++ b/fastlane/lib/fastlane/actions/appium.rb
@@ -195,7 +195,7 @@ module Fastlane
versionNumber: "9.1",
deviceName: "iPhone 6"
},
- appium_lib {
+ appium_lib: {
wait: 10
}
)' | missing colon in appium sample code (#<I>) | fastlane_fastlane | train |
6f9038ea1a0b55e4941c446a1d4f37179f44f288 | diff --git a/lib/createMedian.py b/lib/createMedian.py
index <HASH>..<HASH> 100644
--- a/lib/createMedian.py
+++ b/lib/createMedian.py
@@ -183,7 +183,6 @@ def _median(imageObjectList=None,configObj={},saveFiles=True):
#
# END Loop over input image list
#
- print 'readnoise list in createMedian: ',readnoiseList
# create an array for the median output image, use the size of the first image in the list
medianImageArray = np.zeros(singleDrizList[0].shape,dtype=singleDrizList[0].type()) | oops, took out bug check print line in createMedian
git-svn-id: <URL> | spacetelescope_drizzlepac | train |
0e671acc188ce6f3484459e5bfe1f2b42358c35c | diff --git a/spec/support/schema.rb b/spec/support/schema.rb
index <HASH>..<HASH> 100644
--- a/spec/support/schema.rb
+++ b/spec/support/schema.rb
@@ -34,7 +34,11 @@ class Person < ActiveRecord::Base
has_many :children, class_name: 'Person', foreign_key: :parent_id
has_many :articles
if ActiveRecord::VERSION::MAJOR == 3
- has_many :published_articles, conditions: { published: true }, class_name: "Article"
+ if RUBY_VERSION >= '2.3'
+ has_many :published_articles, class_name: "Article", conditions: "published = 't'"
+ else
+ has_many :published_articles, class_name: "Article", conditions: { published: true }
+ end
else
has_many :published_articles, ->{ where(published: true) }, class_name: "Article"
end | Fix specs broken by Rails 3 on Ruby <I>.
With the new Ruby <I>, Active Record 3 association conditions are not
being recognized when using hash syntax instead of SQL string literals.
Using string literals makes the spec work again, but it may be worth
checking whether it is Ransack or Active Record that is breaking with
Ruby <I> on this issue. | activerecord-hackery_ransack | train |
c30d0981d84155a3aa8837dd6995d24ba34b8584 | diff --git a/tests/test-pinyin.py b/tests/test-pinyin.py
index <HASH>..<HASH> 100644
--- a/tests/test-pinyin.py
+++ b/tests/test-pinyin.py
@@ -11,6 +11,8 @@ from zhon import pinyin
NUM_WORDS = 50 # Number of random words to test
WORD_LENGTH = 4 # Length of random words (number of syllables)
+NUM_SENT = 10 # Number of random sentences to test
+SENT_LENGTH = 5 # Length of random sentences (number of words)
VALID_SYLS = ( # 411 total syllables, including 'r'
'ba', 'pa', 'ma', 'fa', 'da', 'ta', 'na', 'la', 'ga', 'ka', 'ha', 'za',
@@ -58,8 +60,9 @@ VALID_SYLS = ( # 411 total syllables, including 'r'
'xuan', 'yuan', 'jun', 'qun', 'xun', 'yun', 'er'
)
-SYL = re.compile(pinyin.syl, re.X | re.I)
+SYL = re.compile(pinyin.syllable, re.X | re.I)
WORD = re.compile(pinyin.word, re.X | re.I)
+SENT = re.compile(pinyin.sentence, re.X | re.I)
VOWELS = 'aeiou\u00FC'
@@ -162,3 +165,25 @@ class TestPinyinWords(unittest.TestCase):
for n in range(0, NUM_WORDS):
word = create_word(accented=True)
self.assertEqual(WORD.match(word).group(0), word)
+
+
+def create_sentence(accented=False):
+ _sent = []
+ for n in range(0, SENT_LENGTH):
+ _sent.append(create_word(accented=accented))
+ sentence = [_sent.pop(0)]
+ sentence.extend([random.choice([' ', ', ', '; ']) + w for w in _sent])
+ return ''.join(sentence) + '.'
+
+
+class TestPinyinSentences(unittest.TestCase):
+
+ def test_number_sentences(self):
+ for n in range(0, NUM_SENT):
+ sentence = create_sentence()
+ self.assertEqual(SENT.match(sentence).group(0), sentence)
+
+ def test_accent_sentences(self):
+ for n in range(0, NUM_SENT):
+ sentence = create_sentence(accented=True)
+ self.assertEqual(SENT.match(sentence).group(0), sentence)
diff --git a/zhon/pinyin.py b/zhon/pinyin.py
index <HASH>..<HASH> 100644
--- a/zhon/pinyin.py
+++ b/zhon/pinyin.py
@@ -17,7 +17,8 @@ out non-Pinyin strings.
"""
from __future__ import unicode_literals
-from string import punctuation, whitespace
+from string import whitespace
+
vowels = (
'aɑeiouüvAEIOUÜV'
@@ -28,7 +29,9 @@ vowels = (
)
consonants = 'bpmfdtnlgkhjqxzcsrzcswyBPMFDTNLGKHJQXZCSRZCSWY'
marks = "·012345:-'"
-printable = vowels + consonants + marks[:-3] + whitespace + punctuation
+non_stops = """"#$%&'()*+,-/:;<=>@[\]^_`{|}~"""
+stops = '.!?'
+printable = vowels + consonants + marks[:-3] + whitespace + stops + non_stops
_a = 'a\u0101\u00E0\u00E1\u01CE'
_e = 'e\u0113\u00E9\u011B\u00E8'
@@ -87,3 +90,7 @@ syl = syllable = """
word = """
(?:%(as)s(?:-(?=%(as)s)|'(?=[%(a)s%(e)s%(o)s])(?=%(as)s))?[0-9]*)+
""" % {'as': syllable, 'a': _a, 'e': _e, 'o': _o}
+
+sent = sentence = """
+ (?:%(word)s|[%(non_stops)s\s])+[.!?]['"\]\}\)]*
+""" % {'word': word, 'non_stops': non_stops} | Adds Pinyin sentence constant. | tsroten_zhon | train |
4c27044bf723ac8414c189ee88abc3701725f1f7 | diff --git a/Entity/Image/AbstractImage.php b/Entity/Image/AbstractImage.php
index <HASH>..<HASH> 100644
--- a/Entity/Image/AbstractImage.php
+++ b/Entity/Image/AbstractImage.php
@@ -52,7 +52,7 @@ abstract class AbstractImage
/**
* @var string
*
- * @ORM\Column(type="string")
+ * @ORM\Column
*
* @Assert\NotBlank(groups={"AdminUpdateProperty"})
*/
@@ -61,14 +61,14 @@ abstract class AbstractImage
/**
* @var string
*
- * @ORM\Column(type="string")
+ * @ORM\Column
*/
private $extension;
/**
* @var string
*
- * @ORM\Column(type="string")
+ * @ORM\Column
*/
private $filename; | Remove default type="string" from mapping annotations. | DarvinStudio_DarvinImageBundle | train |
8648e02f528e21bd9cc426224006991f55b3dec5 | diff --git a/core/src/main/java/com/google/bitcoin/core/TCPNetworkConnection.java b/core/src/main/java/com/google/bitcoin/core/TCPNetworkConnection.java
index <HASH>..<HASH> 100644
--- a/core/src/main/java/com/google/bitcoin/core/TCPNetworkConnection.java
+++ b/core/src/main/java/com/google/bitcoin/core/TCPNetworkConnection.java
@@ -52,11 +52,9 @@ public class TCPNetworkConnection implements NetworkConnection {
private static final Date checksummingProtocolChangeDate = new Date(1329696000000L);
/**
- * Connect to the given IP address using the port specified as part of the network parameters. Once construction
- * is complete a functioning network channel is set up and running.
+ * Construct a network connection with the given params and version. To actually connect to a remote node, call
+ * {@link TCPNetworkConnection#connect(PeerAddress, int)}.
*
- * @param peerAddress address to connect to. IPv6 is not currently supported by BitCoin. If
- * port is not positive the default port from params is used.
* @param params Defines which network to connect to and details of the protocol.
* @param ver The VersionMessage to announce to the other side of the connection.
* @throws IOException if there is a network related failure.
@@ -106,11 +104,11 @@ public class TCPNetworkConnection implements NetworkConnection {
writeMessage(myVersionMessage);
// When connecting, the remote peer sends us a version message with various bits of
// useful data in it. We need to know the peer protocol version before we can talk to it.
- Message m = readMessage();
- if (!(m instanceof VersionMessage)) {
- // Bad peers might not follow the protocol. This has been seen in the wild (issue 81).
- throw new ProtocolException("First message received was not a version message but rather " + m);
- }
+ // There is a bug in Satoshis code such that it can sometimes send us alert messages before version negotiation
+ // has completed. There's no harm in ignoring them (they're meant for Bitcoin-Qt users anyway) so we just cycle
+ // here until we find the right message.
+ Message m;
+ while (!((m = readMessage()) instanceof VersionMessage));
versionMessage = (VersionMessage) m;
// Now it's our turn ...
        // Send an ACK message stating we accept the peers protocol version. | Throw away messages until version negotiation is complete rather than throwing an exception. There's a bug in Satoshi's code (bug <I>) that can cause alerts to be relayed before negotiation finishes. | bitcoinj_bitcoinj | train
04f39527339990bbfa25d9c115eb36fa1d0354ed | diff --git a/packages/blueprint/lib/loader.js b/packages/blueprint/lib/loader.js
index <HASH>..<HASH> 100644
--- a/packages/blueprint/lib/loader.js
+++ b/packages/blueprint/lib/loader.js
@@ -18,22 +18,27 @@ const all = require ('require-all');
const { BO } = require ('base-object');
const _ = require ('lodash');
const path = require ('path');
-const fs = require ('fs');
+const fs = require ('fs').promises;
const assert = require ('assert');
-const {env} = require ('./environment');
+const {env} = require ('./environment');
+const debug = require ('debug') ('blueprint:loader');
function load (opts) {
- return new Promise ((resolve) => {
- fs.stat (opts.dirname, (err, stats) => {
- if (err || !stats.isDirectory ())
- return resolve ({});
+ debug (`loading resources in ${opts.dirname}`);
+
+ return fs.stat (opts.dirname)
+ .then (stats => {
+ if (!stats.isDirectory ())
+ return {};
- // Load all the objects in the directory.
- let objects = all (opts);
+ return all (opts);
+ })
+ .catch (err => {
+ return err && err.code !== 'ENOENT' ? Promise.reject (err) : {};
- resolve (objects);
+ if (stats === undefined || !stats.isDirectory ())
+ return resolve ({});
});
- });
}
/** | chore: switch to fs.promises | onehilltech_blueprint | train
5367907eee578ebce6c30957d58f0c52c8f05c07 | diff --git a/themoviedbapi/src/com/moviejukebox/themoviedb/TheMovieDb.java b/themoviedbapi/src/com/moviejukebox/themoviedb/TheMovieDb.java
index <HASH>..<HASH> 100644
--- a/themoviedbapi/src/com/moviejukebox/themoviedb/TheMovieDb.java
+++ b/themoviedbapi/src/com/moviejukebox/themoviedb/TheMovieDb.java
@@ -98,14 +98,13 @@ public class TheMovieDb {
*/
public MovieDB moviedbSearch(String movieTitle, String language) {
MovieDB movie = null;
- Document doc = null;
-
- language = validateLanguage(language);
-
// If the title is null, then exit
if (movieTitle == null || movieTitle.equals(""))
return movie;
+ Document doc = null;
+ language = validateLanguage(language);
+
try {
String searchUrl = buildSearchUrl("Movie.search", URLEncoder.encode(movieTitle, "UTF-8"), language);
            doc = DOMHelper.getEventDocFromUrl(searchUrl); | Moved parameter declarations after the initial check for null or empty strings | Omertron_api-themoviedb | train
b588dad10d0cd085ad529fbd07a03bc98d6eab0b | diff --git a/lib/verku/version.rb b/lib/verku/version.rb
index <HASH>..<HASH> 100644
--- a/lib/verku/version.rb
+++ b/lib/verku/version.rb
@@ -1,5 +1,5 @@
module Verku
- VERSION = '0.9.0.pre58'
+ VERSION = '0.9.0.pre59'
# module Version
# MAJOR = 0
# MINOR = 9 | Bump to <I>.pre<I> | Merovex_verku | train |
01d0874a70a9d2957a8f96501e72a31b5da63717 | diff --git a/pandas/core/arrays/arrow/_arrow_utils.py b/pandas/core/arrays/arrow/_arrow_utils.py
index <HASH>..<HASH> 100644
--- a/pandas/core/arrays/arrow/_arrow_utils.py
+++ b/pandas/core/arrays/arrow/_arrow_utils.py
@@ -92,7 +92,7 @@ class ArrowPeriodType(pyarrow.ExtensionType):
else:
return NotImplemented
- def __hash__(self):
+ def __hash__(self) -> int:
return hash((str(self), self.freq))
def to_pandas_dtype(self):
@@ -158,7 +158,7 @@ class ArrowIntervalType(pyarrow.ExtensionType):
else:
return NotImplemented
- def __hash__(self):
+ def __hash__(self) -> int:
return hash((str(self), str(self.subtype), self.inclusive))
def to_pandas_dtype(self):
diff --git a/pandas/core/arrays/base.py b/pandas/core/arrays/base.py
index <HASH>..<HASH> 100644
--- a/pandas/core/arrays/base.py
+++ b/pandas/core/arrays/base.py
@@ -14,6 +14,7 @@ from typing import (
TYPE_CHECKING,
Any,
Callable,
+ ClassVar,
Iterator,
Literal,
Sequence,
@@ -1442,7 +1443,7 @@ class ExtensionArray:
# https://github.com/python/typeshed/issues/2148#issuecomment-520783318
# Incompatible types in assignment (expression has type "None", base class
# "object" defined the type as "Callable[[object], int]")
- __hash__: None # type: ignore[assignment]
+ __hash__: ClassVar[None] # type: ignore[assignment]
# ------------------------------------------------------------------------
# Non-Optimized Default Methods; in the case of the private methods here,
diff --git a/pandas/core/arrays/sparse/dtype.py b/pandas/core/arrays/sparse/dtype.py
index <HASH>..<HASH> 100644
--- a/pandas/core/arrays/sparse/dtype.py
+++ b/pandas/core/arrays/sparse/dtype.py
@@ -99,7 +99,7 @@ class SparseDtype(ExtensionDtype):
self._fill_value = fill_value
self._check_fill_value()
- def __hash__(self):
+ def __hash__(self) -> int:
# Python3 doesn't inherit __hash__ when a base class overrides
# __eq__, so we explicitly do it here.
return super().__hash__()
diff --git a/pandas/core/generic.py b/pandas/core/generic.py
index <HASH>..<HASH> 100644
--- a/pandas/core/generic.py
+++ b/pandas/core/generic.py
@@ -13,6 +13,7 @@ from typing import (
TYPE_CHECKING,
Any,
Callable,
+ ClassVar,
Hashable,
Literal,
Mapping,
@@ -1882,7 +1883,7 @@ class NDFrame(PandasObject, indexing.IndexingMixin):
# https://github.com/python/typeshed/issues/2148#issuecomment-520783318
# Incompatible types in assignment (expression has type "None", base class
# "object" defined the type as "Callable[[object], int]")
- __hash__: None # type: ignore[assignment]
+ __hash__: ClassVar[None] # type: ignore[assignment]
def __iter__(self):
"""
diff --git a/pandas/core/indexes/base.py b/pandas/core/indexes/base.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/base.py
+++ b/pandas/core/indexes/base.py
@@ -8,6 +8,7 @@ from typing import (
TYPE_CHECKING,
Any,
Callable,
+ ClassVar,
Hashable,
Iterable,
Literal,
@@ -5296,7 +5297,7 @@ class Index(IndexOpsMixin, PandasObject):
# https://github.com/python/typeshed/issues/2148#issuecomment-520783318
# Incompatible types in assignment (expression has type "None", base class
# "object" defined the type as "Callable[[object], int]")
- __hash__: None # type: ignore[assignment]
+ __hash__: ClassVar[None] # type: ignore[assignment]
@final
def __setitem__(self, key, value):
diff --git a/pandas/core/indexes/frozen.py b/pandas/core/indexes/frozen.py
index <HASH>..<HASH> 100644
--- a/pandas/core/indexes/frozen.py
+++ b/pandas/core/indexes/frozen.py
@@ -89,7 +89,8 @@ class FrozenList(PandasObject, list):
def __reduce__(self):
return type(self), (list(self),)
- def __hash__(self):
+ # error: Signature of "__hash__" incompatible with supertype "list"
+ def __hash__(self) -> int: # type: ignore[override]
return hash(tuple(self))
def _disabled(self, *args, **kwargs): | TYP: fix some of the __hash__ methods (#<I>) | pandas-dev_pandas | train |
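
The pattern this commit annotates, in isolation: a mutable class disables hashing by assigning None to __hash__, and ClassVar tells the type checker the attribute lives on the class rather than on instances (a standalone sketch, not pandas code):

    from typing import ClassVar

    class Unhashable:
        __hash__: ClassVar[None] = None  # type: ignore[assignment]

    try:
        hash(Unhashable())
    except TypeError as exc:
        print(exc)  # unhashable type: 'Unhashable'
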
37de751929c584d0f3ff0de6531320f515562555 | diff --git a/nose/test_interp_potential.py b/nose/test_interp_potential.py
index <HASH>..<HASH> 100644
--- a/nose/test_interp_potential.py
+++ b/nose/test_interp_potential.py
@@ -20,8 +20,9 @@ def test_interpolation_potential():
zs= numpy.linspace(0.0,0.2,20)
for r in rs:
for z in zs:
- print numpy.fabs((rzpot(r,z)
- -potential.evaluatePotentials(r,z,potential.MWPotential))/potential.evaluatePotentials(r,z,potential.MWPotential))
assert numpy.fabs((rzpot(r,z)
-potential.evaluatePotentials(r,z,potential.MWPotential))/potential.evaluatePotentials(r,z,potential.MWPotential)) < 10.**-6., 'RZPot interpolation w/ interpRZPotential fails at (R,z) = (%g,%g)' % (r,z)
+ #Test all at the same time to use vector evaluation
+ mr,mz= numpy.meshgrid(rs,zs)
+ assert numpy.all(numpy.fabs((rzpot(mr,mz)-potential.evaluatePotentials(mr,mz,potential.MWPotential))/potential.evaluatePotentials(mr,mz,potential.MWPotential)) < 10.**-6.), 'RZPot interpolation w/ interpRZPotential fails for vector input'
return None | test vector input of interpRZPotential's evaluate | jobovy_galpy | train |
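
The vectorized-evaluation pattern from the test, sketched with a stand-in function (f below is hypothetical; the real test calls the interpolated and exact potentials):

    import numpy

    def f(r, z):  # stand-in for rzpot / evaluatePotentials
        return r ** 2 + z ** 2

    rs = numpy.linspace(0.01, 2.0, 21)
    zs = numpy.linspace(0.0, 0.2, 20)
    mr, mz = numpy.meshgrid(rs, zs)         # 2D grids, shape (20, 21)
    vals = f(mr, mz)                        # one vectorized call replaces the double loop
    assert numpy.all(numpy.isfinite(vals))  # single assertion over the whole grid
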
14b58f9e930821d5e7962a5cfbc2f36466e8b76e | diff --git a/pyblish_qml/models.py b/pyblish_qml/models.py
index <HASH>..<HASH> 100644
--- a/pyblish_qml/models.py
+++ b/pyblish_qml/models.py
@@ -246,7 +246,7 @@ class AbstractModel(QtCore.QAbstractListModel):
except Exception:
pass
- return Item()
+ return "QVariant"
def roleNames(self):
return { | Change `Item()` to "QVariant" for consistency | pyblish_pyblish-qml | train |
467d976f0a5e466b5463d1fbeadb653afe52c908 | diff --git a/sockets.go b/sockets.go
index <HASH>..<HASH> 100644
--- a/sockets.go
+++ b/sockets.go
@@ -18,26 +18,22 @@ package main
import (
"code.google.com/p/go.net/websocket"
- "crypto/ecdsa"
- "crypto/elliptic"
- "crypto/rand"
"crypto/sha256"
_ "crypto/sha512" // for cert generation
"crypto/subtle"
"crypto/tls"
"crypto/x509"
- "crypto/x509/pkix"
"encoding/base64"
"encoding/hex"
"encoding/json"
- "encoding/pem"
"errors"
"fmt"
"github.com/conformal/btcjson"
+ "github.com/conformal/btcutil"
"github.com/conformal/btcwallet/wallet"
"github.com/conformal/btcws"
"github.com/conformal/go-socks"
- "math/big"
+ "io/ioutil"
"net"
"net/http"
"os"
@@ -124,7 +120,7 @@ func newServer(listenAddrs []string) (*server, error) {
// Check for existence of cert file and key file
if !fileExists(cfg.RPCKey) && !fileExists(cfg.RPCCert) {
// if both files do not exist, we generate them.
- err := genKey(cfg.RPCKey, cfg.RPCCert)
+ err := genCertPair(cfg.RPCKey, cfg.RPCCert)
if err != nil {
return nil, err
}
@@ -169,94 +165,27 @@ func newServer(listenAddrs []string) (*server, error) {
return &s, nil
}
-// genkey generates a key/cert pair to the paths provided.
-// TODO(oga) wrap errors with fmt.Errorf for more context?
-func genKey(key, cert string) error {
+// genCertPair generates a key/cert pair to the paths provided.
+func genCertPair(certFile, keyFile string) error {
log.Infof("Generating TLS certificates...")
- priv, err := ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
- if err != nil {
- return err
- }
-
- notBefore := time.Now()
- notAfter := notBefore.Add(10 * 365 * 24 * time.Hour)
-
- // end of ASN.1 time
- endOfTime := time.Date(2049, 12, 31, 23, 59, 59, 0, time.UTC)
- if notAfter.After(endOfTime) {
- notAfter = endOfTime
- }
-
- template := x509.Certificate{
- SerialNumber: new(big.Int).SetInt64(0),
- Subject: pkix.Name{
- Organization: []string{"btcwallet autogenerated cert"},
- },
- NotBefore: notBefore,
- NotAfter: notAfter,
-
- KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageCertSign,
- ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
- IsCA: true, // so can sign self.
- BasicConstraintsValid: true,
- }
-
- host, err := os.Hostname()
- if err != nil {
- return err
- }
- template.DNSNames = append(template.DNSNames, host, "localhost")
-
- needLocalhost := true
- addrs, err := net.InterfaceAddrs()
- if err != nil {
- return err
- }
- for _, a := range addrs {
- ip, _, err := net.ParseCIDR(a.String())
- if err == nil {
- if ip.String() == "127.0.0.1" {
- needLocalhost = false
- }
- template.IPAddresses = append(template.IPAddresses, ip)
- }
- }
- if needLocalhost {
- localHost := net.ParseIP("127.0.0.1")
- template.IPAddresses = append(template.IPAddresses, localHost)
- }
- derBytes, err := x509.CreateCertificate(rand.Reader, &template,
- &template, &priv.PublicKey, priv)
- if err != nil {
- fmt.Fprintf(os.Stderr, "Failed to create certificate: %v\n", err)
- os.Exit(-1)
- }
-
- certOut, err := os.Create(cert)
+ org := "btcwallet autogenerated cert"
+ validUntil := time.Now().Add(10 * 365 * 24 * time.Hour)
+ cert, key, err := btcutil.NewTLSCertPair(org, validUntil, nil)
if err != nil {
return err
}
- pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes})
- certOut.Close()
- keyOut, err := os.OpenFile(key, os.O_WRONLY|os.O_CREATE|os.O_TRUNC,
- 0600)
- if err != nil {
- os.Remove(cert)
+ // Write cert and key files.
+ if err = ioutil.WriteFile(certFile, cert, 0666); err != nil {
return err
}
- keybytes, err := x509.MarshalECPrivateKey(priv)
- if err != nil {
- os.Remove(key)
- os.Remove(cert)
+ if err = ioutil.WriteFile(keyFile, key, 0600); err != nil {
+ os.Remove(certFile)
return err
}
- pem.Encode(keyOut, &pem.Block{Type: "EC PRIVATE KEY", Bytes: keybytes})
- keyOut.Close()
-
- log.Info("Done generating TLS certificates")
+ log.Infof("Done generating TLS certificates")
return nil
} | Switch to btcutil for certificate generation. | btcsuite_btcwallet | train |
19374efb9e6cb155ba54e6a635338ac9bf5bb30f | diff --git a/WindowImplBrowser.js b/WindowImplBrowser.js
index <HASH>..<HASH> 100644
--- a/WindowImplBrowser.js
+++ b/WindowImplBrowser.js
@@ -122,17 +122,6 @@ WindowImplBrowser.create = function(windowPex,settings){
setCanvasSize(canvas,width,height,settings.pixelRatio);
- //TODO: add MSAA multisample support
- //TODO: add stencil option support
- //TODO: add premultipliedAlpha support
- //TODO: add preserveDrawingBuffer support
- var options = DefaultWebGLContextOptions;
- var gl = getWebGLContext(canvas,options);
-
- if(gl === null){
- throw new Error('WindowImplBrowser: No WebGL context is available.');
- }
-
var impl = new WindowImplBrowser();
impl.canvas = canvas;
@@ -235,7 +224,23 @@ WindowImplBrowser.create = function(windowPex,settings){
windowPex._impl.width = width;
windowPex._impl.height = height;
- windowPex._ctx = new Context(gl);
+ if (settings.type == '2d') {
+ windowPex._ctx = canvas.getContext('2d');
+ }
+ else {
+ //TODO: add MSAA multisample support
+ //TODO: add stencil option support
+ //TODO: add premultipliedAlpha support
+ //TODO: add preserveDrawingBuffer support
+ var options = DefaultWebGLContextOptions;
+
+ var gl = getWebGLContext(canvas,options);
+ if(gl === null){
+ throw new Error('WindowImplBrowser: No WebGL context is available.');
+ }
+
+ windowPex._ctx = new Context(gl);
+ }
windowPex.init();
drawLoop(0); | WindowImplBrowser added support for 2d window with HTMLCanvas context | pex-gl_pex-sys | train |
62a996930e552b9d0076078b68e57347749593c0 | diff --git a/tests/integration/nupic/opf/hotgym_regression_test.py b/tests/integration/nupic/opf/hotgym_regression_test.py
index <HASH>..<HASH> 100644
--- a/tests/integration/nupic/opf/hotgym_regression_test.py
+++ b/tests/integration/nupic/opf/hotgym_regression_test.py
@@ -41,11 +41,9 @@ class HotgymRegressionTest(unittest.TestCase):
def testHotgymRegression(self):
- experimentDir = pkg_resources.resource_filename(
- "nupic",
- os.path.join(os.pardir, os.pardir, "examples", "opf", "experiments",
- "multistep", "hotgym")
- )
+ experimentDir = os.path.join(
+ os.path.dirname(__file__).partition("tests/integration/nupic/opf")[0],
+ "examples", "opf", "experiments", "multistep", "hotgym")
resultsDir = os.path.join(experimentDir, "inference")
savedModelsDir = os.path.join(experimentDir, "savedmodels") | Calculate experimentDir as relative to __file__ | numenta_nupic | train |
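
The path trick from the commit, restated with a hypothetical path: str.partition() splits at the first occurrence of a marker, and element [0] is everything before it:

    import os

    test_file = "/repo/tests/integration/nupic/opf/hotgym_regression_test.py"
    root = test_file.partition("tests/integration/nupic/opf")[0]  # "/repo/"
    experiment_dir = os.path.join(root, "examples", "opf",
                                  "experiments", "multistep", "hotgym")
    print(experiment_dir)  # /repo/examples/opf/experiments/multistep/hotgym
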
beeee369a48ca6c20c1f3942e3c555e40201f2c3 | diff --git a/Resources/public/js/media-browser.js b/Resources/public/js/media-browser.js
index <HASH>..<HASH> 100644
--- a/Resources/public/js/media-browser.js
+++ b/Resources/public/js/media-browser.js
@@ -391,7 +391,19 @@
this.downloadMedia($media);
return;
}
- var params = {href: $media.data('front'), autoSize: true, padding: 0};
+ var params = {
+ href : $media.data('front'),
+ maxWidth : 800,
+ maxHeight : 600,
+ fitToView : false,
+ width : '75%',
+ height : '75%',
+ autoSize : false,
+ closeClick : false,
+ openEffect : 'none',
+ closeEffect : 'none',
+ padding : 0
+ };
if ($media.data('type') == 'image') {
params.type = 'image';
} else { | [MediaBundle] Fix player modal sizing. | ekyna_MediaBundle | train |
fa086c679ec46a92a294416158344af3f90f0792 | diff --git a/cree-sro-syllabics.js b/cree-sro-syllabics.js
index <HASH>..<HASH> 100644
--- a/cree-sro-syllabics.js
+++ b/cree-sro-syllabics.js
@@ -173,31 +173,6 @@
}
}
- /**
- * Template tag for creating defining regular expressions with whitespace
- * and placeholders. Allows for (somewhat) more readable regexps.
- */
- function verboseRegExp (strings, ...placeholders) {
- let normalizedStrings = strings.map(removeWhitespace)
- let normalizedPlaceholders = placeholders.map(removeWhitespace)
- let regexpParts = []
-
- // there are always strings.length + 1 placeholders
- // the first string is either '' or the prefix
- regexpParts.push(normalizedStrings[0])
- // the last string is either '' or the suffix
- for (let [index, placeholder] of normalizedPlaceholders.entries()) {
- regexpParts.push(placeholder)
- regexpParts.push(normalizedStrings[index + 1])
- }
-
- return new RegExp(regexpParts.join(''))
-
- function removeWhitespace (string) {
- return string.replace(/\s/g, '')
- }
- }
-
if (typeof module !== 'undefined') {
/* Export for Node/CommonJS */
module.exports = exports | Remove template tag filter for verbose RegExps. | eddieantonio_cree-sro-syllabics.js | train |
4471472aa5d7d4b75ea0b15987c83707cdfee3df | diff --git a/core-bundle/src/Config/ChainFileLocator.php b/core-bundle/src/Config/ChainFileLocator.php
index <HASH>..<HASH> 100644
--- a/core-bundle/src/Config/ChainFileLocator.php
+++ b/core-bundle/src/Config/ChainFileLocator.php
@@ -41,7 +41,7 @@ class ChainFileLocator implements FileLocatorInterface
{
$files = [];
- foreach ($this->locators as $locator) {
+ foreach ($this->getLocators($first) as $locator) {
try {
$file = $locator->locate($name, $currentPath, $first);
@@ -64,4 +64,22 @@ class ChainFileLocator implements FileLocatorInterface
throw new \InvalidArgumentException(sprintf('No locator was able to find the file "%s".', $name));
}
+
+ /**
+ * If we're looking for all files, revers locator order so that higher priority overwrites lower priority locators.
+ *
+ * @param bool $first
+ *
+ * @return FileLocatorInterface[]
+ */
+ private function getLocators($first)
+ {
+ $locators = $this->locators;
+
+ if (!$first) {
+ array_reverse($locators);
+ }
+
+ return $locators;
+ }
} | [Core] Change order of locators when looking for all files | contao_contao | train |
ceb2d725f64be6c4573b28c76ea4fe2335a51974 | diff --git a/src/SAML2/Response/Processor.php b/src/SAML2/Response/Processor.php
index <HASH>..<HASH> 100644
--- a/src/SAML2/Response/Processor.php
+++ b/src/SAML2/Response/Processor.php
@@ -130,19 +130,13 @@ class SAML2_Response_Processor
}
if (!$this->responseIsSigned) {
- $containsSignedAssertion = FALSE;
foreach ($assertions as $assertion) {
- if ($assertion->getWasSignedAtConstruction()) {
- $containsSignedAssertion = TRUE;
- break;
+ if (!$assertion->getWasSignedAtConstruction()) {
+ throw new SAML2_Response_Exception_UnsignedResponseException(
+ 'Both the response and the assertion it containes are not signed.'
+ );
}
}
-
- if (!$containsSignedAssertion) {
- throw new SAML2_Response_Exception_UnsignedResponseException(
- 'Both the response and the assertions it containes are not signed.'
- );
- }
}
return $this->assertionProcessor->processAssertions($assertions); | Make the check the same as in simplesamlphp | simplesamlphp_saml2 | train |
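
The semantic flip in this commit, restated as a sketch (the data shapes are illustrative, not the library's API): the old code accepted a response when any assertion was signed; the new code rejects it when any assertion is unsigned:

    def check_all_signed(assertions):
        for assertion in assertions:
            if not assertion["signed"]:
                raise ValueError("response and one of its assertions are unsigned")

    check_all_signed([{"signed": True}, {"signed": True}])     # passes
    # check_all_signed([{"signed": True}, {"signed": False}])  # would raise
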
775f53d34215432218e25ef64dfbdee80594f3ca | diff --git a/src/Morearty.js b/src/Morearty.js
index <HASH>..<HASH> 100644
--- a/src/Morearty.js
+++ b/src/Morearty.js
@@ -426,6 +426,7 @@ Context.prototype = Object.freeze( /** @lends Context.prototype */ {
});
} else {
self._componentQueue.forEach(function (c) {
+ savePreviousState(c);
c.forceUpdate();
});
self._componentQueue = [];
@@ -627,9 +628,12 @@ module.exports = {
var self = this;
var ctx = self.getMoreartyContext();
+ var previousState = self._previousState;
+ savePreviousState(self);
+
var shouldComponentUpdate = function () {
return ctx._fullUpdateInProgress ||
- stateChanged(self, getBinding(nextProps), getBinding(self.props), self._previousState) ||
+ stateChanged(self, getBinding(nextProps), getBinding(self.props), previousState) ||
observedPropsChanged(self, nextProps);
};
@@ -672,7 +676,6 @@ module.exports = {
componentDidUpdate: function () {
removeComponentFromRenderQueue(this.getMoreartyContext(), this);
- savePreviousState(this);
},
componentWillUnmount: function () { | Track component._previousState more rigorously. | moreartyjs_moreartyjs | train |
878593402951027d18fca8e063677564ba3d70d8 | diff --git a/lib/grasshopper-api.js b/lib/grasshopper-api.js
index <HASH>..<HASH> 100644
--- a/lib/grasshopper-api.js
+++ b/lib/grasshopper-api.js
@@ -11,6 +11,7 @@ var express = require('express'),
app.configure( function () {
app.use(function(req, res, next) {
res.setHeader("Access-Control-Allow-Origin", "*");
+ res.setHeader('Access-Control-Allow-Methods', 'GET,PUT,POST,DELETE');
res.setHeader("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept, Authorization");
return next();
});
@@ -29,6 +30,7 @@ internalSdk.on('ready', function(val){
app.get('/contentTypes', api.contentTypes.getList);
app.get('/contentTypes/:id', api.contentTypes.getById);
+ app.delete('/contentTypes/:id', api.contentTypes.deleteById);
app.post('/contentTypes', api.contentTypes.create);
    app.get('/users/:id', api.users.getById); | GRAS-<I> Added an API route for deleting content types and also added the allowed HTTP "verbs" header | grasshopper-cms_grasshopper-api-js | train
1f86e5646a70e96e47630d001a02bd6988c86436 | diff --git a/src/Vendor/Laravel/Models/Log.php b/src/Vendor/Laravel/Models/Log.php
index <HASH>..<HASH> 100644
--- a/src/Vendor/Laravel/Models/Log.php
+++ b/src/Vendor/Laravel/Models/Log.php
@@ -126,7 +126,7 @@ class Log extends Base {
$result = $this
->join('tracker_route_paths', 'tracker_route_paths.id', '=', 'tracker_log.route_path_id')
- ->join(
+ ->leftJoin(
'tracker_route_path_parameters',
'tracker_route_path_parameters.route_path_id',
            '=', | Fix logs not being found when the route has no parameters | antonioribeiro_tracker | train
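
Why switching join to leftJoin fixes the lookup, shown with a tiny in-memory database (the schema names are illustrative): an INNER JOIN drops logs whose route has no parameter rows, while a LEFT JOIN keeps them:

    import sqlite3

    db = sqlite3.connect(":memory:")
    db.executescript("""
        CREATE TABLE logs(id INTEGER, path_id INTEGER);
        CREATE TABLE params(path_id INTEGER, name TEXT);
        INSERT INTO logs VALUES (1, 10);  -- a route with no parameters
    """)
    inner = db.execute("SELECT l.id FROM logs l "
                       "JOIN params p ON p.path_id = l.path_id").fetchall()
    left = db.execute("SELECT l.id FROM logs l "
                      "LEFT JOIN params p ON p.path_id = l.path_id").fetchall()
    print(inner)  # []      the log disappears under an inner join
    print(left)   # [(1,)]  the log is kept under a left join
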
bc48747100de2b68d4caaa1f8757a74059e00cb8 | diff --git a/urlfetch/__init__.py b/urlfetch/__init__.py
index <HASH>..<HASH> 100644
--- a/urlfetch/__init__.py
+++ b/urlfetch/__init__.py
@@ -31,6 +31,7 @@ else:
import Cookie
import base64
from functools import partial
+import os
__all__ = [
@@ -94,7 +95,7 @@ def _encode_multipart(data, files):
if isinstance(f, tuple):
filename, f = f
elif hasattr(f, 'name'):
- filename = f.name
+ filename = os.path.basename(f.name)
else:
filename = None
            raise UrlfetchException("file must has filename") | Fix bug: f.name is a file path, so use its basename | ifduyue_urlfetch | train
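
Why basename matters here: f.name on an opened file is the path it was opened with, so the multipart filename field would otherwise carry the whole path (the temp file below is just for a runnable illustration):

    import os
    import tempfile

    with tempfile.NamedTemporaryFile(suffix=".pdf") as f:
        print(f.name)                    # full path, e.g. /tmp/tmpabc123.pdf
        print(os.path.basename(f.name))  # just the final component
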
ea00ab9919040c378394b96301c9988566fb249d | diff --git a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/embedded/TomcatWebServerFactoryCustomizer.java b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/embedded/TomcatWebServerFactoryCustomizer.java
index <HASH>..<HASH> 100644
--- a/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/embedded/TomcatWebServerFactoryCustomizer.java
+++ b/spring-boot-project/spring-boot-autoconfigure/src/main/java/org/springframework/boot/autoconfigure/web/embedded/TomcatWebServerFactoryCustomizer.java
@@ -186,7 +186,13 @@ public class TomcatWebServerFactoryCustomizer
// The internal proxies default to a white list of "safe" internal IP
// addresses
valve.setInternalProxies(tomcatProperties.getInternalProxies());
- valve.setHostHeader(tomcatProperties.getHostHeader());
+ try {
+ valve.setHostHeader(tomcatProperties.getHostHeader());
+ }
+ catch (NoSuchMethodError ex) {
+ // Avoid failure with war deployments to Tomcat 8.5 before 8.5.44 and
+ // Tomcat 9 before 9.0.23
+ }
valve.setPortHeader(tomcatProperties.getPortHeader());
valve.setProtocolHeaderHttpsValue(tomcatProperties.getProtocolHeaderHttpsValue());
// ... so it's safe to add this valve by default. | Protect against NoSuchMethodError when deploying to old Tomcats
Fixes gh-<I> | spring-projects_spring-boot | train |
377d276390c768f67c41a6d6f0c112f42d993c27 | diff --git a/src/Joomlatools/Console/Command/PluginInstall.php b/src/Joomlatools/Console/Command/PluginInstall.php
index <HASH>..<HASH> 100644
--- a/src/Joomlatools/Console/Command/PluginInstall.php
+++ b/src/Joomlatools/Console/Command/PluginInstall.php
@@ -23,9 +23,9 @@ class PluginInstall extends Command
protected function execute(InputInterface $input, OutputInterface $output)
{
- $result = `command -v composer >/dev/null 2>&1 || { echo >&2 "false"; }`;
+ $result = shell_exec('command -v composer >/dev/null 2>&1 || { echo "false"; }');
- if ($result == 'false')
+ if (trim($result) == 'false')
{
$output->writeln('<error>Composer was not found. It is either not installed or globally available</error>');
return;
diff --git a/src/Joomlatools/Console/Command/Site/Deploy.php b/src/Joomlatools/Console/Command/Site/Deploy.php
index <HASH>..<HASH> 100644
--- a/src/Joomlatools/Console/Command/Site/Deploy.php
+++ b/src/Joomlatools/Console/Command/Site/Deploy.php
@@ -55,6 +55,8 @@ class Deploy extends AbstractSite
$this->user = $input->getOption('user');
$this->password = $input->getOption('password');
+ $this->check($input, $output);
+
chdir($this->target_dir);
$this->checkGit($input, $output);
@@ -62,6 +64,23 @@ class Deploy extends AbstractSite
$this->deploy();
}
+ public function check(InputInterface $input, OutputInterface $output)
+ {
+ if (!file_exists($this->target_dir)) {
+ throw new \RuntimeException(sprintf('Site %s does not exist', $this->site));
+ }
+
+ $result = shell_exec('command -v git-ftp >/dev/null 2>&1 || { echo "false"; }');
+
+ if (trim($result) == 'false')
+ {
+ $output->writeln('<error>ERROR:</error> git-ftp is not installed.');
+ $output->writeln('Refer to https://github.com/git-ftp/git-ftp/blob/develop/INSTALL.md for installation instructions.');
+
+ exit(1);
+ }
+ }
+
public function checkGit()
{
if(!file_exists($this->target_dir . '/.git'))
@@ -92,7 +111,7 @@ class Deploy extends AbstractSite
public function deploy()
{
$password = $this->_buildPasswordString();
-
+
passthru('git ftp push --user ' . $this->user . ' ' . $password . ' ' .$this->server);
} | re #<I> - Check if git-ftp is installed | joomlatools_joomlatools-console | train |
96df6e55f8a32c02c1fd612aab75520ace2cae01 | diff --git a/liquid_tags/notebook.py b/liquid_tags/notebook.py
index <HASH>..<HASH> 100644
--- a/liquid_tags/notebook.py
+++ b/liquid_tags/notebook.py
@@ -51,6 +51,7 @@ import warnings
import re
import os
from functools import partial
+from io import open
from .mdx_liquid_tags import LiquidTags
@@ -324,7 +325,7 @@ def notebook(preprocessor, tag, markup):
**subcell_kwarg)
# read and parse the notebook
- with open(nb_path) as f:
+ with open(nb_path, encoding='utf-8') as f:
nb_text = f.read()
if IPYTHON_VERSION < 3:
nb_json = IPython.nbformat.current.reads_json(nb_text) | liquid_tags: Open notebooks with utf-8 encoding
I discovered a problem with UTF-8 encoded notebooks: reading them threw a
DecodeError. We now use io.open() so we can specify an encoding when
opening a file. This works with both ASCII and Unicode encoded notebooks. | getpelican_pelican-plugins | train
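
A minimal sketch of the io.open() fix (the path is illustrative): on Python 2 the builtin open() has no encoding parameter, while io.open() (the Python 3 builtin) does, so importing it gives both versions the same behaviour:

    from io import open  # redundant on Python 3; required on Python 2
    import os
    import tempfile

    path = os.path.join(tempfile.gettempdir(), "nb_demo.txt")
    with open(path, "w", encoding="utf-8") as f:
        f.write(u"\u4f60\u597d")   # non-ASCII content
    with open(path, encoding="utf-8") as f:
        print(f.read())            # decoded correctly as text
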
47fe4c5f483399c0efda0d3cf275d5d12b0283d3 | diff --git a/lib/active_admin/inputs/filter_boolean_input.rb b/lib/active_admin/inputs/filter_boolean_input.rb
index <HASH>..<HASH> 100644
--- a/lib/active_admin/inputs/filter_boolean_input.rb
+++ b/lib/active_admin/inputs/filter_boolean_input.rb
@@ -11,28 +11,16 @@ module ActiveAdmin
end
end
- def check_box_html
- template.check_box_tag("#{object_name}[#{method}]", checked_value, checked?, input_html_options)
+ def label_text
+ super.sub(/_eq\z/, '') + '?'
end
- def search_method
- method.to_s.match(metasearch_conditions) ? method : "#{method}_eq"
+ def method
+ super.to_s =~ search_conditions ? super : "#{super}_eq"
end
- def checked?
- if defined? ActionView::Helpers::InstanceTag
- object && ActionView::Helpers::InstanceTag.check_box_checked?(object.send(search_method), checked_value)
- else
- object && boolean_checked?(object.send(search_method), checked_value)
- end
- end
-
- def input_html_options
- { :name => "q[#{search_method}]" }
- end
-
- def metasearch_conditions
- /(is_true|is_false|is_present|is_blank|is_null|is_not_null)$/
+ def search_conditions
+ /(is_true|is_false|is_present|is_blank|is_null|is_not_null)\z/
end
end | remove needless Formtastic overrides
This has the added benefit of resolving #<I> | activeadmin_activeadmin | train |
c236cabb4df33870562c3582429ca7a16802d6d8 | diff --git a/python_modules/dagster/dagster/core/execution/plan/active.py b/python_modules/dagster/dagster/core/execution/plan/active.py
index <HASH>..<HASH> 100644
--- a/python_modules/dagster/dagster/core/execution/plan/active.py
+++ b/python_modules/dagster/dagster/core/execution/plan/active.py
@@ -136,7 +136,7 @@ class ActiveExecution:
)
else:
raise DagsterUnknownStepStateError(
- "Execution of pipeline exited with steps {step_list} in an unknown state to this process.\n"
+ "Execution exited with steps {step_list} in an unknown state to this process.\n"
"This was likely caused by losing communication with the process performing step execution.".format(
step_list=self._unknown_state
) | Migrate DagsterUnknownStepStateError to crag (#<I>) | dagster-io_dagster | train |
1d24f3020a420ed6ff31ace68c39804b979ed418 | diff --git a/nut.js b/nut.js
index <HASH>..<HASH> 100644
--- a/nut.js
+++ b/nut.js
@@ -14,6 +14,13 @@ function has(obj, name) {
return Object.hasOwnProperty.call(obj, name)
}
+function clone (_obj) {
+ var obj = {}
+ for(var k in _obj)
+ obj[k] = _obj[k]
+ return obj
+}
+
module.exports = function (db, precodec, codec) {
var prehooks = hooks()
var posthooks = hooks()
@@ -114,9 +121,11 @@ module.exports = function (db, precodec, codec) {
}
return function () {}
},
- iterator: function (opts, cb) {
+ iterator: function (_opts, cb) {
+ var opts = clone(_opts || {})
var prefix = opts.prefix || []
+
function encodeKey(key) {
return encodePrefix(prefix, key, opts, {})
}
diff --git a/range.js b/range.js
index <HASH>..<HASH> 100644
--- a/range.js
+++ b/range.js
@@ -1,3 +1,5 @@
+var ltgt = require('ltgt')
+
//compare two array items
function isArrayLike (a) {
return Array.isArray(a) || Buffer.isBuffer(a)
@@ -7,8 +9,13 @@ function isPrimitive (a) {
return 'string' === typeof a || 'number' === typeof a
}
+function has(o, k) {
+ return Object.hasOwnProperty.call(o, k)
+}
+
function compare (a, b) {
- if(isArrayLike(a) && isArrayLike(b)) {
+
+ if(isArrayLike(a) && isArrayLike(b)) {
var l = Math.min(a.length, b.length)
for(var i = 0; i < l; i++) {
var c = compare(a[i], b[i])
@@ -55,6 +62,8 @@ exports = module.exports = function (range, key) {
//then check the last item starts with
if(isArrayLike(range)) return prefix(range, key)
+// return ltgt.contains(range, key, compare)
+
if(range.lt && compare(key, range.lt) >= 0) return false
if(range.lte && compare(key, range.lte) > 0) return false
if(range.gt && compare(key, range.gt) <= 0) return false
@@ -63,5 +72,16 @@ exports = module.exports = function (range, key) {
return true
}
+function addPrefix(prefix, range) {
+ var r = {}
+ if(has(range, 'lt')) r.lt = [prefix, range.lt]
+ if(has(range, 'gt')) r.gt = [prefix, range.gt]
+ if(has(range, 'lte')) r.lte = [prefix, range.lte]
+ if(has(range, 'gte')) r.gte = [prefix, range.gte]
+ r.reverse = !!range.reverse
+ return r
+}
+
exports.compare = compare
exports.prefix = prefix
+exports.addPrefix = addPrefix | clone instead of mutating | dominictarr_level-sublevel | train |
e66723a7e58eed27ebf6faeb9b9e18af67529e0f | diff --git a/Controller.php b/Controller.php
index <HASH>..<HASH> 100644
--- a/Controller.php
+++ b/Controller.php
@@ -18,6 +18,17 @@ class Controller extends ControllerBase
private $layout_map;
private $current_layout;
private $template_variables = array();
+ private $template_loader;
+
+ public function setTemplateLoader(TemplateLoader $loader)
+ {
+ $this->template_loader = $loader;
+ }
+
+ public function getTemplateLoader()
+ {
+ return $this->template_loader;
+ }
protected function initLayouts()
{
@@ -50,8 +61,7 @@ class Controller extends ControllerBase
protected function renderLayout()
{
if (isset($this->current_layout)) {
- $loader = $this->service('template_loader');
- $layout = $loader->load($this->current_layout);
+ $layout = $this->template_loader->load($this->current_layout);
$layout->set($this->template_variables);
echo $layout->render();
}
diff --git a/Module.php b/Module.php
index <HASH>..<HASH> 100644
--- a/Module.php
+++ b/Module.php
@@ -40,6 +40,8 @@ class Module extends \Miny\Application\Module
->setArguments('&template_environment');
$app->add('template_loader', __NAMESPACE__ . '\\TemplateLoader')
->setArguments('&template_environment', '&template_compiler', '&log');
+ $app->add('templating_controller', __NAMESPACE__ . '\\TemplateController')
+ ->addMethodCall('setTemplateLoader', '&template_loader');
}
private function setupAutoloader(BaseApplication $app)
@@ -47,7 +49,7 @@ class Module extends \Miny\Application\Module
$templating_options = $app->templating_options;
$namespace = $templating_options->cache_namespace;
$dirname = dirname($templating_options->cache_path);
- if(!is_dir($dirname)) {
+ if (!is_dir($dirname)) {
mkdir($dirname);
}
$app->autoloader->register('\\' . $namespace, $dirname); | Inject TemplateLoader into controller instead of pulling in as a service. | bugadani_Minty | train |
89d00c4f0801a1d00f57c4bedb81c23b888e18df | diff --git a/lib/capybara/server.rb b/lib/capybara/server.rb
index <HASH>..<HASH> 100644
--- a/lib/capybara/server.rb
+++ b/lib/capybara/server.rb
@@ -32,7 +32,7 @@ module Capybara
end
def host
- "localhost"
+ "127.0.0.1"
end
def url(path) | Use <I> instead of localhost
Apparently this prevents some issues and has no
downsides. | teamcapybara_capybara | train |
1ff19b08243bd403861c0db5debfea1702aa98bd | diff --git a/reactor.py b/reactor.py
index <HASH>..<HASH> 100644
--- a/reactor.py
+++ b/reactor.py
@@ -36,35 +36,25 @@ class Reactor(object):
while 1:
print 'Counter at', counter
rrlist, _, _ = select.select(self.select_list, [], [], 5)
- if not all(rrlist):
- print 'no rrlist'
- else:
- for i in rrlist: # Doesn't require if test
- print ('Object in rrlist is a {} at IP'
- '{}').format(i.__class__.__name__, i.getpeername())
- if i == self.sock:
- # TODO -- expand this into creating new peers
- newsocket, addr = self.sock.accept()
- self.select_list.append(newsocket)
- else:
- rclos = i.read
- self.subscribed['read'].append(rclos)
- lclos = i.logic
- self.subscribed['logic'].append(lclos)
- wclos = i.write
- self.subscribed['write'].append(wclos)
+ for i in rrlist: # Doesn't require if test
+ if i == self.sock:
+ # TODO -- expand this into creating new peers
+ newsocket, addr = self.sock.accept()
+ self.select_list.append(newsocket)
else:
- cclos = i.cleanup
- self.subscribed['cleanup'].append(cclos)
+ print 'fileno', i.fileno(), 'ready to read'
+ self.subscribed['read'].append(i.read)
+
+ wclos = i.write
+ self.subscribed['write'].append(wclos)
+ cclos = i.cleanup
+ self.subscribed['cleanup'].append(cclos)
- print 'triggering read'
self.trigger('read')
- print 'triggering logic'
self.trigger('logic')
- print 'triggering write'
self.trigger('write')
- print 'triggering cleanup'
self.trigger('cleanup')
+ time.sleep(0.2)
counter += 1
''' | Threw time.sleep in here so I don't overload the peers | jefflovejapan_drench | train |
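
A minimal, self-contained sketch of the select()-based reactor loop this commit adjusts (the address and iteration count are illustrative): wait for readable sockets, accept on the listener, service the rest, then throttle the loop:

    import select
    import socket
    import time

    listener = socket.socket()
    listener.bind(("127.0.0.1", 0))
    listener.listen(5)
    sockets = [listener]

    for _ in range(3):                  # a few iterations for illustration
        readable, _, _ = select.select(sockets, [], [], 0.1)
        for s in readable:
            if s is listener:
                conn, _addr = listener.accept()
                sockets.append(conn)    # start tracking the new peer
            else:
                s.recv(4096)            # would be queued as a read callback
        time.sleep(0.2)                 # avoid busy-spinning the peers
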
c77c3f978661dede11e2f3c34b442bc472504f7a | diff --git a/bg/vertex.py b/bg/vertex.py
index <HASH>..<HASH> 100644
--- a/bg/vertex.py
+++ b/bg/vertex.py
@@ -12,4 +12,9 @@ class BGVertex(object):
self.info = info
def __hash__(self):
- return hash(self.name)
\ No newline at end of file
+ return hash(self.name)
+
+ def __eq__(self, other):
+ if not isinstance(other, BGVertex):
+ return False
+ return self.name == other.name
\ No newline at end of file
diff --git a/tests/test_vertex.py b/tests/test_vertex.py
index <HASH>..<HASH> 100644
--- a/tests/test_vertex.py
+++ b/tests/test_vertex.py
@@ -31,6 +31,17 @@ class BGVertexTestCase(unittest.TestCase):
v = BGVertex(name)
self.assertEqual(hash(name), hash(v))
+ def test_equality(self):
+ name1 = "name1"
+ v1 = BGVertex(name1)
+ name2 = "name1"
+ v2 = BGVertex(name2)
+ self.assertEqual(v1, v2)
+ name3 = "name3"
+ v3 = BGVertex(name3)
+ self.assertNotEqual(v1, v3)
+ self.assertNotEqual(v2, v3)
+
if __name__ == '__main__':
unittest.main() | implemented BGVertex equality and equipped it with suitable test case | aganezov_bg | train |
1b93973a7cfbcc90103b086c53d2a15dd833305b | diff --git a/test/codec.js b/test/codec.js
index <HASH>..<HASH> 100644
--- a/test/codec.js
+++ b/test/codec.js
@@ -441,5 +441,27 @@ describe('codec', function() {
assert.strictEqual(encoded, 10);
});
+
+ it('should encode each key in a dictionary-like object', function() {
+ var obj = {
+ f: new codec.Float(10),
+ i: new codec.Int(10),
+ };
+ var encoded = codec.encode(obj);
+ assert.deepEqual(encoded, {f: 10, i: '10'});
+ });
+
+ it('should only encode public properties of objects', function() {
+ var obj = {
+ hasOwnProperty: function(key) { // jshint ignore:line
+ return key === 'public';
+ },
+ _private: new codec.Int(10),
+ public: new codec.Int(10),
+ };
+ var encoded = codec.encode(obj);
+ assert.deepEqual(encoded._private, obj._private);
+ assert.deepEqual(encoded.public, 10);
+ });
});
});
diff --git a/test/partial-result-stream.js b/test/partial-result-stream.js
index <HASH>..<HASH> 100644
--- a/test/partial-result-stream.js
+++ b/test/partial-result-stream.js
@@ -76,6 +76,10 @@ describe('PartialResultStream', function() {
{}
]
};
+ var RESULT_WITHOUT_VALUE = {
+ resumeToken: '...',
+ values: []
+ };
before(function() {
partialResultStreamModule = proxyquire('../src/partial-result-stream.js', {
@@ -134,21 +138,22 @@ describe('PartialResultStream', function() {
fakeRequestStream.push(null);
});
- it('should not queue more than 10 results', function(done) {
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 1
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 2
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 3
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 4
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 5
-
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 6
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 7
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 8
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 9
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 10
+ it('should effectively skip rows without values', function(done) {
+ fakeRequestStream.push(RESULT_WITHOUT_VALUE);
+ fakeRequestStream.push(null);
- fakeRequestStream.push(RESULT_WITHOUT_TOKEN); // 11
+ partialResultStream
+ .on('error', done)
+ .pipe(concat(function(rows) {
+ assert.strictEqual(rows.length, 0);
+ done();
+ }));
+ });
+ it('should not queue more than 10 results', function(done) {
+ for (var i = 0; i < 11; i += 1) {
+ fakeRequestStream.push(RESULT_WITHOUT_TOKEN);
+ }
fakeRequestStream.push(null);
partialResultStream
@@ -225,6 +230,28 @@ describe('PartialResultStream', function() {
}));
});
+ it('should correctly handle multiple rows', function(done) {
+ var formattedRows = [[
+ {},
+ {}
+ ]];
+
+ partialResultStreamModule.formatRow_ = function() {
+ return formattedRows;
+ };
+
+ fakeRequestStream.push(RESULT_WITH_TOKEN);
+ fakeRequestStream.push(null);
+
+ partialResultStream
+ .on('error', done)
+ .pipe(concat(function(rows) {
+ assert.strictEqual(rows[0], formattedRows[0][0]);
+ assert.strictEqual(rows[1], formattedRows[0][1]);
+ done();
+ }));
+ });
+
it('should resume if there was an error', function(done) {
// This test will emit four rows total:
// - Two rows
@@ -323,6 +350,22 @@ describe('PartialResultStream', function() {
partialResultStream.abort();
});
+ it('should silently no-op abort if no active request', function(done) {
+ // If no request is ever made, then there should be no active
+ // stream to be aborted.
+ fakeRequestStream.abort = function() {
+ done(new Error('No request ever made; nothing to abort.'));
+ };
+
+ // Create a partial result stream and then abort it, without
+ // ever sending a request.
+ var partialResultStream = partialResultStreamModule(function() {
+ return fakeRequestStream;
+ });
+ partialResultStream.abort();
+ done();
+ });
+
it('should let user abort the most recent request', function(done) {
fakeRequestStream.abort = function() {
done(new Error('Wrong stream was aborted.'));
@@ -384,6 +427,31 @@ describe('PartialResultStream', function() {
values: VALUES
};
+ it('should omit rows from JSON representation with no name', function() {
+ // Define the second field to have no name.
+ var row = {
+ metadata: {rowType: {fields: [
+ {name: 'field-1'}, {}
+ ]}},
+ values: ['value-1', 'value-2'],
+ };
+ // Override our `decode` function to pass through the value.
+ decodeValueOverride = function(value) {
+ return value;
+ };
+
+ // Format the row.
+ var formattedRows = partialResultStreamModule.formatRow_(row);
+
+ // Both fields should exist in the formattedRows array.
+ assert.strictEqual(formattedRows.length, 2);
+ assert.strictEqual(formattedRows[0].value, 'value-1');
+ assert.strictEqual(formattedRows[1].value, 'value-2');
+
+ // Only the field with a name should exist in the JSON serialization.
+ assert.deepEqual(formattedRows.toJSON(), {'field-1': 'value-1'});
+ });
+
it('should chunk rows with more values than fields', function() {
decodeValueOverride = function(value) {
return value;
@@ -457,4 +525,4 @@ describe('PartialResultStream', function() {
});
});
});
-});
\ No newline at end of file
+}); | Additional Spanner tests. (#<I>) | googleapis_nodejs-spanner | train |
ef9f8f28c9d8e2cc10e1b56063e41d41db90efbf | diff --git a/pywal/colors.py b/pywal/colors.py
index <HASH>..<HASH> 100644
--- a/pywal/colors.py
+++ b/pywal/colors.py
@@ -54,6 +54,13 @@ def sort_colors(img, colors):
we will later save in json format."""
raw_colors = colors[:1] + colors[9:] + colors[8:]
+ # Darken the background color if it's too light.
+ if int(raw_colors[0][1]) in [3, 4]:
+ raw_colors[0] = util.darken_color(raw_colors[0], 0.50)
+
+ elif int(raw_colors[0][1]) >= 5:
+ raw_colors[0] = util.darken_color(raw_colors[0], 0.25)
+
colors = {"wallpaper": img}
colors_special = {}
diff --git a/pywal/util.py b/pywal/util.py
index <HASH>..<HASH> 100644
--- a/pywal/util.py
+++ b/pywal/util.py
@@ -20,7 +20,8 @@ class Color:
@property
def rgb(self):
"""Convert a hex color to rgb."""
- return hex_to_rgb(self.hex_color)
+ red, green, blue = hex_to_rgb(self.hex_color)
+ return f"{red},{green},{blue}"
@property
def xrgba(self):
@@ -99,8 +100,7 @@ def create_dir(directory):
def hex_to_rgb(color):
"""Convert a hex color to rgb."""
- red, green, blue = list(bytes.fromhex(color.strip("#")))
- return f"{red},{green},{blue}"
+ return tuple(bytes.fromhex(color.strip("#")))
def hex_to_xrgba(color):
@@ -109,6 +109,16 @@ def hex_to_xrgba(color):
return f"{col[1]}{col[2]}/{col[3]}{col[4]}/{col[5]}{col[6]}/ff"
+def rgb_to_hex(color):
+ """Convert an rgb color to hex."""
+ return f"#{color[0]:02x}{color[1]:02x}{color[2]:02x}"
+
+
+def darken_color(color, darkness):
+ """Darken a hex color."""
+ return rgb_to_hex([int(col * (1 - darkness)) for col in hex_to_rgb(color)])
+
+
def disown(*cmd):
"""Call a system command in the background,
disown it and hide it's output."""
diff --git a/tests/test_util.py b/tests/test_util.py
index <HASH>..<HASH> 100755
--- a/tests/test_util.py
+++ b/tests/test_util.py
@@ -64,23 +64,33 @@ class TestUtil(unittest.TestCase):
def test_hex_to_rgb_black(self):
"""> Convert #000000 to RGB."""
result = util.hex_to_rgb("#000000")
- self.assertEqual(result, "0,0,0")
+ self.assertEqual(result, (0, 0, 0))
def test_hex_to_rgb_white(self):
- """> Convert #FFFFFF to RGB."""
- result = util.hex_to_rgb("#FFFFFF")
- self.assertEqual(result, "255,255,255")
+ """> Convert #ffffff to RGB."""
+ result = util.hex_to_rgb("#ffffff")
+ self.assertEqual(result, (255, 255, 255))
def test_hex_to_rgb_rand(self):
- """> Convert #98AEC2 to RGB."""
- result = util.hex_to_rgb("#98AEC2")
- self.assertEqual(result, "152,174,194")
+ """> Convert #98aec2 to RGB."""
+ result = util.hex_to_rgb("#98aec2")
+ self.assertEqual(result, (152, 174, 194))
def test_hex_to_xrgba(self):
- """> Convert #98AEC2 to XRGBA."""
- result = util.hex_to_xrgba("#98AEC2")
+ """> Convert #98aec2 to XRGBA."""
+ result = util.hex_to_xrgba("#98aec2")
self.assertEqual(result, "98/ae/c2/ff")
+ def test_rgb_to_hex(self):
+ """> Convert 152,174,194 to HEX."""
+ result = util.rgb_to_hex((152, 174, 194))
+ self.assertEqual(result, "#98aec2")
+
+ def test_darken_color(self):
+ """> Darken #ffffff by 0.25."""
+ result = util.darken_color("#ffffff", 0.25)
+ self.assertEqual(result, "#bfbfbf")
+
if __name__ == "__main__":
unittest.main() | colors: Darken bg if contrast is too low. | dylanaraps_pywal | train |
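
The darkening math from the commit, restated as a self-contained sketch that mirrors the new helpers: scale each RGB channel by (1 - darkness) and re-encode as hex:

    def hex_to_rgb(color):
        return tuple(bytes.fromhex(color.strip("#")))

    def rgb_to_hex(color):
        return "#{0:02x}{1:02x}{2:02x}".format(*color)

    def darken_color(color, darkness):
        return rgb_to_hex([int(c * (1 - darkness)) for c in hex_to_rgb(color)])

    print(darken_color("#ffffff", 0.25))  # #bfbfbf, matching the new test
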
3d642fbed00d9fb956e5484cfe13d77b35310a00 | diff --git a/dataviews/plots.py b/dataviews/plots.py
index <HASH>..<HASH> 100644
--- a/dataviews/plots.py
+++ b/dataviews/plots.py
@@ -16,11 +16,10 @@ import param
from .views import NdMapping, Stack, View
from .dataviews import DataStack, DataOverlay, DataLayer, Curve, Histogram,\
- Table, TableStack, ScatterPoints
+ Table, TableStack, Scatter
from .sheetviews import SheetView, SheetOverlay, Contours, \
SheetStack, Points, CoordinateGrid, DataGrid
from .views import GridLayout, Layout, Overlay, View, Annotation
-from .operation import RGBA, HCS, AlphaOverlay
class PlotSaver(param.ParameterizedFunction):
@@ -136,7 +135,6 @@ class Plot(param.Parameterized):
sideplots = {}
-
def __init__(self, zorder=0, **kwargs):
super(Plot, self).__init__(**kwargs)
self.zorder = zorder
@@ -163,7 +161,8 @@ class Plot(param.Parameterized):
title_format = self._stack.get_title(key if isinstance(key, tuple) else (key,), view)
if title_format is None:
return None
- return title_format.format(label=view._label_dim.pprint_label, type=view.__class__.__name__)
+ return title_format.format(label=view.label, value=str(view.value),
+ type=view.__class__.__name__)
def _update_title(self, n):
@@ -1192,7 +1191,7 @@ class DataPlot(Plot):
class ScatterPlot(Plot):
"""
- ScatterPlot can plot ScatterPoints and DataStacks of ScatterPoints,
+ ScatterPlot can plot Scatter and DataStacks of Scatter,
which can be displayed as a single frame or animation. Axes,
titles and legends are automatically generated from the metadata
and dim_info.
@@ -1222,7 +1221,7 @@ class ScatterPlot(Plot):
_stack_type = DataStack
def __init__(self, points, zorder=0, **kwargs):
- self._stack = self._check_stack(points, ScatterPoints)
+ self._stack = self._check_stack(points, Scatter)
self.ax = None
super(ScatterPlot, self).__init__(zorder, **kwargs)
@@ -1551,7 +1550,7 @@ class TablePlot(Plot):
respectively.
"""
- border = param.Number(default = 0.05, bounds=(0.0, 0.5), doc="""
+ border = param.Number(default=0.05, bounds=(0.0, 0.5), doc="""
The fraction of the plot that should be empty around the
edges.""")
@@ -1564,12 +1563,12 @@ class TablePlot(Plot):
table cell. Any strings longer than this length will be
truncated.""")
- max_font_size = param.Integer(default = 20, doc="""
+ max_font_size = param.Integer(default=20, doc="""
The largest allowable font size for the text in each table
cell.""")
- font_types = param.Dict(default = {'heading':FontProperties(weight='bold',
- family='monospace')},
+ font_types = param.Dict(default={'heading': FontProperties(weight='bold',
+ family='monospace')},
doc="""The font style used for heading labels used for emphasis.""") | Docstring and whitespace cleanup in plots | pyviz_holoviews | train |
fb436b34f590597b524c71f81daf585d9b7a8b7e | diff --git a/dolo/misc/caching.py b/dolo/misc/caching.py
index <HASH>..<HASH> 100644
--- a/dolo/misc/caching.py
+++ b/dolo/misc/caching.py
@@ -120,7 +120,7 @@ class DiskDictionary:
with file(filename) as f:
value = pickle.load(f)
return value
- except IOError:
+ except :
            return None | ENH: a very unsophisticated cached-on-disk dictionary object | EconForge_dolo | train
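
A minimal sketch of a cached-on-disk dictionary in the same spirit (this class is hypothetical, not dolo's actual implementation): values are pickled to files keyed by a hash of the key, and a missing or unreadable file reads as None:

    import hashlib
    import os
    import pickle
    import tempfile

    class DiskDict(object):
        def __init__(self, directory=None):
            self.directory = directory or tempfile.mkdtemp()

        def _path(self, key):
            digest = hashlib.sha1(repr(key).encode("utf-8")).hexdigest()
            return os.path.join(self.directory, digest + ".pkl")

        def __setitem__(self, key, value):
            with open(self._path(key), "wb") as f:
                pickle.dump(value, f)

        def get(self, key):
            try:
                with open(self._path(key), "rb") as f:
                    return pickle.load(f)
            except Exception:   # mirrors the commit's catch-all fallback
                return None

    d = DiskDict()
    d["a"] = [1, 2, 3]
    print(d.get("a"))   # [1, 2, 3]
    print(d.get("b"))   # None
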
6b22f1260d0737c235c85eb257727a371b9de5a4 | diff --git a/Listener/Resource/PathListener.php b/Listener/Resource/PathListener.php
index <HASH>..<HASH> 100644
--- a/Listener/Resource/PathListener.php
+++ b/Listener/Resource/PathListener.php
@@ -41,7 +41,6 @@ class PathListener extends ContainerAware
);
}
else {
-
$route = $this->container
->get('router')
->generate(
@@ -161,6 +160,9 @@ class PathListener extends ContainerAware
// Duplicate primary resources
if (!empty($step->primaryResource)) {
$processedNodes = array_merge($processedNodes, $this->copyResource($step->primaryResource, $parent, $processedNodes));
+
+ var_dump($processedNodes);
+ die();
}
// Duplicate secondary resources
@@ -198,29 +200,43 @@ class PathListener extends ContainerAware
// Current node is not the WS Root and not the Node which want to duplicate
$parentNode = $manager->getNode($ancestor['id']);
if ($parentNode) {
- $directoryRes = $manager->createResource('Claroline\CoreBundle\Entity\Resource\Directory', $parentNode->getName());
- $directory = $manager->create(
- $directoryRes,
- $parentNode->getResourceType(),
- $user,
- $newParent->getWorkspace(),
- $newParent,
- $parentNode->getIcon()
- );
-
- $newParent = $directory->getResourceNode();
- $processedNodes[$parentNode->getId()] = $newParent;
+ if (empty($processedNodes[$parentNode->getId()])) {
+ // Current Node has not been processed => create a copy
+ $directoryRes = $manager->createResource('Claroline\CoreBundle\Entity\Resource\Directory', $parentNode->getName());
+ $directory = $manager->create(
+ $directoryRes,
+ $parentNode->getResourceType(),
+ $user,
+ $newParent->getWorkspace(),
+ $newParent,
+ $parentNode->getIcon()
+ );
+
+ $newParent = $directory->getResourceNode();
+ $processedNodes[$parentNode->getId()] = $newParent;
+ } else {
+ die('coucou');
+ // Current has already been processed => get copy
+ $newParent = $processedNodes[$parentNode->getId()];
+ }
}
}
}
+ }
+ if (empty($processedNodes[$resourceNode->getId()])) {
+ // Current Node has not been processed => create a copy
// Duplicate Node
$copy = $manager->copy($resourceNode, $newParent, $user);
$copyNode = $copy->getResourceNode();
+
// Update structure with new id
$resource->resourceId = $copy->getResourceNode()->getId();
- $processedNodes[] = $copyNode;
+ $processedNodes[$resourceNode->getId()] = $copyNode;
+ } else {
+ // Current has already been processed => get copy
+ $resource->resourceId = $processedNodes[$resourceNode->getId()]->getId();
}
} | [PathBundle] WIP handle path copy | claroline_Distribution | train |
17ee41fe371c137758663545ed43575389d7e089 | diff --git a/app/models/pageflow/widget.rb b/app/models/pageflow/widget.rb
index <HASH>..<HASH> 100644
--- a/app/models/pageflow/widget.rb
+++ b/app/models/pageflow/widget.rb
@@ -64,9 +64,8 @@ module Pageflow
initial_widgets = placeholders_by_role.merge(defaults_by_role)
initial_widgets.merge(from_db_by_role) { |_role_key, old_val, new_val|
if old_val.configuration.present?
- old_val.configuration.each do |key, value|
- new_val.configuration[key] = value unless new_val.configuration.key?(key)
- end
+ new_val.configuration = {} if new_val.configuration.nil?
+ new_val.configuration = old_val.configuration.merge(new_val.configuration)
end
new_val
}.values
@@ -81,7 +80,7 @@ module Pageflow
result[role] = Widget.new(role: role, type_name: widget_type.name,
subject: nil,
configuration:
- config.widget_types.default_configuration(widget_type.name))
+ config.widget_types.default_configuration(role))
end
end
diff --git a/lib/pageflow/built_in_widget_types_plugin.rb b/lib/pageflow/built_in_widget_types_plugin.rb
index <HASH>..<HASH> 100644
--- a/lib/pageflow/built_in_widget_types_plugin.rb
+++ b/lib/pageflow/built_in_widget_types_plugin.rb
@@ -9,13 +9,12 @@ module Pageflow
.phone_horizontal_slideshow_mode)
end
+    config.widget_types.register_widget_defaults('loading_spinner',
+ 'blur_strength' => 50,
+ 'remove_logo' => false,
+ 'invert' => false)
config.widget_types.register(Pageflow::BuiltInWidgetType.classic_loading_spinner,
- default: true,
- default_configurations: {
- blur_strength: 50,
- remove_logo: true,
- invert: false
- })
+ default: true)
config.features.register('title_loading_spinner') do |feature_config|
feature_config.widget_types.register(Pageflow::BuiltInWidgetType.title_loading_spinner)
diff --git a/lib/pageflow/widget_types.rb b/lib/pageflow/widget_types.rb
index <HASH>..<HASH> 100644
--- a/lib/pageflow/widget_types.rb
+++ b/lib/pageflow/widget_types.rb
@@ -15,8 +15,10 @@ module Pageflow
defaults_by_role[role] = widget_type
end
end
- @default_configurations[widget_type.name] = options[:default_configurations] if options[
- :default_configurations].present?
+ end
+
+ def register_widget_defaults(widget_role, default_configurations)
+ @default_configurations[widget_role] = default_configurations
end
def clear
diff --git a/spec/models/pageflow/widget_spec.rb b/spec/models/pageflow/widget_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/models/pageflow/widget_spec.rb
+++ b/spec/models/pageflow/widget_spec.rb
@@ -83,6 +83,38 @@ module Pageflow
expect(widgets.first.widget_type).to be_kind_of(WidgetType::Null)
end
+ it 'assigns default configurations to widgets' do
+ widget_type = TestWidgetType.new(name: 'default_header', roles: ['header'])
+ config = Configuration.new
+ default_configurations = {'test' => 'test value'}
+ config.widget_types.register_widget_defaults('header', default_configurations)
+ config.widget_types.register(widget_type, default: true)
+ revision = create(:revision)
+ create(:widget, subject: revision, role: 'header', type_name: 'default_header')
+ widgets = revision.widgets.resolve(config, scope: :editor)
+
+ expect(widgets.first.configuration).to eq(default_configurations)
+ end
+
+ it 'default configuration do not override existing configurations' do
+ widget_type = TestWidgetType.new(name: 'default_header', roles: ['header'])
+ config = Configuration.new
+ default_configurations = {'test' => 'test value', 'test2' => 'test 2'}
+ config.widget_types.register_widget_defaults('header', default_configurations)
+ config.widget_types.register(widget_type, default: true)
+ revision = create(:revision)
+ test_widget = create(:widget,
+ subject: revision,
+ role: 'header',
+ type_name: 'default_header')
+ test_widget.configuration = {'test2' => 'test'}
+ test_widget.save!
+ widgets = revision.widgets.resolve(config, scope: :editor)
+
+ expect(widgets.first.configuration['test']).to eq(default_configurations['test'])
+ expect(widgets.first.configuration['test2']).to eq('test')
+ end
+
it 'filters widgets disabled in editor' do
non_editor_widget_type = TestWidgetType.new(name: 'non_editor', enabled_in_editor: false)
      non_preview_widget_type = TestWidgetType.new(name: 'non_preview', enabled_in_preview: false) | Widget default configurations are applied at the role level. | codevise_pageflow | train
2a3ff828a0aaa723820c18f3ef35920666bc8322 | diff --git a/test/utils/jmx/broker/playground.py b/test/utils/jmx/broker/playground.py
index <HASH>..<HASH> 100644
--- a/test/utils/jmx/broker/playground.py
+++ b/test/utils/jmx/broker/playground.py
@@ -28,6 +28,8 @@ queue = RemoteJmxQueue(session, broker_name='TEST.BROKER', queue_name='test.req'
queue.send_text_message("Message from RemoteJmxQueue")
print "Queue size = {}".format(queue.get_size())
+print "Message contents = {}".format(queue.get_message_contents())
+
browse_messages_payload = {
"type":"exec",
diff --git a/test/utils/jmx/broker/remote_jmx_queue.py b/test/utils/jmx/broker/remote_jmx_queue.py
index <HASH>..<HASH> 100644
--- a/test/utils/jmx/broker/remote_jmx_queue.py
+++ b/test/utils/jmx/broker/remote_jmx_queue.py
@@ -26,21 +26,15 @@ class RemoteJmxQueue(object):
}
return self.jolokia_session.request(attribute)
- def get_message_contents():
- pass
- # operation = {
- # type: 'exec',
- # mbean: queue_bean,
- # operation: 'browse()',
- # }
- # result = jolokia_session.request(operation)
- # result.map { |composite_data|
- # if composite_data.has_key?('Text')
- # composite_data['Text']
- # else
- # composite_data['BodyPreview'].to_a.pack('c*')
- #
- # }
+ def get_message_contents(self):
+
+ operation = {
+ 'type': 'exec',
+ 'mbean': self.queue_bean,
+ 'operation': 'browse()',
+ }
+ result = self.jolokia_session.request(operation)
+ return map(lambda r : r['Text'], result)
def purge():
pass | Implement get_message_contents. | julianghionoiu_tdl-client-python | train |
fbb61f77abe3e756965c3b39843611f36628132b | diff --git a/src/Fixer/Import/FullyQualifiedStrictTypesFixer.php b/src/Fixer/Import/FullyQualifiedStrictTypesFixer.php
index <HASH>..<HASH> 100644
--- a/src/Fixer/Import/FullyQualifiedStrictTypesFixer.php
+++ b/src/Fixer/Import/FullyQualifiedStrictTypesFixer.php
@@ -27,7 +27,6 @@ use PhpCsFixer\Tokenizer\Analyzer\NamespaceUsesAnalyzer;
use PhpCsFixer\Tokenizer\CT;
use PhpCsFixer\Tokenizer\Generator\NamespacedStringTokenGenerator;
use PhpCsFixer\Tokenizer\Resolver\TypeShortNameResolver;
-use PhpCsFixer\Tokenizer\Token;
use PhpCsFixer\Tokenizer\Tokens;
/**
@@ -103,6 +102,7 @@ class SomeClass
protected function applyFix(\SplFileInfo $file, Tokens $tokens): void
{
$lastIndex = $tokens->count() - 1;
+
for ($index = $lastIndex; $index >= 0; --$index) {
if (!$tokens[$index]->isGivenKind(T_FUNCTION)) {
continue;
@@ -149,27 +149,58 @@ class SomeClass
return;
}
- $typeName = $type->getName();
-
- if (0 !== strpos($typeName, '\\')) {
- return;
+ $typeStartIndex = $type->getStartIndex();
+ if ($tokens[$typeStartIndex]->isGivenKind(CT::T_NULLABLE_TYPE)) {
+ $typeStartIndex = $tokens->getNextMeaningfulToken($typeStartIndex);
}
- $shortType = (new TypeShortNameResolver())->resolve($tokens, $typeName);
- if ($shortType === $typeName) {
- return;
- }
+ foreach ($this->getSimpleTypes($tokens, $typeStartIndex, $type->getEndIndex()) as $simpleType) {
+ $typeName = $tokens->generatePartialCode($simpleType['start'], $simpleType['end']);
- $shortType = (new NamespacedStringTokenGenerator())->generate($shortType);
+ if (0 !== strpos($typeName, '\\')) {
+ continue;
+ }
- if (true === $type->isNullable()) {
- array_unshift($shortType, new Token([CT::T_NULLABLE_TYPE, '?']));
+ $shortType = (new TypeShortNameResolver())->resolve($tokens, $typeName);
+ if ($shortType === $typeName) {
+ continue;
+ }
+
+ $shortType = (new NamespacedStringTokenGenerator())->generate($shortType);
+
+ $tokens->overrideRange(
+ $simpleType['start'],
+ $simpleType['end'],
+ $shortType
+ );
}
+ }
- $tokens->overrideRange(
- $type->getStartIndex(),
- $type->getEndIndex(),
- $shortType
- );
+ /**
+ * @return \Generator<array<int>>
+ */
+ private function getSimpleTypes(Tokens $tokens, int $startIndex, int $endIndex): iterable
+ {
+ $index = $startIndex;
+
+ while (true) {
+ $prevIndex = $index;
+ $index = $tokens->getNextMeaningfulToken($index);
+
+ if (null === $startIndex) {
+ $startIndex = $index;
+ }
+
+ if ($index >= $endIndex) {
+ yield ['start' => $startIndex, 'end' => $index];
+
+ break;
+ }
+
+ if ($tokens[$index]->isGivenKind(CT::T_TYPE_ALTERNATION)) {
+ yield ['start' => $startIndex, 'end' => $prevIndex];
+ $startIndex = null;
+ }
+ }
}
}
diff --git a/tests/Fixer/Import/FullyQualifiedStrictTypesFixerTest.php b/tests/Fixer/Import/FullyQualifiedStrictTypesFixerTest.php
index <HASH>..<HASH> 100644
--- a/tests/Fixer/Import/FullyQualifiedStrictTypesFixerTest.php
+++ b/tests/Fixer/Import/FullyQualifiedStrictTypesFixerTest.php
@@ -492,4 +492,37 @@ class Two
],
];
}
+
+ /**
+ * @requires PHP 8.0
+ *
+ * @dataProvider provideFix80Cases
+ */
+ public function testFix80(string $expected, string $input): void
+ {
+ $this->doTest($expected, $input);
+ }
+
+ public static function provideFix80Cases(): iterable
+ {
+ yield [
+ '<?php function foo(A|B|C $x) {}',
+ '<?php function foo(\A|\B|\C $x) {}',
+ ];
+
+ yield [
+ '<?php function foo(): A|B|C {}',
+ '<?php function foo(): \A|\B|\C {}',
+ ];
+
+ yield 'aaa' => [
+ '<?php function foo(): A | B | C {}',
+ '<?php function foo(): \A | \B | \C {}',
+ ];
+
+ yield [
+ '<?php function f(): Foo|Bar|A\B\C {}',
+ '<?php function f(): Foo|\Bar|\A\B\C {}',
+ ];
+ }
} | FullyQualifiedStrictTypesFixer - fix for union types | FriendsOfPHP_PHP-CS-Fixer | train |
5c2455a1179c834a5a79ca7cf912db630d803bea | diff --git a/parsl/dataflow/strategy.py b/parsl/dataflow/strategy.py
index <HASH>..<HASH> 100644
--- a/parsl/dataflow/strategy.py
+++ b/parsl/dataflow/strategy.py
@@ -2,7 +2,8 @@ import logging
import time
import math
-from parsl.executors.ipp import IPyParallelExecutor
+from parsl.executors import IPyParallelExecutor, HighThroughputExecutor, ExtremeScaleExecutor
+
logger = logging.getLogger(__name__)
@@ -167,7 +168,7 @@ class Strategy(object):
continue
# Tasks that are either pending completion
- active_tasks = executor.executor.outstanding
+ active_tasks = executor.outstanding
status = executor.status()
self.unset_logging()
@@ -186,16 +187,18 @@ class Strategy(object):
active_blocks = running + submitting + pending
active_slots = active_blocks * tasks_per_node * nodes_per_block
- if isinstance(executor, IPyParallelExecutor):
+ if (isinstance(executor, IPyParallelExecutor) or
+ isinstance(executor, HighThroughputExecutor) or
+ isinstance(executor, ExtremeScaleExecutor)):
logger.debug('Executor {} has {} active tasks, {}/{}/{} running/submitted/pending blocks, and {} connected engines'.format(
- label, len(active_tasks), running, submitting, pending, len(executor.executor)))
+ label, active_tasks, running, submitting, pending, len(executor.connected_workers)))
else:
logger.debug('Executor {} has {} active tasks and {}/{}/{} running/submitted/pending blocks'.format(
- label, len(active_tasks), running, submitting, pending))
+ label, active_tasks, running, submitting, pending))
# Case 1
# No tasks.
- if len(active_tasks) == 0:
+ if active_tasks == 0:
# Case 1a
# Fewer blocks that min_blocks
if active_blocks <= min_blocks:
@@ -229,7 +232,7 @@ class Strategy(object):
# Case 2
# More tasks than the available slots.
- elif (float(active_slots) / len(active_tasks)) < parallelism:
+ elif (float(active_slots) / active_tasks) < parallelism:
# Case 2a
# We have the max blocks possible
if active_blocks >= max_blocks:
@@ -240,12 +243,12 @@ class Strategy(object):
# Case 2b
else:
# logger.debug("Strategy: Case.2b")
- excess = math.ceil((len(active_tasks) * parallelism) - active_slots)
+ excess = math.ceil((active_tasks * parallelism) - active_slots)
excess_blocks = math.ceil(float(excess) / (tasks_per_node * nodes_per_block))
logger.debug("Requesting {} more blocks".format(excess_blocks))
executor.scale_out(excess_blocks)
- elif active_slots == 0 and len(active_tasks) > 0:
+ elif active_slots == 0 and active_tasks > 0:
# Case 4
# Check if slots are being lost quickly ?
logger.debug("Requesting single slot")
diff --git a/parsl/executors/high_throughput/executor.py b/parsl/executors/high_throughput/executor.py
index <HASH>..<HASH> 100644
--- a/parsl/executors/high_throughput/executor.py
+++ b/parsl/executors/high_throughput/executor.py
@@ -380,7 +380,7 @@ class HighThroughputExecutor(ParslExecutor, RepresentationMixin):
def scaling_enabled(self):
return self._scaling_enabled
- def scale_out(self):
+ def scale_out(self, blocks=1):
"""Scales out the number of active workers by 1.
Raises:
diff --git a/parsl/executors/ipp.py b/parsl/executors/ipp.py
index <HASH>..<HASH> 100644
--- a/parsl/executors/ipp.py
+++ b/parsl/executors/ipp.py
@@ -207,6 +207,14 @@ sleep infinity
""".format(engine_dir, engine_json, container_image, debug_option=self.debug_option, uid=uid)
@property
+ def outstanding(self):
+ return len(self.executor.outstanding)
+
+ @property
+ def connected_workers(self):
+ return self.executor.ids
+
+ @property
def scaling_enabled(self):
return self._scaling_enabled | Adding outstanding and connected_workers properties to HighThroughput and IPP executors
This now enables scaling up with the strategy pieces. | Parsl_parsl | train
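A self-contained sketch of the uniform interface this gives the strategy loop (the facade below is illustrative, not Parsl's real executor internals):

class ExecutorFacade(object):
    # Illustrative only: mimics the new IPP properties.
    def __init__(self, pending_tasks, engine_ids):
        self._pending_tasks = pending_tasks
        self._engine_ids = engine_ids

    @property
    def outstanding(self):
        # IPP computes len(self.executor.outstanding) behind this property.
        return len(self._pending_tasks)

    @property
    def connected_workers(self):
        return self._engine_ids

executor = ExecutorFacade(pending_tasks=['t1', 't2'], engine_ids=[0, 1])
# Strategy code now reads a plain integer instead of executor.executor.outstanding:
print('{} active tasks, {} connected workers'.format(
    executor.outstanding, len(executor.connected_workers)))
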
4ccbcb91c8d31670a88ac81fddd314cf6720f50d | diff --git a/etcdmain/etcd.go b/etcdmain/etcd.go
index <HASH>..<HASH> 100644
--- a/etcdmain/etcd.go
+++ b/etcdmain/etcd.go
@@ -208,7 +208,7 @@ func startEtcd(cfg *config) (<-chan struct{}, error) {
plog.Warningf("The scheme of peer url %s is http while peer key/cert files are presented. Ignored peer key/cert files.", u.String())
}
var l net.Listener
- l, err = transport.NewTimeoutListener(u.Host, u.Scheme, cfg.peerTLSInfo, rafthttp.ConnReadTimeout, rafthttp.ConnWriteTimeout)
+ l, err = rafthttp.NewListener(u, cfg.peerTLSInfo)
if err != nil {
return nil, err
}
diff --git a/etcdserver/server.go b/etcdserver/server.go
index <HASH>..<HASH> 100644
--- a/etcdserver/server.go
+++ b/etcdserver/server.go
@@ -40,7 +40,6 @@ import (
"github.com/coreos/etcd/pkg/pbutil"
"github.com/coreos/etcd/pkg/runtime"
"github.com/coreos/etcd/pkg/timeutil"
- "github.com/coreos/etcd/pkg/transport"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/pkg/wait"
"github.com/coreos/etcd/raft"
@@ -211,12 +210,10 @@ func NewServer(cfg *ServerConfig) (*EtcdServer, error) {
haveWAL := wal.Exist(cfg.WALDir())
ss := snap.New(cfg.SnapDir())
- // use timeout transport to pair with remote timeout listeners
- pt, err := transport.NewTimeoutTransport(cfg.PeerTLSInfo, cfg.peerDialTimeout(), 0, 0)
+ prt, err := rafthttp.NewRoundTripper(cfg.PeerTLSInfo, cfg.peerDialTimeout())
if err != nil {
return nil, err
}
- prt := http.RoundTripper(pt)
var remotes []*Member
switch {
case !haveWAL && !cfg.NewCluster:
diff --git a/rafthttp/transport.go b/rafthttp/transport.go
index <HASH>..<HASH> 100644
--- a/rafthttp/transport.go
+++ b/rafthttp/transport.go
@@ -136,15 +136,11 @@ type Transport struct {
func (t *Transport) Start() error {
var err error
- // Read/write timeout is set for stream roundTripper to promptly
- // find out broken status, which minimizes the number of messages
- // sent on broken connection.
- t.streamRt, err = transport.NewTimeoutTransport(t.TLSInfo, t.DialTimeout, ConnReadTimeout, ConnWriteTimeout)
+ t.streamRt, err = newStreamRoundTripper(t.TLSInfo, t.DialTimeout)
if err != nil {
return err
}
- // use timeout transport to pair with remote timeout listeners
- t.pipelineRt, err = transport.NewTimeoutTransport(t.TLSInfo, t.DialTimeout, 0, 0)
+ t.pipelineRt, err = NewRoundTripper(t.TLSInfo, t.DialTimeout)
if err != nil {
return err
}
diff --git a/rafthttp/util.go b/rafthttp/util.go
index <HASH>..<HASH> 100644
--- a/rafthttp/util.go
+++ b/rafthttp/util.go
@@ -18,11 +18,14 @@ import (
"encoding/binary"
"fmt"
"io"
+ "net"
"net/http"
"net/url"
"strings"
+ "time"
"github.com/coreos/etcd/Godeps/_workspace/src/github.com/coreos/go-semver/semver"
+ "github.com/coreos/etcd/pkg/transport"
"github.com/coreos/etcd/pkg/types"
"github.com/coreos/etcd/raft/raftpb"
"github.com/coreos/etcd/version"
@@ -30,6 +33,30 @@ import (
var errMemberRemoved = fmt.Errorf("the member has been permanently removed from the cluster")
+// NewListener returns a listener for raft message transfer between peers.
+// It uses timeout listener to identify broken streams promptly.
+func NewListener(u url.URL, tlsInfo transport.TLSInfo) (net.Listener, error) {
+ return transport.NewTimeoutListener(u.Host, u.Scheme, tlsInfo, ConnReadTimeout, ConnWriteTimeout)
+}
+
+// NewRoundTripper returns a roundTripper used to send requests
+// to rafthttp listener of remote peers.
+func NewRoundTripper(tlsInfo transport.TLSInfo, dialTimeout time.Duration) (http.RoundTripper, error) {
+ // It uses timeout transport to pair with remote timeout listeners.
+ // It sets no read/write timeout, because message in requests may
+ // take long time to write out before reading out the response.
+ return transport.NewTimeoutTransport(tlsInfo, dialTimeout, 0, 0)
+}
+
+// newStreamRoundTripper returns a roundTripper used to send stream requests
+// to rafthttp listener of remote peers.
+// Read/write timeout is set for stream roundTripper to promptly
+// find out broken status, which minimizes the number of messages
+// sent on broken connection.
+func newStreamRoundTripper(tlsInfo transport.TLSInfo, dialTimeout time.Duration) (http.RoundTripper, error) {
+ return transport.NewTimeoutTransport(tlsInfo, dialTimeout, ConnReadTimeout, ConnWriteTimeout)
+}
+
func writeEntryTo(w io.Writer, ent *raftpb.Entry) error {
size := ent.Size()
if err := binary.Write(w, binary.BigEndian, uint64(size)); err != nil { | rafthttp: add functions to create listener and roundTripper
This moves the code to create listener and roundTripper for raft communication
to the same place, and uses explicit functions to build them. This prevents
possible development errors in the future. | etcd-io_etcd | train |
98f864b40c1970dce3f8a82e3fd16180343a50fd | diff --git a/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/olap/job/IndexRemoveJob.java b/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/olap/job/IndexRemoveJob.java
index <HASH>..<HASH> 100644
--- a/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/olap/job/IndexRemoveJob.java
+++ b/titan-core/src/main/java/com/thinkaurelius/titan/graphdb/olap/job/IndexRemoveJob.java
@@ -160,7 +160,14 @@ public class IndexRemoveJob extends IndexUpdateJob implements ScanJob {
public Predicate<StaticBuffer> getKeyFilter() {
if (isGlobalGraphIndex()) {
assert graphIndexId>0;
- return (k -> indexSerializer.getIndexIdFromKey(k)==graphIndexId);
+ return (k -> {
+ try {
+ return indexSerializer.getIndexIdFromKey(k) == graphIndexId;
+ } catch (RuntimeException e) {
+ log.debug("Filtering key {} due to exception", k, e);
+ return false;
+ }
+ });
} else {
return buffer -> {
long vertexId = idManager.getKeyID(buffer); | Allow empty relation type name in IndexRemoveJob
This is empty when removing a graph index. | thinkaurelius_titan | train |
53e30d6b1deda8d5960a80825e3c0d0a9de6ecea | diff --git a/lib/diameter/avp.rb b/lib/diameter/avp.rb
index <HASH>..<HASH> 100644
--- a/lib/diameter/avp.rb
+++ b/lib/diameter/avp.rb
@@ -129,6 +129,22 @@ class AVP
@content = new_content
end
+ def inner_avp(name)
+ avps = inner_avps(name)
+
+ if avps.empty?
+ nil
+ else
+ avps[0]
+ end
+ end
+
+ def inner_avps(name)
+ code, _type, _vendor = AVPNames.get(name)
+
+ parse_avps_int(@content).select { |a| a.code == code}
+ end
+
def octet_string
@content
end
diff --git a/test/test_avp.rb b/test/test_avp.rb
index <HASH>..<HASH> 100644
--- a/test/test_avp.rb
+++ b/test/test_avp.rb
@@ -43,6 +43,9 @@ describe "AVP", "A simple example" do
AVP.create("Vendor-Id", 10415)])
avp.code.must_equal 260
+ avp.inner_avp("Vendor-Id").code.must_equal 266
+ avp.inner_avp("Vendor-Id").uint32.must_equal 10415
+
# Wire representation taken from Wireshark
avp.to_wire.must_equal "\x00\x00\x01\x04\x40\x00\x00\x20\x00\x00\x01\x02\x40\x00\x00\x0c\x01\x00\x00\x00\x00\x00\x01\x0a\x40\x00\x00\x0c\x00\x00\x28\xaf".force_encoding("ASCII-8BIT")
end | Add inner_avp[s] methods | rkday_ruby-diameter | train |
685814aa619bfbce4824647e435858f097782491 | diff --git a/src/client/voice/dispatcher/StreamDispatcher.js b/src/client/voice/dispatcher/StreamDispatcher.js
index <HASH>..<HASH> 100644
--- a/src/client/voice/dispatcher/StreamDispatcher.js
+++ b/src/client/voice/dispatcher/StreamDispatcher.js
@@ -118,8 +118,8 @@ class StreamDispatcher extends Writable {
* Pauses playback
*/
pause() {
- if (!this.paused) this.pausedSince = Date.now();
this._setSpeaking(false);
+ if (!this.paused) this.pausedSince = Date.now();
}
/**
@@ -200,7 +200,9 @@ class StreamDispatcher extends Writable {
if (this.pausedSince) return;
if (!this.streams.broadcast) {
const next = FRAME_LENGTH + (this.count * FRAME_LENGTH) - (Date.now() - this.startTime - this.pausedTime);
- setTimeout(this._writeCallback.bind(this), next);
+ setTimeout(() => {
+ if (!this.pausedSince && this._writeCallback) this._writeCallback();
+ }, next);
}
this._sdata.sequence++;
this._sdata.timestamp += TIMESTAMP_INC; | voice: make sure speaking is false when paused | discordjs_discord.js | train |
f258c2a72fd316f6306df7383242a022004b98b6 | diff --git a/src/SearchResults/index.js b/src/SearchResults/index.js
index <HASH>..<HASH> 100644
--- a/src/SearchResults/index.js
+++ b/src/SearchResults/index.js
@@ -296,6 +296,9 @@ function SearchResults(state, results) {
this.automaticRadius = mainSubResponse.automaticRadius;
/**
* String identifying the server used to serve this request.
+ *
+ * getRankingInfo needs to be set to `true` for this to be returned
+ *
* @member {string}
* @example "c7-use-2.algolia.net",
*/ | docs(serverUsed): mention getRankingInfo (#<I>)
This is a basically useless response, and we maybe should remove it, and similar fields (as you can read them from _rawResponse), or we can arbitrarily set all remaining parameters.
fixes #<I> | algolia_algoliasearch-helper-js | train |
4382a5a7b4b49ff8a2910fd6324873ebd78d328a | diff --git a/twelvefactor/twelvefactor.go b/twelvefactor/twelvefactor.go
index <HASH>..<HASH> 100644
--- a/twelvefactor/twelvefactor.go
+++ b/twelvefactor/twelvefactor.go
@@ -180,6 +180,31 @@ type Scheduler interface {
Restart(context.Context, string, StatusStream) error
}
+// Transform wraps a Scheduler to perform transformations on the Manifest. This
+// can be used to, for example, add default placement constraints before
+// providing it to the backend scheduler.
+func Transform(s Scheduler, fn func(*Manifest) *Manifest) Scheduler {
+ return &transformer{s, fn}
+}
+
+// transformer wraps a Scheduler to perform transformations on the Manifest. This
+// can be used to, for example, add default placement constraints before
+// providing it to the backend scheduler.
+type transformer struct {
+ Scheduler
+
+ // Transform will be called on Submit and Run.
+ Transform func(*Manifest) *Manifest
+}
+
+func (t *transformer) Submit(ctx context.Context, app *Manifest, ss StatusStream) error {
+ return t.Scheduler.Submit(ctx, t.Transform(app), ss)
+}
+
+func (t *transformer) Run(ctx context.Context, app *Manifest, in io.Reader, out io.Writer) error {
+ return t.Scheduler.Run(ctx, t.Transform(app), in, out)
+}
+
// Env merges the App environment with any environment variables provided
// in the process.
func Env(app *Manifest, process *Process) map[string]string { | Adds a twelvefactor.Transform method.
This commit adds a `twelvefactor.Transform` method, which returns a
wrapped `twelvefactor.Scheduler` that will transform the
`twelvefactor.Manifest` before passing it to the downstream scheduler.
This can be used to, for example, add default placement constraints to
processes that don't define any. | remind101_empire | train |
d8ad360d02e3743a4ba674c19996aea077707707 | diff --git a/lib/collection.js b/lib/collection.js
index <HASH>..<HASH> 100644
--- a/lib/collection.js
+++ b/lib/collection.js
@@ -17,13 +17,45 @@ class Collection {
this._opSubject = new Subject();
this.action$ = this._actionSubject.asObservable();
this.op$ = this._opSubject.asObservable();
+
+ ['insertOne', 'insertMany', 'count', 'findOne'].forEach(f => {
+ this.action(f);
+ });
+
+ this.action$ = this.action$.
+ map(action => {
+ if (action.action === 'insertOne') {
+ action.params[0] = this._archetype(action.params[0]);
+ }
+ if (action.action === 'insertMany') {
+ action.params[0] = action.params[0].map(doc => this._archetype(doc));
+ }
+ return action;
+ });
+
+ this.op$ = this.op$.map(op => {
+ if (['insertOne', 'insertMany'].includes(op.action)) {
+ op.promise = op.promise.then(({ result }) => result);
+ return op;
+ }
+
+ return op;
+ });
+ }
+
+ action(fn) {
+ const name = typeof fn === 'string' ? fn : fn.name;
+ this[name] = function() {
+ return this.$baseAction(fn, arguments);
+ };
+ return this;
}
$baseAction(action, params) {
const collection = this._collection.collectionName;
const _id = new ObjectId();
- const res = new Promise(resolve => {
+ const res = new Promise((resolve, reject) => {
const subscription = this.action$.
filter(op => op._id.toString() === _id.toString()).
subscribe(op => {
@@ -32,9 +64,7 @@ class Collection {
const promise = typeof action === 'function' ?
action.apply(null, params) :
this._collection[action].apply(this._collection, params);
- resolve({
- promise
- });
+ resolve({ promise });
});
});
@@ -69,28 +99,6 @@ class Collection {
return res;
});
}
-
- async insertOne(doc) {
- const collection = this;
- doc = collection._archetype(doc);
- return collection.$baseAction('insertOne', [doc]).
- then(({ result }) => result);
- }
-
- async insertMany(docs) {
- const collection = this;
- doc = collection._archetype(doc);
- return collection.$baseAction('insertMany', [docs]).
- then(({ result }) => result);
- }
-
- async count(filter, options) {
- return this.$baseAction('count', [filter, options]);
- }
-
- async findOne(filter, options) {
- return this.$baseAction('findOne', [filter, options]);
- }
}
module.exports = Collection; | refactor: use external facing action() for base actions | boosterfuels_monogram | train |
d57f6fd544cfeb864307dcc261e25d0a62a1111b | diff --git a/src/AutoComplete/AutoComplete.js b/src/AutoComplete/AutoComplete.js
index <HASH>..<HASH> 100644
--- a/src/AutoComplete/AutoComplete.js
+++ b/src/AutoComplete/AutoComplete.js
@@ -138,6 +138,11 @@ class AutoComplete extends React.Component {
onFocus: React.PropTypes.func,
/**
+ * Callback function that is fired when the `TextField` receives a keydown event.
+ */
+ onKeyDown: React.PropTypes.func,
+
+ /**
* Callback function that is fired when a list item is selected, or enter is pressed in the `TextField`.
*
* @param {string} chosenRequest Either the `TextField` input value, if enter is pressed in the `TextField`,
@@ -296,6 +301,8 @@ class AutoComplete extends React.Component {
};
handleKeyDown = (event) => {
+ if (this.props.onKeyDown) this.props.onKeyDown(event);
+
switch (keycode(event)) {
case 'enter':
this.close(); | [AutoComplete] Allow devs to hook into onKeyDown | mui-org_material-ui | train |
8bbf519a3824432ab5c05168746317c427acd046 | diff --git a/molecule/driver/delegated.py b/molecule/driver/delegated.py
index <HASH>..<HASH> 100644
--- a/molecule/driver/delegated.py
+++ b/molecule/driver/delegated.py
@@ -42,6 +42,7 @@ class Delegated(base.Base):
However, the developer must adhere to the instance-config API. The
developer's create playbook must provide the following instance-config
data, and the developer's destroy playbook must reset the instance-config.
+ Both `become` keys are optional and can be used independently.
.. code-block:: yaml
@@ -50,6 +51,8 @@ class Delegated(base.Base):
instance: instance_name
port: ssh_port_as_string
user: ssh_user
+ become_method: valid_ansible_become_method
+ become_pass: password_if_required
- address: winrm_endpoint
instance: instance_name
@@ -175,6 +178,10 @@ class Delegated(base.Base):
conn_dict['ansible_host'] = d.get('address')
conn_dict['ansible_port'] = d.get('port')
conn_dict['ansible_connection'] = d.get('connection', 'smart')
+ if d.get('become_method'):
+ conn_dict['ansible_become_method'] = d.get('become_method')
+ if d.get('become_pass'):
+ conn_dict['ansible_become_pass'] = d.get('become_pass')
if d.get('identity_file'):
conn_dict['ansible_private_key_file'] = d.get(
'identity_file')
diff --git a/test/unit/driver/test_delegated.py b/test/unit/driver/test_delegated.py
index <HASH>..<HASH> 100644
--- a/test/unit/driver/test_delegated.py
+++ b/test/unit/driver/test_delegated.py
@@ -168,6 +168,8 @@ def test_login_options_when_managed(mocker, _instance):
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
+ 'become_method': 'su',
+ 'become_pass': 'password',
'identity_file': '/foo/bar',
}
@@ -176,6 +178,8 @@ def test_login_options_when_managed(mocker, _instance):
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
+ 'become_method': 'su',
+ 'become_pass': 'password',
'identity_file': '/foo/bar',
}
assert x == _instance.login_options('foo')
@@ -199,6 +203,8 @@ def test_ansible_connection_options_when_managed(mocker, _instance):
'address': '172.16.0.2',
'user': 'cloud-user',
'port': 22,
+ 'become_method': 'su',
+ 'become_pass': 'password',
'identity_file': '/foo/bar',
}
@@ -209,6 +215,10 @@ def test_ansible_connection_options_when_managed(mocker, _instance):
22,
'ansible_user':
'cloud-user',
+ 'ansible_become_method':
+ 'su',
+ 'ansible_become_pass':
+ 'password',
'ansible_private_key_file':
'/foo/bar',
'ansible_connection': | allow for become_method and become_pass variables with the delegated driver (#<I>)
* allow for become_method and become_pass variables with the delegated driver | ansible_molecule | train |
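A standalone sketch of the mapping the driver performs for one instance-config entry (values are placeholders; either become key may be omitted, and each is applied independently):

d = {
    'instance': 'instance-1',
    'address': '172.16.0.2',
    'user': 'cloud-user',
    'port': 22,
    'become_method': 'su',      # optional
    'become_pass': 'password',  # optional
}

conn_dict = {
    'ansible_host': d.get('address'),
    'ansible_port': d.get('port'),
    'ansible_user': d.get('user'),
    'ansible_connection': d.get('connection', 'smart'),
}
if d.get('become_method'):
    conn_dict['ansible_become_method'] = d.get('become_method')
if d.get('become_pass'):
    conn_dict['ansible_become_pass'] = d.get('become_pass')

print(conn_dict)
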
fd726fa5d73cbebeb01f60411d4c6db387827674 | diff --git a/src/Records/Traits/Media/Covers/RecordTrait.php b/src/Records/Traits/Media/Covers/RecordTrait.php
index <HASH>..<HASH> 100644
--- a/src/Records/Traits/Media/Covers/RecordTrait.php
+++ b/src/Records/Traits/Media/Covers/RecordTrait.php
@@ -10,9 +10,8 @@ trait RecordTrait
use \ByTIC\Common\Records\Traits\AbstractTrait\RecordTrait;
- public $covers = array();
-
- protected $_coversCache = array();
+ protected $_cover = [];
+ protected $_coversCache = null;
protected $_coverTypes = array('full', 'default');
/**
@@ -154,29 +153,41 @@ trait RecordTrait
return $return;
}
- public function findCovers($type = "default")
+ public function checkCovers()
+ {
+ if ($this->_coversCache === null) {
+ $this->initCovers();
+ }
+ }
+
+ public function initCovers()
{
- if (!$this->_coverCache[$type]) {
- $return = array();
+ foreach ($this->_coverTypes as $type) {
+ $this->initCoversByType($type);
+ }
+ }
- $files = Nip_File_System::instance()->scanDirectory($this->getCoverBasePath($type));
+ public function getCovers($type = "default")
+ {
+ if (!$this->_coversCache[$type]) {
+ $this->initCoversByType($type);
+ }
+ return $this->_coversCache[$type];
+ }
- foreach ($files as $file) {
- $cover = $this->getNewCover($type);
- $cover->setName($file);
+ public function initCoversByType($type = "default")
+ {
- if ($file == $this->default_cover) {
- $return = array($cover->name => $cover) + $return;
- } else {
- $return[$cover->name] = $cover;
- }
- }
+ $files = Nip_File_System::instance()->scanDirectory($this->getCoverBasePath($type));
+ $return = array();
+ foreach ($files as $file) {
+ $cover = $this->getNewCover($type);
+ $cover->setName($file);
- $this->_coverCache[$type] = $return;
+ $return[$cover->name] = $cover;
}
- $this->covers = $this->_coverCache[$type];
- return $this->covers;
+ $this->_coversCache[$type] = $return;
}
public function findCover($type = "default")
@@ -184,25 +195,43 @@ trait RecordTrait
$this->findCovers($type);
if ($this->covers) {
- if ($this->default_cover && $this->covers[$this->default_cover]) {
- $this->cover = $this->covers[$this->default_cover];
- } else {
- $this->cover = reset($this->covers);
- $this->default_cover = $this->cover->name;
-// $this->update();
- }
}
}
public function getCover($type = "default")
{
- $this->findCover($type);
+ if (!isset($this->cover[$type])) {
+ $this->initCover($type);
+ }
+
+ return $this->cover[$type];
+ }
+
+ public function initCover($type = "default")
+ {
+ $this->checkCovers();
+ $covers = $this->getCovers($type);
- if ($this->cover) {
- return $this->cover->url;
+ if (count($covers)) {
+ if ($this->default_cover && $covers[$this->default_cover]) {
+ $cover = $covers[$this->default_cover];
+ } else {
+ $cover = reset($covers);
+ $this->default_cover = $this->cover->name;
+ // $this->update();
+ }
+ } else {
+ $cover = $this->getNewCover($type);
}
- return false;
+ $this->cover[$type] = $cover;
+
+ }
+
+ public function hasCover($type = "default")
+ {
+ $this->checkCovers();
+ return isset($this->_coversCache[$type]) && count($this->_coversCache[$type]) > 0;
}
public function deleteCovers()
@@ -292,9 +321,9 @@ trait RecordTrait
public function removeCover($request)
{
foreach ($this->_coverTypes as $type) {
- $this->findCovers($type);
+ $covers = $this->getCovers($type);
- if ($this->covers[$request['image']]) {
+ if ($covers[$request['image']]) {
$this->deleteCover($request['image']);
}
        } | add upload to center in covers | bytic_Common | train
1911ebc0f6384678b64a69297f4cc362215b55d5 | diff --git a/src/Essence/Matchers/LengthMatcher.php b/src/Essence/Matchers/LengthMatcher.php
index <HASH>..<HASH> 100644
--- a/src/Essence/Matchers/LengthMatcher.php
+++ b/src/Essence/Matchers/LengthMatcher.php
@@ -14,11 +14,15 @@ class LengthMatcher extends AbstractMatcher
/**
* {@inheritdoc}
*/
+ protected $modes = ["normal", "configuration"];
+
+ /**
+ * {@inheritdoc}
+ */
public function run()
{
parent::run();
- list($length) = $this->arguments;
$actualLength = null;
if (is_string($this->value)) {
@@ -34,6 +38,15 @@ class LengthMatcher extends AbstractMatcher
$actualLength = count(get_object_vars($this->value));
}
+ // The configuration mode.
+ if ($this->configurationOnly) {
+ essence()->setMatcherConfiguration(__CLASS__, ["length" => $actualLength]);
+
+ return true;
+ }
+
+ list($length) = $this->arguments;
+
if ($length !== $actualLength) {
$this->setMessage(
"%s (expected length) is not equal to %s (actual length)", | LengthMatcher now supports config mode. | bound1ess_essence | train |
06140e1b164f4352e096bced5d68760cbd1e4e24 | diff --git a/lib/syllabification_context.rb b/lib/syllabification_context.rb
index <HASH>..<HASH> 100644
--- a/lib/syllabification_context.rb
+++ b/lib/syllabification_context.rb
@@ -24,6 +24,10 @@ module Pronounce
Syllable.new(@phones.slice(completed_length...@phone_index))
end
+ def previous_phone_in_coda?
+ pending_syllable.coda_contains? previous_phone
+ end
+
def word_beginning?
@phone_index == 0
end
diff --git a/lib/syllable_rules.rb b/lib/syllable_rules.rb
index <HASH>..<HASH> 100644
--- a/lib/syllable_rules.rb
+++ b/lib/syllable_rules.rb
@@ -1,9 +1,9 @@
module Pronounce::SyllableRules
# Breaks syllables at the low point of sonority between vowels.
def self.sonority_sequencing_principle(context)
- return true if context.current_phone.syllabic? && (context.current_phone == context.previous_phone || context.pending_syllable.coda_contains?(context.previous_phone))
+ return true if context.current_phone.syllabic? && (context.current_phone == context.previous_phone || context.previous_phone_in_coda?)
return false if context.word_end?
- (context.current_phone < context.next_phone && context.current_phone <= context.previous_phone) || context.pending_syllable.coda_contains?(context.previous_phone)
+ (context.current_phone < context.next_phone && context.current_phone <= context.previous_phone) || context.previous_phone_in_coda?
end
end
diff --git a/spec/syllabification_context_spec.rb b/spec/syllabification_context_spec.rb
index <HASH>..<HASH> 100644
--- a/spec/syllabification_context_spec.rb
+++ b/spec/syllabification_context_spec.rb
@@ -46,5 +46,24 @@ module Pronounce
end
end
+ describe '#previous_phone_in_coda?' do
+ context 'when pending syllable contains a vowel before previous phone' do
+ let(:index) { 2 }
+ let(:syllables) { [] }
+
+ it 'is true' do
+ expect(subject.previous_phone_in_coda?).to eq true
+ end
+ end
+
+ context 'when pending syllable does not contain a vowel before previous phone' do
+ let(:index) { 4 }
+
+ it 'is false' do
+ expect(subject.previous_phone_in_coda?).to eq false
+ end
+ end
+ end
+
end
end | Reduced duplication of a common query on the syllable context. | josephwilk_pronounce | train |
1a523affb8977e071b6f7e09f0bbe9b270668cfc | diff --git a/examples/download_dashboards.py b/examples/download_dashboards.py
index <HASH>..<HASH> 100755
--- a/examples/download_dashboards.py
+++ b/examples/download_dashboards.py
@@ -75,7 +75,7 @@ if not os.path.exists(sysdig_dashboard_dir):
for db in res['dashboards']:
sdclient.save_dashboard_to_file(db, os.path.join(sysdig_dashboard_dir, str(db['id'])))
- print(("Name: %s, # Charts: %d" % (db['name'], len(db['widgets']))))
+ print(("Name: %s" % (db['name'])))
zipf = zipfile.ZipFile(dashboard_state_file, 'w', zipfile.ZIP_DEFLATED)
zipdir(sysdig_dashboard_dir, zipf) | fix: Update dashboards to latest API (#<I>) | draios_python-sdc-client | train |
ac30794f13b6e6cc33bb811e0907725c7528fbae | diff --git a/index.js b/index.js
index <HASH>..<HASH> 100644
--- a/index.js
+++ b/index.js
@@ -55,7 +55,7 @@ function main (options, done) {
try {
src = fs.readFileSync(filename, 'utf8')
} catch (e) {
- throw new Error('Could not read file from /src/posts/' + filename)
+ throw new Error('Could not read file ' + filename)
}
const matter = frontMatter(src) | Fix error message when file failed to read | yoshuawuyts_markdown-to-medium | train |
454991485979eba66662d075bcf2341c656b3738 | diff --git a/src/project/ProjectManager.js b/src/project/ProjectManager.js
index <HASH>..<HASH> 100644
--- a/src/project/ProjectManager.js
+++ b/src/project/ProjectManager.js
@@ -1810,7 +1810,21 @@ define(function (require, exports, module) {
var escapedName = _.escape(entry.name);
_projectTree.jstree("set_text", $selected, escapedName);
_projectTree.jstree("rename");
- var indexOfExtension = escapedName.lastIndexOf('.');
+
+ var indexOfExtension = escapedName.lastIndexOf("."),
+ language = LanguageManager.getLanguageForPath(entry.name);
+ if (language) {
+ language.getFileExtensions().forEach(function (ext) {
+ ext = "." + ext;
+ if (escapedName.match(ext + "$")) {
+ var io = escapedName.lastIndexOf(ext);
+ if (io < indexOfExtension) {
+ indexOfExtension = io;
+ }
+ }
+ });
+ }
+
if (indexOfExtension > 0) {
$selected.children(".jstree-rename-input")[0].setSelectionRange(0, indexOfExtension);
} | Correctly selects filename when known extension with a dot inside is used | adobe_brackets | train |
106d2ef9d450b504a9a05846423ab28a2d102a3d | diff --git a/src/SoapHeaderEventSubscriber.php b/src/SoapHeaderEventSubscriber.php
index <HASH>..<HASH> 100644
--- a/src/SoapHeaderEventSubscriber.php
+++ b/src/SoapHeaderEventSubscriber.php
@@ -68,9 +68,16 @@ class SoapHeaderEventSubscriber implements EventSubscriberInterface
$document->firstChild->firstChild
)
);
+
+ if(null !== $metadata->xmlRootPrefix){
+ $tagName = $metadata->xmlRootPrefix . ':' . $metadata->xmlRootName;
+ }else{
+ $tagName = $metadata->xmlRootName;
+ }
+
$visitor->setCurrentNode(
$header->appendChild(
- $document->createElementNS($metadata->xmlRootNamespace, $metadata->xmlRootName)
+ $document->createElementNS($metadata->xmlRootNamespace, $tagName)
)
); | Update SoapHeaderEventSubscriber.php
Missing root prefix in soap header. | dmt-software_jms-soap-serializer | train |
a56cb821800aa7b7680079d431a92ba898060c24 | diff --git a/etcdserver/server_test.go b/etcdserver/server_test.go
index <HASH>..<HASH> 100644
--- a/etcdserver/server_test.go
+++ b/etcdserver/server_test.go
@@ -1369,10 +1369,11 @@ func (n *nodeRecorder) Step(ctx context.Context, msg raftpb.Message) error {
n.Record(testutil.Action{Name: "Step"})
return nil
}
-func (n *nodeRecorder) Status() raft.Status { return raft.Status{} }
-func (n *nodeRecorder) Ready() <-chan raft.Ready { return nil }
-func (n *nodeRecorder) ReadIndex(ctx context.Context, rctx []byte) error { return nil }
-func (n *nodeRecorder) Advance() {}
+func (n *nodeRecorder) Status() raft.Status { return raft.Status{} }
+func (n *nodeRecorder) Ready() <-chan raft.Ready { return nil }
+func (n *nodeRecorder) TransferLeadership(ctx context.Context, lead, transferee uint64) {}
+func (n *nodeRecorder) ReadIndex(ctx context.Context, rctx []byte) error { return nil }
+func (n *nodeRecorder) Advance() {}
func (n *nodeRecorder) ApplyConfChange(conf raftpb.ConfChange) *raftpb.ConfState {
n.Record(testutil.Action{Name: "ApplyConfChange", Params: []interface{}{conf}})
return &raftpb.ConfState{} | etcdserver: add TransferLeadership for raft.Node | etcd-io_etcd | train |
c389ac760e178d7ae36772ad714e1b470020f269 | diff --git a/RisWriter.php b/RisWriter.php
index <HASH>..<HASH> 100644
--- a/RisWriter.php
+++ b/RisWriter.php
@@ -84,7 +84,7 @@ class RisWriter
}
/* First position for TY (Type) */
- array_push($buffer, sprintf('TY - %s', $record['TY']));
+ array_push($buffer, sprintf('TY - %s', $record['TY'][0]));
unset($record['TY']);
/* Order the array */ | For Field TY, only get first position on array | Funstaff_RefLibRis | train |
3d7f884ccfe15da1b218903b37b255769223b4cf | diff --git a/jax/numpy/lax_numpy.py b/jax/numpy/lax_numpy.py
index <HASH>..<HASH> 100644
--- a/jax/numpy/lax_numpy.py
+++ b/jax/numpy/lax_numpy.py
@@ -3235,6 +3235,11 @@ def _unimplemented_setitem(self, i, x):
"jax.ops.index_add instead?")
raise TypeError(msg.format(type(self)))
+def _operator_round(number, ndigits=None):
+ out = round(number, decimals=ndigits or 0)
+ # If `ndigits` is None, for a builtin float round(7.5) returns an integer.
+ return out.astype(int_) if ndigits is None else out
+
_operators = {
"getitem": _rewriting_take,
"setitem": _unimplemented_setitem,
@@ -3276,6 +3281,7 @@ _operators = {
"invert": bitwise_not,
"lshift": left_shift,
"rshift": right_shift,
+ "round": _operator_round,
}
# These numpy.ndarray methods are just refs to an equivalent numpy function
diff --git a/tests/lax_numpy_test.py b/tests/lax_numpy_test.py
index <HASH>..<HASH> 100644
--- a/tests/lax_numpy_test.py
+++ b/tests/lax_numpy_test.py
@@ -828,6 +828,21 @@ class LaxBackedNumpyTests(jtu.JaxTestCase):
self._CompileAndCheck(lnp_fun, args_maker, check_dtypes=check_dtypes,
atol=tol, rtol=tol)
+ def testOperatorRound(self):
+ self.assertAllClose(round(onp.float32(7.532), 1),
+ round(lnp.float32(7.5), 1), check_dtypes=True)
+ self.assertAllClose(round(onp.float32(1.234), 2),
+ round(lnp.float32(1.234), 2), check_dtypes=True)
+ self.assertAllClose(round(onp.float32(1.234)),
+ round(lnp.float32(1.234)), check_dtypes=False)
+ self.assertAllClose(round(onp.float32(7.532), 1),
+ round(lnp.array(7.5, lnp.float32), 1), check_dtypes=True)
+ self.assertAllClose(round(onp.float32(1.234), 2),
+ round(lnp.array(1.234, lnp.float32), 2), check_dtypes=True)
+ self.assertAllClose(round(onp.float32(1.234)),
+ round(lnp.array(1.234, lnp.float32)),
+ check_dtypes=False)
+
@parameterized.named_parameters(jtu.cases_from_list(
{"testcase_name": "_shape={}_mode={}_rpadwidth={}_rconstantvalues={}".format(
jtu.format_shape_dtype_string(shape, dtype), mode, pad_width_rank, | Implement __round__ on JAX arrays. (#<I>)
* Implement __round__ on JAX arrays.
Avoids breakage from <URL> | tensorflow_probability | train |
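The tests reduce to the builtin round() protocol; a short sketch (return dtypes are indicative):

import jax.numpy as jnp

x = jnp.array(7.532, jnp.float32)
print(round(x, 1))  # 7.5 -- stays floating point when ndigits is given
print(round(x))     # 8  -- cast to an integer dtype, matching round() on a plain float
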
cec82b1a3f4bd17a5cdf30405f8c159d414da3ff | diff --git a/README.md b/README.md
index <HASH>..<HASH> 100644
--- a/README.md
+++ b/README.md
@@ -71,7 +71,7 @@ The following date formats are parsed:
<a name="date_strftime"></a>
#### $date = \Missing\Date::strftime($date_string, $format, $else, $tz)
-Parses $date_string using \Missing\Date::parse(), if it parses correctly, return the date formatted with $format in the timezone $tz, otherwise return $else.
+Parses $date_string using \Missing\Date::parse() (also accepts a UTC timestamp), if it parses correctly, return the date formatted with $format in the timezone $tz, otherwise return $else.
<p>Date: <?php echo \Missing\Date::strftime($date, '%d/%m/%Y', 'unknown', 'Europe/London') ?></p>
diff --git a/src/Missing/Date.php b/src/Missing/Date.php
index <HASH>..<HASH> 100644
--- a/src/Missing/Date.php
+++ b/src/Missing/Date.php
@@ -32,9 +32,14 @@ class Date {
}
static function strftime($datetime, $format, $else, $tz) {
- list($t, $err) = self::parse($datetime);
- if ($err) {
- return $else;
+ // Allow timestamps
+ if (is_int($datetime)) {
+ $t = $datetime;
+ } else {
+ list($t, $err) = self::parse($datetime);
+ if ($err) {
+ return $else;
+ }
}
$old_tz = date_default_timezone_get();
diff --git a/test/date_test.php b/test/date_test.php
index <HASH>..<HASH> 100644
--- a/test/date_test.php
+++ b/test/date_test.php
@@ -75,4 +75,22 @@ class DateTest extends PHPUnit_Framework_TestCase {
1234
);
}
+
+ function testTimestamp() {
+ // It doesn't make sense to parse a timestamp
+ list($date, $err) = Missing\Date::parse(1339009200);
+ $this->assertTrue($err);
+ $this->assertNull($date);
+
+ // But it does make sense to strftime() a timestamp
+ $this->assertEquals(
+ '2012-06-06T20:00',
+ Missing\Date::strftime(
+ 1339009200,
+ '%Y-%m-%dT%H:%M',
+ 'unknown',
+ 'Europe/London'
+ )
+ );
+ }
} | Allow Date::strftime to accept a UTC timestamp | dxw_php-missing | train |
0ff30de5275530de955297478ecce7fc81a4bcb2 | diff --git a/IndexedRedis/__init__.py b/IndexedRedis/__init__.py
index <HASH>..<HASH> 100644
--- a/IndexedRedis/__init__.py
+++ b/IndexedRedis/__init__.py
@@ -394,7 +394,7 @@ class IndexedRedisModel(object):
def __getattribute__(self, keyName):
# If something on the class, just return it right away.
oga = object.__getattribute__
- if hasattr( oga(self, '__class__'), keyName):
+ if keyName in dir(oga(self, '__class__')):
return oga(self, keyName)
if keyName.endswith('__id'):
@@ -445,7 +445,7 @@ class IndexedRedisModel(object):
'''
ret = {}
for thisField in self.FIELDS:
- if not hasattr(self, str(thisField)):
+ if not str(thisField) in dir(self):
val = thisField.getDefaultValue()
else:
                val = object.__getattribute__(self, thisField) | Don't use hasattr, which calls __getattribute__ and ends up resolving foreign fields. Instead, check if keyName is in dir(...) | kata198_indexedredis | train
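The distinction the message relies on, as a standalone sketch: hasattr() goes through __getattribute__ (and so triggers any lazy resolution), while dir() only inspects the namespace:

class Lazy(object):
    known = 1

    def __getattribute__(self, name):
        print('resolving %s' % name)  # stands in for a foreign-field fetch
        return object.__getattribute__(self, name)

obj = Lazy()
hasattr(obj, 'known')        # prints 'resolving known' -- the side effect fires
print('known' in dir(Lazy))  # True, and __getattribute__ is never called
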
72811d64d595c606f0931f8ecc47ca8b81a45dfa | diff --git a/src/Psy/Shell.php b/src/Psy/Shell.php
index <HASH>..<HASH> 100644
--- a/src/Psy/Shell.php
+++ b/src/Psy/Shell.php
@@ -859,6 +859,12 @@ class Shell extends Application
}
}
+ /**
+ * Initialize tab completion matchers.
+ *
+ * If tab completion is enabled this adds tab completion matchers to the
+ * auto completer and sets context if needed.
+ */
protected function initializeTabCompletion()
{
// auto completer needs shell to be linked to configuration because of the context aware matchers | Add docblock to initializeTabCompletion method | bobthecow_psysh | train |
19c3a156859094d1f487a423c2f8eabb9c8b27f9 | diff --git a/src/path.js b/src/path.js
index <HASH>..<HASH> 100644
--- a/src/path.js
+++ b/src/path.js
@@ -27,7 +27,9 @@ const validate = require('./validate')();
*
* @type {RegExp}
*/
-const RESOURCE_PATH_RE = /^projects\/([^/]*)\/databases\/([^/]*)(?:\/documents\/)?(.*)$/;
+const RESOURCE_PATH_RE =
+ // Note: [\s\S] matches all characters including newlines.
+ /^projects\/([^/]*)\/databases\/([^/]*)(?:\/documents\/)?([\s\S]*)$/;
/*!
* A regular expression to verify whether a field name can be passed to the
@@ -295,6 +297,7 @@ class ResourcePath extends Path {
get databaseId() {
return this._databaseId;
}
+
/**
* Returns true if the given string can be used as a relative or absolute
* resource path.
diff --git a/test/path.js b/test/path.js
index <HASH>..<HASH> 100644
--- a/test/path.js
+++ b/test/path.js
@@ -61,6 +61,13 @@ describe('ResourcePath', function() {
);
}, /Resource name 'projects\/project\/databases' is not valid\./);
});
+
+ it('accepts newlines', function() {
+ const path = ResourcePath.fromSlashSeparatedString(
+ `${DATABASE_ROOT}/documents/foo\nbar`
+ );
+ assert.equal(path.formattedName, `${DATABASE_ROOT}/documents/foo\nbar`);
+ });
});
describe('FieldPath', function() { | Allow newlines in Resource Paths (#<I>) | googleapis_nodejs-firestore | train |
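The [\s\S] trick is not JavaScript-specific; the same behavior can be checked with Python's re module (purely illustrative -- the project itself is Node.js):

import re

name = 'projects/p/databases/d/documents/foo\nbar'
dot = r'^projects/([^/]*)/databases/([^/]*)(?:/documents/)?(.*)$'
all_chars = r'^projects/([^/]*)/databases/([^/]*)(?:/documents/)?([\s\S]*)$'

print(re.match(dot, name))                       # None: '.' stops at the newline
print(repr(re.match(all_chars, name).group(3)))  # 'foo\nbar'
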
9622c0302969343844d8deacb5b71ddf01a38668 | diff --git a/lib/square.js b/lib/square.js
index <HASH>..<HASH> 100644
--- a/lib/square.js
+++ b/lib/square.js
@@ -758,10 +758,17 @@ Square.prototype.merge = function merge (extension, groupsout) {
function insert (match, commenttype, statement, file) {
var location = basepath(file, self.package.path);
- if (fs.existsSync(location)) return fs.readFileSync(location, 'utf8');
+ if (!fs.existsSync(location)) {
+ self.logger.error('ignoring [square] @%s statement %s does not exit', statement, file);
+ return '';
+ }
- self.logger.error('ignoring [square] @%s statement %s does not exit', statement, file);
- return '';
+ file = fs.readFileSync(location, 'utf8');
+ // figure out if we need to prefix the code with a semi-colon as it could be
+ // that it's included after a ~function()() that doesn't have an ending semi
+ // colon
+ if (file.charAt(0) !== ';' && extension === 'js') file = ';' + file;
+ return file;
}
Object.keys(this.package.groups).forEach(function eachGroup(group) { | [fix] Fixed an edge case where including files that didn't have a closing semi colon caused fucking mayhem. | observing_square | train |
0cc04b311d37bb0986afb0f4c53c183588677bf1 | diff --git a/setup.py b/setup.py
index <HASH>..<HASH> 100755
--- a/setup.py
+++ b/setup.py
@@ -15,7 +15,7 @@ setup(
keywords = 'zendesk api helpdesk',
description = 'Python API Wrapper for Zendesk',
classifiers = [
- 'Development Status :: 5 - Production/Stable',
+ 'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Topic :: Software Development :: Libraries :: Python Modules',
@@ -25,4 +25,3 @@ setup(
],
)
- | prep for release. drop status to beta | fprimex_zdesk | train |
74d7664b607a7ca2770c937a832b3339d7bf8203 | diff --git a/actionpack/lib/action_view/helpers/date_helper.rb b/actionpack/lib/action_view/helpers/date_helper.rb
index <HASH>..<HASH> 100644
--- a/actionpack/lib/action_view/helpers/date_helper.rb
+++ b/actionpack/lib/action_view/helpers/date_helper.rb
@@ -923,6 +923,7 @@ module ActionView
private
def datetime_selector(options, html_options)
datetime = value(object) || default_datetime(options)
+ @auto_index ||= nil
options = options.dup
options[:field_name] = @method_name | Avoid uninitialized variable warning. | rails_rails | train |
4390a4c617de106285b678601b5385dd312568c8 | diff --git a/config.py b/config.py
index <HASH>..<HASH> 100644
--- a/config.py
+++ b/config.py
@@ -60,6 +60,7 @@ EXTENSIONS = [
'invenio.ext.jasmine', # after assets
'flask_breadcrumbs:Breadcrumbs',
'invenio.modules.deposit.url_converters',
+ 'invenio.ext.iiif',
]
PACKAGES = [ | global: enable Flask-IIIF extension
* NEW Uses Flask-IIIF extension providing various image manipulation
capabilities.
* NEW Adds possibility to refer to documents and legacy BibDocFiles
via special path such as `/api/multimedia/image/recid:{recid}` or
`/api/multimedia/image/recid:{recid}-{filename}` or
`/api/multimedia/image/uuid` with proper permission checking.
(closes #<I>) (closes #<I>) | inveniosoftware_invenio-base | train |
3dded704fe405a2262584b3dd4717ca27e40f99e | diff --git a/kconfiglib.py b/kconfiglib.py
index <HASH>..<HASH> 100644
--- a/kconfiglib.py
+++ b/kconfiglib.py
@@ -1252,8 +1252,7 @@ class Config(object):
"MODULES, like older versions of the C "
"implementation did when 'option modules' "
"wasn't used.)",
- filename,
- linenr)
+ filename, linenr)
elif tokens.check(T_ALLNOCONFIG_Y):
if not isinstance(stmt, Symbol):
@@ -1774,9 +1773,7 @@ class Config(object):
def _stderr_msg(msg, filename, linenr):
if filename is not None:
- sys.stderr.write("{0}:".format(_clean_up_path(filename)))
- if linenr is not None:
- sys.stderr.write("{0}:".format(linenr))
+ sys.stderr.write("{0}:{1}: ".format(_clean_up_path(filename), linenr))
sys.stderr.write(msg + "\n")
def _get_expr_syms(expr): | linenr is non-None if filename is non-None.
Piggyback vertical nit. | ulfalizer_Kconfiglib | train |
534bbff06ff46bd21d71700c833486524273c951 | diff --git a/select2.js b/select2.js
index <HASH>..<HASH> 100644
--- a/select2.js
+++ b/select2.js
@@ -777,7 +777,7 @@ the specific language governing permissions and limitations under the Apache Lic
// for mouse events outside of itself so it can close itself. since the dropdown is now outside the select2's
// dom it will trigger the popup close, which is not what we want
// focusin can cause focus wars between modals and select2 since the dropdown is outside the modal.
- this.dropdown.on("click mouseup mousedown focusin", function (e) { e.stopPropagation(); });
+ this.dropdown.on("click mouseup mousedown touchstart touchend focusin", function (e) { e.stopPropagation(); });
this.nextSearchTerm = undefined; | trap touchstart touchend events within container | select2_select2 | train |
5c49ae3af1086fe1bf4df374a3b2bd6bdde37b87 | diff --git a/airflow/bin/cli.py b/airflow/bin/cli.py
index <HASH>..<HASH> 100644
--- a/airflow/bin/cli.py
+++ b/airflow/bin/cli.py
@@ -2395,7 +2395,7 @@ class CLIFactory(object):
subparsers.required = True
subparser_list = cls.dag_subparsers if dag_parser else cls.subparsers_dict.keys()
- for sub in subparser_list:
+ for sub in sorted(subparser_list):
sub = cls.subparsers_dict[sub]
sp = subparsers.add_parser(sub['func'].__name__, help=sub['help'])
sp.formatter_class = RawTextHelpFormatter | [AIRFLOW-<I>] Airflow CLI - Sort commands alphabetically (#<I>) | apache_airflow | train |