content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths, 1-50) | sha (stringlengths, 40) | patch (stringlengths, 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---
Python | Python | refuse datasets 2.5.0 while waiting for a patch | 114295c010dd9c94d48add7a0f091ba6ebdf482b | <ide><path>setup.py
<ide> "codecarbon==1.2.0",
<ide> "cookiecutter==1.7.3",
<ide> "dataclasses",
<del> "datasets",
<add> "datasets!=2.5.0",
<ide> "deepspeed>=0.6.5",
<ide> "dill<0.3.5",
<ide> "evaluate>=0.2.0",
<ide><path>src/transformers/dependency_versions_table.py
<ide> "codecarbon": "codecarbon==1.2.0",
<ide> "cookiecutter": "cookiecutter==1.7.3",
<ide> "dataclasses": "dataclasses",
<del> "datasets": "datasets",
<add> "datasets": "datasets!=2.5.0",
<ide> "deepspeed": "deepspeed>=0.6.5",
<ide> "dill": "dill<0.3.5",
<ide> "evaluate": "evaluate>=0.2.0", | 2 |
Text | Text | uppercase the es6 word | 25fa070622078efdd383e118bf2dd03cdaab3a8a | <ide><path>guide/english/redux/redux-reducers/index.md
<ide> const actions = [
<ide> ]
<ide> ```
<ide>
<del>you can generate new state based on those list of actions. How would we do that? By using a reducer function, like so:
<add>
<add>You can generate new state based on those list of actions. How would we do that? By using a reducer function, which takes in the current state and the action that was triggered and then returns a new state. Here we used the ES6 default parameter syntax to assign a default value to the state array, like so:
<ide>
<ide> ```javascript
<ide> | 1 |
Javascript | Javascript | add geometrycollection support to d3.geo.bounds | 9ae8feea6f05862a9c76c897d5a99334b25a0b18 | <ide><path>d3.chart.min.js
<del>(function(){function a(a){return[0,a.length-1]}function b(a){return[d3.quantile(a,.25),d3.quantile(a,.5),d3.quantile(a,.75)]}function c(a){return a.ranges}function d(a){return a.markers}function e(a){return a.measures}function f(a){return function(b){return"translate("+a(b)+",0)"}}function g(a){var b=a(0);return function(c){return Math.abs(a(c)-b)}}function j(a){return a[0]}function k(a){return a[1]}function l(a,b){var c=b.length-1;return b=b.slice().sort(d3.ascending),d3.range(a).map(function(d){return b[~~(d*c/a)]})}function m(a){return a.x}function n(a){return a.y}d3.chart={},d3.chart.box=function(){function k(a){a.each(function(a,b){a=a.map(g).sort(d3.ascending);var k=d3.select(this),l=a.length,m=a[0],n=a[l-1],o=a.quartiles=i(a),p=h&&h.call(this,a,b),q=p&&p.map(function(b){return a[b]}),r=p?d3.range(0,p[0]).concat(d3.range(p[1]+1,l)):d3.range(l),s=d3.scale.linear().domain(f&&f.call(this,a,b)||[m,n]).range([d,0]),t=this.__chart__||d3.scale.linear().domain([0,Infinity]).range(s.range());this.__chart__=s;var u=k.selectAll("line.center").data(q?[q]:[]);u.enter().insert("svg:line","rect").attr("class","center").attr("x1",c/2).attr("y1",function(a){return t(a[0])}).attr("x2",c/2).attr("y2",function(a){return t(a[1])}).style("opacity",1e-6).transition().duration(e).style("opacity",1).attr("y1",function(a){return s(a[0])}).attr("y2",function(a){return s(a[1])}),u.transition().duration(e).style("opacity",1).attr("y1",function(a){return s(a[0])}).attr("y2",function(a){return s(a[1])}),u.exit().transition().duration(e).style("opacity",1e-6).attr("y1",function(a){return s(a[0])}).attr("y2",function(a){return s(a[1])}).remove();var v=k.selectAll("rect.box").data([o]);v.enter().append("svg:rect").attr("class","box").attr("x",0).attr("y",function(a){return t(a[2])}).attr("width",c).attr("height",function(a){return t(a[0])-t(a[2])}).transition().duration(e).attr("y",function(a){return s(a[2])}).attr("height",function(a){return s(a[0])-s(a[2])}),v.transition().duration(e).attr("y",function(a){return s(a[2])}).attr("height",function(a){return s(a[0])-s(a[2])});var w=k.selectAll("line.median").data([o[1]]);w.enter().append("svg:line").attr("class","median").attr("x1",0).attr("y1",t).attr("x2",c).attr("y2",t).transition().duration(e).attr("y1",s).attr("y2",s),w.transition().duration(e).attr("y1",s).attr("y2",s);var x=k.selectAll("line.whisker").data(q||[]);x.enter().insert("svg:line","circle, text").attr("class","whisker").attr("x1",0).attr("y1",t).attr("x2",c).attr("y2",t).style("opacity",1e-6).transition().duration(e).attr("y1",s).attr("y2",s).style("opacity",1),x.transition().duration(e).attr("y1",s).attr("y2",s).style("opacity",1),x.exit().transition().duration(e).attr("y1",s).attr("y2",s).style("opacity",1e-6).remove();var y=k.selectAll("circle.outlier").data(r,Number);y.enter().insert("svg:circle","text").attr("class","outlier").attr("r",5).attr("cx",c/2).attr("cy",function(b){return t(a[b])}).style("opacity",1e-6).transition().duration(e).attr("cy",function(b){return s(a[b])}).style("opacity",1),y.transition().duration(e).attr("cy",function(b){return s(a[b])}).style("opacity",1),y.exit().transition().duration(e).attr("cy",function(b){return s(a[b])}).style("opacity",1e-6).remove();var z=j||s.tickFormat(8),A=k.selectAll("text.box").data(o);A.enter().append("svg:text").attr("class","box").attr("dy",".3em").attr("dx",function(a,b){return b&1?6:-6}).attr("x",function(a,b){return b&1?c:0}).attr("y",t).attr("text-anchor",function(a,b){return 
b&1?"start":"end"}).text(z).transition().duration(e).attr("y",s),A.transition().duration(e).text(z).attr("y",s);var B=k.selectAll("text.whisker").data(q||[]);B.enter().append("svg:text").attr("class","whisker").attr("dy",".3em").attr("dx",6).attr("x",c).attr("y",t).text(z).style("opacity",1e-6).transition().duration(e).attr("y",s).style("opacity",1),B.transition().duration(e).text(z).attr("y",s).style("opacity",1),B.exit().transition().duration(e).attr("y",s).style("opacity",1e-6).remove()}),d3.timer.flush()}var c=1,d=1,e=0,f=null,g=Number,h=a,i=b,j=null;return k.width=function(a){return arguments.length?(c=a,k):c},k.height=function(a){return arguments.length?(d=a,k):d},k.tickFormat=function(a){return arguments.length?(j=a,k):j},k.duration=function(a){return arguments.length?(e=a,k):e},k.domain=function(a){return arguments.length?(f=a==null?a:d3.functor(a),k):f},k.value=function(a){return arguments.length?(g=a,k):g},k.whiskers=function(a){return arguments.length?(h=a,k):h},k.quartiles=function(a){return arguments.length?(i=a,k):i},k},d3.chart.bullet=function(){function o(a){a.each(function(a,c){var d=i.call(this,a,c).slice().sort(d3.descending),e=j.call(this,a,c).slice().sort(d3.descending),o=k.call(this,a,c).slice().sort(d3.descending),p=d3.select(this),q=d3.scale.linear().domain([0,Math.max(d[0],e[0],o[0])]).range(b?[l,0]:[0,l]),r=this.__chart__||d3.scale.linear().domain([0,Infinity]).range(q.range());this.__chart__=q;var s=g(r),t=g(q),u=p.selectAll("rect.range").data(d);u.enter().append("svg:rect").attr("class",function(a,b){return"range s"+b}).attr("width",s).attr("height",m).attr("x",b?r:0).transition().duration(h).attr("width",t).attr("x",b?q:0),u.transition().duration(h).attr("x",b?q:0).attr("width",t).attr("height",m);var v=p.selectAll("rect.measure").data(o);v.enter().append("svg:rect").attr("class",function(a,b){return"measure s"+b}).attr("width",s).attr("height",m/3).attr("x",b?r:0).attr("y",m/3).transition().duration(h).attr("width",t).attr("x",b?q:0),v.transition().duration(h).attr("width",t).attr("height",m/3).attr("x",b?q:0).attr("y",m/3);var w=p.selectAll("line.marker").data(e);w.enter().append("svg:line").attr("class","marker").attr("x1",r).attr("x2",r).attr("y1",m/6).attr("y2",m*5/6).transition().duration(h).attr("x1",q).attr("x2",q),w.transition().duration(h).attr("x1",q).attr("x2",q).attr("y1",m/6).attr("y2",m*5/6);var x=n||q.tickFormat(8),y=p.selectAll("g.tick").data(q.ticks(8),function(a){return this.textContent||x(a)}),z=y.enter().append("svg:g").attr("class","tick").attr("transform",f(r)).style("opacity",1e-6);z.append("svg:line").attr("y1",m).attr("y2",m*7/6),z.append("svg:text").attr("text-anchor","middle").attr("dy","1em").attr("y",m*7/6).text(x),z.transition().duration(h).attr("transform",f(q)).style("opacity",1);var A=y.transition().duration(h).attr("transform",f(q)).style("opacity",1);A.select("line").attr("y1",m).attr("y2",m*7/6),A.select("text").attr("y",m*7/6),y.exit().transition().duration(h).attr("transform",f(q)).style("opacity",1e-6).remove()}),d3.timer.flush()}var a="left",b=!1,h=0,i=c,j=d,k=e,l=380,m=30,n=null;return o.orient=function(c){return arguments.length?(a=c,b=a=="right"||a=="bottom",o):a},o.ranges=function(a){return arguments.length?(i=a,o):i},o.markers=function(a){return arguments.length?(j=a,o):j},o.measures=function(a){return arguments.length?(k=a,o):k},o.width=function(a){return arguments.length?(l=a,o):l},o.height=function(a){return arguments.length?(m=a,o):m},o.tickFormat=function(a){return 
arguments.length?(n=a,o):n},o.duration=function(a){return arguments.length?(h=a,o):h},o},d3.chart.horizon=function(){function n(j){j.each(function(j,k){var n=d3.select(this),o=2*a+1,p=Infinity,q=-Infinity,r=-Infinity,s,t,u,v=j.map(function(a,b){var c=d.call(this,a,b),f=e.call(this,a,b);return c<p&&(p=c),c>q&&(q=c),-f>r&&(r=-f),f>r&&(r=f),[c,f]}),z=d3.scale.linear().domain([p,q]).range([0,f]),A=d3.scale.linear().domain([0,r]).range([0,g*a]);this.__chart__?(s=this.__chart__.x,t=this.__chart__.y,u=this.__chart__.id):(s=d3.scale.linear().domain([0,Infinity]).range(z.range()),t=d3.scale.linear().domain([0,Infinity]).range(A.range()),u=++i);var B=n.selectAll("defs").data([v]),C=B.enter().append("svg:defs");C.append("svg:clipPath").attr("id","d3_chart_horizon_clip"+u).append("svg:rect").attr("width",f).attr("height",g),B.select("rect").transition().duration(l).attr("width",f).attr("height",g),C.append("svg:path").attr("id","d3_chart_horizon_path"+u).attr("d",h.interpolate(c).x(function(a){return s(a[0])}).y0(g*a).y1(function(b){return g*a-t(b[1])})).transition().duration(l).attr("d",h.x(function(a){return z(a[0])}).y1(function(b){return g*a-A(b[1])})),B.select("path").transition().duration(l).attr("d",h),n.selectAll("g").data([null]).enter().append("svg:g").attr("clip-path","url(#d3_chart_horizon_clip"+u+")");var D=b=="offset"?function(b){return"translate(0,"+(b+(b<0)-a)*g+")"}:function(b){return(b<0?"scale(1,-1)":"")+"translate(0,"+(b-a)*g+")"},E=n.select("g").selectAll("use").data(d3.range(-1,-a-1,-1).concat(d3.range(1,a+1)),Number);E.enter().append("svg:use").attr("xlink:href","#d3_chart_horizon_path"+u).attr("transform",function(a){return D(a+(a>0?1:-1))}).style("fill",m).transition().duration(l).attr("transform",D),E.transition().duration(l).attr("transform",D).style("fill",m),E.exit().transition().duration(l).attr("transform",D).remove(),this.__chart__={x:z,y:A,id:u}}),d3.timer.flush()}var a=1,b="offset",c="linear",d=j,e=k,f=960,g=40,l=0,m=d3.scale.linear().domain([-1,0,1]).range(["#d62728","#fff","#1f77b4"]);return n.duration=function(a){return arguments.length?(l=+a,n):l},n.bands=function(b){return arguments.length?(a=+b,m.domain([-a,0,a]),n):a},n.mode=function(a){return arguments.length?(b=a+"",n):b},n.colors=function(a){return arguments.length?(m.range(a),n):m.range()},n.interpolate=function(a){return arguments.length?(c=a+"",n):c},n.x=function(a){return arguments.length?(d=a,n):d},n.y=function(a){return arguments.length?(e=a,n):e},n.width=function(a){return arguments.length?(f=+a,n):f},n.height=function(a){return arguments.length?(g=+a,n):g},n};var h=d3.svg.area(),i=0;d3.chart.qq=function(){function i(i){i.each(function(i,j){var k=d3.select(this),m=l(f,g.call(this,i,j)),n=l(f,h.call(this,i,j)),o=d&&d.call(this,i,j)||[d3.min(m),d3.max(m)],p=d&&d.call(this,i,j)||[d3.min(n),d3.max(n)],q,r,s=d3.scale.linear().domain(o).range([0,a]),t=d3.scale.linear().domain(p).range([b,0]);this.__chart__?(q=this.__chart__.x,r=this.__chart__.y):(q=d3.scale.linear().domain([0,Infinity]).range(s.range()),r=d3.scale.linear().domain([0,Infinity]).range(t.range())),this.__chart__={x:s,y:t};var u=k.selectAll("line.diagonal").data([null]);u.enter().append("svg:line").attr("class","diagonal").attr("x1",s(p[0])).attr("y1",t(o[0])).attr("x2",s(p[1])).attr("y2",t(o[1])),u.transition().duration(c).attr("x1",s(p[0])).attr("y1",t(o[0])).attr("x2",s(p[1])).attr("y2",t(o[1]));var 
v=k.selectAll("circle").data(d3.range(f).map(function(a){return{x:m[a],y:n[a]}}));v.enter().append("svg:circle").attr("class","quantile").attr("r",4.5).attr("cx",function(a){return q(a.x)}).attr("cy",function(a){return r(a.y)}).style("opacity",1e-6).transition().duration(c).attr("cx",function(a){return s(a.x)}).attr("cy",function(a){return t(a.y)}).style("opacity",1),v.transition().duration(c).attr("cx",function(a){return s(a.x)}).attr("cy",function(a){return t(a.y)}).style("opacity",1),v.exit().transition().duration(c).attr("cx",function(a){return s(a.x)}).attr("cy",function(a){return t(a.y)}).style("opacity",1e-6).remove();var w=e||s.tickFormat(4),z=e||t.tickFormat(4),A=function(a){return"translate("+s(a)+","+b+")"},B=function(a){return"translate(0,"+t(a)+")"},C=k.selectAll("g.x.tick").data(s.ticks(4),function(a){return this.textContent||w(a)}),D=C.enter().append("svg:g").attr("class","x tick").attr("transform",function(a){return"translate("+q(a)+","+b+")"}).style("opacity",1e-6);D.append("svg:line").attr("y1",0).attr("y2",-6),D.append("svg:text").attr("text-anchor","middle").attr("dy","1em").text(w),D.transition().duration(c).attr("transform",A).style("opacity",1),C.transition().duration(c).attr("transform",A).style("opacity",1),C.exit().transition().duration(c).attr("transform",A).style("opacity",1e-6).remove();var E=k.selectAll("g.y.tick").data(t.ticks(4),function(a){return this.textContent||z(a)}),F=E.enter().append("svg:g").attr("class","y tick").attr("transform",function(a){return"translate(0,"+r(a)+")"}).style("opacity",1e-6);F.append("svg:line").attr("x1",0).attr("x2",6),F.append("svg:text").attr("text-anchor","end").attr("dx","-.5em").attr("dy",".3em").text(z),F.transition().duration(c).attr("transform",B).style("opacity",1),E.transition().duration(c).attr("transform",B).style("opacity",1),E.exit().transition().duration(c).attr("transform",B).style("opacity",1e-6).remove()})}var a=1,b=1,c=0,d=null,e=null,f=100,g=m,h=n;return i.width=function(b){return arguments.length?(a=b,i):a},i.height=function(a){return arguments.length?(b=a,i):b},i.duration=function(a){return arguments.length?(c=a,i):c},i.domain=function(a){return arguments.length?(d=a==null?a:d3.functor(a),i):d},i.count=function(a){return arguments.length?(f=a,i):f},i.x=function(a){return arguments.length?(g=a,i):g},i.y=function(a){return arguments.length?(h=a,i):h},i.tickFormat=function(a){return arguments.length?(e=a,i):e},i}})();
<ide>\ No newline at end of file
<ide><path>d3.geo.js
<ide> function d3_geo_bounds(o, f) {
<ide> var d3_geo_boundsTypes = {
<ide> Feature: d3_geo_boundsFeature,
<ide> FeatureCollection: d3_geo_boundsFeatureCollection,
<add> GeometryCollection: d3_geo_boundsGeometryCollection,
<ide> LineString: d3_geo_boundsLineString,
<ide> MultiLineString: d3_geo_boundsMultiLineString,
<ide> MultiPoint: d3_geo_boundsLineString,
<ide> function d3_geo_boundsFeatureCollection(o, f) {
<ide> }
<ide> }
<ide>
<add>function d3_geo_boundsGeometryCollection(o, f) {
<add> for (var a = o.geometries, i = 0, n = a.length; i < n; i++) {
<add> d3_geo_bounds(a[i], f);
<add> }
<add>}
<add>
<ide> function d3_geo_boundsLineString(o, f) {
<ide> for (var a = o.coordinates, i = 0, n = a.length; i < n; i++) {
<ide> f.apply(null, a[i]);
<ide><path>d3.geo.min.js
<del>(function(){function b(a,b){return function(c){return c&&c.type in a?a[c.type](c):b}}function c(a){return"m0,"+a+"a"+a+","+a+" 0 1,1 0,"+ -2*a+"a"+a+","+a+" 0 1,1 0,"+2*a+"z"}function d(a,b){a.type in e&&e[a.type](a,b)}function f(a,b){d(a.geometry,b)}function g(a,b){for(var c=a.features,e=0,f=c.length;e<f;e++)d(c[e].geometry,b)}function h(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)b.apply(null,c[d])}function i(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)for(var f=c[d],g=0,h=f.length;g<h;g++)b.apply(null,f[g])}function j(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)for(var f=c[d][0],g=0,h=f.length;g<h;g++)b.apply(null,f[g])}function k(a,b){b.apply(null,a.coordinates)}function l(a,b){for(var c=a.coordinates[0],d=0,e=c.length;d<e;d++)b.apply(null,c[d])}function m(a){return a.source}function n(a){return a.target}function o(b,c){function r(b){var c=Math.sin(p-(b*=p))/q,d=Math.sin(b)/q,g=c*h*e+d*n*k,j=c*h*f+d*n*l,m=c*i+d*o;return[Math.atan2(j,g)/a,Math.atan2(m,Math.sqrt(g*g+j*j))/a]}var d=b[0]*a,e=Math.cos(d),f=Math.sin(d),g=b[1]*a,h=Math.cos(g),i=Math.sin(g),j=c[0]*a,k=Math.cos(j),l=Math.sin(j),m=c[1]*a,n=Math.cos(m),o=Math.sin(m),p=r.d=Math.acos(Math.max(-1,Math.min(1,i*o+h*n*Math.cos(j-d)))),q=Math.sin(p);return r}d3.geo={};var a=Math.PI/180;d3.geo.azimuthal=function(){function j(c){var g=c[0]*a-f,j=c[1]*a,k=Math.cos(g),l=Math.sin(g),m=Math.cos(j),n=Math.sin(j),o=b!=="orthographic"?i*n+h*m*k:null,p,q=b==="stereographic"?1/(1+o):b==="gnomonic"?1/o:b==="equidistant"?(p=Math.acos(o),p?p/Math.sin(p):0):b==="equalarea"?Math.sqrt(2/(1+o)):1,r=q*m*l,s=q*(i*m*k-h*n);return[d*r+e[0],d*s+e[1]]}var b="orthographic",c,d=200,e=[480,250],f,g,h,i;return j.invert=function(c){var g=(c[0]-e[0])/d,j=(c[1]-e[1])/d,k=Math.sqrt(g*g+j*j),l=b==="stereographic"?2*Math.atan(k):b==="gnomonic"?Math.atan(k):b==="equidistant"?k:b==="equalarea"?2*Math.asin(.5*k):Math.asin(k),m=Math.sin(l),n=Math.cos(l);return[(f+Math.atan2(g*m,k*h*n+j*i*m))/a,Math.asin(n*i-(k?j*m*h/k:0))/a]},j.mode=function(a){return arguments.length?(b=a+"",j):b},j.origin=function(b){return arguments.length?(c=b,f=c[0]*a,g=c[1]*a,h=Math.cos(g),i=Math.sin(g),j):c},j.scale=function(a){return arguments.length?(d=+a,j):d},j.translate=function(a){return arguments.length?(e=[+a[0],+a[1]],j):e},j.origin([0,0])},d3.geo.albers=function(){function j(b){var c=g*(a*b[0]-f),j=Math.sqrt(h-2*g*Math.sin(a*b[1]))/g;return[d*j*Math.sin(c)+e[0],d*(j*Math.cos(c)-i)+e[1]]}function k(){var d=a*c[0],e=a*c[1],k=a*b[1],l=Math.sin(d),m=Math.cos(d);return f=a*b[0],g=.5*(l+Math.sin(e)),h=m*m+2*g*l,i=Math.sqrt(h-2*g*Math.sin(k))/g,j}var b=[-98,38],c=[29.5,45.5],d=1e3,e=[480,250],f,g,h,i;return j.invert=function(b){var c=(b[0]-e[0])/d,j=(b[1]-e[1])/d,k=i+j,l=Math.atan2(c,k),m=Math.sqrt(c*c+k*k);return[(f+l/g)/a,Math.asin((h-m*m*g*g)/(2*g))/a]},j.origin=function(a){return arguments.length?(b=[+a[0],+a[1]],k()):b},j.parallels=function(a){return arguments.length?(c=[+a[0],+a[1]],k()):c},j.scale=function(a){return arguments.length?(d=+a,j):d},j.translate=function(a){return arguments.length?(e=[+a[0],+a[1]],j):e},k()},d3.geo.albersUsa=function(){function e(e){var f=e[0],g=e[1];return(g>50?b:f<-140?c:g<21?d:a)(e)}var a=d3.geo.albers(),b=d3.geo.albers().origin([-160,60]).parallels([55,65]),c=d3.geo.albers().origin([-160,20]).parallels([8,18]),d=d3.geo.albers().origin([-60,10]).parallels([8,18]);return e.scale=function(f){return 
arguments.length?(a.scale(f),b.scale(f*.6),c.scale(f),d.scale(f*1.5),e.translate(a.translate())):a.scale()},e.translate=function(f){if(!arguments.length)return a.translate();var g=a.scale()/1e3,h=f[0],i=f[1];return a.translate(f),b.translate([h-400*g,i+170*g]),c.translate([h-190*g,i+200*g]),d.translate([h+580*g,i+430*g]),e},e.scale(a.scale())},d3.geo.bonne=function(){function h(h){var i=h[0]*a-d,j=h[1]*a-e;if(f){var k=g+f-j,l=i*Math.cos(j)/k;i=k*Math.sin(l),j=k*Math.cos(l)-g}else i*=Math.cos(j),j*=-1;return[b*i+c[0],b*j+c[1]]}var b=200,c=[480,250],d,e,f,g;return h.invert=function(e){var h=(e[0]-c[0])/b,i=(e[1]-c[1])/b;if(f){var j=g+i,k=Math.sqrt(h*h+j*j);i=g+f-k,h=d+k*Math.atan2(h,j)/Math.cos(i)}else i*=-1,h/=Math.cos(i);return[h/a,i/a]},h.parallel=function(b){return arguments.length?(g=1/Math.tan(f=b*a),h):f/a},h.origin=function(b){return arguments.length?(d=b[0]*a,e=b[1]*a,h):[d/a,e/a]},h.scale=function(a){return arguments.length?(b=+a,h):b},h.translate=function(a){return arguments.length?(c=[+a[0],+a[1]],h):c},h.origin([0,0]).parallel(45)},d3.geo.equirectangular=function(){function c(c){var d=c[0]/360,e=-c[1]/360;return[a*d+b[0],a*e+b[1]]}var a=500,b=[480,250];return c.invert=function(c){var d=(c[0]-b[0])/a,e=(c[1]-b[1])/a;return[360*d,-360*e]},c.scale=function(b){return arguments.length?(a=+b,c):a},c.translate=function(a){return arguments.length?(b=[+a[0],+a[1]],c):b},c},d3.geo.mercator=function(){function d(d){var e=d[0]/360,f=-(Math.log(Math.tan(Math.PI/4+d[1]*a/2))/a)/360;return[b*e+c[0],b*Math.max(-0.5,Math.min(.5,f))+c[1]]}var b=500,c=[480,250];return d.invert=function(d){var e=(d[0]-c[0])/b,f=(d[1]-c[1])/b;return[360*e,2*Math.atan(Math.exp(-360*f*a))/a-90]},d.scale=function(a){return arguments.length?(b=+a,d):b},d.translate=function(a){return arguments.length?(c=[+a[0],+a[1]],d):c},d},d3.geo.path=function(){function f(b,e){return typeof a=="function"&&(d=c(a.apply(this,arguments))),h(b)||null}function g(a){return e(a).join(",")}function j(a){var b=m(a[0]),c=0,d=a.length;while(++c<d)b-=m(a[c]);return b}function k(a){var b=d3.geom.polygon(a[0].map(e)),c=b.area(),d=b.centroid(c<0?(c*=-1,1):-1),f=d[0],g=d[1],h=c,i=0,j=a.length;while(++i<j)b=d3.geom.polygon(a[i].map(e)),c=b.area(),d=b.centroid(c<0?(c*=-1,1):-1),f-=d[0],g-=d[1],h-=c;return[f,g,6*h]}function m(a){return Math.abs(d3.geom.polygon(a.map(e)).area())}var a=4.5,d=c(a),e=d3.geo.albersUsa(),h=b({FeatureCollection:function(a){var b=[],c=a.features,d=-1,e=c.length;while(++d<e)b.push(h(c[d].geometry));return b.join("")},Feature:function(a){return h(a.geometry)},Point:function(a){return"M"+g(a.coordinates)+d},MultiPoint:function(a){var b=[],c=a.coordinates,e=-1,f=c.length;while(++e<f)b.push("M",g(c[e]),d);return b.join("")},LineString:function(a){var b=["M"],c=a.coordinates,d=-1,e=c.length;while(++d<e)b.push(g(c[d]),"L");return b.pop(),b.join("")},MultiLineString:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i;while(++d<e){f=c[d],h=-1,i=f.length,b.push("M");while(++h<i)b.push(g(f[h]),"L");b.pop()}return b.join("")},Polygon:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i;while(++d<e){f=c[d],h=-1;if((i=f.length-1)>0){b.push("M");while(++h<i)b.push(g(f[h]),"L");b[b.length-1]="Z"}}return b.join("")},MultiPolygon:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i,j,k,l;while(++d<e){f=c[d],h=-1,i=f.length;while(++h<i){j=f[h],k=-1;if((l=j.length-1)>0){b.push("M");while(++k<l)b.push(g(j[k]),"L");b[b.length-1]="Z"}}}return b.join("")},GeometryCollection:function(a){var 
b=[],c=a.geometries,d=-1,e=c.length;while(++d<e)b.push(h(c[d]));return b.join("")}}),i=f.area=b({FeatureCollection:function(a){var b=0,c=a.features,d=-1,e=c.length;while(++d<e)b+=i(c[d]);return b},Feature:function(a){return i(a.geometry)},Polygon:function(a){return j(a.coordinates)},MultiPolygon:function(a){var b=0,c=a.coordinates,d=-1,e=c.length;while(++d<e)b+=j(c[d]);return b},GeometryCollection:function(a){var b=0,c=a.geometries,d=-1,e=c.length;while(++d<e)b+=i(c[d]);return b}},0),l=f.centroid=b({Feature:function(a){return l(a.geometry)},Polygon:function(a){var b=k(a.coordinates);return[b[0]/b[2],b[1]/b[2]]},MultiPolygon:function(a){var b=0,c=a.coordinates,d,e=0,f=0,g=0,h=-1,i=c.length;while(++h<i)d=k(c[h]),e+=d[0],f+=d[1],g+=d[2];return[e/g,f/g]}});return f.projection=function(a){return e=a,f},f.pointRadius=function(b){return typeof b=="function"?a=b:(a=+b,d=c(a)),f},f},d3.geo.bounds=function(a){var b=Infinity,c=Infinity,e=-Infinity,f=-Infinity;return d(a,function(a,d){a<b&&(b=a),a>e&&(e=a),d<c&&(c=d),d>f&&(f=d)}),[[b,c],[e,f]]};var e={Feature:f,FeatureCollection:g,LineString:h,MultiLineString:i,MultiPoint:h,MultiPolygon:j,Point:k,Polygon:l};d3.geo.circle=function(){function g(){}function h(a){return f.distance(a)<e}function j(a){var b=-1,c=a.length,d=[],g,h,i,j,l;while(++b<c)l=f.distance(i=a[b]),l<e?(h&&d.push(o(h,i)((j-e)/(j-l))),d.push(i),g=h=null):(h=i,!g&&d.length&&(d.push(o(d[d.length-1],h)((e-j)/(l-j))),g=h)),j=l;return h&&d.length&&(l=f.distance(i=d[0]),d.push(o(h,i)((j-e)/(j-l)))),k(d)}function k(a){var b=0,c=a.length,d,e,g=c?[a[0]]:a,h,i=f.source();while(++b<c){h=f.source(a[b-1])(a[b]).coordinates;for(d=0,e=h.length;++d<e;)g.push(h[d])}return f.source(i),g}var c=[0,0],d=89.99,e=d*a,f=d3.geo.greatArc().target(Object);g.clip=function(a){return f.source(typeof c=="function"?c.apply(this,arguments):c),i(a)};var i=b({FeatureCollection:function(a){var b=a.features.map(i).filter(Object);return b&&(a=Object.create(a),a.features=b,a)},Feature:function(a){var b=i(a.geometry);return b&&(a=Object.create(a),a.geometry=b,a)},Point:function(a){return h(a.coordinates)&&a},MultiPoint:function(a){var b=a.coordinates.filter(h);return b.length&&{type:a.type,coordinates:b}},LineString:function(a){var b=j(a.coordinates);return b.length&&(a=Object.create(a),a.coordinates=b,a)},MultiLineString:function(a){var b=a.coordinates.map(j).filter(function(a){return a.length});return b.length&&(a=Object.create(a),a.coordinates=b,a)},Polygon:function(a){var b=a.coordinates.map(j);return b[0].length&&(a=Object.create(a),a.coordinates=b,a)},MultiPolygon:function(a){var b=a.coordinates.map(function(a){return a.map(j)}).filter(function(a){return a[0].length});return b.length&&(a=Object.create(a),a.coordinates=b,a)},GeometryCollection:function(a){var b=a.geometries.map(i).filter(Object);return b.length&&(a=Object.create(a),a.geometries=b,a)}});return g.origin=function(a){return arguments.length?(c=a,g):c},g.angle=function(b){return arguments.length?(e=(d=+b)*a,g):d},g.precision=function(a){return arguments.length?(f.precision(a),g):f.precision()},g},d3.geo.greatArc=function(){function e(){var a=typeof b=="function"?b.apply(this,arguments):b,e=typeof c=="function"?c.apply(this,arguments):c,f=o(a,e),g=d/f.d,h=0,i=[a];while((h+=g)<1)i.push(f(h));return i.push(e),{type:"LineString",coordinates:i}}var b=m,c=n,d=6*a;return e.distance=function(){var a=typeof b=="function"?b.apply(this,arguments):b,d=typeof c=="function"?c.apply(this,arguments):c;return o(a,d).d},e.source=function(a){return 
arguments.length?(b=a,e):b},e.target=function(a){return arguments.length?(c=a,e):c},e.precision=function(b){return arguments.length?(d=b*a,e):d/a},e},d3.geo.greatCircle=d3.geo.circle})();
<ide>\ No newline at end of file
<add>(function(){function b(a,b){return function(c){return c&&c.type in a?a[c.type](c):b}}function c(a){return"m0,"+a+"a"+a+","+a+" 0 1,1 0,"+ -2*a+"a"+a+","+a+" 0 1,1 0,"+2*a+"z"}function d(a,b){a.type in e&&e[a.type](a,b)}function f(a,b){d(a.geometry,b)}function g(a,b){for(var c=a.features,e=0,f=c.length;e<f;e++)d(c[e].geometry,b)}function h(a,b){for(var c=a.geometries,e=0,f=c.length;e<f;e++)d(c[e],b)}function i(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)b.apply(null,c[d])}function j(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)for(var f=c[d],g=0,h=f.length;g<h;g++)b.apply(null,f[g])}function k(a,b){for(var c=a.coordinates,d=0,e=c.length;d<e;d++)for(var f=c[d][0],g=0,h=f.length;g<h;g++)b.apply(null,f[g])}function l(a,b){b.apply(null,a.coordinates)}function m(a,b){for(var c=a.coordinates[0],d=0,e=c.length;d<e;d++)b.apply(null,c[d])}function n(a){return a.source}function o(a){return a.target}function p(b,c){function r(b){var c=Math.sin(p-(b*=p))/q,d=Math.sin(b)/q,g=c*h*e+d*n*k,j=c*h*f+d*n*l,m=c*i+d*o;return[Math.atan2(j,g)/a,Math.atan2(m,Math.sqrt(g*g+j*j))/a]}var d=b[0]*a,e=Math.cos(d),f=Math.sin(d),g=b[1]*a,h=Math.cos(g),i=Math.sin(g),j=c[0]*a,k=Math.cos(j),l=Math.sin(j),m=c[1]*a,n=Math.cos(m),o=Math.sin(m),p=r.d=Math.acos(Math.max(-1,Math.min(1,i*o+h*n*Math.cos(j-d)))),q=Math.sin(p);return r}d3.geo={};var a=Math.PI/180;d3.geo.azimuthal=function(){function j(c){var g=c[0]*a-f,j=c[1]*a,k=Math.cos(g),l=Math.sin(g),m=Math.cos(j),n=Math.sin(j),o=b!=="orthographic"?i*n+h*m*k:null,p,q=b==="stereographic"?1/(1+o):b==="gnomonic"?1/o:b==="equidistant"?(p=Math.acos(o),p?p/Math.sin(p):0):b==="equalarea"?Math.sqrt(2/(1+o)):1,r=q*m*l,s=q*(i*m*k-h*n);return[d*r+e[0],d*s+e[1]]}var b="orthographic",c,d=200,e=[480,250],f,g,h,i;return j.invert=function(c){var g=(c[0]-e[0])/d,j=(c[1]-e[1])/d,k=Math.sqrt(g*g+j*j),l=b==="stereographic"?2*Math.atan(k):b==="gnomonic"?Math.atan(k):b==="equidistant"?k:b==="equalarea"?2*Math.asin(.5*k):Math.asin(k),m=Math.sin(l),n=Math.cos(l);return[(f+Math.atan2(g*m,k*h*n+j*i*m))/a,Math.asin(n*i-(k?j*m*h/k:0))/a]},j.mode=function(a){return arguments.length?(b=a+"",j):b},j.origin=function(b){return arguments.length?(c=b,f=c[0]*a,g=c[1]*a,h=Math.cos(g),i=Math.sin(g),j):c},j.scale=function(a){return arguments.length?(d=+a,j):d},j.translate=function(a){return arguments.length?(e=[+a[0],+a[1]],j):e},j.origin([0,0])},d3.geo.albers=function(){function j(b){var c=g*(a*b[0]-f),j=Math.sqrt(h-2*g*Math.sin(a*b[1]))/g;return[d*j*Math.sin(c)+e[0],d*(j*Math.cos(c)-i)+e[1]]}function k(){var d=a*c[0],e=a*c[1],k=a*b[1],l=Math.sin(d),m=Math.cos(d);return f=a*b[0],g=.5*(l+Math.sin(e)),h=m*m+2*g*l,i=Math.sqrt(h-2*g*Math.sin(k))/g,j}var b=[-98,38],c=[29.5,45.5],d=1e3,e=[480,250],f,g,h,i;return j.invert=function(b){var c=(b[0]-e[0])/d,j=(b[1]-e[1])/d,k=i+j,l=Math.atan2(c,k),m=Math.sqrt(c*c+k*k);return[(f+l/g)/a,Math.asin((h-m*m*g*g)/(2*g))/a]},j.origin=function(a){return arguments.length?(b=[+a[0],+a[1]],k()):b},j.parallels=function(a){return arguments.length?(c=[+a[0],+a[1]],k()):c},j.scale=function(a){return arguments.length?(d=+a,j):d},j.translate=function(a){return arguments.length?(e=[+a[0],+a[1]],j):e},k()},d3.geo.albersUsa=function(){function e(e){var f=e[0],g=e[1];return(g>50?b:f<-140?c:g<21?d:a)(e)}var a=d3.geo.albers(),b=d3.geo.albers().origin([-160,60]).parallels([55,65]),c=d3.geo.albers().origin([-160,20]).parallels([8,18]),d=d3.geo.albers().origin([-60,10]).parallels([8,18]);return e.scale=function(f){return 
arguments.length?(a.scale(f),b.scale(f*.6),c.scale(f),d.scale(f*1.5),e.translate(a.translate())):a.scale()},e.translate=function(f){if(!arguments.length)return a.translate();var g=a.scale()/1e3,h=f[0],i=f[1];return a.translate(f),b.translate([h-400*g,i+170*g]),c.translate([h-190*g,i+200*g]),d.translate([h+580*g,i+430*g]),e},e.scale(a.scale())},d3.geo.bonne=function(){function h(h){var i=h[0]*a-d,j=h[1]*a-e;if(f){var k=g+f-j,l=i*Math.cos(j)/k;i=k*Math.sin(l),j=k*Math.cos(l)-g}else i*=Math.cos(j),j*=-1;return[b*i+c[0],b*j+c[1]]}var b=200,c=[480,250],d,e,f,g;return h.invert=function(e){var h=(e[0]-c[0])/b,i=(e[1]-c[1])/b;if(f){var j=g+i,k=Math.sqrt(h*h+j*j);i=g+f-k,h=d+k*Math.atan2(h,j)/Math.cos(i)}else i*=-1,h/=Math.cos(i);return[h/a,i/a]},h.parallel=function(b){return arguments.length?(g=1/Math.tan(f=b*a),h):f/a},h.origin=function(b){return arguments.length?(d=b[0]*a,e=b[1]*a,h):[d/a,e/a]},h.scale=function(a){return arguments.length?(b=+a,h):b},h.translate=function(a){return arguments.length?(c=[+a[0],+a[1]],h):c},h.origin([0,0]).parallel(45)},d3.geo.equirectangular=function(){function c(c){var d=c[0]/360,e=-c[1]/360;return[a*d+b[0],a*e+b[1]]}var a=500,b=[480,250];return c.invert=function(c){var d=(c[0]-b[0])/a,e=(c[1]-b[1])/a;return[360*d,-360*e]},c.scale=function(b){return arguments.length?(a=+b,c):a},c.translate=function(a){return arguments.length?(b=[+a[0],+a[1]],c):b},c},d3.geo.mercator=function(){function d(d){var e=d[0]/360,f=-(Math.log(Math.tan(Math.PI/4+d[1]*a/2))/a)/360;return[b*e+c[0],b*Math.max(-0.5,Math.min(.5,f))+c[1]]}var b=500,c=[480,250];return d.invert=function(d){var e=(d[0]-c[0])/b,f=(d[1]-c[1])/b;return[360*e,2*Math.atan(Math.exp(-360*f*a))/a-90]},d.scale=function(a){return arguments.length?(b=+a,d):b},d.translate=function(a){return arguments.length?(c=[+a[0],+a[1]],d):c},d},d3.geo.path=function(){function f(b,e){return typeof a=="function"&&(d=c(a.apply(this,arguments))),h(b)||null}function g(a){return e(a).join(",")}function j(a){var b=m(a[0]),c=0,d=a.length;while(++c<d)b-=m(a[c]);return b}function k(a){var b=d3.geom.polygon(a[0].map(e)),c=b.area(),d=b.centroid(c<0?(c*=-1,1):-1),f=d[0],g=d[1],h=c,i=0,j=a.length;while(++i<j)b=d3.geom.polygon(a[i].map(e)),c=b.area(),d=b.centroid(c<0?(c*=-1,1):-1),f-=d[0],g-=d[1],h-=c;return[f,g,6*h]}function m(a){return Math.abs(d3.geom.polygon(a.map(e)).area())}var a=4.5,d=c(a),e=d3.geo.albersUsa(),h=b({FeatureCollection:function(a){var b=[],c=a.features,d=-1,e=c.length;while(++d<e)b.push(h(c[d].geometry));return b.join("")},Feature:function(a){return h(a.geometry)},Point:function(a){return"M"+g(a.coordinates)+d},MultiPoint:function(a){var b=[],c=a.coordinates,e=-1,f=c.length;while(++e<f)b.push("M",g(c[e]),d);return b.join("")},LineString:function(a){var b=["M"],c=a.coordinates,d=-1,e=c.length;while(++d<e)b.push(g(c[d]),"L");return b.pop(),b.join("")},MultiLineString:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i;while(++d<e){f=c[d],h=-1,i=f.length,b.push("M");while(++h<i)b.push(g(f[h]),"L");b.pop()}return b.join("")},Polygon:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i;while(++d<e){f=c[d],h=-1;if((i=f.length-1)>0){b.push("M");while(++h<i)b.push(g(f[h]),"L");b[b.length-1]="Z"}}return b.join("")},MultiPolygon:function(a){var b=[],c=a.coordinates,d=-1,e=c.length,f,h,i,j,k,l;while(++d<e){f=c[d],h=-1,i=f.length;while(++h<i){j=f[h],k=-1;if((l=j.length-1)>0){b.push("M");while(++k<l)b.push(g(j[k]),"L");b[b.length-1]="Z"}}}return b.join("")},GeometryCollection:function(a){var 
b=[],c=a.geometries,d=-1,e=c.length;while(++d<e)b.push(h(c[d]));return b.join("")}}),i=f.area=b({FeatureCollection:function(a){var b=0,c=a.features,d=-1,e=c.length;while(++d<e)b+=i(c[d]);return b},Feature:function(a){return i(a.geometry)},Polygon:function(a){return j(a.coordinates)},MultiPolygon:function(a){var b=0,c=a.coordinates,d=-1,e=c.length;while(++d<e)b+=j(c[d]);return b},GeometryCollection:function(a){var b=0,c=a.geometries,d=-1,e=c.length;while(++d<e)b+=i(c[d]);return b}},0),l=f.centroid=b({Feature:function(a){return l(a.geometry)},Polygon:function(a){var b=k(a.coordinates);return[b[0]/b[2],b[1]/b[2]]},MultiPolygon:function(a){var b=0,c=a.coordinates,d,e=0,f=0,g=0,h=-1,i=c.length;while(++h<i)d=k(c[h]),e+=d[0],f+=d[1],g+=d[2];return[e/g,f/g]}});return f.projection=function(a){return e=a,f},f.pointRadius=function(b){return typeof b=="function"?a=b:(a=+b,d=c(a)),f},f},d3.geo.bounds=function(a){var b=Infinity,c=Infinity,e=-Infinity,f=-Infinity;return d(a,function(a,d){a<b&&(b=a),a>e&&(e=a),d<c&&(c=d),d>f&&(f=d)}),[[b,c],[e,f]]};var e={Feature:f,FeatureCollection:g,GeometryCollection:h,LineString:i,MultiLineString:j,MultiPoint:i,MultiPolygon:k,Point:l,Polygon:m};d3.geo.circle=function(){function g(){}function h(a){return f.distance(a)<e}function j(a){var b=-1,c=a.length,d=[],g,h,i,j,l;while(++b<c)l=f.distance(i=a[b]),l<e?(h&&d.push(p(h,i)((j-e)/(j-l))),d.push(i),g=h=null):(h=i,!g&&d.length&&(d.push(p(d[d.length-1],h)((e-j)/(l-j))),g=h)),j=l;return h&&d.length&&(l=f.distance(i=d[0]),d.push(p(h,i)((j-e)/(j-l)))),k(d)}function k(a){var b=0,c=a.length,d,e,g=c?[a[0]]:a,h,i=f.source();while(++b<c){h=f.source(a[b-1])(a[b]).coordinates;for(d=0,e=h.length;++d<e;)g.push(h[d])}return f.source(i),g}var c=[0,0],d=89.99,e=d*a,f=d3.geo.greatArc().target(Object);g.clip=function(a){return f.source(typeof c=="function"?c.apply(this,arguments):c),i(a)};var i=b({FeatureCollection:function(a){var b=a.features.map(i).filter(Object);return b&&(a=Object.create(a),a.features=b,a)},Feature:function(a){var b=i(a.geometry);return b&&(a=Object.create(a),a.geometry=b,a)},Point:function(a){return h(a.coordinates)&&a},MultiPoint:function(a){var b=a.coordinates.filter(h);return b.length&&{type:a.type,coordinates:b}},LineString:function(a){var b=j(a.coordinates);return b.length&&(a=Object.create(a),a.coordinates=b,a)},MultiLineString:function(a){var b=a.coordinates.map(j).filter(function(a){return a.length});return b.length&&(a=Object.create(a),a.coordinates=b,a)},Polygon:function(a){var b=a.coordinates.map(j);return b[0].length&&(a=Object.create(a),a.coordinates=b,a)},MultiPolygon:function(a){var b=a.coordinates.map(function(a){return a.map(j)}).filter(function(a){return a[0].length});return b.length&&(a=Object.create(a),a.coordinates=b,a)},GeometryCollection:function(a){var b=a.geometries.map(i).filter(Object);return b.length&&(a=Object.create(a),a.geometries=b,a)}});return g.origin=function(a){return arguments.length?(c=a,g):c},g.angle=function(b){return arguments.length?(e=(d=+b)*a,g):d},g.precision=function(a){return arguments.length?(f.precision(a),g):f.precision()},g},d3.geo.greatArc=function(){function e(){var a=typeof b=="function"?b.apply(this,arguments):b,e=typeof c=="function"?c.apply(this,arguments):c,f=p(a,e),g=d/f.d,h=0,i=[a];while((h+=g)<1)i.push(f(h));return i.push(e),{type:"LineString",coordinates:i}}var b=n,c=o,d=6*a;return e.distance=function(){var a=typeof b=="function"?b.apply(this,arguments):b,d=typeof c=="function"?c.apply(this,arguments):c;return p(a,d).d},e.source=function(a){return 
arguments.length?(b=a,e):b},e.target=function(a){return arguments.length?(c=a,e):c},e.precision=function(b){return arguments.length?(d=b*a,e):d/a},e},d3.geo.greatCircle=d3.geo.circle})();
<ide>\ No newline at end of file
<ide><path>src/geo/bounds.js
<ide> function d3_geo_bounds(o, f) {
<ide> var d3_geo_boundsTypes = {
<ide> Feature: d3_geo_boundsFeature,
<ide> FeatureCollection: d3_geo_boundsFeatureCollection,
<add> GeometryCollection: d3_geo_boundsGeometryCollection,
<ide> LineString: d3_geo_boundsLineString,
<ide> MultiLineString: d3_geo_boundsMultiLineString,
<ide> MultiPoint: d3_geo_boundsLineString,
<ide> function d3_geo_boundsFeatureCollection(o, f) {
<ide> }
<ide> }
<ide>
<add>function d3_geo_boundsGeometryCollection(o, f) {
<add> for (var a = o.geometries, i = 0, n = a.length; i < n; i++) {
<add> d3_geo_bounds(a[i], f);
<add> }
<add>}
<add>
<ide> function d3_geo_boundsLineString(o, f) {
<ide> for (var a = o.coordinates, i = 0, n = a.length; i < n; i++) {
<ide> f.apply(null, a[i]); | 4 |
PHP | PHP | fix cs after 282f6c80 | ab0da785290cd05acc2830873c86cca61619486d | <ide><path>src/Error/BaseErrorHandler.php
<ide> public function register()
<ide> }
<ide> $this->handleFatalError(
<ide> $error['type'],
<del> $error['message'],
<add> $error['message'],
<ide> $error['file'],
<ide> $error['line']
<ide> ); | 1 |
Javascript | Javascript | drop unused code | 2cb9069bd09ed9b4754eabb3f2edb02604676c91 | <ide><path>packages/ember-htmlbars/lib/keywords/customized_outlet.js
<ide> export default {
<ide> setupState(state, env, scope, params, hash) {
<ide> var read = env.hooks.getValue;
<ide> var viewClass = readViewFactory(read(hash.view), env.container);
<del> var outletName = read(params[0]) || 'main';
<del> return { viewClass, outletName };
<add> return { viewClass };
<ide> },
<ide> render(renderNode, env, scope, params, hash, template, inverse, visitor) {
<ide> var state = renderNode.state; | 1 |
Text | Text | add short read | f67d87faad50e8985dd29ba9bd87988a65972bc2 | <ide><path>guide/english/computational-genomics/index.md
<ide> This section will focus on the various data structures and algorithms that are u
<ide> Introduction to Computational Genomics by Matthew W. Hahn
<ide>
<ide> A great starter book for beginners
<add>
<add>
<add>### Short reads
<add>
<add>Data Structures and Algorithms Underlying Genome Reconstruction from Short Reads by Bruce F. Cockburn | 1 |
Ruby | Ruby | reduce need for interpolating `appdir` in casks | df3bbd0299b964e5bc759130a10fce93422a08f8 | <ide><path>Library/Homebrew/cask/artifact/moved.rb
<ide> def move(force: false, command: nil, **options)
<ide> command.run!("/bin/mv", args: [source, target], sudo: true)
<ide> end
<ide>
<add> FileUtils.ln_sf target, source
<add>
<ide> add_altname_metadata(target, source.basename, command: command)
<ide> end
<ide>
<ide> def move_back(skip: false, force: false, command: nil, **options)
<add> FileUtils.rm source if source.symlink? && source.dirname.join(source.readlink) == target
<add>
<ide> if Utils.path_occupied?(source)
<ide> message = "It seems there is already #{self.class.english_article} " \
<ide> "#{self.class.english_name} at '#{source}'"
<ide><path>Library/Homebrew/cask/installer.rb
<ide> def install
<ide> fetch
<ide> uninstall_existing_cask if reinstall?
<ide>
<add> backup if force? && @cask.staged_path.exist? && @cask.metadata_versioned_path.exist?
<add>
<ide> oh1 "Installing Cask #{Formatter.identifier(@cask)}"
<ide> opoo "macOS's Gatekeeper has been disabled for this Cask" unless quarantine?
<ide> stage
<ide> def install
<ide>
<ide> ::Utils::Analytics.report_event("cask_install", @cask.token) unless @cask.tap&.private?
<ide>
<add> purge_backed_up_versioned_files
<add>
<ide> puts summary
<add> rescue
<add> restore_backup
<add> raise
<ide> end
<ide>
<ide> def check_conflicts
<ide> def revert_upgrade
<ide> end
<ide>
<ide> def finalize_upgrade
<add> ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
<add>
<ide> purge_backed_up_versioned_files
<ide>
<ide> puts summary
<ide> def gain_permissions_remove(path)
<ide> end
<ide>
<ide> def purge_backed_up_versioned_files
<del> ohai "Purging files for version #{@cask.version} of Cask #{@cask}"
<del>
<ide> # versioned staged distribution
<ide> gain_permissions_remove(backup_path) if backup_path&.exist?
<ide>
<ide><path>Library/Homebrew/test/cask/artifact/alt_target_spec.rb
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> end
<ide>
<ide> describe "when app is in a subdirectory" do
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(appsubdir.join("Caffeine.app")).not_to exist
<add> expect(appsubdir.join("Caffeine.app")).to be_a_symlink
<ide> end
<ide> end
<ide>
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide>
<ide> expect(cask.config.appdir.join("Caffeine Deluxe.app")).not_to exist
<ide> expect(cask.staged_path.join("Caffeine Deluxe.app")).to be_a_directory
<ide><path>Library/Homebrew/test/cask/artifact/app_spec.rb
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> end
<ide>
<ide> describe "when app is in a subdirectory" do
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(appsubdir.join("Caffeine.app")).not_to exist
<add> expect(appsubdir.join("Caffeine.app")).to be_a_symlink
<ide> end
<ide> end
<ide>
<ide> install_phase
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide>
<ide> expect(cask.config.appdir.join("Caffeine Deluxe.app")).not_to exist
<ide> expect(cask.staged_path.join("Caffeine Deluxe.app")).to exist
<ide> .to output(stdout).to_stdout
<ide> .and output(stderr).to_stderr
<ide>
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> expect(target_path).to be_a_directory
<ide>
<ide> contents_path = target_path.join("Contents/Info.plist")
<ide> .to output(stdout).to_stdout
<ide> .and output(stderr).to_stderr
<ide>
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> expect(target_path).to be_a_directory
<ide>
<ide> contents_path = target_path.join("Contents/Info.plist")
<ide> .to output(stdout).to_stdout
<ide> .and output(stderr).to_stderr
<ide>
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> expect(target_path).to be_a_directory
<ide>
<ide> contents_path = target_path.join("Contents/Info.plist")
<ide><path>Library/Homebrew/test/cask/artifact/generic_artifact_spec.rb
<ide> install_phase.call
<ide>
<ide> expect(target_path).to be_a_directory
<del> expect(source_path).not_to exist
<add> expect(source_path).to be_a_symlink
<ide> end
<ide>
<ide> it "avoids clobbering an existing artifact" do
<ide><path>Library/Homebrew/test/cask/artifact/two_apps_correct_spec.rb
<ide> install_phase
<ide>
<ide> expect(target_path_mini).to be_a_directory
<del> expect(source_path_mini).not_to exist
<add> expect(source_path_mini).to be_a_symlink
<ide>
<ide> expect(target_path_pro).to be_a_directory
<del> expect(source_path_pro).not_to exist
<add> expect(source_path_pro).to be_a_symlink
<ide> end
<ide>
<ide> describe "when apps are in a subdirectory" do
<ide> let(:cask) { Cask::CaskLoader.load(cask_path("with-two-apps-subdir")) }
<ide>
<add> let(:source_path_mini) { cask.staged_path.join("Caffeines", "Caffeine Mini.app") }
<add> let(:source_path_pro) { cask.staged_path.join("Caffeines", "Caffeine Pro.app") }
<add>
<ide> it "installs both apps using the proper target directory" do
<ide> install_phase
<ide>
<ide> expect(target_path_mini).to be_a_directory
<del> expect(source_path_mini).not_to exist
<add> expect(source_path_mini).to be_a_symlink
<ide>
<ide> expect(target_path_pro).to be_a_directory
<del> expect(source_path_pro).not_to exist
<add> expect(source_path_pro).to be_a_symlink
<ide> end
<ide> end
<ide>
<ide> install_phase
<ide>
<ide> expect(target_path_mini).to be_a_directory
<del> expect(source_path_mini).not_to exist
<add> expect(source_path_mini).to be_a_symlink
<ide>
<ide> expect(cask.config.appdir.join("Caffeine Deluxe.app")).not_to exist
<ide> expect(cask.staged_path.join("Caffeine Deluxe.app")).to exist | 6 |
Javascript | Javascript | fix bug where cachegroup index was inverted | 35c7801b5e5320b0830e84ca10a3dfdab0999847 | <ide><path>lib/optimize/SplitChunksPlugin.js
<ide> const compareEntries = (a, b) => {
<ide> const diffSizeReduce = aSizeReduce - bSizeReduce;
<ide> if (diffSizeReduce) return diffSizeReduce;
<ide> // 4. by cache group index
<del> const indexDiff = a.cacheGroupIndex - b.cacheGroupIndex;
<add> const indexDiff = b.cacheGroupIndex - a.cacheGroupIndex;
<ide> if (indexDiff) return indexDiff;
<ide> // 5. by number of modules (to be able to compare by identifier)
<ide> const modulesA = a.modules; | 1 |
Javascript | Javascript | remove navexperimental from uiexplorer | 761d528153e26390e4d6d3caabc70341812a8617 | <ide><path>Examples/UIExplorer/js/UIExplorerActions.js
<ide> */
<ide> 'use strict';
<ide>
<del>export type UIExplorerListWithFilterAction = {
<del> type: 'UIExplorerListWithFilterAction',
<del> filter: ?string;
<add>export type UIExplorerBackAction = {
<add> type: 'UIExplorerBackAction',
<add>};
<add>
<add>export type UIExplorerListAction = {
<add> type: 'UIExplorerListAction',
<ide> };
<ide>
<ide> export type UIExplorerExampleAction = {
<ide> type: 'UIExplorerExampleAction',
<del> openExample: string;
<add> openExample: string,
<ide> };
<ide>
<del>export type UIExplorerAction = UIExplorerListWithFilterAction | UIExplorerExampleAction;
<add>export type UIExplorerAction = (
<add> UIExplorerBackAction |
<add> UIExplorerListAction |
<add> UIExplorerExampleAction
<add>);
<add>
<add>
<add>function Back(): UIExplorerBackAction {
<add> return {
<add> type: 'UIExplorerBackAction',
<add> };
<add>}
<ide>
<del>function ExampleListWithFilter(filter: ?string): UIExplorerListWithFilterAction {
<add>function ExampleList(): UIExplorerListAction {
<ide> return {
<del> type: 'UIExplorerListWithFilterAction',
<del> filter,
<add> type: 'UIExplorerListAction',
<ide> };
<ide> }
<ide>
<ide> function ExampleAction(openExample: string): UIExplorerExampleAction {
<ide> }
<ide>
<ide> const UIExplorerActions = {
<del> ExampleListWithFilter,
<add> Back,
<add> ExampleList,
<ide> ExampleAction,
<ide> };
<ide>
<ide><path>Examples/UIExplorer/js/UIExplorerApp.android.js
<ide> const React = require('react');
<ide> const StatusBar = require('StatusBar');
<ide> const StyleSheet = require('StyleSheet');
<ide> const ToolbarAndroid = require('ToolbarAndroid');
<add>const UIExplorerActions = require('./UIExplorerActions');
<ide> const UIExplorerExampleContainer = require('./UIExplorerExampleContainer');
<ide> const UIExplorerExampleList = require('./UIExplorerExampleList');
<ide> const UIExplorerList = require('./UIExplorerList');
<ide> const UIExplorerNavigationReducer = require('./UIExplorerNavigationReducer');
<del>const UIExplorerStateTitleMap = require('./UIExplorerStateTitleMap');
<ide> const UIManager = require('UIManager');
<ide> const URIActionMap = require('./URIActionMap');
<ide> const View = require('View');
<ide>
<ide> const nativeImageSource = require('nativeImageSource');
<ide>
<del>import type {UIExplorerNavigationState} from './UIExplorerNavigationReducer';
<add>import type { UIExplorerNavigationState } from './UIExplorerNavigationReducer';
<ide>
<ide> UIManager.setLayoutAnimationEnabledExperimental(true);
<ide>
<ide> type Props = {
<ide> exampleFromAppetizeParams: string,
<ide> };
<ide>
<del>type State = UIExplorerNavigationState & {
<del> externalExample: ?string,
<del>};
<add>const APP_STATE_KEY = 'UIExplorerAppState.v2';
<add>
<add>const HEADER_LOGO_ICON = nativeImageSource({
<add> android: 'launcher_icon',
<add> width: 132,
<add> height: 144
<add>});
<add>
<add>const HEADER_NAV_ICON = nativeImageSource({
<add> android: 'ic_menu_black_24dp',
<add> width: 48,
<add> height: 48
<add>});
<ide>
<ide> class UIExplorerApp extends React.Component {
<del> _handleAction: Function;
<del> _renderDrawerContent: Function;
<del> state: State;
<del> constructor(props: Props) {
<del> super(props);
<del> this._handleAction = this._handleAction.bind(this);
<del> this._renderDrawerContent = this._renderDrawerContent.bind(this);
<del> }
<add> props: Props;
<add> state: UIExplorerNavigationState;
<ide>
<ide> componentWillMount() {
<del> BackAndroid.addEventListener('hardwareBackPress', this._handleBackButtonPress.bind(this));
<add> BackAndroid.addEventListener('hardwareBackPress', this._handleBackButtonPress);
<ide> }
<ide>
<ide> componentDidMount() {
<ide> Linking.getInitialURL().then((url) => {
<del> AsyncStorage.getItem('UIExplorerAppState', (err, storedString) => {
<add> AsyncStorage.getItem(APP_STATE_KEY, (err, storedString) => {
<ide> const exampleAction = URIActionMap(this.props.exampleFromAppetizeParams);
<ide> const urlAction = URIActionMap(url);
<ide> const launchAction = exampleAction || urlAction;
<ide> class UIExplorerApp extends React.Component {
<ide> );
<ide> }
<ide>
<del> _renderDrawerContent() {
<add> _renderDrawerContent = () => {
<ide> return (
<ide> <View style={styles.drawerContentWrapper}>
<ide> <UIExplorerExampleList
<ide> class UIExplorerApp extends React.Component {
<ide> />
<ide> </View>
<ide> );
<del> }
<add> };
<ide>
<ide> _renderApp() {
<ide> const {
<del> externalExample,
<del> stack,
<add> openExample,
<ide> } = this.state;
<del> if (externalExample) {
<del> const Component = UIExplorerList.Modules[externalExample];
<del> return (
<del> <Component
<del> onExampleExit={() => {
<del> this._handleAction({ type: 'BackAction' });
<del> }}
<del> ref={(example) => { this._exampleRef = example; }}
<del> />
<del> );
<del> }
<del> const title = UIExplorerStateTitleMap(stack.routes[stack.index]);
<del> const index = stack.routes.length <= 1 ? 1 : stack.index;
<ide>
<del> if (stack && stack.routes[index]) {
<del> const {key} = stack.routes[index];
<del> const ExampleModule = UIExplorerList.Modules[key];
<del> if (ExampleModule) {
<add> if (openExample) {
<add> const ExampleModule = UIExplorerList.Modules[openExample];
<add> if (ExampleModule.external) {
<add> return (
<add> <ExampleModule
<add> onExampleExit={() => {
<add> this._handleAction(UIExplorerActions.Back());
<add> }}
<add> ref={(example) => { this._exampleRef = example; }}
<add> />
<add> );
<add> } else if (ExampleModule) {
<ide> return (
<ide> <View style={styles.container}>
<ide> <ToolbarAndroid
<del> logo={nativeImageSource({
<del> android: 'launcher_icon',
<del> width: 132,
<del> height: 144
<del> })}
<del> navIcon={nativeImageSource({
<del> android: 'ic_menu_black_24dp',
<del> width: 48,
<del> height: 48
<del> })}
<add> logo={HEADER_LOGO_ICON}
<add> navIcon={HEADER_NAV_ICON}
<ide> onIconClicked={() => this.drawer.openDrawer()}
<ide> style={styles.toolbar}
<del> title={title}
<add> title={ExampleModule.title}
<ide> />
<ide> <UIExplorerExampleContainer
<ide> module={ExampleModule}
<ide> class UIExplorerApp extends React.Component {
<ide> );
<ide> }
<ide> }
<add>
<ide> return (
<ide> <View style={styles.container}>
<ide> <ToolbarAndroid
<del> logo={nativeImageSource({
<del> android: 'launcher_icon',
<del> width: 132,
<del> height: 144
<del> })}
<del> navIcon={nativeImageSource({
<del> android: 'ic_menu_black_24dp',
<del> width: 48,
<del> height: 48
<del> })}
<add> logo={HEADER_LOGO_ICON}
<add> navIcon={HEADER_NAV_ICON}
<ide> onIconClicked={() => this.drawer.openDrawer()}
<ide> style={styles.toolbar}
<del> title={title}
<add> title="UIExplorer"
<ide> />
<ide> <UIExplorerExampleList
<ide> onNavigate={this._handleAction}
<ide> list={UIExplorerList}
<del> {...stack.routes[0]}
<ide> />
<ide> </View>
<ide> );
<ide> }
<ide>
<del> _handleAction(action: Object): boolean {
<add> _handleAction = (action: Object): boolean => {
<ide> this.drawer && this.drawer.closeDrawer();
<ide> const newState = UIExplorerNavigationReducer(this.state, action);
<ide> if (this.state !== newState) {
<ide> this.setState(
<ide> newState,
<del> () => AsyncStorage.setItem('UIExplorerAppState', JSON.stringify(this.state))
<add> () => AsyncStorage.setItem(APP_STATE_KEY, JSON.stringify(this.state))
<ide> );
<ide> return true;
<ide> }
<ide> return false;
<del> }
<add> };
<ide>
<del> _handleBackButtonPress() {
<add> _handleBackButtonPress = () => {
<ide> if (this._overrideBackPressForDrawerLayout) {
<ide> // This hack is necessary because drawer layout provides an imperative API
<ide> // with open and close methods. This code would be cleaner if the drawer
<ide> class UIExplorerApp extends React.Component {
<ide> ) {
<ide> return true;
<ide> }
<del> return this._handleAction({ type: 'BackAction' });
<del> }
<add> return this._handleAction(UIExplorerActions.Back());
<add> };
<ide> }
<ide>
<ide> const styles = StyleSheet.create({
<ide><path>Examples/UIExplorer/js/UIExplorerApp.ios.js
<ide> const AsyncStorage = require('AsyncStorage');
<ide> const Linking = require('Linking');
<ide> const React = require('react');
<ide> const ReactNative = require('react-native');
<add>const UIExplorerActions = require('./UIExplorerActions');
<ide> const UIExplorerExampleContainer = require('./UIExplorerExampleContainer');
<ide> const UIExplorerExampleList = require('./UIExplorerExampleList');
<ide> const UIExplorerList = require('./UIExplorerList.ios');
<ide> const UIExplorerNavigationReducer = require('./UIExplorerNavigationReducer');
<del>const UIExplorerStateTitleMap = require('./UIExplorerStateTitleMap');
<ide> const URIActionMap = require('./URIActionMap');
<ide>
<ide> const {
<add> Button,
<ide> AppRegistry,
<del> NavigationExperimental,
<ide> SnapshotViewIOS,
<ide> StyleSheet,
<add> Text,
<ide> View,
<ide> } = ReactNative;
<ide>
<del>const {
<del> CardStack: NavigationCardStack,
<del> Header: NavigationHeader,
<del>} = NavigationExperimental;
<del>
<del>import type { NavigationSceneRendererProps } from 'NavigationTypeDefinition';
<del>
<del>import type { UIExplorerNavigationState } from './UIExplorerNavigationReducer';
<del>
<ide> import type { UIExplorerExample } from './UIExplorerList.ios';
<add>import type { UIExplorerAction } from './UIExplorerActions';
<add>import type { UIExplorerNavigationState } from './UIExplorerNavigationReducer';
<ide>
<ide> type Props = {
<ide> exampleFromAppetizeParams: string,
<ide> };
<ide>
<del>type State = UIExplorerNavigationState & {
<del> externalExample?: string,
<del>};
<add>const APP_STATE_KEY = 'UIExplorerAppState.v2';
<ide>
<del>const APP_STATE_KEY = 'UIExplorerAppState.v1';
<add>const Header = ({ onBack, title}) => (
<add> <View style={styles.header}>
<add> <View style={styles.headerCenter}>
<add> <Text style={styles.title}>{title}</Text>
<add> </View>
<add> {onBack && <View style={styles.headerLeft}>
<add> <Button title="Back" onPress={onBack} />
<add> </View>}
<add> </View>
<add>);
<ide>
<ide> class UIExplorerApp extends React.Component {
<del> _handleBack: Function;
<del> _handleAction: Function;
<del> _renderCard: Function;
<del> _renderHeader: Function;
<del> _renderScene: Function;
<del> _renderTitleComponent: Function;
<del> state: State;
<del>
<del> constructor(props: Props) {
<del> super(props);
<del> }
<del>
<del> componentWillMount() {
<del> this._handleAction = this._handleAction.bind(this);
<del> this._handleBack = this._handleAction.bind(this, {type: 'back'});
<del> this._renderHeader = this._renderHeader.bind(this);
<del> this._renderScene = this._renderScene.bind(this);
<del> this._renderTitleComponent = this._renderTitleComponent.bind(this);
<del> }
<add> props: Props;
<add> state: UIExplorerNavigationState;
<ide>
<ide> componentDidMount() {
<ide> Linking.getInitialURL().then((url) => {
<ide> class UIExplorerApp extends React.Component {
<ide> const launchAction = exampleAction || urlAction;
<ide> if (err || !storedString) {
<ide> const initialAction = launchAction || {type: 'InitialAction'};
<del> this.setState(UIExplorerNavigationReducer(null, initialAction));
<add> this.setState(UIExplorerNavigationReducer(undefined, initialAction));
<ide> return;
<ide> }
<ide> const storedState = JSON.parse(storedString);
<ide> class UIExplorerApp extends React.Component {
<ide> });
<ide> }
<ide>
<del> _handleAction(action: Object) {
<add> _handleBack = () => {
<add> this._handleAction(UIExplorerActions.Back());
<add> }
<add>
<add> _handleAction = (action: ?UIExplorerAction) => {
<ide> if (!action) {
<ide> return;
<ide> }
<ide> class UIExplorerApp extends React.Component {
<ide> if (!this.state) {
<ide> return null;
<ide> }
<del> if (this.state.externalExample) {
<del> const Component = UIExplorerList.Modules[this.state.externalExample];
<del> return (
<del> <Component
<del> onExampleExit={() => {
<del> this._handleAction({ type: 'BackAction' });
<del> }}
<del> />
<del> );
<del> }
<del> return (
<del> <NavigationCardStack
<del> navigationState={this.state.stack}
<del> style={styles.container}
<del> renderHeader={this._renderHeader}
<del> renderScene={this._renderScene}
<del> onNavigateBack={this._handleBack}
<del> />
<del> );
<del> }
<del>
<del> _renderHeader(props: NavigationSceneRendererProps): React.Element<any> {
<del> return (
<del> <NavigationHeader
<del> {...props}
<del> onNavigateBack={this._handleBack}
<del> renderTitleComponent={this._renderTitleComponent}
<del> />
<del> );
<del> }
<add> if (this.state.openExample) {
<add> const Component = UIExplorerList.Modules[this.state.openExample];
<add> if (Component.external) {
<add> return (
<add> <Component
<add> onExampleExit={this._handleBack}
<add> />
<add> );
<add> } else {
<add> return (
<add> <View style={styles.exampleContainer}>
<add> <Header onBack={this._handleBack} title={Component.title} />
<add> <UIExplorerExampleContainer module={Component} />
<add> </View>
<add> );
<add> }
<ide>
<del> _renderTitleComponent(props: NavigationSceneRendererProps): React.Element<any> {
<add> }
<ide> return (
<del> <NavigationHeader.Title>
<del> {UIExplorerStateTitleMap(props.scene.route)}
<del> </NavigationHeader.Title>
<del> );
<del> }
<del>
<del> _renderScene(props: NavigationSceneRendererProps): ?React.Element<any> {
<del> const state = props.scene.route;
<del> if (state.key === 'AppList') {
<del> return (
<add> <View style={styles.exampleContainer}>
<add> <Header title="UIExplorer" />
<ide> <UIExplorerExampleList
<ide> onNavigate={this._handleAction}
<ide> list={UIExplorerList}
<del> style={styles.exampleContainer}
<del> {...state}
<ide> />
<del> );
<del> }
<del>
<del> const Example = UIExplorerList.Modules[state.key];
<del> if (Example) {
<del> return (
<del> <View style={styles.exampleContainer}>
<del> <UIExplorerExampleContainer module={Example} />
<del> </View>
<del> );
<del> }
<del> return null;
<add> </View>
<add> );
<ide> }
<ide> }
<ide>
<ide> const styles = StyleSheet.create({
<del> container: {
<add> header: {
<add> height: 60,
<add> borderBottomWidth: StyleSheet.hairlineWidth,
<add> borderBottomColor: '#96969A',
<add> backgroundColor: '#F5F5F6',
<add> flexDirection: 'row',
<add> paddingTop: 20,
<add> },
<add> headerLeft: {
<add> },
<add> headerCenter: {
<ide> flex: 1,
<add> position: 'absolute',
<add> top: 27,
<add> left: 0,
<add> right: 0,
<add> },
<add> title: {
<add> fontSize: 19,
<add> fontWeight: '600',
<add> textAlign: 'center',
<ide> },
<ide> exampleContainer: {
<ide> flex: 1,
<ide><path>Examples/UIExplorer/js/UIExplorerExampleList.js
<ide> class UIExplorerExampleList extends React.Component {
<ide> }}}
<ide> onNavigate={this.props.onNavigate}
<ide> onPress={() => {
<del> this.props.onNavigate(
<del> UIExplorerActions.ExampleListWithFilter('')
<del> );
<add> this.props.onNavigate(UIExplorerActions.ExampleList());
<ide> }}
<ide> />
<ide> );
<ide><path>Examples/UIExplorer/js/UIExplorerNavigationReducer.js
<ide> */
<ide> 'use strict';
<ide>
<del>const ReactNative = require('react-native');
<ide> // $FlowFixMe : This is a platform-forked component, and flow seems to only run on iOS?
<ide> const UIExplorerList = require('./UIExplorerList');
<ide>
<del>const {
<del> NavigationExperimental,
<del>} = ReactNative;
<del>
<del>
<del>const {
<del> StateUtils: NavigationStateUtils,
<del>} = NavigationExperimental;
<del>
<del>import type {NavigationState} from 'NavigationTypeDefinition';
<del>
<ide> export type UIExplorerNavigationState = {
<del> externalExample: ?string;
<del> stack: NavigationState;
<add> openExample: ?string,
<ide> };
<ide>
<del>const defaultGetReducerForState = (initialState) => (state) => state || initialState;
<add>function UIExplorerNavigationReducer(
<add> state: ?UIExplorerNavigationState,
<add> action: any
<add>): UIExplorerNavigationState {
<ide>
<del>function getNavigationState(state: any): ?NavigationState {
<ide> if (
<del> (state instanceof Object) &&
<del> (state.routes instanceof Array) &&
<del> (state.routes[0] !== undefined) &&
<del> (typeof state.index === 'number') &&
<del> (state.routes[state.index] !== undefined)
<del> ) {
<del> return state;
<del> }
<del> return null;
<del>}
<del>
<del>function StackReducer({initialState, getReducerForState, getPushedReducerForAction}: any): Function {
<del> const getReducerForStateWithDefault = getReducerForState || defaultGetReducerForState;
<del> return function (lastState: ?NavigationState, action: any): NavigationState {
<del> if (!lastState) {
<del> return initialState;
<del> }
<del> const lastParentState = getNavigationState(lastState);
<del> if (!lastParentState) {
<del> return lastState;
<del> }
<add> // Default value is to see example list
<add> !state ||
<ide>
<del> const activeSubState = lastParentState.routes[lastParentState.index];
<del> const activeSubReducer = getReducerForStateWithDefault(activeSubState);
<del> const nextActiveState = activeSubReducer(activeSubState, action);
<del> if (nextActiveState !== activeSubState) {
<del> const nextChildren = [...lastParentState.routes];
<del> nextChildren[lastParentState.index] = nextActiveState;
<del> return {
<del> ...lastParentState,
<del> routes: nextChildren,
<del> };
<del> }
<add> // Handle the explicit list action
<add> action.type === 'UIExplorerListAction' ||
<ide>
<del> const subReducerToPush = getPushedReducerForAction(action, lastParentState);
<del> if (subReducerToPush) {
<del> return NavigationStateUtils.push(
<del> lastParentState,
<del> subReducerToPush(null, action)
<del> );
<del> }
<del>
<del> switch (action.type) {
<del> case 'back':
<del> case 'BackAction':
<del> if (lastParentState.index === 0 || lastParentState.routes.length === 1) {
<del> return lastParentState;
<del> }
<del> return NavigationStateUtils.pop(lastParentState);
<del> }
<del>
<del> return lastParentState;
<del> };
<del>}
<del>
<del>const UIExplorerStackReducer = StackReducer({
<del> getPushedReducerForAction: (action, lastState) => {
<del> if (action.type === 'UIExplorerExampleAction' && UIExplorerList.Modules[action.openExample]) {
<del> if (lastState.routes.find(route => route.key === action.openExample)) {
<del> // The example is already open, we should avoid pushing examples twice
<del> return null;
<del> }
<del> return (state) => state || {key: action.openExample};
<del> }
<del> return null;
<del> },
<del> getReducerForState: (initialState) => (state) => state || initialState,
<del> initialState: {
<del> key: 'UIExplorerMainStack',
<del> index: 0,
<del> routes: [
<del> {key: 'AppList'},
<del> ],
<del> },
<del>});
<del>
<del>function UIExplorerNavigationReducer(lastState: ?UIExplorerNavigationState, action: any): UIExplorerNavigationState {
<del> if (!lastState) {
<del> return {
<del> externalExample: null,
<del> stack: UIExplorerStackReducer(null, action),
<del> };
<del> }
<del> if (action.type === 'UIExplorerListWithFilterAction') {
<del> return {
<del> externalExample: null,
<del> stack: {
<del> key: 'UIExplorerMainStack',
<del> index: 0,
<del> routes: [
<del> {
<del> key: 'AppList',
<del> filter: action.filter,
<del> },
<del> ],
<del> },
<del> };
<del> }
<del> if (action.type === 'BackAction' && lastState.externalExample) {
<add> // Handle requests to go back to the list when an example is open
<add> (state.openExample && action.type === 'UIExplorerBackAction')
<add> ) {
<ide> return {
<del> ...lastState,
<del> externalExample: null,
<add> // A null openExample will cause the views to display the UIExplorer example list
<add> openExample: null,
<ide> };
<ide> }
<add>
<ide> if (action.type === 'UIExplorerExampleAction') {
<add>
<add> // Make sure we see the module before returning the new state
<ide> const ExampleModule = UIExplorerList.Modules[action.openExample];
<del> if (ExampleModule && ExampleModule.external) {
<add>
<add> if (ExampleModule) {
<ide> return {
<del> ...lastState,
<del> externalExample: action.openExample,
<add> openExample: action.openExample,
<ide> };
<ide> }
<ide> }
<del> const newStack = UIExplorerStackReducer(lastState.stack, action);
<del> if (newStack !== lastState.stack) {
<del> return {
<del> externalExample: null,
<del> stack: newStack,
<del> };
<del> }
<del> return lastState;
<add>
<add> return state;
<ide> }
<ide>
<ide> module.exports = UIExplorerNavigationReducer;
<ide><path>Examples/UIExplorer/js/UIExplorerStateTitleMap.js
<del>/**
<del> * Copyright (c) 2013-present, Facebook, Inc.
<del> * All rights reserved.
<del> *
<del> * This source code is licensed under the BSD-style license found in the
<del> * LICENSE file in the root directory of this source tree. An additional grant
<del> * of patent rights can be found in the PATENTS file in the same directory.
<del> *
<del> * The examples provided by Facebook are for non-commercial testing and
<del> * evaluation purposes only.
<del> *
<del> * Facebook reserves all rights not expressly granted.
<del> *
<del> * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
<del> * OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
<del> * FITNESS FOR A PARTICULAR PURPOSE AND NON INFRINGEMENT. IN NO EVENT SHALL
<del> * FACEBOOK BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN
<del> * AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
<del> * CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
<del> *
<del> * @flow
<del> * @providesModule UIExplorerStateTitleMap
<del> */
<del>'use strict';
<del>
<del>// $FlowFixMe : This is a platform-forked component, and flow seems to only run on iOS?
<del>const UIExplorerList = require('./UIExplorerList');
<del>
<del>import type {NavigationRoute} from 'NavigationTypeDefinition';
<del>
<del>function StateTitleMap(route: NavigationRoute): string {
<del> if (UIExplorerList.Modules[route.key]) {
<del> return UIExplorerList.Modules[route.key].title;
<del> }
<del> if (route.key === 'AppList') {
<del> return 'UIExplorer';
<del> }
<del> return 'Unknown';
<del>}
<del>
<del>module.exports = StateTitleMap;
<ide><path>Examples/UIExplorer/js/URIActionMap.js
<ide> const {
<ide> Alert,
<ide> } = ReactNative;
<ide>
<del>function PathActionMap(path: string): ?Object {
<add>import type { UIExplorerAction } from './UIExplorerActions';
<add>
<add>function PathActionMap(path: string): ?UIExplorerAction {
<ide> // Warning! Hacky parsing for example code. Use a library for this!
<ide> const exampleParts = path.split('/example/');
<ide> const exampleKey = exampleParts[1];
<ide> function PathActionMap(path: string): ?Object {
<ide> return null;
<ide> }
<ide>
<del>function URIActionMap(uri: ?string): ?Object {
<del> // Warning! Hacky parsing for example code. Use a library for this!
<add>function URIActionMap(uri: ?string): ?UIExplorerAction {
<ide> if (!uri) {
<ide> return null;
<ide> }
<add> // Warning! Hacky parsing for example code. Use a library for this!
<ide> const parts = uri.split('rnuiexplorer:/');
<ide> if (!parts[1]) {
<ide> return null; | 7 |
Text | Text | fix small language nits | b606dac461070c368f688c929a6ff0af6a5570e4 | <ide><path>docs/_posts/2014-07-28-community-roundup-20.md
<ide> layout: post
<ide> author: Lou Husson
<ide> ---
<ide>
<del>It's an exciting time for React as there are now more commits from open source contributors than Facebook engineers! Keep up the good work :)
<add>It's an exciting time for React as there are now more commits from open source contributors than from Facebook engineers! Keep up the good work :)
<ide>
<ide> ## Atom moves to React
<ide>
<del>[Atom, GitHub's code editor, is now using React](http://blog.atom.io/2014/07/02/moving-atom-to-react.html) to build the editing experience. They did the move in order to improve performance. By default, React helped them eliminate unnecessary reflows. Then, they were able to architect the rendering pipeline in order to minimize repaints by using hardware acceleration. This is a testament to the fact that React's architecture is viable for high performant applications.
<add>[Atom, GitHub's code editor, is now using React](http://blog.atom.io/2014/07/02/moving-atom-to-react.html) to build the editing experience. They made the move in order to improve performance. By default, React helped them eliminate unnecessary reflows, enabling them to focus on architecting the rendering pipeline in order to minimize repaints by using hardware acceleration. This is a testament to the fact that React's architecture is perfect for high performant applications.
<ide>
<ide> [<img src="http://blog.atom.io/img/posts/gpu-cursor-move.gif" style="width: 100%;" />](http://blog.atom.io/2014/07/02/moving-atom-to-react.html)
<ide>
<ide>
<ide> ## Why Does React Scale?
<ide>
<del>At the last [JSConf.us](http://2014.jsconf.us/), Vjeux talked about the design decisions of the API that makes it scale to a large number of developers. If you don't have 20 minutes, take a look at the [annotated slides](https://speakerdeck.com/vjeux/why-does-react-scale-jsconf-2014).
<add>At the last [JSConf.us](http://2014.jsconf.us/), Vjeux talked about the design decisions made in the API that allows it to scale to a large number of developers. If you don't have 20 minutes, take a look at the [annotated slides](https://speakerdeck.com/vjeux/why-does-react-scale-jsconf-2014).
<ide>
<ide> <iframe width="650" height="315" src="//www.youtube.com/embed/D-ioDiacTm8" frameborder="0" allowfullscreen></iframe>
<ide>
<ide>
<ide> ## Live Editing
<ide>
<del>The best feature of React is that it provides foundations to implement concepts that were otherwise extremely hard to like server-side rendering, undo-redo, rendering to non-DOM environments like canvas... [Dan Abramov](https://twitter.com/dan_abramov) got hot code reloading working with webpack in order to [live edit a React project](http://gaearon.github.io/react-hot-loader/)!
<add>One of the best features of React is that it provides the foundations to implement concepts that were otherwise extremely difficult, like server-side rendering, undo-redo, rendering to non-DOM environments like canvas... [Dan Abramov](https://twitter.com/dan_abramov) got hot code reloading working with webpack in order to [live edit a React project](http://gaearon.github.io/react-hot-loader/)!
<ide>
<ide> <iframe src="//player.vimeo.com/video/100010922" width="650" height="315" webkitallowfullscreen mozallowfullscreen allowfullscreen></iframe>
<ide>
<ide>
<ide> ## ReactIntl Mixin by Yahoo
<ide>
<del>There's a couple of React-related projects that recently appeared on Yahoo's GitHub. The first one is an [internationalization mixin](https://github.com/yahoo/react-intl). It's exciting to see them contributing.
<add>There are a couple of React-related projects that recently appeared on Yahoo's GitHub, the first one being an [internationalization mixin](https://github.com/yahoo/react-intl). It's great to see them getting excitied about React and contributing back to the community.
<ide>
<ide> ```javascript
<ide> var MyComponent = React.createClass({
<ide> Josephine Hall, working at Ice Lab studio, used React to write a mobile-focused
<ide>
<ide> ## London React Meetup
<ide>
<del>If you missed the last [London React Meetup](http://www.meetup.com/London-React-User-Group/events/191406572/), the video is available. Lots of great content.
<add>If you missed the last [London React Meetup](http://www.meetup.com/London-React-User-Group/events/191406572/), the video is available, with lots of great content.
<ide>
<ide> - What's new in React 0.11 and how to improve performance by guaranteeing immutability
<ide> - State handling in React with Morearty.JS
<ide> If you missed the last [London React Meetup](http://www.meetup.com/London-React-
<ide>
<ide> <iframe width="650" height="315" src="//www.youtube.com/embed/CP3lvm5Ppqo" frameborder="0" allowfullscreen></iframe>
<ide>
<del>On a related news, the next [React SF Meetup](http://www.meetup.com/ReactJS-San-Francisco/events/195518392/) is going to be from Prezi: [“Immediate Mode on the Web: How We Implemented the Prezi Viewer in JavaScript”](https://medium.com/prezi-engineering/how-and-why-prezi-turned-to-javascript-56e0ca57d135). While not in React, their tech is really awesome and shares a lot of React's design principles and perf optimizations.
<add>In related news, the next [React SF Meetup](http://www.meetup.com/ReactJS-San-Francisco/events/195518392/) will be from Prezi: [“Immediate Mode on the Web: How We Implemented the Prezi Viewer in JavaScript”](https://medium.com/prezi-engineering/how-and-why-prezi-turned-to-javascript-56e0ca57d135). While not in React, their tech is really awesome and shares a lot of React's design principles and perf optimizations.
<ide>
<ide>
<ide> ## Using React and KendoUI Together
<ide>
<del>One of the strength of React is that it plays nicely with other libraries. Jim Cowart proved it by writing a tutorial that explains how to write [React component adapters for KendoUI](http://www.ifandelse.com/using-reactjs-and-kendoui-together/).
<add>One of the strengths of React is that it plays nicely with other libraries. Jim Cowart proved it by writing a tutorial that explains how to write [React component adapters for KendoUI](http://www.ifandelse.com/using-reactjs-and-kendoui-together/).
<ide>
<ide> <figure><a href="http://www.ifandelse.com/using-reactjs-and-kendoui-together/"><img src="/react/img/blog/kendoui.png" /></a></figure>
<ide> | 1 |
PHP | PHP | add a wordwrap function | 80765d4a4474df5460d0aaecd388f6364a7f6ff3 | <ide><path>src/Utility/Text.php
<ide> public static function wrap($text, $options = [])
<ide> return $wrapped;
<ide> }
<ide>
<add> /**
<add> * Wraps a complete block of text to a specific width, can optionally wrap
<add> * at word breaks.
<add> *
<add> * ### Options
<add> *
<add> * - `width` The width to wrap to. Defaults to 72.
<add> * - `wordWrap` Only wrap on words breaks (spaces) Defaults to true.
<add> * - `indent` String to indent with. Defaults to null.
<add> * - `indentAt` 0 based index to start indenting at. Defaults to 0.
<add> *
<add> * @param string $text The text to format.
<add> * @param array|int $options Array of options to use, or an integer to wrap the text to.
<add> * @return string Formatted text.
<add> */
<add> public static function wrapBlock($text, $options = [])
<add> {
<add> if (is_numeric($options)) {
<add> $options = ['width' => $options];
<add> }
<add> $options += ['width' => 72, 'wordWrap' => true, 'indent' => null, 'indentAt' => 0];
<add>
<add> if (!empty($options['indentAt']) && $options['indentAt'] === 0) {
<add> $indentLength = !empty($options['indent']) ? strlen($options['indent']) : 0;
<add> $options['width'] = $options['width'] - $indentLength;
<add> return self::wrap($text, $options);
<add> }
<add>
<add> $wrapped = self::wrap($text, $options);
<add>
<add> if (!empty($options['indent'])) {
<add> $indentationLength = !empty($options['indent']) ? strlen($options['indent']) : 0;
<add> $chunks = explode("\n", $wrapped);
<add> if (count($chunks) < 2) {
<add> return $wrapped;
<add> }
<add> $toRewrap = '';
<add> for ($i = $options['indentAt'], $len = count($chunks); $i < $len; $i++) {
<add> $toRewrap .= substr($chunks[$i], $indentationLength) . ' ';
<add> unset($chunks[$i]);
<add> }
<add> $options['width'] = $options['width'] - $indentationLength;
<add> $options['indentAt'] = 0;
<add> $rewrapped = self::wrap($toRewrap, $options);
<add> $newChunks = explode("\n", $rewrapped);
<add>
<add> $chunks = array_merge($chunks, $newChunks);
<add> $wrapped = implode("\n", $chunks);
<add> }
<add> return $wrapped;
<add> }
<add>
<ide> /**
<ide> * Unicode and newline aware version of wordwrap.
<ide> *
<ide><path>tests/TestCase/Utility/TextTest.php
<ide> public function testWrapIndent()
<ide> TEXT;
<ide> $this->assertTextEquals($expected, $result);
<ide> }
<add> /**
<add> * test wrapBlock() indentical to wrap()
<add> *
<add> * @return void
<add> */
<add> public function testWrapBlockIndenticalToWrap()
<add> {
<add> $text = 'This is the song that never ends. This is the song that never ends. This is the song that never ends.';
<add> $result = Text::wrapBlock($text, 33);
<add> $expected = Text::wrap($text, 33);
<add> $this->assertTextEquals($expected, $result);
<add>
<add> $result = Text::wrapBlock($text, ['width' => 33, 'indentAt' => 0]);
<add> $expected = Text::wrap($text, ['width' => 33, 'indentAt' => 0]);
<add> $this->assertTextEquals($expected, $result);
<add> }
<add> /**
<add> * test wrapBlock() indenting from first line
<add> *
<add> * @return void
<add> */
<add> public function testWrapBlockWithIndentAt0()
<add> {
<add> $text = 'This is the song that never ends. This is the song that never ends. This is the song that never ends.';
<add> $result = Text::wrapBlock($text, ['width' => 33, 'indent' => "\t", 'indentAt' => 0]);
<add> $expected = <<<TEXT
<add> This is the song that never
<add> ends. This is the song that
<add> never ends. This is the song
<add> that never ends.
<add>TEXT;
<add> $this->assertTextEquals($expected, $result);
<add> }
<add> /**
<add> * test wrapBlock() indenting from second line
<add> *
<add> * @return void
<add> */
<add> public function testWrapBlockWithIndentAt1()
<add> {
<add> $text = 'This is the song that never ends. This is the song that never ends. This is the song that never ends.';
<add> $result = Text::wrapBlock($text, ['width' => 33, 'indent' => "\t", 'indentAt' => 1]);
<add> $expected = <<<TEXT
<add>This is the song that never ends.
<add> This is the song that never
<add> ends. This is the song that
<add> never ends.
<add>TEXT;
<add> $this->assertTextEquals($expected, $result);
<add> }
<ide>
<ide> /**
<ide> * testTruncate method | 2 |
Mixed | Javascript | fix docs for react.children.map, .foreach, .only | 827c44fcd332443d8ac615d7f70895c6edda1b10 | <ide><path>docs/docs/ref-01-top-level-api.md
<ide> next: component-api.html
<ide> #### React.Children.map
<ide>
<ide> ```javascript
<del>array React.Children.map(object children, function fn [, object context])
<add>object React.Children.map(object children, function fn [, object context])
<ide> ```
<ide>
<del>Invoke `fn` on every immediate child contained within `children` with `this` set to `context`. If `children` is a nested object or array it will be traversed: `fn` will never be passed the container objects.
<add>Invoke `fn` on every immediate child contained within `children` with `this` set to `context`. If `children` is a nested object or array it will be traversed: `fn` will never be passed the container objects. If children is `null` or `undefined` returns `null` or `undefined` rather than an empty object.
<ide>
<ide> #### React.Children.forEach
<ide>
<ide> ```javascript
<ide> React.Children.forEach(object children, function fn [, object context])
<ide> ```
<ide>
<del>Like `React.Children.map()` but does not return an array.
<add>Like `React.Children.map()` but does not return an object.
<ide>
<del>#### React.children.only()
<add>#### React.Children.only
<ide>
<ide> ```javascript
<ide> object React.Children.only(object children)
<ide> ```
<ide>
<del>Return the only child in `children`. If `children` is a nested object or array it will be traversed.
<add>Return the only child in `children`. Throws otherwise.
<ide>
<ide>
<ide> ### React.DOM
<ide><path>src/utils/ReactChildren.js
<ide> function forEachSingleChild(traverseContext, child, name, i) {
<ide> * The provided forEachFunc(child, index) will be called for each
<ide> * leaf child.
<ide> *
<del> * @param {array} children
<add> * @param {?*} children Children tree container.
<ide> * @param {function(*, int)} forEachFunc.
<ide> * @param {*} forEachContext Context for forEachContext.
<ide> */
<ide> function mapSingleChildIntoContext(traverseContext, child, name, i) {
<ide> * TODO: This may likely break any calls to `ReactChildren.map` that were
<ide> * previously relying on the fact that we guarded against null children.
<ide> *
<del> * @param {array} children
<add> * @param {?*} children Children tree container.
<ide> * @param {function(*, int)} mapFunction.
<ide> * @param {*} mapContext Context for mapFunction.
<del> * @return {array} mirrored array with mapped children.
<add> * @return {object} Object containing the ordered map of results.
<ide> */
<ide> function mapChildren(children, func, context) {
<ide> if (children == null) { | 2 |
Ruby | Ruby | add libpcap to list | e757d7c9563fb30e0d5b06ffeb6e42e3160d7e0a | <ide><path>Library/Homebrew/brew.h.rb
<ide> def make url
<ide> force_text = "If you really want to make this formula use --force."
<ide>
<ide> case name.downcase
<del> when /vim/, /screen/
<add> when 'vim', 'screen'
<ide> raise <<-EOS
<ide> #{name} is blacklisted for creation
<ide> Apple distributes this program with OS X.
<ide>
<ide> #{force_text}
<ide> EOS
<del> when /libarchive/
<add> when 'libarchive', 'libpcap'
<ide> raise <<-EOS
<ide> #{name} is blacklisted for creation
<ide> Apple distributes this library with OS X, you can find it in /usr/lib.
<ide>
<ide> #{force_text}
<ide> EOS
<del> when /libxml/, /libxlst/, /freetype/, /libpng/
<add> when 'libxml', 'libxlst', 'freetype', 'libpng'
<ide> raise <<-EOS
<ide> #{name} is blacklisted for creation
<ide> Apple distributes this library with OS X, you can find it in /usr/X11/lib.
<ide> def make url
<ide>
<ide> #{force_text}
<ide> EOS
<del> when /rubygem/
<add> when 'rubygem'
<ide> raise "Sorry RubyGems comes with OS X so we don't package it.\n\n#{force_text}"
<del> when /wxwidgets/
<add> when 'wxwidgets'
<ide> raise <<-EOS
<ide> #{name} is blacklisted for creation
<ide> An older version of wxWidgets is provided by Apple with OS X, but | 1 |
Ruby | Ruby | add bind values to the manager class | f0ba9e4e56cf4dfa266147fad7e8f58ca577d614 | <ide><path>lib/arel/tree_manager.rb
<ide> class TreeManager
<ide>
<ide> attr_reader :ast, :engine
<ide>
<add> attr_accessor :bind_values
<add>
<ide> def initialize engine
<ide> @engine = engine
<ide> @ctx = nil
<add> @bind_values = []
<ide> end
<ide>
<ide> def to_dot
<ide><path>test/test_select_manager.rb
<ide> def test_join_sources
<ide> assert_equal "SELECT FROM 'foo'", manager.to_sql
<ide> end
<ide>
<add> def test_manager_stores_bind_values
<add> manager = Arel::SelectManager.new Table.engine
<add> assert_equal [], manager.bind_values
<add> manager.bind_values = [1]
<add> assert_equal [1], manager.bind_values
<add> end
<add>
<ide> describe 'backwards compatibility' do
<ide> describe 'project' do
<ide> it 'accepts symbols as sql literals' do | 2 |
Text | Text | remove the old reference to `jsonresponse` | adaf97a739dc2c29589b25052daac04d5d706c1b | <ide><path>docs/tutorial/2-requests-and-responses.md
<ide> The wrappers also provide behaviour such as returning `405 Method Not Allowed` r
<ide>
<ide> ## Pulling it all together
<ide>
<del>Okay, let's go ahead and start using these new components to write a few views.
<del>
<del>We don't need our `JSONResponse` class in `views.py` any more, so go ahead and delete that. Once that's done we can start refactoring our views slightly.
<add>Okay, let's go ahead and start using these new components to refactor our views slightly.
<ide>
<ide> from rest_framework import status
<ide> from rest_framework.decorators import api_view | 1 |
Javascript | Javascript | remove feature flag enablestricteffects | 987292815c68be0fd5916d1f9b0d6c983e2db7ce | <ide><path>packages/react-reconciler/src/ReactFiber.new.js
<ide> import {
<ide> import {
<ide> createRootStrictEffectsByDefault,
<ide> enableCache,
<del> enableStrictEffects,
<ide> enableProfilerTimer,
<ide> enableScopeAPI,
<ide> enableLegacyHidden,
<ide> export function createHostRootFiber(
<ide> let mode;
<ide> if (tag === ConcurrentRoot) {
<ide> mode = ConcurrentMode;
<del> if (isStrictMode === true) {
<del> mode |= StrictLegacyMode;
<del>
<del> if (enableStrictEffects) {
<del> mode |= StrictEffectsMode;
<del> }
<del> } else if (enableStrictEffects && createRootStrictEffectsByDefault) {
<add> if (isStrictMode === true || createRootStrictEffectsByDefault) {
<ide> mode |= StrictLegacyMode | StrictEffectsMode;
<ide> }
<ide> if (
<ide> export function createFiberFromTypeAndProps(
<ide> case REACT_STRICT_MODE_TYPE:
<ide> fiberTag = Mode;
<ide> mode |= StrictLegacyMode;
<del> if (enableStrictEffects && (mode & ConcurrentMode) !== NoMode) {
<add> if ((mode & ConcurrentMode) !== NoMode) {
<ide> // Strict effects should never run on legacy roots
<ide> mode |= StrictEffectsMode;
<ide> }
<ide><path>packages/react-reconciler/src/ReactFiber.old.js
<ide> import {
<ide> import {
<ide> createRootStrictEffectsByDefault,
<ide> enableCache,
<del> enableStrictEffects,
<ide> enableProfilerTimer,
<ide> enableScopeAPI,
<ide> enableLegacyHidden,
<ide> export function createHostRootFiber(
<ide> let mode;
<ide> if (tag === ConcurrentRoot) {
<ide> mode = ConcurrentMode;
<del> if (isStrictMode === true) {
<del> mode |= StrictLegacyMode;
<del>
<del> if (enableStrictEffects) {
<del> mode |= StrictEffectsMode;
<del> }
<del> } else if (enableStrictEffects && createRootStrictEffectsByDefault) {
<add> if (isStrictMode === true || createRootStrictEffectsByDefault) {
<ide> mode |= StrictLegacyMode | StrictEffectsMode;
<ide> }
<ide> if (
<ide> export function createFiberFromTypeAndProps(
<ide> case REACT_STRICT_MODE_TYPE:
<ide> fiberTag = Mode;
<ide> mode |= StrictLegacyMode;
<del> if (enableStrictEffects && (mode & ConcurrentMode) !== NoMode) {
<add> if ((mode & ConcurrentMode) !== NoMode) {
<ide> // Strict effects should never run on legacy roots
<ide> mode |= StrictEffectsMode;
<ide> }
<ide><path>packages/react-reconciler/src/ReactFiberClassComponent.new.js
<ide> import {
<ide> enableSchedulingProfiler,
<ide> warnAboutDeprecatedLifecycles,
<ide> enableLazyContextPropagation,
<del> enableStrictEffects,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import ReactStrictModeWarnings from './ReactStrictModeWarnings.new';
<ide> import {isMounted} from './ReactFiberTreeReflection';
<ide> function mountClassInstance(
<ide>
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> // effect even though we're bailing out, so that cWU/cDU are called.
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> }
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> // effect even though we're bailing out, so that cWU/cDU are called.
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide><path>packages/react-reconciler/src/ReactFiberClassComponent.old.js
<ide> import {
<ide> enableSchedulingProfiler,
<ide> warnAboutDeprecatedLifecycles,
<ide> enableLazyContextPropagation,
<del> enableStrictEffects,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import ReactStrictModeWarnings from './ReactStrictModeWarnings.old';
<ide> import {isMounted} from './ReactFiberTreeReflection';
<ide> function mountClassInstance(
<ide>
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> // effect even though we're bailing out, so that cWU/cDU are called.
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> }
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide> function resumeMountClassInstance(
<ide> // effect even though we're bailing out, so that cWU/cDU are called.
<ide> if (typeof instance.componentDidMount === 'function') {
<ide> let fiberFlags: Flags = Update | LayoutStatic;
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> fiberFlags |= MountLayoutDev;
<ide> }
<ide> workInProgress.flags |= fiberFlags;
<ide><path>packages/react-reconciler/src/ReactFiberCommitWork.new.js
<ide> import {
<ide> enableCache,
<ide> enableTransitionTracing,
<ide> enableUseEventHook,
<del> enableStrictEffects,
<ide> enableFloat,
<ide> enableLegacyHidden,
<ide> enableHostSingletons,
<ide> function commitPassiveUnmountInsideDeletedTreeOnFiber(
<ide> }
<ide>
<ide> function invokeLayoutEffectMountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokeLayoutEffectMountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokePassiveEffectMountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokePassiveEffectMountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokeLayoutEffectUnmountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokeLayoutEffectUnmountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokePassiveEffectUnmountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide><path>packages/react-reconciler/src/ReactFiberCommitWork.old.js
<ide> import {
<ide> enableCache,
<ide> enableTransitionTracing,
<ide> enableUseEventHook,
<del> enableStrictEffects,
<ide> enableFloat,
<ide> enableLegacyHidden,
<ide> enableHostSingletons,
<ide> function commitPassiveUnmountInsideDeletedTreeOnFiber(
<ide> }
<ide>
<ide> function invokeLayoutEffectMountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokeLayoutEffectMountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokePassiveEffectMountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokePassiveEffectMountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokeLayoutEffectUnmountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide> function invokeLayoutEffectUnmountInDEV(fiber: Fiber): void {
<ide> }
<ide>
<ide> function invokePassiveEffectUnmountInDEV(fiber: Fiber): void {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> // We don't need to re-check StrictEffectsMode here.
<ide> // This function is only called if that check has already passed.
<ide> switch (fiber.tag) {
<ide><path>packages/react-reconciler/src/ReactFiberHooks.new.js
<ide> import {
<ide> enableUseHook,
<ide> enableUseMemoCacheHook,
<ide> enableUseEventHook,
<del> enableStrictEffects,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import {
<ide> REACT_CONTEXT_TYPE,
<ide> export function bailoutHooks(
<ide> workInProgress.updateQueue = current.updateQueue;
<ide> // TODO: Don't need to reset the flags here, because they're reset in the
<ide> // complete phase (bubbleProperties).
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> workInProgress.flags &= ~(
<ide> MountPassiveDevEffect |
<ide> MountLayoutDevEffect |
<ide> function mountEffect(
<ide> ): void {
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> return mountEffectImpl(
<ide> function mountLayoutEffect(
<ide> let fiberFlags: Flags = UpdateEffect | LayoutStaticEffect;
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> fiberFlags |= MountLayoutDevEffect;
<ide> function mountImperativeHandle<T>(
<ide> let fiberFlags: Flags = UpdateEffect | LayoutStaticEffect;
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> fiberFlags |= MountLayoutDevEffect;
<ide><path>packages/react-reconciler/src/ReactFiberHooks.old.js
<ide> import {
<ide> enableUseHook,
<ide> enableUseMemoCacheHook,
<ide> enableUseEventHook,
<del> enableStrictEffects,
<ide> } from 'shared/ReactFeatureFlags';
<ide> import {
<ide> REACT_CONTEXT_TYPE,
<ide> export function bailoutHooks(
<ide> workInProgress.updateQueue = current.updateQueue;
<ide> // TODO: Don't need to reset the flags here, because they're reset in the
<ide> // complete phase (bubbleProperties).
<del> if (
<del> __DEV__ &&
<del> enableStrictEffects &&
<del> (workInProgress.mode & StrictEffectsMode) !== NoMode
<del> ) {
<add> if (__DEV__ && (workInProgress.mode & StrictEffectsMode) !== NoMode) {
<ide> workInProgress.flags &= ~(
<ide> MountPassiveDevEffect |
<ide> MountLayoutDevEffect |
<ide> function mountEffect(
<ide> ): void {
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> return mountEffectImpl(
<ide> function mountLayoutEffect(
<ide> let fiberFlags: Flags = UpdateEffect | LayoutStaticEffect;
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> fiberFlags |= MountLayoutDevEffect;
<ide> function mountImperativeHandle<T>(
<ide> let fiberFlags: Flags = UpdateEffect | LayoutStaticEffect;
<ide> if (
<ide> __DEV__ &&
<del> enableStrictEffects &&
<ide> (currentlyRenderingFiber.mode & StrictEffectsMode) !== NoMode
<ide> ) {
<ide> fiberFlags |= MountLayoutDevEffect;
<ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.new.js
<ide> import {
<ide> enableDebugTracing,
<ide> enableSchedulingProfiler,
<ide> disableSchedulerTimeoutInWorkLoop,
<del> enableStrictEffects,
<ide> skipUnmountedBoundaries,
<ide> enableUpdaterTracking,
<ide> enableCache,
<ide> function commitRootImpl(
<ide> legacyErrorBoundariesThatAlreadyFailed = null;
<ide> }
<ide>
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> if (!rootDidHavePassiveEffects) {
<ide> commitDoubleInvokeEffectsInDEV(root, false);
<ide> }
<ide> function flushPassiveEffectsImpl() {
<ide> markPassiveEffectsStopped();
<ide> }
<ide>
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> commitDoubleInvokeEffectsInDEV(root, true);
<ide> }
<ide>
<ide> function commitDoubleInvokeEffectsInDEV(
<ide> root: FiberRoot,
<ide> hasPassiveEffects: boolean,
<ide> ) {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> if (useModernStrictMode) {
<ide> let doubleInvokeEffects = true;
<ide>
<ide><path>packages/react-reconciler/src/ReactFiberWorkLoop.old.js
<ide> import {
<ide> enableDebugTracing,
<ide> enableSchedulingProfiler,
<ide> disableSchedulerTimeoutInWorkLoop,
<del> enableStrictEffects,
<ide> skipUnmountedBoundaries,
<ide> enableUpdaterTracking,
<ide> enableCache,
<ide> function commitRootImpl(
<ide> legacyErrorBoundariesThatAlreadyFailed = null;
<ide> }
<ide>
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> if (!rootDidHavePassiveEffects) {
<ide> commitDoubleInvokeEffectsInDEV(root, false);
<ide> }
<ide> function flushPassiveEffectsImpl() {
<ide> markPassiveEffectsStopped();
<ide> }
<ide>
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> commitDoubleInvokeEffectsInDEV(root, true);
<ide> }
<ide>
<ide> function commitDoubleInvokeEffectsInDEV(
<ide> root: FiberRoot,
<ide> hasPassiveEffects: boolean,
<ide> ) {
<del> if (__DEV__ && enableStrictEffects) {
<add> if (__DEV__) {
<ide> if (useModernStrictMode) {
<ide> let doubleInvokeEffects = true;
<ide>
<ide><path>packages/react-reconciler/src/__tests__/ReactOffscreenStrictMode-test.js
<ide> describe('ReactOffscreenStrictMode', () => {
<ide> return <span>label</span>;
<ide> }
<ide>
<del> // @gate __DEV__ && enableStrictEffects && enableOffscreen
<add> // @gate __DEV__ && enableOffscreen
<ide> it('should trigger strict effects when offscreen is visible', () => {
<ide> act(() => {
<ide> ReactNoop.render(
<ide> describe('ReactOffscreenStrictMode', () => {
<ide> ]);
<ide> });
<ide>
<del> // @gate __DEV__ && enableStrictEffects && enableOffscreen && useModernStrictMode
<add> // @gate __DEV__ && enableOffscreen && useModernStrictMode
<ide> it('should not trigger strict effects when offscreen is hidden', () => {
<ide> act(() => {
<ide> ReactNoop.render(
<ide> describe('ReactOffscreenStrictMode', () => {
<ide> });
<ide> });
<ide>
<del> // @gate __DEV__ && enableStrictEffects && enableOffscreen
<add> // @gate __DEV__ && enableOffscreen
<ide> it('should double invoke effects on unsuspended child', async () => {
<ide> let shouldSuspend = true;
<ide> let resolve;
<ide><path>packages/react-reconciler/src/__tests__/StrictEffectsMode-test.js
<ide> describe('StrictEffectsMode', () => {
<ide> return gate(
<ide> flags =>
<ide> flags.build === 'development' &&
<del> flags.enableStrictEffects &&
<ide> flags.createRootStrictEffectsByDefault &&
<ide> flags.dfsEffectsRefactor,
<ide> );
<ide><path>packages/react-reconciler/src/__tests__/StrictEffectsModeDefaults-test.internal.js
<ide> describe('StrictEffectsMode defaults', () => {
<ide> act = require('jest-react').act;
<ide>
<ide> const ReactFeatureFlags = require('shared/ReactFeatureFlags');
<del> ReactFeatureFlags.enableStrictEffects = __DEV__;
<ide> ReactFeatureFlags.createRootStrictEffectsByDefault = __DEV__;
<ide> });
<ide>
<ide><path>packages/react/src/__tests__/ReactStrictMode-test.internal.js
<ide> describe('ReactStrictMode', () => {
<ide> ReactDOMClient = require('react-dom/client');
<ide>
<ide> act = require('jest-react').act;
<del>
<del> const ReactFeatureFlags = require('shared/ReactFeatureFlags');
<del> ReactFeatureFlags.enableStrictEffects = __DEV__;
<ide> });
<ide>
<ide> describe('levels', () => {
<ide><path>packages/shared/ReactFeatureFlags.js
<ide> export const enableSchedulingProfiler = __PROFILE__;
<ide> // reducers by double invoking them in StrictLegacyMode.
<ide> export const debugRenderPhaseSideEffectsForStrictMode = __DEV__;
<ide>
<del>// Helps identify code that is not safe for planned Offscreen API and Suspense semantics;
<del>// this feature flag only impacts StrictEffectsMode.
<del>export const enableStrictEffects = __DEV__;
<del>
<ide> // To preserve the "Pause on caught exceptions" behavior of the debugger, we
<ide> // replay the begin phase of a failed component inside invokeGuardedCallback.
<ide> export const replayFailedUnitOfWorkWithInvokeGuardedCallback = __DEV__;
<ide><path>packages/shared/forks/ReactFeatureFlags.native-fb.js
<ide> export const enableGetInspectorDataForInstanceInProduction = true;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = __DEV__;
<ide> export const createRootStrictEffectsByDefault = false;
<ide>
<ide> export const disableSchedulerTimeoutInWorkLoop = false;
<ide><path>packages/shared/forks/ReactFeatureFlags.native-oss.js
<ide> export const enableGetInspectorDataForInstanceInProduction = false;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = false;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.js
<ide> export const enableGetInspectorDataForInstanceInProduction = false;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = false;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.native.js
<ide> export const enableUseMemoCacheHook = false;
<ide> export const enableUseEventHook = false;
<ide> export const enableCapturePhaseSelectiveHydrationWithoutDiscreteEventReplay = true;
<ide> export const enableClientRenderFallbackOnTextMismatch = true;
<del>export const enableStrictEffects = false;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.test-renderer.www.js
<ide> export const enableGetInspectorDataForInstanceInProduction = false;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = true;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.testing.js
<ide> export const enableGetInspectorDataForInstanceInProduction = false;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = false;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.testing.www.js
<ide> export const enableGetInspectorDataForInstanceInProduction = false;
<ide> export const enableNewReconciler = false;
<ide> export const deferRenderPhaseUpdateToNextBatch = false;
<ide>
<del>export const enableStrictEffects = false;
<ide> export const createRootStrictEffectsByDefault = false;
<ide> export const enableUseRefAccessWarning = false;
<ide>
<ide><path>packages/shared/forks/ReactFeatureFlags.www-dynamic.js
<ide> export const enableTrustedTypesIntegration = false;
<ide> export const disableSchedulerTimeoutBasedOnReactExpirationTime = false;
<ide> export const disableNativeComponentFrames = false;
<ide> export const createRootStrictEffectsByDefault = false;
<del>export const enableStrictEffects = false;
<ide> export const allowConcurrentByDefault = true;
<ide> // You probably *don't* want to add more hardcoded ones.
<ide> // Instead, try to add them above with the __VARIANT__ value.
<ide><path>packages/shared/forks/ReactFeatureFlags.www.js
<ide> export const {
<ide> // On WWW, __EXPERIMENTAL__ is used for a new modern build.
<ide> // It's not used anywhere in production yet.
<ide>
<del>export const enableStrictEffects: boolean =
<del> __DEV__ && dynamicFeatureFlags.enableStrictEffects;
<ide> export const debugRenderPhaseSideEffectsForStrictMode = __DEV__;
<ide> export const enableProfilerTimer = __PROFILE__;
<ide> export const enableProfilerCommitHooks = __PROFILE__; | 24 |
Ruby | Ruby | add formula? method and sandbox core | 6e887fbf5ac5bc1fb551a3e4222ad1804b490def | <ide><path>Library/Homebrew/sandbox.rb
<ide>
<ide> class Sandbox
<ide> SANDBOX_EXEC = "/usr/bin/sandbox-exec".freeze
<add> SANDBOXED_TAPS = [
<add> "homebrew/core",
<add> ].freeze
<ide>
<ide> def self.available?
<ide> OS.mac? && File.executable?(SANDBOX_EXEC)
<ide> end
<ide>
<add> def self.formula?(formula)
<add> return false unless available?
<add> ARGV.sandbox? || SANDBOXED_TAPS.include?(formula.tap.to_s)
<add> end
<add>
<ide> def self.test?
<ide> return false unless available?
<ide> !ARGV.no_sandbox?
<ide><path>Library/Homebrew/test/test_sandbox.rb
<ide> def teardown
<ide> @dir.rmtree
<ide> end
<ide>
<add> def test_formula?
<add> f = formula { url "foo-1.0" }
<add> f2 = formula { url "bar-1.0" }
<add> f2.stubs(:tap).returns(Tap.fetch("test/tap"))
<add>
<add> ARGV.stubs(:sandbox?).returns true
<add> assert Sandbox.formula?(f),
<add> "Formulae should be sandboxed if --sandbox was passed."
<add>
<add> ARGV.stubs(:sandbox?).returns false
<add> assert Sandbox.formula?(f),
<add> "Formulae should be sandboxed if in a sandboxed tap."
<add> refute Sandbox.formula?(f2),
<add> "Formulae should not be sandboxed if not in a sandboxed tap."
<add> end
<add>
<ide> def test_test?
<ide> ARGV.stubs(:no_sandbox?).returns false
<ide> assert Sandbox.test?, | 2 |
Python | Python | fix shebang to use python3 by default | 50ad9fe9294bd177d349f8feb0c5a27689e5b5c1 | <ide><path>tools/test.py
<del>#!/usr/bin/env python
<add>#!/usr/bin/env python3
<ide> #
<ide> # Copyright 2008 the V8 project authors. All rights reserved.
<ide> # Redistribution and use in source and binary forms, with or without | 1 |
Python | Python | fix typo in meshgrid example | 85a2a7776e8cc01ce3efdc92e262a7764f5fc061 | <ide><path>numpy/lib/function_base.py
<ide> def meshgrid(*xi, copy=True, sparse=False, indexing='xy'):
<ide>
<ide> >>> x = np.linspace(-5, 5, 101)
<ide> >>> y = np.linspace(-5, 5, 101)
<del> >>> # full coorindate arrays
<add> >>> # full coordinate arrays
<ide> >>> xx, yy = np.meshgrid(x, y)
<ide> >>> zz = np.sqrt(xx**2 + yy**2)
<ide> >>> xx.shape, yy.shape, zz.shape | 1 |
Text | Text | fix lint errors for examples/introduction | 4e6ee825aee3753b0999b195a57f3f51ed1ec31e | <ide><path>docs/introduction/PriorArt.md
<ide> Does it make sense to use Redux together with Rx? Sure! They work great together
<ide> function toObservable(store) {
<ide> return {
<ide> subscribe({ onNext }) {
<del> let dispose = store.subscribe(() => onNext(store.getState()));
<del> onNext(store.getState());
<del> return { dispose };
<add> let dispose = store.subscribe(() => onNext(store.getState()))
<add> onNext(store.getState())
<add> return { dispose }
<ide> }
<ide> }
<ide> }
<ide><path>docs/introduction/ThreePrinciples.md
<ide> Redux can be described in three fundamental principles:
<ide> This makes it easy to create universal apps. The state from the server can be serialized and hydrated into the client with no extra coding effort. It is easier to debug an application when there is a single state tree. You can also persist your app’s state in development for a faster development cycle. And with a single state tree, you get previously difficult functionality like Undo/Redo for free.
<ide>
<ide> ```js
<del>console.log(store.getState());
<add>console.log(store.getState())
<ide>
<ide> {
<ide> visibilityFilter: 'SHOW_ALL',
<del> todos: [{
<del> text: 'Consider using Redux',
<del> completed: true,
<del> }, {
<del> text: 'Keep all state in a single tree',
<del> completed: false
<del> }]
<add> todos: [
<add> {
<add> text: 'Consider using Redux',
<add> completed: true,
<add> },
<add> {
<add> text: 'Keep all state in a single tree',
<add> completed: false
<add> }
<add> ]
<ide> }
<ide> ```
<ide>
<ide> This ensures that the views or the network callbacks never write directly to the
<ide> store.dispatch({
<ide> type: 'COMPLETE_TODO',
<ide> index: 1
<del>});
<add>})
<ide>
<ide> store.dispatch({
<ide> type: 'SET_VISIBILITY_FILTER',
<ide> filter: 'SHOW_COMPLETED'
<del>});
<add>})
<ide> ```
<ide>
<ide> ### Mutations are written as pure functions
<ide> store.dispatch({
<ide> Reducers are just pure functions that take the previous state and an action, and return the next state. Remember to return new state objects, instead of mutating the previous state. You can start with a single reducer, but as your app grows, you can split it into smaller reducers that manage specific parts of the state tree. Because reducers are just functions, you can control the order in which they are called, pass additional data, or even make reusable reducers for common tasks such as pagination.
<ide>
<ide> ```js
<add>
<ide> function visibilityFilter(state = 'SHOW_ALL', action) {
<ide> switch (action.type) {
<del> case 'SET_VISIBILITY_FILTER':
<del> return action.filter;
<del> default:
<del> return state;
<add> case 'SET_VISIBILITY_FILTER':
<add> return action.filter
<add> default:
<add> return state
<ide> }
<ide> }
<ide>
<ide> function todos(state = [], action) {
<ide> switch (action.type) {
<del> case 'ADD_TODO':
<del> return [...state, {
<del> text: action.text,
<del> completed: false
<del> }];
<del> case 'COMPLETE_TODO':
<del> return [
<del> ...state.slice(0, action.index),
<del> Object.assign({}, state[action.index], {
<del> completed: true
<del> }),
<del> ...state.slice(action.index + 1)
<del> ];
<del> default:
<del> return state;
<add> case 'ADD_TODO':
<add> return [
<add> ...state,
<add> {
<add> text: action.text,
<add> completed: false
<add> }
<add> ]
<add> case 'COMPLETE_TODO':
<add> return [
<add> ...state.slice(0, action.index),
<add> Object.assign({}, state[action.index], {
<add> completed: true
<add> }),
<add> ...state.slice(action.index + 1)
<add> ]
<add> default:
<add> return state
<ide> }
<ide> }
<ide>
<del>import { combineReducers, createStore } from 'redux';
<del>let reducer = combineReducers({ visibilityFilter, todos });
<del>let store = createStore(reducer);
<add>import { combineReducers, createStore } from 'redux'
<add>let reducer = combineReducers({ visibilityFilter, todos })
<add>let store = createStore(reducer)
<ide> ```
<ide>
<ide> That’s it! Now you know what Redux is all about. | 2 |
PHP | PHP | add array typehinting in method signatures | ad9b866f7f63662977147d3bb5bf287659cb1e57 | <ide><path>src/Console/Helper.php
<ide> public function __construct(ConsoleIo $io, array $config = [])
<ide> * @param array $args The arguments for the helper.
<ide> * @return void
<ide> */
<del> abstract public function output($args);
<add> abstract public function output(array $args);
<ide> }
<ide><path>src/Error/Debugger.php
<ide> public static function trace(array $options = [])
<ide> * @return mixed Formatted stack trace.
<ide> * @link https://book.cakephp.org/3.0/en/development/debugging.html#generating-stack-traces
<ide> */
<del> public static function formatTrace($backtrace, $options = [])
<add> public static function formatTrace($backtrace, array $options = [])
<ide> {
<ide> if ($backtrace instanceof Exception) {
<ide> $backtrace = $backtrace->getTrace();
<ide><path>src/Http/Client.php
<ide> protected function _mergeOptions($options)
<ide> * @param array $options Additional options to use.
<ide> * @return \Cake\Http\Client\Response
<ide> */
<del> public function send(Request $request, $options = [])
<add> public function send(Request $request, array $options = [])
<ide> {
<ide> $redirects = 0;
<ide> if (isset($options['redirect'])) {
<ide> protected function _sendRequest(Request $request, $options)
<ide> * @param array $options The config options stored with Client::config()
<ide> * @return string A complete url with scheme, port, host, and path.
<ide> */
<del> public function buildUrl($url, $query = [], $options = [])
<add> public function buildUrl($url, $query = [], array $options = [])
<ide> {
<ide> if (empty($options) && empty($query)) {
<ide> return $url;
<ide><path>src/Http/Response.php
<ide> public function withLength($bytes)
<ide> * @return static
<ide> * @since 3.6.0
<ide> */
<del> public function withAddedLink($url, $options = [])
<add> public function withAddedLink($url, array $options = [])
<ide> {
<ide> $params = [];
<ide> foreach ($options as $key => $option) {
<ide> public function withCookie($name, $data = '')
<ide> * @param array $options An array of cookie options.
<ide> * @return static
<ide> */
<del> public function withExpiredCookie($name, $options = [])
<add> public function withExpiredCookie($name, array $options = [])
<ide> {
<ide> if ($name instanceof CookieInterface) {
<ide> $cookie = $name->withExpired();
<ide><path>src/Shell/Helper/ProgressHelper.php
<ide> class ProgressHelper extends Helper
<ide> * @param array $args The arguments/options to use when outputting the progress bar.
<ide> * @return void
<ide> */
<del> public function output($args)
<add> public function output(array $args)
<ide> {
<ide> $args += ['callback' => null];
<ide> if (isset($args[0])) {
<ide><path>src/Shell/Helper/TableHelper.php
<ide> class TableHelper extends Helper
<ide> * @param array $rows The rows on which the column widths will be calculated.
<ide> * @return array
<ide> */
<del> protected function _calculateWidths($rows)
<add> protected function _calculateWidths(array $rows)
<ide> {
<ide> $widths = [];
<ide> foreach ($rows as $line) {
<ide> protected function _calculateWidths($rows)
<ide> * @param array $widths The widths of each column to output.
<ide> * @return void
<ide> */
<del> protected function _rowSeparator($widths)
<add> protected function _rowSeparator(array $widths)
<ide> {
<ide> $out = '';
<ide> foreach ($widths as $column) {
<ide> protected function _rowSeparator($widths)
<ide> * @param array $options Options to be passed.
<ide> * @return void
<ide> */
<del> protected function _render(array $row, $widths, $options = [])
<add> protected function _render(array $row, array $widths, array $options = [])
<ide> {
<ide> if (count($row) === 0) {
<ide> return;
<ide><path>src/View/Form/ArrayContext.php
<ide> public function isCreate()
<ide> * context's schema should be used if it's not explicitly provided.
<ide> * @return mixed
<ide> */
<del> public function val($field, $options = [])
<add> public function val($field, array $options = [])
<ide> {
<ide> $options += [
<ide> 'default' => null,
<ide><path>src/View/Form/EntityContext.php
<ide> public function isCreate()
<ide> * schema should be used if it's not explicitly provided.
<ide> * @return mixed The value of the field or null on a miss.
<ide> */
<del> public function val($field, $options = [])
<add> public function val($field, array $options = [])
<ide> {
<ide> $options += [
<ide> 'default' => null,
<ide><path>src/View/Form/FormContext.php
<ide> public function isCreate()
<ide> /**
<ide> * {@inheritDoc}
<ide> */
<del> public function val($field, $options = [])
<add> public function val($field, array $options = [])
<ide> {
<ide> $options += [
<ide> 'default' => null,
<ide><path>tests/TestCase/Http/ClientTest.php
<ide> public function testBuildUrl($expected, $url, $query, $opts)
<ide> {
<ide> $http = new Client();
<ide>
<del> $result = $http->buildUrl($url, $query, $opts);
<add> $result = $http->buildUrl($url, $query, (array)$opts);
<ide> $this->assertEquals($expected, $result);
<ide> }
<ide>
<ide><path>tests/test_app/Plugin/TestPlugin/src/Shell/Helper/ExampleHelper.php
<ide>
<ide> class ExampleHelper extends Helper
<ide> {
<del> public function output($args)
<add> public function output(array $args)
<ide> {
<ide> $this->_io->out('Plugins work!');
<ide> }
<ide><path>tests/test_app/TestApp/Command/Helper/CommandHelper.php
<ide>
<ide> class CommandHelper extends Helper
<ide> {
<del> public function output($args)
<add> public function output(array $args)
<ide> {
<ide> $this->_io->out('I am helping ' . implode(' ', $args));
<ide> }
<ide><path>tests/test_app/TestApp/Shell/Helper/SimpleHelper.php
<ide>
<ide> class SimpleHelper extends Helper
<ide> {
<del> public function output($args)
<add> public function output(array $args)
<ide> {
<ide> $this->_io->out('It works!' . implode(' ', $args));
<ide> } | 13 |
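For context, a minimal shell helper written against the tightened signature above might look like this (a sketch; the class name and message are illustrative, not part of the patch):

```php
<?php
use Cake\Console\Helper;

class GreetHelper extends Helper
{
    // The typehint now guarantees $args is an array before the body runs.
    public function output(array $args)
    {
        $this->_io->out('Hello ' . implode(' ', $args));
    }
}
```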
Go | Go | pull parent layers first before children | 7301fbe035ca450e2aea86b8f2467cffd3fac18b | <ide><path>server.go
<ide> func (srv *Server) pullImage(r *registry.Registry, out io.Writer, imgID, endpoin
<ide> // FIXME: Try to stream the images?
<ide> // FIXME: Launch the getRemoteImage() in goroutines
<ide>
<del> for _, id := range history {
<add> for i := len(history) - 1; i >= 0; i-- {
<add> id := history[i]
<ide>
<ide> // ensure no two downloads of the same layer happen at the same time
<ide> if err := srv.poolAdd("pull", "layer:"+id); err != nil { | 1 |
Javascript | Javascript | prevent aborted event when already completed | 1980a36dd45997e9dee03fe11688319c2f5c3644 | <ide><path>lib/_http_client.js
<ide> function socketCloseListener() {
<ide> var parser = socket.parser;
<ide> if (req.res && req.res.readable) {
<ide> // Socket closed before we emitted 'end' below.
<del> req.res.emit('aborted');
<add> if (!req.res.complete) req.res.emit('aborted');
<ide> var res = req.res;
<ide> res.on('end', function() {
<ide> res.emit('close'); | 1 |
Ruby | Ruby | add missing require for compact_blank | c1659150a7f76baa757aec4b3d5f90d6c330911f | <ide><path>activerecord/lib/active_record/database_configurations/connection_url_resolver.rb
<ide> # frozen_string_literal: true
<ide>
<add>require "active_support/core_ext/enumerable"
<add>
<ide> module ActiveRecord
<ide> class DatabaseConfigurations
<ide> # Expands a connection string into a hash. | 1 |
Javascript | Javascript | remove outdated comment re assert() | cb53cfd8b58907074af464b0a94a550d0969b6b7 | <ide><path>src/node.js
<ide>
<ide> var assert;
<ide> startup.processAssert = function() {
<del> // Note that calls to assert() are pre-processed out by JS2C for the
<del> // normal build of node. They persist only in the node_g build.
<del> // Similarly for debug().
<ide> assert = process.assert = function(x, msg) {
<ide> if (!x) throw new Error(msg || 'assertion error');
<ide> }; | 1 |
Javascript | Javascript | use modularize.js to generate jsm | 16ffe39a867ee15723a7ffb2c97e773f3920cd29 | <ide><path>examples/js/loaders/TGALoader.js
<ide> THREE.TGALoader = function ( manager ) {
<ide>
<del> THREE.Loader.call( this, manager );
<add> THREE.DataTextureLoader.call( this, manager );
<ide>
<ide> };
<ide>
<del>THREE.TGALoader.prototype = Object.assign( Object.create( THREE.Loader.prototype ), {
<add>THREE.TGALoader.prototype = Object.assign( Object.create( THREE.DataTextureLoader.prototype ), {
<ide>
<ide> constructor: THREE.TGALoader,
<ide>
<del> load: function ( url, onLoad, onProgress, onError ) {
<del>
<del> var scope = this;
<del>
<del> var texture = new THREE.Texture();
<del>
<del> var loader = new THREE.FileLoader( this.manager );
<del> loader.setResponseType( 'arraybuffer' );
<del> loader.setPath( this.path );
<del> loader.setWithCredentials( this.withCredentials );
<del>
<del> loader.load( url, function ( buffer ) {
<del>
<del> texture.image = scope.parse( buffer );
<del> texture.needsUpdate = true;
<del>
<del> if ( onLoad !== undefined ) {
<del>
<del> onLoad( texture );
<del>
<del> }
<del>
<del> }, onProgress, onError );
<del>
<del> return texture;
<del>
<del> },
<del>
<ide> parse: function ( buffer ) {
<ide>
<ide> // reference from vthibault, https://github.com/vthibault/roBrowser/blob/master/src/Loaders/Targa.js
<ide> THREE.TGALoader.prototype = Object.assign( Object.create( THREE.Loader.prototype
<ide>
<ide> //
<ide>
<del> var useOffscreen = typeof OffscreenCanvas !== 'undefined';
<del>
<del> var canvas = useOffscreen ? new OffscreenCanvas( header.width, header.height ) : document.createElement( 'canvas' );
<del> canvas.width = header.width;
<del> canvas.height = header.height;
<del>
<del> var context = canvas.getContext( '2d' );
<del> var imageData = context.createImageData( header.width, header.height );
<del>
<add> var imageData = new Uint8Array( header.width * header.height * 4 );
<ide> var result = tgaParse( use_rle, use_pal, header, offset, content );
<del> getTgaRGBA( imageData.data, header.width, header.height, result.pixel_data, result.palettes );
<add> getTgaRGBA( imageData, header.width, header.height, result.pixel_data, result.palettes );
<add>
<add> return {
<ide>
<del> context.putImageData( imageData, 0, 0 );
<add> data: imageData,
<add> width: header.width,
<add> height: header.height,
<ide>
<del> return canvas;
<add> };
<ide>
<ide> }
<ide>
<ide><path>examples/jsm/loaders/TGALoader.js
<ide> import {
<del> DataTextureLoader,
<add> DataTextureLoader
<ide> } from '../../../build/three.module.js';
<ide>
<ide> var TGALoader = function ( manager ) { | 2 |
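A hypothetical call site for the reworked loader (the `material` variable is assumed to exist in the surrounding setup): because `TGALoader` now extends `THREE.DataTextureLoader`, `load()` hands the callback a ready texture built from the raw RGBA buffer that `parse()` returns.

```js
var loader = new THREE.TGALoader();

loader.load( 'textures/crate.tga', function ( texture ) {

	material.map = texture;
	material.needsUpdate = true;

} );
```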
Mixed | Python | add dtype to graph inputs | aeb954dd49cdc72b520c35f177c31407e9bc9d06 | <ide><path>docs/sources/models.md
<ide> Arbitrary connection graph. It can have any number of inputs and outputs, with e
<ide> model = keras.models.Graph()
<ide> ```
<ide> - __Methods__:
<del> - __add_input__(ndim=2): Add an input with shape dimensionality `ndim`. Use `ndim=2` for vector input (`(samples, features)`), ndim=3 for temporal input (`(samples, time, features)`), ndim=4 for image input (`(samples, channels, height, width)`).
<add> - __add_input__(ndim=2, dtype='float'): Add an input with shape dimensionality `ndim`.
<add> - __Arguments__:
<add>		- __ndim__: Use `ndim=2` for vector input `(samples, features)`, `ndim=3` for temporal input `(samples, time, features)`, `ndim=4` for image input `(samples, channels, height, width)`.
<add> - __dtype__: `float` or `int`. Use `int` if the input is connected to an Embedding layer, `float` otherwise.
<ide> - __add_output__(name, input=None, inputs=[], merge_mode='concat'): Add an output connect to `input` or `inputs`.
<ide> - __Arguments__:
<ide> - __name__: str. unique identifier of the output.
<ide><path>keras/layers/containers.py
<ide> def get_output(self, train=False):
<ide> when it has exactly one input and one output.')
<ide> return self.outputs[self.output_order[0]].get_output(train)
<ide>
<del> def add_input(self, name, ndim=2):
<add> def add_input(self, name, ndim=2, dtype='float'):
<ide> if name in self.namespace:
<ide> raise Exception('Duplicate node identifier: ' + name)
<ide> self.namespace.add(name)
<ide> self.input_order.append(name)
<ide> layer = Layer() # empty layer
<del> layer.input = ndim_tensor(ndim)
<add> if dtype == 'float':
<add> layer.input = ndim_tensor(ndim)
<add> else:
<add> if ndim == 2:
<add> layer.input = T.imatrix()
<add> else:
<add> raise Exception('Type "int" can only be used with ndim==2.')
<ide> layer.input.name = name
<ide> self.inputs[name] = layer
<del> self.output_config.append({'name':name, 'ndim':ndim})
<add> self.output_config.append({'name':name, 'ndim':ndim, 'dtype':dtype})
<ide>
<ide> def add_node(self, layer, name, input=None, inputs=[], merge_mode='concat'):
<ide> if hasattr(layer, 'set_name'): | 2 |
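A minimal sketch of the new argument in use (layer sizes and names are arbitrary; this assumes the 2015-era `Graph` API shown in the diff): an integer input can now feed an `Embedding` layer directly.

```python
from keras.models import Graph
from keras.layers.embeddings import Embedding

graph = Graph()
# 2D integer input: (samples, sequence_length) of token indices.
graph.add_input(name='tokens', ndim=2, dtype='int')
graph.add_node(Embedding(10000, 128), name='embedding', input='tokens')
```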
Ruby | Ruby | fix sharelock issues | b3d78e8c23e3cbd5325719474efb9f7ee4168f72 | <ide><path>activesupport/lib/active_support/concurrency/share_lock.rb
<ide> def start_exclusive(purpose: nil, compatible: [], no_wait: false)
<ide> return false if no_wait
<ide>
<ide> loose_shares = @sharing.delete(Thread.current)
<del> @waiting[Thread.current] = compatible if loose_shares
<add> @waiting[Thread.current] = compatible
<ide>
<ide> @cv.wait_while { busy?(purpose) }
<ide>
<ide> def sharing
<ide> # Must be called within synchronize
<ide> def busy?(purpose)
<ide> (@exclusive_thread && @exclusive_thread != Thread.current) ||
<del> @waiting.any? { |k, v| k != Thread.current && !v.include?(purpose) } ||
<add> (purpose && @waiting.any? { |k, v| k != Thread.current && !v.include?(purpose) }) ||
<ide> @sharing.size > (@sharing[Thread.current] > 0 ? 1 : 0)
<ide> end
<ide> end | 1 |
Text | Text | add troubleshooting guide to sidebar | 26ef0b0f33b142fbb87a4d8b643231ac305409a7 | <ide><path>docs/Troubleshooting.md
<ide> id: troubleshooting
<ide> title: Troubleshooting
<ide> layout: docs
<del>category: Quick Start
<add>category: Guides
<ide> permalink: docs/troubleshooting.html
<add>next: native-modules-ios
<add>previous: upgrading
<ide> ---
<ide>
<add>
<ide> These are some common issues you may run into while setting up React Native. If you encounter something that is not listed here, try [searching for the issue in GitHub](https://github.com/facebook/react-native/issues/).
<ide>
<ide> ### Port already in use
<ide><path>docs/Upgrading.md
<ide> title: Upgrading to new React Native versions
<ide> layout: docs
<ide> category: Guides
<ide> permalink: docs/upgrading.html
<del>next: native-modules-ios
<add>next: troubleshooting
<ide> previous: running-on-device
<ide> ---
<ide> | 2 |
Go | Go | move supportsmultiplelowerdir to utils | d5687079ad8ad27c467ef5c8758a73c519b45d9b | <ide><path>daemon/graphdriver/overlay2/check.go
<ide> func doesSupportNativeDiff(d string) error {
<ide>
<ide> return nil
<ide> }
<del>
<del>// supportsMultipleLowerDir checks if the system supports multiple lowerdirs,
<del>// which is required for the overlay2 driver. On 4.x kernels, multiple lowerdirs
<del>// are always available (so this check isn't needed), and backported to RHEL and
<del>// CentOS 3.x kernels (3.10.0-693.el7.x86_64 and up). This function is to detect
<del>// support on those kernels, without doing a kernel version compare.
<del>func supportsMultipleLowerDir(d string) error {
<del> td, err := ioutil.TempDir(d, "multiple-lowerdir-check")
<del> if err != nil {
<del> return err
<del> }
<del> defer func() {
<del> if err := os.RemoveAll(td); err != nil {
<del> logger.Warnf("Failed to remove check directory %v: %v", td, err)
<del> }
<del> }()
<del>
<del> for _, dir := range []string{"lower1", "lower2", "upper", workDirName, mergedDirName} {
<del> if err := os.Mkdir(filepath.Join(td, dir), 0755); err != nil {
<del> return err
<del> }
<del> }
<del>
<del> opts := fmt.Sprintf("lowerdir=%s:%s,upperdir=%s,workdir=%s", path.Join(td, "lower2"), path.Join(td, "lower1"), path.Join(td, "upper"), path.Join(td, workDirName))
<del> if err := unix.Mount("overlay", filepath.Join(td, mergedDirName), "overlay", 0, opts); err != nil {
<del> return errors.Wrap(err, "failed to mount overlay")
<del> }
<del> if err := unix.Unmount(filepath.Join(td, mergedDirName), 0); err != nil {
<del> logger.Warnf("Failed to unmount check directory %v: %v", filepath.Join(td, mergedDirName), err)
<del> }
<del> return nil
<del>}
<ide><path>daemon/graphdriver/overlay2/overlay.go
<ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap
<ide> if opts.overrideKernelCheck {
<ide> logger.Warn("Using pre-4.0.0 kernel for overlay2, mount failures may require kernel update")
<ide> } else {
<del> if err := supportsMultipleLowerDir(testdir); err != nil {
<add> if err := overlayutils.SupportsMultipleLowerDir(testdir); err != nil {
<ide> logger.Debugf("Multiple lower dirs not supported: %v", err)
<ide> return nil, graphdriver.ErrNotSupported
<ide> }
<ide><path>daemon/graphdriver/overlayutils/overlayutils.go
<ide> package overlayutils // import "github.com/docker/docker/daemon/graphdriver/over
<ide>
<ide> import (
<ide> "fmt"
<add> "io/ioutil"
<add> "os"
<add> "path"
<add> "path/filepath"
<ide>
<ide> "github.com/docker/docker/daemon/graphdriver"
<add> "github.com/pkg/errors"
<add> "github.com/sirupsen/logrus"
<add> "golang.org/x/sys/unix"
<ide> )
<ide>
<ide> // ErrDTypeNotSupported denotes that the backing filesystem doesn't support d_type.
<ide> func ErrDTypeNotSupported(driver, backingFs string) error {
<ide>
<ide> return graphdriver.NotSupportedError(msg)
<ide> }
<add>
<add>// SupportsMultipleLowerDir checks if the system supports multiple lowerdirs,
<add>// which is required for the overlay2 driver. On 4.x kernels, multiple lowerdirs
<add>// are always available (so this check isn't needed), and backported to RHEL and
<add>// CentOS 3.x kernels (3.10.0-693.el7.x86_64 and up). This function detects
<add>// support on those kernels without doing a kernel version compare.
<add>func SupportsMultipleLowerDir(d string) error {
<add> td, err := ioutil.TempDir(d, "multiple-lowerdir-check")
<add> if err != nil {
<add> return err
<add> }
<add> defer func() {
<add> if err := os.RemoveAll(td); err != nil {
<add> logrus.Warnf("Failed to remove check directory %v: %v", td, err)
<add> }
<add> }()
<add>
<add> for _, dir := range []string{"lower1", "lower2", "upper", "work", "merged"} {
<add> if err := os.Mkdir(filepath.Join(td, dir), 0755); err != nil {
<add> return err
<add> }
<add> }
<add>
<add> mnt := filepath.Join(td, "merged")
<add> opts := fmt.Sprintf("lowerdir=%s:%s,upperdir=%s,workdir=%s", path.Join(td, "lower2"), path.Join(td, "lower1"), path.Join(td, "upper"), path.Join(td, "work"))
<add> if err := unix.Mount("overlay", mnt, "overlay", 0, opts); err != nil {
<add> return errors.Wrap(err, "failed to mount overlay")
<add> }
<add> if err := unix.Unmount(mnt, 0); err != nil {
<add> logrus.Warnf("Failed to unmount check directory %v: %v", mnt, err)
<add> }
<add> return nil
<add>} | 3 |
Text | Text | add morph to token api docs | fcd3a4abe3a5b0c9d296456f134a7de6fd187edf | <ide><path>website/docs/api/token.md
<ide> The L2 norm of the token's vector representation.
<ide> | `pos_` | str | Coarse-grained part-of-speech from the [Universal POS tag set](https://universaldependencies.org/docs/u/pos/). |
<ide> | `tag` | int | Fine-grained part-of-speech. |
<ide> | `tag_` | str | Fine-grained part-of-speech. |
<add>| `morph` | `MorphAnalysis` | Morphological analysis. |
<ide> | `dep` | int | Syntactic dependency relation. |
<ide> | `dep_` | str | Syntactic dependency relation. |
<ide> | `lang` | int | Language of the parent document's vocabulary. | | 1 |
Ruby | Ruby | fix typo in test | 36efba05206aef9e587cb971a67d931cd941b90a | <ide><path>actionmailer/test/base_test.rb
<ide> class BaseTest < ActiveSupport::TestCase
<ide> assert_equal("multipart/mixed", email.mime_type)
<ide> end
<ide>
<del> test "set mime type to text/html when attachment is inclued and body is set" do
<add> test "set mime type to text/html when attachment is included and body is set" do
<ide> email = BaseMailer.attachment_with_content(body: "Hello there", content_type: "text/html")
<ide> assert_equal("text/html", email.mime_type)
<ide> end | 1 |
Go | Go | remove redundant error message | 894266c1bbdfeb53bf278f3cb762945bac69e592 | <ide><path>api/client/kill.go
<ide> func (cli *DockerCli) CmdKill(args ...string) error {
<ide> var errs []string
<ide> for _, name := range cmd.Args() {
<ide> if err := cli.client.ContainerKill(name, *signal); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to kill container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/pause.go
<ide> func (cli *DockerCli) CmdPause(args ...string) error {
<ide> var errs []string
<ide> for _, name := range cmd.Args() {
<ide> if err := cli.client.ContainerPause(name); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to pause container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/restart.go
<ide> func (cli *DockerCli) CmdRestart(args ...string) error {
<ide> var errs []string
<ide> for _, name := range cmd.Args() {
<ide> if err := cli.client.ContainerRestart(name, *nSeconds); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to kill container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/rm.go
<ide> func (cli *DockerCli) removeContainer(containerID string, removeVolumes, removeL
<ide> Force: force,
<ide> }
<ide> if err := cli.client.ContainerRemove(options); err != nil {
<del> return fmt.Errorf("Failed to remove container (%s): %v", containerID, err)
<add> return err
<ide> }
<ide> return nil
<ide> }
<ide><path>api/client/rmi.go
<ide> func (cli *DockerCli) CmdRmi(args ...string) error {
<ide>
<ide> dels, err := cli.client.ImageRemove(options)
<ide> if err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to remove image (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> for _, del := range dels {
<ide> if del.Deleted != "" {
<ide><path>api/client/stop.go
<ide> func (cli *DockerCli) CmdStop(args ...string) error {
<ide> var errs []string
<ide> for _, name := range cmd.Args() {
<ide> if err := cli.client.ContainerStop(name, *nSeconds); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to stop container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/unpause.go
<ide> func (cli *DockerCli) CmdUnpause(args ...string) error {
<ide> var errs []string
<ide> for _, name := range cmd.Args() {
<ide> if err := cli.client.ContainerUnpause(name); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to unpause container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/update.go
<ide> func (cli *DockerCli) CmdUpdate(args ...string) error {
<ide> var errs []string
<ide> for _, name := range names {
<ide> if err := cli.client.ContainerUpdate(name, updateConfig); err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to update container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%s\n", name)
<ide> }
<ide><path>api/client/wait.go
<ide> func (cli *DockerCli) CmdWait(args ...string) error {
<ide> for _, name := range cmd.Args() {
<ide> status, err := cli.client.ContainerWait(name)
<ide> if err != nil {
<del> errs = append(errs, fmt.Sprintf("Failed to wait container (%s): %s", name, err))
<add> errs = append(errs, err.Error())
<ide> } else {
<ide> fmt.Fprintf(cli.out, "%d\n", status)
<ide> }
<ide><path>daemon/delete.go
<ide> func (daemon *Daemon) ContainerRm(name string, config *types.ContainerRmConfig)
<ide> // do not fail when the removal is already in progress, started by another request.
<ide> return nil
<ide> }
<del> return derr.ErrorCodeRmState.WithArgs(err)
<add> return derr.ErrorCodeRmState.WithArgs(container.ID, err)
<ide> }
<ide> defer container.ResetRemovalInProgress()
<ide>
<ide> func (daemon *Daemon) rmLink(container *container.Container, name string) error
<ide> func (daemon *Daemon) cleanupContainer(container *container.Container, forceRemove bool) (err error) {
<ide> if container.IsRunning() {
<ide> if !forceRemove {
<del> return derr.ErrorCodeRmRunning
<add> return derr.ErrorCodeRmRunning.WithArgs(container.ID)
<ide> }
<ide> if err := daemon.Kill(container); err != nil {
<del> return derr.ErrorCodeRmFailed.WithArgs(err)
<add> return derr.ErrorCodeRmFailed.WithArgs(container.ID, err)
<ide> }
<ide> }
<ide>
<ide><path>daemon/kill.go
<ide> func (daemon *Daemon) killWithSignal(container *container.Container, sig int) er
<ide> }
<ide>
<ide> if err := daemon.kill(container, sig); err != nil {
<del> return err
<add> return derr.ErrorCodeCantKill.WithArgs(container.ID, err)
<ide> }
<ide>
<ide> attributes := map[string]string{
<ide><path>daemon/pause.go
<ide> func (daemon *Daemon) ContainerPause(name string) error {
<ide> }
<ide>
<ide> if err := daemon.containerPause(container); err != nil {
<del> return derr.ErrorCodePauseError.WithArgs(name, err)
<add> return err
<ide> }
<ide>
<ide> return nil
<ide> func (daemon *Daemon) containerPause(container *container.Container) error {
<ide> }
<ide>
<ide> if err := daemon.execDriver.Pause(container.Command); err != nil {
<del> return err
<add> return derr.ErrorCodeCantPause.WithArgs(container.ID, err)
<ide> }
<ide> container.Paused = true
<ide> daemon.LogContainerEvent(container, "pause")
<ide><path>daemon/stop.go
<ide> func (daemon *Daemon) ContainerStop(name string, seconds int) error {
<ide> return err
<ide> }
<ide> if !container.IsRunning() {
<del> return derr.ErrorCodeStopped
<add> return derr.ErrorCodeStopped.WithArgs(name)
<ide> }
<ide> if err := daemon.containerStop(container, seconds); err != nil {
<ide> return derr.ErrorCodeCantStop.WithArgs(name, err)
<ide><path>daemon/unpause.go
<ide> func (daemon *Daemon) ContainerUnpause(name string) error {
<ide> }
<ide>
<ide> if err := daemon.containerUnpause(container); err != nil {
<del> return derr.ErrorCodeCantUnpause.WithArgs(name, err)
<add> return err
<ide> }
<ide>
<ide> return nil
<ide> func (daemon *Daemon) containerUnpause(container *container.Container) error {
<ide> }
<ide>
<ide> if err := daemon.execDriver.Unpause(container.Command); err != nil {
<del> return err
<add> return derr.ErrorCodeCantUnpause.WithArgs(container.ID, err)
<ide> }
<ide>
<ide> container.Paused = false
<ide><path>daemon/update.go
<ide> package daemon
<ide> import (
<ide> "fmt"
<ide>
<add> derr "github.com/docker/docker/errors"
<ide> "github.com/docker/engine-api/types/container"
<ide> )
<ide>
<ide> func (daemon *Daemon) update(name string, hostConfig *container.HostConfig) erro
<ide> }
<ide>
<ide> if container.RemovalInProgress || container.Dead {
<del> return fmt.Errorf("Container is marked for removal and cannot be \"update\".")
<add> errMsg := fmt.Errorf("Container is marked for removal and cannot be \"update\".")
<add> return derr.ErrorCodeCantUpdate.WithArgs(container.ID, errMsg)
<ide> }
<ide>
<ide> if container.IsRunning() && hostConfig.KernelMemory != 0 {
<del> return fmt.Errorf("Can not update kernel memory to a running container, please stop it first.")
<add> errMsg := fmt.Errorf("Can not update kernel memory to a running container, please stop it first.")
<add> return derr.ErrorCodeCantUpdate.WithArgs(container.ID, errMsg)
<ide> }
<ide>
<ide> if err := container.UpdateContainer(hostConfig); err != nil {
<del> return err
<add> return derr.ErrorCodeCantUpdate.WithArgs(container.ID, err.Error())
<ide> }
<ide>
<ide> // If container is not running, update hostConfig struct is enough,
<ide> func (daemon *Daemon) update(name string, hostConfig *container.HostConfig) erro
<ide> // to the real world.
<ide> if container.IsRunning() {
<ide> if err := daemon.execDriver.Update(container.Command); err != nil {
<del> return err
<add> return derr.ErrorCodeCantUpdate.WithArgs(container.ID, err.Error())
<ide> }
<ide> }
<ide>
<ide><path>errors/daemon.go
<ide> var (
<ide> HTTPStatusCode: http.StatusInternalServerError,
<ide> })
<ide>
<add> // ErrorCodeCantPause is generated when there's an error while trying
<add> // to pause a container.
<add> ErrorCodeCantPause = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "CANTPAUSE",
<add> Message: "Cannot pause container %s: %s",
<add> Description: "An error occurred while trying to pause the specified container",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<ide> // ErrorCodeCantUnpause is generated when there's an error while trying
<ide> // to unpause a container.
<ide> ErrorCodeCantUnpause = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> var (
<ide> HTTPStatusCode: http.StatusInternalServerError,
<ide> })
<ide>
<add> // ErrorCodeCantKill is generated when there's an error while trying
<add> // to kill a container.
<add> ErrorCodeCantKill = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "CANTKILL",
<add> Message: "Cannot kill container %s: %s",
<add> Description: "An error occurred while trying to kill the specified container",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<add> // ErrorCodeCantUpdate is generated when there's an error while trying
<add> // to update a container.
<add> ErrorCodeCantUpdate = errcode.Register(errGroup, errcode.ErrorDescriptor{
<add> Value: "CANTUPDATE",
<add> Message: "Cannot update container %s: %s",
<add> Description: "An error occurred while trying to update the specified container",
<add> HTTPStatusCode: http.StatusInternalServerError,
<add> })
<add>
<ide> // ErrorCodePSError is generated when trying to run 'ps'.
<ide> ErrorCodePSError = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> Value: "PSError",
<ide> var (
<ide> // that is already stopped.
<ide> ErrorCodeStopped = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> Value: "STOPPED",
<del> Message: "Container already stopped",
<add> Message: "Container %s is already stopped",
<ide> Description: "An attempt was made to stop a container, but the container is already stopped",
<ide> HTTPStatusCode: http.StatusNotModified,
<ide> })
<ide> var (
<ide> // but its still running.
<ide> ErrorCodeRmRunning = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> Value: "RMRUNNING",
<del> Message: "Conflict, You cannot remove a running container. Stop the container before attempting removal or use -f",
<add> Message: "You cannot remove a running container %s. Stop the container before attempting removal or use -f",
<ide> Description: "An attempt was made to delete a container but the container is still running, try to either stop it first or use '-f'",
<ide> HTTPStatusCode: http.StatusConflict,
<ide> })
<ide> var (
<ide> // but it failed for some reason.
<ide> ErrorCodeRmFailed = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> Value: "RMFAILED",
<del> Message: "Could not kill running container, cannot remove - %v",
<add> Message: "Could not kill running container %s, cannot remove - %v",
<ide> Description: "An error occurred while trying to delete a running container",
<ide> HTTPStatusCode: http.StatusInternalServerError,
<ide> })
<ide> var (
<ide> // but couldn't set its state to RemovalInProgress.
<ide> ErrorCodeRmState = errcode.Register(errGroup, errcode.ErrorDescriptor{
<ide> Value: "RMSTATE",
<del> Message: "Failed to set container state to RemovalInProgress: %s",
<add> Message: "Failed to set container %s state to RemovalInProgress: %s",
<ide> Description: "An attempt to delete a container was made, but there as an error trying to set its state to 'RemovalInProgress'",
<ide> HTTPStatusCode: http.StatusInternalServerError,
<ide> })
<ide><path>integration-cli/docker_cli_rm_test.go
<ide> package main
<ide>
<ide> import (
<ide> "os"
<del> "strings"
<ide>
<ide> "github.com/docker/docker/pkg/integration/checker"
<ide> "github.com/go-check/check"
<ide> func (s *DockerSuite) TestRmContainerOrphaning(c *check.C) {
<ide> }
<ide>
<ide> func (s *DockerSuite) TestRmInvalidContainer(c *check.C) {
<del> if out, _, err := dockerCmdWithError("rm", "unknown"); err == nil {
<del> c.Fatal("Expected error on rm unknown container, got none")
<del> } else if !strings.Contains(out, "Failed to remove container") {
<del> c.Fatalf("Expected output to contain 'Failed to remove container', got %q", out)
<del> }
<add> out, _, err := dockerCmdWithError("rm", "unknown")
<add> c.Assert(err, checker.NotNil, check.Commentf("Expected error on rm unknown container, got none"))
<add> c.Assert(out, checker.Contains, "No such container")
<ide> }
<ide>
<ide> func createRunningContainer(c *check.C, name string) { | 17 |
PHP | PHP | add mask setting to filelog | 278700677ae2ee3cc57644397c8ee0da715b552f | <ide><path>lib/Cake/Log/Engine/FileLog.php
<ide> class FileLog extends BaseLog {
<ide> 'types' => null,
<ide> 'scopes' => array(),
<ide> 'rotate' => 10,
<del> 'size' => 10485760 // 10MB
<add> 'size' => 10485760, // 10MB
<add> 'mask' => null,
<ide> );
<ide>
<ide> /**
<ide> class FileLog extends BaseLog {
<ide> * human reabable string values like '10MB', '100KB' etc.
<ide> * - `rotate` Log files are rotated specified times before being removed.
<ide> * If value is 0, old versions are removed rather than rotated.
<add> * - `mask` A mask is applied when log files are created. If left empty, no chmod
<add> * is applied.
<ide> *
<ide> * @param array $options Options for the FileLog, see above.
<ide> */
<ide> public function write($type, $message) {
<ide> $this->_rotateFile($filename);
<ide> }
<ide>
<del> return file_put_contents($this->_path . $filename, $output, FILE_APPEND);
<add> $pathname = $this->_path . $filename;
<add> if (empty($this->_config['mask'])) {
<add> return file_put_contents($pathname, $output, FILE_APPEND);
<add> }
<add>
<add> $exists = file_exists($pathname);
<add> $r = file_put_contents($pathname, $output, FILE_APPEND);
<add> static $selfError = false;
<add> if (!$selfError && !$exists && !chmod($pathname, (int)$this->_config['mask'])) {
<add> $selfError = true;
<add> trigger_error(__d(
<add> 'cake_dev', 'Could not apply permission mask "%s" on log file "%s"',
<add> array($this->_config['mask'], $pathname)), E_USER_WARNING);
<add> $selfError = false;
<add> }
<ide> }
<ide>
<ide> /**
<ide><path>lib/Cake/Test/Case/Log/Engine/FileLogTest.php
<ide> public function testRotation() {
<ide> $this->assertEquals(0, count(glob($path . 'debug.log.*')));
<ide> }
<ide>
<add> public function testMaskSetting() {
<add> if (DS === '\\') {
<add> $this->markTestSkipped('File permission testing does not work on Windows.');
<add> }
<add>
<add> $path = TMP . 'tests' . DS;
<add> $this->_deleteLogs($path);
<add>
<add> $log = new FileLog(array('path' => $path, 'mask' => 0666));
<add> $log->write('warning', 'Test warning one');
<add> $result = substr(sprintf('%o', fileperms($path . 'error.log')), -4);
<add> $expected = '0666';
<add> $this->assertEquals($expected, $result);
<add> unlink($path . 'error.log');
<add>
<add> $log = new FileLog(array('path' => $path, 'mask' => 0644));
<add> $log->write('warning', 'Test warning two');
<add> $result = substr(sprintf('%o', fileperms($path . 'error.log')), -4);
<add> $expected = '0644';
<add> $this->assertEquals($expected, $result);
<add> unlink($path . 'error.log');
<add>
<add> $log = new FileLog(array('path' => $path, 'mask' => 0640));
<add> $log->write('warning', 'Test warning three');
<add> $result = substr(sprintf('%o', fileperms($path . 'error.log')), -4);
<add> $expected = '0640';
<add> $this->assertEquals($expected, $result);
<add> unlink($path . 'error.log');
<add> }
<add>
<ide> /**
<ide> * helper function that clears all log files in the specified directory
<ide> * | 2 |
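A hypothetical application-side configuration using the new option (key names follow the docblock above; the engine alias and octal value are illustrative):

```php
<?php
CakeLog::config('default', array(
    'engine' => 'FileLog',
    'path' => LOGS,
    'mask' => 0664, // newly created log files become group-writable
));
```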
Text | Text | add internal link in governance.md | 874839c6dc42f4b0bb92db7cc5bdf756d771e35e | <ide><path>GOVERNANCE.md
<ide> responsibility for the change. In the case of pull requests proposed
<ide> by an existing Collaborator, an additional Collaborator is required
<ide> for sign-off. Consensus should be sought if additional Collaborators
<ide> participate and there is disagreement around a particular
<del>modification. See _Consensus Seeking Process_ below for further detail
<add>modification. See [Consensus Seeking Process](#consensus-seeking-process) below for further detail
<ide> on the consensus model used for governance.
<ide>
<ide> Collaborators may opt to elevate significant or controversial | 1 |
Javascript | Javascript | add internal [@@ refresh()] function | bb5575aa75fd3071724d5eccde39a3041e1af57a | <ide><path>lib/internal/http2/core.js
<ide> const {
<ide> const {
<ide> kTimeout,
<ide> setUnrefTimeout,
<del> validateTimerDuration
<add> validateTimerDuration,
<add> refreshFnSymbol
<ide> } = require('internal/timers');
<ide>
<del>const { _unrefActive } = require('timers');
<del>
<ide> const { ShutdownWrap, WriteWrap } = process.binding('stream_wrap');
<ide> const { constants } = binding;
<ide>
<ide> class Http2Session extends EventEmitter {
<ide> [kUpdateTimer]() {
<ide> if (this.destroyed)
<ide> return;
<del> if (this[kTimeout]) _unrefActive(this[kTimeout]);
<add> if (this[kTimeout]) this[kTimeout][refreshFnSymbol]();
<ide> }
<ide>
<ide> // Sets the id of the next stream to be created by this Http2Session.
<ide> class Http2Stream extends Duplex {
<ide> if (this.destroyed)
<ide> return;
<ide> if (this[kTimeout])
<del> _unrefActive(this[kTimeout]);
<add> this[kTimeout][refreshFnSymbol]();
<ide> if (this[kSession])
<ide> this[kSession][kUpdateTimer]();
<ide> }
<ide><path>lib/internal/timers.js
<ide> const errors = require('internal/errors');
<ide> // Timeout values > TIMEOUT_MAX are set to 1.
<ide> const TIMEOUT_MAX = 2 ** 31 - 1;
<ide>
<add>const refreshFnSymbol = Symbol('refresh()');
<add>const unrefedSymbol = Symbol('unrefed');
<add>
<ide> module.exports = {
<ide> TIMEOUT_MAX,
<ide> kTimeout: Symbol('timeout'), // For hiding Timeouts on other internals.
<ide> async_id_symbol,
<ide> trigger_async_id_symbol,
<ide> Timeout,
<add> refreshFnSymbol,
<ide> setUnrefTimeout,
<ide> validateTimerDuration
<ide> };
<ide> function getTimers() {
<ide>
<ide> // Timer constructor function.
<ide> // The entire prototype is defined in lib/timers.js
<del>function Timeout(callback, after, args, isRepeat) {
<add>function Timeout(callback, after, args, isRepeat, isUnrefed) {
<ide> after *= 1; // coalesce to number or NaN
<ide> if (!(after >= 1 && after <= TIMEOUT_MAX)) {
<ide> if (after > TIMEOUT_MAX) {
<ide> function Timeout(callback, after, args, isRepeat) {
<ide> this._repeat = isRepeat ? after : null;
<ide> this._destroyed = false;
<ide>
<add> this[unrefedSymbol] = isUnrefed;
<add>
<ide> this[async_id_symbol] = ++async_id_fields[kAsyncIdCounter];
<ide> this[trigger_async_id_symbol] = getDefaultTriggerAsyncId();
<ide> if (async_hook_fields[kInit] > 0) {
<ide> function Timeout(callback, after, args, isRepeat) {
<ide> }
<ide> }
<ide>
<add>Timeout.prototype[refreshFnSymbol] = function refresh() {
<add> if (this._handle) {
<add> // Using uv_timer_again() would be more ideal; however, that API does not
<add> // cause libuv's sorted timers data structure (a binary heap at the time
<add> // of writing) to re-sort itself. This causes ordering inconsistencies.
<add> this._handle.stop();
<add> this._handle.start(this._idleTimeout);
<add> } else if (this[unrefedSymbol]) {
<add> getTimers()._unrefActive(this);
<add> } else {
<add> getTimers().active(this);
<add> }
<add>};
<ide>
<ide> function setUnrefTimeout(callback, after, arg1, arg2, arg3) {
<ide> // Type checking identical to setTimeout()
<ide> function setUnrefTimeout(callback, after, arg1, arg2, arg3) {
<ide> break;
<ide> }
<ide>
<del> const timer = new Timeout(callback, after, args, false);
<add> const timer = new Timeout(callback, after, args, false, true);
<ide> getTimers()._unrefActive(timer);
<ide>
<ide> return timer;
<ide><path>lib/net.js
<ide>
<ide> const EventEmitter = require('events');
<ide> const stream = require('stream');
<del>const timers = require('timers');
<ide> const util = require('util');
<ide> const internalUtil = require('internal/util');
<ide> const {
<ide> const exceptionWithHostPort = util._exceptionWithHostPort;
<ide> const {
<ide> kTimeout,
<ide> setUnrefTimeout,
<del> validateTimerDuration
<add> validateTimerDuration,
<add> refreshFnSymbol
<ide> } = require('internal/timers');
<ide>
<ide> function noop() {}
<ide> util.inherits(Socket, stream.Duplex);
<ide> Socket.prototype._unrefTimer = function _unrefTimer() {
<ide> for (var s = this; s !== null; s = s._parent) {
<ide> if (s[kTimeout])
<del> timers._unrefActive(s[kTimeout]);
<add> s[kTimeout][refreshFnSymbol]();
<ide> }
<ide> };
<ide>
<ide><path>lib/timers.js
<ide> function setTimeout(callback, after, arg1, arg2, arg3) {
<ide> break;
<ide> }
<ide>
<del> const timeout = new Timeout(callback, after, args, false);
<add> const timeout = new Timeout(callback, after, args, false, false);
<ide> active(timeout);
<ide>
<ide> return timeout;
<ide> }
<ide>
<ide> setTimeout[internalUtil.promisify.custom] = function(after, value) {
<ide> const promise = createPromise();
<del> const timeout = new Timeout(promise, after, [value], false);
<add> const timeout = new Timeout(promise, after, [value], false, false);
<ide> active(timeout);
<ide>
<ide> return promise;
<ide> exports.setInterval = function(callback, repeat, arg1, arg2, arg3) {
<ide> break;
<ide> }
<ide>
<del> const timeout = new Timeout(callback, repeat, args, true);
<add> const timeout = new Timeout(callback, repeat, args, true, false);
<ide> active(timeout);
<ide>
<ide> return timeout;
<ide><path>test/parallel/test-timers-refresh.js
<add>// Flags: --expose-internals
<add>
<add>'use strict';
<add>
<add>const common = require('../common');
<add>
<add>const { strictEqual } = require('assert');
<add>const { setUnrefTimeout, refreshFnSymbol } = require('internal/timers');
<add>
<add>// Schedule the unrefed cases first so that the later case keeps the event loop
<add>// active.
<add>
<add>// Every case in this test relies on implicit sorting within either Node's or
<add>// libuv's timers storage data structures.
<add>
<add>// unref()'d timer
<add>{
<add> let called = false;
<add> const timer = setTimeout(common.mustCall(() => {
<add> called = true;
<add> }), 1);
<add> timer.unref();
<add>
<add> // This relies on implicit timer handle sorting within libuv.
<add>
<add> setTimeout(common.mustCall(() => {
<add> strictEqual(called, false, 'unref()\'d timer returned before check');
<add> }), 1);
<add>
<add> timer[refreshFnSymbol]();
<add>}
<add>
<add>// unref pooled timer
<add>{
<add> let called = false;
<add> const timer = setUnrefTimeout(common.mustCall(() => {
<add> called = true;
<add> }), 1);
<add>
<add> setUnrefTimeout(common.mustCall(() => {
<add> strictEqual(called, false, 'unref pooled timer returned before check');
<add> }), 1);
<add>
<add> timer[refreshFnSymbol]();
<add>}
<add>
<add>// regular timer
<add>{
<add> let called = false;
<add> const timer = setTimeout(common.mustCall(() => {
<add> called = true;
<add> }), 1);
<add>
<add> setTimeout(common.mustCall(() => {
<add> strictEqual(called, false, 'pooled timer returned before check');
<add> }), 1);
<add>
<add> timer[refreshFnSymbol]();
<add>} | 5 |
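A sketch of what the new symbol enables (internal API, so user code would need `--expose-internals`; the delay is illustrative): callers restart an existing `Timeout` in place instead of re-queueing it through `timers._unrefActive()`.

```js
const { refreshFnSymbol } = require('internal/timers');

const timer = setTimeout(() => console.log('fired'), 1000);

// Restart the 1000 ms countdown from "now" without allocating a new timer.
timer[refreshFnSymbol]();
```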
Go | Go | use bitmask for conflict checking | 883be4893049aa97cb874d239cf6b06325fc74f2 | <ide><path>daemon/image_delete.go
<ide> import (
<ide> "github.com/docker/engine-api/types"
<ide> )
<ide>
<add>type conflictType int
<add>
<add>const (
<add> conflictDependentChild conflictType = (1 << iota)
<add> conflictRunningContainer
<add> conflictActiveReference
<add> conflictStoppedContainer
<add> conflictHard = conflictDependentChild | conflictRunningContainer
<add> conflictSoft = conflictActiveReference | conflictStoppedContainer
<add>)
<add>
<ide> // ImageDelete deletes the image referenced by the given imageRef from this
<ide> // daemon. The given imageRef can be an image ID, ID prefix, or a repository
<ide> // reference (with an optional tag or digest, defaulting to the tag name
<ide> func (daemon *Daemon) ImageDelete(imageRef string, force, prune bool) ([]types.I
<ide> // remove that reference.
<ide> // FIXME: Is this the behavior we want?
<ide> if len(repoRefs) == 1 {
<del> if conflict := daemon.checkImageDeleteConflict(imgID, force, true); conflict != nil {
<add> c := conflictHard
<add> if !force {
<add> c |= conflictSoft &^ conflictActiveReference
<add> }
<add> if conflict := daemon.checkImageDeleteConflict(imgID, c); conflict != nil {
<ide> return nil, conflict
<ide> }
<ide>
<ide> func (idc *imageDeleteConflict) Error() string {
<ide> func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDelete, force, prune, quiet bool) error {
<ide> // First, determine if this image has any conflicts. Ignore soft conflicts
<ide> // if force is true.
<del> if conflict := daemon.checkImageDeleteConflict(imgID, force, false); conflict != nil {
<add> c := conflictHard
<add> if !force {
<add> c |= conflictSoft
<add> }
<add> if conflict := daemon.checkImageDeleteConflict(imgID, c); conflict != nil {
<ide> if quiet && (!daemon.imageIsDangling(imgID) || conflict.used) {
<ide> // Ignore conflicts UNLESS the image is "dangling" or not being used in
<ide> // which case we want the user to know.
<ide> func (daemon *Daemon) imageDeleteHelper(imgID image.ID, records *[]types.ImageDe
<ide> // using the image. A soft conflict is any tags/digest referencing the given
<ide> // image or any stopped container using the image. The mask argument selects
<ide> // which of these conflict conditions are checked.
<del>func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, ignoreSoftConflicts bool, ignoreRefConflict bool) *imageDeleteConflict {
<del> // Check for hard conflicts first.
<del> if conflict := daemon.checkImageDeleteHardConflict(imgID); conflict != nil {
<del> return conflict
<del> }
<del>
<del> // Then check for soft conflicts.
<del> if ignoreSoftConflicts {
<del> // Don't bother checking for soft conflicts.
<del> return nil
<del> }
<del>
<del> return daemon.checkImageDeleteSoftConflict(imgID, ignoreRefConflict)
<del>}
<del>
<del>func (daemon *Daemon) checkImageDeleteHardConflict(imgID image.ID) *imageDeleteConflict {
<add>func (daemon *Daemon) checkImageDeleteConflict(imgID image.ID, mask conflictType) *imageDeleteConflict {
<ide> // Check if the image has any descendent images.
<del> if len(daemon.imageStore.Children(imgID)) > 0 {
<add> if mask&conflictDependentChild != 0 && len(daemon.imageStore.Children(imgID)) > 0 {
<ide> return &imageDeleteConflict{
<ide> hard: true,
<ide> imgID: imgID,
<ide> message: "image has dependent child images",
<ide> }
<ide> }
<ide>
<del> // Check if any running container is using the image.
<del> for _, container := range daemon.List() {
<del> if !container.IsRunning() {
<del> // Skip this until we check for soft conflicts later.
<del> continue
<del> }
<add> if mask&conflictRunningContainer != 0 {
<add> // Check if any running container is using the image.
<add> for _, container := range daemon.List() {
<add> if !container.IsRunning() {
<add> // Skip this until we check for soft conflicts later.
<add> continue
<add> }
<ide>
<del> if container.ImageID == imgID {
<del> return &imageDeleteConflict{
<del> imgID: imgID,
<del> hard: true,
<del> used: true,
<del> message: fmt.Sprintf("image is being used by running container %s", stringid.TruncateID(container.ID)),
<add> if container.ImageID == imgID {
<add> return &imageDeleteConflict{
<add> imgID: imgID,
<add> hard: true,
<add> used: true,
<add> message: fmt.Sprintf("image is being used by running container %s", stringid.TruncateID(container.ID)),
<add> }
<ide> }
<ide> }
<ide> }
<ide>
<del> return nil
<del>}
<del>
<del>func (daemon *Daemon) checkImageDeleteSoftConflict(imgID image.ID, ignoreRefConflict bool) *imageDeleteConflict {
<ide> // Check if any repository tags/digest reference this image.
<del> if !ignoreRefConflict && len(daemon.referenceStore.References(imgID)) > 0 {
<add> if mask&conflictActiveReference != 0 && len(daemon.referenceStore.References(imgID)) > 0 {
<ide> return &imageDeleteConflict{
<ide> imgID: imgID,
<ide> message: "image is referenced in one or more repositories",
<ide> }
<ide> }
<ide>
<del> // Check if any stopped containers reference this image.
<del> for _, container := range daemon.List() {
<del> if container.IsRunning() {
<del> // Skip this as it was checked above in hard conflict conditions.
<del> continue
<del> }
<add> if mask&conflictStoppedContainer != 0 {
<add> // Check if any stopped containers reference this image.
<add> for _, container := range daemon.List() {
<add> if container.IsRunning() {
<add> // Skip this as it was checked above in hard conflict conditions.
<add> continue
<add> }
<ide>
<del> if container.ImageID == imgID {
<del> return &imageDeleteConflict{
<del> imgID: imgID,
<del> used: true,
<del> message: fmt.Sprintf("image is being used by stopped container %s", stringid.TruncateID(container.ID)),
<add> if container.ImageID == imgID {
<add> return &imageDeleteConflict{
<add> imgID: imgID,
<add> used: true,
<add> message: fmt.Sprintf("image is being used by stopped container %s", stringid.TruncateID(container.ID)),
<add> }
<ide> }
<ide> }
<ide> } | 1 |
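To illustrate how the mask composes (constant values come from the patch above; `force` and `keepRefs` are hypothetical flags, not daemon code):

```go
c := conflictHard // dependent children + running containers
if !force {
	c |= conflictSoft // also refuse active references and stopped containers
}
if keepRefs {
	c &^= conflictActiveReference // bit-clear, as in the single-reference path above
}
conflict := daemon.checkImageDeleteConflict(imgID, c)
```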
Go | Go | remove unused "testutil" package | 47af9f625d3f9e94729ec5aee4abcce813914d07 | <ide><path>pkg/testutil/pkg.go
<del>package testutil | 1 |
PHP | PHP | add assertions for | b4323b32ab8267493d5a45e046a071c210d38f56 | <ide><path>tests/TestCase/ORM/Behavior/TranslateBehaviorTest.php
<ide> public function testFindSingleLocaleBelongstoLoadInto()
<ide> $entity = $table->get(1);
<ide> $result = $table->loadInto($entity, ['Authors']);
<ide> $this->assertSame($entity, $result);
<add> $this->assertNotEmpty($entity->author);
<add> $this->assertNotEmpty($entity->author->name);
<ide>
<ide> $expected = $table->get(1, ['contain' => ['Authors']]);
<ide> $this->assertEquals($expected, $result);
<add> $this->assertNotEmpty($entity->author);
<add> $this->assertNotEmpty($entity->author->name);
<ide> }
<ide>
<ide> /** | 1 |
Ruby | Ruby | implement inferred cvs dependency | 1f190890fd60f9e66aeaf71c4c42473334cc514a | <ide><path>Library/Homebrew/dependency_collector.rb
<ide> def resource_dep(spec, tags)
<ide> Dependency.new("fossil", tags)
<ide> when strategy <= BazaarDownloadStrategy
<ide> Dependency.new("bazaar", tags)
<add> when strategy <= CVSDownloadStrategy
<add> Dependency.new("cvs", tags) if MacOS.version >= :mavericks
<ide> when strategy < AbstractDownloadStrategy
<ide> # allow unknown strategies to pass through
<ide> else
<ide><path>Library/Homebrew/download_strategy.rb
<ide> def checkout_submodules(dst)
<ide> end
<ide>
<ide> class CVSDownloadStrategy < VCSDownloadStrategy
<add> def cvspath
<add> @path ||= %W[
<add> /usr/bin/cvs
<add> #{HOMEBREW_PREFIX}/bin/cvs
<add> #{HOMEBREW_PREFIX}/opt/cvs/bin/cvs
<add> #{which("cvs")}
<add> ].find { |p| File.executable? p }
<add> end
<add>
<ide> def cache_tag; "cvs" end
<ide>
<ide> def fetch
<ide> def fetch
<ide>
<ide> unless @clone.exist?
<ide> HOMEBREW_CACHE.cd do
<del> safe_system '/usr/bin/cvs', '-d', url, 'login'
<del> safe_system '/usr/bin/cvs', '-d', url, 'checkout', '-d', cache_filename("cvs"), mod
<add> safe_system cvspath, '-d', url, 'login'
<add> safe_system cvspath, '-d', url, 'checkout', '-d', cache_filename("cvs"), mod
<ide> end
<ide> else
<ide> puts "Updating #{@clone}"
<del> @clone.cd { safe_system '/usr/bin/cvs', 'up' }
<add> @clone.cd { safe_system cvspath, 'up' }
<ide> end
<ide> end
<ide> | 2 |
Javascript | Javascript | add mustcall to net-connect-buffer test | 73a686f1bbacbdb40d552388d22a815340ae012a | <ide><path>test/parallel/test-net-connect-buffer.js
<ide> const tcp = net.Server(common.mustCall((s) => {
<ide> buf += d;
<ide> });
<ide>
<del> s.on('end', function() {
<add> s.on('end', common.mustCall(function() {
<ide> console.error('SERVER: end', buf);
<ide> assert.strictEqual(buf, "L'État, c'est moi");
<ide> s.end();
<del> });
<add> }));
<ide> }));
<ide>
<ide> tcp.listen(0, common.mustCall(function() { | 1 |
Mixed | Javascript | expose async_wrap providers | d10085bcb1f4c5695a54a532c8aa23d5391fb627 | <ide><path>doc/api/async_hooks.md
<ide> const server = net.createServer((conn) => {
<ide> Promise contexts may not get valid `triggerAsyncId`s by default. See
<ide> the section on [promise execution tracking][].
<ide>
<add>### `async_hooks.asyncWrapProviders`
<add>
<add><!-- YAML
<add>added: REPLACEME
<add>-->
<add>
<add>* Returns: A map of provider types to the corresponding numeric id.
<add> This map contains all the event types that might be emitted by the `async_hooks` `init()` hook.
<add>
<add>This feature replaces the deprecated usage of `process.binding('async_wrap').Providers`.
<add>See: [DEP0111][]
<add>
<ide> ## Promise execution tracking
<ide>
<ide> By default, promise executions are not assigned `asyncId`s due to the relatively
<ide> The documentation for this class has moved [`AsyncResource`][].
<ide>
<ide> The documentation for this class has moved [`AsyncLocalStorage`][].
<ide>
<add>[DEP0111]: deprecations.md#dep0111-processbinding
<ide> [Hook Callbacks]: #hook-callbacks
<ide> [PromiseHooks]: https://docs.google.com/document/d/1rda3yKGHimKIhg5YeoAmCOtyURgsbTH_qaYR79FELlk/edit
<ide> [`AsyncLocalStorage`]: async_context.md#class-asynclocalstorage
<ide><path>lib/async_hooks.js
<ide> const {
<ide> ObjectIs,
<ide> ReflectApply,
<ide> Symbol,
<add> ObjectFreeze,
<ide> } = primordials;
<ide>
<ide> const {
<ide> const internal_async_hooks = require('internal/async_hooks');
<ide> // resource gets gced.
<ide> const { registerDestroyHook } = internal_async_hooks;
<ide> const {
<add> asyncWrap,
<ide> executionAsyncId,
<ide> triggerAsyncId,
<ide> // Private API
<ide> module.exports = {
<ide> executionAsyncId,
<ide> triggerAsyncId,
<ide> executionAsyncResource,
<add> asyncWrapProviders: ObjectFreeze({ __proto__: null, ...asyncWrap.Providers }),
<ide> // Embedder API
<ide> AsyncResource,
<ide> };
<ide><path>lib/internal/async_hooks.js
<ide> module.exports = {
<ide> after: emitAfterNative,
<ide> destroy: emitDestroyNative,
<ide> promise_resolve: emitPromiseResolveNative
<add> },
<add> asyncWrap: {
<add> Providers: async_wrap.Providers,
<ide> }
<ide> };
<ide><path>test/async-hooks/test-async-wrap-providers.js
<add>// Flags: --expose-internals
<add>'use strict';
<add>
<add>const common = require('../common');
<add>const { internalBinding } = require('internal/test/binding');
<add>const providers = internalBinding('async_wrap').Providers;
<add>const assert = require('assert');
<add>const { asyncWrapProviders } = require('async_hooks');
<add>
<add>assert.ok(typeof asyncWrapProviders === 'object');
<add>assert.deepStrictEqual(asyncWrapProviders, { __proto__: null, ...providers });
<add>
<add>const providerKeys = Object.keys(asyncWrapProviders);
<add>assert.throws(() => {
<add> asyncWrapProviders[providerKeys[0]] = 'another value';
<add>}, common.expectsError({
<add> name: 'TypeError',
<add>}), 'should not allow modifying asyncWrap providers'); | 4
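A hedged migration sketch for code that previously read `process.binding('async_wrap').Providers` (see DEP0111); the reverse-lookup helper is illustrative and not part of the patch:

```js
const { asyncWrapProviders } = require('async_hooks');

// Map the numeric `type` passed to an init() hook back to its provider name.
function providerName(type) {
  return Object.keys(asyncWrapProviders)
    .find((name) => asyncWrapProviders[name] === type);
}

console.log(providerName(asyncWrapProviders.TCPWRAP)); // 'TCPWRAP'
```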
Javascript | Javascript | simplify regex a bit, update comments | 796539369ef1a808ccd6b250ec2b2a327cf7bd37 | <ide><path>src/animation/PropertyBinding.js
<ide> Object.assign( PropertyBinding, {
<ide> // be matched to parse the rest of the track name.
<ide> var directoryRe = /((?:[\w-]+[\/:])*)/;
<ide>
<del> // Target node. May contain word characters (a-zA-Z0-9_) and '.' or '-'
<del> // characters, but must begin and end with a word character.
<del> var nodeRe = /(\w(?:[\w-\.]*\w)?)?/;
<add> // Target node. May contain word characters (a-zA-Z0-9_) and '.' or '-'.
<add> var nodeRe = /([\w-\.]+)?/;
<ide>
<ide> // Object on target node, and accessor. May contain only word characters,
<ide> // and must be a member of the supportedObjectNames whitelist. Accessor may
<del> // contain any non-bracket characters.
<add> // contain any character except closing bracket.
<ide> var objectRe = /(?:\.([\w-]+)(?:\[(.+)\])?)?/;
<ide>
<ide> // Property and accessor. May contain only word characters. Accessor may
<ide> Object.assign( PropertyBinding, {
<ide>
<ide> var results = {
<ide> // directoryName: matches[ 1 ], // (tschw) currently unused
<del> nodeName: matches[ 2 ], // allowed to be null, specified root node.
<add> nodeName: matches[ 2 ],
<ide> objectName: matches[ 3 ],
<ide> objectIndex: matches[ 4 ],
<del> propertyName: matches[ 5 ],
<del> propertyIndex: matches[ 6 ] // allowed to be null, specifies that the whole property is set.
<add> propertyName: matches[ 5 ], // required
<add> propertyIndex: matches[ 6 ]
<ide> };
<ide>
<ide> var lastDot = results.nodeName && results.nodeName.lastIndexOf( '.' );
<ide> Object.assign( PropertyBinding, {
<ide>
<ide> if ( supportedObjectNames.indexOf( objectName ) !== -1 ) {
<ide>
<del> results.nodeName = results.nodeName.substring( 0, lastDot )
<add> results.nodeName = results.nodeName.substring( 0, lastDot );
<ide> results.objectName = objectName;
<ide>
<ide> }
<ide><path>test/unit/src/animation/PropertyBinding.js
<ide> * @author TristanVALCKE / https://github.com/TristanVALCKE
<ide> */
<ide>
<del>QUnit.module( 'BufferAttribute' );
<add>QUnit.module( 'PropertyBinding' );
<ide>
<ide> QUnit.test( 'parseTrackName' , function( assert ) {
<ide> | 2 |
Go | Go | add support for xz compression | 3c5d2e4661e76e83e35637561153578ddd87f249 | <ide><path>archive.go
<ide> const (
<ide> Uncompressed Compression = iota
<ide> Bzip2
<ide> Gzip
<add> Xz
<ide> )
<ide>
<ide> func (compression *Compression) Flag() string {
<ide> func (compression *Compression) Flag() string {
<ide> return "j"
<ide> case Gzip:
<ide> return "z"
<add> case Xz:
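<add>		// tar's GNU-specific "J" flag filters the archive through xz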
<add> return "J"
<ide> }
<ide> return ""
<ide> } | 1 |
Text | Text | add starkwang to collaborators | 35d8ef5a85cac82e5acf8c8f50b320d62682cb68 | <ide><path>README.md
<ide> For more information about the governance of the Node.js project, see
<ide> **Roman Reiss** <[email protected]>
<ide> * [srl295](https://github.com/srl295) -
<ide> **Steven R Loomis** <[email protected]>
<add>* [starkwang](https://github.com/starkwang) -
<add>**Weijia Wang** <[email protected]>
<ide> * [stefanmb](https://github.com/stefanmb) -
<ide> **Stefan Budeanu** <[email protected]>
<ide> * [targos](https://github.com/targos) - | 1 |
Text | Text | add upgrade note on execution_date -> run_id | 1ee65bb8ae9f98233208ebb7918cf9aa1e01823e | <ide><path>UPDATING.md
<ide> Before updating to this 2.2 release you will have to manually resolve any incons
<ide>
<ide> As part of this change the `clean_tis_without_dagrun_interval` config option under `[scheduler]` section has been removed and has no effect.
<ide>
<add>### TaskInstance and TaskReschedule now define `run_id` instead of `execution_date`
<add>
<add>As part of the TaskInstance-DagRun relation change, the `execution_date` columns on TaskInstance and TaskReschedule have been removed from the database and replaced by [association proxy](https://docs.sqlalchemy.org/en/13/orm/extensions/associationproxy.html) fields at the ORM level. If you access Airflow’s metadatabase directly, you should rewrite the implementation to use the `run_id` columns instead.
<add>
<add>Note that Airflow’s metadatabase definition, on both the database and ORM levels, is considered an implementation detail without strict backward compatibility guarantees.
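<add>
<add>For example, an ORM query that previously filtered on `TaskInstance.execution_date` can join through `DagRun` instead (illustrative sketch; `session` is an active SQLAlchemy session and `target_date` is a datetime you supply):
<add>
<add>```python
<add>from airflow.models import DagRun, TaskInstance
<add>
<add># execution_date now lives on DagRun; TaskInstance carries dag_id + run_id
<add>tis = (
<add>    session.query(TaskInstance)
<add>    .join(
<add>        DagRun,
<add>        (DagRun.dag_id == TaskInstance.dag_id)
<add>        & (DagRun.run_id == TaskInstance.run_id),
<add>    )
<add>    .filter(DagRun.execution_date == target_date)
<add>    .all()
<add>)
<add>```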
<add>
<ide> ### DaskExecutor - Dask Worker Resources and queues
<ide>
<ide> If dask workers are not started with complementary resources to match the specified queues, it will now result in an `AirflowException`, whereas before it would have just ignored the `queue` argument. | 1 |
PHP | PHP | add absolute path to cake/vendors | f799d1c056ec818f43de6e7685e5245b4362131f | <ide><path>lib/Cake/Core/App.php
<ide> protected static function _packageFormat() {
<ide> '%s' . 'locale' . DS
<ide> ),
<ide> 'Vendor' => array(
<del> '%s' . 'Vendor' . DS, VENDORS
<add> '%s' . 'Vendor' . DS,
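<add>			// also check the vendors dir at the root of the CakePHP install (two levels above CAKE)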
<add> dirname(dirname(CAKE)) . DS . 'vendors' . DS,
<add> VENDORS
<ide> ),
<ide> 'Plugin' => array(
<ide> APP . 'Plugin' . DS, | 1 |
Ruby | Ruby | add tests for relation#only | e3246ef5b50d48aa86ee41a161b42611a936f4fb | <ide><path>activerecord/test/cases/relations_test.rb
<ide> def test_except
<ide> assert_equal Post.all, all_posts.all
<ide> end
<ide>
<add> def test_only
<add> relation = Post.where(:author_id => 1).order('id ASC').limit(1)
<add> assert_equal [posts(:welcome)], relation.all
<add>
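<add>    # only(:where) keeps just the where clause, dropping the order and limit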
<add> author_posts = relation.only(:where)
<add> assert_equal Post.where(:author_id => 1).all, author_posts.all
<add>
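<add>    # conversely, only(:limit) discards the where clause, so any post qualifies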
<add> all_posts = relation.only(:limit)
<add> assert_equal Post.limit(1).all.first, all_posts.first
<add> end
<add>
<ide> def test_anonymous_extension
<ide> relation = Post.where(:author_id => 1).order('id ASC').extending do
<ide> def author | 1 |
Text | Text | add twitter follow button | a3e552156943ad4131aa07bd546f445e4f852e4d | <ide><path>docs/index.md
<ide> For support please see the [REST framework discussion group][group], try the `#
<ide>
<ide> [Paid support is available][paid-support] from [DabApps][dabapps], and can include work on REST framework core, or support with building your REST framework API. Please [contact DabApps][contact-dabapps] if you'd like to discuss commercial support options.
<ide>
<add>For updates on REST framework development, you may also want to follow [the author][twitter] on Twitter.
<add>
<add><a style="padding-top: 10px" href="https://twitter.com/_tomchristie" class="twitter-follow-button" data-show-count="false">Follow @_tomchristie</a>
<add><script>!function(d,s,id){var js,fjs=d.getElementsByTagName(s)[0];if(!d.getElementById(id)){js=d.createElement(s);js.id=id;js.src="//platform.twitter.com/widgets.js";fjs.parentNode.insertBefore(js,fjs);}}(document,"script","twitter-wjs");</script>
<add>
<ide> ## License
<ide>
<ide> Copyright (c) 2011-2013, Tom Christie
<ide> OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
<ide> [paid-support]: http://dabapps.com/services/build/api-development/
<ide> [dabapps]: http://dabapps.com
<ide> [contact-dabapps]: http://dabapps.com/contact/
<add>[twitter]: https://twitter.com/_tomchristie | 1 |
Javascript | Javascript | move netinfo js module to fb internal | 5a30c2a2052ba76e88dbf71b5b5c92966591bf26 | <ide><path>Libraries/Network/NetInfo.js
<del>/**
<del> * Copyright (c) Facebook, Inc. and its affiliates.
<del> *
<del> * This source code is licensed under the MIT license found in the
<del> * LICENSE file in the root directory of this source tree.
<del> *
<del> * @format
<del> * @flow
<del> */
<del>
<del>'use strict';
<del>
<del>const NativeEventEmitter = require('../EventEmitter/NativeEventEmitter');
<del>const NativeModules = require('../BatchedBridge/NativeModules');
<del>const Platform = require('../Utilities/Platform');
<del>const RCTNetInfo = NativeModules.NetInfo;
<del>
<del>const NetInfoEventEmitter = new NativeEventEmitter(RCTNetInfo);
<del>
<del>const DEVICE_CONNECTIVITY_EVENT = 'networkStatusDidChange';
<del>
<del>type ChangeEventName = $Keys<{
<del> connectionChange: string,
<del> change: string,
<del>}>;
<del>
<del>type ReachabilityStateIOS = $Keys<{
<del> cell: string,
<del> none: string,
<del> unknown: string,
<del> wifi: string,
<del>}>;
<del>
<del>type ConnectivityStateAndroid = $Keys<{
<del> NONE: string,
<del> MOBILE: string,
<del> WIFI: string,
<del> MOBILE_MMS: string,
<del> MOBILE_SUPL: string,
<del> MOBILE_DUN: string,
<del> MOBILE_HIPRI: string,
<del> WIMAX: string,
<del> BLUETOOTH: string,
<del> DUMMY: string,
<del> ETHERNET: string,
<del> MOBILE_FOTA: string,
<del> MOBILE_IMS: string,
<del> MOBILE_CBS: string,
<del> WIFI_P2P: string,
<del> MOBILE_IA: string,
<del> MOBILE_EMERGENCY: string,
<del> PROXY: string,
<del> VPN: string,
<del> UNKNOWN: string,
<del>}>;
<del>
<del>const _subscriptions = new Map();
<del>
<del>let _isConnectedDeprecated;
<del>if (Platform.OS === 'ios') {
<del> _isConnectedDeprecated = function(
<del> reachability: ReachabilityStateIOS,
<del> ): boolean {
<del> return reachability !== 'none' && reachability !== 'unknown';
<del> };
<del>} else if (Platform.OS === 'android') {
<del> _isConnectedDeprecated = function(
<del> connectionType: ConnectivityStateAndroid,
<del> ): boolean {
<del> return connectionType !== 'NONE' && connectionType !== 'UNKNOWN';
<del> };
<del>}
<del>
<del>function _isConnected(connection) {
<del> return connection.type !== 'none' && connection.type !== 'unknown';
<del>}
<del>
<del>const _isConnectedSubscriptions = new Map();
<del>
<del>/**
<del> * NetInfo exposes info about online/offline status.
<del> *
<del> * See https://facebook.github.io/react-native/docs/netinfo.html
<del> */
<del>const NetInfo = {
<del> /**
<del> * Adds an event handler.
<del> *
<del> * See https://facebook.github.io/react-native/docs/netinfo.html#addeventlistener
<del> */
<del> addEventListener(
<del> eventName: ChangeEventName,
<del> handler: Function,
<del> ): {remove: () => void} {
<del> let listener;
<del> if (eventName === 'connectionChange') {
<del> listener = NetInfoEventEmitter.addListener(
<del> DEVICE_CONNECTIVITY_EVENT,
<del> appStateData => {
<del> handler({
<del> type: appStateData.connectionType,
<del> effectiveType: appStateData.effectiveConnectionType,
<del> });
<del> },
<del> );
<del> } else if (eventName === 'change') {
<del> console.warn(
<del> 'NetInfo\'s "change" event is deprecated. Listen to the "connectionChange" event instead.',
<del> );
<del>
<del> listener = NetInfoEventEmitter.addListener(
<del> DEVICE_CONNECTIVITY_EVENT,
<del> appStateData => {
<del> handler(appStateData.network_info);
<del> },
<del> );
<del> } else {
<del> console.warn('Trying to subscribe to unknown event: "' + eventName + '"');
<del> return {
<del> remove: () => {},
<del> };
<del> }
<del>
<del> _subscriptions.set(handler, listener);
<del> return {
<del> remove: () => NetInfo.removeEventListener(eventName, handler),
<del> };
<del> },
<del>
<del> /**
<del> * Removes the listener for network status changes.
<del> *
<del> * See https://facebook.github.io/react-native/docs/netinfo.html#removeeventlistener
<del> */
<del> removeEventListener(eventName: ChangeEventName, handler: Function): void {
<del> const listener = _subscriptions.get(handler);
<del> if (!listener) {
<del> return;
<del> }
<del> listener.remove();
<del> _subscriptions.delete(handler);
<del> },
<del>
<del> /**
<del> * This function is deprecated. Use `getConnectionInfo` instead.
<del> * Returns a promise that resolves with one of the deprecated connectivity
<del> * types:
<del> *
<del> * The following connectivity types are deprecated. They're used by the
<del> * deprecated APIs `fetch` and the `change` event.
<del> *
<del> * iOS connectivity types (deprecated):
<del> * - `none` - device is offline
<del> * - `wifi` - device is online and connected via wifi, or is the iOS simulator
<del> * - `cell` - device is connected via Edge, 3G, WiMax, or LTE
<del> * - `unknown` - error case and the network status is unknown
<del> *
<del> * Android connectivity types (deprecated).
<del> * - `NONE` - device is offline
<del> * - `BLUETOOTH` - The Bluetooth data connection.
<del> * - `DUMMY` - Dummy data connection.
<del> * - `ETHERNET` - The Ethernet data connection.
<del> * - `MOBILE` - The Mobile data connection.
<del> * - `MOBILE_DUN` - A DUN-specific Mobile data connection.
<del> * - `MOBILE_HIPRI` - A High Priority Mobile data connection.
<del> * - `MOBILE_MMS` - An MMS-specific Mobile data connection.
<del> * - `MOBILE_SUPL` - A SUPL-specific Mobile data connection.
<del> * - `VPN` - A virtual network using one or more native bearers. Requires
<del> * API Level 21
<del> * - `WIFI` - The WIFI data connection.
<del> * - `WIMAX` - The WiMAX data connection.
<del> * - `UNKNOWN` - Unknown data connection.
<del> *
<del> * The rest of the connectivity types are hidden by the Android API, but can
<del> * be used if necessary.
<del> */
<del> fetch(): Promise<any> {
<del> console.warn(
<del> 'NetInfo.fetch() is deprecated. Use NetInfo.getConnectionInfo() instead.',
<del> );
<del> return RCTNetInfo.getCurrentConnectivity().then(resp => resp.network_info);
<del> },
<del>
<del> /**
<del> * See https://facebook.github.io/react-native/docs/netinfo.html#getconnectioninfo
<del> */
<del> getConnectionInfo(): Promise<any> {
<del> return RCTNetInfo.getCurrentConnectivity().then(resp => {
<del> return {
<del> type: resp.connectionType,
<del> effectiveType: resp.effectiveConnectionType,
<del> };
<del> });
<del> },
<del>
<del> /**
<del> * An object with the same methods as above but the listener receives a
<del> * boolean which represents the internet connectivity.
<del> *
<del> * See https://facebook.github.io/react-native/docs/netinfo.html#isconnected
<del> */
<del> isConnected: {
<del> addEventListener(
<del> eventName: ChangeEventName,
<del> handler: Function,
<del> ): {remove: () => void} {
<del> const listener = connection => {
<del> if (eventName === 'change') {
<del> handler(_isConnectedDeprecated(connection));
<del> } else if (eventName === 'connectionChange') {
<del> handler(_isConnected(connection));
<del> }
<del> };
<del> _isConnectedSubscriptions.set(handler, listener);
<del> NetInfo.addEventListener(eventName, listener);
<del> return {
<del> remove: () =>
<del> NetInfo.isConnected.removeEventListener(eventName, handler),
<del> };
<del> },
<del>
<del> removeEventListener(eventName: ChangeEventName, handler: Function): void {
<del> const listener = _isConnectedSubscriptions.get(handler);
<del> NetInfo.removeEventListener(eventName, listener);
<del> _isConnectedSubscriptions.delete(handler);
<del> },
<del>
<del> fetch(): Promise<any> {
<del> return NetInfo.getConnectionInfo().then(_isConnected);
<del> },
<del> },
<del>
<del> isConnectionExpensive(): Promise<boolean> {
<del> return Platform.OS === 'android'
<del> ? RCTNetInfo.isConnectionMetered()
<del> : Promise.reject(new Error('Currently not supported on iOS'));
<del> },
<del>};
<del>
<del>module.exports = NetInfo;
<ide><path>Libraries/react-native/react-native-implementation.js
<ide> module.exports = {
<ide> get NativeEventEmitter() {
<ide> return require('NativeEventEmitter');
<ide> },
<del> get NetInfo() {
<del> warnOnce(
<del> 'netinfo-moved',
<del> 'NetInfo has been extracted from react-native core and will be removed in a future release. ' +
<del> "It can now be installed and imported from '@react-native-community/netinfo' instead of 'react-native'. " +
<del> 'See https://github.com/react-native-community/react-native-netinfo',
<del> );
<del> return require('NetInfo');
<del> },
<ide> get PanResponder() {
<ide> return require('PanResponder');
<ide> },
<ide><path>RNTester/js/NetInfoExample.js
<del>/**
<del> * Copyright (c) Facebook, Inc. and its affiliates.
<del> *
<del> * This source code is licensed under the MIT license found in the
<del> * LICENSE file in the root directory of this source tree.
<del> *
<del> * @format
<del> * @flow
<del> */
<del>
<del>'use strict';
<del>
<del>const React = require('react');
<del>const {NetInfo, Text, View, TouchableWithoutFeedback} = require('react-native');
<del>
<del>class ConnectionInfoSubscription extends React.Component<{}, $FlowFixMeState> {
<del> state = {
<del> connectionInfoHistory: [],
<del> };
<del>
<del> componentDidMount() {
<del> NetInfo.addEventListener('change', this._handleConnectionInfoChange);
<del> }
<del>
<del> componentWillUnmount() {
<del> NetInfo.removeEventListener('change', this._handleConnectionInfoChange);
<del> }
<del>
<del> _handleConnectionInfoChange = connectionInfo => {
<del> const connectionInfoHistory = this.state.connectionInfoHistory.slice();
<del> connectionInfoHistory.push(connectionInfo);
<del> this.setState({
<del> connectionInfoHistory,
<del> });
<del> };
<del>
<del> render() {
<del> return (
<del> <View>
<del> <Text>{JSON.stringify(this.state.connectionInfoHistory)}</Text>
<del> </View>
<del> );
<del> }
<del>}
<del>
<del>class ConnectionInfoCurrent extends React.Component<{}, $FlowFixMeState> {
<del> state = {
<del> connectionInfo: null,
<del> };
<del>
<del> componentDidMount() {
<del> NetInfo.addEventListener('change', this._handleConnectionInfoChange);
<del> NetInfo.fetch().done(connectionInfo => {
<del> this.setState({connectionInfo});
<del> });
<del> }
<del>
<del> componentWillUnmount() {
<del> NetInfo.removeEventListener('change', this._handleConnectionInfoChange);
<del> }
<del>
<del> _handleConnectionInfoChange = connectionInfo => {
<del> this.setState({
<del> connectionInfo,
<del> });
<del> };
<del>
<del> render() {
<del> return (
<del> <View>
<del> <Text>{this.state.connectionInfo}</Text>
<del> </View>
<del> );
<del> }
<del>}
<del>
<del>class IsConnected extends React.Component<{}, $FlowFixMeState> {
<del> state = {
<del> isConnected: null,
<del> };
<del>
<del> componentDidMount() {
<del> NetInfo.isConnected.addEventListener(
<del> 'change',
<del> this._handleConnectivityChange,
<del> );
<del> NetInfo.isConnected.fetch().done(isConnected => {
<del> this.setState({isConnected});
<del> });
<del> }
<del>
<del> componentWillUnmount() {
<del> NetInfo.isConnected.removeEventListener(
<del> 'change',
<del> this._handleConnectivityChange,
<del> );
<del> }
<del>
<del> _handleConnectivityChange = isConnected => {
<del> this.setState({
<del> isConnected,
<del> });
<del> };
<del>
<del> render() {
<del> return (
<del> <View>
<del> <Text>{this.state.isConnected ? 'Online' : 'Offline'}</Text>
<del> </View>
<del> );
<del> }
<del>}
<del>
<del>class IsConnectionExpensive extends React.Component<{}, $FlowFixMeState> {
<del> state = {
<del> isConnectionExpensive: (null: ?boolean),
<del> };
<del>
<del> _checkIfExpensive = () => {
<del> NetInfo.isConnectionExpensive().then(isConnectionExpensive => {
<del> this.setState({isConnectionExpensive});
<del> });
<del> };
<del>
<del> render() {
<del> return (
<del> <View>
<del> <TouchableWithoutFeedback onPress={this._checkIfExpensive}>
<del> <View>
<del> <Text>
<del> Click to see if connection is expensive:
<del> {this.state.isConnectionExpensive === true
<del> ? 'Expensive'
<del> : this.state.isConnectionExpensive === false
<del> ? 'Not expensive'
<del> : 'Unknown'}
<del> </Text>
<del> </View>
<del> </TouchableWithoutFeedback>
<del> </View>
<del> );
<del> }
<del>}
<del>
<del>exports.title = 'NetInfo';
<del>exports.description = 'Monitor network status';
<del>exports.examples = [
<del> {
<del> title: 'NetInfo.isConnected',
<del> description: 'Asynchronously load and observe connectivity',
<del> render(): React.Element<any> {
<del> return <IsConnected />;
<del> },
<del> },
<del> {
<del> title: 'NetInfo.update',
<del> description: 'Asynchronously load and observe connectionInfo',
<del> render(): React.Element<any> {
<del> return <ConnectionInfoCurrent />;
<del> },
<del> },
<del> {
<del> title: 'NetInfo.updateHistory',
<del> description: 'Observed updates to connectionInfo',
<del> render(): React.Element<any> {
<del> return <ConnectionInfoSubscription />;
<del> },
<del> },
<del> {
<del> platform: 'android',
<del> title: 'NetInfo.isConnectionExpensive (Android)',
<del> description: 'Asynchronously check isConnectionExpensive',
<del> render(): React.Element<any> {
<del> return <IsConnectionExpensive />;
<del> },
<del> },
<del>];
<ide><path>RNTester/js/RNTesterList.android.js
<ide> const APIExamples: Array<RNTesterExample> = [
<ide> key: 'NativeAnimationsExample',
<ide> module: require('./NativeAnimationsExample'),
<ide> },
<del> {
<del> key: 'NetInfoExample',
<del> module: require('./NetInfoExample'),
<del> },
<ide> {
<ide> key: 'OrientationChangeExample',
<ide> module: require('./OrientationChangeExample'),
<ide><path>RNTester/js/RNTesterList.ios.js
<ide> const APIExamples: Array<RNTesterExample> = [
<ide> module: require('./NativeAnimationsExample'),
<ide> supportsTVOS: true,
<ide> },
<del> {
<del> key: 'NetInfoExample',
<del> module: require('./NetInfoExample'),
<del> supportsTVOS: true,
<del> },
<ide> {
<ide> key: 'OrientationChangeExample',
<ide> module: require('./OrientationChangeExample'), | 5 |
Java | Java | remove "preparereact" call from the android bridge | 80f9e1f7de407ea417cecb04b3ba20b05696b478 | <ide><path>ReactAndroid/src/main/java/com/facebook/react/ReactInstanceManager.java
<ide> public interface ReactInstanceEventListener {
<ide> private final @Nullable JSBundleLoader mBundleLoader;
<ide> private final @Nullable String mJSMainModulePath; /* path to JS bundle root on packager server */
<ide> private final List<ReactPackage> mPackages;
<del> private final List<CatalystInstanceImpl.PendingJSCall> mInitFunctions;
<ide> private final DevSupportManager mDevSupportManager;
<ide> private final boolean mUseDeveloperSupport;
<ide> private final @Nullable NotThreadSafeBridgeIdleDebugListener mBridgeIdleDebugListener;
<ide> public static ReactInstanceManagerBuilder builder() {
<ide> mBundleLoader = bundleLoader;
<ide> mJSMainModulePath = jsMainModulePath;
<ide> mPackages = new ArrayList<>();
<del> mInitFunctions = new ArrayList<>();
<ide> mUseDeveloperSupport = useDeveloperSupport;
<ide> mDevSupportManager =
<ide> DevSupportManagerFactory.create(
<ide> public void registerAdditionalPackages(List<ReactPackage> packages) {
<ide> catalystInstance.extendNativeModules(nativeModuleRegistry);
<ide> }
<ide>
<del> /**
<del> * If the JavaScript bundle for this app requires initialization as part of bridge start up,
<del> * register a function using its @param module and @param method and optional arguments.
<del> */
<del> public void registerInitFunction(String module, String method, @Nullable NativeArray arguments) {
<del> CatalystInstanceImpl.PendingJSCall init =
<del> new CatalystInstanceImpl.PendingJSCall(module, method, arguments);
<del> synchronized (this) {
<del> mInitFunctions.add(init);
<del> }
<del> ReactContext context = getCurrentReactContext();
<del> CatalystInstance catalystInstance = context == null ? null : context.getCatalystInstance();
<del> if (catalystInstance == null) {
<del> return;
<del> } else {
<del> // CatalystInstance is only visible after running jsBundle, so these will be put on the native
<del> // JS queue
<del> // TODO T20546472 remove cast when catalystInstance and InstanceImpl are renamed/merged
<del> ((CatalystInstanceImpl) catalystInstance).callFunction(init);
<del> }
<del> }
<del>
<ide> /**
<ide> * Recreate the react application and context. This should be called if configuration has changed
<ide> * or the developer has requested the app to be reloaded. It should only be called after an
<ide> private ReactApplicationContext createReactContext(
<ide> }
<ide> ReactMarker.logMarker(ReactMarkerConstants.PRE_RUN_JS_BUNDLE_START);
<ide> catalystInstance.runJSBundle();
<del> // Transitions functions in the minitFunctions list to catalystInstance, to run after the bundle
<del> // TODO T20546472
<del> if (!mInitFunctions.isEmpty()) {
<del> for (CatalystInstanceImpl.PendingJSCall function : mInitFunctions) {
<del> ((CatalystInstanceImpl) catalystInstance).callFunction(function);
<del> }
<del> }
<ide> reactContext.initializeWithInstance(catalystInstance);
<ide>
<ide> return reactContext; | 1 |
Go | Go | move network aspect of links into driver as a job | 167403988dca37060edf37abbdd28360ee0d5d4a | <ide><path>container.go
<ide> func (container *Container) Start() (err error) {
<ide> }
<ide>
<ide> if len(children) > 0 {
<del> panic("todo crosbymichael")
<del> /*
<del> linking is specific to iptables and the bridge we need to move this to a job
<del>
<del> container.activeLinks = make(map[string]*Link, len(children))
<del>
<del> // If we encounter an error make sure that we rollback any network
<del> // config and ip table changes
<del> rollback := func() {
<del> for _, link := range container.activeLinks {
<del> link.Disable()
<del> }
<del> container.activeLinks = nil
<del> }
<add> container.activeLinks = make(map[string]*Link, len(children))
<ide>
<del> for p, child := range children {
<del> link, err := NewLink(container, child, p, runtime.networkManager.bridgeIface)
<del> if err != nil {
<del> rollback()
<del> return err
<del> }
<del>
<del> container.activeLinks[link.Alias()] = link
<del> if err := link.Enable(); err != nil {
<del> rollback()
<del> return err
<del> }
<del>
<del> for _, envVar := range link.ToEnv() {
<del> env = append(env, envVar)
<del> }
<del> }
<del> */
<add> // If we encounter an error make sure that we rollback any network
<add>		// If we encounter an error, make sure that we roll back any network
<add>		// config and iptables changes
<add> for _, link := range container.activeLinks {
<add> link.Disable()
<add> }
<add> container.activeLinks = nil
<add> }
<add>
<add> for p, child := range children {
<add> link, err := NewLink(container, child, p, runtime.eng)
<add> if err != nil {
<add> rollback()
<add> return err
<add> }
<add>
<add> container.activeLinks[link.Alias()] = link
<add> if err := link.Enable(); err != nil {
<add> rollback()
<add> return err
<add> }
<add>
<add> for _, envVar := range link.ToEnv() {
<add> env = append(env, envVar)
<add> }
<add> }
<ide> }
<ide>
<ide> for _, elem := range container.Config.Env {
<ide><path>links.go
<ide> package docker
<ide>
<ide> import (
<ide> "fmt"
<del> "github.com/dotcloud/docker/pkg/iptables"
<add> "github.com/dotcloud/docker/engine"
<ide> "path"
<ide> "strings"
<ide> )
<ide> type Link struct {
<ide> ParentIP string
<ide> ChildIP string
<ide> Name string
<del> BridgeInterface string
<ide> ChildEnvironment []string
<ide> Ports []Port
<ide> IsEnabled bool
<add> eng *engine.Engine
<ide> }
<ide>
<del>func NewLink(parent, child *Container, name, bridgeInterface string) (*Link, error) {
<add>func NewLink(parent, child *Container, name string, eng *engine.Engine) (*Link, error) {
<ide> if parent.ID == child.ID {
<ide> return nil, fmt.Errorf("Cannot link to self: %s == %s", parent.ID, child.ID)
<ide> }
<ide> func NewLink(parent, child *Container, name, bridgeInterface string) (*Link, err
<ide> }
<ide>
<ide> l := &Link{
<del> BridgeInterface: bridgeInterface,
<ide> Name: name,
<ide> ChildIP: child.NetworkSettings.IPAddress,
<ide> ParentIP: parent.NetworkSettings.IPAddress,
<ide> ChildEnvironment: child.Config.Env,
<ide> Ports: ports,
<add> eng: eng,
<ide> }
<ide> return l, nil
<ide>
<ide> func (l *Link) Disable() {
<ide> }
<ide>
<ide> func (l *Link) toggle(action string, ignoreErrors bool) error {
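<add>	// Hand the iptables work to the network driver by running a "link" job.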
<del> for _, p := range l.Ports {
<del> if output, err := iptables.Raw(action, "FORWARD",
<del> "-i", l.BridgeInterface, "-o", l.BridgeInterface,
<del> "-p", p.Proto(),
<del> "-s", l.ParentIP,
<del> "--dport", p.Port(),
<del> "-d", l.ChildIP,
<del> "-j", "ACCEPT"); !ignoreErrors && err != nil {
<del> return err
<del> } else if len(output) != 0 {
<del> return fmt.Errorf("Error toggle iptables forward: %s", output)
<del> }
<add> job := l.eng.Job("link", action)
<add>
<add> job.Setenv("ParentIP", l.ParentIP)
<add> job.Setenv("ChildIP", l.ChildIP)
<add> job.SetenvBool("IgnoreErrors", ignoreErrors)
<add>
<add> out := make([]string, len(l.Ports))
<add> for i, p := range l.Ports {
<add> out[i] = fmt.Sprintf("%s/%s", p.Port(), p.Proto())
<add> }
<add> job.SetenvList("Ports", out)
<add>
<add> if err := job.Run(); err != nil {
<add>		// TODO: get output from job
<add> return err
<ide> }
<ide> return nil
<ide> }
<ide><path>networkdriver/lxc/driver.go
<ide> import (
<ide> "io/ioutil"
<ide> "log"
<ide> "net"
<add> "strings"
<ide> "syscall"
<ide> "unsafe"
<ide> )
<ide> func InitDriver(job *engine.Job) engine.Status {
<ide> "allocate_interface": Allocate,
<ide> "release_interface": Release,
<ide> "allocate_port": AllocatePort,
<add> "link": LinkContainers,
<ide> } {
<ide> if err := job.Eng.Register(name, f); err != nil {
<ide> job.Error(err)
<ide> func AllocatePort(job *engine.Job) engine.Status {
<ide>
<ide> return engine.StatusOK
<ide> }
<add>
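<add>// LinkContainers adds or removes the iptables FORWARD rules that let a parent
<add>// container reach its linked child's exposed ports. The job's first argument
<add>// is the iptables action to apply (e.g. "-I" to enable, "-D" to disable).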
<add>func LinkContainers(job *engine.Job) engine.Status {
<add> var (
<add> action = job.Args[0]
<add> childIP = job.Getenv("ChildIP")
<add> parentIP = job.Getenv("ParentIP")
<add> ignoreErrors = job.GetenvBool("IgnoreErrors")
<add> ports = job.GetenvList("Ports")
<add> )
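<add>	// each ports entry arrives encoded as "port/proto" (see Link.toggle)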
<add> split := func(p string) (string, string) {
<add> parts := strings.Split(p, "/")
<add> return parts[0], parts[1]
<add> }
<add>
<add> for _, p := range ports {
<add> port, proto := split(p)
<add> if output, err := iptables.Raw(action, "FORWARD",
<add> "-i", bridgeIface, "-o", bridgeIface,
<add> "-p", proto,
<add> "-s", parentIP,
<add> "--dport", port,
<add> "-d", childIP,
<add> "-j", "ACCEPT"); !ignoreErrors && err != nil {
<add> job.Error(err)
<add> return engine.StatusErr
<add> } else if len(output) != 0 {
<add>			job.Errorf("Error toggling iptables forward: %s", output)
<add> return engine.StatusErr
<add> }
<add> }
<add> return engine.StatusOK
<add>} | 3 |
Python | Python | introduce various linting fixes to `numpy.typing` | afd99d7c76b4caeee419d09f1d2bed661226a59e | <ide><path>numpy/typing/__init__.py
<ide> Timedelta64
<ide> ~~~~~~~~~~~
<ide>
<del>The `~numpy.timedelta64` class is not considered a subclass of `~numpy.signedinteger`,
<del>the former only inheriting from `~numpy.generic` while static type checking.
<add>The `~numpy.timedelta64` class is not considered a subclass of
<add>`~numpy.signedinteger`, the former only inheriting from `~numpy.generic`
<add>while static type checking.
<ide>
<ide> 0D arrays
<ide> ~~~~~~~~~
<ide> # NOTE: The API section will be appended with additional entries
<ide> # further down in this file
<ide>
<add>from __future__ import annotations
<add>
<ide> from numpy import ufunc
<del>from typing import TYPE_CHECKING, List, final
<add>from typing import TYPE_CHECKING, final
<ide>
<ide> if not TYPE_CHECKING:
<ide> __all__ = ["ArrayLike", "DTypeLike", "NBitBase", "NDArray"]
<ide> #
<ide> # Declare to mypy that `__all__` is a list of strings without assigning
<ide> # an explicit value
<del> __all__: List[str]
<del> __path__: List[str]
<add> __all__: list[str]
<add> __path__: list[str]
<ide>
<ide>
<ide> @final # Disallow the creation of arbitrary `NBitBase` subclasses
<ide> class NBitBase:
<ide> """
<del> An object representing `numpy.number` precision during static type checking.
<add> A type representing `numpy.number` precision during static type checking.
<ide>
<ide> Used exclusively for the purpose static type checking, `NBitBase`
<ide> represents the base of a hierarchical set of subclasses.
<ide> class NBitBase:
<ide>
<ide> Examples
<ide> --------
<del> Below is a typical usage example: `NBitBase` is herein used for annotating a
<del> function that takes a float and integer of arbitrary precision as arguments
<del> and returns a new float of whichever precision is largest
<add> Below is a typical usage example: `NBitBase` is herein used for annotating
<add> a function that takes a float and integer of arbitrary precision
<add> as arguments and returns a new float of whichever precision is largest
<ide> (*e.g.* ``np.float16 + np.int64 -> np.float64``).
<ide>
<ide> .. code-block:: python
<ide> def __init_subclass__(cls) -> None:
<ide>
<ide>
<ide> # Silence errors about subclassing a `@final`-decorated class
<del>class _256Bit(NBitBase): ... # type: ignore[misc]
<del>class _128Bit(_256Bit): ... # type: ignore[misc]
<del>class _96Bit(_128Bit): ... # type: ignore[misc]
<del>class _80Bit(_96Bit): ... # type: ignore[misc]
<del>class _64Bit(_80Bit): ... # type: ignore[misc]
<del>class _32Bit(_64Bit): ... # type: ignore[misc]
<del>class _16Bit(_32Bit): ... # type: ignore[misc]
<del>class _8Bit(_16Bit): ... # type: ignore[misc]
<add>class _256Bit(NBitBase): # type: ignore[misc]
<add> pass
<add>
<add>class _128Bit(_256Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _96Bit(_128Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _80Bit(_96Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _64Bit(_80Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _32Bit(_64Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _16Bit(_32Bit): # type: ignore[misc]
<add> pass
<add>
<add>class _8Bit(_16Bit): # type: ignore[misc]
<add> pass
<ide>
<ide>
<ide> from ._nested_sequence import _NestedSequence
<ide> class _8Bit(_16Bit): ... # type: ignore[misc]
<ide> _GUFunc_Nin2_Nout1 = ufunc
<ide>
<ide> # Clean up the namespace
<del>del TYPE_CHECKING, final, List, ufunc
<add>del TYPE_CHECKING, final, ufunc
<ide>
<ide> if __doc__ is not None:
<ide> from ._add_docstring import _docstrings
<ide><path>numpy/typing/_add_docstring.py
<ide> def _parse_docstrings() -> str:
<ide> new_lines.append("")
<ide> else:
<ide> new_lines.append(f"{indent}{line}")
<del> s = "\n".join(new_lines)
<ide>
<del> # Done.
<del> type_list_ret.append(f""".. data:: {name}\n :value: {value}\n {s}""")
<add> s = "\n".join(new_lines)
<add> s_block = f""".. data:: {name}\n :value: {value}\n {s}"""
<add> type_list_ret.append(s_block)
<ide> return "\n".join(type_list_ret)
<ide>
<ide>
<ide> add_newdoc('ArrayLike', 'typing.Union[...]',
<ide> """
<del> A `~typing.Union` representing objects that can be coerced into an `~numpy.ndarray`.
<add> A `~typing.Union` representing objects that can be coerced
<add> into an `~numpy.ndarray`.
<ide>
<ide> Among others this includes the likes of:
<ide>
<ide> def _parse_docstrings() -> str:
<ide>
<ide> add_newdoc('DTypeLike', 'typing.Union[...]',
<ide> """
<del> A `~typing.Union` representing objects that can be coerced into a `~numpy.dtype`.
<add> A `~typing.Union` representing objects that can be coerced
<add> into a `~numpy.dtype`.
<ide>
<ide> Among others this includes the likes of:
<ide>
<ide> def _parse_docstrings() -> str:
<ide> See Also
<ide> --------
<ide> :ref:`Specifying and constructing data types <arrays.dtypes.constructing>`
<del> A comprehensive overview of all objects that can be coerced into data types.
<add> A comprehensive overview of all objects that can be coerced
<add> into data types.
<ide>
<ide> Examples
<ide> --------
<ide><path>numpy/typing/_dtype_like.py
<del>from typing import Any, List, Sequence, Tuple, Union, Type, TypeVar, Protocol, TypedDict
<add>from typing import (
<add> Any,
<add> List,
<add> Sequence,
<add> Tuple,
<add> Union,
<add> Type,
<add> TypeVar,
<add> Protocol,
<add> TypedDict,
<add>)
<ide>
<ide> import numpy as np
<ide>
<ide> class _DTypeDictBase(TypedDict):
<ide> names: Sequence[str]
<ide> formats: Sequence[_DTypeLikeNested]
<ide>
<add>
<ide> # Mandatory + optional keys
<ide> class _DTypeDict(_DTypeDictBase, total=False):
<add> # Only `str` elements are usable as indexing aliases,
<add> # but `titles` can in principle accept any object
<ide> offsets: Sequence[int]
<del> titles: Sequence[Any] # Only `str` elements are usable as indexing aliases, but all objects are legal
<add> titles: Sequence[Any]
<ide> itemsize: int
<ide> aligned: bool
<ide>
<add>
<ide> # A protocol for anything with the dtype attribute
<ide> class _SupportsDType(Protocol[_DType_co]):
<ide> @property
<ide> def dtype(self) -> _DType_co: ...
<ide>
<add>
<ide> # Would create a dtype[np.void]
<ide> _VoidDTypeLike = Union[
<ide> # (flexible_dtype, itemsize)
<ide><path>numpy/typing/_extended_precision.py
<del>"""A module with platform-specific extended precision `numpy.number` subclasses.
<add>"""A module with platform-specific extended precision
<add>`numpy.number` subclasses.
<ide>
<ide> The subclasses are defined here (instead of ``__init__.pyi``) such
<ide> that they can be imported conditionally via the numpy's mypy plugin.
<ide><path>numpy/typing/mypy_plugin.py
<ide>
<ide> from __future__ import annotations
<ide>
<del>import typing as t
<add>from collections.abc import Iterable
<add>from typing import Final, TYPE_CHECKING, Callable
<ide>
<ide> import numpy as np
<ide>
<ide> from mypy.nodes import MypyFile, ImportFrom, Statement
<ide> from mypy.build import PRI_MED
<ide>
<del> _HookFunc = t.Callable[[AnalyzeTypeContext], Type]
<add> _HookFunc = Callable[[AnalyzeTypeContext], Type]
<ide> MYPY_EX: None | ModuleNotFoundError = None
<ide> except ModuleNotFoundError as ex:
<ide> MYPY_EX = ex
<ide>
<del>__all__: t.List[str] = []
<add>__all__: list[str] = []
<ide>
<ide>
<del>def _get_precision_dict() -> t.Dict[str, str]:
<add>def _get_precision_dict() -> dict[str, str]:
<ide> names = [
<ide> ("_NBitByte", np.byte),
<ide> ("_NBitShort", np.short),
<ide> def _get_precision_dict() -> t.Dict[str, str]:
<ide> return ret
<ide>
<ide>
<del>def _get_extended_precision_list() -> t.List[str]:
<add>def _get_extended_precision_list() -> list[str]:
<ide> extended_types = [np.ulonglong, np.longlong, np.longdouble, np.clongdouble]
<ide> extended_names = {
<ide> "uint128",
<ide> def _get_c_intp_name() -> str:
<ide>
<ide> #: A dictionary mapping type-aliases in `numpy.typing._nbit` to
<ide> #: concrete `numpy.typing.NBitBase` subclasses.
<del>_PRECISION_DICT: t.Final = _get_precision_dict()
<add>_PRECISION_DICT: Final = _get_precision_dict()
<ide>
<ide> #: A list with the names of all extended precision `np.number` subclasses.
<del>_EXTENDED_PRECISION_LIST: t.Final = _get_extended_precision_list()
<add>_EXTENDED_PRECISION_LIST: Final = _get_extended_precision_list()
<ide>
<ide> #: The name of the ctypes quivalent of `np.intp`
<del>_C_INTP: t.Final = _get_c_intp_name()
<add>_C_INTP: Final = _get_c_intp_name()
<ide>
<ide>
<ide> def _hook(ctx: AnalyzeTypeContext) -> Type:
<ide> def _hook(ctx: AnalyzeTypeContext) -> Type:
<ide> return api.named_type(name_new)
<ide>
<ide>
<del>if t.TYPE_CHECKING or MYPY_EX is None:
<del> def _index(iterable: t.Iterable[Statement], id: str) -> int:
<add>if TYPE_CHECKING or MYPY_EX is None:
<add> def _index(iterable: Iterable[Statement], id: str) -> int:
<ide> """Identify the first ``ImportFrom`` instance the specified `id`."""
<ide> for i, value in enumerate(iterable):
<ide> if getattr(value, "id", None) == id:
<ide> def _index(iterable: t.Iterable[Statement], id: str) -> int:
<ide> def _override_imports(
<ide> file: MypyFile,
<ide> module: str,
<del> imports: t.List[t.Tuple[str, t.Optional[str]]],
<add> imports: list[tuple[str, None | str]],
<ide> ) -> None:
<ide> """Override the first `module`-based import with new `imports`."""
<ide> # Construct a new `from module import y` statement
<ide> import_obj = ImportFrom(module, 0, names=imports)
<ide> import_obj.is_top_level = True
<ide>
<ide> # Replace the first `module`-based import statement with `import_obj`
<del> for lst in [file.defs, file.imports]: # type: t.List[Statement]
<add> for lst in [file.defs, file.imports]: # type: list[Statement]
<ide> i = _index(lst, module)
<ide> lst[i] = import_obj
<ide>
<ide> class _NumpyPlugin(Plugin):
<ide> """A mypy plugin for handling versus numpy-specific typing tasks."""
<ide>
<ide> def get_type_analyze_hook(self, fullname: str) -> None | _HookFunc:
<del> """Set the precision of platform-specific `numpy.number` subclasses.
<add> """Set the precision of platform-specific `numpy.number`
<add> subclasses.
<ide>
<ide> For example: `numpy.int_`, `numpy.longlong` and `numpy.longdouble`.
<ide> """
<ide> if fullname in _PRECISION_DICT:
<ide> return _hook
<ide> return None
<ide>
<del> def get_additional_deps(self, file: MypyFile) -> t.List[t.Tuple[int, str, int]]:
<add> def get_additional_deps(
<add> self, file: MypyFile
<add> ) -> list[tuple[int, str, int]]:
<ide> """Handle all import-based overrides.
<ide>
<ide> * Import platform-specific extended-precision `numpy.number`
<ide> def get_additional_deps(self, file: MypyFile) -> t.List[t.Tuple[int, str, int]]:
<ide> )
<ide> return ret
<ide>
<del> def plugin(version: str) -> t.Type[_NumpyPlugin]:
<add> def plugin(version: str) -> type[_NumpyPlugin]:
<ide> """An entry-point for mypy."""
<ide> return _NumpyPlugin
<ide>
<ide> else:
<del> def plugin(version: str) -> t.Type[_NumpyPlugin]:
<add> def plugin(version: str) -> type[_NumpyPlugin]:
<ide> """An entry-point for mypy."""
<ide> raise MYPY_EX
<ide><path>numpy/typing/tests/test_runtime.py
<ide> from __future__ import annotations
<ide>
<ide> import sys
<del>from typing import get_type_hints, Union, Tuple, NamedTuple, get_args, get_origin
<add>from typing import get_type_hints, Union, NamedTuple, get_args, get_origin
<ide>
<ide> import pytest
<ide> import numpy as np
<ide>
<ide> class TypeTup(NamedTuple):
<ide> typ: type
<del> args: Tuple[type, ...]
<add> args: tuple[type, ...]
<ide> origin: None | type
<ide>
<ide>
<ide><path>numpy/typing/tests/test_typing.py
<add>from __future__ import annotations
<add>
<ide> import importlib.util
<ide> import itertools
<ide> import os
<ide> import re
<ide> import shutil
<ide> from collections import defaultdict
<del>from typing import Optional, IO, Dict, List
<add>from collections.abc import Iterator
<add>from typing import IO, TYPE_CHECKING
<ide>
<ide> import pytest
<ide> import numpy as np
<ide> else:
<ide> NO_MYPY = False
<ide>
<add>if TYPE_CHECKING:
<add> # We need this as annotation, but it's located in a private namespace.
<add> # As a compromise, do *not* import it during runtime
<add> from _pytest.mark.structures import ParameterSet
<ide>
<ide> DATA_DIR = os.path.join(os.path.dirname(__file__), "data")
<ide> PASS_DIR = os.path.join(DATA_DIR, "pass")
<ide>
<ide> #: A dictionary with file names as keys and lists of the mypy stdout as values.
<ide> #: To-be populated by `run_mypy`.
<del>OUTPUT_MYPY: Dict[str, List[str]] = {}
<add>OUTPUT_MYPY: dict[str, list[str]] = {}
<ide>
<ide>
<ide> def _key_func(key: str) -> str:
<ide> def run_mypy() -> None:
<ide>
<ide> NUMPY_TYPING_TEST_CLEAR_CACHE=0 pytest numpy/typing/tests
<ide> """
<del> if os.path.isdir(CACHE_DIR) and bool(os.environ.get("NUMPY_TYPING_TEST_CLEAR_CACHE", True)):
<add> if (
<add> os.path.isdir(CACHE_DIR)
<add> and bool(os.environ.get("NUMPY_TYPING_TEST_CLEAR_CACHE", True))
<add> ):
<ide> shutil.rmtree(CACHE_DIR)
<ide>
<ide> for directory in (PASS_DIR, REVEAL_DIR, FAIL_DIR, MISC_DIR):
<ide> def run_mypy() -> None:
<ide> OUTPUT_MYPY.update((k, list(v)) for k, v in iterator if k)
<ide>
<ide>
<del>def get_test_cases(directory):
<add>def get_test_cases(directory: str) -> Iterator[ParameterSet]:
<ide> for root, _, files in os.walk(directory):
<ide> for fname in files:
<ide> if os.path.splitext(fname)[-1] == ".py":
<ide> def get_test_cases(directory):
<ide> @pytest.mark.slow
<ide> @pytest.mark.skipif(NO_MYPY, reason="Mypy is not installed")
<ide> @pytest.mark.parametrize("path", get_test_cases(PASS_DIR))
<del>def test_success(path):
<add>def test_success(path) -> None:
<ide> # Alias `OUTPUT_MYPY` so that it appears in the local namespace
<ide> output_mypy = OUTPUT_MYPY
<ide> if path in output_mypy:
<ide> def test_success(path):
<ide> @pytest.mark.slow
<ide> @pytest.mark.skipif(NO_MYPY, reason="Mypy is not installed")
<ide> @pytest.mark.parametrize("path", get_test_cases(FAIL_DIR))
<del>def test_fail(path):
<add>def test_fail(path: str) -> None:
<ide> __tracebackhide__ = True
<ide>
<ide> with open(path) as fin:
<ide> def test_fail(path):
<ide>
<ide> for i, line in enumerate(lines):
<ide> lineno = i + 1
<del> if line.startswith('#') or (" E:" not in line and lineno not in errors):
<add> if (
<add> line.startswith('#')
<add> or (" E:" not in line and lineno not in errors)
<add> ):
<ide> continue
<ide>
<ide> target_line = lines[lineno - 1]
<ide> def test_fail(path):
<ide> """
<ide>
<ide>
<del>def _test_fail(path: str, error: str, expected_error: Optional[str], lineno: int) -> None:
<add>def _test_fail(
<add> path: str,
<add> error: str,
<add> expected_error: None | str,
<add> lineno: int,
<add>) -> None:
<ide> if expected_error is None:
<ide> raise AssertionError(_FAIL_MSG1.format(lineno, error))
<ide> elif error not in expected_error:
<ide> raise AssertionError(_FAIL_MSG2.format(lineno, expected_error, error))
<ide>
<ide>
<del>def _construct_format_dict():
<add>def _construct_format_dict() -> dict[str, str]:
<ide> dct = {k.split(".")[-1]: v.replace("numpy", "numpy.typing") for
<ide> k, v in _PRECISION_DICT.items()}
<ide>
<ide> def _construct_format_dict():
<ide> "float96": "numpy.floating[numpy.typing._96Bit]",
<ide> "float128": "numpy.floating[numpy.typing._128Bit]",
<ide> "float256": "numpy.floating[numpy.typing._256Bit]",
<del> "complex64": "numpy.complexfloating[numpy.typing._32Bit, numpy.typing._32Bit]",
<del> "complex128": "numpy.complexfloating[numpy.typing._64Bit, numpy.typing._64Bit]",
<del> "complex160": "numpy.complexfloating[numpy.typing._80Bit, numpy.typing._80Bit]",
<del> "complex192": "numpy.complexfloating[numpy.typing._96Bit, numpy.typing._96Bit]",
<del> "complex256": "numpy.complexfloating[numpy.typing._128Bit, numpy.typing._128Bit]",
<del> "complex512": "numpy.complexfloating[numpy.typing._256Bit, numpy.typing._256Bit]",
<add> "complex64": ("numpy.complexfloating"
<add> "[numpy.typing._32Bit, numpy.typing._32Bit]"),
<add> "complex128": ("numpy.complexfloating"
<add> "[numpy.typing._64Bit, numpy.typing._64Bit]"),
<add> "complex160": ("numpy.complexfloating"
<add> "[numpy.typing._80Bit, numpy.typing._80Bit]"),
<add> "complex192": ("numpy.complexfloating"
<add> "[numpy.typing._96Bit, numpy.typing._96Bit]"),
<add> "complex256": ("numpy.complexfloating"
<add> "[numpy.typing._128Bit, numpy.typing._128Bit]"),
<add> "complex512": ("numpy.complexfloating"
<add> "[numpy.typing._256Bit, numpy.typing._256Bit]"),
<ide>
<ide> "ubyte": f"numpy.unsignedinteger[{dct['_NBitByte']}]",
<ide> "ushort": f"numpy.unsignedinteger[{dct['_NBitShort']}]",
<ide> def _construct_format_dict():
<ide> "single": f"numpy.floating[{dct['_NBitSingle']}]",
<ide> "double": f"numpy.floating[{dct['_NBitDouble']}]",
<ide> "longdouble": f"numpy.floating[{dct['_NBitLongDouble']}]",
<del> "csingle": f"numpy.complexfloating[{dct['_NBitSingle']}, {dct['_NBitSingle']}]",
<del> "cdouble": f"numpy.complexfloating[{dct['_NBitDouble']}, {dct['_NBitDouble']}]",
<del> "clongdouble": f"numpy.complexfloating[{dct['_NBitLongDouble']}, {dct['_NBitLongDouble']}]",
<add> "csingle": ("numpy.complexfloating"
<add> f"[{dct['_NBitSingle']}, {dct['_NBitSingle']}]"),
<add> "cdouble": ("numpy.complexfloating"
<add> f"[{dct['_NBitDouble']}, {dct['_NBitDouble']}]"),
<add> "clongdouble": (
<add> "numpy.complexfloating"
<add> f"[{dct['_NBitLongDouble']}, {dct['_NBitLongDouble']}]"
<add> ),
<ide>
<ide> # numpy.typing
<ide> "_NBitInt": dct['_NBitInt'],
<ide> def _construct_format_dict():
<ide>
<ide> #: A dictionary with all supported format keys (as keys)
<ide> #: and matching values
<del>FORMAT_DICT: Dict[str, str] = _construct_format_dict()
<add>FORMAT_DICT: dict[str, str] = _construct_format_dict()
<ide>
<ide>
<del>def _parse_reveals(file: IO[str]) -> List[str]:
<del> """Extract and parse all ``" # E: "`` comments from the passed file-like object.
<add>def _parse_reveals(file: IO[str]) -> list[str]:
<add> """Extract and parse all ``" # E: "`` comments from the passed
<add> file-like object.
<ide>
<del> All format keys will be substituted for their respective value from `FORMAT_DICT`,
<del> *e.g.* ``"{float64}"`` becomes ``"numpy.floating[numpy.typing._64Bit]"``.
<add> All format keys will be substituted for their respective value
<add> from `FORMAT_DICT`, *e.g.* ``"{float64}"`` becomes
<add> ``"numpy.floating[numpy.typing._64Bit]"``.
<ide> """
<ide> string = file.read().replace("*", "")
<ide>
<ide> def _parse_reveals(file: IO[str]) -> List[str]:
<ide> # there is the risk of accidentally grabbing dictionaries and sets
<ide> key_set = set(re.findall(r"\{(.*?)\}", comments))
<ide> kwargs = {
<del> k: FORMAT_DICT.get(k, f"<UNRECOGNIZED FORMAT KEY {k!r}>") for k in key_set
<add> k: FORMAT_DICT.get(k, f"<UNRECOGNIZED FORMAT KEY {k!r}>") for
<add> k in key_set
<ide> }
<ide> fmt_str = comments.format(**kwargs)
<ide>
<ide> def _parse_reveals(file: IO[str]) -> List[str]:
<ide> @pytest.mark.slow
<ide> @pytest.mark.skipif(NO_MYPY, reason="Mypy is not installed")
<ide> @pytest.mark.parametrize("path", get_test_cases(REVEAL_DIR))
<del>def test_reveal(path):
<add>def test_reveal(path: str) -> None:
<add> """Validate that mypy correctly infers the return-types of
<add> the expressions in `path`.
<add> """
<ide> __tracebackhide__ = True
<ide>
<ide> with open(path) as fin:
<ide> def test_reveal(path):
<ide> """
<ide>
<ide>
<del>def _test_reveal(path: str, reveal: str, expected_reveal: str, lineno: int) -> None:
<add>def _test_reveal(
<add> path: str,
<add> reveal: str,
<add> expected_reveal: str,
<add> lineno: int,
<add>) -> None:
<add> """Error-reporting helper function for `test_reveal`."""
<ide> if reveal not in expected_reveal:
<del> raise AssertionError(_REVEAL_MSG.format(lineno, expected_reveal, reveal))
<add> raise AssertionError(
<add> _REVEAL_MSG.format(lineno, expected_reveal, reveal)
<add> )
<ide>
<ide>
<ide> @pytest.mark.slow
<ide> @pytest.mark.skipif(NO_MYPY, reason="Mypy is not installed")
<ide> @pytest.mark.parametrize("path", get_test_cases(PASS_DIR))
<del>def test_code_runs(path):
<add>def test_code_runs(path: str) -> None:
<add>    """Validate that the code in `path` runs properly during runtime."""
<ide> path_without_extension, _ = os.path.splitext(path)
<ide> dirname, filename = path.split(os.sep)[-2:]
<del> spec = importlib.util.spec_from_file_location(f"{dirname}.{filename}", path)
<add>
<add> spec = importlib.util.spec_from_file_location(
<add> f"{dirname}.{filename}", path
<add> )
<add> assert spec is not None
<add> assert spec.loader is not None
<add>
<ide> test_module = importlib.util.module_from_spec(spec)
<ide> spec.loader.exec_module(test_module)
<ide> | 7 |
Text | Text | add missing yaml tag in `esm.md` | 18b833455ceb6eb3d319d47ca017eb4158cc3d9d | <ide><path>doc/api/esm.md
<ide> would provide the exports interface for the instantiation of `module.wasm`.
<ide>
<ide> ## Top-level `await`
<ide>
<del><!--
<add><!-- YAML
<ide> added: v14.8.0
<ide> -->
<ide> | 1 |
Ruby | Ruby | push options inside the scope object | 20ec0d2aaee0878f819c1d2278e078b1039aee3e | <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb
<ide> module ActionDispatch
<ide> module Routing
<ide> class Mapper
<ide> URL_OPTIONS = [:protocol, :subdomain, :domain, :host, :port]
<del> SCOPE_OPTIONS = [:path, :shallow_path, :as, :shallow_prefix, :module,
<del> :controller, :action, :path_names, :constraints,
<del> :shallow, :blocks, :defaults, :options]
<ide>
<ide> class Constraints < Endpoint #:nodoc:
<ide> attr_reader :app, :constraints
<ide> def scope(*args)
<ide> block, options[:constraints] = options[:constraints], {}
<ide> end
<ide>
<del> SCOPE_OPTIONS.each do |option|
<add> @scope.options.each do |option|
<ide> if option == :blocks
<ide> value = block
<ide> elsif option == :options
<ide> def concerns(*args)
<ide> end
<ide>
<ide> class Scope # :nodoc:
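<add>        # All the option keys #scope merges into nested scopes;
<add>        # replaces the old Mapper::SCOPE_OPTIONS constant.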
<add> OPTIONS = [:path, :shallow_path, :as, :shallow_prefix, :module,
<add> :controller, :action, :path_names, :constraints,
<add> :shallow, :blocks, :defaults, :options]
<add>
<ide> attr_reader :parent
<ide>
<ide> def initialize(hash, parent = {})
<ide> @hash = hash
<ide> @parent = parent
<ide> end
<ide>
<add> def options
<add> OPTIONS
<add> end
<add>
<ide> def new(hash)
<ide> self.class.new hash, self
<ide> end | 1 |
Go | Go | fix login/logout with creds store | 0eccc3838e4aac5318e98dcbfbe2100e253462de | <ide><path>api/client/login.go
<ide> import (
<ide> "github.com/docker/docker/cliconfig/credentials"
<ide> flag "github.com/docker/docker/pkg/mflag"
<ide> "github.com/docker/docker/pkg/term"
<del> "github.com/docker/engine-api/client"
<ide> "github.com/docker/engine-api/types"
<ide> )
<ide>
<ide> func (cli *DockerCli) CmdLogin(args ...string) error {
<ide>
<ide> response, err := cli.client.RegistryLogin(authConfig)
<ide> if err != nil {
<del> if client.IsErrUnauthorized(err) {
<del> if err2 := eraseCredentials(cli.configFile, authConfig.ServerAddress); err2 != nil {
<del> fmt.Fprintf(cli.out, "WARNING: could not save credentials: %v\n", err2)
<del> }
<del> }
<ide> return err
<ide> }
<ide>
<ide><path>api/client/logout.go
<ide> func (cli *DockerCli) CmdLogout(args ...string) error {
<ide> serverAddress = cli.electAuthServer()
<ide> }
<ide>
<add>	// Check if we're logged in based on the records in the config file;
<add>	// the entry may lack user/pass because they may live in the creds store.
<ide> if _, ok := cli.configFile.AuthConfigs[serverAddress]; !ok {
<ide> fmt.Fprintf(cli.out, "Not logged in to %s\n", serverAddress)
<ide> return nil
<ide> }
<ide>
<ide> fmt.Fprintf(cli.out, "Remove login credentials for %s\n", serverAddress)
<del> delete(cli.configFile.AuthConfigs, serverAddress)
<del> if err := cli.configFile.Save(); err != nil {
<del> return fmt.Errorf("Failed to save docker config: %v", err)
<add> if err := eraseCredentials(cli.configFile, serverAddress); err != nil {
<add> fmt.Fprintf(cli.out, "WARNING: could not erase credentials: %v\n", err)
<ide> }
<ide>
<ide> return nil
<ide><path>integration-cli/docker_cli_logout_test.go
<add>package main
<add>
<add>import (
<add> "fmt"
<add> "io/ioutil"
<add> "os"
<add> "path/filepath"
<add>
<add> "github.com/docker/docker/pkg/integration/checker"
<add> "github.com/go-check/check"
<add>)
<add>
<add>func (s *DockerRegistryAuthSuite) TestLogoutWithExternalAuth(c *check.C) {
<add> osPath := os.Getenv("PATH")
<add> defer os.Setenv("PATH", osPath)
<add>
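<add>	// Add fixtures/auth to PATH so the "docker-credential-shell-test" helper is discoverable.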
<add> workingDir, err := os.Getwd()
<add> c.Assert(err, checker.IsNil)
<add> absolute, err := filepath.Abs(filepath.Join(workingDir, "fixtures", "auth"))
<add> c.Assert(err, checker.IsNil)
<add> testPath := fmt.Sprintf("%s%c%s", osPath, filepath.ListSeparator, absolute)
<add>
<add> os.Setenv("PATH", testPath)
<add>
<add> repoName := fmt.Sprintf("%v/dockercli/busybox:authtest", privateRegistryURL)
<add>
<add> tmp, err := ioutil.TempDir("", "integration-cli-")
<add> c.Assert(err, checker.IsNil)
<add>
<add> externalAuthConfig := `{ "credsStore": "shell-test" }`
<add>
<add> configPath := filepath.Join(tmp, "config.json")
<add> err = ioutil.WriteFile(configPath, []byte(externalAuthConfig), 0644)
<add> c.Assert(err, checker.IsNil)
<add>
<add> dockerCmd(c, "--config", tmp, "login", "-u", s.reg.username, "-p", s.reg.password, privateRegistryURL)
<add>
<add> b, err := ioutil.ReadFile(configPath)
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(string(b), checker.Not(checker.Contains), "\"auth\":")
<add> c.Assert(string(b), checker.Contains, privateRegistryURL)
<add>
<add> dockerCmd(c, "--config", tmp, "tag", "busybox", repoName)
<add> dockerCmd(c, "--config", tmp, "push", repoName)
<add>
<add> dockerCmd(c, "--config", tmp, "logout", privateRegistryURL)
<add>
<add> b, err = ioutil.ReadFile(configPath)
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(string(b), checker.Not(checker.Contains), privateRegistryURL)
<add>
<add>	// check that we cannot pull anymore
<add> out, _, err := dockerCmdWithError("--config", tmp, "pull", repoName)
<add> c.Assert(err, check.NotNil, check.Commentf(out))
<add>	c.Assert(out, checker.Contains, "Error: image dockercli/busybox not found")
<add>} | 3 |
Text | Text | add note about inline plugins and registration | d6972abd48b4012d38a7b0cc7133aee25ea90112 | <ide><path>docs/docs/developers/plugins.md
<ide> var chart3 = new Chart(ctx, {});
<ide>
<ide> Plugins can also be defined directly in the chart `plugins` config (a.k.a. *inline plugins*):
<ide>
<add>> Note: *inline* plugins are not registered. Some plugins require registration and therefore can't be used *inline*.
<add>
<ide> ```javascript
<ide> var chart = new Chart(ctx, {
<ide> plugins: [{ | 1 |
Javascript | Javascript | recognize shadow dom in attachment checks | 9b77def560212d12fef2d0b9c12aa50727e3e5d7 | <ide><path>src/core/isAttached.js
<add>define( [
<add> "../core",
<add> "../var/documentElement",
<add> "../selector" // jQuery.contains
<add>], function( jQuery, documentElement ) {
<add> "use strict";
<add>
<add> var isAttached = function( elem ) {
<add> return jQuery.contains( elem.ownerDocument, elem );
<add> },
<add> composed = { composed: true };
<add>
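<add>	// `getRootNode( { composed: true } )` crosses shadow DOM boundaries, so an
<add>	// element inside a shadow tree still resolves to its document when attached.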
<add>	// Check attachment across shadow DOM boundaries when possible (gh-3504)
<add>	// Support: iOS 10.0-10.2 only
<add>	// Early iOS 10 versions support `attachShadow` but not `getRootNode`,
<add>	// leading to errors, so feature-detect `getRootNode` itself
<add>	if ( documentElement.getRootNode ) {
<add> isAttached = function( elem ) {
<add> return jQuery.contains( elem.ownerDocument, elem ) ||
<add> elem.getRootNode( composed ) === elem.ownerDocument;
<add> };
<add> }
<add>
<add> return isAttached;
<add>} );
<ide><path>src/css/curCSS.js
<ide> define( [
<ide> "../core",
<del> "../var/isAttached",
<add> "../core/isAttached",
<ide> "./var/rboxStyle",
<ide> "./var/rnumnonpx",
<ide> "./var/getStyles",
<ide><path>src/css/var/isHiddenWithinTree.js
<ide> define( [
<ide> "../../core",
<del> "../../var/isAttached"
<add> "../../core/isAttached"
<ide>
<ide> // css is assumed
<ide> ], function( jQuery, isAttached ) {
<ide><path>src/manipulation.js
<ide> define( [
<ide> "./core",
<del> "./var/isAttached",
<add> "./core/isAttached",
<ide> "./var/concat",
<ide> "./var/isFunction",
<ide> "./var/push",
<ide><path>src/manipulation/buildFragment.js
<ide> define( [
<ide> "../core",
<ide> "../core/toType",
<del> "../var/isAttached",
<add> "../core/isAttached",
<ide> "./var/rtagName",
<ide> "./var/rscriptType",
<ide> "./wrapMap",
<ide><path>src/var/isAttached.js
<del>define( [
<del> "../core",
<del> "../selector" // Get jQuery.contains
<del>], function( jQuery ) {
<del> "use strict";
<del>
<del> return function isAttached( obj ) {
<del> return jQuery.contains( obj.ownerDocument, obj );
<del> };
<del>
<del>} );
<ide><path>test/unit/css.js
<ide> QUnit.test( "show/hide detached nodes", function( assert ) {
<ide> span.remove();
<ide> } );
<ide>
<add>QUnit[ document.body.attachShadow ? "test" : "skip" ]( "show/hide shadow child nodes", function( assert ) {
<add> assert.expect( 28 );
<add> jQuery( "<div id='shadowHost'></div>" ).appendTo( "#qunit-fixture" );
<add> var shadowHost = document.querySelector( "#shadowHost" );
<add> var shadowRoot = shadowHost.attachShadow( { mode: "open" } );
<add> shadowRoot.innerHTML = "" +
<add> "<style>.hidden{display: none;}</style>" +
<add> "<div class='hidden' id='shadowdiv'>" +
<add> " <p class='hidden' id='shadowp'>" +
<add> " <a href='#' class='hidden' id='shadowa'></a>" +
<add> " </p>" +
<add> " <code class='hidden' id='shadowcode'></code>" +
<add> " <pre class='hidden' id='shadowpre'></pre>" +
<add> " <span class='hidden' id='shadowspan'></span>" +
<add> "</div>" +
<add> "<table class='hidden' id='shadowtable'>" +
<add> " <thead class='hidden' id='shadowthead'>" +
<add> " <tr class='hidden' id='shadowtr'>" +
<add> " <th class='hidden' id='shadowth'></th>" +
<add> " </tr>" +
<add> " </thead>" +
<add> " <tbody class='hidden' id='shadowtbody'>" +
<add> " <tr class='hidden'>" +
<add> " <td class='hidden' id='shadowtd'></td>" +
<add> " </tr>" +
<add> " </tbody>" +
<add> "</table>" +
<add> "<ul class='hidden' id='shadowul'>" +
<add> " <li class='hidden' id='shadowli'></li>" +
<add> "</ul>";
<add>
<add> var test = {
<add> "div": "block",
<add> "p": "block",
<add> "a": "inline",
<add> "code": "inline",
<add> "pre": "block",
<add> "span": "inline",
<add> "table": "table",
<add> "thead": "table-header-group",
<add> "tbody": "table-row-group",
<add> "tr": "table-row",
<add> "th": "table-cell",
<add> "td": "table-cell",
<add> "ul": "block",
<add> "li": "list-item"
<add> };
<add>
<add> jQuery.each( test, function( selector, expected ) {
<add> var shadowChild = shadowRoot.querySelector( "#shadow" + selector );
<add> var $shadowChild = jQuery( shadowChild );
<add> assert.strictEqual( $shadowChild.css( "display" ), "none", "is hidden" );
<add> $shadowChild.show();
<add> assert.strictEqual( $shadowChild.css( "display" ), expected, "Show using correct display type for " + selector );
<add> } );
<add>} );
<add>
<ide> QUnit.test( "hide hidden elements (bug #7141)", function( assert ) {
<ide> assert.expect( 3 );
<ide>
<ide> QUnit[ jQuery.find.compile && jQuery.fn.toggle ? "test" : "skip" ]( "detached to
<ide> "cascade-hidden element in detached tree" );
<ide> } );
<ide>
<add>QUnit[ jQuery.find.compile && jQuery.fn.toggle && document.body.attachShadow ? "test" : "skip" ]( "shadow toggle()", function( assert ) {
<add> assert.expect( 4 );
<add> jQuery( "<div id='shadowHost'></div>" ).appendTo( "#qunit-fixture" );
<add> var shadowHost = document.querySelector( "#shadowHost" );
<add> var shadowRoot = shadowHost.attachShadow( { mode: "open" } );
<add> shadowRoot.innerHTML = "" +
<add> "<style>.hidden{display: none;}</style>" +
<add> "<div id='shadowHiddenChild' class='hidden'></div>" +
<add> "<div id='shadowChild'></div>";
<add> var shadowChild = shadowRoot.querySelector( "#shadowChild" );
<add> var shadowHiddenChild = shadowRoot.querySelector( "#shadowHiddenChild" );
<add>
<add> var $shadowChild = jQuery( shadowChild );
<add> assert.strictEqual( $shadowChild.css( "display" ), "block", "is visible" );
<add> $shadowChild.toggle();
<add> assert.strictEqual( $shadowChild.css( "display" ), "none", "is hidden" );
<add>
<add> $shadowChild = jQuery( shadowHiddenChild );
<add> assert.strictEqual( $shadowChild.css( "display" ), "none", "is hidden" );
<add> $shadowChild.toggle();
<add> assert.strictEqual( $shadowChild.css( "display" ), "block", "is visible" );
<add>} );
<add>
<ide> QUnit.test( "jQuery.css(elem, 'height') doesn't clear radio buttons (bug #1095)", function( assert ) {
<ide> assert.expect( 4 );
<ide>
<ide><path>test/unit/effects.js
<ide> supportjQuery.each( hideOptions, function( type, setup ) {
<ide>
<ide> assert.expectJqData( this, $span, "olddisplay" );
<ide> } );
<add>
<add> QUnit[ document.body.attachShadow ? "test" : "skip" ](
<add> "Persist correct display value - " + type + " hidden, shadow child", function( assert ) {
<add> assert.expect( 3 );
<add>
<add> jQuery( "<div id='shadowHost'></div>" ).appendTo( "#qunit-fixture" );
<add>
<add> var shadowHost = document.querySelector( "#shadowHost" );
<add> var shadowRoot = shadowHost.attachShadow( { mode: "open" } );
<add> shadowRoot.innerHTML = "<style>.hidden{display: none;}</style>" +
<add> "<span id='shadowChild' class='hidden'></span>";
<add> var shadowChild = shadowRoot.querySelector( "#shadowChild" );
<add>
<add> var $shadowChild = jQuery( shadowChild );
<add> var displayNone = "none";
<add> var display = "inline";
<add> var clock = this.clock;
<add>
<add> $shadowChild.fadeIn( 100, function() {
<add> assert.equal( $shadowChild.css( "display" ), display, "Expecting shadow display: " + display );
<add> $shadowChild.fadeOut( 100, function() {
<add> assert.equal( $shadowChild.css( "display" ), displayNone, "Expecting shadow display: " + displayNone );
<add> $shadowChild.fadeIn( 100, function() {
<add> assert.equal( $shadowChild.css( "display" ), display, "Expecting shadow display: " + display );
<add> } );
<add> } );
<add> } );
<add>
<add> clock.tick( 300 );
<add>
<add> assert.expectJqData( this, $shadowChild, "olddisplay" );
<add> } );
<ide> } );
<ide>
<ide> QUnit.test( "animate(Hash, Object, Function)", function( assert ) { | 8 |
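A short sketch of why the composed lookup is needed: `contains` stops at the shadow root, while the composed `getRootNode` crosses it (assumes a browser that implements `attachShadow`):

```javascript
var host = document.createElement( "div" );
document.body.appendChild( host );
var child = host.attachShadow( { mode: "open" } )
	.appendChild( document.createElement( "span" ) );

document.documentElement.contains( child );           // false - stops at the shadow root
child.getRootNode( { composed: true } ) === document; // true - crosses the boundary
```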
Javascript | Javascript | fix code comment | b71a406d25679acbc154bd75c8feab9fc4047569 | <ide><path>packages/ember-metal/lib/set_properties.js
<ide> var changeProperties = Ember.changeProperties,
<ide> observers will be buffered.
<ide>
<ide> @method setProperties
<del> @param target
<del> @param {Hash} properties
<del> @return target
<add> @param self
<add> @param {Object} hash
<add> @return self
<ide> */
<ide> Ember.setProperties = function(self, hash) {
<ide> changeProperties(function(){ | 1 |
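A hedged usage sketch of the documented buffering behavior (assumes a classic Ember object `person`):

```javascript
// Both keys are set inside one changeProperties block, so observers
// fire only after the whole hash has been applied.
Ember.setProperties(person, { firstName: 'Yehuda', lastName: 'Katz' });
```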
Text | Text | fix a small typo in the readme. | a7b2b8caa529458d2fcab8ae90c3163d8836e12b | <ide><path>README.md
<ide> map1.get('b') + " vs. " + map2.get('b') // 2 vs. 50
<ide>
<ide> ### Browser
<ide>
<del>Immutable.js has no depenencnies, which makes it predictable to include in a Browser.
<add>Immutable.js has no dependencies, which makes it predictable to include in a Browser.
<ide>
<ide> It's highly recommended to use a module bundler like [webpack](https://webpack.github.io/),
<ide> [rollup](https://rollupjs.org/), or | 1 |
PHP | PHP | remove unneeded elseif | 1d5c21ad8bca813ec94ded483934568c3bb6b3e6 | <ide><path>src/Database/Expression/CaseExpression.php
<ide> protected function _addExpressions($conditions, $values, $types)
<ide> $value = $keyValues[$k];
<ide> array_push($this->_values, $value);
<ide> continue;
<del> } elseif ($value === 'identifier') {
<add> }
<add> if ($value === 'identifier') {
<ide> $value = new IdentifierExpression($keyValues[$k]);
<ide> array_push($this->_values, $value);
<ide> continue; | 1 |
Text | Text | update membership procedures | 7bc427405a475e8aabb4fbd31367f375a88ed6ab | <ide><path>docs/Homebrew-Leadership-Responsibilities.md
<ide> - Book a group dinner (which Homebrew pays for) and check for any dietary requirements
<ide> - Ask someone to bring a conference/table microphone for people to be able to remotely participate in AGM
<ide> - February after the AGM:
<del> - Add the minutes of the AGM to <https://github.com/Homebrew/brew/tree/master/docs/governance>
<del> - Create [issue in Homebrew/brew](https://github.com/Homebrew/brew/issues?q=is%3Aissue+in%3Atitle+membership+) to ask members who did not vote in the election whether they wish to remain or step down as members
<add> - Add the minutes of the AGM to Homebrew/brew's [governance archives](https://github.com/Homebrew/brew/tree/master/docs/governance)
<add> - Create an [issue in Homebrew/brew](https://github.com/Homebrew/brew/issues?q=is%3Aissue+in%3Atitle+membership+) to survey members who did not vote in the election whether they wish to remain or step down as members
<ide> - Members that are not maintainers should be a least one of:
<ide> - An current or previously active maintainer, PLC/TSC member or Project Leader
<ide> - A long-standing member of the Homebrew community (e.g. been submitting good bug reports for over two years)
<add> - After the survey issue is closed, list the current year's members in a new file within the [governance archives](https://github.com/Homebrew/brew/tree/master/docs/governance)
<ide> - October: arrange in-person AGM
<ide> - Offer to pay for Homebrew maintainers who are at least one of:
<ide> - active Homebrew maintainers (i.e. not just contributors)
<ide><path>docs/New-Maintainer-Checklist.md
<ide> When admitted as members:
<ide>
<ide> - Invite them to the [**@Homebrew/members** team](https://github.com/orgs/Homebrew/teams/members), to give them access to the private governance repository.
<ide> - Invite them as a single-channel guest to the #members channel on the [`machomebrew` private maintainers Slack](https://machomebrew.slack.com/admin/invites) (and ensure they've read the [communication guidelines](Maintainer-Guidelines.md#communication)) and ask them to use their real name there (rather than a pseudonym they may use on e.g. GitHub).
<del>- Add them to the membership list in the [homebrew-governance repository](https://github.com/Homebrew/homebrew-governance).
<add>- Add them to the current year's membership list in the [governance archives](https://github.com/Homebrew/brew/tree/master/docs/governance). | 2 |
Java | Java | fix single.using, add unit tests and coverage | 4f878d56988d572257309dad1a1d0c9fe601f3d8 | <ide><path>src/main/java/io/reactivex/internal/operators/single/SingleLift.java
<ide> package io.reactivex.internal.operators.single;
<ide>
<ide> import io.reactivex.*;
<del>import io.reactivex.plugins.RxJavaPlugins;
<add>import io.reactivex.exceptions.Exceptions;
<add>import io.reactivex.internal.disposables.EmptyDisposable;
<add>import io.reactivex.internal.functions.ObjectHelper;
<ide>
<ide> public final class SingleLift<T, R> extends Single<R> {
<ide>
<ide> public SingleLift(SingleSource<T> source, SingleOperator<? extends R, ? super T>
<ide>
<ide> @Override
<ide> protected void subscribeActual(SingleObserver<? super R> s) {
<add> SingleObserver<? super T> sr;
<add>
<ide> try {
<del> SingleObserver<? super T> sr = onLift.apply(s);
<del>
<del> if (sr == null) {
<del> throw new NullPointerException("The onLift returned a null subscriber");
<del> }
<del> // TODO plugin wrapper
<del> source.subscribe(sr);
<del> } catch (NullPointerException ex) { // NOPMD
<del> throw ex;
<add> sr = ObjectHelper.requireNonNull(onLift.apply(s), "The onLift returned a null SingleObserver");
<ide> } catch (Throwable ex) {
<del> RxJavaPlugins.onError(ex);
<del> NullPointerException npe = new NullPointerException("Not really but can't throw other than NPE");
<del> npe.initCause(ex);
<del> throw npe;
<add> Exceptions.throwIfFatal(ex);
<add> EmptyDisposable.error(ex, s);
<add> return;
<ide> }
<add>
<add> source.subscribe(sr);
<ide> }
<ide>
<ide> }
<ide><path>src/main/java/io/reactivex/internal/operators/single/SingleUsing.java
<ide> package io.reactivex.internal.operators.single;
<ide>
<ide> import java.util.concurrent.Callable;
<add>import java.util.concurrent.atomic.AtomicReference;
<ide>
<ide> import io.reactivex.*;
<del>import io.reactivex.disposables.*;
<add>import io.reactivex.disposables.Disposable;
<ide> import io.reactivex.exceptions.*;
<ide> import io.reactivex.functions.*;
<del>import io.reactivex.internal.disposables.EmptyDisposable;
<add>import io.reactivex.internal.disposables.*;
<ide> import io.reactivex.internal.functions.ObjectHelper;
<ide> import io.reactivex.plugins.RxJavaPlugins;
<ide>
<ide> final boolean eager;
<ide>
<ide> public SingleUsing(Callable<U> resourceSupplier,
<del> Function<? super U, ? extends SingleSource<? extends T>> singleFunction, Consumer<? super U> disposer,
<add> Function<? super U, ? extends SingleSource<? extends T>> singleFunction,
<add> Consumer<? super U> disposer,
<ide> boolean eager) {
<ide> this.resourceSupplier = resourceSupplier;
<ide> this.singleFunction = singleFunction;
<ide> this.disposer = disposer;
<ide> this.eager = eager;
<ide> }
<ide>
<del>
<del>
<ide> @Override
<ide> protected void subscribeActual(final SingleObserver<? super T> s) {
<ide>
<ide> protected void subscribeActual(final SingleObserver<? super T> s) {
<ide> return;
<ide> }
<ide>
<del> SingleSource<? extends T> s1;
<add> SingleSource<? extends T> source;
<ide>
<ide> try {
<del> s1 = ObjectHelper.requireNonNull(singleFunction.apply(resource), "The singleFunction returned a null SingleSource");
<add> source = ObjectHelper.requireNonNull(singleFunction.apply(resource), "The singleFunction returned a null SingleSource");
<ide> } catch (Throwable ex) {
<ide> Exceptions.throwIfFatal(ex);
<add>
<add> if (eager) {
<add> try {
<add> disposer.accept(resource);
<add> } catch (Throwable exc) {
<add> Exceptions.throwIfFatal(exc);
<add> ex = new CompositeException(ex, exc);
<add> }
<add> }
<ide> EmptyDisposable.error(ex, s);
<add> if (!eager) {
<add> try {
<add> disposer.accept(resource);
<add> } catch (Throwable exc) {
<add> Exceptions.throwIfFatal(exc);
<add> RxJavaPlugins.onError(exc);
<add> }
<add> }
<ide> return;
<ide> }
<ide>
<del> s1.subscribe(new SingleObserver<T>() {
<del>
<del> @Override
<del> public void onSubscribe(Disposable d) {
<del> if (eager) {
<del> CompositeDisposable set = new CompositeDisposable();
<del> set.add(d);
<del> set.add(Disposables.fromRunnable(new Runnable() {
<del> @Override
<del> public void run() {
<del> try {
<del> disposer.accept(resource);
<del> } catch (Throwable e) {
<del> Exceptions.throwIfFatal(e);
<del> RxJavaPlugins.onError(e);
<del> }
<del> }
<del> }));
<del> s.onSubscribe(set);
<del> } else {
<del> s.onSubscribe(d);
<del> }
<del> }
<add> source.subscribe(new UsingSingleObserver<T, U>(s, resource, eager, disposer));
<add> }
<ide>
<del> @Override
<del> public void onSuccess(T value) {
<del> if (eager) {
<del> try {
<del> disposer.accept(resource);
<del> } catch (Throwable e) {
<del> Exceptions.throwIfFatal(e);
<del> s.onError(e);
<del> return;
<del> }
<del> }
<del> s.onSuccess(value);
<del> if (!eager) {
<del> try {
<del> disposer.accept(resource);
<del> } catch (Throwable e) {
<del> Exceptions.throwIfFatal(e);
<del> RxJavaPlugins.onError(e);
<del> }
<del> }
<add> static final class UsingSingleObserver<T, U> extends
<add> AtomicReference<Object> implements SingleObserver<T>, Disposable {
<add> /** */
<add> private static final long serialVersionUID = -5331524057054083935L;
<add>
<add> final SingleObserver<? super T> actual;
<add>
<add> final Consumer<? super U> disposer;
<add>
<add> final boolean eager;
<add>
<add> Disposable d;
<add>
<add> public UsingSingleObserver(SingleObserver<? super T> actual, U resource, boolean eager,
<add> Consumer<? super U> disposer) {
<add> super(resource);
<add> this.actual = actual;
<add> this.eager = eager;
<add> this.disposer = disposer;
<add> }
<add>
<add> @Override
<add> public void dispose() {
<add> d.dispose();
<add> d = DisposableHelper.DISPOSED;
<add> disposeAfter();
<add> }
<add>
<add> @Override
<add> public boolean isDisposed() {
<add> return d.isDisposed();
<add> }
<add>
<add> @Override
<add> public void onSubscribe(Disposable d) {
<add> if (DisposableHelper.validate(this.d, d)) {
<add> this.d = d;
<add>
<add> actual.onSubscribe(this);
<ide> }
<add> }
<add>
<add> @SuppressWarnings("unchecked")
<add> @Override
<add> public void onSuccess(T value) {
<add> d = DisposableHelper.DISPOSED;
<ide>
<del> @Override
<del> public void onError(Throwable e) {
<del> if (eager) {
<add> if (eager) {
<add> Object u = getAndSet(this);
<add> if (u != this) {
<ide> try {
<del> disposer.accept(resource);
<add> disposer.accept((U)u);
<ide> } catch (Throwable ex) {
<ide> Exceptions.throwIfFatal(ex);
<del> e = new CompositeException(ex, e);
<add> actual.onError(ex);
<add> return;
<ide> }
<add> } else {
<add> return;
<ide> }
<del> s.onError(e);
<del> if (!eager) {
<add> }
<add>
<add> actual.onSuccess(value);
<add>
<add> if (!eager) {
<add> disposeAfter();
<add> }
<add> }
<add>
<add> @SuppressWarnings("unchecked")
<add> @Override
<add> public void onError(Throwable e) {
<add> d = DisposableHelper.DISPOSED;
<add>
<add> if (eager) {
<add> Object u = getAndSet(this);
<add> if (u != this) {
<ide> try {
<del> disposer.accept(resource);
<add> disposer.accept((U)u);
<ide> } catch (Throwable ex) {
<ide> Exceptions.throwIfFatal(ex);
<del> RxJavaPlugins.onError(ex);
<add> e = new CompositeException(e, ex);
<ide> }
<add> } else {
<add> return;
<ide> }
<ide> }
<ide>
<del> });
<del> }
<add> actual.onError(e);
<add>
<add> if (!eager) {
<add> disposeAfter();
<add> }
<add> }
<ide>
<add> @SuppressWarnings("unchecked")
<add> void disposeAfter() {
<add> Object u = getAndSet(this);
<add> if (u != this) {
<add> try {
<add> disposer.accept((U)u);
<add> } catch (Throwable ex) {
<add> Exceptions.throwIfFatal(ex);
<add> RxJavaPlugins.onError(ex);
<add> }
<add> }
<add> }
<add> }
<ide> }
<ide><path>src/test/java/io/reactivex/internal/operators/single/SingleCacheTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.single;
<add>
<add>import org.junit.Test;
<add>
<add>import io.reactivex.*;
<add>import io.reactivex.disposables.Disposable;
<add>import io.reactivex.processors.PublishProcessor;
<add>import io.reactivex.schedulers.Schedulers;
<add>import io.reactivex.subscribers.TestSubscriber;
<add>
<add>public class SingleCacheTest {
<add>
<add> @Test
<add> public void cancelImmediately() {
<add> PublishProcessor<Integer> pp = PublishProcessor.create();
<add>
<add> Single<Integer> cached = pp.toSingle().cache();
<add>
<add> TestSubscriber<Integer> ts = cached.test(true);
<add>
<add> pp.onNext(1);
<add> pp.onComplete();
<add>
<add> ts.assertEmpty();
<add>
<add> cached.test().assertResult(1);
<add> }
<add>
<add> @Test
<add> public void addRemoveRace() {
<add> for (int i = 0; i < 500; i++) {
<add> PublishProcessor<Integer> pp = PublishProcessor.create();
<add>
<add> final Single<Integer> cached = pp.toSingle().cache();
<add>
<add> final TestSubscriber<Integer> ts1 = cached.test();
<add>
<add> Runnable r1 = new Runnable() {
<add> @Override
<add> public void run() {
<add> ts1.cancel();
<add> }
<add> };
<add>
<add> Runnable r2 = new Runnable() {
<add> @Override
<add> public void run() {
<add> cached.test();
<add> }
<add> };
<add>
<add> TestHelper.race(r1, r2, Schedulers.single());
<add> }
<add> }
<add>
<add> @Test
<add> public void doubleDispose() {
<add> PublishProcessor<Integer> pp = PublishProcessor.create();
<add>
<add> final Single<Integer> cached = pp.toSingle().cache();
<add>
<add> SingleObserver<Integer> doubleDisposer = new SingleObserver<Integer>() {
<add>
<add> @Override
<add> public void onSubscribe(Disposable d) {
<add> d.dispose();
<add> d.dispose();
<add> }
<add>
<add> @Override
<add> public void onSuccess(Integer value) {
<add>
<add> }
<add>
<add> @Override
<add> public void onError(Throwable e) {
<add>
<add> }
<add> };
<add> cached.subscribe(doubleDisposer);
<add>
<add> cached.test();
<add>
<add> cached.subscribe(doubleDisposer);
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/internal/operators/single/SingleDeferTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.single;
<add>
<add>import java.util.concurrent.Callable;
<add>
<add>import org.junit.Test;
<add>
<add>import io.reactivex.Single;
<add>
<add>public class SingleDeferTest {
<add>
<add> @Test
<add> public void normal() {
<add>
<add> Single<Integer> s = Single.defer(new Callable<Single<Integer>>() {
<add> int counter;
<add> @Override
<add> public Single<Integer> call() throws Exception {
<add> return Single.just(++counter);
<add> }
<add> });
<add>
<add> for (int i = 1; i < 33; i++) {
<add> s.test().assertResult(i);
<add> }
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/internal/operators/single/SingleDelayTest.java
<ide> import org.junit.Test;
<ide>
<ide> import io.reactivex.*;
<add>import io.reactivex.exceptions.TestException;
<ide> import io.reactivex.functions.BiConsumer;
<ide> import io.reactivex.schedulers.Schedulers;
<ide>
<ide> public void accept(Integer v, Throwable e) throws Exception {
<ide> assertEquals(1, value.get());
<ide> }
<ide>
<add> @Test
<add> public void delayError() {
<add> Single.error(new TestException()).delay(5, TimeUnit.SECONDS)
<add> .test()
<add> .awaitDone(1, TimeUnit.SECONDS)
<add> .assertFailure(TestException.class);
<add> }
<add>
<ide> @Test
<ide> public void delaySubscriptionCompletable() throws Exception {
<ide> Single.just(1).delaySubscription(Completable.complete().delay(100, TimeUnit.MILLISECONDS))
<ide><path>src/test/java/io/reactivex/internal/operators/single/SingleLiftTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.single;
<add>
<add>import org.junit.Test;
<add>
<add>import io.reactivex.*;
<add>import io.reactivex.disposables.Disposable;
<add>
<add>public class SingleLiftTest {
<add>
<add> @Test
<add> public void normal() {
<add>
<add> Single.just(1).lift(new SingleOperator<Integer, Integer>() {
<add> @Override
<add> public SingleObserver<Integer> apply(final SingleObserver<? super Integer> s) throws Exception {
<add> return new SingleObserver<Integer>() {
<add>
<add> @Override
<add> public void onSubscribe(Disposable d) {
<add> s.onSubscribe(d);
<add> }
<add>
<add> @Override
<add> public void onSuccess(Integer value) {
<add> s.onSuccess(value + 1);
<add> }
<add>
<add> @Override
<add> public void onError(Throwable e) {
<add> s.onError(e);
<add> }
<add> };
<add> }
<add> })
<add> .test()
<add> .assertResult(2);
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/internal/operators/single/SingleUsingTest.java
<add>/**
<add> * Copyright 2016 Netflix, Inc.
<add> *
<add> * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in
<add> * compliance with the License. You may obtain a copy of the License at
<add> *
<add> * http://www.apache.org/licenses/LICENSE-2.0
<add> *
<add> * Unless required by applicable law or agreed to in writing, software distributed under the License is
<add> * distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See
<add> * the License for the specific language governing permissions and limitations under the License.
<add> */
<add>
<add>package io.reactivex.internal.operators.single;
<add>
<add>import java.util.List;
<add>import java.util.concurrent.Callable;
<add>
<add>import static org.junit.Assert.*;
<add>import org.junit.Test;
<add>
<add>import io.reactivex.*;
<add>import io.reactivex.disposables.*;
<add>import io.reactivex.exceptions.*;
<add>import io.reactivex.functions.*;
<add>import io.reactivex.internal.functions.Functions;
<add>import io.reactivex.plugins.RxJavaPlugins;
<add>import io.reactivex.processors.PublishProcessor;
<add>import io.reactivex.schedulers.Schedulers;
<add>import io.reactivex.subscribers.TestSubscriber;
<add>
<add>public class SingleUsingTest {
<add>
<add> Function<Disposable, Single<Integer>> mapper = new Function<Disposable, Single<Integer>>() {
<add> @Override
<add> public Single<Integer> apply(Disposable d) throws Exception {
<add> return Single.just(1);
<add> }
<add> };
<add>
<add> Function<Disposable, Single<Integer>> mapperThrows = new Function<Disposable, Single<Integer>>() {
<add> @Override
<add> public Single<Integer> apply(Disposable d) throws Exception {
<add> throw new TestException("Mapper");
<add> }
<add> };
<add>
<add> Consumer<Disposable> disposer = new Consumer<Disposable>() {
<add> @Override
<add> public void accept(Disposable d) throws Exception {
<add> d.dispose();
<add> }
<add> };
<add>
<add> Consumer<Disposable> disposerThrows = new Consumer<Disposable>() {
<add> @Override
<add> public void accept(Disposable d) throws Exception {
<add> throw new TestException("Disposer");
<add> }
<add> };
<add>
<add> @Test
<add> public void resourceSupplierThrows() {
<add> Single.using(new Callable<Integer>() {
<add> @Override
<add> public Integer call() throws Exception {
<add> throw new TestException();
<add> }
<add> }, Functions.justFunction(Single.just(1)), Functions.emptyConsumer())
<add> .test()
<add> .assertFailure(TestException.class);
<add> }
<add>
<add> @Test
<add> public void normalEager() {
<add> Single.using(Functions.justCallable(1), Functions.justFunction(Single.just(1)), Functions.emptyConsumer())
<add> .test()
<add> .assertResult(1);
<add> }
<add>
<add> @Test
<add> public void normalNonEager() {
<add> Single.using(Functions.justCallable(1), Functions.justFunction(Single.just(1)), Functions.emptyConsumer(), false)
<add> .test()
<add> .assertResult(1);
<add> }
<add>
<add> @Test
<add> public void errorEager() {
<add> Single.using(Functions.justCallable(1), Functions.justFunction(Single.error(new TestException())), Functions.emptyConsumer())
<add> .test()
<add> .assertFailure(TestException.class);
<add> }
<add>
<add> @Test
<add> public void errorNonEager() {
<add> Single.using(Functions.justCallable(1), Functions.justFunction(Single.error(new TestException())), Functions.emptyConsumer(), false)
<add> .test()
<add> .assertFailure(TestException.class);
<add> }
<add>
<add> @Test
<add> public void eagerMapperThrowsDisposerThrows() {
<add> TestSubscriber<Integer> ts = Single.using(Functions.justCallable(Disposables.empty()), mapperThrows, disposerThrows)
<add> .test()
<add> .assertFailure(CompositeException.class);
<add>
<add> List<Throwable> ce = TestHelper.compositeList(ts.errors().get(0));
<add> TestHelper.assertError(ce, 0, TestException.class, "Mapper");
<add> TestHelper.assertError(ce, 1, TestException.class, "Disposer");
<add> }
<add>
<add> @Test
<add> public void noneagerMapperThrowsDisposerThrows() {
<add>
<add> List<Throwable> errors = TestHelper.trackPluginErrors();
<add>
<add> try {
<add> Single.using(Functions.justCallable(Disposables.empty()), mapperThrows, disposerThrows, false)
<add> .test()
<add> .assertFailureAndMessage(TestException.class, "Mapper");
<add>
<add> TestHelper.assertError(errors, 0, TestException.class, "Disposer");
<add> } finally {
<add> RxJavaPlugins.reset();
<add> }
<add> }
<add>
<add> @Test
<add> public void resourceDisposedIfMapperCrashes() {
<add> Disposable d = Disposables.empty();
<add>
<add> Single.using(Functions.justCallable(d), mapperThrows, disposer)
<add> .test()
<add> .assertFailure(TestException.class);
<add>
<add> assertTrue(d.isDisposed());
<add> }
<add>
<add> @Test
<add> public void resourceDisposedIfMapperCrashesNonEager() {
<add> Disposable d = Disposables.empty();
<add>
<add> Single.using(Functions.justCallable(d), mapperThrows, disposer, false)
<add> .test()
<add> .assertFailure(TestException.class);
<add>
<add> assertTrue(d.isDisposed());
<add> }
<add>
<add> @Test
<add> public void dispose() {
<add> Disposable d = Disposables.empty();
<add>
<add> Single.using(Functions.justCallable(d), mapper, disposer, false)
<add> .test(true);
<add>
<add> assertTrue(d.isDisposed());
<add> }
<add>
<add> @Test
<add> public void disposerThrowsEager() {
<add> Single.using(Functions.justCallable(Disposables.empty()), mapper, disposerThrows)
<add> .test()
<add> .assertFailure(TestException.class);
<add> }
<add>
<add> @Test
<add> public void disposerThrowsNonEager() {
<add>
<add> List<Throwable> errors = TestHelper.trackPluginErrors();
<add>
<add> try {
<add> Single.using(Functions.justCallable(Disposables.empty()), mapper, disposerThrows, false)
<add> .test()
<add> .assertResult(1);
<add> TestHelper.assertError(errors, 0, TestException.class, "Disposer");
<add> } finally {
<add> RxJavaPlugins.reset();
<add> }
<add> }
<add>
<add> @Test
<add> public void errorAndDisposerThrowsEager() {
<add> TestSubscriber<Integer> ts = Single.using(Functions.justCallable(Disposables.empty()),
<add> new Function<Disposable, SingleSource<Integer>>() {
<add> @Override
<add> public SingleSource<Integer> apply(Disposable v) throws Exception {
<add> return Single.<Integer>error(new TestException("Mapper-run"));
<add> }
<add> }, disposerThrows)
<add> .test()
<add> .assertFailure(CompositeException.class);
<add>
<add> List<Throwable> ce = TestHelper.compositeList(ts.errors().get(0));
<add> TestHelper.assertError(ce, 0, TestException.class, "Mapper-run");
<add> TestHelper.assertError(ce, 1, TestException.class, "Disposer");
<add> }
<add>
<add> @Test
<add> public void errorAndDisposerThrowsNonEager() {
<add> List<Throwable> errors = TestHelper.trackPluginErrors();
<add>
<add> try {
<add> Single.using(Functions.justCallable(Disposables.empty()),
<add> new Function<Disposable, SingleSource<Integer>>() {
<add> @Override
<add> public SingleSource<Integer> apply(Disposable v) throws Exception {
<add> return Single.<Integer>error(new TestException("Mapper-run"));
<add> }
<add> }, disposerThrows, false)
<add> .test()
<add> .assertFailure(TestException.class);
<add> TestHelper.assertError(errors, 0, TestException.class, "Disposer");
<add> } finally {
<add> RxJavaPlugins.reset();
<add> }
<add> }
<add>
<add> @Test
<add> public void successDisposeRace() {
<add> for (int i = 0; i < 500; i++) {
<add> final PublishProcessor<Integer> pp = PublishProcessor.create();
<add>
<add> Disposable d = Disposables.empty();
<add>
<add> final TestSubscriber<Integer> ts = Single.using(Functions.justCallable(d), new Function<Disposable, SingleSource<Integer>>() {
<add> @Override
<add> public SingleSource<Integer> apply(Disposable v) throws Exception {
<add> return pp.toSingle();
<add> }
<add> }, disposer)
<add> .test();
<add>
<add> pp.onNext(1);
<add>
<add> Runnable r1 = new Runnable() {
<add> @Override
<add> public void run() {
<add> pp.onComplete();
<add> }
<add> };
<add> Runnable r2 = new Runnable() {
<add> @Override
<add> public void run() {
<add> ts.cancel();
<add> }
<add> };
<add>
<add> TestHelper.race(r1, r2, Schedulers.single());
<add>
<add> assertTrue(d.isDisposed());
<add> }
<add> }
<add>
<add> @Test
<add> public void doubleOnSubscribe() {
<add> List<Throwable> errors = TestHelper.trackPluginErrors();
<add>
<add> try {
<add> Single.using(Functions.justCallable(1), new Function<Integer, SingleSource<Integer>>() {
<add> @Override
<add> public SingleSource<Integer> apply(Integer v) throws Exception {
<add> return new Single<Integer>() {
<add> @Override
<add> protected void subscribeActual(SingleObserver<? super Integer> observer) {
<add> observer.onSubscribe(Disposables.empty());
<add>
<add> assertFalse(((Disposable)observer).isDisposed());
<add>
<add> Disposable d = Disposables.empty();
<add> observer.onSubscribe(d);
<add>
<add> assertTrue(d.isDisposed());
<add>
<add> assertFalse(((Disposable)observer).isDisposed());
<add>
<add> observer.onSuccess(1);
<add>
<add> assertTrue(((Disposable)observer).isDisposed());
<add> }
<add> };
<add> }
<add> }, Functions.emptyConsumer())
<add> .test()
<add> .assertResult(1)
<add> ;
<add>
<add> TestHelper.assertError(errors, 0, IllegalStateException.class, "Disposable already set!");
<add> } finally {
<add> RxJavaPlugins.reset();
<add> }
<add> }
<add>
<add> @Test
<add> public void errorDisposeRace() {
<add> for (int i = 0; i < 500; i++) {
<add> final PublishProcessor<Integer> pp = PublishProcessor.create();
<add>
<add> Disposable d = Disposables.empty();
<add>
<add> final TestSubscriber<Integer> ts = Single.using(Functions.justCallable(d), new Function<Disposable, SingleSource<Integer>>() {
<add> @Override
<add> public SingleSource<Integer> apply(Disposable v) throws Exception {
<add> return pp.toSingle();
<add> }
<add> }, disposer)
<add> .test();
<add>
<add> final TestException ex = new TestException();
<add>
<add> Runnable r1 = new Runnable() {
<add> @Override
<add> public void run() {
<add> pp.onError(ex);
<add> }
<add> };
<add> Runnable r2 = new Runnable() {
<add> @Override
<add> public void run() {
<add> ts.cancel();
<add> }
<add> };
<add>
<add> TestHelper.race(r1, r2, Schedulers.single());
<add>
<add> assertTrue(d.isDisposed());
<add> }
<add> }
<add>}
<ide><path>src/test/java/io/reactivex/single/SingleTest.java
<ide>
<ide> import static org.junit.Assert.*;
<ide>
<del>import java.util.Arrays;
<add>import java.util.*;
<ide> import java.util.concurrent.*;
<ide> import java.util.concurrent.atomic.*;
<ide>
<ide> import io.reactivex.disposables.*;
<ide> import io.reactivex.exceptions.TestException;
<ide> import io.reactivex.functions.*;
<add>import io.reactivex.internal.operators.single.SingleInternalHelper;
<ide> import io.reactivex.schedulers.Schedulers;
<ide> import io.reactivex.subscribers.TestSubscriber;
<ide>
<ide> public void toFutureThrows() throws Exception {
<ide> }
<ide> }
<ide>
<add> @Test(expected = UnsupportedOperationException.class)
<add> public void toFlowableIterableRemove() {
<add> @SuppressWarnings("unchecked")
<add> Iterable<? extends Flowable<Integer>> f = SingleInternalHelper.iterableToFlowable(Arrays.asList(Single.just(1)));
<add>
<add> Iterator<? extends Flowable<Integer>> iterator = f.iterator();
<add> iterator.next();
<add> iterator.remove();
<add> }
<ide> }
<ide> | 8 |
Mixed | Text | remove deprecated file `rails/rack/debugger` | 7563bf7b46e6f04e160d664e284a33052f9804b8
<add>* Remove deprecated file `rails/rack/debugger`.
<add>
<add> *Rafael Mendonça França*
<add>
<ide> * Remove deprecated `config.serve_static_files`.
<ide>
<ide> *Rafael Mendonça França*
<ide><path>railties/lib/rails/rack/debugger.rb
<del>require "active_support/deprecation"
<del>
<del>ActiveSupport::Deprecation.warn("This file is deprecated and will be removed in Rails 5.1 with no replacement.") | 2 |
Mixed | Go | support simple syntax --secret foo | a257f674ba22d325e7ad59541723c3ee4e9adc27 | <ide><path>cli/command/service/opts_test.go
<ide> package service
<ide>
<ide> import (
<del> "os"
<ide> "reflect"
<ide> "testing"
<ide> "time"
<ide>
<ide> "github.com/docker/docker/api/types/container"
<del> "github.com/docker/docker/opts"
<ide> "github.com/docker/docker/pkg/testutil/assert"
<ide> )
<ide>
<ide> func TestHealthCheckOptionsToHealthConfigConflict(t *testing.T) {
<ide> _, err := opt.toHealthConfig()
<ide> assert.Error(t, err, "--no-healthcheck conflicts with --health-* options")
<ide> }
<del>
<del>func TestSecretOptionsSimple(t *testing.T) {
<del> var opt opts.SecretOpt
<del>
<del> testCase := "source=foo,target=testing"
<del> assert.NilError(t, opt.Set(testCase))
<del>
<del> reqs := opt.Value()
<del> assert.Equal(t, len(reqs), 1)
<del> req := reqs[0]
<del> assert.Equal(t, req.Source, "foo")
<del> assert.Equal(t, req.Target, "testing")
<del>}
<del>
<del>func TestSecretOptionsCustomUidGid(t *testing.T) {
<del> var opt opts.SecretOpt
<del>
<del> testCase := "source=foo,target=testing,uid=1000,gid=1001"
<del> assert.NilError(t, opt.Set(testCase))
<del>
<del> reqs := opt.Value()
<del> assert.Equal(t, len(reqs), 1)
<del> req := reqs[0]
<del> assert.Equal(t, req.Source, "foo")
<del> assert.Equal(t, req.Target, "testing")
<del> assert.Equal(t, req.UID, "1000")
<del> assert.Equal(t, req.GID, "1001")
<del>}
<del>
<del>func TestSecretOptionsCustomMode(t *testing.T) {
<del> var opt opts.SecretOpt
<del>
<del> testCase := "source=foo,target=testing,uid=1000,gid=1001,mode=0444"
<del> assert.NilError(t, opt.Set(testCase))
<del>
<del> reqs := opt.Value()
<del> assert.Equal(t, len(reqs), 1)
<del> req := reqs[0]
<del> assert.Equal(t, req.Source, "foo")
<del> assert.Equal(t, req.Target, "testing")
<del> assert.Equal(t, req.UID, "1000")
<del> assert.Equal(t, req.GID, "1001")
<del> assert.Equal(t, req.Mode, os.FileMode(0444))
<del>}
<ide><path>docs/reference/commandline/service_create.md
<ide> ID NAME MODE REPLICAS IMAGE
<ide>
<ide> ### Create a service with secrets
<ide> Use the `--secret` flag to give a container access to a
<del>[secret](secret_create.md). The following command will create a service
<del>with two secrets named `ssh-key` and `app-key`:
<add>[secret](secret_create.md).
<add>
<add>Create a service specifying a secret:
<add>
<add>```bash
<add>$ docker service create --name redis --secret secret.json redis:3.0.6
<add>4cdgfyky7ozwh3htjfw0d12qv
<add>```
<add>
<add>Create a service specifying the secret, target, user/group ID and mode:
<ide>
<ide> ```bash
<del>$ docker service create --name redis --secret source=ssh-key,target=ssh --secret source=app-key,target=app,uid=1000,gid=1001,mode=0400 redis:3.0.6
<add>$ docker service create --name redis \
<add> --secret source=ssh-key,target=ssh \
<add> --secret source=app-key,target=app,uid=1000,gid=1001,mode=0400 \
<add> redis:3.0.6
<ide> 4cdgfyky7ozwh3htjfw0d12qv
<ide> ```
<ide>
<ide><path>integration-cli/docker_cli_service_create_test.go
<ide> func (s *DockerSwarmSuite) TestServiceCreateMountVolume(c *check.C) {
<ide> c.Assert(mounts[0].RW, checker.Equals, true)
<ide> }
<ide>
<del>func (s *DockerSwarmSuite) TestServiceCreateWithSecret(c *check.C) {
<add>func (s *DockerSwarmSuite) TestServiceCreateWithSecretSimple(c *check.C) {
<add> d := s.AddDaemon(c, true, true)
<add>
<add> serviceName := "test-service-secret"
<add> testName := "test_secret"
<add> id := d.createSecret(c, swarm.SecretSpec{
<add> swarm.Annotations{
<add> Name: testName,
<add> },
<add> []byte("TESTINGDATA"),
<add> })
<add> c.Assert(id, checker.Not(checker.Equals), "", check.Commentf("secrets: %s", id))
<add>
<add> out, err := d.Cmd("service", "create", "--name", serviceName, "--secret", testName, "busybox", "top")
<add> c.Assert(err, checker.IsNil, check.Commentf(out))
<add>
<add> out, err = d.Cmd("service", "inspect", "--format", "{{ json .Spec.TaskTemplate.ContainerSpec.Secrets }}", serviceName)
<add> c.Assert(err, checker.IsNil)
<add>
<add> var refs []swarm.SecretReference
<add> c.Assert(json.Unmarshal([]byte(out), &refs), checker.IsNil)
<add> c.Assert(refs, checker.HasLen, 1)
<add>
<add> c.Assert(refs[0].SecretName, checker.Equals, testName)
<add> c.Assert(refs[0].Target, checker.Not(checker.IsNil))
<add> c.Assert(refs[0].Target.Name, checker.Equals, testName)
<add> c.Assert(refs[0].Target.UID, checker.Equals, "0")
<add> c.Assert(refs[0].Target.GID, checker.Equals, "0")
<add>}
<add>
<add>func (s *DockerSwarmSuite) TestServiceCreateWithSecretSourceTarget(c *check.C) {
<ide> d := s.AddDaemon(c, true, true)
<ide>
<ide> serviceName := "test-service-secret"
<ide><path>opts/secret.go
<ide> func (o *SecretOpt) Set(value string) error {
<ide> Mode: 0444,
<ide> }
<ide>
<add> // support a simple syntax of --secret foo
<add> if len(fields) == 1 {
<add> options.Source = fields[0]
<add> options.Target = fields[0]
<add> o.values = append(o.values, options)
<add> return nil
<add> }
<add>
<ide> for _, field := range fields {
<ide> parts := strings.SplitN(field, "=", 2)
<ide> key := strings.ToLower(parts[0])
<ide> func (o *SecretOpt) Set(value string) error {
<ide>
<ide> options.Mode = os.FileMode(m)
<ide> default:
<del> return fmt.Errorf("invalid field in secret request: %s", key)
<add> if len(fields) != 1 || value != "" {
<add> return fmt.Errorf("invalid field in secret request: %s", key)
<add> }
<ide> }
<ide> }
<ide>
<ide><path>opts/secret_test.go
<add>package opts
<add>
<add>import (
<add> "os"
<add> "testing"
<add>
<add> "github.com/docker/docker/pkg/testutil/assert"
<add>)
<add>
<add>func TestSecretOptionsSimple(t *testing.T) {
<add> var opt SecretOpt
<add>
<add> testCase := "app-secret"
<add> assert.NilError(t, opt.Set(testCase))
<add>
<add> reqs := opt.Value()
<add> assert.Equal(t, len(reqs), 1)
<add> req := reqs[0]
<add> assert.Equal(t, req.Source, "app-secret")
<add> assert.Equal(t, req.Target, "app-secret")
<add> assert.Equal(t, req.UID, "0")
<add> assert.Equal(t, req.GID, "0")
<add>}
<add>
<add>func TestSecretOptionsSourceTarget(t *testing.T) {
<add> var opt SecretOpt
<add>
<add> testCase := "source=foo,target=testing"
<add> assert.NilError(t, opt.Set(testCase))
<add>
<add> reqs := opt.Value()
<add> assert.Equal(t, len(reqs), 1)
<add> req := reqs[0]
<add> assert.Equal(t, req.Source, "foo")
<add> assert.Equal(t, req.Target, "testing")
<add>}
<add>
<add>func TestSecretOptionsCustomUidGid(t *testing.T) {
<add> var opt SecretOpt
<add>
<add> testCase := "source=foo,target=testing,uid=1000,gid=1001"
<add> assert.NilError(t, opt.Set(testCase))
<add>
<add> reqs := opt.Value()
<add> assert.Equal(t, len(reqs), 1)
<add> req := reqs[0]
<add> assert.Equal(t, req.Source, "foo")
<add> assert.Equal(t, req.Target, "testing")
<add> assert.Equal(t, req.UID, "1000")
<add> assert.Equal(t, req.GID, "1001")
<add>}
<add>
<add>func TestSecretOptionsCustomMode(t *testing.T) {
<add> var opt SecretOpt
<add>
<add> testCase := "source=foo,target=testing,uid=1000,gid=1001,mode=0444"
<add> assert.NilError(t, opt.Set(testCase))
<add>
<add> reqs := opt.Value()
<add> assert.Equal(t, len(reqs), 1)
<add> req := reqs[0]
<add> assert.Equal(t, req.Source, "foo")
<add> assert.Equal(t, req.Target, "testing")
<add> assert.Equal(t, req.UID, "1000")
<add> assert.Equal(t, req.GID, "1001")
<add> assert.Equal(t, req.Mode, os.FileMode(0444))
<add>} | 5 |
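A compact sketch of the two accepted forms. This is a hypothetical JavaScript parser, slightly stricter than the Go code above in that a lone `key=value` field is not treated as the bare-name shorthand:

```javascript
function parseSecret(value) {
    const fields = value.split(",");
    const options = { uid: "0", gid: "0", mode: 0o444 };

    // simple syntax: --secret foo
    if (fields.length === 1 && !fields[0].includes("=")) {
        options.source = fields[0];
        options.target = fields[0];
        return options;
    }

    for (const field of fields) {
        const eq = field.indexOf("="); // assume well-formed key=value pairs
        const key = field.slice(0, eq).toLowerCase();
        const val = field.slice(eq + 1);
        if (key === "mode") {
            options.mode = parseInt(val, 8);
        } else if (["source", "target", "uid", "gid"].includes(key)) {
            options[key] = val;
        } else {
            throw new Error("invalid field in secret request: " + key);
        }
    }
    return options;
}
```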
Ruby | Ruby | pass options to rails_blob_url | 835e27289a8a3e7006856fc3177a286652cda9d6 | <ide><path>activestorage/config/routes.rb
<ide> route_for(:rails_service_blob, blob.signed_id, blob.filename, options)
<ide> end
<ide>
<del> resolve("ActiveStorage::Blob") { |blob, options| route_for(:rails_blob, blob) }
<add> resolve("ActiveStorage::Blob") { |blob, options| route_for(:rails_blob, blob, options) }
<ide> resolve("ActiveStorage::Attachment") { |attachment, options| route_for(:rails_blob, attachment.blob, options) }
<ide>
<ide> | 1 |
Text | Text | separate unrelated info about child_process.exec() | 449f73b7ffd60e7dd140b360e4b53dc2428f4aef | <ide><path>doc/api/child_process.md
<ide> exec('echo "The \\$HOME variable is $HOME"');
<ide> **Never pass unsanitized user input to this function. Any input containing shell
<ide> metacharacters may be used to trigger arbitrary command execution.**
<ide>
<del>```js
<del>const { exec } = require('child_process');
<del>exec('cat *.js bad_file | wc -l', (error, stdout, stderr) => {
<del> if (error) {
<del> console.error(`exec error: ${error}`);
<del> return;
<del> }
<del> console.log(`stdout: ${stdout}`);
<del> console.log(`stderr: ${stderr}`);
<del>});
<del>```
<del>
<ide> If a `callback` function is provided, it is called with the arguments
<ide> `(error, stdout, stderr)`. On success, `error` will be `null`. On error,
<ide> `error` will be an instance of [`Error`][]. The `error.code` property will be
<ide> can be used to specify the character encoding used to decode the stdout and
<ide> stderr output. If `encoding` is `'buffer'`, or an unrecognized character
<ide> encoding, `Buffer` objects will be passed to the callback instead.
<ide>
<add>```js
<add>const { exec } = require('child_process');
<add>exec('cat *.js missing_file | wc -l', (error, stdout, stderr) => {
<add> if (error) {
<add> console.error(`exec error: ${error}`);
<add> return;
<add> }
<add> console.log(`stdout: ${stdout}`);
<add> console.log(`stderr: ${stderr}`);
<add>});
<add>```
<add>
<ide> If `timeout` is greater than `0`, the parent will send the signal
<ide> identified by the `killSignal` property (the default is `'SIGTERM'`) if the
<ide> child runs longer than `timeout` milliseconds. | 1 |
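The `timeout` and `killSignal` options described above can be exercised directly; an illustrative example:

```js
const { exec } = require('child_process');

// Kill the child with SIGTERM if it runs for more than 2000 milliseconds.
exec('sleep 10', { timeout: 2000, killSignal: 'SIGTERM' }, (error, stdout, stderr) => {
  if (error) {
    // `error.killed` is true when the process was terminated by the timeout.
    console.error(`exec error: ${error}`);
    return;
  }
  console.log(`stdout: ${stdout}`);
});
```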
PHP | PHP | fix coding standards and improve assertions | 38f2e89b20628aa81637d0c92afabfa3e71da632 | <ide><path>tests/TestCase/Utility/XmlTest.php
<ide> public function testBuildEmptyTag()
<ide> */
<ide> public function testLoadHtml()
<ide> {
<del> $html_file = CORE_TESTS . 'Fixture/sample.html';
<del> $html = file_get_contents($html_file);
<add> $htmlFile = CORE_TESTS . 'Fixture/sample.html';
<add> $html = file_get_contents($htmlFile);
<ide> $paragraph = 'Browsers usually indent blockquote elements.';
<ide> $blockquote = "
<ide> For 50 years, WWF has been protecting the future of nature.
<ide> public function testLoadHtml()
<ide> */
<ide> public function testLoadHtmlEmptyHtml()
<ide> {
<del> try {
<del> Xml::loadHtml(null);
<del> $this->fail('No exception');
<del> } catch (\Exception $e) {
<del> $this->assertTrue(true, 'An exception was raised');
<del> }
<add> $this->expectException(XmlException::class);
<add> Xml::loadHtml(null);
<ide> }
<ide>
<ide> /** | 1 |
Javascript | Javascript | add colors to debuglog() | a03458396b35c455f01a7429456fd8f0b5d7a4f4 | <ide><path>lib/internal/util/debuglog.js
<ide> 'use strict';
<ide>
<del>const { format } = require('internal/util/inspect');
<add>const { inspect, format, formatWithOptions } = require('internal/util/inspect');
<ide>
<ide> // `debugs` is deliberately initialized to undefined so any call to
<ide> // debuglog() before initializeDebugEnv() is called will throw.
<ide> function debuglogImpl(set) {
<ide> const pid = process.pid;
<ide> emitWarningIfNeeded(set);
<ide> debugs[set] = function debug(...args) {
<del> const msg = format(...args);
<del> process.stderr.write(format('%s %d: %s\n', set, pid, msg));
<add> const colors = process.stderr.hasColors && process.stderr.hasColors();
<add> const msg = formatWithOptions({ colors }, ...args);
<add> const coloredPID = inspect(pid, { colors });
<add> process.stderr.write(format('%s %s: %s\n', set, coloredPID, msg));
<ide> };
<ide> } else {
<ide> debugs[set] = null;
<ide><path>test/sequential/test-util-debug.js
<ide> 'use strict';
<ide> const common = require('../common');
<ide> const assert = require('assert');
<add>const util = require('util');
<ide>
<ide> const [, , modeArgv, sectionArgv] = process.argv;
<ide>
<ide> function parent() {
<ide> test('*-test', true, 'abc-test');
<ide> }
<ide>
<del>function test(environ, shouldWrite, section) {
<add>function test(environ, shouldWrite, section, forceColors = false) {
<ide> let expectErr = '';
<ide> const expectOut = 'ok\n';
<ide>
<ide> const spawn = require('child_process').spawn;
<ide> const child = spawn(process.execPath, [__filename, 'child', section], {
<del> env: Object.assign(process.env, { NODE_DEBUG: environ })
<add> env: Object.assign(process.env, {
<add> NODE_DEBUG: environ,
<add> FORCE_COLOR: forceColors ? 'true' : 'false'
<add> })
<ide> });
<ide>
<ide> if (shouldWrite) {
<del> expectErr =
<del> `${section.toUpperCase()} ${child.pid}: this { is: 'a' } /debugging/\n${
<del> section.toUpperCase()} ${child.pid}: num=1 str=a obj={"foo":"bar"}\n`;
<add> if (forceColors) {
<add> const { colors, styles } = util.inspect;
<add> const addCodes = (arr) => [`\x1B[${arr[0]}m`, `\x1B[${arr[1]}m`];
<add> const num = addCodes(colors[styles.number]);
<add> const str = addCodes(colors[styles.string]);
<add> const regexp = addCodes(colors[styles.regexp]);
<add> const start = `${section.toUpperCase()} ${num[0]}${child.pid}${num[1]}`;
<add> const debugging = `${regexp[0]}/debugging/${regexp[1]}`;
<add> expectErr =
<add> `${start}: this { is: ${str[0]}'a'${str[1]} } ${debugging}\n` +
<add> `${start}: num=1 str=a obj={"foo":"bar"}\n`;
<add> } else {
<add> const start = `${section.toUpperCase()} ${child.pid}`;
<add> expectErr =
<add> `${start}: this { is: 'a' } /debugging/\n` +
<add> `${start}: num=1 str=a obj={"foo":"bar"}\n`;
<add> }
<ide> }
<ide>
<ide> let err = '';
<ide> function test(environ, shouldWrite, section) {
<ide> assert(!c);
<ide> assert.strictEqual(err, expectErr);
<ide> assert.strictEqual(out, expectOut);
<add> // Run the test again, this time with colors enabled.
<add> if (!forceColors) {
<add> test(environ, shouldWrite, section, true);
<add> }
<ide> }));
<ide> }
<ide>
<ide>
<ide> function child(section) {
<del> const util = require('util');
<add> const tty = require('tty');
<add> // Make sure we check for colors, regardless of the stream's default.
<add> Object.defineProperty(process.stderr, 'hasColors', {
<add> value: tty.WriteStream.prototype.hasColors
<add> });
<ide> const debug = util.debuglog(section);
<ide> debug('this', { is: 'a' }, /debugging/);
<ide> debug('num=%d str=%s obj=%j', 1, 'a', { foo: 'bar' }); | 2 |
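For reference, a small usage sketch (the section name `demo` and the PID in the sample output are illustrative):

```js
// Run as: NODE_DEBUG=demo node app.js
// When stderr is a color-capable TTY, inspected values are now colorized.
const util = require('util');
const debug = util.debuglog('demo');

debug('config loaded: %O', { retries: 3, host: 'example.test' });
// DEMO 12345: config loaded: { retries: 3, host: 'example.test' }
```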
Javascript | Javascript | fix tests when spaces are in path | 5ad93ab573dbe9555db0c1b9ab26de6672e7cb07 | <ide><path>test/simple/test-cli-eval.js
<ide> assert = require('assert');
<ide> child = require('child_process');
<ide>
<del>nodejs = process.execPath;
<add>nodejs = '"' + process.execPath + '"';
<ide>
<ide> if (module.parent) {
<ide> // signal we've been loaded as a module
<ide><path>test/simple/test-error-reporting.js
<ide> var path = require('path');
<ide> var exits = 0;
<ide>
<ide> function errExec(script, callback) {
<del> var cmd = process.argv[0] + ' ' + path.join(common.fixturesDir, script);
<add> var cmd = '"' + process.argv[0] + '" "' + path.join(common.fixturesDir, script) + '"';
<ide> return exec(cmd, function(err, stdout, stderr) {
<ide> // There was some error
<ide> assert.ok(err);
<ide><path>test/simple/test-http-curl-chunk-problem.js
<ide> function maybeMakeRequest() {
<ide> }
<ide>
<ide>
<del>cp.exec('dd if=/dev/zero of=' + filename + ' bs=1024 count=10240',
<add>cp.exec('dd if=/dev/zero of="' + filename + '" bs=1024 count=10240',
<ide> function(err, stdout, stderr) {
<ide> if (err) throw err;
<ide> maybeMakeRequest();
<ide><path>test/simple/test-pipe-head.js
<ide> var join = require('path').join;
<ide> var nodePath = process.argv[0];
<ide> var script = join(common.fixturesDir, 'print-10-lines.js');
<ide>
<del>var cmd = nodePath + ' ' + script + ' | head -2';
<add>var cmd = '"' + nodePath + '" "' + script + '" | head -2';
<ide>
<ide> var finished = false;
<ide>
<ide><path>test/simple/test-stdin-from-file.js
<ide> var fs = require('fs');
<ide> var stdoutScript = join(common.fixturesDir, 'echo.js');
<ide> var tmpFile = join(common.fixturesDir, 'stdin.txt');
<ide>
<del>var cmd = process.argv[0] + ' ' + stdoutScript + ' < ' + tmpFile;
<add>var cmd = '"' + process.argv[0] + '" "' + stdoutScript + '" < "' + tmpFile + '"';
<ide>
<ide> var string = 'abc\nümlaut.\nsomething else\n' +
<ide> '南越国是前203年至前111年存在于岭南地区的一个国家,国都位于番禺,' +
<ide><path>test/simple/test-stdout-to-file.js
<ide> var scriptBuffer = path.join(common.fixturesDir, 'print-chars-from-buffer.js');
<ide> var tmpFile = path.join(common.fixturesDir, 'stdout.txt');
<ide>
<ide> function test(size, useBuffer, cb) {
<del> var cmd = process.argv[0] +
<add> var cmd = '"' + process.argv[0] + '"' +
<ide> ' ' +
<del> (useBuffer ? scriptBuffer : scriptString) +
<add> '"' + (useBuffer ? scriptBuffer : scriptString) + '"' +
<ide> ' ' +
<ide> size +
<ide> ' > ' +
<del> tmpFile;
<add> '"' + tmpFile + '"';
<ide>
<ide> try {
<ide> fs.unlinkSync(tmpFile); | 6 |
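The quoting pattern applied throughout these tests, shown in isolation (the script path is hypothetical):

```js
const { exec } = require('child_process');

// Without the quotes, a path such as '/tmp/my scripts/echo.js' would be
// split by the shell into two arguments and the command would fail.
const script = '/tmp/my scripts/echo.js';
exec('"' + process.execPath + '" "' + script + '"', (err, stdout) => {
  if (err) throw err;
  console.log(stdout);
});
```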
Javascript | Javascript | check typeof value | e549660ade46b14a73480859057759d41f7cf5eb | <ide><path>lib/EnvironmentPlugin.js
<ide> class EnvironmentPlugin {
<ide> });
<ide> }
<ide>
<del> defs[`process.env.${key}`] = value === undefined ? "undefined" : JSON.stringify(value);
<add> defs[`process.env.${key}`] = typeof value === undefined ? "undefined" : JSON.stringify(value);
<ide>
<ide> return defs;
<ide> }, {}); | 1 |
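The stringification matters because defined values are spliced into bundles as raw source text. Per webpack's documented equivalence between the two plugins, roughly:

```js
const webpack = require('webpack');

new webpack.EnvironmentPlugin(['NODE_ENV']);
// behaves approximately like:
new webpack.DefinePlugin({
  // Without JSON.stringify, NODE_ENV would be emitted as a bare
  // identifier instead of a string literal.
  'process.env.NODE_ENV': JSON.stringify(process.env.NODE_ENV)
});
```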
Python | Python | allow string for ex_boot_disk on create_node | 8664118c877a43f1434a576bd46223707589b078 | <ide><path>libcloud/compute/drivers/gce.py
<ide> def create_node(self, name, size, image, location=None,
<ide> :type ex_metadata: ``dict`` or ``None``
<ide>
<ide> :keyword ex_boot_disk: The boot disk to attach to the instance.
<del> :type ex_boot_disk: :class:`StorageVolume` or ``str``
<add> :type ex_boot_disk: :class:`StorageVolume` or ``str`` or ``None``
<ide>
<ide> :keyword use_existing_disk: If True and if an existing disk with the
<ide> same name/location is found, use that
<ide> def create_node(self, name, size, image, location=None,
<ide> used. If 'None', then no external address will
<ide> be used. To use an existing static IP address,
<ide> a GCEAddress object should be passed in.
<del> :type external_ip: :class:`GCEAddress` or ``str`` or None
<add> :type external_ip: :class:`GCEAddress` or ``str`` or ``None``
<ide>
<ide> :keyword ex_disk_type: Specify a pd-standard (default) disk or pd-ssd
<ide> for an SSD disk.
<ide> def create_node(self, name, size, image, location=None,
<ide> image = self.ex_get_image(image)
<ide> if not hasattr(ex_disk_type, 'name'):
<ide> ex_disk_type = self.ex_get_disktype(ex_disk_type, zone=location)
<add> if ex_boot_disk and not hasattr(ex_boot_disk, 'name'):
<add> ex_boot_disk = self.ex_get_disk(ex_boot_disk, zone=location)
<ide>
<ide> # Use disks[].initializeParams to auto-create the boot disk
<ide> if not ex_disks_gce_struct and not ex_boot_disk:
<ide> def _create_node_req(self, name, size, image, location, network=None,
<ide> is made to ensure proper formatting of
<ide> the disks[] structure. Using this
<ide> structure obviates the need of using
<del> other disk params like 'ex_boot_disk',
<add> other disk params like 'boot_disk',
<ide> etc. See the GCE docs for specific
<ide> details.
<ide> :type ex_disks_gce_struct: ``list`` or ``None``
<ide> def _multi_create_node(self, status, node_attrs):
<ide> request, node_data = self._create_node_req(
<ide> status['name'], node_attrs['size'], node_attrs['image'],
<ide> node_attrs['location'], node_attrs['network'], node_attrs['tags'],
<del> node_attrs['metadata'], boot_disk=status['disk'],
<add> node_attrs['metadata'],
<ide> external_ip=node_attrs['external_ip'],
<ide> ex_service_accounts=node_attrs['ex_service_accounts'],
<ide> description=node_attrs['description'], | 1 |
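The underlying pattern, accepting either a rich object or its name and resolving lazily, in a hypothetical JavaScript analogue of the driver's normalization step:

```javascript
function resolveBootDisk(bootDisk, lookupDisk) {
  // e.g. lookupDisk could wrap driver.ex_get_disk(name, zone)
  if (typeof bootDisk === 'string') {
    return lookupDisk(bootDisk);
  }
  return bootDisk; // already a volume object, or null/undefined
}
```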
PHP | PHP | fix lint errors | 5cc635f95b131b9f36633a4e62e2a81d120bc92b | <ide><path>src/Http/ServerRequest.php
<ide> public function __set($name, $value)
<ide> "Setting {$name} as a property will be removed in 4.0.0. " .
<ide> "Use {$method} instead."
<ide> );
<add>
<ide> return $this->{$name} = $value;
<ide> }
<ide> throw new BadMethodCallException("Cannot set {$name} it is not a known property.");
<ide> public function &__get($name)
<ide> "Accessing `{$name}` as a property will be removed in 4.0.0. " .
<ide> "Use request->{$method} instead."
<ide> );
<add>
<ide> return $this->{$name};
<ide> }
<ide>
<ide> public function __isset($name)
<ide> "Accessing {$name} as a property will be removed in 4.0.0. " .
<ide> "Use {$method} instead."
<ide> );
<add>
<ide> return isset($this->{$name});
<ide> }
<ide>
<ide><path>tests/TestCase/Auth/BasicAuthenticateTest.php
<ide> public function testAuthenticateUsernameZero()
<ide> $User->updateAll(['username' => '0'], ['username' => 'mariano']);
<ide>
<ide> $request = new ServerRequest([
<del> 'url' => 'posts/index',
<add> 'url' => 'posts/index',
<ide> 'data' => [
<ide> 'User' => [
<ide> 'user' => '0',
<ide><path>tests/TestCase/Http/ServerRequestTest.php
<ide> public function testAddDetector()
<ide> ServerRequest::addDetector('callme', function ($request) {
<ide> return $request->getAttribute('return');
<ide> });
<del> $request = $request->withAttribute('return', true);;
<add> $request = $request->withAttribute('return', true);
<ide> $request->clearDetectorCache();
<ide> $this->assertTrue($request->isCallMe());
<ide> | 3 |
Javascript | Javascript | add hook types for templates | 84c915b1918339aa6c95a037e3e11c43fc1155d7 | <ide><path>lib/AmdMainTemplatePlugin.js
<ide> class AmdMainTemplatePlugin {
<ide> }
<ide> };
<ide>
<del> for (const template of [mainTemplate, chunkTemplate]) {
<del> template.hooks.renderWithEntry.tap(
<del> "AmdMainTemplatePlugin",
<del> onRenderWithEntry
<del> );
<del> }
<add> mainTemplate.hooks.renderWithEntry.tap(
<add> "AmdMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<add>
<add> chunkTemplate.hooks.renderWithEntry.tap(
<add> "AmdMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<ide>
<ide> mainTemplate.hooks.hash.tap("AmdMainTemplatePlugin", hash => {
<ide> hash.update("exports amd");
<ide><path>lib/ChunkTemplate.js
<ide> module.exports = class ChunkTemplate {
<ide> "moduleTemplate",
<ide> "renderContext"
<ide> ]),
<add> /** @type {SyncWaterfallHook<Source, Chunk>} */
<ide> renderWithEntry: new SyncWaterfallHook(["source", "chunk"]),
<add> /** @type {SyncHook<Hash>} */
<ide> hash: new SyncHook(["hash"]),
<add> /** @type {SyncHook<Hash, Chunk>} */
<ide> hashForChunk: new SyncHook(["hash", "chunk"])
<ide> });
<ide> }
<ide><path>lib/ExportPropertyMainTemplatePlugin.js
<ide> class ExportPropertyMainTemplatePlugin {
<ide> const { mainTemplate, chunkTemplate } = compilation;
<ide>
<ide> const onRenderWithEntry = (source, chunk, hash) => {
<del> const postfix = `${accessorToObjectAccess([].concat(this.property))}`;
<add> const postfix = accessorToObjectAccess([].concat(this.property));
<ide> return new ConcatSource(source, postfix);
<ide> };
<ide>
<del> for (const template of [mainTemplate, chunkTemplate]) {
<del> template.hooks.renderWithEntry.tap(
<del> "ExportPropertyMainTemplatePlugin",
<del> onRenderWithEntry
<del> );
<del> }
<add> mainTemplate.hooks.renderWithEntry.tap(
<add> "ExportPropertyMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<add>
<add> chunkTemplate.hooks.renderWithEntry.tap(
<add> "ExportPropertyMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<ide>
<ide> mainTemplate.hooks.hash.tap("ExportPropertyMainTemplatePlugin", hash => {
<ide> hash.update("export property");
<ide><path>lib/HotUpdateChunkTemplate.js
<ide> const Template = require("./Template");
<ide> /** @typedef {import("webpack-sources").Source} Source */
<ide> /** @typedef {import("./ModuleTemplate")} ModuleTemplate */
<ide> /** @typedef {import("./ModuleTemplate").RenderContext} RenderContext */
<add>/** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> module.exports = class HotUpdateChunkTemplate {
<ide> constructor(outputOptions) {
<ide> module.exports = class HotUpdateChunkTemplate {
<ide> "renderContext",
<ide> "hash"
<ide> ]),
<add> /** @type {SyncHook<Hash>} */
<ide> hash: new SyncHook(["hash"])
<ide> });
<ide> }
<ide> module.exports = class HotUpdateChunkTemplate {
<ide> return source;
<ide> }
<ide>
<add> /**
<add> * Updates hash with information from this template
<add> * @param {Hash} hash the hash to update
<add> * @returns {void}
<add> */
<ide> updateHash(hash) {
<ide> hash.update("HotUpdateChunkTemplate");
<ide> hash.update("1");
<ide><path>lib/MainTemplate.js
<ide> module.exports = class MainTemplate {
<ide> ]),
<ide> /** @type {SyncWaterfallHook<string, RenderBootstrapContext>} */
<ide> bootstrap: new SyncWaterfallHook(["source", "renderContext"]),
<add> /** @type {SyncWaterfallHook<string, Chunk, string>} */
<ide> localVars: new SyncWaterfallHook(["source", "chunk", "hash"]),
<ide> /** @type {SyncWaterfallHook<string, RenderBootstrapContext>} */
<ide> require: new SyncWaterfallHook(["source", "renderContext"]),
<ide> module.exports = class MainTemplate {
<ide> "moduleTemplate",
<ide> "renderContext"
<ide> ]),
<add> /** @type {SyncWaterfallHook<Source, Chunk, string>} */
<ide> renderWithEntry: new SyncWaterfallHook(["source", "chunk", "hash"]),
<add> /** @type {SyncWaterfallHook<string, Chunk, string, number|string>} */
<ide> moduleRequire: new SyncWaterfallHook([
<ide> "source",
<ide> "chunk",
<ide> module.exports = class MainTemplate {
<ide> "expressions",
<ide> "renderContext"
<ide> ]),
<add> /** @type {SyncWaterfallHook<string, number>} */
<ide> currentHash: new SyncWaterfallHook(["source", "requestedLength"]),
<add> /** @type {SyncWaterfallHook<string, object>} */
<ide> assetPath: new SyncWaterfallHook(["path", "options"]),
<add> /** @type {SyncHook<Hash>} */
<ide> hash: new SyncHook(["hash"]),
<add> /** @type {SyncHook<Hash, Chunk>} */
<ide> hashForChunk: new SyncHook(["hash", "chunk"])
<ide> });
<ide> this.hooks.startup.tap(
<ide><path>lib/ModuleTemplate.js
<ide> const { SyncWaterfallHook, SyncHook } = require("tapable");
<ide> /** @typedef {import("./Module")} Module */
<ide> /** @typedef {import("./ModuleGraph")} ModuleGraph */
<ide> /** @typedef {import("./RuntimeTemplate")} RuntimeTemplate */
<add>/** @typedef {import("./util/createHash").Hash} Hash */
<ide>
<ide> /**
<ide> * @typedef {Object} RenderContext
<ide> module.exports = class ModuleTemplate {
<ide> render: new SyncWaterfallHook(["source", "module", "context"]),
<ide> /** @type {SyncWaterfallHook<Source, Module, RenderContext>} */
<ide> package: new SyncWaterfallHook(["source", "module", "context"]),
<add> /** @type {SyncHook<Hash>} */
<ide> hash: new SyncHook(["hash"])
<ide> });
<ide> }
<ide> module.exports = class ModuleTemplate {
<ide> }
<ide> }
<ide>
<add> /**
<add> * Updates hash with information from this template
<add> * @param {Hash} hash the hash to update
<add> * @returns {void}
<add> */
<ide> updateHash(hash) {
<ide> hash.update("1");
<ide> this.hooks.hash.call(hash);
<ide><path>lib/SetVarMainTemplatePlugin.js
<ide> class SetVarMainTemplatePlugin {
<ide> }
<ide> };
<ide>
<del> for (const template of [mainTemplate, chunkTemplate]) {
<del> template.hooks.renderWithEntry.tap(
<del> "SetVarMainTemplatePlugin",
<del> onRenderWithEntry
<del> );
<del> }
<add> mainTemplate.hooks.renderWithEntry.tap(
<add> "SetVarMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<add>
<add> chunkTemplate.hooks.renderWithEntry.tap(
<add> "SetVarMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<ide>
<ide> mainTemplate.hooks.hash.tap("SetVarMainTemplatePlugin", hash => {
<ide> hash.update("set var");
<ide><path>lib/UmdMainTemplatePlugin.js
<ide> class UmdMainTemplatePlugin {
<ide> );
<ide> };
<ide>
<del> for (const template of [mainTemplate, chunkTemplate]) {
<del> template.hooks.renderWithEntry.tap(
<del> "UmdMainTemplatePlugin",
<del> onRenderWithEntry
<del> );
<del> }
<add> mainTemplate.hooks.renderWithEntry.tap(
<add> "UmdMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<add>
<add> chunkTemplate.hooks.renderWithEntry.tap(
<add> "UmdMainTemplatePlugin",
<add> onRenderWithEntry
<add> );
<ide>
<ide> mainTemplate.hooks.hash.tap("UmdMainTemplatePlugin", hash => {
<ide> hash.update("umd"); | 8 |
Python | Python | add extended info | 8e92419f3b300323dc5916941bc3379a280813bc | <ide><path>glances/core/glances_processes.py
<ide> def __get_process_stats(self, proc,
<ide> procstat = proc.as_dict(attrs=['pid'])
<ide>
<ide> if mandatory_stats:
<add> procstat['mandatory_stats'] = True
<add>
<ide> # Process CPU, MEM percent and name
<ide> procstat.update(proc.as_dict(attrs=['cpu_percent', 'memory_percent', 'name'], ad_value=''))
<ide>
<ide> def __get_process_stats(self, proc,
<ide> procstat['io_counters'] += [io_tag]
<ide>
<ide> if standard_stats:
<add> procstat['standard_stats'] = True
<add>
<ide> # Process username (cached with internal cache)
<ide> try:
<ide> self.username_cache[procstat['pid']]
<ide> def __get_process_stats(self, proc,
<ide> procstat['status'] = str(procstat['status'])[:1].upper()
<ide>
<ide> if extended_stats:
<add> procstat['extended_stats'] = True
<add>
<ide> # CPU affinity
<ide> # Memory extended
<ide> # Number of context switch
<ide> def __get_process_stats(self, proc,
<ide> procstat['tcp'] = None
<ide> procstat['udp'] = None
<ide>
<add> # IO Nice
<add> # http://pythonhosted.org/psutil/#psutil.Process.ionice
<add> if is_linux or is_windows:
<add> procstat.update(proc.as_dict(attrs=['ionice']))
<add>
<ide> # !!! Only for dev
<ide> logger.debug("EXTENDED STATS: %s" % procstat)
<ide>
<ide><path>glances/plugins/glances_processlist.py
<ide> def msg_curse(self, args=None):
<ide> tag_proc_time = True
<ide>
<ide> # Loop over processes (sorted by the sort key previously compute)
<add> first = True
<ide> for p in self.sortlist(process_sort_key):
<ide> ret.append(self.curse_new_line())
<ide> # CPU
<ide> def msg_curse(self, args=None):
<ide> except UnicodeEncodeError:
<ide> ret.append(self.curse_add_line("", splittable=True))
<ide>
<add> # Add extended stats but only for the top processes
<add> # !!! CPU consumption !!!!
<add> if first:
<add> # Left padding
<add> xpad = ' ' * 13
<add> # First line is CPU affinity
<add> ret.append(self.curse_new_line())
<add> msg = xpad + _('CPU affinity: ') + ','.join(str(i) for i in p['cpu_affinity'])
<add> ret.append(self.curse_add_line(msg))
<add> # Second line is memory info
<add> ret.append(self.curse_new_line())
<add> msg = xpad + _('Memory info: ')
<add> msg += _('swap ') + self.auto_unit(p['memory_swap'], low_precision=False)
<add> for k, v in p['memory_info_ex']._asdict().items():
<add> # Ignore rss and vms (already displayed)
<add> if k not in ['rss', 'vms']:
<add> msg += ', ' + k + ' ' + self.auto_unit(v, low_precision=False)
<add> ret.append(self.curse_add_line(msg))
<add> # End of extended stats
<add> first = False
<add>
<ide> # Return the message with decoration
<ide> return ret
<ide> | 2 |
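
For context, a minimal standalone sketch of the psutil calls this patch leans on — assuming a recent psutil on Linux, where every attribute named below is available:

```python
import psutil

# Fetch the extended per-process fields the plugin displays. as_dict() calls
# each named accessor, and ad_value fills in attributes that raise
# (e.g. AccessDenied), mirroring how the plugin tolerates missing data.
proc = psutil.Process()  # the current process, purely for illustration
stats = proc.as_dict(
    attrs=["pid", "name", "cpu_affinity", "num_ctx_switches", "ionice"],
    ad_value=None,
)
print(stats)
```
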
PHP | PHP | handle model instance in authorize middleware | 89b296b58b1fd8cd4a1c0e3993f091b5322caaf8 | <ide><path>src/Illuminate/Auth/Middleware/Authorize.php
<ide>
<ide> use Closure;
<ide> use Illuminate\Contracts\Auth\Access\Gate;
<add>use Illuminate\Database\Eloquent\Model;
<ide> use Illuminate\Contracts\Auth\Factory as Auth;
<ide>
<ide> class Authorize
<ide> protected function getGateArguments($request, $models)
<ide> }
<ide>
<ide> return collect($models)->map(function ($model) use ($request) {
<del> return $this->getModel($request, $model);
<add> return $model instanceof Model
<add>                ? $model
<add>                : $this->getModel($request, $model);
<ide> })->all();
<ide> }
<ide>
<ide><path>tests/Auth/AuthorizeMiddlewareTest.php
<ide> public function testModelAuthorized()
<ide> $this->assertEquals($response->content(), 'success');
<ide> }
<ide>
<add> public function testModelInstanceAsParameter()
<add> {
<add> $instance = m::mock(\Illuminate\Database\Eloquent\Model::class);
<add>
<add> $this->gate()->define('success', function ($user, $model) use ($instance) {
<add> $this->assertSame($model, $instance);
<add>
<add> return true;
<add> });
<add>
<add> $request = m::mock(Request::class);
<add>
<add> $nextParam = null;
<add>
<add> $next = function ($param) use (&$nextParam) {
<add> $nextParam = $param;
<add> };
<add>
<add> (new Authorize($this->container->make(Auth::class), $this->gate()))
<add> ->handle($request, $next, 'success', $instance);
<add> }
<add>
<ide> /**
<ide> * Get the Gate instance from the container.
<ide> * | 2 |
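
The heart of this change, sketched with toy Python classes (hypothetical names, not Laravel's): arguments that are already model instances pass through to the gate untouched, while anything else is resolved from the route parameters.

```python
# Toy sketch of the middleware's argument dispatch (hypothetical names).
class Model:
    pass

def get_gate_argument(route_params, arg):
    if isinstance(arg, Model):
        return arg                # already an instance: pass it through
    return route_params[arg]      # otherwise resolve by route parameter

params = {"post": Model()}
instance = Model()
assert get_gate_argument(params, "post") is params["post"]
assert get_gate_argument(params, instance) is instance
```
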
Mixed | Javascript | release notes for 1.0.0rc5 reality-distortion | e9ccec76a6694e2da4af3efbdad93aa6f01113f2 | <ide><path>CHANGELOG.md
<del><a name="v1.0.0rc4"></a>
<del># v1.0.0rc4 insomnia-induction (2012-04-05)
<add><a name="1.0.0rc5"></a>
<add># 1.0.0rc5 reality-distortion (2012-04-12)
<add>
<add>
<add>## Bug Fixes
<add>
<add>- **$location:** properly rewrite urls in html5 mode with base url set + don't rewrite links to
<add> different base paths
<add> ([6d7e7fde](https://github.com/angular/angular.js/commit/6d7e7fdea6c3d6551ff40c150aa42e1375d2cb5f),
<add> [0a5050eb](https://github.com/angular/angular.js/commit/0a5050eb3c1f1ed84134f23a44b97a7261114060))
<add>- **e2eRunner:** $browser.location should delegate to the app's $location
<add> ([df72852f](https://github.com/angular/angular.js/commit/df72852f3496d7640bb4f70837338e464b7ed69f))
<add>- **input.radio:** support 2-way binding in a repeater
<add> ([93d62860](https://github.com/angular/angular.js/commit/93d62860e988a09fb64e594f50f6cd55a1fc5748),
<add> [#869](https://github.com/angular/angular.js/issues/869))
<add>- **ngBindHtml:** clear contents when model is falsy
<add> ([10daefc6](https://github.com/angular/angular.js/commit/10daefc6f466a21d9418437666461c80cf24fcfe),
<add> [#864](https://github.com/angular/angular.js/issues/864))
<add>- lots of doc fixes
<add>
<add>
<add>## Features
<add>
<add>- **$http:** expose the defaults config as $http.defaults
<add> ([dceafd32](https://github.com/angular/angular.js/commit/dceafd32ee140c8af5c7a0ca6cb808395fffeed3))
<add>- **docs:** steps 0-4 of the Tutorial have been updated and improved
<add>
<add>
<add>## Breaking Changes
<add>
<add>- `ng-ext-link` directive was removed because it's unnecessary
<add> ([6d7e7fde](https://github.com/angular/angular.js/commit/6d7e7fdea6c3d6551ff40c150aa42e1375d2cb5f))
<add>
<add> apps that relied on ng-ext-link should simply replace it with `target="_self"`
<add>
<add>- `$browser.addCss` was removed - it was never meant to be a public api
<add> ([13d5528a](https://github.com/angular/angular.js/commit/13d5528a5f5a2f0feee5c742788a914d2371841e))
<add>
<add> apps the depend on this functionality should write a simple utility function specific to the app
<add> (see this diff for hints).
<add>
<add>- `$browser.addJs` method was removed - it was never meant to be a public api
<add> ([fbaa1968](https://github.com/angular/angular.js/commit/fbaa1968b7c596ccb63ea8b4be1d3bd92eda50d8))
<add>
<add> apps that depended on this functionality should either use many of the existing script loaders or
<add> create a simple helper method specific to the app.
<add>
<add>- `$sanitize` service, `ngBindHtml` directive and `linky` filter were moved to the `ngSanitize` module
<add> ([5bcd7198](https://github.com/angular/angular.js/commit/5bcd7198664dca2bf85ddf8b3a89f417cd4e4796))
<add>
<add> apps that depend on any of these will need to load `angular-sanitize.js` and include `ngSanitize`
<add> in their dependency list: `var myApp = angular.module('myApp', ['ngSanitize']);`
<add>
<add>
<add>
<add>
<add>
<add>
<add><a name="1.0.0rc4"></a>
<add># 1.0.0rc4 insomnia-induction (2012-04-05)
<ide>
<ide>
<ide> ## Bug Fixes
<ide> We removed two useless features:
<ide>
<ide>
<ide>
<del><a name="v1.0.0rc3"></a>
<del># v1.0.0rc3 barefoot-telepathy (2012-03-29)
<add><a name="1.0.0rc3"></a>
<add># 1.0.0rc3 barefoot-telepathy (2012-03-29)
<ide>
<ide>
<ide> ## Bug Fixes
<ide> We removed two useless features:
<ide>
<ide>
<ide>
<del><a name="v1.0.0rc2"></a>
<del># v1.0.0rc2 silence-absorption (2012-03-20)
<add><a name="1.0.0rc2"></a>
<add># 1.0.0rc2 silence-absorption (2012-03-20)
<ide>
<ide> ## Features
<ide>
<ide><path>changelog.js
<ide> var parseRawCommit = function(raw) {
<ide> lines.forEach(function(line) {
<ide> match = line.match(/Closes\s#(\d+)/);
<ide> if (match) msg.closes.push(parseInt(match[1]));
<del>
<del> match = line.match(/Breaks\s(.*)/);
<del> if (match) msg.breaks.push(match[1]);
<ide> });
<add>
<add> match = raw.match(/BREAKING CHANGE:([\s\S]*)/);
<add> if (match) {
<add> msg.breaks.push(match[1]);
<add> }
<add>
<ide>
<ide> msg.body = lines.join('\n');
<ide> match = msg.subject.match(/^(.*)\((.*)\)\:\s(.*)$/); | 2 |
Text | Text | add note about clienterror writable handling | 5bb4d01fbe1242c89eda198b92c0899da3736dbd | <ide><path>doc/api/http.md
<ide> ensure the response is a properly formatted HTTP response message.
<ide> correctly;
<ide> * `rawPacket`: the raw packet of current request.
<ide>
<add>In some cases, the client has already received the response and/or the socket
<add>has already been destroyed, as in the case of `ECONNRESET` errors. Before
<add>trying to send data to the socket, it is better to check that it is still
<add>writable.
<add>
<add>```js
<add>server.on('clientError', (err, socket) => {
<add> if (err.code === 'ECONNRESET' || !socket.writable) {
<add> return;
<add> }
<add>
<add> socket.end('HTTP/1.1 400 Bad Request\r\n\r\n');
<add>});
<add>```
<add>
<ide> ### Event: `'close'`
<ide> <!-- YAML
<ide> added: v0.1.4 | 1 |
PHP | PHP | list work with middleware | cd46f5fcf99d448d96a9066aff39cb69e20c0c62 | <ide><path>src/Illuminate/Foundation/Console/RouteListCommand.php
<ide> class RouteListCommand extends Command {
<ide> * @var array
<ide> */
<ide> protected $headers = array(
<del> 'Domain', 'URI', 'Name', 'Action', 'Before Filters', 'After Filters'
<add> 'Domain', 'URI', 'Name', 'Action', 'Middleware'
<ide> );
<ide>
<ide> /**
<ide> protected function getRouteInformation(Route $route)
<ide> 'uri' => $uri,
<ide> 'name' => $route->getName(),
<ide> 'action' => $route->getActionName(),
<del> 'before' => $this->getBeforeFilters($route),
<del> 'after' => $this->getAfterFilters($route)
<add> 'middleware' => $this->getMiddleware($route)
<ide> ));
<ide> }
<ide>
<ide> protected function displayRoutes(array $routes)
<ide> * @param \Illuminate\Routing\Route $route
<ide> * @return string
<ide> */
<del> protected function getBeforeFilters($route)
<add> protected function getMiddleware($route)
<ide> {
<del> $before = array_keys($route->beforeFilters());
<add> $middleware = array_values($route->middleware());
<ide>
<del> $before = array_unique(array_merge($before, $this->getPatternFilters($route)));
<add> $middleware = array_unique(array_merge($middleware, $this->getPatternFilters($route)));
<ide>
<del> return implode(', ', $before);
<add> return implode(', ', $middleware);
<ide> }
<ide>
<ide> /**
<ide> protected function getMethodPatterns($uri, $method)
<ide> return $this->router->findPatternFilters(Request::create($uri, $method));
<ide> }
<ide>
<del> /**
<del> * Get after filters
<del> *
<del> * @param \Illuminate\Routing\Route $route
<del> * @return string
<del> */
<del> protected function getAfterFilters($route)
<del> {
<del> return implode(', ', array_keys($route->afterFilters()));
<del> }
<del>
<ide> /**
<ide> * Filter the route by URI and / or name.
<ide> * | 1 |
Javascript | Javascript | reduce benchmark cases in test-benchmark-buffer | 5f720bebe799bdcc365a8908f77d87d60f7d34f9 | <ide><path>test/sequential/test-benchmark-buffer.js
<ide> runBenchmark('buffers',
<ide> [
<ide> 'aligned=true',
<ide> 'args=1',
<add> 'buffer=fast',
<ide> 'encoding=utf8',
<ide> 'len=2',
<ide> 'method=', | 1 |
Javascript | Javascript | remove react.autobind from examples | 5db3a0e481d0b439ba87321a19d9b7b7b7e1ef15 | <ide><path>docs/_js/examples/markdown.js
<ide> var MarkdownEditor = React.createClass({\n\
<ide> getInitialState: function() {\n\
<ide> return {value: 'Type some *markdown* here!'};\n\
<ide> },\n\
<del> handleInput: React.autoBind(function() {\n\
<add> handleInput: function() {\n\
<ide> this.setState({value: this.refs.textarea.getDOMNode().value});\n\
<del> }),\n\
<add> },\n\
<ide> render: function() {\n\
<ide> return (\n\
<ide> <div className=\"MarkdownEditor\">\n\
<ide><path>docs/_js/examples/timer.js
<ide> var Timer = React.createClass({\n\
<ide> getInitialState: function() {\n\
<ide> return {secondsElapsed: 0};\n\
<ide> },\n\
<del> tick: React.autoBind(function() {\n\
<add> tick: function() {\n\
<ide> this.setState({secondsElapsed: this.state.secondsElapsed + 1});\n\
<del> }),\n\
<add> },\n\
<ide> componentDidMount: function() {\n\
<ide> setInterval(this.tick, 1000);\n\
<ide> },\n\ | 2 |
Ruby | Ruby | fix random ci fail due to non-deterministic order | 419cda7cd240fbc226fc56ed06a71cc3a600fd45 | <ide><path>activerecord/test/cases/batches_test.rb
<ide> def test_find_in_batches_should_not_error_by_default
<ide> end
<ide>
<ide> def test_find_in_batches_should_not_ignore_the_default_scope_if_it_is_other_then_order
<del> special_posts_ids = SpecialPostWithDefaultScope.all.map(&:id).sort
<add> default_scope = SpecialPostWithDefaultScope.all
<ide> posts = []
<ide> SpecialPostWithDefaultScope.find_in_batches do |batch|
<ide> posts.concat(batch)
<ide> end
<del> assert_equal special_posts_ids, posts.map(&:id)
<add> assert_equal default_scope.pluck(:id).sort, posts.map(&:id).sort
<ide> end
<ide>
<ide> def test_find_in_batches_should_not_modify_passed_options
<ide> def not_a_post.id
<ide> end
<ide>
<ide> def test_in_batches_should_not_ignore_default_scope_without_order_statements
<del> special_posts_ids = SpecialPostWithDefaultScope.all.map(&:id).sort
<add> default_scope = SpecialPostWithDefaultScope.all
<ide> posts = []
<ide> SpecialPostWithDefaultScope.in_batches do |relation|
<ide> posts.concat(relation)
<ide> end
<del> assert_equal special_posts_ids, posts.map(&:id)
<add> assert_equal default_scope.pluck(:id).sort, posts.map(&:id).sort
<ide> end
<ide>
<ide> def test_in_batches_should_not_modify_passed_options | 1 |
Javascript | Javascript | clarify the purpose of ngform | 549166740b944b2af45632bd04d7f360a24b2784 | <ide><path>src/ng/directive/form.js
<ide> function FormController(element, attrs, $scope, $animate) {
<ide> * does not allow nesting of form elements. It is useful to nest forms, for example if the validity of a
<ide> * sub-group of controls needs to be determined.
<ide> *
<add> * Note: the purpose of `ngForm` is to group controls,
<add> * but not to be a replacement for the `<form>` tag with all of its capabilities
<add> * (e.g. posting to the server, ...).
<add> *
<ide> * @param {string=} ngForm|name Name of the form. If specified, the form controller will be published into
<ide> * related scope, under this name.
<ide> * | 1 |
Text | Text | remove personal pronoun usage in errors.md | c37c21b6ef2ef13f45b99e88bf625a452c6c9c0a | <ide><path>doc/api/errors.md
<ide> added: v13.10.0
<ide> -->
<ide>
<ide> The TLS socket must be connected and securely established. Ensure the 'secure'
<del>event is emitted, before you continue.
<add>event is emitted before continuing.
<ide>
<ide> <a id="ERR_TLS_INVALID_PROTOCOL_METHOD"></a>
<ide> ### `ERR_TLS_INVALID_PROTOCOL_METHOD` | 1 |
Javascript | Javascript | fix possible issue reflectnode with vertex shader | a19b279b8236e215c6f31d0b8c2ba18d3348e205 | <ide><path>examples/jsm/nodes/accessors/ReflectNode.js
<ide> */
<ide>
<ide> import { TempNode } from '../core/TempNode.js';
<add>import { PositionNode } from './PositionNode.js';
<add>import { NormalNode } from './NormalNode.js';
<ide>
<ide> function ReflectNode( scope ) {
<ide>
<ide> ReflectNode.prototype.generate = function ( builder, output ) {
<ide>
<ide> case ReflectNode.VECTOR:
<ide>
<del> builder.addNodeCode( 'vec3 reflectVec = inverseTransformDirection( reflect( -normalize( vViewPosition ), normal ), viewMatrix );' );
<add> var viewNormal = new NormalNode().build( builder, 'v3' );
<add> var viewPosition = new PositionNode( PositionNode.VIEW ).build( builder, 'v3' );
<add>
<add> builder.addNodeCode( 'vec3 reflectVec = inverseTransformDirection( reflect( -normalize( ' + viewPosition + ' ), ' + viewNormal + ' ), viewMatrix );' );
<ide>
<ide> result = 'reflectVec';
<ide> | 1 |
Ruby | Ruby | add tests for `current_preventing_writes` | 0919871bda924c750bcf86135fa697021d4c1705 | <ide><path>activerecord/test/cases/base_prevent_writes_test.rb
<ide> class BasePreventWritesTest < ActiveRecord::TestCase
<ide>
<ide> assert_match %r/\AWrite query attempted while in readonly mode: INSERT /, conn2_error.message
<ide> end
<add>
<add> test "current_preventing_writes" do
<add> ActiveRecord::Base.while_preventing_writes do
<add> assert ActiveRecord::Base.current_preventing_writes, "expected connection current_preventing_writes to return true"
<add> end
<add> end
<ide> end
<ide>
<ide> class BasePreventWritesLegacyTest < ActiveRecord::TestCase
<ide> def teardown
<ide> assert_match %r/\AWrite query attempted while in readonly mode: INSERT /, conn2_error.message
<ide> end
<ide> end
<add>
<add> test "current_preventing_writes" do
<add> ActiveRecord::Base.connection_handler.while_preventing_writes do
<add> assert ActiveRecord::Base.current_preventing_writes, "expected connection current_preventing_writes to return true"
<add> end
<add> end
<ide> end
<ide> end | 1 |
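
The semantics under test — a flag that reads true only inside the block and is restored afterwards — follow the usual context-manager pattern; a generic Python sketch, not Rails' implementation:

```python
import contextlib
import threading

_state = threading.local()

def current_preventing_writes():
    return getattr(_state, "preventing", False)

@contextlib.contextmanager
def while_preventing_writes():
    previous = current_preventing_writes()
    _state.preventing = True           # flag is set for the block's duration
    try:
        yield
    finally:
        _state.preventing = previous   # restored on exit, even on error

with while_preventing_writes():
    assert current_preventing_writes()
assert not current_preventing_writes()
```
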
Python | Python | update invalid token case | 1712c00001b57110b13d3e2fa926da0ef8ce3c5a | <ide><path>rest_framework/authentication.py
<ide> def authenticate(self, request):
<ide> return self.authenticate_credentials(token)
<ide>
<ide> def authenticate_credentials(self, key):
<add> model = self.get_model()
<ide> try:
<del> token = self.get_model().objects.select_related('user').get(key=key)
<del> except self.model.DoesNotExist:
<add> token = model.objects.select_related('user').get(key=key)
<add> except model.DoesNotExist:
<ide> raise exceptions.AuthenticationFailed(_('Invalid token.'))
<ide>
<ide> if not token.user.is_active:
<ide><path>tests/test_authentication.py
<ide> def test_post_form_passing_token_auth(self):
<ide> response = self.csrf_client.post('/token/', {'example': 'example'}, HTTP_AUTHORIZATION=auth)
<ide> self.assertEqual(response.status_code, status.HTTP_200_OK)
<ide>
<add> def test_fail_post_form_passing_nonexistent_token_auth(self):
<add> # use a nonexistent token key
<add> auth = 'Token wxyz6789'
<add> response = self.csrf_client.post('/token/', {'example': 'example'}, HTTP_AUTHORIZATION=auth)
<add> self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
<add>
<ide> def test_fail_post_form_passing_invalid_token_auth(self):
<ide> # add an 'invalid' unicode character
<ide> auth = 'Token ' + self.key + "¸" | 2 |
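
Why the patch binds `model = self.get_model()` first: the `DoesNotExist` raised by the query belongs to whatever class `get_model()` returns, so that is the class the `except` must name. A toy, runnable illustration (hypothetical classes, not DRF's):

```python
class DoesNotExistA(Exception):
    pass

class DoesNotExistB(Exception):
    pass

class TokenA:
    DoesNotExist = DoesNotExistA

class TokenB:
    DoesNotExist = DoesNotExistB

class Auth:
    model = TokenA                 # class-level default

    def get_model(self):
        return TokenB              # resolution may pick a different model

    def authenticate(self, found):
        model = self.get_model()
        try:
            if not found:
                raise model.DoesNotExist
        except model.DoesNotExist:     # matches the class actually raised
            return "invalid token"
        return "ok"

assert Auth().authenticate(found=False) == "invalid token"
```

Catching `self.model.DoesNotExist` instead would miss the exception whenever `get_model()` returns a class other than the default.
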
Mixed | Ruby | add reload_association to documentation [ci skip] | f2810f1dab649b2999fa0ad991ddabb416c5be74 | <ide><path>activerecord/lib/active_record/associations.rb
<ide> def association_instance_set(name, association)
<ide> # build_other(attributes={}) | X | | X
<ide> # create_other(attributes={}) | X | | X
<ide> # create_other!(attributes={}) | X | | X
<add> # reload_other | X | X | X
<ide> #
<ide> # === Collection associations (one-to-many / many-to-many)
<ide> # | | | has_many
<ide> def association_instance_set(name, association)
<ide> # others.exists? | X | X | X
<ide> # others.distinct | X | X | X
<ide> # others.reset | X | X | X
<add> # others.reload | X | X | X
<ide> #
<ide> # === Overriding generated methods
<ide> #
<ide> def has_many(name, scope = nil, options = {}, &extension)
<ide> # [create_association!(attributes = {})]
<ide> # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
<ide> # if the record is invalid.
<add> # [reload_association]
<add> # Returns the associated object, forcing a database read.
<ide> #
<ide> # === Example
<ide> #
<ide> def has_many(name, scope = nil, options = {}, &extension)
<ide> # * <tt>Account#build_beneficiary</tt> (similar to <tt>Beneficiary.new("account_id" => id)</tt>)
<ide> # * <tt>Account#create_beneficiary</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save; b</tt>)
<ide> # * <tt>Account#create_beneficiary!</tt> (similar to <tt>b = Beneficiary.new("account_id" => id); b.save!; b</tt>)
<add> # * <tt>Account#reload_beneficiary</tt>
<ide> #
<ide> # === Scopes
<ide> #
<ide> def has_one(name, scope = nil, options = {})
<ide> # [create_association!(attributes = {})]
<ide> # Does the same as <tt>create_association</tt>, but raises ActiveRecord::RecordInvalid
<ide> # if the record is invalid.
<add> # [reload_association]
<add> # Returns the associated object, forcing a database read.
<ide> #
<ide> # === Example
<ide> #
<ide> def has_one(name, scope = nil, options = {})
<ide> # * <tt>Post#build_author</tt> (similar to <tt>post.author = Author.new</tt>)
<ide> # * <tt>Post#create_author</tt> (similar to <tt>post.author = Author.new; post.author.save; post.author</tt>)
<ide> # * <tt>Post#create_author!</tt> (similar to <tt>post.author = Author.new; post.author.save!; post.author</tt>)
<add> # * <tt>Post#reload_author</tt>
<ide> # The declaration can also include an +options+ hash to specialize the behavior of the association.
<ide> #
<ide> # === Scopes
<ide><path>guides/source/association_basics.md
<ide> When you declare a `belongs_to` association, the declaring class automatically g
<ide> * `build_association(attributes = {})`
<ide> * `create_association(attributes = {})`
<ide> * `create_association!(attributes = {})`
<add>* `reload_association`
<ide>
<ide> In all of these methods, `association` is replaced with the symbol passed as the first argument to `belongs_to`. For example, given the declaration:
<ide>
<ide> The `association` method returns the associated object, if any. If no associated
<ide> @author = @book.author
<ide> ```
<ide>
<del>If the associated object has already been retrieved from the database for this object, the cached version will be returned. To override this behavior (and force a database read), call `#reload` on the parent object.
<add>If the associated object has already been retrieved from the database for this object, the cached version will be returned. To override this behavior (and force a database read), call `#reload_association` on the parent object.
<ide>
<ide> ```ruby
<del>@author = @book.reload.author
<add>@author = @book.reload_author
<ide> ```
<ide>
<ide> ##### `association=(associate)`
<ide> When you declare a `has_one` association, the declaring class automatically gain
<ide> * `build_association(attributes = {})`
<ide> * `create_association(attributes = {})`
<ide> * `create_association!(attributes = {})`
<add>* `reload_association`
<ide>
<ide> In all of these methods, `association` is replaced with the symbol passed as the first argument to `has_one`. For example, given the declaration:
<ide>
<ide> The `association` method returns the associated object, if any. If no associated
<ide> @account = @supplier.account
<ide> ```
<ide>
<del>If the associated object has already been retrieved from the database for this object, the cached version will be returned. To override this behavior (and force a database read), call `#reload` on the parent object.
<add>If the associated object has already been retrieved from the database for this object, the cached version will be returned. To override this behavior (and force a database read), call `#reload_association` on the parent object.
<ide>
<ide> ```ruby
<del>@account = @supplier.reload.account
<add>@account = @supplier.reload_account
<ide> ```
<ide>
<ide> ##### `association=(associate)` | 2 |
Python | Python | add compatibility with both 0.8 and 0.9 api | 3484a373a12b69a8e9c194ffc24b4cb854e10b7f | <ide><path>glances/exports/glances_influxdb.py
<ide> from glances.exports.glances_export import GlancesExport
<ide>
<ide> from influxdb import InfluxDBClient, client
<add>from influxdb.influxdb08 import InfluxDBClient as InfluxDBClient_Legacy
<ide>
<ide>
<ide> class Export(GlancesExport):
<ide> def init(self):
<ide> """Init the connection to the InfluxDB server"""
<ide> if not self.export_enable:
<ide> return None
<del> db = InfluxDBClient(host=self.host,
<del> port=self.port,
<del> username=self.user,
<del> password=self.password,
<del> database=self.db)
<add>
<ide> try:
<del> get_all_db = db.get_list_database()[0].values()
<add> db = InfluxDBClient(host=self.host,
<add> port=self.port,
<add> username=self.user,
<add> password=self.password,
<add> database=self.db)
<add> get_all_db = [i['name'] for i in db.get_list_database()]
<ide> except client.InfluxDBClientError as e:
<del> logger.critical("Can not connect to InfluxDB database '%s' (%s)" % (self.db, e))
<del> sys.exit(2)
<add> try:
<add> # https://github.com/influxdb/influxdb-python/issues/138
<add> logger.debug("Trying fallback to InfluxDB v0.8")
<add> db = InfluxDBClient_Legacy(host=self.host,
<add> port=self.port,
<add> username=self.user,
<add> password=self.password,
<add> database=self.db)
<add> get_all_db = [i['name'] for i in db.get_list_database()]
<add>            except Exception:
<add> logger.critical("Can not connect to InfluxDB database '%s' (%s)" % (self.db, e))
<add> sys.exit(2)
<ide>
<ide> if self.db in get_all_db:
<ide> logger.info( | 1 |
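
The probe-then-fall-back shape of this patch, reduced to a generic sketch (hypothetical `connect` helper; the real code also logs and exits when both attempts fail):

```python
def connect(host, port, user, password, db):
    # Try the current client first; its calls fail against a 0.8 server.
    try:
        from influxdb import InfluxDBClient
        client = InfluxDBClient(host=host, port=port, username=user,
                                password=password, database=db)
        client.get_list_database()
        return client
    except Exception:
        # Fall back to the legacy client shipped in influxdb.influxdb08.
        from influxdb.influxdb08 import InfluxDBClient as LegacyClient
        client = LegacyClient(host=host, port=port, username=user,
                              password=password, database=db)
        client.get_list_database()
        return client
```
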
Mixed | Python | remove trailing spaces | a481bfa231ce1dc4eeaec2a1c1db740e267f663e | <ide><path>DIRECTORY.md
<ide> * [Base32](https://github.com/TheAlgorithms/Python/blob/master/ciphers/base32.py)
<ide> * [Base64 Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/base64_cipher.py)
<ide> * [Base85](https://github.com/TheAlgorithms/Python/blob/master/ciphers/base85.py)
<add> * [Beaufort Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/beaufort_cipher.py)
<ide> * [Brute Force Caesar Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/brute_force_caesar_cipher.py)
<ide> * [Caesar Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/caesar_cipher.py)
<ide> * [Cryptomath Module](https://github.com/TheAlgorithms/Python/blob/master/ciphers/cryptomath_module.py)
<ide> * [Enigma Machine2](https://github.com/TheAlgorithms/Python/blob/master/ciphers/enigma_machine2.py)
<ide> * [Hill Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/hill_cipher.py)
<ide> * [Mixed Keyword Cypher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/mixed_keyword_cypher.py)
<add> * [Mono Alphabetic Ciphers](https://github.com/TheAlgorithms/Python/blob/master/ciphers/mono_alphabetic_ciphers.py)
<ide> * [Morse Code Implementation](https://github.com/TheAlgorithms/Python/blob/master/ciphers/morse_code_implementation.py)
<ide> * [Onepad Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/onepad_cipher.py)
<ide> * [Playfair Cipher](https://github.com/TheAlgorithms/Python/blob/master/ciphers/playfair_cipher.py)
<ide> * [Validate Solutions](https://github.com/TheAlgorithms/Python/blob/master/project_euler/validate_solutions.py)
<ide>
<ide> ## Quantum
<add> * [Deutsch Jozsa](https://github.com/TheAlgorithms/Python/blob/master/quantum/deutsch_jozsa.py)
<ide> * [Half Adder](https://github.com/TheAlgorithms/Python/blob/master/quantum/half_adder.py)
<ide> * [Not Gate](https://github.com/TheAlgorithms/Python/blob/master/quantum/not_gate.py)
<ide> * [Quantum Entanglement](https://github.com/TheAlgorithms/Python/blob/master/quantum/quantum_entanglement.py)
<ide><path>quantum/deutsch_jozsa.py
<ide> classical algorithm
<ide>
<ide> Premise:
<del>We are given a hidden Boolean function f,
<add>We are given a hidden Boolean function f,
<ide> which takes as input a string of bits, and returns either 0 or 1:
<ide>
<ide> f({x0,x1,x2,...}) -> 0 or 1, where xn is 0 or 1
<del>
<add>
<ide> The property of the given Boolean function is that it is guaranteed to
<del>either be balanced or constant. A constant function returns all 0's
<del>or all 1's for any input, while a balanced function returns 0's for
<add>either be balanced or constant. A constant function returns all 0's
<add>or all 1's for any input, while a balanced function returns 0's for
<ide> exactly half of all inputs and 1's for the other half. Our task is to
<ide> determine whether the given function is balanced or constant.
<ide> | 2 |
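
A classical brute-force check of the constant-vs-balanced property the docstring describes (the quantum algorithm decides it with a single oracle query; this sketch, with toy oracle functions, merely pins down the definitions):

```python
from itertools import product

def is_balanced(f, n_bits):
    # Outputs are 0/1, so summing them counts the inputs mapped to 1.
    outputs = [f(bits) for bits in product((0, 1), repeat=n_bits)]
    return sum(outputs) == len(outputs) // 2

def constant(bits):
    return 0            # same value for every input

def balanced(bits):
    return bits[0]      # 1 for exactly half of all inputs

assert not is_balanced(constant, 3)
assert is_balanced(balanced, 3)
```
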
Go | Go | fix assertportlist normalizing being too strict | c8599a6537016dc27d01f756c6747aa709554a45 | <ide><path>integration-cli/docker_cli_port_test.go
<ide> func assertPortList(c *testing.T, out string, expected []string) error {
<ide> // of the CLI used an incorrect output format for mappings on IPv6 addresses
<ide> // for example, "80/tcp -> :::80" instead of "80/tcp -> [::]:80".
<ide> oldFormat := func(mapping string) string {
<del> old := strings.Replace(mapping, "-> [", "-> ", 1)
<add> old := strings.Replace(mapping, "[", "", 1)
<ide> old = strings.Replace(old, "]:", ":", 1)
<ide> return old
<ide> } | 1 |
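
Restated as a runnable Python sketch (a toy re-implementation, not the Go code): the normalizer now strips the first `[` wherever it occurs rather than only inside a `-> [` arrow, so bare `[::]:80` mappings normalize too.

```python
def old_format(mapping: str) -> str:
    # Strip the first "[" and the first "]:" to recover the legacy
    # ":::80"-style rendering of IPv6 addresses.
    return mapping.replace("[", "", 1).replace("]:", ":", 1)

assert old_format("80/tcp -> [::]:80") == "80/tcp -> :::80"
assert old_format("[::]:80") == ":::80"   # the case "-> [" used to miss
```
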
Javascript | Javascript | add test cases for | 4fb2ac5be1ec2be2c1d97b7c70be8423dbae2db4 | <ide><path>test/simple/test-util-inspect.js
<ide> assert.ok(ex.indexOf('[type]') != -1);
<ide> // GH-1941
<ide> // should not throw:
<ide> assert.equal(util.inspect(Object.create(Date.prototype)), '{}')
<add>
<add>// GH-1944
<add>assert.doesNotThrow(function () {
<add> var d = new Date();
<add> d.toUTCString = null;
<add> util.inspect(d);
<add>});
<add>
<add>assert.doesNotThrow(function () {
<add> var r = /regexp/;
<add> r.toString = null;
<add> util.inspect(r);
<add>}); | 1 |
Javascript | Javascript | add ast transform to validate `...attributes` | 647ae44b8f130b79689d10a47041897c5da20a53 | <ide><path>packages/ember-template-compiler/lib/plugins/assert-splattribute-expression.js
<add>import { assert } from '@ember/debug';
<add>import calculateLocationDisplay from '../system/calculate-location-display';
<add>import { EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION } from '@ember/canary-features';
<add>
<add>export default function assertSplattributeExpressions(env) {
<add> let { moduleName } = env.meta;
<add>
<add> return {
<add> name: 'assert-splattribute-expressions',
<add>
<add> visitor: {
<add> AttrNode({ name, loc }) {
<add> if (!EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION && name === '...attributes') {
<add> assert(`${errorMessage()} ${calculateLocationDisplay(moduleName, loc)}`);
<add> }
<add> },
<add>
<add> PathExpression({ original, loc }) {
<add> if (original === '...attributes') {
<add> assert(`${errorMessage()} ${calculateLocationDisplay(moduleName, loc)}`);
<add> }
<add> },
<add> },
<add> };
<add>}
<add>
<add>function errorMessage() {
<add> if (EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {
<add> return `Using "...attributes" can only be used in the element position e.g. <div ...attributes />. It cannot be used as a path.`;
<add> }
<add>
<add> return `...attributes is an invalid path`;
<add>}
<ide><path>packages/ember-template-compiler/lib/plugins/index.js
<ide> import TransformDotComponentInvocation from './transform-dot-component-invocatio
<ide> import AssertInputHelperWithoutBlock from './assert-input-helper-without-block';
<ide> import TransformInElement from './transform-in-element';
<ide> import AssertIfHelperWithoutArguments from './assert-if-helper-without-arguments';
<add>import AssertSplattributeExpressions from './assert-splattribute-expression';
<ide>
<ide> const transforms = [
<ide> TransformDotComponentInvocation,
<ide> const transforms = [
<ide> AssertInputHelperWithoutBlock,
<ide> TransformInElement,
<ide> AssertIfHelperWithoutArguments,
<add> AssertSplattributeExpressions,
<ide> ];
<ide>
<ide> export default Object.freeze(transforms);
<ide><path>packages/ember-template-compiler/tests/plugins/assert-splattribute-expression-test.js
<add>import { EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION } from '@ember/canary-features';
<add>import { moduleFor, AbstractTestCase } from 'internal-test-helpers';
<add>import { compile } from '../../index';
<add>
<add>moduleFor(
<add> 'ember-template-compiler: assert-splattribute-expression',
<add> class extends AbstractTestCase {
<add> '@feature(EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) ...attributes is valid in element space'(
<add> assert
<add> ) {
<add> assert.expect(0);
<add>
<add> compile('<div ...attributes>Foo</div>');
<add> }
<add>
<add> '@test ...attributes is not valid in element space'(assert) {
<add> if (EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {
<add> assert.expect(0);
<add>
<add> compile('<div ...attributes>Foo</div>');
<add> } else {
<add> expectAssertion(() => {
<add> compile('<div ...attributes>Foo</div>');
<add> }, `...attributes is an invalid path (L1:C5) `);
<add> }
<add> }
<add>
<add> '@feature(EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {{...attributes}} is not valid'() {
<add> expectAssertion(() => {
<add> compile('<div>{{...attributes}}</div>', {
<add> moduleName: 'foo-bar',
<add> });
<add> }, `Using "...attributes" can only be used in the element position e.g. <div ...attributes />. It cannot be used as a path. ('foo-bar' @ L1:C7) `);
<add> }
<add>
<add> '@feature(EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {{...attributes}} is not valid path expression'() {
<add> expectAssertion(() => {
<add> compile('<div>{{...attributes}}</div>', {
<add> moduleName: 'foo-bar',
<add> });
<add> }, `Using "...attributes" can only be used in the element position e.g. <div ...attributes />. It cannot be used as a path. ('foo-bar' @ L1:C7) `);
<add> }
<add> '@feature(EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {{...attributes}} is not valid modifier'() {
<add> expectAssertion(() => {
<add> compile('<div {{...attributes}}>Wat</div>', {
<add> moduleName: 'foo-bar',
<add> });
<add> }, `Using "...attributes" can only be used in the element position e.g. <div ...attributes />. It cannot be used as a path. ('foo-bar' @ L1:C7) `);
<add> }
<add> '@feature(EMBER_GLIMMER_ANGLE_BRACKET_INVOCATION) {{...attributes}} is not valid attribute'() {
<add> expectAssertion(() => {
<add> compile('<div class={{...attributes}}>Wat</div>', {
<add> moduleName: 'foo-bar',
<add> });
<add> }, `Using "...attributes" can only be used in the element position e.g. <div ...attributes />. It cannot be used as a path. ('foo-bar' @ L1:C13) `);
<add> }
<add> }
<add>); | 3 |
Python | Python | resolve line-too-long in utils | 80ee2fa4e1db2dda14370110830db82be3eb97b7 | <ide><path>keras/tools/pip_package/create_pip_helper.py
<ide> def verify_python_files_in_pip(pip_root, bazel_root):
<ide> python_files = set(fnmatch.filter(files, "*.py"))
<ide> python_test_files = set(fnmatch.filter(files, "*test.py"))
<ide> python_benchmark_files = set(fnmatch.filter(files, "*benchmark.py"))
<del> # We only care about python files in the pip package, see create_init_files.
<add> # We only care about python files in the pip package, see
<add> # create_init_files.
<ide> files = python_files - python_test_files - python_benchmark_files
<ide> for f in files:
<ide> pip_path = os.path.join(
<ide> def verify_python_files_in_pip(pip_root, bazel_root):
<ide> if not path_exists and not file_excluded:
<ide> raise PipPackagingError(
<ide> (
<del> "Pip package missing the file %s. If this is expected, add it "
<del> "to PIP_EXCLUDED_FILES in create_pip_helper.py. Otherwise, "
<add> "Pip package missing the file %s. If this is expected, "
<add> "add it to PIP_EXCLUDED_FILES in "
<add> "create_pip_helper.py. Otherwise, "
<ide> "make sure it is a build dependency of the pip package"
<ide> )
<ide> % file_name
<ide><path>keras/utils/audio_dataset.py
<ide> def audio_dataset_from_directory(
<ide> if labels not in ("inferred", None):
<ide> if not isinstance(labels, (list, tuple)):
<ide> raise ValueError(
<del> "The `labels` argument should be a list/tuple of integer labels, of "
<del> "the same size as the number of audio files in the target "
<del> "directory. If you wish to infer the labels from the subdirectory "
<del> 'names in the target directory, pass `labels="inferred"`. '
<add> "The `labels` argument should be a list/tuple of integer "
<add> "labels, of the same size as the number of audio files in "
<add> "the target directory. If you wish to infer the labels from "
<add> "the subdirectory names in the target directory,"
<add> ' pass `labels="inferred"`. '
<ide> "If you wish to get a dataset that only contains audio samples "
<ide> f"(no labels), pass `labels=None`. Received: labels={labels}"
<ide> )
<ide> def audio_dataset_from_directory(
<ide> )
<ide> if label_mode not in {"int", "categorical", "binary", None}:
<ide> raise ValueError(
<del> '`label_mode` argument must be one of "int", "categorical", "binary", '
<add> '`label_mode` argument must be one of "int", "categorical", '
<add> '"binary", '
<ide> f"or None. Received: label_mode={label_mode}"
<ide> )
<ide>
<ide> def audio_dataset_from_directory(
<ide> if tfio is None:
<ide> raise ImportError(
<ide> "To use the argument `sampling_rate`, you should install "
<del> "tensorflow_io. You can install it via `pip install tensorflow-io`."
<add> "tensorflow_io. You can install it via `pip install "
<add> "tensorflow-io`."
<ide> )
<ide>
<ide> if labels is None or label_mode is None:
<ide><path>keras/utils/audio_dataset_test.py
<ide> def _prepare_directory(
<ide> return temp_dir
<ide>
<ide> def test_audio_dataset_from_directory_standalone(self):
<del> # Test retrieving audio samples withouts labels from a directory and its subdirs.
<del>
<add>        # Test retrieving audio samples without labels from a directory and its
<add> # subdirs.
<ide> # Save a few extra audio in the parent directory.
<ide> directory = self._prepare_directory(count=7, num_classes=2)
<ide> for i, audio in enumerate(self._get_audio_samples(3)):
<ide> def test_audio_dataset_from_directory_ragged(self):
<ide> def test_audio_dataset_from_directory_no_output_sequence_length_no_ragged(
<ide> self,
<ide> ):
<del> # This test case tests `audio_dataset_from_directory` when `ragged` and `output_sequence_length`
<del> # are not passed while the input sequence lengths are different.
<add> # This test case tests `audio_dataset_from_directory` when `ragged` and
<add> # `output_sequence_length` are not passed while the input sequence
<add> # lengths are different.
<ide> directory = self._prepare_directory(
<ide> num_classes=2, count=16, different_sequence_lengths=True
<ide> )
<ide> # The tensor shapes are different and output_sequence_length is None
<del> # should work fine and pad each sequence to the length of the longest sequence
<del> # in it's batch
<add> # should work fine and pad each sequence to the length of the longest
<add> # sequence in it's batch
<ide> min_sequence_length, max_sequence_length = 10, 30
<ide> possible_sequence_lengths = [
<ide> i for i in range(min_sequence_length, max_sequence_length + 1)
<ide> def test_audio_dataset_from_directory_no_output_sequence_length_no_ragged(
<ide> def test_audio_dataset_from_directory_no_output_sequence_length_same_lengths(
<ide> self,
<ide> ):
<del> # This test case tests `audio_dataset_from_directory` when `ragged` and `output_sequence_length`
<del> # are not passed while the input sequence lengths are the same
<add> # This test case tests `audio_dataset_from_directory` when `ragged` and
<add> # `output_sequence_length` are not passed while the input sequence
<add> # lengths are the same
<ide> directory = self._prepare_directory(
<ide> num_classes=2, count=16, different_sequence_lengths=False
<ide> )
<ide> # The tensor shapes are different and output_sequence_length is None
<del> # should work fine and pad each sequence to the length of the longest sequence
<del> # in it's batch
<add> # should work fine and pad each sequence to the length of the longest
<add> # sequence in it's batch
<ide> dataset = audio_dataset.audio_dataset_from_directory(
<ide> directory, batch_size=2
<ide> )
<ide><path>keras/utils/composite_tensor_support_test.py
<ide> def call(self, inputs):
<ide> else:
<ide> raise TypeError("Unexpected tensor type %s" % type(inputs).__name__)
<ide>
<del> # Return a float so that we can compile models with this as the final layer.
<add> # Return a float so that we can compile models with this as the final
<add> # layer.
<ide> return tf.cast(output, tf.float32)
<ide>
<ide>
<ide> class _SubclassModel(keras.Model):
<ide>
<ide> def __init__(self, layers, i_layer=None):
<ide> super().__init__()
<del> # Note that clone and build doesn't support lists of layers in subclassed
<del> # models. Adding each layer directly here.
<add> # Note that clone and build doesn't support lists of layers in
<add> # subclassed models. Adding each layer directly here.
<ide> for i, layer in enumerate(layers):
<ide> setattr(self, self._layer_name_for_i(i), layer)
<ide> self.num_layers = len(layers)
<ide> def test_internal_sparse_tensors(self):
<ide>
<ide> def test_training_internal_ragged_tensors(self):
<ide> # Create a model that implements y=Mx. This is easy to learn and will
<del> # demonstrate appropriate gradient passing. (We have to use RaggedTensors
<del> # for this test, as ToSparse() doesn't support gradient propagation through
<del> # the layer.) TODO(b/124796939): Investigate this.
<add> # demonstrate appropriate gradient passing. (We have to use
<add> # RaggedTensors for this test, as ToSparse() doesn't support gradient
<add> # propagation through the layer.) TODO(b/124796939): Investigate this.
<ide> layers = [core.Dense(2), ToRagged(padding=0), ToDense(default_value=-1)]
<ide> model = test_utils.get_model_from_layers(layers, input_shape=(1,))
<ide>
<ide> def test_training_internal_ragged_tensors(self):
<ide> model.compile(loss="mse", optimizer="adam", **get_test_mode_kwargs())
<ide> history = model.fit(input_data, expected_data, epochs=10, verbose=0)
<ide>
<del> # If the model trained, the loss stored at history[0] should be different
<del> # than the one stored at history[-1].
<add> # If the model trained, the loss stored at history[0] should be
<add> # different than the one stored at history[-1].
<ide> self.assertNotEqual(
<ide> history.history["loss"][-1], history.history["loss"][0]
<ide> )
<ide> def test_sparse_tensors(self, use_dict, use_dataset, action):
<ide> result = model.evaluate(input_data, expected_output, **kwargs)
<ide> self.assertAllEqual(1.0, result[-1])
<ide> if action == "fit":
<del> # TODO(momernick): What's the best way of validating that fit happened?
<add> # TODO(momernick): What's the best way of validating that fit
<add> # happened?
<ide> _ = model.fit(
<ide> input_data, expected_output, shuffle=False, **kwargs
<ide> )
<ide> def test_sparse_tensors(self, use_dict, use_dataset, action):
<ide> @test_combinations.run_all_keras_modes
<ide> class ScipySparseTensorInputTest(test_combinations.TestCase, tf.test.TestCase):
<ide> def test_sparse_scipy_predict_inputs_via_input_layer_args(self):
<del> # Create a model that accepts a sparse input and converts the sparse tensor
<del> # back to a dense tensor. Scipy sparse matrices are limited to 2D, so use
<del> # a one-dimensional shape; note also that scipy's default dtype is int64.
<add> # Create a model that accepts a sparse input and converts the sparse
<add> # tensor back to a dense tensor. Scipy sparse matrices are limited to
<add> # 2D, so use a one-dimensional shape; note also that scipy's default
<add> # dtype is int64.
<ide> model_input = input_layer.Input(shape=(3,), sparse=True, dtype=tf.int64)
<ide> layers = [ToDense(default_value=-1)]
<ide> model = get_model_from_layers_with_input(
<ide> def test_sparse_scipy_predict_inputs_via_input_layer_args(self):
<ide> self.assertAllEqual(expected_output_2, output_2)
<ide>
<ide> def test_sparse_scipy_eval_inputs(self):
<del> # Create a model that accepts a sparse input and converts the sparse tensor
<del> # back to a dense tensor. Scipy sparse matrices are limited to 2D, so use
<del> # a one-dimensional shape; note also that scipy's default dtype is int64.
<add> # Create a model that accepts a sparse input and converts the sparse
<add> # tensor back to a dense tensor. Scipy sparse matrices are limited to
<add> # 2D, so use a one-dimensional shape; note also that scipy's default
<add> # dtype is int64.
<ide> model_input = input_layer.Input(shape=(3,), sparse=True, dtype=tf.int64)
<ide> layers = [ToDense(default_value=-1)]
<ide> model = get_model_from_layers_with_input(
<ide> def test_sparse_scipy_eval_inputs(self):
<ide> self.assertAllEqual(1.0, output_2[-1])
<ide>
<ide> def test_sparse_scipy_predict_input_dicts_via_input_layer_args(self):
<del> # Create a model that accepts a sparse input and converts the sparse tensor
<del> # back to a dense tensor. Scipy sparse matrices are limited to 2D, so use
<del> # a one-dimensional shape; note also that scipy's default dtype is int64.
<add> # Create a model that accepts a sparse input and converts the sparse
<add> # tensor back to a dense tensor. Scipy sparse matrices are limited to
<add> # 2D, so use a one-dimensional shape; note also that scipy's default
<add> # dtype is int64.
<ide> if test_utils.get_model_type() == "subclass":
<ide> input_name = "input_1" # Subclass models don"t support input names.
<ide> else:
<ide> def test_sparse_scipy_predict_input_dicts_via_input_layer_args(self):
<ide> self.assertAllEqual(expected_output_2, output_2)
<ide>
<ide> def test_sparse_scipy_eval_input_dicts(self):
<del> # Create a model that accepts a sparse input and converts the sparse tensor
<del> # back to a dense tensor. Scipy sparse matrices are limited to 2D, so use
<del> # a one-dimensional shape; note also that scipy's default dtype is int64.
<add> # Create a model that accepts a sparse input and converts the sparse
<add> # tensor back to a dense tensor. Scipy sparse matrices are limited to
<add> # 2D, so use a one-dimensional shape; note also that scipy's default
<add> # dtype is int64.
<ide> if test_utils.get_model_type() == "subclass":
<ide> input_name = "input_1" # Subclass models don"t support input names.
<ide> else:
<ide> def test_ragged_input(self, use_dict, use_dataset, action):
<ide> result = model.evaluate(input_data, expected_output)
<ide> self.assertAllEqual(1.0, result[-1])
<ide> if action == "fit":
<del> # TODO(momernick): What's the best way of validating that fit happened?
<add> # TODO(momernick): What's the best way of validating that fit
<add> # happened?
<ide> _ = model.fit(input_data, expected_output, shuffle=False)
<ide>
<ide>
<ide> def _normalize_shape(self, shape):
<ide> return shape
<ide>
<ide> def test_sparse_tensor_model_predict(self):
<del> # Create a model that accepts a sparse input and runs a "Dense" layer on it.
<add> # Create a model that accepts a sparse input and runs a "Dense" layer on
<add> # it.
<ide> model_input = input_layer.Input(
<ide> shape=(3,), sparse=True, dtype=tf.float32
<ide> )
<ide> def test_sparse_tensor_model_predict(self):
<ide> self.assertEqual((6, 2), self._normalize_shape(shape))
<ide>
<ide> def test_ragged_tensor_model_predict(self):
<del> # Create a model that accepts a sparse input and runs a "Dense" layer on it.
<add> # Create a model that accepts a sparse input and runs a "Dense" layer on
<add> # it.
<ide> model_input = input_layer.Input(shape=(None,), ragged=True)
<ide> self.assertEqual([None, None], model_input.shape.as_list())
<ide>
<ide><path>keras/utils/control_flow_util.py
<ide> def GetContainingWhileContext(ctxt, stop_ctxt=None):
<ide> if it sees stop_ctxt.
<ide>
<ide> Returns:
<del> `ctxt` if `ctxt` is a WhileContext, the most nested WhileContext containing
<del> `ctxt`, or None if `ctxt` is not in a while loop. If `stop_ctxt` is not
<del> `None`, this returns `ctxt` if it matches `stop_ctxt` in its traversal.
<add> `ctxt` if `ctxt` is a WhileContext, the most nested WhileContext
<add> containing `ctxt`, or None if `ctxt` is not in a while loop. If
<add> `stop_ctxt` is not `None`, this returns `ctxt` if it matches `stop_ctxt`
<add> in its traversal.
<ide> """
<ide> while ctxt:
<ide> if ctxt.IsWhileContext() or ctxt == stop_ctxt:
<ide><path>keras/utils/conv_utils.py
<ide> def convert_data_format(data_format, ndim):
<ide> return "NDHWC"
<ide> else:
<ide> raise ValueError(
<del> f"Input rank not supported: {ndim}. Expected values are [3, 4, 5]"
<add> f"Input rank not supported: {ndim}. "
<add> "Expected values are [3, 4, 5]"
<ide> )
<ide> elif data_format == "channels_first":
<ide> if ndim == 3:
<ide> def convert_data_format(data_format, ndim):
<ide> return "NCDHW"
<ide> else:
<ide> raise ValueError(
<del> f"Input rank not supported: {ndim}. Expected values are [3, 4, 5]"
<add> f"Input rank not supported: {ndim}. "
<add> "Expected values are [3, 4, 5]"
<ide> )
<ide> else:
<ide> raise ValueError(
<ide> def deconv_output_length(
<ide> input_length: Integer.
<ide> filter_size: Integer.
<ide> padding: one of `"same"`, `"valid"`, `"full"`.
<del> output_padding: Integer, amount of padding along the output dimension. Can
<del> be set to `None` in which case the output length is inferred.
<add> output_padding: Integer, amount of padding along the output dimension.
<add> Can be set to `None` in which case the output length is inferred.
<ide> stride: Integer.
<ide> dilation: Integer.
<ide>
<ide> def conv_kernel_mask(input_shape, kernel_shape, strides, padding):
<ide>
<ide> Assume a convolution with given parameters is applied to an input having N
<ide> spatial dimensions with `input_shape = (d_in1, ..., d_inN)` to produce an
<del> output with shape `(d_out1, ..., d_outN)`. This method returns a boolean array
<del> of shape `(d_in1, ..., d_inN, d_out1, ..., d_outN)` with `True` entries
<del> indicating pairs of input and output locations that are connected by a weight.
<add> output with shape `(d_out1, ..., d_outN)`. This method returns a boolean
<add> array of shape `(d_in1, ..., d_inN, d_out1, ..., d_outN)` with `True`
<add> entries indicating pairs of input and output locations that are connected by
<add> a weight.
<ide>
<ide> Example:
<ide>
<ide> def conv_kernel_idxs(
<ide> """Yields output-input tuples of indices in a CNN layer.
<ide>
<ide> The generator iterates over all `(output_idx, input_idx)` tuples, where
<del> `output_idx` is an integer index in a flattened tensor representing a single
<del> output image of a convolutional layer that is connected (via the layer
<del> weights) to the respective single input image at `input_idx`
<add> `output_idx` is an integer index in a flattened tensor representing a single
<add> output image of a convolutional layer that is connected (via the layer
<add> weights) to the respective single input image at `input_idx`
<ide>
<ide> Example:
<ide>
<ide> def conv_kernel_idxs(
<ide> data_format: string, "channels_first" or "channels_last".
<ide>
<ide> Yields:
<del> The next tuple `(output_idx, input_idx)`, where
<del> `output_idx` is an integer index in a flattened tensor representing a single
<del> output image of a convolutional layer that is connected (via the layer
<del> weights) to the respective single input image at `input_idx`.
<add> The next tuple `(output_idx, input_idx)`, where `output_idx` is an integer
<add> index in a flattened tensor representing a single output image of a
<add> convolutional layer that is connected (via the layer weights) to the
<add> respective single input image at `input_idx`.
<ide>
<ide> Raises:
<del> ValueError: if `data_format` is neither
<del> `"channels_last"` nor `"channels_first"`, or if number of strides, input,
<del> and kernel number of dimensions do not match.
<add> ValueError: if `data_format` is neither `"channels_last"` nor
<add> `"channels_first"`, or if number of strides, input, and kernel number
<add> of dimensions do not match.
<ide>
<ide> NotImplementedError: if `padding` is neither `"same"` nor `"valid"`.
<ide> """
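# A small sketch of iterating the generator for the same 1-D setup as above,
# with one input and one output filter; output 0 connects to inputs {0, 1}
# and output 1 to inputs {1, 2} (the exact yield order is an assumption).
from keras.utils import conv_utils

pairs = set(conv_utils.conv_kernel_idxs(
    input_shape=(3,), kernel_shape=(2,), strides=(1,), padding="valid",
    filters_in=1, filters_out=1, data_format="channels_last"))
# pairs == {(0, 0), (0, 1), (1, 1), (1, 2)}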
<ide> def conv_connected_inputs(
<ide> input.
<ide> kernel_shape: tuple of size N, spatial shape of the convolutional kernel /
<ide> receptive field.
<del> output_position: tuple of size N: `(p_out1, ..., p_outN)`, a single position
<del> in the output of the convolution.
<add> output_position: tuple of size N: `(p_out1, ..., p_outN)`, a single
<add> position in the output of the convolution.
<ide> strides: tuple of size N, strides along each spatial dimension.
<ide> padding: type of padding, string `"same"` or `"valid"`.
<ide> `"valid"` means no padding. `"same"` results in padding evenly to
<ide><path>keras/utils/data_utils.py
<ide> from keras.utils.generic_utils import Progbar
<ide>
<ide> # Required to support google internal urlretrieve
<del>if (
<del> True
<del>): # This gets transformed to `if sys.version_info[0] == 2:` in OSS. # pylint: disable=using-constant-test
<add>if True: # This gets transformed to `if sys.version_info[0] == 2:` in OSS.
<ide>
<ide> def urlretrieve(url, filename, reporthook=None, data=None):
<ide> """Replacement for `urlretrieve` for Python 2.
<ide> def urlretrieve(url, filename, reporthook=None, data=None):
<ide> Args:
<ide> url: url to retrieve.
<ide> filename: where to store the retrieved data locally.
<del> reporthook: a hook function that will be called once on establishment of
<del> the network connection and once after each block read thereafter. The
<del> hook will be passed three arguments; a count of blocks transferred so
<del> far, a block size in bytes, and the total size of the file.
<add> reporthook: a hook function that will be called once on
<add> establishment of the network connection and once after each block
<add> read thereafter. The hook will be passed three arguments; a count
<add> of blocks transferred so far, a block size in bytes, and the total
<add> size of the file.
<ide> data: `data` argument passed to `urlopen`.
<ide> """
<ide>
<ide> def get_file(
<ide> fname = os.path.basename(urlsplit(origin).path)
<ide> if not fname:
<ide> raise ValueError(
<del> f"Can't parse the file name from the origin provided: '{origin}'."
<add> "Can't parse the file name from the origin provided: "
<add>             f"'{origin}'. "
<ide> "Please specify the `fname` as the input param."
<ide> )
<ide>
<ide> def get_file(
<ide> io_utils.print_msg(
<ide> "A local file was found, but it seems to be "
<ide> f"incomplete or outdated because the {hash_algorithm} "
<del> f"file hash does not match the original value of {file_hash} "
<add>                     "file hash does not match the original value of "
<add> f"{file_hash} "
<ide> "so we will re-download the data."
<ide> )
<ide> download = True
<ide> def __call__(self, block_num, block_size, total_size):
<ide> raise
<ide>
<ide> # Validate download if succeeded and user provided an expected hash
<del> # Security conscious users would get the hash of the file from a separate
<del> # channel and pass it to this API to prevent MITM / corruption:
<add> # Security conscious users would get the hash of the file from a
<add> # separate channel and pass it to this API to prevent MITM / corruption:
<ide> if os.path.exists(fpath) and file_hash is not None:
<ide> if not validate_file(fpath, file_hash, algorithm=hash_algorithm):
<ide> raise ValueError(
<del> f"Incomplete or corrupted file detected. The {hash_algorithm} "
<del> f"file hash does not match the provided value of {file_hash}."
<add>                 "Incomplete or corrupted file detected. "
<add> f"The {hash_algorithm} "
<add>                 "file hash does not match the provided value "
<add> f"of {file_hash}."
<ide> )
<ide>
<ide> if untar:
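# A hedged usage sketch of `get_file` with hash validation; the URL and
# sha256 value below are illustrative placeholders, not verified values.
import tensorflow as tf

path = tf.keras.utils.get_file(
    fname="data.tar.gz",
    origin="https://example.com/data.tar.gz",  # placeholder origin
    file_hash="aabbcc",  # placeholder: sha256 of the expected file
    untar=True,
)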
<ide> def __init__(self, it):
<ide> self.it = it
<ide> self.lock = threading.Lock()
<ide>
<del> # After a generator throws an exception all subsequent next() calls raise a
<del> # StopIteration Exception. This, however, presents an issue when mixing
<del> # generators and threading because it means the order of retrieval need not
<del> # match the order in which the generator was called. This can make it appear
<del> # that a generator exited normally when in fact the terminating exception is
<del> # just in a different thread. In order to provide thread safety, once
<del> # self.it has thrown an exception we continue to throw the same exception.
<add> # After a generator throws an exception all subsequent next() calls
<add> # raise a StopIteration Exception. This, however, presents an issue when
<add> # mixing generators and threading because it means the order of
<add> # retrieval need not match the order in which the generator was called.
<add> # This can make it appear that a generator exited normally when in fact
<add> # the terminating exception is just in a different thread. In order to
<add> # provide thread safety, once self.it has thrown an exception we
<add> # continue to throw the same exception.
<ide> self._exception = None
<ide>
<ide> def __iter__(self):
<ide> def init_pool_generator(gens, random_seed=None, id_queue=None):
<ide>
<ide> worker_proc = multiprocessing.current_process()
<ide>
<del> # name isn't used for anything, but setting a more descriptive name is helpful
<del> # when diagnosing orphaned processes.
<add> # name isn't used for anything, but setting a more descriptive name is
<add> # helpful when diagnosing orphaned processes.
<ide> worker_proc.name = "Keras_worker_{}".format(worker_proc.name)
<ide>
<ide> if random_seed is not None:
<ide><path>keras/utils/dataset_creator.py
<ide> class DatasetCreator:
<ide> """Object that returns a `tf.data.Dataset` upon invoking.
<ide>
<del> `tf.keras.utils.experimental.DatasetCreator` is designated as a supported type
<del> for `x`, or the input, in `tf.keras.Model.fit`. Pass an instance of this class
<del> to `fit` when using a callable (with a `input_context` argument) that returns
<del> a `tf.data.Dataset`.
<add> `tf.keras.utils.experimental.DatasetCreator` is designated as a supported
<add> type for `x`, or the input, in `tf.keras.Model.fit`. Pass an instance of
<add> this class to `fit` when using a callable (with a `input_context` argument)
<add> that returns a `tf.data.Dataset`.
<ide>
<ide> ```python
<ide> model = tf.keras.Sequential([tf.keras.layers.Dense(10)])
<ide> def dataset_fn(input_context):
<ide>
<ide> Args:
<ide> dataset_fn: A callable that takes a single argument of type
<del> `tf.distribute.InputContext`, which is used for batch size calculation and
<del> cross-worker input pipeline sharding (if neither is needed, the
<del> `InputContext` parameter can be ignored in the `dataset_fn`), and returns
<del> a `tf.data.Dataset`.
<add> `tf.distribute.InputContext`, which is used for batch size calculation
<add> and cross-worker input pipeline sharding (if neither is needed, the
<add> `InputContext` parameter can be ignored in the `dataset_fn`), and
<add> returns a `tf.data.Dataset`.
<ide> input_options: Optional `tf.distribute.InputOptions`, used for specific
<ide> options when used with distribution, for example, whether to prefetch
<ide> dataset elements to accelerator device memory or host device memory, and
<ide> def __init__(self, dataset_fn, input_options=None):
<ide> self.input_options = input_options
<ide>
<ide> def __call__(self, *args, **kwargs):
<del> # When a `DatasetCreator` is invoked, it forwards args/kwargs straight to
<del> # the callable.
<add> # When a `DatasetCreator` is invoked, it forwards args/kwargs straight
<add> # to the callable.
<ide> dataset = self.dataset_fn(*args, **kwargs)
<ide> if not isinstance(dataset, tf.data.Dataset):
<ide> raise TypeError(
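# A sketch of a `dataset_fn` that uses its `input_context` for per-replica
# batching and cross-worker sharding; the dataset contents are illustrative.
import tensorflow as tf

def dataset_fn(input_context):
    batch_size = input_context.get_per_replica_batch_size(
        global_batch_size=64)
    ds = tf.data.Dataset.from_tensor_slices(tf.range(1024))
    # Shard across input pipelines so each worker reads distinct data.
    ds = ds.shard(input_context.num_input_pipelines,
                  input_context.input_pipeline_id)
    return ds.batch(batch_size).repeat()

creator = tf.keras.utils.experimental.DatasetCreator(dataset_fn)
# model.fit(creator, epochs=2, steps_per_epoch=10)  # with a compiled model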
<ide><path>keras/utils/dataset_utils.py
<ide> def split_dataset(
<ide> same length.
<ide> left_size: If float, it should be in range `[0, 1]` range and signifies
<ide> the fraction of the data to pack in the left dataset. If integer, it
<del> signifies the number of samples to pack in the left dataset. If `None`,
<del> it defaults to the complement to `right_size`.
<add> signifies the number of samples to pack in the left dataset. If
<add> `None`, it defaults to the complement to `right_size`.
<ide> right_size: If float, it should be in range `[0, 1]` range and signifies
<ide> the fraction of the data to pack in the right dataset. If integer, it
<del> signifies the number of samples to pack in the right dataset. If `None`,
<del> it defaults to the complement to `left_size`.
<add> signifies the number of samples to pack in the right dataset. If
<add> `None`, it defaults to the complement to `left_size`.
<ide> shuffle: Boolean, whether to shuffle the data before splitting it.
<ide> seed: A random seed for shuffling.
<ide>
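# A minimal sketch of the split semantics: with 1000 samples and
# left_size=0.8, the left dataset receives 800 samples and the right one the
# remaining 200.
import numpy as np
import tensorflow as tf

data = np.random.random(size=(1000, 4))
left, right = tf.keras.utils.split_dataset(
    data, left_size=0.8, shuffle=True, seed=1)
assert left.cardinality().numpy() == 800
assert right.cardinality().numpy() == 200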
<ide> def _convert_dataset_to_list(
<ide> Args:
<ide> dataset : A `tf.data.Dataset` object or a list/tuple of arrays.
<ide> dataset_type_spec : the type of the dataset
<del> data_size_warning_flag (bool, optional): If set to True, a warning will be
<del> issued if the dataset takes longer than 10 seconds to iterate. Defaults
<del> to True.
<add> data_size_warning_flag (bool, optional): If set to True, a warning will
<add> be issued if the dataset takes longer than 10 seconds to iterate.
<add> Defaults to True.
<ide> ensure_shape_similarity (bool, optional): If set to True, the shape of
<ide> the first sample will be used to validate the shape of rest of the
<ide> samples. Defaults to True.
<ide> def _get_next_sample(
<ide> ensure_shape_similarity (bool, optional): If set to True, the shape of
<ide> the first sample will be used to validate the shape of rest of the
<ide> samples. Defaults to True.
<del> data_size_warning_flag (bool, optional): If set to True, a warning will be
<del> issued if the dataset takes longer than 10 seconds to iterate. Defaults
<del> to True.
<del> start_time (float): the start time of the dataset iteration. this is used
<del> only if `data_size_warning_flag` is set to true.
<add> data_size_warning_flag (bool, optional): If set to True, a warning will
<add> be issued if the dataset takes longer than 10 seconds to iterate.
<add> Defaults to True.
<add>     start_time (float): the start time of the dataset iteration. This is
<add>         used only if `data_size_warning_flag` is set to `True`.
<ide>
<ide> Raises:
<ide> ValueError: - If the dataset is empty.
<ide> def _get_next_sample(
<ide> if int(cur_time - start_time) > 10 and data_size_warning_flag:
<ide> warnings.warn(
<ide> "The dataset is taking longer than 10 seconds to "
<del> "iterate over. This may be due to the size of the dataset. "
<del> "Keep in mind that the `split_dataset` utility is only for "
<del> "small in-memory dataset (e.g. < 10,000 samples).",
<add> "iterate over. This may be due to the size of the "
<add> "dataset. Keep in mind that the `split_dataset` "
<add> "utility is only for small in-memory dataset "
<add> "(e.g. < 10,000 samples).",
<ide> category=ResourceWarning,
<ide> source="split_dataset",
<ide> )
<ide> def _rescale_dataset_split_sizes(left_size, right_size, total_length):
<ide> f"{left_size}"
<ide> )
<ide>
<del> # check right_size is non-negative and less than 1 and less than total_length
<add> # check right_size is non-negative and less than 1 and less than
<add> # total_length
<ide> if (
<ide> right_size_type == int
<ide> and (right_size <= 0 or right_size >= total_length)
<ide> def _rescale_dataset_split_sizes(left_size, right_size, total_length):
<ide> f"{right_size}"
<ide> )
<ide>
<del> # check sum of left_size and right_size is less than or equal to total_length
<add> # check sum of left_size and right_size is less than or equal to
<add> # total_length
<ide> if (
<ide> right_size_type == left_size_type == float
<ide> and right_size + left_size > 1
<ide> def index_directory(
<ide> tuple (file_paths, labels, class_names).
<ide> file_paths: list of file paths (strings).
<ide> labels: list of matching integer labels (same length as file_paths)
<del> class_names: names of the classes corresponding to these labels, in order.
<add> class_names: names of the classes corresponding to these labels, in
<add> order.
<ide> """
<ide> if labels is None:
<ide> # in the no-label case, index from the parent directory down.
<ide> def labels_to_dataset(labels, label_mode, num_classes):
<ide> label_mode: String describing the encoding of `labels`. Options are:
<ide> - 'binary' indicates that the labels (there can be only 2) are encoded as
<ide> `float32` scalars with values 0 or 1 (e.g. for `binary_crossentropy`).
<del> - 'categorical' means that the labels are mapped into a categorical vector.
<del> (e.g. for `categorical_crossentropy` loss).
<add> - 'categorical' means that the labels are mapped into a categorical
<add> vector. (e.g. for `categorical_crossentropy` loss).
<ide> num_classes: number of classes of labels.
<ide>
<ide> Returns:
<ide> def check_validation_split_arg(validation_split, subset, shuffle, seed):
<ide> )
<ide> if validation_split and shuffle and seed is None:
<ide> raise ValueError(
<del> "If using `validation_split` and shuffling the data, you must provide "
<del> "a `seed` argument, to make sure that there is no overlap between the "
<del> "training and validation subset."
<add> "If using `validation_split` and shuffling the data, you must "
<add> "provide a `seed` argument, to make sure that there is no "
<add> "overlap between the training and validation subset."
<ide> )
<ide><path>keras/utils/generic_utils.py
<ide>
<ide>
<ide> @keras_export(
<del> "keras.utils.custom_object_scope", # pylint: disable=g-classes-have-attributes
<add> "keras.utils.custom_object_scope",
<ide> "keras.utils.CustomObjectScope",
<ide> )
<ide> class CustomObjectScope:
<ide> class CustomObjectScope:
<ide>
<ide> ```python
<ide> layer = Dense(3, kernel_regularizer=my_regularizer)
<del> config = layer.get_config() # Config contains a reference to `my_regularizer`
<add> # Config contains a reference to `my_regularizer`
<add> config = layer.get_config()
<ide> ...
<ide> # Later:
<ide> with custom_object_scope({'my_regularizer': my_regularizer}):
<ide> def __exit__(self, *args, **kwargs):
<ide> class NoopLoadingScope:
<ide> """The default shared object loading scope. It does nothing.
<ide>
<del> Created to simplify serialization code that doesn't care about shared objects
<del> (e.g. when serializing a single object).
<add> Created to simplify serialization code that doesn't care about shared
<add> objects (e.g. when serializing a single object).
<ide> """
<ide>
<ide> def get(self, unused_object_id):
<ide> def get(self, object_id):
<ide> """Given a shared object ID, returns a previously instantiated object.
<ide>
<ide> Args:
<del> object_id: shared object ID to use when attempting to find already-loaded
<del> object.
<add> object_id: shared object ID to use when attempting to find
<add> already-loaded object.
<ide>
<ide> Returns:
<ide> The object, if we've seen this ID before. Else, `None`.
<ide> def __init__(self, base_config, object_id, **kwargs):
<ide>
<ide> def increment_ref_count(self):
<ide> # As soon as we've seen the object more than once, we want to attach the
<del> # shared object ID. This allows us to only attach the shared object ID when
<del> # it's strictly necessary, making backwards compatibility breakage less
<del> # likely.
<add> # shared object ID. This allows us to only attach the shared object ID
<add> # when it's strictly necessary, making backwards compatibility breakage
<add> # less likely.
<ide> if self.ref_count == 1:
<ide> self[SHARED_OBJECT_KEY] = self.object_id
<ide> self.ref_count += 1
<ide> def __enter__(self):
<ide>
<ide> global SHARED_OBJECT_SAVING
<ide>
<del> # Serialization can happen at a number of layers for a number of reasons.
<del> # We may end up with a case where we're opening a saving scope within
<del> # another saving scope. In that case, we'd like to use the outermost scope
<del> # available and ignore inner scopes, since there is not (yet) a reasonable
<del> # use case for having these nested and distinct.
<add> # Serialization can happen at a number of layers for a number of
<add> # reasons. We may end up with a case where we're opening a saving scope
<add> # within another saving scope. In that case, we'd like to use the
<add> # outermost scope available and ignore inner scopes, since there is not
<add> # (yet) a reasonable use case for having these nested and distinct.
<ide> if _shared_object_saving_scope() is not None:
<ide> self._passthrough = True
<ide> return _shared_object_saving_scope()
<ide> def get_config(self, obj):
<ide> try:
<ide> shared_object_config = self._shared_objects_config[obj]
<ide> except (TypeError, KeyError):
<del> # If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
<del> # that has not overridden `__hash__`), a `TypeError` will be thrown.
<del> # We'll just continue on without shared object support.
<add> # If the object is unhashable (e.g. a subclass of
<add> # `AbstractBaseClass` that has not overridden `__hash__`), a
<add> # `TypeError` will be thrown. We'll just continue on without shared
<add> # object support.
<ide> return None
<ide> shared_object_config.increment_ref_count()
<ide> return shared_object_config
<ide> def create_config(self, base_config, obj):
<ide> try:
<ide> self._shared_objects_config[obj] = shared_object_config
<ide> except TypeError:
<del> # If the object is unhashable (e.g. a subclass of `AbstractBaseClass`
<del> # that has not overridden `__hash__`), a `TypeError` will be thrown.
<del> # We'll just continue on without shared object support.
<add> # If the object is unhashable (e.g. a subclass of
<add> # `AbstractBaseClass` that has not overridden `__hash__`), a
<add> # `TypeError` will be thrown. We'll just continue on without shared
<add> # object support.
<ide> pass
<ide> return shared_object_config
<ide>
<ide> def serialize_keras_class_and_config(
<ide> if shared_object_id is not None:
<ide> base_config[SHARED_OBJECT_KEY] = shared_object_id
<ide>
<del> # If we have an active `SharedObjectSavingScope`, check whether we've already
<del> # serialized this config. If so, just use that config. This will store an
<del> # extra ID field in the config, allowing us to re-create the shared object
<del> # relationship at load time.
<add> # If we have an active `SharedObjectSavingScope`, check whether we've
<add> # already serialized this config. If so, just use that config. This will
<add> # store an extra ID field in the config, allowing us to re-create the shared
<add> # object relationship at load time.
<ide> if _shared_object_saving_scope() is not None and obj is not None:
<ide> shared_object_config = _shared_object_saving_scope().get_config(obj)
<ide> if shared_object_config is None:
<ide> class MyDense(keras.layers.Dense):
<ide> ```
<ide>
<ide> Args:
<del> package: The package that this class belongs to. This is used for the `key`
<del> (which is 'package>name') to idenfify the class. Note that this is the
<del> first argument passed into the decorator.
<add> package: The package that this class belongs to. This is used for the
<add>       `key` (which is 'package>name') to identify the class. Note that this is
<add> the first argument passed into the decorator.
<ide> name: The name to serialize this class under in this package. If not
<ide> provided or `None`, the class' name will be used (note that this is the
<del> case when the decorator is used with only one argument, which becomes the
<del> `package`).
<add> case when the decorator is used with only one argument, which becomes
<add> the `package`).
<ide>
<ide> Returns:
<ide> A decorator that registers the decorated class with the passed names.
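# A hedged sketch of the registration key: with package="MyPackage" the
# class below is stored under "MyPackage>MyDense" and can be deserialized
# later without passing `custom_objects`.
import tensorflow as tf

@tf.keras.utils.register_keras_serializable(package="MyPackage")
class MyDense(tf.keras.layers.Dense):
    pass

config = tf.keras.layers.serialize(MyDense(units=4))
layer = tf.keras.layers.deserialize(config)  # resolved via the registry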
<ide> def decorator(arg):
<ide>
<ide> if tf_inspect.isclass(arg) and not hasattr(arg, "get_config"):
<ide> raise ValueError(
<del> "Cannot register a class that does not have a get_config() method."
<add> "Cannot register a class that does not have a "
<add> "get_config() method."
<ide> )
<ide>
<ide> if registered_name in _GLOBAL_CUSTOM_OBJECTS:
<ide> def decorator(arg):
<ide>
<ide> if arg in _GLOBAL_CUSTOM_NAMES:
<ide> raise ValueError(
<del> f"{arg} has already been registered to {_GLOBAL_CUSTOM_NAMES[arg]}"
<add> f"{arg} has already been registered to "
<add> f"{_GLOBAL_CUSTOM_NAMES[arg]}"
<ide> )
<ide> _GLOBAL_CUSTOM_OBJECTS[registered_name] = arg
<ide> _GLOBAL_CUSTOM_NAMES[arg] = registered_name
<ide> def serialize_keras_object(instance):
<ide> serialization_config[key] = item
<ide> continue
<ide>
<del> # Any object of a different type needs to be converted to string or dict
<del> # for serialization (e.g. custom functions, custom classes)
<add> # Any object of a different type needs to be converted to string or
<add> # dict for serialization (e.g. custom functions, custom classes)
<ide> try:
<ide> serialized_item = serialize_keras_object(item)
<ide> if isinstance(serialized_item, dict) and not isinstance(
<ide> def class_and_config_for_serialized_keras_object(
<ide> cls = get_registered_object(class_name, custom_objects, module_objects)
<ide> if cls is None:
<ide> raise ValueError(
<del> f"Unknown {printable_module_name}: {class_name}. Please ensure this "
<add> f"Unknown {printable_module_name}: {class_name}. "
<add> "Please ensure this "
<ide> "object is passed to the `custom_objects` argument. See "
<ide> "https://www.tensorflow.org/guide/keras/save_and_serialize"
<ide> "#registering_the_custom_object for details."
<ide> def class_and_config_for_serialized_keras_object(
<ide> elif isinstance(item, str) and tf_inspect.isfunction(
<ide> get_registered_object(item, custom_objects)
<ide> ):
<del> # Handle custom functions here. When saving functions, we only save the
<del> # function's name as a string. If we find a matching string in the custom
<del> # objects during deserialization, we convert the string back to the
<del> # original function.
<del> # Note that a potential issue is that a string field could have a naming
<del> # conflict with a custom function name, but this should be a rare case.
<del> # This issue does not occur if a string field has a naming conflict with
<del> # a custom object, since the config of an object will always be a dict.
<add> # Handle custom functions here. When saving functions, we only save
<add> # the function's name as a string. If we find a matching string in
<add> # the custom objects during deserialization, we convert the string
<add> # back to the original function.
<add> # Note that a potential issue is that a string field could have a
<add> # naming conflict with a custom function name, but this should be a
<add> # rare case. This issue does not occur if a string field has a
<add> # naming conflict with a custom object, since the config of an
<add> # object will always be a dict.
<ide> deserialized_objects[key] = get_registered_object(
<ide> item, custom_objects
<ide> )
<ide> def deserialize_keras_object(
<ide>
<ide> This function is for mid-level library implementers rather than end users.
<ide>
<del> Importantly, this utility requires you to provide the dict of `module_objects`
<del> to use for looking up the object config; this is not populated by default.
<del> If you need a deserialization utility that has preexisting knowledge of
<del> built-in Keras objects, use e.g. `keras.layers.deserialize(config)`,
<del> `keras.metrics.deserialize(config)`, etc.
<add> Importantly, this utility requires you to provide the dict of
<add> `module_objects` to use for looking up the object config; this is not
<add> populated by default. If you need a deserialization utility that has
<add> preexisting knowledge of built-in Keras objects, use e.g.
<add> `keras.layers.deserialize(config)`, `keras.metrics.deserialize(config)`,
<add> etc.
<ide>
<ide> Calling `deserialize_keras_object` while underneath the
<del> `SharedObjectLoadingScope` context manager will cause any already-seen shared
<del> objects to be returned as-is rather than creating a new object.
<add> `SharedObjectLoadingScope` context manager will cause any already-seen
<add> shared objects to be returned as-is rather than creating a new object.
<ide>
<ide> Args:
<ide> identifier: the serialized form of the object.
<ide> module_objects: A dictionary of built-in objects to look the name up in.
<del> Generally, `module_objects` is provided by midlevel library implementers.
<add> Generally, `module_objects` is provided by midlevel library
<add> implementers.
<ide> custom_objects: A dictionary of custom objects to look the name up in.
<ide> Generally, `custom_objects` is provided by the end user.
<del> printable_module_name: A human-readable string representing the type of the
<del> object. Printed in case of exception.
<add> printable_module_name: A human-readable string representing the type of
<add> the object. Printed in case of exception.
<ide>
<ide> Returns:
<ide> The deserialized object.
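# A sketch for mid-level implementers, assuming `module_objects` is the
# namespace in which the config's `class_name` should be looked up.
from keras import regularizers
from keras.utils import generic_utils

config = {"class_name": "L1L2", "config": {"l1": 0.01, "l2": 0.0}}
reg = generic_utils.deserialize_keras_object(
    config,
    module_objects=vars(regularizers),
    printable_module_name="regularizer",
)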
<ide> def deserialize(config, custom_objects=None):
<ide> config, module_objects, custom_objects, printable_module_name
<ide> )
<ide>
<del> # If this object has already been loaded (i.e. it's shared between multiple
<del> # objects), return the already-loaded object.
<add> # If this object has already been loaded (i.e. it's shared between
<add> # multiple objects), return the already-loaded object.
<ide> shared_object_id = config.get(SHARED_OBJECT_KEY)
<ide> shared_object = _shared_object_loading_scope().get(
<ide> shared_object_id
<ide> def deserialize(config, custom_objects=None):
<ide> obj = module_objects.get(object_name)
<ide> if obj is None:
<ide> raise ValueError(
<del> f"Unknown {printable_module_name}: {object_name}. Please ensure "
<del> "this object is passed to the `custom_objects` argument. See "
<add> f"Unknown {printable_module_name}: {object_name}. Please "
<add> "ensure this object is passed to the `custom_objects` "
<add> "argument. See "
<ide> "https://www.tensorflow.org/guide/keras/save_and_serialize"
<ide> "#registering_the_custom_object for details."
<ide> )
<ide> def deserialize(config, custom_objects=None):
<ide> return identifier
<ide> else:
<ide> raise ValueError(
<del> f"Could not interpret serialized {printable_module_name}: {identifier}"
<add>             "Could not interpret serialized "
<add> f"{printable_module_name}: {identifier}"
<ide> )
<ide>
<ide>
<ide> def has_arg(fn, name, accept_all=False):
<ide> Args:
<ide> fn: Callable to inspect.
<ide> name: Check if `fn` can be called with `name` as a keyword argument.
<del> accept_all: What to return if there is no parameter called `name` but the
<del> function accepts a `**kwargs` argument.
<add> accept_all: What to return if there is no parameter called `name` but
<add> the function accepts a `**kwargs` argument.
<ide>
<ide> Returns:
<ide> bool, whether `fn` accepts a `name` keyword argument.
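# A quick sketch of the lookup semantics (calling the module-level `has_arg`
# above), including the `accept_all` fallback for callables taking **kwargs.
def f(a, b=1):
    return a + b

def g(a, **kwargs):
    return a

assert has_arg(f, "b")
assert not has_arg(f, "c")
assert not has_arg(g, "c")                 # no explicit parameter `c`
assert has_arg(g, "c", accept_all=True)    # `g` accepts **kwargs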
<ide> class Progbar:
<ide> target: Total number of steps expected, None if unknown.
<ide> width: Progress bar width on screen.
<ide> verbose: Verbosity mode, 0 (silent), 1 (verbose), 2 (semi-verbose)
<del> stateful_metrics: Iterable of string names of metrics that should *not* be
<del> averaged over time. Metrics in this list will be displayed as-is. All
<del> others will be averaged by the progbar before display.
<add> stateful_metrics: Iterable of string names of metrics that should *not*
<add> be averaged over time. Metrics in this list will be displayed as-is.
<add> All others will be averaged by the progbar before display.
<ide> interval: Minimum visual progress update interval (in seconds).
<ide> unit_name: Display name for step counts (usually "step" or "sample").
<ide> """
<ide> def update(self, current, values=None, finalize=None):
<ide>
<ide> Args:
<ide> current: Index of current step.
<del> values: List of tuples: `(name, value_for_last_step)`. If `name` is in
<del> `stateful_metrics`, `value_for_last_step` will be displayed as-is.
<del> Else, an average of the metric over time will be displayed.
<add> values: List of tuples: `(name, value_for_last_step)`. If `name` is
<add> in `stateful_metrics`, `value_for_last_step` will be displayed
<add> as-is. Else, an average of the metric over time will be
<add> displayed.
<ide> finalize: Whether this is the last update for the progress bar. If
<ide> `None`, defaults to `current >= self.target`.
<ide> """
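# A small usage sketch of the `Progbar` class above: "loss" is averaged
# across updates, while "lr" is listed in `stateful_metrics` and therefore
# displayed as-is.
import time

pbar = Progbar(target=10, stateful_metrics=["lr"])
for step in range(10):
    time.sleep(0.01)  # stand-in for real work
    pbar.update(step + 1, values=[("loss", 0.5), ("lr", 1e-3)])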
<ide> def update(self, current, values=None, finalize=None):
<ide> if k not in self._values_order:
<ide> self._values_order.append(k)
<ide> if k not in self.stateful_metrics:
<del> # In the case that progress bar doesn't have a target value in the first
<del> # epoch, both on_batch_end and on_epoch_end will be called, which will
<del> # cause 'current' and 'self._seen_so_far' to have the same value. Force
<del> # the minimal value to 1 here, otherwise stateful_metric will be 0s.
<add> # In the case that progress bar doesn't have a target value in
<add> # the first epoch, both on_batch_end and on_epoch_end will be
<add> # called, which will cause 'current' and 'self._seen_so_far' to
<add> # have the same value. Force the minimal value to 1 here,
<add> # otherwise stateful_metric will be 0s.
<ide> value_base = max(current - self._seen_so_far, 1)
<ide> if k not in self._values:
<ide> self._values[k] = [v * value_base, value_base]
<ide> def _format_time(self, time_per_unit, unit_name):
<ide> def _estimate_step_duration(self, current, now):
<ide> """Estimate the duration of a single step.
<ide>
<del> Given the step number `current` and the corresponding time `now`
<del> this function returns an estimate for how long a single step
<del> takes. If this is called before one step has been completed
<del> (i.e. `current == 0`) then zero is given as an estimate. The duration
<del> estimate ignores the duration of the (assumed to be non-representative)
<del> first step for estimates when more steps are available (i.e. `current>1`).
<add> Given the step number `current` and the corresponding time `now` this
<add> function returns an estimate for how long a single step takes. If this
<add> is called before one step has been completed (i.e. `current == 0`) then
<add> zero is given as an estimate. The duration estimate ignores the duration
<add> of the (assumed to be non-representative) first step for estimates when
<add> more steps are available (i.e. `current>1`).
<add>
<ide> Args:
<ide> current: Index of current step.
<ide> now: The current time.
<add>
<ide> Returns: Estimate of the duration of a single step.
<ide> """
<ide> if current:
<ide> # there are a few special scenarios here:
<del> # 1) somebody is calling the progress bar without ever supplying step 1
<del> # 2) somebody is calling the progress bar and supplies step one multiple
<del> # times, e.g. as part of a finalizing call
<add> # 1) somebody is calling the progress bar without ever supplying
<add> # step 1
<add> # 2) somebody is calling the progress bar and supplies step one
<add> # multiple times, e.g. as part of a finalizing call
<ide> # in these cases, we just fall back to the simple calculation
<ide> if self._time_after_first_step is not None and current > 1:
<ide> time_per_unit = (now - self._time_after_first_step) / (
<ide> def check_for_unexpected_keys(name, input_dict, expected_values):
<ide> unknown = set(input_dict.keys()).difference(expected_values)
<ide> if unknown:
<ide> raise ValueError(
<del> f"Unknown entries in {name} dictionary: {list(unknown)}. Only expected "
<del> f"following keys: {expected_values}"
<add> f"Unknown entries in {name} dictionary: {list(unknown)}. "
<add> f"Only expected following keys: {expected_values}"
<ide> )
<ide>
<ide>
<ide> def _load(self):
<ide> module = importlib.import_module(self.__name__)
<ide> self._parent_module_globals[self._local_name] = module
<ide> # Update this object's dict so that if someone keeps a reference to the
<del> # LazyLoader, lookups are efficient (__getattr__ is only called on lookups
<del> # that fail).
<add> # LazyLoader, lookups are efficient (__getattr__ is only called on
<add> # lookups that fail).
<ide> self.__dict__.update(module.__dict__)
<ide> return module
<ide>
<ide><path>keras/utils/generic_utils_test.py
<ide> def get_config(self):
<ide> ValueError, ".*has already been registered.*"
<ide> ):
<ide>
<del> @keras.utils.generic_utils.register_keras_serializable() # pylint: disable=function-redefined
<add> @keras.utils.generic_utils.register_keras_serializable()
<ide> class TestClass: # pylint: disable=function-redefined
<ide> def __init__(self, value):
<ide> self._value = value
<ide> def test_serialize_custom_class_without_get_config_fails(self):
<ide> "Cannot register a class that does " "not have a get_config.*",
<ide> ):
<ide>
<del> @keras.utils.generic_utils.register_keras_serializable( # pylint: disable=unused-variable
<add> @keras.utils.generic_utils.register_keras_serializable(
<ide> "TestPackage", "TestClass"
<ide> )
<ide> class TestClass:
<ide> def test_nested_shared_object_saving_scopes(self):
<ide> with generic_utils.SharedObjectSavingScope() as scope_1:
<ide> scope_1.create_config({}, my_obj)
<ide> with generic_utils.SharedObjectSavingScope() as scope_2:
<del> # Nesting saving scopes should return the original scope and should
<del> # not clear any objects we're tracking.
<add> # Nesting saving scopes should return the original scope and
<add> # should not clear any objects we're tracking.
<ide> self.assertIs(scope_1, scope_2)
<ide> self.assertIsNotNone(scope_2.get_config(my_obj))
<ide> self.assertIsNotNone(scope_1.get_config(my_obj))
<ide> def func_that_returns_one(self):
<ide> with self.captureWritesToStream(sys.stderr) as printed:
<ide> loaded_model.fit(x, y, epochs=1)
<ide> if tf.__internal__.tf2.enabled():
<del> # `tf.print` message is only available in stderr in TF2. Check that
<del> # custom `train_step` is used.
<add> # `tf.print` message is only available in stderr in TF2. Check
<add> # that custom `train_step` is used.
<ide> self.assertRegex(printed.contents(), train_step_message)
<ide>
<ide> # Check that the custom class does get used.
<ide><path>keras/utils/image_dataset.py
<ide> def image_dataset_from_directory(
<ide> ......b_image_2.jpg
<ide> ```
<ide>
<del> Then calling `image_dataset_from_directory(main_directory, labels='inferred')`
<del> will return a `tf.data.Dataset` that yields batches of images from
<del> the subdirectories `class_a` and `class_b`, together with labels
<add> Then calling `image_dataset_from_directory(main_directory,
<add> labels='inferred')` will return a `tf.data.Dataset` that yields batches of
<add> images from the subdirectories `class_a` and `class_b`, together with labels
<ide> 0 and 1 (0 corresponding to `class_a` and 1 corresponding to `class_b`).
<ide>
<ide> Supported image formats: jpeg, png, bmp, gif.
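# A hedged usage sketch following the directory layout above;
# "main_directory" is the illustrative path from the docstring.
import tensorflow as tf

train_ds = tf.keras.utils.image_dataset_from_directory(
    "main_directory",
    labels="inferred",
    label_mode="int",
    image_size=(256, 256),
    batch_size=32,
    seed=123,
)
# Each element is an (images, labels) batch, e.g.
# images.shape == (32, 256, 256, 3) and labels.shape == (32,).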
<ide> def image_dataset_from_directory(
<ide> Defaults to False.
<ide> crop_to_aspect_ratio: If True, resize the images without aspect
<ide> ratio distortion. When the original aspect ratio differs from the target
<del> aspect ratio, the output image will be cropped so as to return the largest
<del> possible window in the image (of size `image_size`) that matches
<add> aspect ratio, the output image will be cropped so as to return the
<add> largest possible window in the image (of size `image_size`) that matches
<ide> the target aspect ratio. By default (`crop_to_aspect_ratio=False`),
<ide> aspect ratio may not be preserved.
<ide> **kwargs: Legacy keyword arguments.
<ide> def image_dataset_from_directory(
<ide> if labels not in ("inferred", None):
<ide> if not isinstance(labels, (list, tuple)):
<ide> raise ValueError(
<del> "`labels` argument should be a list/tuple of integer labels, of "
<del> "the same size as the number of image files in the target "
<del> "directory. If you wish to infer the labels from the subdirectory "
<add> "`labels` argument should be a list/tuple of integer labels, "
<add> "of the same size as the number of image files in the target "
<add> "directory. If you wish to infer the labels from the "
<add> "subdirectory "
<ide> 'names in the target directory, pass `labels="inferred"`. '
<ide> "If you wish to get a dataset that only contains images "
<ide> f"(no labels), pass `labels=None`. Received: labels={labels}"
<ide> def image_dataset_from_directory(
<ide> )
<ide> if label_mode not in {"int", "categorical", "binary", None}:
<ide> raise ValueError(
<del> '`label_mode` argument must be one of "int", "categorical", "binary", '
<add> '`label_mode` argument must be one of "int", '
<add> '"categorical", "binary", '
<ide> f"or None. Received: label_mode={label_mode}"
<ide> )
<ide> if labels is None or label_mode is None:
<ide><path>keras/utils/image_dataset_test.py
<ide> def _prepare_directory(
<ide> return temp_dir
<ide>
<ide> def test_image_dataset_from_directory_standalone(self):
<del> # Test retrieving images without labels from a directory and its subdirs.
<add> # Test retrieving images without labels from a directory and its
<add> # subdirs.
<ide> if PIL is None:
<ide> return # Skip test if PIL is not available.
<ide>
<ide><path>keras/utils/image_utils.py
<ide> def smart_resize(x, size, interpolation="bilinear"):
<ide>
<ide> Warning: `tf.keras.preprocessing.image.smart_resize` is not recommended for
<ide> new code. Prefer `tf.keras.layers.Resizing`, which provides the same
<del> functionality as a preprocessing layer and adds `tf.RaggedTensor` support. See
<del> the [preprocessing layer guide](
<add> functionality as a preprocessing layer and adds `tf.RaggedTensor` support.
<add> See the [preprocessing layer guide](
<ide> https://www.tensorflow.org/guide/keras/preprocessing_layers)
<ide> for an overview of preprocessing layers.
<ide>
<ide> TensorFlow image datasets typically yield images that have each a different
<ide> size. However, these images need to be batched before they can be
<del> processed by Keras layers. To be batched, images need to share the same height
<del> and width.
<add> processed by Keras layers. To be batched, images need to share the same
<add> height and width.
<ide>
<ide> You could simply do:
<ide>
<ide> def smart_resize(x, size, interpolation="bilinear"):
<ide>
<ide> The resizing process is:
<ide>
<del> 1. Take the largest centered crop of the image that has the same aspect ratio
<del> as the target size. For instance, if `size=(200, 200)` and the input image has
<del> size `(340, 500)`, we take a crop of `(340, 340)` centered along the width.
<add> 1. Take the largest centered crop of the image that has the same aspect
<add> ratio as the target size. For instance, if `size=(200, 200)` and the input
<add> image has size `(340, 500)`, we take a crop of `(340, 340)` centered along
<add> the width.
<ide> 2. Resize the cropped image to the target size. In the example above,
<ide> we resize the `(340, 340)` crop to `(200, 200)`.
<ide>
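# A sketch of the crop-then-resize behavior described above: each (340, 500)
# image is first center-cropped to (340, 340), then resized to (200, 200).
import tensorflow as tf

images = tf.random.uniform((8, 340, 500, 3))
resized = tf.keras.preprocessing.image.smart_resize(images, size=(200, 200))
assert tuple(resized.shape) == (8, 200, 200, 3)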
<ide> def smart_resize(x, size, interpolation="bilinear"):
<ide> if img.shape.rank is not None:
<ide> if img.shape.rank < 3 or img.shape.rank > 4:
<ide> raise ValueError(
<del> "Expected an image array with shape `(height, width, channels)`, "
<del> "or `(batch_size, height, width, channels)`, but "
<add> "Expected an image array with shape `(height, width, "
<add> "channels)`, or `(batch_size, height, width, channels)`, but "
<ide> f"got input with incorrect rank, of shape {img.shape}."
<ide> )
<ide> shape = tf.shape(img)
<ide> def smart_resize(x, size, interpolation="bilinear"):
<ide> tf.cast(height * target_width, "float32") / target_height, "int32"
<ide> )
<ide>
<del> # Set back to input height / width if crop_height / crop_width is not smaller.
<add> # Set back to input height / width if crop_height / crop_width is not
<add> # smaller.
<ide> crop_height = tf.minimum(height, crop_height)
<ide> crop_width = tf.minimum(width, crop_width)
<ide>
<ide> def array_to_img(x, data_format=None, scale=True, dtype=None):
<ide> Args:
<ide> x: Input data, in any form that can be converted to a Numpy array.
<ide> data_format: Image data format, can be either `"channels_first"` or
<del> `"channels_last"`. Defaults to `None`, in which case the global setting
<del> `tf.keras.backend.image_data_format()` is used (unless you changed it,
<del> it defaults to `"channels_last"`).
<add> `"channels_last"`. Defaults to `None`, in which case the global
<add> setting `tf.keras.backend.image_data_format()` is used (unless you
<add> changed it, it defaults to `"channels_last"`).
<ide> scale: Whether to rescale the image such that minimum and maximum values
<ide> are 0 and 255 respectively. Defaults to `True`.
<ide>     dtype: Dtype to use. Defaults to `None`, in which case the global setting
<del> `tf.keras.backend.floatx()` is used (unless you changed it, it defaults
<del> to `"float32"`)
<add> `tf.keras.backend.floatx()` is used (unless you changed it, it
<add> defaults to `"float32"`)
<ide>
<ide> Returns:
<ide> A PIL Image instance.
<ide> def img_to_array(img, data_format=None, dtype=None):
<ide> Args:
<ide> img: Input PIL Image instance.
<ide> data_format: Image data format, can be either `"channels_first"` or
<del> `"channels_last"`. Defaults to `None`, in which case the global setting
<del> `tf.keras.backend.image_data_format()` is used (unless you changed it,
<del> it defaults to `"channels_last"`).
<add> `"channels_last"`. Defaults to `None`, in which case the global
<add> setting `tf.keras.backend.image_data_format()` is used (unless you
<add> changed it, it defaults to `"channels_last"`).
<ide>     dtype: Dtype to use. Defaults to `None`, in which case the global setting
<del> `tf.keras.backend.floatx()` is used (unless you changed it, it defaults
<del> to `"float32"`).
<add> `tf.keras.backend.floatx()` is used (unless you changed it, it
<add> defaults to `"float32"`).
<ide>
<ide> Returns:
<ide> A 3D Numpy array.
<ide> def save_img(path, x, data_format=None, file_format=None, scale=True, **kwargs):
<ide> x: Numpy array.
<ide> data_format: Image data format, either `"channels_first"` or
<ide> `"channels_last"`.
<del> file_format: Optional file format override. If omitted, the format to use
<del> is determined from the filename extension. If a file object was used
<del> instead of a filename, this parameter should always be used.
<add> file_format: Optional file format override. If omitted, the format to
<add> use is determined from the filename extension. If a file object was
<add> used instead of a filename, this parameter should always be used.
<ide> scale: Whether to rescale image values to be within `[0, 255]`.
<ide> **kwargs: Additional keyword arguments passed to `PIL.Image.save()`.
<ide> """
<ide><path>keras/utils/kernelized_utils.py
<ide> def exact_gaussian_kernel(x, y, stddev):
<ide>
<ide> The Gaussian kernel for vectors u, v is defined as follows:
<ide> K(u, v) = exp(-||u-v||^2 / (2* stddev^2))
<del> where the norm is the l2-norm. x, y can be either vectors or matrices. If they
<del> are vectors, they must have the same dimension. If they are matrices, they
<del> must have the same number of columns. In the latter case, the method returns
<del> (as a matrix) K(u, v) values for all pairs (u, v) where u is a row from x and
<del> v is a row from y.
<add> where the norm is the l2-norm. x, y can be either vectors or matrices. If
<add> they are vectors, they must have the same dimension. If they are matrices,
<add> they must have the same number of columns. In the latter case, the method
<add> returns (as a matrix) K(u, v) values for all pairs (u, v) where u is a row
<add> from x and v is a row from y.
<ide>
<ide> Args:
<ide>     x: a tensor of rank 1 or 2. Its shape should be either [dim] or [m, dim].
<ide>     y: a tensor of rank 1 or 2. Its shape should be either [dim] or [n, dim].
<ide> stddev: The width of the Gaussian kernel.
<ide>
<ide> Returns:
<del> A single value (scalar) with shape (1, 1) (if x, y are vectors) or a matrix
<del> of shape (m, n) with entries K(u, v) (where K is the Gaussian kernel) for
<del> all (u,v) pairs where u, v are rows from x and y respectively.
<add> A single value (scalar) with shape (1, 1) (if x, y are vectors) or a
<add> matrix of shape (m, n) with entries K(u, v) (where K is the Gaussian
<add> kernel) for all (u,v) pairs where u, v are rows from x and y respectively.
<ide>
<ide> Raises:
<ide> ValueError: if the shapes of x, y are not compatible.
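# A hand-check of K(u, v) = exp(-||u - v||^2 / (2 * stddev^2)), calling the
# module-level function above: for the rows of `x` against `y`, the squared
# distances are 0 and 2, so the kernel values are exp(0) = 1 and
# exp(-1) ~= 0.368.
import tensorflow as tf

x = tf.constant([[1.0, 0.0], [0.0, 1.0]])
y = tf.constant([[1.0, 0.0]])
k = exact_gaussian_kernel(x, y, stddev=1.0)  # shape (2, 1)
# k ~= [[1.0], [0.368]]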
<ide> def exact_laplacian_kernel(x, y, stddev):
<ide>
<ide> The Laplacian kernel for vectors u, v is defined as follows:
<ide> K(u, v) = exp(-||u-v|| / stddev)
<del> where the norm is the l1-norm. x, y can be either vectors or matrices. If they
<del> are vectors, they must have the same dimension. If they are matrices, they
<del> must have the same number of columns. In the latter case, the method returns
<del> (as a matrix) K(u, v) values for all pairs (u, v) where u is a row from x and
<del> v is a row from y.
<add> where the norm is the l1-norm. x, y can be either vectors or matrices. If
<add> they are vectors, they must have the same dimension. If they are matrices,
<add> they must have the same number of columns. In the latter case, the method
<add> returns (as a matrix) K(u, v) values for all pairs (u, v) where u is a row
<add> from x and v is a row from y.
<ide>
<ide> Args:
<ide>     x: a tensor of rank 1 or 2. Its shape should be either [dim] or [m, dim].
<ide><path>keras/utils/kernelized_utils_test.py
<ide> def test_almost_identical_vectors(self, exact_kernel_fn, expected_values):
<ide> exact_kernel = exact_kernel_fn(x, y)
<ide> shape = exact_kernel.shape.as_list()
<ide> self.assertLen(shape, 2)
<del> # x and y are almost identical and therefore K(x, y) will be almost equal to
<del> # the identity value of the kernel.
<add> # x and y are almost identical and therefore K(x, y) will be almost
<add> # equal to the identity value of the kernel.
<ide> self.assertAllClose(expected_values, exact_kernel, atol=1e-3)
<ide>
<ide> @parameterized.named_parameters(
<ide> ("gaussian", _exact_gaussian(stddev=1.0), [[0.99], [0.977]]),
<ide> ("laplacian", _exact_laplacian(stddev=5.0), [[0.96], [0.94]]),
<ide> )
<ide> def test_similar_matrices(self, exact_kernel_fn, expected_values):
<del> """Pairwise "close" vectors give high kernel values (similarity scores)."""
<add> """Pairwise "close" vectors give high kernel values (similarity
<add> scores)."""
<ide> x = tf.constant([1.0, 3.4, -2.1, 0.9, 3.3, -2.0], shape=[2, 3])
<ide> y = tf.constant([1.1, 3.35, -2.05])
<ide> exact_kernel = exact_kernel_fn(x, y)
<ide><path>keras/utils/kpl_test_utils.py
<ide> def define_kpls_for_training(self, use_adapt):
<ide> """Function that defines KPL used for unit tests of tf.distribute.
<ide>
<ide> Args:
<del> use_adapt: if adapt will be called. False means there will be precomputed
<del> statistics.
<add> use_adapt: if adapt will be called. False means there will be
<add> precomputed statistics.
<ide>
<ide> Returns:
<ide> feature_mapper: a simple keras model with one keras StringLookup layer
<ide><path>keras/utils/layer_utils.py
<ide> def print_layer_summary(layer, nested_level=0):
<ide> name = layer.name
<ide> cls_name = layer.__class__.__name__
<ide> if not layer.built and not getattr(layer, "_is_graph_network", False):
<del> # If a subclassed model has a layer that is not called in Model.call, the
<del> # layer will not be built and we cannot call layer.count_params().
<add> # If a subclassed model has a layer that is not called in
<add> # Model.call, the layer will not be built and we cannot call
<add> # layer.count_params().
<ide> params = "0 (unused)"
<ide> else:
<ide> params = layer.count_params()
<ide> class instance, and provides no mechanism for cache invalidation. Thus it is
<ide> For classes with custom getattr / setattr behavior (such as trackable
<ide> objects), storing cache results as object attributes is not performant.
<ide> Instead, a specialized cache can significantly reduce property lookup
<del> overhead. (While still allowing the decorated property to be lazily computed.)
<del> Consider the following class:
<add> overhead. (While still allowing the decorated property to be lazily
<add> computed.) Consider the following class:
<ide>
<ide> ```
<ide> class MyClass:
<ide> def __setattr__(self, key, value):
<ide>
<ide> Slows down attribute assignment by nearly 10x.
<ide>
<del> By contrast, replacing the definition of `thing` with the following sidesteps
<del> the expensive __setattr__ altogether:
<add> By contrast, replacing the definition of `thing` with the following
<add> sidesteps the expensive __setattr__ altogether:
<ide>
<ide> '''
<ide> @property
<ide> def thing(self):
<ide>
<ide> Performance:
<ide> The overhead for this decorator is ~0.4 us / call. A much lower overhead
<del> implementation (~0.085 us / call) can be achieved by using a custom dict type:
<add> implementation (~0.085 us / call) can be achieved by using a custom dict
<add> type:
<ide>
<ide> ```
<ide> def dict_based_cache(f):
<ide> def filter_empty_layer_containers(layer_list):
<ide>
<ide>
<ide> class CallFunctionSpec:
<del> """Caches the spec and provides utilities for handling call function args."""
<add> """Caches the spec and provides utilities for handling call function
<add> args."""
<ide>
<ide> def __init__(self, full_argspec):
<ide>         """Initializes a `CallFunctionSpec`.
<ide> def __init__(self, full_argspec):
<ide>
<ide> call_fn_defaults = self._full_argspec.defaults or []
<ide> defaults = dict()
<del> # The call arg defaults are an n-tuple of the last n elements of the args
<del> # list. (n = # of elements that have a default argument)
<add> # The call arg defaults are an n-tuple of the last n elements of the
<add> # args list. (n = # of elements that have a default argument)
<ide> for i in range(-1 * len(call_fn_defaults), 0):
<ide> defaults[self._arg_names[i]] = call_fn_defaults[i]
<del> # The default training arg will be any (non-None) default specified in the
<del> # method signature, or None if no value is specified.
<add> # The default training arg will be any (non-None) default specified in
<add> # the method signature, or None if no value is specified.
<ide> defaults.update(self._full_argspec.kwonlydefaults or {})
<ide> self._default_training_arg = defaults.get("training")
<ide>
<ide> def arg_names(self, value):
<ide> @cached_per_instance
<ide> def arg_positions(self):
<ide> """Returns a dict mapping arg names to their index positions."""
<del> # `arg_positions` is not accurate if the layer has variable positional args.
<add> # `arg_positions` is not accurate if the layer has variable positional
<add> # args.
<ide> call_fn_arg_positions = dict()
<ide> for pos, arg in enumerate(self._arg_names):
<ide> call_fn_arg_positions[arg] = pos
<ide> def arg_was_passed(self, arg_name, args, kwargs, inputs_in_args=False):
<ide> arg_name: String name of the argument to find.
<ide> args: Tuple of args passed to the call function.
<ide> kwargs: Dictionary of kwargs passed to the call function.
<del> inputs_in_args: Whether the input argument (the first argument in the call
<del> function) is included in `args`. Defaults to `False`.
<add> inputs_in_args: Whether the input argument (the first argument in the
<add> call function) is included in `args`. Defaults to `False`.
<ide>
<ide> Returns:
<ide> True if argument with `arg_name` is present in `args` or `kwargs`.
<ide> def get_arg_value(self, arg_name, args, kwargs, inputs_in_args=False):
<ide> arg_name: String name of the argument to find.
<ide> args: Tuple of args passed to the call function.
<ide> kwargs: Dictionary of kwargs passed to the call function.
<del> inputs_in_args: Whether the input argument (the first argument in the call
<del> function) is included in `args`. Defaults to `False`.
<add> inputs_in_args: Whether the input argument (the first argument in the
<add> call function) is included in `args`. Defaults to `False`.
<ide>
<ide> Returns:
<del> The value of the argument with name `arg_name`, extracted from `args` or
<del> `kwargs`.
<add> The value of the argument with name `arg_name`, extracted from `args`
<add> or `kwargs`.
<ide>
<ide> Raises:
<ide> KeyError if the value of `arg_name` cannot be found.
<ide> def set_arg_value(
<ide> new_value: New value to give to the argument.
<ide> args: Tuple of args passed to the call function.
<ide> kwargs: Dictionary of kwargs passed to the call function.
<del> inputs_in_args: Whether the input argument (the first argument in the call
<del> function) is included in `args`. Defaults to `False`.
<del> pop_kwarg_if_none: If the new value is `None`, and this is `True`, then
<del> the argument is deleted from `kwargs`.
<add> inputs_in_args: Whether the input argument (the first argument in the
<add> call function) is included in `args`. Defaults to `False`.
<add> pop_kwarg_if_none: If the new value is `None`, and this is `True`,
<add> then the argument is deleted from `kwargs`.
<ide>
<ide> Returns:
<ide> The updated `(args, kwargs)`.
<ide><path>keras/utils/layer_utils_test.py
<ide> def print_to_file(text):
<ide> reader = open(fpath, "r")
<ide> lines = reader.readlines()
<ide> reader.close()
<del> # The output content are slightly different for the input shapes between
<del> # v1 and v2.
<add> # The output content are slightly different for the input shapes
<add> # between v1 and v2.
<ide> if tf.__internal__.tf2.enabled():
<ide> self.assertEqual(len(lines), 39)
<ide> else:
<ide> def test_property(self):
<ide> self.assertEqual(first_object.test_property, id(first_object))
<ide> self.assertEqual(second_object.test_property, id(second_object))
<ide>
<del> # Count the function calls to make sure the cache is actually being used.
<add> # Count the function calls to make sure the cache is actually being
<add> # used.
<ide> self.assertAllEqual(tuple(test_counter.values()), (1, 1))
<ide>
<ide> def test_property_cache_threaded(self):
<ide> def test_property(self):
<ide> call_count["test_property"] += 1
<ide> time.sleep(np.random.random() + 1.0)
<ide>
<del> # Use a RandomState which is seeded off the instance's id (the mod is
<del> # because numpy limits the range of seeds) to ensure that an instance
<del> # returns the same value in different threads, but different instances
<del> # return different values.
<add> # Use a RandomState which is seeded off the instance's id (the
<add> # mod is because numpy limits the range of seeds) to ensure that
<add> # an instance returns the same value in different threads, but
<add> # different instances return different values.
<ide> return int(
<ide> np.random.RandomState(id(self) % (2**31)).randint(2**16)
<ide> )
<ide> def get_test_property(self, _):
<ide> return self.test_property
<ide>
<ide> # Test that multiple threads return the same value. This requires that
<del> # the underlying function is repeatable, as cached_property makes no attempt
<del> # to prioritize the first call.
<add> # the underlying function is repeatable, as cached_property makes no
<add> # attempt to prioritize the first call.
<ide> test_obj = MyObject()
<ide> with contextlib.closing(multiprocessing.dummy.Pool(32)) as pool:
<del> # Intentionally make a large pool (even when there are only a small number
<del> # of cpus) to ensure that the runtime switches threads.
<add> # Intentionally make a large pool (even when there are only a small
<add> # number of cpus) to ensure that the runtime switches threads.
<ide> results = pool.map(test_obj.get_test_property, range(64))
<ide> self.assertEqual(len(set(results)), 1)
<ide>
<ide> def get_test_property(self, _):
<ide> results = pool.map(test_obj.get_test_property, range(4))
<ide> total_time = timeit.default_timer() - start_time
<ide>
<del> # Note(taylorrobie): The reason that it is safe to time a unit test is that
<del> # a cache hit will be << 1 second, and a cache miss is
<del> # guaranteed to be >= 1 second. Empirically confirmed by
<del> # 100,000 runs with no flakes.
<add> # Note(taylorrobie): The reason that it is safe to time a unit test is
<add> # that a cache hit will be << 1 second, and a cache miss is guaranteed
<add> # to be >= 1 second. Empirically confirmed by 100,000 runs with no
<add> # flakes.
<ide> self.assertLess(total_time, 0.95)
<ide>
<ide> def test_property_cache_serialization(self):
<del> # Reset call count. .keys() must be wrapped in a list, because otherwise we
<del> # would mutate the iterator while iterating.
<add> # Reset call count. .keys() must be wrapped in a list, because otherwise
<add> # we would mutate the iterator while iterating.
<ide> for k in list(_PICKLEABLE_CALL_COUNT.keys()):
<ide> _PICKLEABLE_CALL_COUNT.pop(k)
<ide>
<ide><path>keras/utils/losses_utils.py
<ide> class ReductionV2:
<ide>
<ide> Contains the following values:
<ide>
<del> * `AUTO`: Indicates that the reduction option will be determined by the usage
<del> context. For almost all cases this defaults to `SUM_OVER_BATCH_SIZE`. When
<del> used with `tf.distribute.Strategy`, outside of built-in training loops such
<del> as `tf.keras` `compile` and `fit`, we expect reduction value to be
<del> `SUM` or `NONE`. Using `AUTO` in that case will raise an error.
<del> * `NONE`: No **additional** reduction is applied to the output of the wrapped
<del> loss function. When non-scalar losses are returned to Keras functions like
<del> `fit`/`evaluate`, the unreduced vector loss is passed to the optimizer
<del> but the reported loss will be a scalar value.
<add> * `AUTO`: Indicates that the reduction option will be determined by the
<add> usage context. For almost all cases this defaults to
<add> `SUM_OVER_BATCH_SIZE`. When used with `tf.distribute.Strategy`, outside of
<add> built-in training loops such as `tf.keras` `compile` and `fit`, we expect
<add> reduction value to be `SUM` or `NONE`. Using `AUTO` in that case will
<add> raise an error.
<add> * `NONE`: No **additional** reduction is applied to the output of the
<add> wrapped loss function. When non-scalar losses are returned to Keras
<add> functions like `fit`/`evaluate`, the unreduced vector loss is passed to
<add> the optimizer but the reported loss will be a scalar value.
<ide>
<ide> Caution: **Verify the shape of the outputs when using** `Reduction.NONE`.
<del> The builtin loss functions wrapped by the loss classes reduce
<del> one dimension (`axis=-1`, or `axis` if specified by loss function).
<del> `Reduction.NONE` just means that no **additional** reduction is applied by
<del> the class wrapper. For categorical losses with an example input shape of
<del> `[batch, W, H, n_classes]` the `n_classes` dimension is reduced. For
<add> The builtin loss functions wrapped by the loss classes reduce one
<add> dimension (`axis=-1`, or `axis` if specified by loss function).
<add> `Reduction.NONE` just means that no **additional** reduction is applied
<add> by the class wrapper. For categorical losses with an example input shape
<add> of `[batch, W, H, n_classes]` the `n_classes` dimension is reduced. For
<ide> pointwise losses you must include a dummy axis so that `[batch, W, H, 1]`
<ide> is reduced to `[batch, W, H]`. Without the dummy axis `[batch, W, H]`
<ide> will be incorrectly reduced to `[batch, W]`.
<ide>
<ide> * `SUM`: Scalar sum of weighted losses.
<del> * `SUM_OVER_BATCH_SIZE`: Scalar `SUM` divided by number of elements in losses.
<del> This reduction type is not supported when used with
<del> `tf.distribute.Strategy` outside of built-in training loops like `tf.keras`
<del> `compile`/`fit`.
<add> * `SUM_OVER_BATCH_SIZE`: Scalar `SUM` divided by number of elements in
<add> losses. This reduction type is not supported when used with
<add> `tf.distribute.Strategy` outside of built-in training loops like
<add> `tf.keras` `compile`/`fit`.
<ide>
<ide> You can implement 'SUM_OVER_BATCH_SIZE' using global batch size like:
<ide> ```
<ide> def remove_squeezable_dimensions(
<ide> defaults to 0, and we squeeze the last dimension of the larger rank if they
<ide> differ by 1.
<ide>
<del> But, for example, if `labels` contains class IDs and `predictions` contains 1
<del> probability per class, we expect `predictions` to have 1 more dimension than
<del> `labels`, so `expected_rank_diff` would be 1. In this case, we'd squeeze
<del> `labels` if `rank(predictions) - rank(labels) == 0`, and
<add> But, for example, if `labels` contains class IDs and `predictions` contains
<add> 1 probability per class, we expect `predictions` to have 1 more dimension
<add> than `labels`, so `expected_rank_diff` would be 1. In this case, we'd
<add> squeeze `labels` if `rank(predictions) - rank(labels) == 0`, and
<ide> `predictions` if `rank(predictions) - rank(labels) == 2`.
<ide>
<ide> This will use static shape if available. Otherwise, it will add graph
<ide> def squeeze_or_expand_dimensions(y_pred, y_true=None, sample_weight=None):
<ide> y_pred_rank = y_pred_shape.ndims
<ide> if y_true is not None:
<ide>
<del> # If sparse matrix is provided as `y_true`, the last dimension in `y_pred`
<del> # may be > 1. Eg: y_true = [0, 1, 2] (shape=(3,)),
<del> # y_pred = [[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]] (shape=(3, 3))
<del> # In this case, we should not try to remove squeezable dimension.
<add> # If sparse matrix is provided as `y_true`, the last dimension in
<add> # `y_pred` may be > 1. Eg: y_true = [0, 1, 2] (shape=(3,)), y_pred =
<add> # [[.9, .05, .05], [.5, .89, .6], [.05, .01, .94]] (shape=(3, 3)) In
<add> # this case, we should not try to remove squeezable dimension.
<ide> y_true_shape = y_true.shape
<ide> y_true_rank = y_true_shape.ndims
<ide> if (y_true_rank is not None) and (y_pred_rank is not None):
<ide> def squeeze_or_expand_dimensions(y_pred, y_true=None, sample_weight=None):
<ide> else:
<ide> # Use dynamic rank.
<ide> rank_diff = tf.rank(y_pred) - tf.rank(y_true)
<del> squeeze_dims = lambda: remove_squeezable_dimensions( # pylint: disable=g-long-lambda
<del> y_true, y_pred
<del> )
<add> squeeze_dims = lambda: remove_squeezable_dimensions(y_true, y_pred)
<ide> is_last_dim_1 = tf.equal(1, tf.shape(y_pred)[-1])
<ide> maybe_squeeze_dims = (
<ide> lambda: tf.cond( # pylint: disable=g-long-lambda
<ide> def compute_weighted_loss(
<ide>
<ide> Args:
<ide> losses: `Tensor` of shape `[batch_size, d1, ... dN]`.
<del> sample_weight: Optional `Tensor` whose rank is either 0, or the same rank as
<del> `losses`, or be broadcastable to `losses`.
<del> reduction: (Optional) Type of `tf.keras.losses.Reduction` to apply to loss.
<del> Default value is `SUM_OVER_BATCH_SIZE`.
<add> sample_weight: Optional `Tensor` whose rank is either 0, or the same rank
<add> as `losses`, or be broadcastable to `losses`.
<add> reduction: (Optional) Type of `tf.keras.losses.Reduction` to apply to
<add> loss. Default value is `SUM_OVER_BATCH_SIZE`.
<ide> name: Optional name for the op.
<ide>
<ide> Raises:
<del> ValueError: If the shape of `sample_weight` is not compatible with `losses`.
<add> ValueError: If the shape of `sample_weight` is not compatible with
<add> `losses`.
<ide>
<ide> Returns:
<ide> Weighted loss `Tensor` of the same type as `losses`. If `reduction` is
<ide> def compute_weighted_loss(
<ide> ):
<ide> sample_weight = tf.convert_to_tensor(sample_weight)
<ide>
<del> # Convert any non float dtypes to floats, to avoid it loss any precision for
<del> # dtype like int or bool.
<add> # Convert any non float dtypes to floats, to avoid it loss any precision
<add> # for dtype like int or bool.
<ide> if not losses.dtype.is_floating:
<ide> input_dtype = losses.dtype
<ide> losses = tf.cast(losses, "float32")
<ide> input_casted = True
<ide> else:
<ide> input_casted = False
<ide> sample_weight = tf.cast(sample_weight, losses.dtype)
<del> # Update dimensions of `sample_weight` to match with `losses` if possible.
<add> # Update dimensions of `sample_weight` to match with `losses` if
<add> # possible.
<ide> (
<ide> losses,
<ide> _,
<ide> sample_weight,
<del> ) = squeeze_or_expand_dimensions( # pylint: disable=unbalanced-tuple-unpacking
<del> losses, None, sample_weight
<del> )
<add> ) = squeeze_or_expand_dimensions(losses, None, sample_weight)
<ide> weighted_losses = tf.multiply(losses, sample_weight)
<ide>
<ide> # Apply reduction function to the individual weighted losses.
<ide> def cast_losses_to_common_dtype(losses):
<ide> """Cast a list of losses to a common dtype.
<ide>
<ide> If any loss is floating-point, they will all be casted to the most-precise
<del> floating-point loss. Otherwise the losses are not casted. We also skip casting
<del> losses if there are any complex losses.
<add> floating-point loss. Otherwise the losses are not casted. We also skip
<add> casting losses if there are any complex losses.
<ide>
<ide> Args:
<ide> losses: A list of losses.
<ide><path>keras/utils/metrics_utils.py
<ide> def decorated(metric_obj, *args, **kwargs):
<ide> raise ValueError(
<ide> "Trying to run metric.update_state in replica context when "
<ide> "the metric was not created in TPUStrategy scope. "
<del> "Make sure the keras Metric is created in TPUstrategy scope. "
<add> "Make sure the keras Metric is created in TPUstrategy "
<add> "scope. "
<ide> )
<ide>
<ide> with tf_utils.graph_context_for_symbolic_tensors(*args, **kwargs):
<ide> def decorated(metric_obj, *args):
<ide> """Decorated function with merge_call."""
<ide> replica_context = tf.distribute.get_replica_context()
<ide>
<del> # The purpose of using `merge_call` to call `result()` is to trigger cross
<del> # replica aggregation of metric state variables (SyncOnReadVariable). After
<del> # we introduced `variable_sync_on_read_context`, in principle there is no
<del> # need to use `merge_call` here. However the branch still exists because:
<add> # The purpose of using `merge_call` to call `result()` is to trigger
<add> # cross replica aggregation of metric state variables
<add> # (SyncOnReadVariable). After we introduced
<add> # `variable_sync_on_read_context`, in principle there is no need to use
<add> # `merge_call` here. However the branch still exists because:
<ide> #
<del> # 1. Keras V1 training code sometimes assumes `result_t` is the same tensor
<del> # across replicas (achieved by `merge_call`). With
<add> # 1. Keras V1 training code sometimes assumes `result_t` is the same
<add> # tensor across replicas (achieved by `merge_call`). With
<ide> # `variable_sync_on_read_context` each replica gets their own tensors
<ide> # residing on replica's device, thus breaking the assumption.
<del> # 2. Keras c/fit creates a tf.function (a.k.a, train_function) that returns
<del> # the metric values of the first replica. With
<add> # 2. Keras c/fit creates a tf.function (a.k.a, train_function) that
<add> # returns the metric values of the first replica. With
<ide> # `variable_sync_on_read_context` since each replica gets their own
<del> # tensors, the metric result tensors on the non-first replicas are not in
<del> # the return value of train_function, making TF graph optimizer prune the
<del> # branch that computes and aggregates those metric results. As a result,
<del> # if NCCL is used to do the aggregation, the program will hang because
<del> # NCCL ops are only launched on the non-pruned first replica.
<add> # tensors, the metric result tensors on the non-first replicas are
<add> # not in the return value of train_function, making TF graph
<add> # optimizer prune the branch that computes and aggregates those
<add> # metric results. As a result, if NCCL is used to do the aggregation,
<add> # the program will hang because NCCL ops are only launched on the
<add> # non-pruned first replica.
<ide> #
<del> # We condition on strategy_supports_no_merge_call() since we know if it is
<del> # True, the program uses `jit_compile` to compile replica fn, meaning it is
<del> # not V1 training (hence #1 is okay), and no pruning will happen as
<del> # compiled functions are not inlined (hence #2 is okay).
<add> # We condition on strategy_supports_no_merge_call() since we know if it
<add> # is True, the program uses `jit_compile` to compile replica fn, meaning
<add> # it is not V1 training (hence #1 is okay), and no pruning will happen
<add> # as compiled functions are not inlined (hence #2 is okay).
<ide> if (
<ide> replica_context is None
<ide> or tf.__internal__.distribute.strategy_supports_no_merge_call()
<ide> def decorated(metric_obj, *args):
<ide> result_t = tf.identity(raw_result)
<ide> except (ValueError, TypeError):
<ide> raise RuntimeError(
<del> "The output of `metric.result()` can only be a single "
<del> "Tensor/Variable, or a dict of Tensors/Variables. "
<del> f"For metric {metric_obj.name}, got result {raw_result}."
<add> "The output of `metric.result()` can only be a "
<add> "single Tensor/Variable, or a dict of "
<add> "Tensors/Variables. "
<add> f"For metric {metric_obj.name}, "
<add> f"got result {raw_result}."
<ide> )
<ide> else:
<del> # TODO(psv): Test distribution of metrics using different distribution
<del> # strategies.
<add> # TODO(psv): Test distribution of metrics using different
<add> # distribution strategies.
<ide>
<del> # Creating a wrapper for merge_fn. merge_call invokes the given merge_fn
<del> # with distribution object as the first parameter. We create a wrapper
<del> # here so that the result function need not have that parameter.
<add> # Creating a wrapper for merge_fn. merge_call invokes the given
<add> # merge_fn with distribution object as the first parameter. We
<add> # create a wrapper here so that the result function need not have
<add> # that parameter.
<ide> def merge_fn_wrapper(distribution, merge_fn, *args):
<del> # We will get `PerReplica` merge function. Taking the first one as all
<del> # are identical copies of the function that we had passed below.
<add> # We will get `PerReplica` merge function. Taking the first one
<add> # as all are identical copies of the function that we had passed
<add> # below.
<ide> result = distribution.experimental_local_results(merge_fn)[0](
<ide> *args
<ide> )
<ide>
<ide> # Wrapping result in identity so that control dependency between
<del> # update_op from `update_state` and result works in case result returns
<del> # a tensor.
<add> # update_op from `update_state` and result works in case result
<add> # returns a tensor.
<ide> return tf.identity(result)
<ide>
<del> # Wrapping result in merge_call. merge_call is used when we want to leave
<del> # replica mode and compute a value in cross replica mode.
<add> # Wrapping result in merge_call. merge_call is used when we want to
<add> # leave replica mode and compute a value in cross replica mode.
<ide> result_t = replica_context.merge_call(
<ide> merge_fn_wrapper, args=(result_fn,) + args
<ide> )
<ide>
<ide> # We are saving the result op here to be used in train/test execution
<del> # functions. This basically gives the result op that was generated with a
<del> # control dep to the updates for these workflows.
<add> # functions. This basically gives the result op that was generated with
<add> # a control dep to the updates for these workflows.
<ide> metric_obj._call_result = result_t
<ide> return result_t
<ide>
<ide> def assert_thresholds_range(thresholds):
<ide> ]
<ide> if invalid_thresholds:
<ide> raise ValueError(
<del> f"Threshold values must be in [0, 1]. Received: {invalid_thresholds}"
<add> f"Threshold values must be in [0, 1]. "
<add> f"Received: {invalid_thresholds}"
<ide> )
<ide>
<ide>
<ide> def _update_confusion_matrix_variables_optimized(
<ide> tp_bucket_value = tf.math.unsorted_segment_sum(true_labels, bucket_indices,
<ide> num_segments=num_thresholds)
<ide> = [1, 1, 0]
<del> # For [1, 1, 0] here, it means there is 1 true value contributed by bucket 0,
<del> # and 1 value contributed by bucket 1. When we aggregate them to together,
<del> # the result become [a + b + c, b + c, c], since large thresholds will always
<del> # contribute to the value for smaller thresholds.
<add> # For [1, 1, 0] here, it means there is 1 true value contributed by bucket
<add> # 0, and 1 value contributed by bucket 1. When we aggregate them to
<add> # together, the result become [a + b + c, b + c, c], since large thresholds
<add> # will always contribute to the value for smaller thresholds.
<ide> true_positive = tf.math.cumsum(tp_bucket_value, reverse=True)
<ide> = [2, 1, 0]
<ide>
<ide> def _update_confusion_matrix_variables_optimized(
<ide> Args:
<ide> variables_to_update: Dictionary with 'tp', 'fn', 'tn', 'fp' as valid keys
<ide> and corresponding variables to update as values.
<del> y_true: A floating point `Tensor` whose shape matches `y_pred`. Will be cast
<del> to `bool`.
<del> y_pred: A floating point `Tensor` of arbitrary shape and whose values are in
<del> the range `[0, 1]`.
<add> y_true: A floating point `Tensor` whose shape matches `y_pred`. Will be
<add> cast to `bool`.
<add> y_pred: A floating point `Tensor` of arbitrary shape and whose values are
<add> in the range `[0, 1]`.
<ide> thresholds: A sorted floating point `Tensor` with value in `[0, 1]`.
<del> It need to be evenly distributed (the diff between each element need to be
<del> the same).
<add> It need to be evenly distributed (the diff between each element need to
<add> be the same).
<ide> multi_label: Optional boolean indicating whether multidimensional
<del> prediction/labels should be treated as multilabel responses, or flattened
<del> into a single label. When True, the valus of `variables_to_update` must
<del> have a second dimension equal to the number of labels in y_true and
<del> y_pred, and those tensors must not be RaggedTensors.
<add> prediction/labels should be treated as multilabel responses, or
<add> flattened into a single label. When True, the valus of
<add> `variables_to_update` must have a second dimension equal to the number
<add> of labels in y_true and y_pred, and those tensors must not be
<add> RaggedTensors.
<ide> sample_weights: Optional `Tensor` whose rank is either 0, or the same rank
<ide> as `y_true`, and must be broadcastable to `y_true` (i.e., all dimensions
<del> must be either `1`, or the same as the corresponding `y_true` dimension).
<add> must be either `1`, or the same as the corresponding `y_true`
<add> dimension).
<ide> label_weights: Optional tensor of non-negative weights for multilabel
<del> data. The weights are applied when calculating TP, FP, FN, and TN without
<del> explicit multilabel handling (i.e. when the data is to be flattened).
<del> thresholds_with_epsilon: Optional boolean indicating whether the leading and
<del> tailing thresholds has any epsilon added for floating point imprecisions.
<del> It will change how we handle the leading and tailing bucket.
<add> data. The weights are applied when calculating TP, FP, FN, and TN
<add> without explicit multilabel handling (i.e. when the data is to be
<add> flattened).
<add> thresholds_with_epsilon: Optional boolean indicating whether the leading
<add> and tailing thresholds has any epsilon added for floating point
<add> imprecisions. It will change how we handle the leading and tailing
<add> bucket.
<ide>
<ide> Returns:
<ide> Update op.
<ide> def _update_confusion_matrix_variables_optimized(
<ide> if multi_label:
<ide> # We need to run bucket segment sum for each of the label class. In the
<ide> # multi_label case, the rank of the label is 2. We first transpose it so
<del> # that the label dim becomes the first and we can parallel run though them.
<add> # that the label dim becomes the first and we can parallel run though
<add> # them.
<ide> true_labels = tf.transpose(true_labels)
<ide> false_labels = tf.transpose(false_labels)
<ide> bucket_indices = tf.transpose(bucket_indices)
<ide> def is_evenly_distributed_thresholds(thresholds):
<ide> evaluated.
<ide>
<ide> Args:
<del> thresholds: A python list or tuple, or 1D numpy array whose value is ranged
<del> in [0, 1].
<add> thresholds: A python list or tuple, or 1D numpy array whose value is
<add> ranged in [0, 1].
<ide>
<ide> Returns:
<ide> boolean, whether the values in the inputs are evenly distributed.
<ide> def update_confusion_matrix_variables(
<ide> true_negatives: y_true == False and y_pred <= thresholds
<ide> false_positive: y_true == False and y_pred > thresholds
<ide>
<del> The results will be weighted and added together. When multiple thresholds are
<del> provided, we will repeat the same for every threshold.
<add> The results will be weighted and added together. When multiple thresholds
<add> are provided, we will repeat the same for every threshold.
<ide>
<del> For estimation of these metrics over a stream of data, the function creates an
<del> `update_op` operation that updates the given variables.
<add> For estimation of these metrics over a stream of data, the function creates
<add> an `update_op` operation that updates the given variables.
<ide>
<ide> If `sample_weight` is `None`, weights default to 1.
<ide> Use weights of 0 to mask values.
<ide> def update_confusion_matrix_variables(
<ide> variables_to_update: Dictionary with 'tp', 'fn', 'tn', 'fp' as valid keys
<ide> and corresponding variables to update as values.
<ide> y_true: A `Tensor` whose shape matches `y_pred`. Will be cast to `bool`.
<del> y_pred: A floating point `Tensor` of arbitrary shape and whose values are in
<del> the range `[0, 1]`.
<add> y_pred: A floating point `Tensor` of arbitrary shape and whose values are
<add> in the range `[0, 1]`.
<ide> thresholds: A float value, float tensor, python list, or tuple of float
<ide> thresholds in `[0, 1]`, or NEG_INF (used when top_k is set).
<del> top_k: Optional int, indicates that the positive labels should be limited to
<del> the top k predictions.
<add> top_k: Optional int, indicates that the positive labels should be limited
<add> to the top k predictions.
<ide> class_id: Optional int, limits the prediction and labels to the class
<ide> specified by this argument.
<del> sample_weight: Optional `Tensor` whose rank is either 0, or the same rank as
<del> `y_true`, and must be broadcastable to `y_true` (i.e., all dimensions must
<del> be either `1`, or the same as the corresponding `y_true` dimension).
<add> sample_weight: Optional `Tensor` whose rank is either 0, or the same rank
<add> as `y_true`, and must be broadcastable to `y_true` (i.e., all dimensions
<add> must be either `1`, or the same as the corresponding `y_true`
<add> dimension).
<ide> multi_label: Optional boolean indicating whether multidimensional
<del> prediction/labels should be treated as multilabel responses, or flattened
<del> into a single label. When True, the valus of `variables_to_update` must
<del> have a second dimension equal to the number of labels in y_true and
<del> y_pred, and those tensors must not be RaggedTensors.
<add> prediction/labels should be treated as multilabel responses, or
<add> flattened into a single label. When True, the valus of
<add> `variables_to_update` must have a second dimension equal to the number
<add> of labels in y_true and y_pred, and those tensors must not be
<add> RaggedTensors.
<ide> label_weights: (optional) tensor of non-negative weights for multilabel
<del> data. The weights are applied when calculating TP, FP, FN, and TN without
<del> explicit multilabel handling (i.e. when the data is to be flattened).
<add> data. The weights are applied when calculating TP, FP, FN, and TN
<add> without explicit multilabel handling (i.e. when the data is to be
<add> flattened).
<ide> thresholds_distributed_evenly: Boolean, whether the thresholds are evenly
<ide> distributed within the list. An optimized method will be used if this is
<ide> the case. See _update_confusion_matrix_variables_optimized() for more
<ide> def update_confusion_matrix_variables(
<ide>
<ide> Raises:
<ide> ValueError: If `y_pred` and `y_true` have mismatched shapes, or if
<del> `sample_weight` is not `None` and its shape doesn't match `y_pred`, or if
<del> `variables_to_update` contains invalid keys.
<add> `sample_weight` is not `None` and its shape doesn't match `y_pred`, or
<add> if `variables_to_update` contains invalid keys.
<ide> """
<ide> if multi_label and label_weights is not None:
<ide> raise ValueError(
<ide> def update_confusion_matrix_variables(
<ide> raise ValueError(
<ide> "Please provide at least one valid confusion matrix "
<ide> "variable to update. Valid variable key options are: "
<del> f'"{list(ConfusionMatrix)}". Received: "{variables_to_update.keys()}"'
<add> f'"{list(ConfusionMatrix)}". '
<add> f'Received: "{variables_to_update.keys()}"'
<ide> )
<ide>
<ide> variable_dtype = list(variables_to_update.values())[0].dtype
<ide> def update_confusion_matrix_variables(
<ide>
<ide> if thresholds_distributed_evenly:
<ide> # Check whether the thresholds has any leading or tailing epsilon added
<del> # for floating point imprecision. The leading and tailing threshold will be
<del> # handled bit differently as the corner case.
<del> # At this point, thresholds should be a list/array with more than 2 items,
<del> # and ranged between [0, 1]. See is_evenly_distributed_thresholds() for more
<add> # for floating point imprecision. The leading and tailing threshold will
<add> # be handled bit differently as the corner case. At this point,
<add> # thresholds should be a list/array with more than 2 items, and ranged
<add> # between [0, 1]. See is_evenly_distributed_thresholds() for more
<ide> # details.
<ide> thresholds_with_epsilon = thresholds[0] < 0.0 or thresholds[-1] > 1.0
<ide>
<ide> def weighted_assign_add(label, pred, weights, var):
<ide> def _filter_top_k(x, k):
<ide> """Filters top-k values in the last dim of x and set the rest to NEG_INF.
<ide>
<del> Used for computing top-k prediction values in dense labels (which has the same
<del> shape as predictions) for recall and precision top-k metrics.
<add> Used for computing top-k prediction values in dense labels (which has the
<add> same shape as predictions) for recall and precision top-k metrics.
<ide>
<ide> Args:
<ide> x: tensor with any dimensions.
<ide> def ragged_assert_compatible_and_get_flat_values(values, mask=None):
<ide> Returns:
<ide> A tuple in which the first element is the list of tensors and the second
<ide> is the mask tensor. ([Values], mask). Mask and the element in Values
<del> are equal to the flat_values of the input arguments (if they were ragged).
<add> are equal to the flat_values of the input arguments (if they were
<add> ragged).
<ide> """
<ide> if isinstance(values, list):
<ide> is_all_ragged = all(isinstance(rt, tf.RaggedTensor) for rt in values)
<ide> def ragged_assert_compatible_and_get_flat_values(values, mask=None):
<ide> to_be_stripped = True
<ide>
<ide> # NOTE: we leave the flat_values compatibility to
<del> # tf.TensorShape `assert_is_compatible_with`
<del> # check if both dynamic dimensions are equal and then use the flat_values.
<add> # tf.TensorShape `assert_is_compatible_with` check if both dynamic
<add> # dimensions are equal and then use the flat_values.
<ide> nested_row_split_list = [rt.nested_row_splits for rt in values]
<ide> assertion_list = _assert_splits_match(nested_row_split_list)
<ide>
<del> # if both are ragged sample_weights also should be ragged with same dims.
<add> # if both are ragged sample_weights also should be ragged with same
<add> # dims.
<ide> if isinstance(mask, tf.RaggedTensor):
<ide> assertion_list_for_mask = _assert_splits_match(
<ide> [nested_row_split_list[0], mask.nested_row_splits]
<ide> def _assert_splits_match(nested_splits_lists):
<ide> fully identical.
<ide>
<ide> Args:
<del> nested_splits_lists: A list of nested_splits_lists, where each split_list is
<del> a list of `splits` tensors from a `RaggedTensor`, ordered from outermost
<del> ragged dimension to innermost ragged dimension.
<add> nested_splits_lists: A list of nested_splits_lists, where each split_list
<add> is a list of `splits` tensors from a `RaggedTensor`, ordered from
<add> outermost ragged dimension to innermost ragged dimension.
<ide>
<ide> Returns:
<ide> A list of control dependency op tensors.
<ide> def binary_matches(y_true, y_pred, threshold=0.5):
<ide> Args:
<ide> y_true: Ground truth values, of shape (batch_size, d0, .. dN).
<ide> y_pred: The predicted values, of shape (batch_size, d0, .. dN).
<del> threshold: (Optional) Float representing the threshold for deciding whether
<del> prediction values are 1 or 0.
<add> threshold: (Optional) Float representing the threshold for deciding
<add> whether prediction values are 1 or 0.
<ide>
<ide> Returns:
<ide> Binary matches, of shape (batch_size, d0, .. dN).
<ide> def sparse_categorical_matches(y_true, y_pred):
<ide> reshape_matches = True
<ide> y_pred = tf.math.argmax(y_pred, axis=-1)
<ide>
<del> # If the predicted output and actual output types don't match, force cast them
<del> # to match.
<add> # If the predicted output and actual output types don't match, force cast
<add> # them to match.
<ide> if backend.dtype(y_pred) != backend.dtype(y_true):
<ide> y_pred = tf.cast(y_pred, backend.dtype(y_true))
<ide> matches = tf.cast(tf.equal(y_true, y_pred), backend.floatx())
<ide> def sparse_categorical_matches(y_true, y_pred):
<ide>
<ide>
<ide> def sparse_top_k_categorical_matches(y_true, y_pred, k=5):
<del> """Creates float Tensor, 1.0 for label-TopK_prediction match, 0.0 for mismatch.
<add> """Creates float Tensor, 1.0 for label-TopK_prediction match, 0.0 for
<add> mismatch.
<ide>
<ide> Args:
<ide> y_true: tensor of true targets.
<ide><path>keras/utils/metrics_utils_test.py
<ide> def test_failing_different_mask_ranks(self, x_list, y_list, mask_list):
<ide> )
<ide>
<ide> # we do not support such cases that ragged_ranks are different but overall
<del> # dimension shapes and sizes are identical due to adding too much performance
<del> # overheads to the overall use cases.
<add> # dimension shapes and sizes are identical due to adding too much
<add> # performance overheads to the overall use cases.
<ide> def test_failing_different_ragged_ranks(self):
<ide> dt = tf.constant([[[1, 2]]])
<ide> # adding a ragged dimension
<ide> def test_sparse_categorical_matches(self):
<ide> y_pred = tf.constant(np.random.random((6, 7)))
<ide> self.assertEqual(matches_method(y_true, y_pred).dtype, backend.floatx())
<ide>
<del> # Tests that resulting Tensor always has same shape as y_true. Tests from
<del> # 1 dim to 4 dims
<add> # Tests that resulting Tensor always has same shape as y_true. Tests
<add> # from 1 dim to 4 dims
<ide> dims = []
<ide> for _ in range(4):
<ide> dims.append(np.random.randint(1, 7))
<ide> def test_sparse_categorical_matches(self):
<ide> matches_method(y_true, y_pred), [[0.0], [1.0], [1.0], [1.0]]
<ide> )
<ide>
<del> # Test correctness if the shape of y_true is (batch_size, seq_length) and
<del> # y_pred is (batch_size, seq_length, num_classes)
<add> # Test correctness if the shape of y_true is (batch_size, seq_length)
<add> # and y_pred is (batch_size, seq_length, num_classes)
<ide> y_pred = tf.constant(
<ide> [
<ide> [[0.2, 0.3, 0.1], [0.1, 0.2, 0.7]],
<ide> def test_sparse_top_k_categorical_matches(self):
<ide> matches_method(y_true, y_pred, 1).dtype, backend.floatx()
<ide> )
<ide>
<del> # Tests that resulting Tensor always has same shape as y_true. Tests from
<del> # 1 dim to 4 dims
<add> # Tests that resulting Tensor always has same shape as y_true. Tests
<add> # from 1 dim to 4 dims
<ide> dims = []
<ide> for _ in range(4):
<ide> dims.append(np.random.randint(1, 7))
<ide> def test_sparse_top_k_categorical_matches(self):
<ide> matches_method(y_true, y_pred, 1).shape, y_true.shape
<ide> )
<ide>
<del> # Test correctness if the shape of y_true is (num_samples,) for k = 1,2,3
<add> # Test correctness if the shape of y_true is (num_samples,) for k =
<add> # 1,2,3
<ide> y_true = tf.constant([1.0, 0.0, 0.0, 0.0])
<ide> y_pred = tf.constant(
<ide> [[0.7, 0.2, 0.1], [0.5, 0.3, 0.2], [0.6, 0.3, 0.1], [0.0, 0.1, 0.9]]
<ide> def test_sparse_top_k_categorical_matches(self):
<ide> matches_method(y_true, y_pred, 3), [[1.0], [1.0], [1.0], [1.0]]
<ide> )
<ide>
<del> # Test correctness if the shape of y_true is (batch_size, seq_length) and
<del> # y_pred is (batch_size, seq_length, num_classes) for k = 1,2,3
<add> # Test correctness if the shape of y_true is (batch_size, seq_length)
<add> # and y_pred is (batch_size, seq_length, num_classes) for k = 1,2,3
<ide> y_pred = tf.constant(
<ide> [
<ide> [[0.2, 0.3, 0.1], [0.1, 0.2, 0.7]],
<ide> def test_binary_matches(self):
<ide> matches_method(y_true, y_pred, 0.5).dtype, backend.floatx()
<ide> )
<ide>
<del> # Tests that resulting Tensor always has same shape as y_true. Tests from
<del> # 1 dim to 4 dims.
<add> # Tests that resulting Tensor always has same shape as y_true. Tests
<add> # from 1 dim to 4 dims.
<ide> dims = []
<ide> for _ in range(4):
<ide> dims.append(np.random.randint(1, 7))
<ide><path>keras/utils/object_identity.py
<ide> def _assert_type(self, other):
<ide> if not isinstance(other, _ObjectIdentityWrapper):
<ide> raise TypeError(
<ide> "Cannot compare wrapped object with unwrapped object. "
<del> f"Expect the object to be `_ObjectIdentityWrapper`. Got: {other}"
<add> f"Expect the object to be `_ObjectIdentityWrapper`. "
<add> f"Got: {other}"
<ide> )
<ide>
<ide> def __lt__(self, other):
<ide><path>keras/utils/text_dataset.py
<ide> def text_dataset_from_directory(
<ide> ......b_text_2.txt
<ide> ```
<ide>
<del> Then calling `text_dataset_from_directory(main_directory, labels='inferred')`
<del> will return a `tf.data.Dataset` that yields batches of texts from
<del> the subdirectories `class_a` and `class_b`, together with labels
<add> Then calling `text_dataset_from_directory(main_directory,
<add> labels='inferred')` will return a `tf.data.Dataset` that yields batches of
<add> texts from the subdirectories `class_a` and `class_b`, together with labels
<ide> 0 and 1 (0 corresponding to `class_a` and 1 corresponding to `class_b`).
<ide>
<ide> Only `.txt` files are supported at this time.
<ide> def text_dataset_from_directory(
<ide> if labels not in ("inferred", None):
<ide> if not isinstance(labels, (list, tuple)):
<ide> raise ValueError(
<del> "`labels` argument should be a list/tuple of integer labels, of "
<del> "the same size as the number of text files in the target "
<del> "directory. If you wish to infer the labels from the subdirectory "
<del> 'names in the target directory, pass `labels="inferred"`. '
<add> "`labels` argument should be a list/tuple of integer labels, "
<add> "of the same size as the number of text files in the target "
<add> "directory. If you wish to infer the labels from the "
<add> "subdirectory names in the target directory, "
<add> 'pass `labels="inferred"`. '
<ide> "If you wish to get a dataset that only contains text samples "
<ide> f"(no labels), pass `labels=None`. Received: labels={labels}"
<ide> )
<ide> def text_dataset_from_directory(
<ide> )
<ide> if label_mode not in {"int", "categorical", "binary", None}:
<ide> raise ValueError(
<del> '`label_mode` argument must be one of "int", "categorical", "binary", '
<add> '`label_mode` argument must be one of "int", '
<add> '"categorical", "binary", '
<ide> f"or None. Received: label_mode={label_mode}"
<ide> )
<ide> if labels is None or label_mode is None:
<ide><path>keras/utils/text_dataset_test.py
<ide> def _prepare_directory(
<ide> return temp_dir
<ide>
<ide> def test_text_dataset_from_directory_standalone(self):
<del> # Test retrieving txt files without labels from a directory and its subdirs.
<del> # Save a few extra files in the parent directory.
<add> # Test retrieving txt files without labels from a directory and its
<add> # subdirs. Save a few extra files in the parent directory.
<ide> directory = self._prepare_directory(count=7, num_classes=2)
<ide> for i in range(3):
<ide> filename = "text_%s.txt" % (i,)
<ide><path>keras/utils/tf_inspect.py
<ide> def _get_argspec_for_partial(obj):
<ide> # When callable is a functools.partial object, we construct its ArgSpec with
<ide> # following strategy:
<ide> # - If callable partial contains default value for positional arguments (ie.
<del> # object.args), then final ArgSpec doesn't contain those positional arguments.
<add> # object.args), then final ArgSpec doesn't contain those positional
<add> # arguments.
<ide> # - If callable partial contains default value for keyword arguments (ie.
<ide> # object.keywords), then we merge them with wrapped target. Default values
<ide> # from callable partial takes precedence over those from wrapped target.
<ide> def _get_argspec_for_partial(obj):
<ide> # return 2 * m + n
<ide> # partialed = functools.partial(func, m=1)
<ide> #
<del> # This example will result in m having a default value but n doesn't. This is
<del> # usually not allowed in Python and can not be expressed in ArgSpec correctly.
<add> # This example will result in m having a default value but n doesn't. This
<add> # is usually not allowed in Python and can not be expressed in ArgSpec
<add> # correctly.
<ide> #
<del> # Thus, we must detect cases like this by finding first argument with default
<del> # value and ensures all following arguments also have default values. When
<del> # this is not true, a ValueError is raised.
<add> # Thus, we must detect cases like this by finding first argument with
<add> # default value and ensures all following arguments also have default
<add> # values. When this is not true, a ValueError is raised.
<ide>
<ide> n_prune_args = len(obj.args)
<ide> partial_keywords = obj.keywords or {}
<ide> def getcallargs(*func_and_positional, **named):
<ide> A dictionary mapping `func`'s named arguments to the values they would
<ide> receive if `func(*positional, **named)` were called.
<ide>
<del> `getcallargs` will use the argspec from the outermost decorator that provides
<del> it. If no attached decorators modify argspec, the final unwrapped target's
<del> argspec will be used.
<add> `getcallargs` will use the argspec from the outermost decorator that
<add> provides it. If no attached decorators modify argspec, the final unwrapped
<add> target's argspec will be used.
<ide> """
<ide> func = func_and_positional[0]
<ide> positional = func_and_positional[1:]
<ide><path>keras/utils/tf_utils.py
<ide> def set_random_seed(seed):
<ide> """Sets all random seeds for the program (Python, NumPy, and TensorFlow).
<ide>
<del> You can use this utility to make almost any Keras program fully deterministic.
<del> Some limitations apply in cases where network communications are involved
<del> (e.g. parameter server distribution), which creates additional sources of
<del> randomness, or when certain non-deterministic cuDNN ops are involved.
<add> You can use this utility to make almost any Keras program fully
<add> deterministic. Some limitations apply in cases where network communications
<add> are involved (e.g. parameter server distribution), which creates additional
<add> sources of randomness, or when certain non-deterministic cuDNN ops are
<add> involved.
<ide>
<ide> Calling this utility is equivalent to the following:
<ide>
<ide> def get_reachable_from_inputs(inputs, targets=None):
<ide> targets: List of tensors.
<ide>
<ide> Returns:
<del> A set of tensors reachable from the inputs (includes the inputs themselves).
<add> A set of tensors reachable from the inputs (includes the inputs
<add> themselves).
<ide> """
<ide> inputs = tf.nest.flatten(inputs, expand_composites=True)
<ide> reachable = object_identity.ObjectIdentitySet(inputs)
<ide> def get_reachable_from_inputs(inputs, targets=None):
<ide> outputs = x.consumers()
<ide> else:
<ide> raise TypeError(
<del> f"Expected tf.Operation, tf.Variable, or tf.Tensor. Received: {x}"
<add> f"Expected tf.Operation, tf.Variable, or tf.Tensor. "
<add> f"Received: {x}"
<ide> )
<ide>
<ide> for y in outputs:
<ide> def convert_shapes(input_shape, to_tuples=True):
<ide> - None
<ide>
<ide> Args:
<del> input_shape: A nested structure of objects to be converted to TensorShapes.
<del> to_tuples: If `True`, converts all TensorShape to tuples. Otherwise converts
<del> all tuples representing shapes to TensorShapes.
<add> input_shape: A nested structure of objects to be converted to
<add> TensorShapes.
<add> to_tuples: If `True`, converts all TensorShape to tuples. Otherwise
<add> converts all tuples representing shapes to TensorShapes.
<ide>
<ide> Returns:
<ide> Nested structure of shapes in desired format.
<ide> def as_list(self):
<ide>
<ide>
<ide> def convert_inner_node_data(nested, wrap=False):
<del> """Either wraps or unwraps innermost node data lists in `ListWrapper` objects.
<add> """Either wraps or unwraps innermost node data lists in `ListWrapper`
<add> objects.
<ide>
<ide> Args:
<ide> nested: A nested data structure.
<del> wrap: If `True`, wrap innermost lists in `ListWrapper` objects. If `False`,
<del> unwraps `ListWrapper` objects into lists.
<add> wrap: If `True`, wrap innermost lists in `ListWrapper` objects. If
<add> `False`, unwraps `ListWrapper` objects into lists.
<ide>
<ide> Returns:
<ide> Structure of same type as nested, with lists wrapped/unwrapped.
<ide> def is_extension_type(tensor):
<ide>
<ide>
<ide> def is_symbolic_tensor(tensor):
<del> """Returns whether a tensor is symbolic (from a TF graph) or an eager tensor.
<add> """Returns whether a tensor is symbolic (from a TF graph) or an eager
<add> tensor.
<ide>
<ide> A Variable can be seen as either: it is considered symbolic
<ide> when we are in a graph scope, and eager when we are in an eager scope.
<ide> def assert_no_legacy_layers(layers):
<ide> "To use keras as a "
<ide> "framework (for instance using the Network, Model, or Sequential "
<ide> "classes), please use the tf.keras.layers implementation instead. "
<del> "(Or, if writing custom layers, subclass from tf.keras.layers rather "
<del> "than tf.layers)"
<add> "(Or, if writing custom layers, subclass from tf.keras.layers "
<add> "rather than tf.layers)"
<ide> )
<ide>
<ide>
<ide> def maybe_init_scope(layer):
<ide>
<ide> @tf_contextlib.contextmanager
<ide> def graph_context_for_symbolic_tensors(*args, **kwargs):
<del> """Returns graph context manager if any of the inputs is a symbolic tensor."""
<add> """Returns graph context manager if any of the inputs is a symbolic
<add> tensor."""
<ide> if any(is_symbolic_tensor(v) for v in list(args) + list(kwargs.values())):
<ide> with backend.get_graph().as_default():
<ide> yield
<ide> def get_tensor_spec(t, dynamic_batch=False, name=None):
<ide>
<ide>
<ide> def sync_to_numpy_or_python_type(tensors):
<del> """Syncs and converts a structure of `Tensor`s to `NumPy` arrays or Python scalar types.
<add> """Syncs and converts a structure of `Tensor`s to `NumPy` arrays or Python
<add> scalar types.
<ide>
<ide> For each tensor, it calls `tensor.numpy()`. If the result is a scalar value,
<ide> it converts it to a Python type, such as a float or int, by calling
<ide> `result.item()`.
<ide>
<del> Numpy scalars are converted, as Python types are often more convenient to deal
<del> with. This is especially useful for bfloat16 Numpy scalars, which don't
<add> Numpy scalars are converted, as Python types are often more convenient to
<add> deal with. This is especially useful for bfloat16 Numpy scalars, which don't
<ide> support as many operations as other Numpy values.
<ide>
<ide> Async strategies (such as `TPUStrategy` and `ParameterServerStrategy`) are
<ide> def _to_single_numpy_or_python_type(t):
<ide> # Don't turn ragged or sparse tensors to NumPy.
<ide> if isinstance(t, tf.Tensor):
<ide> t = t.numpy()
<del> # Strings, ragged and sparse tensors don't have .item(). Return them as-is.
<add> # Strings, ragged and sparse tensors don't have .item(). Return them
<add> # as-is.
<ide> if not isinstance(t, (np.ndarray, np.generic)):
<ide> return t
<ide> return t.item() if np.ndim(t) == 0 else t
<ide><path>keras/utils/timeseries_dataset.py
<ide> def timeseries_dataset_from_array(
<ide> for batch in dataset:
<ide> inputs, targets = batch
<ide> assert np.array_equal(inputs[0], data[:10]) # First sequence: steps [0-9]
<del> assert np.array_equal(targets[0], data[10]) # Corresponding target: step 10
<add> # Corresponding target: step 10
<add> assert np.array_equal(targets[0], data[10])
<ide> break
<ide> ```
<ide>
<ide> def timeseries_dataset_from_array(
<ide> if end_index is None:
<ide> end_index = len(data)
<ide>
<del> # Determine the lowest dtype to store start positions (to lower memory usage).
<add> # Determine the lowest dtype to store start positions (to lower memory
<add> # usage).
<ide> num_seqs = end_index - start_index - (sequence_length * sampling_rate) + 1
<ide> if targets is not None:
<ide> num_seqs = min(num_seqs, len(targets))
<ide><path>keras/utils/timeseries_dataset_test.py
<ide> def test_shuffle(self):
<ide> self.assertNotAllClose(x, np.arange(0, 5))
<ide> self.assertAllClose(x[:, 0] * 2, y)
<ide> first_seq = x
<del> # Check that a new iteration with the same dataset yields different results
<add> # Check that a new iteration with the same dataset yields different
<add> # results
<ide> for x, _ in dataset.take(1):
<ide> self.assertNotAllClose(x, first_seq)
<ide> # Check determism with same seed
<ide><path>keras/utils/traceback_utils.py
<ide> def _process_traceback_frames(tb):
<ide>
<ide>
<ide> def filter_traceback(fn):
<del> """Filter out Keras-internal stack trace frames in exceptions raised by fn."""
<add> """Filter out Keras-internal stack trace frames in exceptions raised by
<add> fn."""
<ide> if sys.version_info.major != 3 or sys.version_info.minor < 7:
<ide> return fn
<ide>
<ide> def error_handler(*args, **kwargs):
<ide> if isinstance(e, tf.errors.OpError):
<ide> message = e.message
<ide> elif e.args:
<del> # Canonically, the 1st argument in an exception is the error message.
<del> # This works for all built-in Python exceptions.
<add> # Canonically, the 1st argument in an exception is the error
<add> # message. This works for all built-in Python exceptions.
<ide> message = e.args[0]
<ide> else:
<ide> message = ""
<ide> def error_handler(*args, **kwargs):
<ide> new_e = e.__class__(e.node_def, e.op, message, e.error_code)
<ide> else:
<ide> try:
<del> # For standard exceptions such as ValueError, TypeError, etc.
<add> # For standard exceptions such as ValueError, TypeError,
<add> # etc.
<ide> new_e = e.__class__(message)
<ide> except TypeError:
<del> # For any custom error that doesn't have a standard signature.
<add> # For any custom error that doesn't have a standard
<add> # signature.
<ide> new_e = RuntimeError(message)
<ide> new_e._keras_call_info_injected = (
<ide> True # pylint: disable=protected-access
<ide><path>keras/utils/version_utils.py
<ide> def __new__(cls, *args, **kwargs): # pylint: disable=unused-argument
<ide> start_cls == callbacks_v1.TensorBoard
<ide> and cls == callbacks.TensorBoard
<ide> ):
<del> # Since the v2 class is not a subclass of the v1 class, __init__ has to
<del> # be called manually.
<add> # Since the v2 class is not a subclass of the v1 class, __init__ has
<add> # to be called manually.
<ide> return cls(*args, **kwargs)
<ide> return super(TensorBoardVersionSelector, cls).__new__(cls)
<ide>
<ide> def swap_class(cls, v2_cls, v1_cls, use_v2):
<ide> # `v1_cls` often extends `v2_cls`, so it may still call `swap_class`
<ide> # even if it doesn't need to. That being said, it may be the safest
<ide> # not to over optimize this logic for the sake of correctness,
<del> # especially if we swap v1 & v2 classes that don't extend each other,
<del> # or when the inheritance order is different.
<add> # especially if we swap v1 & v2 classes that don't extend each
<add> # other, or when the inheritance order is different.
<ide> or (not use_v2 and issubclass(base, v2_cls))
<ide> ):
<ide> new_base = swap_class(base, v2_cls, v1_cls, use_v2)
<ide> def swap_class(cls, v2_cls, v1_cls, use_v2):
<ide> def disallow_legacy_graph(cls_name, method_name):
<ide> if not tf.compat.v1.executing_eagerly_outside_functions():
<ide> error_msg = (
<del> f"Calling `{cls_name}.{method_name}` in graph mode is not supported "
<del> f"when the `{cls_name}` instance was constructed with eager mode "
<del> f"enabled. Please construct your `{cls_name}` instance in graph mode or"
<del> f" call `{cls_name}.{method_name}` with eager mode enabled."
<add> f"Calling `{cls_name}.{method_name}` in graph mode is not "
<add> f"supported when the `{cls_name}` instance was constructed with "
<add> f"eager mode enabled. Please construct your `{cls_name}` instance "
<add> f"in graph mode or call `{cls_name}.{method_name}` with "
<add> "eager mode enabled."
<ide> )
<ide> raise ValueError(error_msg)
<ide>
<ide><path>keras/utils/vis_utils.py
<ide> def plot_model(
<ide> expand_nested: Whether to expand nested models into clusters.
<ide> dpi: Dots per inch.
<ide> layer_range: input of `list` containing two `str` items, which is the
<del> starting layer name and ending layer name (both inclusive) indicating the
<del> range of layers for which the plot will be generated. It also accepts
<del> regex patterns instead of exact name. In such case, start predicate will
<del> be the first element it matches to `layer_range[0]` and the end predicate
<del> will be the last element it matches to `layer_range[1]`. By default `None`
<del> which considers all layers of model. Note that you must pass range such
<del> that the resultant subgraph must be complete.
<add> starting layer name and ending layer name (both inclusive) indicating
<add> the range of layers for which the plot will be generated. It also
<add> accepts regex patterns instead of exact name. In such case, start
<add> predicate will be the first element it matches to `layer_range[0]` and
<add> the end predicate will be the last element it matches to
<add> `layer_range[1]`. By default `None` which considers all layers of model.
<add> Note that you must pass range such that the resultant subgraph must be
<add> complete.
<ide> show_layer_activations: Display layer activations (only for layers that
<ide> have an `activation` property).
<ide>
<ide> def plot_model(
<ide> "for plot_model to work."
<ide> )
<ide> if "IPython.core.magics.namespace" in sys.modules:
<del> # We don't raise an exception here in order to avoid crashing notebook
<del> # tests where graphviz is not available.
<add> # We don't raise an exception here in order to avoid crashing
<add> # notebook tests where graphviz is not available.
<ide> io_utils.print_msg(message)
<ide>             return
<ide>         else:
| 32 |
Mixed | Python | add overwrite parameter to models.save_weights() | 08547d2582ce3071495d2f992381a5dbba605882 | <ide><path>docs/sources/models.md
<ide> model = keras.models.Sequential()
<ide> - __Return__: loss over the data, or tuple `(loss, accuracy)` if `accuracy=True`.
<ide> - __test__(X, y, accuracy=False): Single performance evaluation on one batch. if accuracy==False, return tuple (loss_on_batch, accuracy_on_batch). Else, return loss_on_batch.
<ide> - __Return__: loss over the data, or tuple `(loss, accuracy)` if `accuracy=True`.
<del> - __save_weights__(fname): Store the weights of all layers to a HDF5 file.
<add> - __save_weights__(fname, overwrite=False): Store the weights of all layers to a HDF5 file. If overwrite==False and the file already exists, an exception will be thrown.
<ide> - __load_weights__(fname): Sets the weights of a model, based to weights stored by __save__weights__. You can only __load__weights__ on a savefile from a model with an identical architecture. __load_weights__ can be called either before or after the __compile__ step.
<ide>
<ide> __Examples__:
<ide><path>keras/models.py
<ide> def set_weights(self, weights):
<ide> self.layers[i].set_weights(weights[:nb_param])
<ide> weights = weights[nb_param:]
<ide>
<del> def save_weights(self, filepath):
<add> def save_weights(self, filepath, overwrite=False):
<ide> # Save weights from all layers to HDF5
<ide> import h5py
<del> # FIXME: fail if file exists, or add option to overwrite!
<add> import os.path
<add> # if file exists and should not be overwritten
<add> if not overwrite and os.path.isfile(filepath):
<add> raise IOError('%s already exists' % (filepath))
<ide> f = h5py.File(filepath, 'w')
<ide> f.attrs['nb_layers'] = len(self.layers)
<ide> for k, l in enumerate(self.layers):
<ide> def load_weights(self, filepath):
<ide> weights = [g['param_{}'.format(p)] for p in range(g.attrs['nb_params'])]
<ide> self.layers[k].set_weights(weights)
<ide> f.close()
<del>
<ide>\ No newline at end of file
<add>
| 2 |
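A short usage sketch of the `overwrite` guard added in this commit; it assumes `model` is an already-built `keras.models.Sequential` (construction elided) and uses a hypothetical filename:

```python
model.save_weights("weights.h5")  # first call writes the HDF5 file
try:
    # The file now exists and `overwrite` defaults to False.
    model.save_weights("weights.h5")
except IOError as err:
    print(err)  # "weights.h5 already exists"
model.save_weights("weights.h5", overwrite=True)  # replaces the file
```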
Mixed | Ruby | remove more references to white list | 2c5c452d8cf3ee8e042aa4c0187b32a88d0da025 | <ide><path>actionpack/lib/action_controller/metal/strong_parameters.rb
<ide> def unpermitted_keys(params)
<ide> # --- Filtering ----------------------------------------------------------
<ide> #
<ide>
<del> # This is a white list of permitted scalar types that includes the ones
<add> # This is a list of permitted scalar types that includes the ones
<ide> # supported in XML and JSON requests.
<ide> #
<ide> # This list is in particular used to filter ordinary requests, String goes
<ide><path>guides/source/active_support_core_extensions.md
<ide> NOTE: Defined in `active_support/core_ext/hash/keys.rb`.
<ide>
<ide> #### `assert_valid_keys`
<ide>
<del>The method `assert_valid_keys` receives an arbitrary number of arguments, and checks whether the receiver has any key outside that white list. If it does `ArgumentError` is raised.
<add>The method `assert_valid_keys` receives an arbitrary number of arguments, and checks whether the receiver has any key outside that list. If it does `ArgumentError` is raised.
<ide>
<ide> ```ruby
<ide> {a: 1}.assert_valid_keys(:a) # passes
| 2 |
Python | Python | fix memory issues in language.evaluate | b57be94c78998f0ad2abc69fe0a8f486e2a1f75b | <ide><path>spacy/language.py
<ide> def evaluate(
<ide>
<ide> DOCS: https://nightly.spacy.io/api/language#evaluate
<ide> """
<add> examples = list(examples)
<ide> validate_examples(examples, "Language.evaluate")
<ide> if batch_size is None:
<ide> batch_size = self.batch_size
<ide> def evaluate(
<ide> kwargs = dict(scorer_cfg)
<ide> kwargs.setdefault("nlp", self)
<ide> scorer = Scorer(**kwargs)
<del> texts = [eg.reference.text for eg in examples]
<del> docs = [eg.predicted for eg in examples]
<add> # reset annotation in predicted docs and time tokenization
<ide> start_time = timer()
<del> # tokenize the texts only for timing purposes
<del> if not hasattr(self.tokenizer, "pipe"):
<del> _ = [self.tokenizer(text) for text in texts] # noqa: F841
<del> else:
<del> _ = list(self.tokenizer.pipe(texts)) # noqa: F841
<add> for eg in examples:
<add> eg.predicted = self.make_doc(eg.reference.text)
<add> # apply all pipeline components
<ide> for name, pipe in self.pipeline:
<ide> kwargs = component_cfg.get(name, {})
<ide> kwargs.setdefault("batch_size", batch_size)
<del> docs = _pipe(docs, pipe, kwargs)
<del> # iterate over the final generator
<del> if len(self.pipeline):
<del> docs = list(docs)
<add> for doc, eg in zip(
<add> _pipe((eg.predicted for eg in examples), pipe, kwargs), examples
<add> ):
<add> eg.predicted = doc
<ide> end_time = timer()
<del> for i, (doc, eg) in enumerate(zip(docs, examples)):
<del> util.logger.debug(doc)
<del> eg.predicted = doc
<ide> results = scorer.score(examples)
<del> n_words = sum(len(doc) for doc in docs)
<add> n_words = sum(len(eg.predicted) for eg in examples)
<ide> results["speed"] = n_words / (end_time - start_time)
<ide> return results
<ide>
<ide><path>spacy/tests/test_language.py
<ide> def test_language_evaluate(nlp):
<ide> annots = {"doc_annotation": {"cats": {"POSITIVE": 1.0, "NEGATIVE": 0.0}}}
<ide> doc = Doc(nlp.vocab, words=text.split(" "))
<ide> example = Example.from_dict(doc, annots)
<del> nlp.evaluate([example])
<add> scores = nlp.evaluate([example])
<add> assert scores["speed"] > 0
<add>
<add> # test with generator
<add> scores = nlp.evaluate(eg for eg in [example])
<add> assert scores["speed"] > 0
<ide>
<ide> # Not allowed to call with just one Example
<ide> with pytest.raises(TypeError):
<ide><path>spacy/training/loop.py
<ide> def create_evaluation_callback(
<ide> weights = {key: value for key, value in weights.items() if value is not None}
<ide>
<ide> def evaluate() -> Tuple[float, Dict[str, float]]:
<del> dev_examples = list(dev_corpus(nlp))
<ide> try:
<del> scores = nlp.evaluate(dev_examples)
<add> scores = nlp.evaluate(dev_corpus(nlp))
<ide> except KeyError as e:
<ide> raise KeyError(Errors.E900.format(pipeline=nlp.pipe_names)) from e
<ide>         # Calculate a weighted sum based on score_weights for the main score.
| 3 |
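A sketch of the generator usage this fix enables, adapted from the test added in the patch; the blank pipeline and the two-word doc are our simplification:

```python
import spacy
from spacy.tokens import Doc
from spacy.training import Example

nlp = spacy.blank("en")
doc = Doc(nlp.vocab, words=["hello", "world"])
annots = {"doc_annotation": {"cats": {"POSITIVE": 1.0, "NEGATIVE": 0.0}}}
example = Example.from_dict(doc, annots)

# evaluate() now materializes the examples once, resets each predicted doc
# with make_doc(), and streams them through the pipeline, so a generator
# works and the docs are not all kept alive at once.
scores = nlp.evaluate(eg for eg in [example])
assert scores["speed"] > 0
```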
Go | Go | add test for rest api container rename | 8f752ffeafd2f8c08035a5e39220fe17c9309fd7 | <ide><path>integration-cli/docker_api_containers_test.go
<ide> func (s *DockerSuite) TestStartWithTooLowMemoryLimit(c *check.C) {
<ide> c.Assert(status, check.Equals, http.StatusInternalServerError)
<ide> c.Assert(strings.Contains(string(b), "Minimum memory limit allowed is 4MB"), check.Equals, true)
<ide> }
<add>
<add>func (s *DockerSuite) TestContainerApiRename(c *check.C) {
<add> runCmd := exec.Command(dockerBinary, "run", "--name", "first_name", "-d", "busybox", "sh")
<add> out, _, err := runCommandWithOutput(runCmd)
<add> c.Assert(err, check.IsNil)
<add>
<add> containerID := strings.TrimSpace(out)
<add> newName := "new_name" + stringid.GenerateRandomID()
<add> statusCode, _, err := sockRequest("POST", "/containers/"+containerID+"/rename?name="+newName, nil)
<add>
<add> // 204 No Content is expected, not 200
<add> c.Assert(statusCode, check.Equals, http.StatusNoContent)
<add> c.Assert(err, check.IsNil)
<add>
<add> name, err := inspectField(containerID, "Name")
<add> if name != "/"+newName {
<add> c.Fatalf("Failed to rename container, expected %v, got %v. Container rename API failed", newName, name)
<add> }
<add>}
| 1 |
Ruby | Ruby | integrate brew list with brew cask list | 28c0962430bb33806bb4870ba394192e606d26f5 | <ide><path>Library/Homebrew/cmd/list.rb
<ide> require "metafiles"
<ide> require "formula"
<ide> require "cli/parser"
<add>require "cask/cmd"
<ide>
<ide> module Homebrew
<ide> module_function
<ide> def list_args
<ide> switch "--pinned",
<ide> description: "Show the versions of pinned formulae, or only the specified (pinned) "\
<ide> "formulae if <formula> are provided. See also `pin`, `unpin`."
<add> switch "--casks",
<add> description: "List casks"
<ide> # passed through to ls
<ide> switch "-1",
<ide> description: "Force output to be one entry per line. " \
<ide> def list_args
<ide> description: "Sort by time modified, listing most recently modified first."
<ide> switch :verbose
<ide> switch :debug
<add> conflicts "--casks", "--unbrewed", "--multiple", "--pinned", "-l", "-r", "-t"
<ide> end
<ide> end
<ide>
<ide> def list
<ide> list_args.parse
<ide>
<add> return list_casks if args.casks?
<add>
<ide> return list_unbrewed if args.unbrewed?
<ide>
<ide> # Unbrewed uses the PREFIX, which will exist
<ide> def filtered_list
<ide> end
<ide> end
<ide> end
<add>
<add> def list_casks
<add> cask_list = Cask::Cmd::List.new args.named
<add> cask_list.one = ARGV.include? "-1"
<add> cask_list.versions = args.versions?
<add> cask_list.full_name = args.full_name?
<add> cask_list.run
<add> end
<ide> end
<ide>
<ide> class PrettyListing
| 1 |
Javascript | Javascript | remove proptypes from incrementalpresenter | 0625c348cc8fe189e56d7b83ab000a87b8ba8a4f | <ide><path>Libraries/Components/View/ViewPropTypes.js
<ide> type DirectEventProps = $ReadOnly<{|
<ide> *
<ide> * See http://facebook.github.io/react-native/docs/view.html#onlayout
<ide> */
<del> onLayout?: ?(event: LayoutEvent) => void,
<add> onLayout?: ?(event: LayoutEvent) => mixed,
<ide>
<ide> /**
<ide> * When `accessible` is `true`, the system will invoke this function when the
<ide><path>Libraries/Experimental/IncrementalPresenter.js
<ide>
<ide> 'use strict';
<ide>
<del>const DeprecatedViewPropTypes = require('DeprecatedViewPropTypes');
<ide> const IncrementalGroup = require('IncrementalGroup');
<ide> const PropTypes = require('prop-types');
<ide> const React = require('React');
<ide> const View = require('View');
<ide>
<ide> import type {Context} from 'Incremental';
<ide> import type {ViewStyleProp} from 'StyleSheet';
<add>import type {LayoutEvent} from 'CoreEventTypes';
<ide>
<ide> /**
<ide> * WARNING: EXPERIMENTAL. Breaking changes will probably happen a lot and will
<ide> import type {ViewStyleProp} from 'StyleSheet';
<ide> *
<ide> * See Incremental.js for more info.
<ide> */
<del>type Props = {
<add>type Props = $ReadOnly<{|
<ide> name: string,
<ide> disabled?: boolean,
<del> onDone?: () => void,
<del> onLayout?: (event: Object) => void,
<add> onDone?: () => mixed,
<add> onLayout?: (event: LayoutEvent) => mixed,
<ide> style?: ViewStyleProp,
<del> children?: any,
<del>};
<add> children?: React.Node,
<add>|}>;
<add>
<ide> class IncrementalPresenter extends React.Component<Props> {
<ide> context: Context;
<ide> _isDone: boolean;
<ide>
<del> static propTypes = {
<del> name: PropTypes.string,
<del> disabled: PropTypes.bool,
<del> onDone: PropTypes.func,
<del> onLayout: PropTypes.func,
<del> style: DeprecatedViewPropTypes.style,
<del> };
<ide> static contextTypes = {
<ide> incrementalGroup: PropTypes.object,
<ide>     incrementalGroupEnabled: PropTypes.bool,
| 2 |