content_type: stringclasses (8 values)
main_lang: stringclasses (7 values)
message: stringlengths (1 to 50)
sha: stringlengths (40 to 40)
patch: stringlengths (52 to 962k)
file_count: int64 (1 to 300)
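Each row below is one commit record with the six columns listed above. The following is only an illustrative sketch and is not part of the dataset: the typed-dict wrapper and filter helper are invented for clarity, and the sample values are copied from the first preview row (the patch field is truncated here).

```python
from typing import TypedDict

class CommitRecord(TypedDict):
    content_type: str   # one of 8 labels (e.g. "Python", "Mixed", "Text")
    main_lang: str      # one of 7 primary languages
    message: str        # commit message, 1 to 50 characters
    sha: str            # 40-character commit hash
    patch: str          # diff text using <ide>/<add>/<del> line markers, 52 chars to ~962k
    file_count: int     # number of files touched, 1 to 300

def single_file_commits(rows, lang):
    """Example filter: keep commits in `lang` that touch exactly one file."""
    return [r for r in rows if r["main_lang"] == lang and r["file_count"] == 1]

sample: CommitRecord = {
    "content_type": "Python",
    "main_lang": "Python",
    "message": "add esmfold code sample",
    "sha": "4f1e5e4efd46333578f15d09a7f931a3e7a2fd78",
    "patch": "<ide><path>src/transformers/models/esm/modeling_esmfold.py ...",
    "file_count": 1,
}
print(len(single_file_commits([sample], "Python")))  # -> 1
```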
Python
Python
add esmfold code sample
4f1e5e4efd46333578f15d09a7f931a3e7a2fd78
<ide><path>src/transformers/models/esm/modeling_esmfold.py <ide> <ide> <ide> logger = logging.get_logger(__name__) <add>_CHECKPOINT_FOR_DOC = "Rocketknight1/esmfold_v1" <add>_CONFIG_FOR_DOC = "EsmConfig" <add>_TOKENIZER_FOR_DOC = "EsmTokenizer" <ide> <ide> <ide> @dataclass <ide> def forward( <ide> <ide> Example: <ide> <del> TODO Matt <add> ```python <add> >>> from transformers import AutoTokenizer, EsmForProteinFolding <add> <add> >>> model = EsmForProteinFolding.from_pretrained("facebook/esmfold_v1") <add> >>> tokenizer = AutoTokenizer.from_pretrained("facebook/esmfold_v1") <add> >>> inputs = tokenizer(["MLKNVQVQLV"], return_tensors="pt") # A tiny random peptide <add> >>> outputs = model(**inputs) <add> >>> folded_positions = outputs.positions <add> ``` <add> <ide> """ <ide> cfg = self.config.esmfold_config <ide>
1
Ruby
Ruby
move http only option to appgenerator
2c0f6b0d8a8f357078fa7399765cf11edc7c645b
<ide><path>railties/lib/rails/generators/app_base.rb <ide> def self.add_shared_options_for(name) <ide> class_option :skip_test_unit, :type => :boolean, :aliases => "-T", :default => false, <ide> :desc => "Skip Test::Unit files" <ide> <del> class_option :http_only, :type => :boolean, :default => false, <del> :desc => "Preconfigure smaller stack for HTTP only apps" <del> <ide> class_option :help, :type => :boolean, :aliases => "-h", :group => :rails, <ide> :desc => "Show this help message and quit" <ide> end <ide><path>railties/lib/rails/generators/rails/app/app_generator.rb <ide> module Generators <ide> class AppGenerator < AppBase <ide> add_shared_options_for "application" <ide> <add> class_option :http_only, :type => :boolean, :default => false, <add> :desc => "Preconfigure smaller stack for HTTP only apps" <add> <ide> # Add bin/rails options <ide> class_option :version, :type => :boolean, :aliases => "-v", :group => :rails, <ide> :desc => "Show Rails version number and quit"
2
Ruby
Ruby
fix wrong assignment
70b995a77f3e47c24ae88509066d0cdc4b2d779e
<ide><path>actionpack/test/controller/parameters/serialization_test.rb <ide> <ide> class ParametersSerializationTest < ActiveSupport::TestCase <ide> setup do <del> @old_permitted_parameters = ActionController::Parameters.always_permitted_parameters <del> ActionController::Parameters.always_permitted_parameters = true <add> @old_permitted_parameters = ActionController::Parameters.permit_all_parameters <add> ActionController::Parameters.permit_all_parameters = false <ide> end <ide> <ide> teardown do <del> ActionController::Parameters.always_permitted_parameters = @old_permitted_parameters <add> ActionController::Parameters.permit_all_parameters = @old_permitted_parameters <ide> end <ide> <ide> test 'yaml serialization' do <del> assert_equal <<-end_of_yaml.strip_heredoc, YAML.dump(ActionController::Parameters.new(key: :value)) <add> params = ActionController::Parameters.new(key: :value) <add> assert_equal <<-end_of_yaml.strip_heredoc, YAML.dump(params) <ide> --- !ruby/object:ActionController::Parameters <ide> parameters: !ruby/hash:ActiveSupport::HashWithIndifferentAccess <ide> key: :value
1
Ruby
Ruby
add missing require in `dev-cmd/irb`
6f097685493b82330bbae2a743038745e6892a2b
<ide><path>Library/Homebrew/dev-cmd/irb.rb <ide> # typed: false <ide> # frozen_string_literal: true <ide> <add>require "formulary" <ide> require "cli/parser" <ide> <ide> class Symbol
1
Python
Python
fix typo in facebook ads provider
ef8df17348e3c567e2d2f0aface641acae3896ba
<ide><path>airflow/providers/facebook/ads/hooks/ads.py <ide> def facebook_ads_config(self) -> Dict: <ide> self.log.info("Fetching fb connection: %s", self.facebook_conn_id) <ide> conn = self.get_connection(self.facebook_conn_id) <ide> config = conn.extra_dejson <del> missings_keys = self.client_required_fields - config.keys() <del> if missings_keys: <del> message = "{missings_keys} fields are missing".format(missings_keys=missings_keys) <add> missing_keys = self.client_required_fields - config.keys() <add> if missing_keys: <add> message = "{missing_keys} fields are missing".format(missing_keys=missing_keys) <ide> raise AirflowException(message) <ide> return config <ide> <ide><path>tests/providers/google/cloud/transfers/test_facebook_ads_to_gcs_system.py <ide> def provide_facebook_connection( <ide> ) <ide> with open(key_file_path, 'r') as credentials: <ide> creds = json.load(credentials) <del> missings_keys = CONFIG_REQUIRED_FIELDS - creds.keys() <del> if missings_keys: <del> message = "{missings_keys} fields are missing".format(missings_keys=missings_keys) <add> missing_keys = CONFIG_REQUIRED_FIELDS - creds.keys() <add> if missing_keys: <add> message = "{missing_keys} fields are missing".format(missing_keys=missing_keys) <ide> raise AirflowException(message) <ide> conn = Connection( <ide> conn_id=FACEBOOK_CONNECTION_ID,
2
Python
Python
set version to v2.1.4.dev0
83511972d3afc50dcfa9084928ab470077674a11
<ide><path>spacy/about.py <ide> # fmt: off <ide> <ide> __title__ = "spacy" <del>__version__ = "2.1.3" <add>__version__ = "2.1.4.dev0" <ide> __summary__ = "Industrial-strength Natural Language Processing (NLP) with Python and Cython" <ide> __uri__ = "https://spacy.io" <ide> __author__ = "Explosion AI" <ide> __email__ = "[email protected]" <ide> __license__ = "MIT" <del>__release__ = True <add>__release__ = False <ide> <ide> __download_url__ = "https://github.com/explosion/spacy-models/releases/download" <ide> __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"
1
Ruby
Ruby
update image_tag output in examples to actual
a200ebd8818d045e035c6e025733f8868f5a0181
<ide><path>actionpack/lib/action_view/helpers/asset_tag_helper.rb <ide> def font_url(source) <ide> # value is not in the correct format. <ide> # <ide> # image_tag("icon") <del> # # => <img src="/assets/icon" alt="Icon" /> <add> # # => <img alt="Icon" src="/assets/icon" /> <ide> # image_tag("icon.png") <del> # # => <img src="/assets/icon.png" alt="Icon" /> <add> # # => <img alt="Icon" src="/assets/icon.png" /> <ide> # image_tag("icon.png", :size => "16x10", :alt => "Edit Entry") <del> # # => <img src="/assets/icon.png" width="16" height="10" alt="Edit Entry" /> <add> # # => <img alt="Edit Entry" height="10" src="/assets/icon.png" width="16" /> <ide> # image_tag("/icons/icon.gif", :size => "16x16") <del> # # => <img src="/icons/icon.gif" width="16" height="16" alt="Icon" /> <add> # # => <img alt="Icon" height="16" src="/icons/icon.gif" width="16" /> <ide> # image_tag("/icons/icon.gif", :height => '32', :width => '32') <ide> # # => <img alt="Icon" height="32" src="/icons/icon.gif" width="32" /> <ide> # image_tag("/icons/icon.gif", :class => "menu_icon")
1
Javascript
Javascript
increase hitslop for notification dismiss button
390546f6ed7965c09c8dcc4962dd18bc71c1d1d8
<ide><path>Libraries/LogBox/UI/LogBoxLogNotification.js <ide> function DismissButton(props) { <ide> default: LogBoxStyle.getTextColor(0.3), <ide> pressed: LogBoxStyle.getTextColor(0.5), <ide> }} <add> hitSlop={{ <add> top: 12, <add> right: 10, <add> bottom: 12, <add> left: 10, <add> }} <ide> onPress={props.onPress} <ide> style={dismissStyles.press}> <ide> <Image
1
Python
Python
fix the git "sparse checkout" functionality
2771e4f2b3bfcc19f6c11a6801d95f4bb595c029
<ide><path>spacy/cli/_util.py <ide> def git_sparse_checkout( <ide> if dest.exists(): <ide> msg.fail("Destination of checkout must not exist", exits=1) <ide> if not dest.parent.exists(): <del> msg.fail("Parent of destination of checkout must exist", exits=1) <add> raise IOError("Parent of destination of checkout must exist") <add> # We're using Git, partial clone and sparse checkout to <add> # only clone the files we need <add> # This ends up being RIDICULOUS. omg. <add> # So, every tutorial and SO post talks about 'sparse checkout'...But they <add> # go and *clone* the whole repo. Worthless. And cloning part of a repo <add> # turns out to be completely broken. The only way to specify a "path" is.. <add> # a path *on the server*? The contents of which, specifies the paths. Wat. <add> # Obviously this is hopelessly broken and insecure, because you can query <add> # arbitrary paths on the server! So nobody enables this. <add> # What we have to do is disable *all* files. We could then just checkout <add> # the path, and it'd "work", but be hopelessly slow...Because it goes and <add> # transfers every missing object one-by-one. So the final piece is that we <add> # need to use some weird git internals to fetch the missings in bulk, and <add> # *that* we can do by path. <ide> # We're using Git and sparse checkout to only clone the files we need <ide> with make_tempdir() as tmp_dir: <add> # This is the "clone, but don't download anything" part. <ide> cmd = ( <del> f"git clone {repo} {tmp_dir} --no-checkout " <del> "--depth 1 --config core.sparseCheckout=true" <add> f"git clone {repo} {tmp_dir} --no-checkout --depth 1 " <add> "--filter=blob:none" # <-- The key bit <ide> ) <ide> if branch is not None: <ide> cmd = f"{cmd} -b {branch}" <del> run_command(cmd) <del> with (tmp_dir / ".git" / "info" / "sparse-checkout").open("w") as f: <del> f.write(subpath) <del> run_command(["git", "-C", str(tmp_dir), "fetch"]) <del> run_command(["git", "-C", str(tmp_dir), "checkout"]) <add> run_command(cmd, capture=True) <add> # Now we need to find the missing filenames for the subpath we want. <add> # Looking for this 'rev-list' command in the git --help? Hah.
<add> cmd = f"git -C {tmp_dir} rev-list --objects --all --missing=print -- {subpath}" <add> ret = run_command(cmd, capture=True) <add> missings = "\n".join([x[1:] for x in ret.stdout.split() if x.startswith("?")]) <add> # Now pass those missings into another bit of git internals <add> run_command( <add> f"git -C {tmp_dir} fetch-pack --stdin {repo}", capture=True, stdin=missings <add> ) <add> # And finally, we can checkout our subpath <add> run_command(f"git -C {tmp_dir} checkout {branch} {subpath}") <ide> # We need Path(name) to make sure we also support subdirectories <ide> shutil.move(str(tmp_dir / Path(subpath)), str(dest)) <del> print(dest) <del> print(list(dest.iterdir())) <ide><path>spacy/scorer.py <ide> def score_set(self, cand: set, gold: set) -> None: <ide> <ide> @property <ide> def precision(self) -> float: <del> return (self.tp / (self.tp + self.fp + 1e-100)) <add> return self.tp / (self.tp + self.fp + 1e-100) <ide> <ide> @property <ide> def recall(self) -> float: <del> return (self.tp / (self.tp + self.fn + 1e-100)) <add> return self.tp / (self.tp + self.fn + 1e-100) <ide> <ide> @property <ide> def fscore(self) -> float: <ide><path>spacy/util.py <ide> def join_command(command: List[str]) -> str: <ide> return " ".join(shlex.quote(cmd) for cmd in command) <ide> <ide> <del>def run_command(command: Union[str, List[str]]) -> None: <add>def run_command(command: Union[str, List[str]], *, capture=False, stdin=None) -> None: <ide> """Run a command on the command line as a subprocess. If the subprocess <ide> returns a non-zero exit code, a system exit is performed. <ide> <ide> def run_command(command: Union[str, List[str]]) -> None: <ide> if isinstance(command, str): <ide> command = split_command(command) <ide> try: <del> status = subprocess.call(command, env=os.environ.copy()) <add> ret = subprocess.run( <add> command, <add> env=os.environ.copy(), <add> capture_output=capture, <add> input=stdin, <add> text=True, <add> check=True, <add> ) <ide> except FileNotFoundError: <ide> raise FileNotFoundError( <ide> Errors.E970.format(str_command=" ".join(command), tool=command[0]) <ide> ) from None <del> if status != 0: <del> sys.exit(status) <add> if ret.returncode != 0: <add> sys.exit(ret.returncode) <add> return ret <ide> <ide> <ide> @contextmanager
3
PHP
PHP
fix bug in auth guard logout method
98c0c3b1f25cf643a81f4e2d88ab4befc4ce6a22
<ide><path>src/Illuminate/Auth/Guard.php <ide> protected function createRecaller($id) <ide> */ <ide> public function logout() <ide> { <add> $user = $this->user(); <add> <add> // If we have an event dispatcher instance, we can fire off the logout event <add> // so any further processing can be done. This allows the developer to be <add> // listening for anytime a user signs out of this application manually. <ide> $this->clearUserDataFromStorage(); <ide> <ide> if (isset($this->events)) <ide> { <del> $this->events->fire('auth.logout', array($this->user())); <add> $this->events->fire('auth.logout', array($user)); <ide> } <ide> <add> // Once we have fired the logout event we will clear the users out of memory <add> // so they are no longer available as the user is no longer considered as <add> // being signed into this application and should not be available here. <ide> $this->user = null; <ide> <ide> $this->loggedOut = true;
1
Javascript
Javascript
add transitions for dynamic data
f0f146715d1652dfa4bec96f131a2ad7c8464ba8
<ide><path>d3.chart.js <ide> d3.chart.bullet = function() { <ide> .enter().append('svg:rect') <ide> .attr('class', 'range'); <ide> chart.selectAll('rect.range') <add> .transition() <ide> .attr('width', scale) <ide> .attr('height', height) <ide> .attr('style', function(d, i) { return 'fill:' + rangeColor(i) }); <ide> d3.chart.bullet = function() { <ide> .enter().append('svg:rect') <ide> .attr('class', 'measure'); <ide> chart.selectAll('rect.measure') <add> .transition() <ide> .attr('width', scale) <ide> .attr('height', height / 3) <ide> .attr('y', height / 3) <ide> d3.chart.bullet = function() { <ide> .attr('stroke', '#000') <ide> .attr('stroke-width', '2px') <ide> chart.selectAll('line.marker') <add> .transition() <ide> .attr('x1', scale) <ide> .attr('x2', scale) <ide> .attr('y1', height/6) <ide> d3.chart.bullet = function() { <ide> .attr('stroke', '#666') <ide> .attr('stroke-width', '.5px') <ide> this.selectAll('line.rule') <add> .transition() <ide> .attr('x1', scale) <ide> .attr('x2', scale) <ide> .attr('y1', height) <ide> d3.chart.bullet = function() { <ide> .attr('text-anchor', 'middle') <ide> .attr('dy', '1em') <ide> this.selectAll('text.tick') <add> .text(tickFormat) <add> .transition() <ide> .attr('x', scale) <ide> .attr('y', height * 7/6) <del> .text(tickFormat) <ide> } <ide> <ide> var maxlength = function(l) { <ide><path>d3.chart.min.js <del>(function(){d3.chart={},d3.chart.bullet=function(){var a="left",b=function(a){return a.ranges},c=function(a){return a.markers},d=function(a){return a.measures},e,f=null,g=800,h=30,i=d3.scale.linear(),j=d3.scale.linear(),k=d3.scale.linear(),l=d3.format(",.0f"),m=function(a){for(var b=0,c=a.length;b<c;b++)a[b].sort(function(a,b){return b-a})},n=function(){var a=[];for(var e=0,f=this[0].length;e<f;e++)a.push(this[0][e].__data__);var g={ranges:a.map(b),measures:a.map(d),markers:a.map(c)};p(g),m(g.ranges),m(g.measures);var n=this;n.selectAll("rect.range").data(b).enter().append("svg:rect").attr("class","range"),n.selectAll("rect.range").attr("width",k).attr("height",h).attr("style",function(a,b){return"fill:"+i(b)}),n.selectAll("rect.measure").data(d).enter().append("svg:rect").attr("class","measure"),n.selectAll("rect.measure").attr("width",k).attr("height",h/3).attr("y",h/3).attr("fill",function(a,b){return j(b)}),n.selectAll("line.marker").data(c).enter().append("svg:line").attr("class","marker").attr("stroke","#000").attr("stroke-width","2px"),n.selectAll("line.marker").attr("x1",k).attr("x2",k).attr("y1",h/6).attr("y2",h*5/6);var o=k.ticks(10),q=this.selectAll("line.rule").data(o);q.exit().remove(),q.enter().append("svg:line").attr("class","rule").attr("stroke","#666").attr("stroke-width",".5px"),this.selectAll("line.rule").attr("x1",k).attr("x2",k).attr("y1",h).attr("y2",h*7/6);var r=this.selectAll("text.tick").data(o);r.exit().remove(),r.enter().append("svg:text").attr("class","tick").attr("text-anchor","middle").attr("dy","1em"),this.selectAll("text.tick").attr("x",k).attr("y",h*7/6).text(l)},o=function(a){return d3.max(a,function(a){return a.length})},p=function(b){e=/^left|right$/.test(a),i.domain([0,Math.max(1,o(b.ranges)-1)]).range(["#eee","#bbb"]),j.domain([0,Math.max(1,o(b.measures)-1)]).range(["lightsteelblue","steelblue"]),f=d3.max([].concat(b.ranges,b.markers,b.measures),function(a){return d3.max(a)}),k.domain([0,f]).range([0,g])};n.orient=function(b){if(!arguments.length)return a;a=b;return n},n.ranges=function(a){if(!arguments.length)return b;b=a;return n},n.markers=function(a){if(!arguments.length)return c;c=a;return 
n},n.measures=function(a){if(!arguments.length)return d;d=a;return n},n.maximum=function(a){if(!arguments.length)return f;f=a;return n},n.width=function(a){if(!arguments.length)return g;g=a;return n},n.height=function(a){if(!arguments.length)return h;h=a;return n},n.tickFormat=function(a){if(!arguments.length)return l;l=a;return n};return n}})() <ide>\ No newline at end of file <add>(function(){d3.chart={},d3.chart.bullet=function(){var a="left",b=function(a){return a.ranges},c=function(a){return a.markers},d=function(a){return a.measures},e,f=null,g=800,h=30,i=d3.scale.linear(),j=d3.scale.linear(),k=d3.scale.linear(),l=d3.format(",.0f"),m=function(a){for(var b=0,c=a.length;b<c;b++)a[b].sort(function(a,b){return b-a})},n=function(){var a=[];for(var e=0,f=this[0].length;e<f;e++)a.push(this[0][e].__data__);var g={ranges:a.map(b),measures:a.map(d),markers:a.map(c)};p(g),m(g.ranges),m(g.measures);var n=this;n.selectAll("rect.range").data(b).enter().append("svg:rect").attr("class","range"),n.selectAll("rect.range").transition().attr("width",k).attr("height",h).attr("style",function(a,b){return"fill:"+i(b)}),n.selectAll("rect.measure").data(d).enter().append("svg:rect").attr("class","measure"),n.selectAll("rect.measure").transition().attr("width",k).attr("height",h/3).attr("y",h/3).attr("fill",function(a,b){return j(b)}),n.selectAll("line.marker").data(c).enter().append("svg:line").attr("class","marker").attr("stroke","#000").attr("stroke-width","2px"),n.selectAll("line.marker").transition().attr("x1",k).attr("x2",k).attr("y1",h/6).attr("y2",h*5/6);var o=k.ticks(10),q=this.selectAll("line.rule").data(o);q.exit().remove(),q.enter().append("svg:line").attr("class","rule").attr("stroke","#666").attr("stroke-width",".5px"),this.selectAll("line.rule").transition().attr("x1",k).attr("x2",k).attr("y1",h).attr("y2",h*7/6);var r=this.selectAll("text.tick").data(o);r.exit().remove(),r.enter().append("svg:text").attr("class","tick").attr("text-anchor","middle").attr("dy","1em"),this.selectAll("text.tick").text(l).transition().attr("x",k).attr("y",h*7/6)},o=function(a){return d3.max(a,function(a){return a.length})},p=function(b){e=/^left|right$/.test(a),i.domain([0,Math.max(1,o(b.ranges)-1)]).range(["#eee","#bbb"]),j.domain([0,Math.max(1,o(b.measures)-1)]).range(["lightsteelblue","steelblue"]),f=d3.max([].concat(b.ranges,b.markers,b.measures),function(a){return d3.max(a)}),k.domain([0,f]).range([0,g])};n.orient=function(b){if(!arguments.length)return a;a=b;return n},n.ranges=function(a){if(!arguments.length)return b;b=a;return n},n.markers=function(a){if(!arguments.length)return c;c=a;return n},n.measures=function(a){if(!arguments.length)return d;d=a;return n},n.maximum=function(a){if(!arguments.length)return f;f=a;return n},n.width=function(a){if(!arguments.length)return g;g=a;return n},n.height=function(a){if(!arguments.length)return h;h=a;return n},n.tickFormat=function(a){if(!arguments.length)return l;l=a;return n};return n}})() <ide>\ No newline at end of file <ide><path>src/chart/bullet.js <ide> d3.chart.bullet = function() { <ide> .enter().append('svg:rect') <ide> .attr('class', 'range'); <ide> chart.selectAll('rect.range') <add> .transition() <ide> .attr('width', scale) <ide> .attr('height', height) <ide> .attr('style', function(d, i) { return 'fill:' + rangeColor(i) }); <ide> d3.chart.bullet = function() { <ide> .enter().append('svg:rect') <ide> .attr('class', 'measure'); <ide> chart.selectAll('rect.measure') <add> .transition() <ide> .attr('width', scale) <ide> .attr('height', height / 3) <ide> 
.attr('y', height / 3) <ide> d3.chart.bullet = function() { <ide> .attr('stroke', '#000') <ide> .attr('stroke-width', '2px') <ide> chart.selectAll('line.marker') <add> .transition() <ide> .attr('x1', scale) <ide> .attr('x2', scale) <ide> .attr('y1', height/6) <ide> d3.chart.bullet = function() { <ide> .attr('stroke', '#666') <ide> .attr('stroke-width', '.5px') <ide> this.selectAll('line.rule') <add> .transition() <ide> .attr('x1', scale) <ide> .attr('x2', scale) <ide> .attr('y1', height) <ide> d3.chart.bullet = function() { <ide> .attr('text-anchor', 'middle') <ide> .attr('dy', '1em') <ide> this.selectAll('text.tick') <add> .text(tickFormat) <add> .transition() <ide> .attr('x', scale) <ide> .attr('y', height * 7/6) <del> .text(tickFormat) <ide> } <ide> <ide> var maxlength = function(l) {
3
Python
Python
include flask.json package in setup()
16396248b829c61806ba65840aa70e0a3d8b3c8c
<ide><path>setup.py <ide> def hello(): <ide> description='A microframework based on Werkzeug, Jinja2 ' <ide> 'and good intentions', <ide> long_description=__doc__, <del> packages=['flask', 'flask.ext'], <add> packages=['flask', 'flask.ext', 'flask.json'], <ide> include_package_data=True, <ide> zip_safe=False, <ide> platforms='any',
1
Javascript
Javascript
add readline test for escape sequence
5c020762bb5b2b33a11b816c7fba12cce0df2e65
<ide><path>test/parallel/test-readline-keys.js <ide> addTest('\x1b\x1b\x1b', [ <ide> { name: 'escape', sequence: '\x1b\x1b\x1b', meta: true }, <ide> ]); <ide> <add>// Escape sequence <add>addTest('\x1b]', [{ name: undefined, sequence: '\x1B]', meta: true }]); <add> <ide> // Control keys <ide> addTest('\x01\x0b\x10', [ <ide> { name: 'a', sequence: '\x01', ctrl: true },
1
Javascript
Javascript
replace string concatenation with templates
d00bb87cc7f3cb01403a932365698f9667d47be6
<ide><path>test/parallel/test-whatwg-url-parsing.js <ide> for (const test of failureTests) { <ide> return false; <ide> <ide> // The input could be processed, so we don't do strict matching here <del> const match = (error + '').match(/Invalid URL: (.*)$/); <add> const match = (`${error}`).match(/Invalid URL: (.*)$/); <ide> if (!match) { <ide> return false; <ide> }
1
Python
Python
update loading logics
f5397ffc3bf444e814b4234526dccba146be0347
<ide><path>pytorch_transformers/__init__.py <ide> # Files and general utilities <ide> from .file_utils import (PYTORCH_TRANSFORMERS_CACHE, PYTORCH_PRETRAINED_BERT_CACHE, <ide> cached_path, add_start_docstrings, add_end_docstrings, <del> WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME) <add> WEIGHTS_NAME, TF2_WEIGHTS_NAME, TF_WEIGHTS_NAME, CONFIG_NAME) <ide> <ide> def is_torch_available(): <ide> return _torch_available <ide><path>pytorch_transformers/file_utils.py <ide> PYTORCH_TRANSFORMERS_CACHE = PYTORCH_PRETRAINED_BERT_CACHE # Kept for backward compatibility <ide> <ide> WEIGHTS_NAME = "pytorch_model.bin" <add>TF2_WEIGHTS_NAME = 'tf_model.h5' <ide> TF_WEIGHTS_NAME = 'model.ckpt' <ide> CONFIG_NAME = "config.json" <ide> <ide><path>pytorch_transformers/modeling_tf_utils.py <ide> import tensorflow as tf <ide> <ide> from .configuration_utils import PretrainedConfig <del>from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME <add>from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME, TF2_WEIGHTS_NAME <ide> <ide> logger = logging.getLogger(__name__) <ide> <ide> def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): <ide> model_kwargs = kwargs <ide> <ide> # Load model <del> if pretrained_model_name_or_path in cls.pretrained_model_archive_map: <del> archive_file = cls.pretrained_model_archive_map[pretrained_model_name_or_path] <del> elif os.path.isdir(pretrained_model_name_or_path): <del> if from_pt: <del> # Load from a PyTorch checkpoint <del> archive_file = os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME) <del> else: <del> archive_file = os.path.join(pretrained_model_name_or_path, TF_WEIGHTS_NAME) <del> else: <del> archive_file = pretrained_model_name_or_path <del> # redirect to the cache, if necessary <del> try: <del> resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies) <del> except EnvironmentError: <add> if pretrained_model_name_or_path is not None: <ide> if pretrained_model_name_or_path in cls.pretrained_model_archive_map: <del> logger.error( <del> "Couldn't reach server at '{}' to download pretrained weights.".format( <del> archive_file)) <add> archive_file = cls.pretrained_model_archive_map[pretrained_model_name_or_path] <add> elif os.path.isdir(pretrained_model_name_or_path): <add> if os.path.isfile(os.path.join(pretrained_model_name_or_path, TF2_WEIGHTS_NAME)): <add> # Load from a TF 2.0 checkpoint <add> archive_file = os.path.join(pretrained_model_name_or_path, TF2_WEIGHTS_NAME) <add> elif from_pt and os.path.isfile(os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME)): <add> # Load from a PyTorch checkpoint <add> archive_file = os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME) <add> else: <add> raise EnvironmentError("Error no file named {} found in directory {}".format( <add> tuple(WEIGHTS_NAME, TF2_WEIGHTS_NAME), <add> pretrained_model_name_or_path)) <add> elif os.path.isfile(pretrained_model_name_or_path): <add> archive_file = pretrained_model_name_or_path <add> else: <add> raise EnvironmentError("Error file {} not found".format(pretrained_model_name_or_path)) <add> <add> # redirect to the cache, if necessary <add> try: <add> resolved_archive_file = cached_path(archive_file, cache_dir=cache_dir, force_download=force_download, proxies=proxies) <add> except EnvironmentError as e: <add> if pretrained_model_name_or_path in cls.pretrained_model_archive_map: <add> logger.error( <add> "Couldn't reach server at '{}' to download pretrained weights.".format(
<add> archive_file)) <add> else: <add> logger.error( <add> "Model name '{}' was not found in model name list ({}). " <add> "We assumed '{}' was a path or url but couldn't find any file " <add> "associated to this path or url.".format( <add> pretrained_model_name_or_path, <add> ', '.join(cls.pretrained_model_archive_map.keys()), <add> archive_file)) <add> raise e <add> if resolved_archive_file == archive_file: <add> logger.info("loading weights file {}".format(archive_file)) <ide> else: <del> logger.error( <del> "Model name '{}' was not found in model name list ({}). " <del> "We assumed '{}' was a path or url but couldn't find any file " <del> "associated to this path or url.".format( <del> pretrained_model_name_or_path, <del> ', '.join(cls.pretrained_model_archive_map.keys()), <del> archive_file)) <del> return None <del> if resolved_archive_file == archive_file: <del> logger.info("loading weights file {}".format(archive_file)) <add> logger.info("loading weights file {} from cache at {}".format( <add> archive_file, resolved_archive_file)) <ide> else: <del> logger.info("loading weights file {} from cache at {}".format( <del> archive_file, resolved_archive_file)) <add> resolved_archive_file = None <ide> <ide> # Instantiate model. <ide> model = cls(config, *model_args, **model_kwargs) <ide><path>pytorch_transformers/modeling_utils.py <ide> from torch.nn import functional as F <ide> <ide> from .configuration_utils import PretrainedConfig <del>from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME <add>from .file_utils import cached_path, WEIGHTS_NAME, TF_WEIGHTS_NAME, TF2_WEIGHTS_NAME <ide> <ide> logger = logging.getLogger(__name__) <ide> <ide> def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs): <ide> if pretrained_model_name_or_path in cls.pretrained_model_archive_map: <ide> archive_file = cls.pretrained_model_archive_map[pretrained_model_name_or_path] <ide> elif os.path.isdir(pretrained_model_name_or_path): <del> if from_tf: <del> # Directly load from a TensorFlow checkpoint <add> if from_tf and os.path.isfile(os.path.join(pretrained_model_name_or_path, TF_WEIGHTS_NAME + ".index")): <add> # Load from a TF 1.0 checkpoint <ide> archive_file = os.path.join(pretrained_model_name_or_path, TF_WEIGHTS_NAME + ".index") <del> else: <add> elif from_tf and os.path.isfile(os.path.join(pretrained_model_name_or_path, TF2_WEIGHTS_NAME)): <add> # Load from a TF 2.0 checkpoint <add> archive_file = os.path.join(pretrained_model_name_or_path, TF2_WEIGHTS_NAME) <add> elif os.path.isfile(os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME)): <add> # Load from a PyTorch checkpoint <ide> archive_file = os.path.join(pretrained_model_name_or_path, WEIGHTS_NAME) <add> else: <add> raise EnvironmentError("Error no file named {} found in directory {}".format( <add> tuple(WEIGHTS_NAME, TF2_WEIGHTS_NAME, TF_WEIGHTS_NAME + ".index"), <add> pretrained_model_name_or_path)) <ide> elif os.path.isfile(pretrained_model_name_or_path): <ide> archive_file = pretrained_model_name_or_path <ide> else:
4
Javascript
Javascript
support formatting options in trace()
539bf1d7b7070eae0d0da067741d5bedf41750a8
<ide><path>lib/console.js <ide> Console.prototype.timeEnd = function(label) { <ide> }; <ide> <ide> <del>Console.prototype.trace = function(label) { <add>Console.prototype.trace = function() { <ide> // TODO probably can to do this better with V8's debug object once that is <ide> // exposed. <ide> var err = new Error; <ide> err.name = 'Trace'; <del> err.message = label || ''; <add> err.message = util.format.apply(this, arguments); <ide> Error.captureStackTrace(err, arguments.callee); <ide> this.error(err.stack); <ide> }; <ide><path>test/simple/test-console.js <ide> console.log('foo', 'bar'); <ide> console.log('%s %s', 'foo', 'bar', 'hop'); <ide> console.log({slashes: '\\\\'}); <ide> <add>console._stderr = process.stdout; <add>console.trace('This is a %j %d', { formatted: 'trace' }, 10, 'foo'); <add> <ide> global.process.stdout.write = stdout_write; <add> <ide> assert.equal('foo\n', strings.shift()); <ide> assert.equal('foo bar\n', strings.shift()); <ide> assert.equal('foo bar hop\n', strings.shift()); <ide> assert.equal("{ slashes: '\\\\\\\\' }\n", strings.shift()); <del> <del>process.stderr.write('hello world'); <add>assert.equal('Trace: This is a {"formatted":"trace"} 10 foo', <add> strings.shift().split('\n').shift()); <ide> <ide> assert.throws(function () { <ide> console.timeEnd('no such label');
2
Javascript
Javascript
add support for iife with arrow functions
dd6512ddfe47468311c9ed2db9f3cb5f3cfa4513
<ide><path>lib/javascript/JavascriptParser.js <ide> class JavascriptParser extends Parser { <ide> } <ide> this.walkExpression(argOrThis); <ide> }; <del> const { params } = functionExpression; <add> const { params, type } = functionExpression; <add> const arrow = type === "ArrowFunctionExpression"; <ide> const renameThis = currentThis ? getVarInfo(currentThis) : null; <ide> const varInfoForArgs = options.map(getVarInfo); <ide> const wasTopLevel = this.scope.topLevelScope; <del> this.scope.topLevelScope = false; <add> this.scope.topLevelScope = arrow ? "arrow" : false; <ide> const scopeParams = params.filter( <ide> (identifier, idx) => !varInfoForArgs[idx] <ide> ); <ide> class JavascriptParser extends Parser { <ide> } <ide> <ide> this.inFunctionScope(true, scopeParams, () => { <del> if (renameThis) { <add> if (renameThis && !arrow) { <ide> this.setVariable("this", renameThis); <ide> } <ide> for (let i = 0; i < varInfoForArgs.length; i++) { <ide> class JavascriptParser extends Parser { <ide> walkCallExpression(expression) { <ide> if ( <ide> expression.callee.type === "MemberExpression" && <del> expression.callee.object.type === "FunctionExpression" && <add> expression.callee.object.type.endsWith("FunctionExpression") && <ide> !expression.callee.computed && <ide> (expression.callee.property.name === "call" || <ide> expression.callee.property.name === "bind") && <ide> class JavascriptParser extends Parser { <ide> expression.arguments.slice(1), <ide> expression.arguments[0] <ide> ); <del> } else if (expression.callee.type === "FunctionExpression") { <add> } else if (expression.callee.type.endsWith("FunctionExpression")) { <ide> // (function(…) { }(…)) <ide> this._walkIIFE(expression.callee, expression.arguments, null); <ide> } else {
1
Javascript
Javascript
show plural if assets are more than one
23208a311ca2fce52a36db7d4fe38cda2eac0747
<ide><path>lib/Stats.js <ide> class Stats { <ide> colors.green(" [built]"); <ide> } <ide> if (module.assets && module.assets.length) { <del> colors.magenta(` [${module.assets.length} asset]`); <add> colors.magenta( <add> ` [${module.assets.length} asset${ <add> module.assets.length === 1 ? "" : "s" <add> }]` <add> ); <ide> } <ide> if (module.prefetched) { <ide> colors.magenta(" [prefetched]");
1
Ruby
Ruby
remove yaml serialization workaround for columns
4a2be83a7b9c71a952ae98e2aa63933b3a19b5e0
<ide><path>activerecord/lib/active_record/connection_adapters/column.rb <ide> module Format <ide> ISO_DATETIME = /\A(\d{4})-(\d\d)-(\d\d) (\d\d):(\d\d):(\d\d)(\.\d+)?\z/ <ide> end <ide> <del> attr_reader :name, :cast_type, :sql_type, :default_function <add> attr_reader :name, :cast_type, :null, :sql_type, :default_function <ide> <ide> delegate :type, :precision, :scale, :limit, :klass, :accessor, <ide> :text?, :number?, :binary?, :serialized?, :changed?, <ide> def initialize(name, default, cast_type, sql_type = nil, null = true) <ide> @name = name <ide> @cast_type = cast_type <ide> @sql_type = sql_type <del> @nullable = null <add> @null = null <ide> @original_default = default <ide> @default_function = nil <ide> end <ide> def with_type(type) <ide> clone.instance_variable_set('@cast_type', type) <ide> end <ide> end <del> <del> def null <del> @nullable <del> end <ide> end <ide> <ide> class NullColumn < Column
1
PHP
PHP
fix double error tags when option parsing fails
8203e4ce6a367d6bcfe87aecf0e0379edf6dde53
<ide><path>src/Console/Shell.php <ide> public function runCommand($argv, $autoMethod = false, $extra = []) <ide> try { <ide> list($this->params, $this->args) = $this->OptionParser->parse($argv); <ide> } catch (ConsoleException $e) { <del> $this->err('<error>Error: ' . $e->getMessage() . '</error>'); <add> $this->err('Error: ' . $e->getMessage()); <ide> $this->out($this->OptionParser->help($command)); <ide> <ide> return false; <ide><path>tests/TestCase/Console/ShellTest.php <ide> public function testRunCommandInvokeTask() <ide> $this->assertTrue($task->params['requested'], 'Task is requested, no welcome.'); <ide> } <ide> <add> /** <add> * test run command missing parameters <add> * <add> * @return void <add> */ <add> public function testRunCommandMainMissingArgument() <add> { <add> $io = $this->getMockBuilder('Cake\Console\ConsoleIo')->getMock(); <add> $shell = $this->getMockBuilder('Cake\Console\Shell') <add> ->setMethods(['main', 'startup', 'getOptionParser']) <add> ->setConstructorArgs([$io]) <add> ->getMock(); <add> <add> $parser = new ConsoleOptionParser('test'); <add> $parser->addArgument('filename', [ <add> 'required' => true, <add> 'help' => 'a file', <add> ]); <add> $shell->expects($this->once()) <add> ->method('getOptionParser') <add> ->will($this->returnValue($parser)); <add> $shell->expects($this->never())->method('main'); <add> <add> $io->expects($this->once()) <add> ->method('err') <add> ->with('<error>Error: Missing required arguments. filename is required.</error>'); <add> $result = $shell->runCommand([]); <add> $this->assertFalse($result, 'Shell should fail'); <add> } <add> <ide> /** <ide> * test wrapBlock wrapping text. <ide> *
2
Mixed
Ruby
handle other pk types in postgresql gracefully
c0a12453418356a837c3f62709fac6fe948047a6
<ide><path>activerecord/CHANGELOG.md <add>* Allow the PostgreSQL adapter to handle bigserial pk types again. <add> <add> Fixes #10410. <add> <add> *Patrick Robertson* <add> <ide> * Deprecate joining, eager loading and preloading of instance dependent <ide> associations without replacement. These operations happen before instances <ide> are created. The current behavior is unexpected and can result in broken <ide><path>activerecord/lib/active_record/connection_adapters/postgresql/schema_statements.rb <ide> def visit_AddColumn(o) <ide> <ide> def visit_ColumnDefinition(o) <ide> sql = super <del> if o.primary_key? && o.type == :uuid <add> if o.primary_key? && o.type != :primary_key <ide> sql << " PRIMARY KEY " <ide> add_column_options!(sql, column_options(o)) <ide> end <ide><path>activerecord/test/cases/primary_keys_test.rb <ide> def test_primary_key_method_with_ansi_quotes <ide> end <ide> end <ide> end <add> <add>if current_adapter?(:PostgreSQLAdapter) <add> class PrimaryKeyBigSerialTest < ActiveRecord::TestCase <add> self.use_transactional_fixtures = false <add> <add> class Widget < ActiveRecord::Base <add> end <add> <add> setup do <add> @connection = ActiveRecord::Base.connection <add> @connection.create_table(:widgets, id: :bigserial) { |t| } <add> end <add> <add> teardown do <add> @connection.drop_table :widgets <add> end <add> <add> def test_bigserial_primary_key <add> assert_equal "id", Widget.primary_key <add> assert_equal :integer, Widget.columns_hash[Widget.primary_key].type <add> <add> widget = Widget.create! <add> assert_not_nil widget.id <add> end <add> end <add>end
3
Javascript
Javascript
fix the inversion
c0beca7103cd7c636af7c5afda1d9e0bd61f3547
<ide><path>examples/js/loaders/LDrawLoader.js <ide> THREE.LDrawLoader = ( function () { <ide> <ide> } <ide> <add> // If the scale of the object is negated then the triangle winding order <add> // needs to be flipped. <add> var matrix = currentParseScope.matrix; <add> if ( <add> matrix.determinant() < 0 && ( <add> scope.separateObjects && isPrimitiveType( type ) || <add> ! scope.separateObjects <add> ) ) { <add> <add> currentParseScope.inverted = ! currentParseScope.inverted; <add> <add> } <add> <add> <ide> triangles = currentParseScope.triangles; <ide> lineSegments = currentParseScope.lineSegments; <ide> optionalSegments = currentParseScope.optionalSegments; <ide> THREE.LDrawLoader = ( function () { <ide> <ide> } <ide> <del> // If the scale of the object is negated then the triangle winding order <del> // needs to be flipped. <del> if ( matrix.determinant() < 0 ) { <del> <del> bfcInverted = ! bfcInverted; <del> <del> } <del> <ide> subobjects.push( { <ide> material: material, <ide> matrix: matrix,
1
Go
Go
add libnetwork call on daemon rename
8e0bbb28986c9aca5c51f546ba6fd0f1041ace14
<ide><path>daemon/rename.go <ide> package daemon <ide> <ide> import ( <add> "github.com/Sirupsen/logrus" <ide> derr "github.com/docker/docker/errors" <add> "github.com/docker/libnetwork" <add> "strings" <ide> ) <ide> <ide> // ContainerRename changes the name of a container, using the oldName <ide> // to find the container. An error is returned if newName is already <ide> // reserved. <ide> func (daemon *Daemon) ContainerRename(oldName, newName string) error { <add> var ( <add> err error <add> sid string <add> sb libnetwork.Sandbox <add> container *Container <add> ) <add> <ide> if oldName == "" || newName == "" { <ide> return derr.ErrorCodeEmptyRename <ide> } <ide> <del> container, err := daemon.Get(oldName) <add> container, err = daemon.Get(oldName) <ide> if err != nil { <ide> return err <ide> } <ide> func (daemon *Daemon) ContainerRename(oldName, newName string) error { <ide> <ide> container.Name = newName <ide> <del> undo := func() { <del> container.Name = oldName <del> daemon.reserveName(container.ID, oldName) <del> daemon.containerGraphDB.Delete(newName) <del> } <add> defer func() { <add> if err != nil { <add> container.Name = oldName <add> daemon.reserveName(container.ID, oldName) <add> daemon.containerGraphDB.Delete(newName) <add> } <add> }() <ide> <del> if err := daemon.containerGraphDB.Delete(oldName); err != nil { <del> undo() <add> if err = daemon.containerGraphDB.Delete(oldName); err != nil { <ide> return derr.ErrorCodeRenameDelete.WithArgs(oldName, err) <ide> } <ide> <del> if err := container.toDisk(); err != nil { <del> undo() <add> if err = container.toDisk(); err != nil { <add> return err <add> } <add> <add> if !container.Running { <add> container.logEvent("rename") <add> return nil <add> } <add> <add> defer func() { <add> if err != nil { <add> container.Name = oldName <add> if e := container.toDisk(); e != nil { <add> logrus.Errorf("%s: Failed in writing to Disk on rename failure: %v", container.ID, e) <add> } <add> } <add> }() <add> <add> sid = container.NetworkSettings.SandboxID <add> sb, err = daemon.netController.SandboxByID(sid) <add> if err != nil { <add> return err <add> } <add> <add> err = sb.Rename(strings.TrimPrefix(container.Name, "/")) <add> if err != nil { <ide> return err <ide> } <ide>
1
Go
Go
add api tests for secret update
aed7667bee243b0e0d1aa480e3bb52bae894cfe5
<ide><path>integration-cli/daemon_swarm.go <ide> func (d *SwarmDaemon) info() (swarm.Info, error) { <ide> <ide> type serviceConstructor func(*swarm.Service) <ide> type nodeConstructor func(*swarm.Node) <add>type specConstructor func(*swarm.Spec) <ide> <ide> func (d *SwarmDaemon) createService(c *check.C, f ...serviceConstructor) string { <ide> var service swarm.Service <ide> func (d *SwarmDaemon) listNodes(c *check.C) []swarm.Node { <ide> c.Assert(json.Unmarshal(out, &nodes), checker.IsNil) <ide> return nodes <ide> } <add> <add>func (d *SwarmDaemon) updateSwarm(c *check.C, f ...specConstructor) { <add> var sw swarm.Swarm <add> status, out, err := d.SockRequest("GET", "/swarm", nil) <add> c.Assert(err, checker.IsNil) <add> c.Assert(status, checker.Equals, http.StatusOK, check.Commentf("output: %q", string(out))) <add> c.Assert(json.Unmarshal(out, &sw), checker.IsNil) <add> <add> for _, fn := range f { <add> fn(&sw.Spec) <add> } <add> url := fmt.Sprintf("/swarm/update?version=%d", sw.Version.Index) <add> status, out, err = d.SockRequest("POST", url, sw.Spec) <add> c.Assert(err, checker.IsNil) <add> c.Assert(status, checker.Equals, http.StatusOK, check.Commentf("output: %q", string(out))) <add>} <ide><path>integration-cli/docker_api_swarm_test.go <ide> func (s *DockerSwarmSuite) TestApiSwarmSecretAcceptance(c *check.C) { <ide> info, err = d2.info() <ide> c.Assert(err, checker.IsNil) <ide> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <add> // change secret <add> d1.updateSwarm(c, func(s *swarm.Spec) { <add> for i := range s.AcceptancePolicy.Policies { <add> p := "foobaz" <add> s.AcceptancePolicy.Policies[i].Secret = &p <add> } <add> }) <add> <add> err = d2.Join(d1.listenAddr, "foobar", "", false) <add> c.Assert(err, checker.NotNil) <add> c.Assert(err.Error(), checker.Contains, "secret token is necessary") <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <add> c.Assert(d2.Join(d1.listenAddr, "foobaz", "", false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive) <add> c.Assert(d2.Leave(false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <add> // change policy, don't change secret <add> d1.updateSwarm(c, func(s *swarm.Spec) { <add> for i, p := range s.AcceptancePolicy.Policies { <add> if p.Role == swarm.NodeRoleManager { <add> s.AcceptancePolicy.Policies[i].Autoaccept = false <add> } <add> s.AcceptancePolicy.Policies[i].Secret = nil <add> } <add> }) <add> <add> err = d2.Join(d1.listenAddr, "", "", false) <add> c.Assert(err, checker.NotNil) <add> c.Assert(err.Error(), checker.Contains, "secret token is necessary") <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <add> c.Assert(d2.Join(d1.listenAddr, "foobaz", "", false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive) <add> c.Assert(d2.Leave(false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <add> // clear secret <add> d1.updateSwarm(c, func(s *swarm.Spec)
{ <add> for i := range s.AcceptancePolicy.Policies { <add> p := "" <add> s.AcceptancePolicy.Policies[i].Secret = &p <add> } <add> }) <add> <add> c.Assert(d2.Join(d1.listenAddr, "", "", false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive) <add> c.Assert(d2.Leave(false), checker.IsNil) <add> info, err = d2.info() <add> c.Assert(err, checker.IsNil) <add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive) <add> <ide> } <ide> <ide> func (s *DockerSwarmSuite) TestApiSwarmCAHash(c *check.C) {
2
Go
Go
fix wrong comment
718eba88363c14065b3a2a2dfbc9f270fc7cf63e
<ide><path>daemon/events.go <ide> func (daemon *Daemon) LogContainerEventWithAttributes(container *container.Conta <ide> daemon.EventsService.Log(action, events.ContainerEventType, actor) <ide> } <ide> <del>// LogImageEvent generates an event related to a container with only the default attributes. <add>// LogImageEvent generates an event related to an image with only the default attributes. <ide> func (daemon *Daemon) LogImageEvent(imageID, refName, action string) { <ide> daemon.LogImageEventWithAttributes(imageID, refName, action, map[string]string{}) <ide> } <ide> <del>// LogImageEventWithAttributes generates an event related to a container with specific given attributes. <add>// LogImageEventWithAttributes generates an event related to an image with specific given attributes. <ide> func (daemon *Daemon) LogImageEventWithAttributes(imageID, refName, action string, attributes map[string]string) { <ide> img, err := daemon.GetImage(imageID) <ide> if err == nil && img.Config != nil {
1
Go
Go
ignore no such container in testenv.clean
5dd89abdf1ec3e633ce01e74d1c47f8eb02fa31e
<ide><path>integration-cli/docker_api_containers_test.go <ide> func (s *DockerSuite) TestPutContainerArchiveErrSymlinkInVolumeToReadOnlyRootfs( <ide> readOnly: true, <ide> volumes: defaultVolumes(testVol), // Our bind mount is at /vol2 <ide> }) <del> defer deleteContainer(false, cID) <add> defer deleteContainer(cID) <ide> <ide> // Attempt to extract to a symlink in the volume which points to a <ide> // directory outside the volume. This should cause an error because the <ide><path>integration-cli/docker_cli_by_digest_test.go <ide> func setupImageWithTag(c *check.C, tag string) (digest.Digest, error) { <ide> c.Assert(err, checker.IsNil, check.Commentf("image tagging failed: %s", out)) <ide> <ide> // delete the container as we don't need it any more <del> err = deleteContainer(false, containerName) <add> err = deleteContainer(containerName) <ide> c.Assert(err, checker.IsNil) <ide> <ide> // push the image <ide><path>integration-cli/docker_cli_run_test.go <ide> func (s *DockerSuite) TestRunDeallocatePortOnMissingIptablesRule(c *check.C) { <ide> icmd.RunCommand("iptables", "-D", "DOCKER", "-d", fmt.Sprintf("%s/32", ip), <ide> "!", "-i", "docker0", "-o", "docker0", "-p", "tcp", "-m", "tcp", "--dport", "23", "-j", "ACCEPT").Assert(c, icmd.Success) <ide> <del> if err := deleteContainer(false, id); err != nil { <add> if err := deleteContainer(id); err != nil { <ide> c.Fatal(err) <ide> } <ide> <ide><path>integration-cli/docker_utils_test.go <ide> func daemonHost() string { <ide> } <ide> <ide> // FIXME(vdemeester) move this away are remove ignoreNoSuchContainer bool <del>func deleteContainer(ignoreNoSuchContainer bool, container ...string) error { <del> result := icmd.RunCommand(dockerBinary, append([]string{"rm", "-fv"}, container...)...) <del> if ignoreNoSuchContainer && result.Error != nil { <del> // If the error is "No such container: ..." this means the container doesn't exists anymore, <del> // we can safely ignore that one. <del> if strings.Contains(result.Stderr(), "No such container") { <del> return nil <del> } <del> } <del> return result.Compare(icmd.Success) <add>func deleteContainer(container ...string) error { <add> return icmd.RunCommand(dockerBinary, append([]string{"rm", "-fv"}, container...)...).Compare(icmd.Success) <ide> } <ide> <ide> func getAllContainers(c *check.C) string { <ide> func getAllContainers(c *check.C) string { <ide> func deleteAllContainers(c *check.C) { <ide> containers := getAllContainers(c) <ide> if containers != "" { <del> err := deleteContainer(true, strings.Split(strings.TrimSpace(containers), "\n")...) <add> err := deleteContainer(strings.Split(strings.TrimSpace(containers), "\n")...) <ide> c.Assert(err, checker.IsNil) <ide> } <ide> } <ide> func (f *remoteFileServer) Close() error { <ide> if f.container == "" { <ide> return nil <ide> } <del> return deleteContainer(false, f.container) <add> return deleteContainer(f.container) <ide> } <ide> <ide> func newRemoteFileServer(c *check.C, ctx *FakeContext) *remoteFileServer { <ide><path>integration-cli/environment/clean.go <ide> func getPausedContainers(t testingT, dockerBinary string) []string { <ide> func deleteAllContainers(t testingT, dockerBinary string) { <ide> containers := getAllContainers(t, dockerBinary) <ide> if len(containers) > 0 { <del> icmd.RunCommand(dockerBinary, append([]string{"rm", "-fv"}, containers...)...).Assert(t, icmd.Success) <add> result := icmd.RunCommand(dockerBinary, append([]string{"rm", "-fv"}, containers...)...)
<add> if result.Error != nil { <add> // If the error is "No such container: ..." this means the container doesn't exists anymore, <add> // we can safely ignore that one. <add> if strings.Contains(result.Stderr(), "No such container") { <add> return <add> } <add> t.Fatalf("error removing containers %v : %v (%s)", containers, result.Error, result.Combined()) <add> } <ide> } <ide> } <ide>
5
Javascript
Javascript
remove unused arguments from function
2a97eb61c65e23de3cee42c7b70f845b0c3a3d09
<ide><path>test/parallel/test-http-parser.js <ide> function newParser(type) { <ide> parser.url += url; <ide> }; <ide> <del> parser[kOnHeadersComplete] = function(info) { <add> parser[kOnHeadersComplete] = function() { <ide> }; <ide> <ide> parser[kOnBody] = common.mustNotCall('kOnBody should not be called'); <ide> function expectBody(expected) { <ide> const request = Buffer.from('GET /hello HTTP/1.1\r\n\r\n'); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(versionMajor, 1); <ide> assert.strictEqual(versionMinor, 1); <ide> assert.strictEqual(method, methods.indexOf('GET')); <ide> function expectBody(expected) { <ide> // thrown from parser.execute() <ide> // <ide> <del> parser[kOnHeadersComplete] = function(info) { <add> parser[kOnHeadersComplete] = function() { <ide> throw new Error('hello world'); <ide> }; <ide> <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url, statusCode, statusMessage) => { <ide> assert.strictEqual(method, undefined); <ide> assert.strictEqual(versionMajor, 1); <ide> assert.strictEqual(versionMinor, 1); <ide> function expectBody(expected) { <ide> 'HTTP/1.0 200 Connection established\r\n\r\n'); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url, statusCode, statusMessage) => { <ide> assert.strictEqual(versionMajor, 1); <ide> assert.strictEqual(versionMinor, 0); <ide> assert.strictEqual(method, undefined); <ide> function expectBody(expected) { <ide> <ide> let seen_body = false; <ide> <del> const onHeaders = (headers, url) => { <add> const onHeaders = (headers) => { <ide> assert.ok(seen_body); // trailers should come after the body <ide> assert.deepStrictEqual(headers, <ide> ['Vary', '*', 'Content-Type', 'text/plain']); <ide> }; <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url || parser.url, '/it'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method) => { <ide> assert.strictEqual(method, methods.indexOf('GET')); <ide> assert.strictEqual(versionMajor, 1); <ide> assert.strictEqual(versionMinor, 0); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('GET')); <ide> assert.strictEqual(url || parser.url, '/foo/bar/baz?quux=42#1337'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide>
assert.strictEqual(url || parser.url, '/it'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url || parser.url, '/it'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url || parser.url, '/it'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> <ide> function test(a, b) { <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url || parser.url, '/helpme'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url || parser.url, '/it'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> ); <ide> <ide> const onHeadersComplete1 = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('PUT')); <ide> assert.strictEqual(url, '/this'); <ide> assert.strictEqual(versionMajor, 1); <ide> function expectBody(expected) { <ide> }; <ide> <ide> const onHeadersComplete2 = (versionMajor, versionMinor, headers, <del> method, url, statusCode, statusMessage, <del> upgrade, shouldKeepAlive) => { <add> method, url) => { <ide> assert.strictEqual(method, methods.indexOf('POST')); <ide> assert.strictEqual(url, '/that'); <ide> assert.strictEqual(versionMajor, 1);
1
Go
Go
fix some types on errors and form
ac9c47e26f57a0fa2fed95a1fb1bf6c9410a3b60
<ide><path>api/errors/errors.go <ide> func (e apiError) HTTPErrorStatusCode() int { <ide> <ide> // NewErrorWithStatusCode allows you to associate <ide> // a specific HTTP Status Code to an error. <del>// The Server will take that code and set <add>// The server will take that code and set <ide> // it as the response status. <ide> func NewErrorWithStatusCode(err error, code int) error { <ide> return apiError{err, code} <ide><path>api/server/httputils/errors.go <ide> type inputValidationError interface { <ide> IsValidationError() bool <ide> } <ide> <del>// GetHTTPErrorStatusCode retrieves status code from error message <add>// GetHTTPErrorStatusCode retrieves status code from error message. <ide> func GetHTTPErrorStatusCode(err error) int { <ide> if err == nil { <ide> logrus.WithFields(logrus.Fields{"error": err}).Error("unexpected HTTP error handling") <ide><path>api/server/httputils/form.go <ide> func BoolValue(r *http.Request, k string) bool { <ide> } <ide> <ide> // BoolValueOrDefault returns the default bool passed if the query param is <del>// missing, otherwise it's just a proxy to boolValue above <add>// missing, otherwise it's just a proxy to boolValue above. <ide> func BoolValueOrDefault(r *http.Request, k string, d bool) bool { <ide> if _, ok := r.Form[k]; !ok { <ide> return d
3
PHP
PHP
make fallback loader just a special name loader
da47cd4c353dd65d6c80637218e773f78457618c
<ide><path>src/I18n/I18n.php <ide> public static function config($name, callable $loader) <ide> static::translators()->registerLoader($name, $loader); <ide> } <ide> <del> /** <del> * Registers a fallback translator loader <del> * <del> * @param callable $loader A callable object that should return a Package <del> * instance to be used for assembling a new translator. <del> * @return void <del> */ <del> public static function fallbackConfig(callable $loader) <del> { <del> static::translators()->setFallbackLoader($loader); <del> } <del> <ide> /** <ide> * Sets the default locale to use for future translator instances. <ide> * This also affects the `intl.default_locale` PHP setting. <ide><path>src/I18n/TranslatorRegistry.php <ide> class TranslatorRegistry extends TranslatorLocator <ide> * <ide> * @var array <ide> */ <del> protected $_loaders; <add> public $_loaders; <ide> <ide> /** <del> * Fallback loader <add> * Fallback loader name <ide> * <del> * @var callable <add> * @var string <ide> */ <del> protected $_fallbackLoader; <add> protected $_fallbackLoader = '_fallback'; <ide> <ide> /** <ide> * The name of the default formatter to use for newly created <ide> public function __construct( <ide> ) { <ide> parent::__construct($packages, $formatters, $factory, $locale); <ide> <del> $this->setFallbackLoader(function ($name, $locale) { <add> $this->registerLoader($this->_fallbackLoader, function ($name, $locale) { <ide> $chain = new ChainMessagesLoader([ <ide> new MessagesFileLoader($name, $locale, 'mo'), <ide> new MessagesFileLoader($name, $locale, 'po') <ide> public function registerLoader($name, callable $loader) <ide> $this->_loaders[$name] = $loader; <ide> } <ide> <del> /** <del> * Set fallback loader function <del> * <del> * @param callable $loader A callable object that should return a Package <del> * @return void <del> */ <del> public function setFallbackLoader(callable $loader) <del> { <del> $this->_fallbackLoader = $loader; <del> } <del> <ide> /** <ide> * Sets the name of the default messages formatter to use for future <ide> * translator instances. <ide> public function useFallback($enable = true) <ide> */ <ide> protected function _fallbackLoader($name, $locale) <ide> { <del> return call_user_func($this->_fallbackLoader, $name, $locale); <add> return $this->_loaders[$this->_fallbackLoader]($name, $locale); <ide> } <ide> <ide> /** <ide><path>tests/TestCase/I18n/I18nTest.php <ide> public function testLoaderFactory() <ide> */ <ide> public function testFallbackLoaderFactory() <ide> { <del> I18n::fallbackConfig(function ($name) { <add> I18n::config('_fallback', function ($name) { <ide> $package = new Package('default'); <ide> <ide> if ($name == 'custom') {
3
PHP
PHP
fix bad whitespace
f79325f4deea4c88643575e95cc5e850738c0f78
<ide><path>src/Utility/Inflector.php <ide> public static function normalize($string, $replacement = '_') <ide> $result = static::_cache($cacheKey, $string); <ide> <ide> if ($result === false) { <del> $result = strtolower(preg_replace('/(?<=\\w)([A-Z])/', $replacement .'\\1', $string)); <add> $result = strtolower(preg_replace('/(?<=\\w)([A-Z])/', $replacement . '\\1', $string)); <ide> static::_cache($cacheKey, $string, $result); <ide> } <ide>
1
Text
Text
update laracasts statistics
d6cf4ea06da4a9477e040af473174546082dbcb9
<ide><path>readme.md <ide> Laravel is accessible, yet powerful, providing tools needed for large, robust ap <ide> <ide> Laravel has the most extensive and thorough documentation and video tutorial library of any modern web application framework. The [Laravel documentation](https://laravel.com/docs) is in-depth and complete, making it a breeze to get started learning the framework. <ide> <del>If you're not in the mood to read, [Laracasts](https://laracasts.com) contains over 900 video tutorials on a range of topics including Laravel, modern PHP, unit testing, JavaScript, and more. Boost the skill level of yourself and your entire team by digging into our comprehensive video library. <add>If you're not in the mood to read, [Laracasts](https://laracasts.com) contains over 1100 video tutorials on a range of topics including Laravel, modern PHP, unit testing, JavaScript, and more. Boost the skill level of yourself and your entire team by digging into our comprehensive video library. <ide> <ide> ## Laravel Sponsors <ide>
1
Python
Python
move mappedoperator tests to mirror code location
70b41e46b46e65c0446a40ab91624cb2291a5039
<ide><path>tests/models/test_baseoperator.py <ide> from unittest import mock <ide> <ide> import jinja2 <del>import pendulum <ide> import pytest <ide> <ide> from airflow.decorators import task as task_decorator <ide> from airflow.exceptions import AirflowException <ide> from airflow.lineage.entities import File <ide> from airflow.models import DAG <ide> from airflow.models.baseoperator import BaseOperator, BaseOperatorMeta, chain, cross_downstream <del>from airflow.models.mappedoperator import MappedOperator <del>from airflow.models.taskinstance import TaskInstance <del>from airflow.models.taskmap import TaskMap <del>from airflow.models.xcom import XCOM_RETURN_KEY <del>from airflow.models.xcom_arg import XComArg <ide> from airflow.utils.context import Context <ide> from airflow.utils.edgemodifier import Label <del>from airflow.utils.state import TaskInstanceState <ide> from airflow.utils.task_group import TaskGroup <ide> from airflow.utils.trigger_rule import TriggerRule <ide> from airflow.utils.weight_rule import WeightRule <ide> from tests.models import DEFAULT_DATE <ide> from tests.test_utils.config import conf_vars <del>from tests.test_utils.mapping import expand_mapped_task <ide> from tests.test_utils.mock_operators import DeprecatedOperator, MockOperator <ide> <ide> <ide> def test_operator_retries(caplog, dag_maker, retries, expected): <ide> assert caplog.record_tuples == expected <ide> <ide> <del>def test_task_mapping_with_dag(): <del> with DAG("test-dag", start_date=DEFAULT_DATE) as dag: <del> task1 = BaseOperator(task_id="op1") <del> literal = ['a', 'b', 'c'] <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <del> finish = MockOperator(task_id="finish") <del> <del> task1 >> mapped >> finish <del> <del> assert task1.downstream_list == [mapped] <del> assert mapped in dag.tasks <del> assert mapped.task_group == dag.task_group <del> # At parse time there should only be three tasks! <del> assert len(dag.tasks) == 3 <del> <del> assert finish.upstream_list == [mapped] <del> assert mapped.downstream_list == [finish] <del> <del> <del>def test_task_mapping_without_dag_context(): <del> with DAG("test-dag", start_date=DEFAULT_DATE) as dag: <del> task1 = BaseOperator(task_id="op1") <del> literal = ['a', 'b', 'c'] <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <del> <del> task1 >> mapped <del> <del> assert isinstance(mapped, MappedOperator) <del> assert mapped in dag.tasks <del> assert task1.downstream_list == [mapped] <del> assert mapped in dag.tasks <del> # At parse time there should only be two tasks! 
<del> assert len(dag.tasks) == 2 <del> <del> <del>def test_task_mapping_default_args(): <del> default_args = {'start_date': DEFAULT_DATE.now(), 'owner': 'test'} <del> with DAG("test-dag", start_date=DEFAULT_DATE, default_args=default_args): <del> task1 = BaseOperator(task_id="op1") <del> literal = ['a', 'b', 'c'] <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <del> <del> task1 >> mapped <del> <del> assert mapped.partial_kwargs['owner'] == 'test' <del> assert mapped.start_date == pendulum.instance(default_args['start_date']) <del> <del> <del>def test_map_unknown_arg_raises(): <del> with pytest.raises(TypeError, match=r"argument 'file'"): <del> BaseOperator.partial(task_id='a').expand(file=[1, 2, {'a': 'b'}]) <del> <del> <del>def test_map_xcom_arg(): <del> """Test that dependencies are correct when mapping with an XComArg""" <del> with DAG("test-dag", start_date=DEFAULT_DATE): <del> task1 = BaseOperator(task_id="op1") <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <del> finish = MockOperator(task_id="finish") <del> <del> mapped >> finish <del> <del> assert task1.downstream_list == [mapped] <del> <del> <del>def test_partial_on_instance() -> None: <del> """`.partial` on an instance should fail -- it's only designed to be called on classes""" <del> with pytest.raises(TypeError): <del> MockOperator( <del> task_id='a', <del> ).partial() <del> <del> <del>def test_partial_on_class() -> None: <del> # Test that we accept args for superclasses too <del> op = MockOperator.partial(task_id='a', arg1="a", trigger_rule=TriggerRule.ONE_FAILED) <del> assert op.kwargs["arg1"] == "a" <del> assert op.kwargs["trigger_rule"] == TriggerRule.ONE_FAILED <del> <del> <del>def test_partial_on_class_invalid_ctor_args() -> None: <del> """Test that when we pass invalid args to partial(). <del> <del> I.e. 
if an arg is not known on the class or any of its parent classes we error at parse time <del> """ <del> with pytest.raises(TypeError, match=r"arguments 'foo', 'bar'"): <del> MockOperator.partial(task_id='a', foo='bar', bar=2) <del> <del> <del>@pytest.mark.parametrize( <del> ["num_existing_tis", "expected"], <del> ( <del> pytest.param(0, [(0, None), (1, None), (2, None)], id='only-unmapped-ti-exists'), <del> pytest.param( <del> 3, <del> [(0, 'success'), (1, 'success'), (2, 'success')], <del> id='all-tis-exist', <del> ), <del> pytest.param( <del> 5, <del> [ <del> (0, 'success'), <del> (1, 'success'), <del> (2, 'success'), <del> (3, TaskInstanceState.REMOVED), <del> (4, TaskInstanceState.REMOVED), <del> ], <del> id="tis-to-be-removed", <del> ), <del> ), <del>) <del>def test_expand_mapped_task_instance(dag_maker, session, num_existing_tis, expected): <del> literal = [1, 2, {'a': 'b'}] <del> with dag_maker(session=session): <del> task1 = BaseOperator(task_id="op1") <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <del> <del> dr = dag_maker.create_dagrun() <del> <del> session.add( <del> TaskMap( <del> dag_id=dr.dag_id, <del> task_id=task1.task_id, <del> run_id=dr.run_id, <del> map_index=-1, <del> length=len(literal), <del> keys=None, <del> ) <del> ) <del> <del> if num_existing_tis: <del> # Remove the map_index=-1 TI when we're creating other TIs <del> session.query(TaskInstance).filter( <del> TaskInstance.dag_id == mapped.dag_id, <del> TaskInstance.task_id == mapped.task_id, <del> TaskInstance.run_id == dr.run_id, <del> ).delete() <del> <del> for index in range(num_existing_tis): <del> # Give the existing TIs a state to make sure we don't change them <del> ti = TaskInstance(mapped, run_id=dr.run_id, map_index=index, state=TaskInstanceState.SUCCESS) <del> session.add(ti) <del> session.flush() <del> <del> mapped.expand_mapped_task(dr.run_id, session=session) <del> <del> indices = ( <del> session.query(TaskInstance.map_index, TaskInstance.state) <del> .filter_by(task_id=mapped.task_id, dag_id=mapped.dag_id, run_id=dr.run_id) <del> .order_by(TaskInstance.map_index) <del> .all() <del> ) <del> <del> assert indices == expected <del> <del> <del>def test_expand_mapped_task_instance_skipped_on_zero(dag_maker, session): <del> with dag_maker(session=session): <del> task1 = BaseOperator(task_id="op1") <del> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <del> <del> dr = dag_maker.create_dagrun() <del> <del> expand_mapped_task(mapped, dr.run_id, task1.task_id, length=0, session=session) <del> <del> indices = ( <del> session.query(TaskInstance.map_index, TaskInstance.state) <del> .filter_by(task_id=mapped.task_id, dag_id=mapped.dag_id, run_id=dr.run_id) <del> .order_by(TaskInstance.map_index) <del> .all() <del> ) <del> <del> assert indices == [(-1, TaskInstanceState.SKIPPED)] <del> <del> <del>def test_mapped_task_applies_default_args_classic(dag_maker): <del> with dag_maker(default_args={"execution_timeout": timedelta(minutes=30)}) as dag: <del> MockOperator(task_id="simple", arg1=None, arg2=0) <del> MockOperator.partial(task_id="mapped").expand(arg1=[1], arg2=[2, 3]) <del> <del> assert dag.get_task("simple").execution_timeout == timedelta(minutes=30) <del> assert dag.get_task("mapped").execution_timeout == timedelta(minutes=30) <del> <del> <del>def test_mapped_task_applies_default_args_taskflow(dag_maker): <del> with dag_maker(default_args={"execution_timeout": timedelta(minutes=30)}) as dag: <del> <del> @dag.task <del> def simple(arg): <del> pass 
<del> <del> @dag.task <del> def mapped(arg): <del> pass <del> <del> simple(arg=0) <del> mapped.expand(arg=[1, 2]) <del> <del> assert dag.get_task("simple").execution_timeout == timedelta(minutes=30) <del> assert dag.get_task("mapped").execution_timeout == timedelta(minutes=30) <del> <del> <del>def test_mapped_render_template_fields_validating_operator(dag_maker, session): <del> class MyOperator(MockOperator): <del> def __init__(self, value, arg1, **kwargs): <del> assert isinstance(value, str), "value should have been resolved before unmapping" <del> assert isinstance(arg1, str), "value should have been resolved before unmapping" <del> super().__init__(arg1=arg1, **kwargs) <del> self.value = value <del> <del> with dag_maker(session=session): <del> task1 = BaseOperator(task_id="op1") <del> xcom_arg = XComArg(task1) <del> mapped = MyOperator.partial(task_id='a', arg2='{{ ti.task_id }}').expand( <del> value=xcom_arg, arg1=xcom_arg <del> ) <del> <del> dr = dag_maker.create_dagrun() <del> ti: TaskInstance = dr.get_task_instance(task1.task_id, session=session) <del> <del> ti.xcom_push(key=XCOM_RETURN_KEY, value=['{{ ds }}'], session=session) <del> <del> session.add( <del> TaskMap( <del> dag_id=dr.dag_id, <del> task_id=task1.task_id, <del> run_id=dr.run_id, <del> map_index=-1, <del> length=1, <del> keys=None, <del> ) <del> ) <del> session.flush() <del> <del> mapped_ti: TaskInstance = dr.get_task_instance(mapped.task_id, session=session) <del> mapped_ti.map_index = 0 <del> op = mapped.render_template_fields(context=mapped_ti.get_template_context(session=session)) <del> assert isinstance(op, MyOperator) <del> <del> assert op.value == "{{ ds }}", "Should not be templated!" <del> assert op.arg1 == "{{ ds }}" <del> assert op.arg2 == "a" <del> <del> <ide> def test_default_retry_delay(dag_maker): <ide> with dag_maker(dag_id='test_default_retry_delay'): <ide> task1 = BaseOperator(task_id='test_no_explicit_retry_delay') <ide><path>tests/models/test_mappedoperator.py <add># <add># Licensed to the Apache Software Foundation (ASF) under one <add># or more contributor license agreements. See the NOTICE file <add># distributed with this work for additional information <add># regarding copyright ownership. The ASF licenses this file <add># to you under the Apache License, Version 2.0 (the <add># "License"); you may not use this file except in compliance <add># with the License. You may obtain a copy of the License at <add># <add># http://www.apache.org/licenses/LICENSE-2.0 <add># <add># Unless required by applicable law or agreed to in writing, <add># software distributed under the License is distributed on an <add># "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY <add># KIND, either express or implied. See the License for the <add># specific language governing permissions and limitations <add># under the License. 
<add>from datetime import timedelta <add> <add>import pendulum <add>import pytest <add> <add>from airflow.models import DAG <add>from airflow.models.baseoperator import BaseOperator <add>from airflow.models.mappedoperator import MappedOperator <add>from airflow.models.taskinstance import TaskInstance <add>from airflow.models.taskmap import TaskMap <add>from airflow.models.xcom import XCOM_RETURN_KEY <add>from airflow.models.xcom_arg import XComArg <add>from airflow.utils.state import TaskInstanceState <add>from airflow.utils.trigger_rule import TriggerRule <add>from tests.models import DEFAULT_DATE <add>from tests.test_utils.mapping import expand_mapped_task <add>from tests.test_utils.mock_operators import MockOperator <add> <add> <add>def test_task_mapping_with_dag(): <add> with DAG("test-dag", start_date=DEFAULT_DATE) as dag: <add> task1 = BaseOperator(task_id="op1") <add> literal = ['a', 'b', 'c'] <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <add> finish = MockOperator(task_id="finish") <add> <add> task1 >> mapped >> finish <add> <add> assert task1.downstream_list == [mapped] <add> assert mapped in dag.tasks <add> assert mapped.task_group == dag.task_group <add> # At parse time there should only be three tasks! <add> assert len(dag.tasks) == 3 <add> <add> assert finish.upstream_list == [mapped] <add> assert mapped.downstream_list == [finish] <add> <add> <add>def test_task_mapping_without_dag_context(): <add> with DAG("test-dag", start_date=DEFAULT_DATE) as dag: <add> task1 = BaseOperator(task_id="op1") <add> literal = ['a', 'b', 'c'] <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <add> <add> task1 >> mapped <add> <add> assert isinstance(mapped, MappedOperator) <add> assert mapped in dag.tasks <add> assert task1.downstream_list == [mapped] <add> assert mapped in dag.tasks <add> # At parse time there should only be two tasks! 
<add> assert len(dag.tasks) == 2 <add> <add> <add>def test_task_mapping_default_args(): <add> default_args = {'start_date': DEFAULT_DATE.now(), 'owner': 'test'} <add> with DAG("test-dag", start_date=DEFAULT_DATE, default_args=default_args): <add> task1 = BaseOperator(task_id="op1") <add> literal = ['a', 'b', 'c'] <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=literal) <add> <add> task1 >> mapped <add> <add> assert mapped.partial_kwargs['owner'] == 'test' <add> assert mapped.start_date == pendulum.instance(default_args['start_date']) <add> <add> <add>def test_map_unknown_arg_raises(): <add> with pytest.raises(TypeError, match=r"argument 'file'"): <add> BaseOperator.partial(task_id='a').expand(file=[1, 2, {'a': 'b'}]) <add> <add> <add>def test_map_xcom_arg(): <add> """Test that dependencies are correct when mapping with an XComArg""" <add> with DAG("test-dag", start_date=DEFAULT_DATE): <add> task1 = BaseOperator(task_id="op1") <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <add> finish = MockOperator(task_id="finish") <add> <add> mapped >> finish <add> <add> assert task1.downstream_list == [mapped] <add> <add> <add>def test_partial_on_instance() -> None: <add> """`.partial` on an instance should fail -- it's only designed to be called on classes""" <add> with pytest.raises(TypeError): <add> MockOperator( <add> task_id='a', <add> ).partial() <add> <add> <add>def test_partial_on_class() -> None: <add> # Test that we accept args for superclasses too <add> op = MockOperator.partial(task_id='a', arg1="a", trigger_rule=TriggerRule.ONE_FAILED) <add> assert op.kwargs["arg1"] == "a" <add> assert op.kwargs["trigger_rule"] == TriggerRule.ONE_FAILED <add> <add> <add>def test_partial_on_class_invalid_ctor_args() -> None: <add> """Test that when we pass invalid args to partial(). <add> <add> I.e. 
if an arg is not known on the class or any of its parent classes we error at parse time <add> """ <add> with pytest.raises(TypeError, match=r"arguments 'foo', 'bar'"): <add> MockOperator.partial(task_id='a', foo='bar', bar=2) <add> <add> <add>@pytest.mark.parametrize( <add> ["num_existing_tis", "expected"], <add> ( <add> pytest.param(0, [(0, None), (1, None), (2, None)], id='only-unmapped-ti-exists'), <add> pytest.param( <add> 3, <add> [(0, 'success'), (1, 'success'), (2, 'success')], <add> id='all-tis-exist', <add> ), <add> pytest.param( <add> 5, <add> [ <add> (0, 'success'), <add> (1, 'success'), <add> (2, 'success'), <add> (3, TaskInstanceState.REMOVED), <add> (4, TaskInstanceState.REMOVED), <add> ], <add> id="tis-to-be-removed", <add> ), <add> ), <add>) <add>def test_expand_mapped_task_instance(dag_maker, session, num_existing_tis, expected): <add> literal = [1, 2, {'a': 'b'}] <add> with dag_maker(session=session): <add> task1 = BaseOperator(task_id="op1") <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <add> <add> dr = dag_maker.create_dagrun() <add> <add> session.add( <add> TaskMap( <add> dag_id=dr.dag_id, <add> task_id=task1.task_id, <add> run_id=dr.run_id, <add> map_index=-1, <add> length=len(literal), <add> keys=None, <add> ) <add> ) <add> <add> if num_existing_tis: <add> # Remove the map_index=-1 TI when we're creating other TIs <add> session.query(TaskInstance).filter( <add> TaskInstance.dag_id == mapped.dag_id, <add> TaskInstance.task_id == mapped.task_id, <add> TaskInstance.run_id == dr.run_id, <add> ).delete() <add> <add> for index in range(num_existing_tis): <add> # Give the existing TIs a state to make sure we don't change them <add> ti = TaskInstance(mapped, run_id=dr.run_id, map_index=index, state=TaskInstanceState.SUCCESS) <add> session.add(ti) <add> session.flush() <add> <add> mapped.expand_mapped_task(dr.run_id, session=session) <add> <add> indices = ( <add> session.query(TaskInstance.map_index, TaskInstance.state) <add> .filter_by(task_id=mapped.task_id, dag_id=mapped.dag_id, run_id=dr.run_id) <add> .order_by(TaskInstance.map_index) <add> .all() <add> ) <add> <add> assert indices == expected <add> <add> <add>def test_expand_mapped_task_instance_skipped_on_zero(dag_maker, session): <add> with dag_maker(session=session): <add> task1 = BaseOperator(task_id="op1") <add> mapped = MockOperator.partial(task_id='task_2').expand(arg2=XComArg(task1)) <add> <add> dr = dag_maker.create_dagrun() <add> <add> expand_mapped_task(mapped, dr.run_id, task1.task_id, length=0, session=session) <add> <add> indices = ( <add> session.query(TaskInstance.map_index, TaskInstance.state) <add> .filter_by(task_id=mapped.task_id, dag_id=mapped.dag_id, run_id=dr.run_id) <add> .order_by(TaskInstance.map_index) <add> .all() <add> ) <add> <add> assert indices == [(-1, TaskInstanceState.SKIPPED)] <add> <add> <add>def test_mapped_task_applies_default_args_classic(dag_maker): <add> with dag_maker(default_args={"execution_timeout": timedelta(minutes=30)}) as dag: <add> MockOperator(task_id="simple", arg1=None, arg2=0) <add> MockOperator.partial(task_id="mapped").expand(arg1=[1], arg2=[2, 3]) <add> <add> assert dag.get_task("simple").execution_timeout == timedelta(minutes=30) <add> assert dag.get_task("mapped").execution_timeout == timedelta(minutes=30) <add> <add> <add>def test_mapped_task_applies_default_args_taskflow(dag_maker): <add> with dag_maker(default_args={"execution_timeout": timedelta(minutes=30)}) as dag: <add> <add> @dag.task <add> def simple(arg): <add> pass 
<add> <add> @dag.task <add> def mapped(arg): <add> pass <add> <add> simple(arg=0) <add> mapped.expand(arg=[1, 2]) <add> <add> assert dag.get_task("simple").execution_timeout == timedelta(minutes=30) <add> assert dag.get_task("mapped").execution_timeout == timedelta(minutes=30) <add> <add> <add>def test_mapped_render_template_fields_validating_operator(dag_maker, session): <add> class MyOperator(MockOperator): <add> def __init__(self, value, arg1, **kwargs): <add> assert isinstance(value, str), "value should have been resolved before unmapping" <add> assert isinstance(arg1, str), "value should have been resolved before unmapping" <add> super().__init__(arg1=arg1, **kwargs) <add> self.value = value <add> <add> with dag_maker(session=session): <add> task1 = BaseOperator(task_id="op1") <add> xcom_arg = XComArg(task1) <add> mapped = MyOperator.partial(task_id='a', arg2='{{ ti.task_id }}').expand( <add> value=xcom_arg, arg1=xcom_arg <add> ) <add> <add> dr = dag_maker.create_dagrun() <add> ti: TaskInstance = dr.get_task_instance(task1.task_id, session=session) <add> <add> ti.xcom_push(key=XCOM_RETURN_KEY, value=['{{ ds }}'], session=session) <add> <add> session.add( <add> TaskMap( <add> dag_id=dr.dag_id, <add> task_id=task1.task_id, <add> run_id=dr.run_id, <add> map_index=-1, <add> length=1, <add> keys=None, <add> ) <add> ) <add> session.flush() <add> <add> mapped_ti: TaskInstance = dr.get_task_instance(mapped.task_id, session=session) <add> mapped_ti.map_index = 0 <add> op = mapped.render_template_fields(context=mapped_ti.get_template_context(session=session)) <add> assert isinstance(op, MyOperator) <add> <add> assert op.value == "{{ ds }}", "Should not be templated!" <add> assert op.arg1 == "{{ ds }}" <add> assert op.arg2 == "a"
2
Javascript
Javascript
fix error reporting in child_process callbacks
264e540d008a251a7e37f367611677414c2591d1
<ide><path>lib/child_process.js <ide> exports.execFile = function (file, args /*, options, callback */) { <ide> <ide> <ide> function ChildProcess () { <del> process.EventEmitter.call(this); <add> EventEmitter.call(this); <ide> <ide> var self = this; <ide> <ide> function ChildProcess () { <ide> var stdout = this.stdout = new Stream(); <ide> var stderr = this.stderr = new Stream(); <ide> <del> stderr.onend = stdout.onend = function () { <add> function onClose () { <ide> if (gotCHLD && !stdout.readable && !stderr.readable) { <ide> self.emit('exit', exitCode, termSignal); <ide> } <del> }; <add> } <add> <add> stderr.addListener('close', onClose); <add> stdout.addListener('close', onClose); <ide> <ide> internal.onexit = function (code, signal) { <ide> gotCHLD = true; <ide><path>test/simple/test-child-process-exit-code.js <ide> require("../common"); <del>var spawn = require('child_process').spawn <del> , path = require('path') <del> , sub = path.join(fixturesDir, 'exit.js') <del> , child = spawn(process.argv[0], [sub, 23]) <del> ; <add>spawn = require('child_process').spawn, <add>path = require('path'); <ide> <del>child.addListener('exit', function(code, signal) { <add>exits = 0; <add> <add>exitScript = path.join(fixturesDir, 'exit.js') <add>exitChild = spawn(process.argv[0], [exitScript, 23]); <add>exitChild.addListener('exit', function(code, signal) { <ide> assert.strictEqual(code, 23); <ide> assert.strictEqual(signal, null); <del>}); <ide>\ No newline at end of file <add> <add> exits++; <add>}); <add> <add> <add> <add>errorScript = path.join(fixturesDir, 'child_process_should_emit_error.js') <add>errorChild = spawn(process.argv[0], [errorScript]); <add>errorChild.addListener('exit', function(code, signal) { <add> assert.ok(code !== 0); <add> assert.strictEqual(signal, null); <add> <add> exits++; <add>}); <add> <add> <add>process.addListener('exit', function () { <add> assert.equal(2, exits); <add>});
2
Go
Go
add more details to runningtaskscount
67d6f174ae38d6765df35aa2b626b8c58954bff9
<ide><path>integration/internal/swarm/states.go <ide> func RunningTasksCount(client client.ServiceAPIClient, serviceID string, instanc <ide> tasks, err := client.TaskList(context.Background(), types.TaskListOptions{ <ide> Filters: filter, <ide> }) <add> var running int <add> var taskError string <add> for _, task := range tasks { <add> switch task.Status.State { <add> case swarmtypes.TaskStateRunning: <add> running++ <add> case swarmtypes.TaskStateFailed: <add> if task.Status.Err != "" { <add> taskError = task.Status.Err <add> } <add> } <add> } <add> <ide> switch { <ide> case err != nil: <ide> return poll.Error(err) <del> case len(tasks) == int(instances): <del> for _, task := range tasks { <del> if task.Status.State != swarmtypes.TaskStateRunning { <del> return poll.Continue("waiting for tasks to enter run state") <del> } <del> } <add> case running > int(instances): <add> return poll.Continue("waiting for tasks to terminate") <add> case running < int(instances) && taskError != "": <add> return poll.Continue("waiting for tasks to enter run state. task failed with error: %s", taskError) <add> case running == int(instances): <ide> return poll.Success() <ide> default: <del> return poll.Continue("task count at %d waiting for %d", len(tasks), instances) <add> return poll.Continue("running task count at %d waiting for %d (total tasks: %d)", running, instances, len(tasks)) <ide> } <ide> } <ide> }
1
Go
Go
remove unused fields
64fb664908f7d3368d1bbfd1efb56cd45e5ed7a3
<ide><path>profiles/apparmor/apparmor.go <ide> var ( <ide> type profileData struct { <ide> // Name is profile name. <ide> Name string <del> // ExecPath is the path to the docker binary. <del> ExecPath string <ide> // Imports defines the apparmor functions to import, before defining the profile. <ide> Imports []string <ide> // InnerImports defines the apparmor functions to import in the profile. <ide><path>profiles/apparmor/template.go <ide> profile {{.Name}} flags=(attach_disconnected,mediate_deleted) { <ide> # suppress ptrace denials when using 'docker ps' or using 'ps' inside a container <ide> ptrace (trace,read) peer=docker-default, <ide> {{end}} <del>{{if ge .Version 209000}} <del> # docker daemon confinement requires explict allow rule for signal <del> signal (receive) set=(kill,term) peer={{.ExecPath}}, <del>{{end}} <ide> } <ide> `
2
Ruby
Ruby
add tap accessor
b3d44da9e3ca226829c64421e09fbbbbd6fe09bc
<ide><path>Library/Homebrew/dependency.rb <ide> def merge_repeats(all) <ide> end <ide> <ide> class TapDependency < Dependency <add> attr_reader :tap <add> <ide> def initialize(name, tags=[], env_proc=DEFAULT_ENV_PROC, option_name=name) <del> super(name, tags, env_proc, name.split("/").last) <add> @tap, _, option_name = option_name.rpartition "/" <add> super(name, tags, env_proc, option_name) <ide> end <ide> <ide> def installed?
1
Javascript
Javascript
add additional tests
266c7ed74c52cfe73c304a1adac7b986f4b723a5
<ide><path>test/integration/basic/test/security.js <ide> /* eslint-env jest */ <ide> import { fetchViaHTTP } from 'next-test-utils' <add>import url from 'url' <ide> <ide> module.exports = (context) => { <ide> describe('With Security Related Issues', () => { <ide> module.exports = (context) => { <ide> } <ide> } <ide> }) <add> <add> it('should handle encoded / value for trailing slash correctly', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/%2fexample.com/', <add> undefined, <add> { redirect: 'manual' } <add> ) <add> <add> const { pathname, hostname } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(308) <add> expect(pathname).toBe('/%2fexample.com') <add> expect(hostname).not.toBe('example.com') <add> }) <ide> }) <ide> } <ide><path>test/integration/production/next.config.js <ide> module.exports = { <ide> destination: '/about', <ide> permanent: false, <ide> }, <add> { <add> source: '/redirect-query-test/:path', <add> destination: '/about?foo=:path', <add> permanent: false, <add> }, <ide> ] <ide> }, <ide> } <ide><path>test/integration/production/test/security.js <ide> module.exports = (context) => { <ide> 'url=https%3A%2F%2Fgoogle.com%2Fimage%3Fcrop%3Dfocalpoint%26w%3D24&w=1200&q=100' <ide> ) <ide> }) <add> <add> it('should handle encoded value in the pathname correctly /', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/redirect/me/to-about/%2fgoogle.com', <add> undefined, <add> { <add> redirect: 'manual', <add> } <add> ) <add> <add> const { pathname, hostname } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(307) <add> expect(pathname).toBe('/%2fgoogle.com/about') <add> expect(hostname).not.toBe('google.com') <add> }) <add> <add> it('should handle encoded value in the pathname to query correctly (/)', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/redirect-query-test/%2Fgoogle.com', <add> undefined, <add> { <add> redirect: 'manual', <add> } <add> ) <add> <add> const { pathname, hostname, query } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(307) <add> expect(pathname).toBe('/about') <add> expect(query).toBe('foo=%2Fgoogle.com') <add> expect(hostname).not.toBe('google.com') <add> expect(hostname).not.toMatch(/google/) <add> }) <add> <add> it('should handle encoded / value for trailing slash correctly', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/%2fexample.com/', <add> undefined, <add> { redirect: 'manual' } <add> ) <add> <add> const { pathname, hostname } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(308) <add> expect(pathname).toBe('/%2fexample.com') <add> expect(hostname).not.toBe('example.com') <add> }) <add> <add> it('should handle encoded value in the pathname correctly /', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/redirect/me/to-about/%2fgoogle.com', <add> undefined, <add> { <add> redirect: 'manual', <add> } <add> ) <add> <add> const { pathname, hostname } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(307) <add> expect(pathname).toBe('/%2fgoogle.com/about') <add> expect(hostname).not.toBe('google.com') <add> }) <add> <add> it('should handle encoded value in the pathname to query correctly (/)', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> 
'/redirect-query-test/%2Fgoogle.com', <add> undefined, <add> { <add> redirect: 'manual', <add> } <add> ) <add> <add> const { pathname, hostname, query } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(307) <add> expect(pathname).toBe('/about') <add> expect(query).toBe('foo=%2Fgoogle.com') <add> expect(hostname).not.toBe('google.com') <add> expect(hostname).not.toMatch(/google/) <add> }) <add> <add> it('should handle encoded / value for trailing slash correctly', async () => { <add> const res = await fetchViaHTTP( <add> context.appPort, <add> '/%2fexample.com/', <add> undefined, <add> { redirect: 'manual' } <add> ) <add> <add> const { pathname, hostname } = url.parse( <add> res.headers.get('location') || '' <add> ) <add> expect(res.status).toBe(308) <add> expect(pathname).toBe('/%2fexample.com') <add> expect(hostname).not.toBe('example.com') <add> }) <ide> }) <ide> }
3
Go
Go
mitigate parallel pull issues
b779bc31022fddd165ac7d919cb06d78288a93c9
<ide><path>graph/graph.go <ide> func (graph *Graph) Create(layerData io.Reader, containerID, containerImage, com <ide> } <ide> <ide> // Register imports a pre-existing image into the graph. <add>// Returns nil if the image is already registered. <ide> func (graph *Graph) Register(img *image.Image, layerData io.Reader) (err error) { <ide> <ide> if err := image.ValidateID(img.ID); err != nil { <ide> func (graph *Graph) Register(img *image.Image, layerData io.Reader) (err error) <ide> graph.imageMutex.Lock(img.ID) <ide> defer graph.imageMutex.Unlock(img.ID) <ide> <add> // Skip register if image is already registered <add> if graph.Exists(img.ID) { <add> return nil <add> } <add> <ide> // The returned `error` must be named in this function's signature so that <ide> // `err` is not shadowed in this deferred cleanup. <ide> defer func() { <ide> func (graph *Graph) Register(img *image.Image, layerData io.Reader) (err error) <ide> } <ide> }() <ide> <del> // (This is a convenience to save time. Race conditions are taken care of by os.Rename) <del> if graph.Exists(img.ID) { <del> return fmt.Errorf("Image %s already exists", img.ID) <del> } <del> <ide> // Ensure that the image root does not exist on the filesystem <ide> // when it is not registered in the graph. <ide> // This is common when you switch from one graph driver to another <ide> func (graph *Graph) saveSize(root string, size int64) error { <ide> <ide> // SetDigest sets the digest for the image layer to the provided value. <ide> func (graph *Graph) SetDigest(id string, dgst digest.Digest) error { <add> graph.imageMutex.Lock(id) <add> defer graph.imageMutex.Unlock(id) <add> <ide> root := graph.imageRoot(id) <ide> if err := ioutil.WriteFile(filepath.Join(root, digestFileName), []byte(dgst.String()), 0600); err != nil { <ide> return fmt.Errorf("Error storing digest in %s/%s: %s", root, digestFileName, err) <ide> func (graph *Graph) SetDigest(id string, dgst digest.Digest) error { <ide> <ide> // GetDigest gets the digest for the provide image layer id. <ide> func (graph *Graph) GetDigest(id string) (digest.Digest, error) { <add> graph.imageMutex.Lock(id) <add> defer graph.imageMutex.Unlock(id) <add> <ide> root := graph.imageRoot(id) <ide> cs, err := ioutil.ReadFile(filepath.Join(root, digestFileName)) <ide> if err != nil {
1
Text
Text
simplify challenge teststring
cb3524ee31764dbc02214e11891962356910e708
<ide><path>curriculum/challenges/english/02-javascript-algorithms-and-data-structures/basic-javascript/subtract-one-number-from-another-with-javascript.english.md <ide> tests: <ide> - text: The variable <code>difference</code> should be equal to 12. <ide> testString: assert(difference === 12); <ide> - text: You should only subtract one number from 45. <del> testString: assert(/var\s*difference\s*=\s*45\s*-\s*[0-9]*;(?!\s*[a-zA-Z0-9]+)/.test(code)); <del> <add> testString: assert(/difference=45-33;?/.test(code.replace(/\s/g, ''))); <ide> ``` <ide> <ide> </section>
1
Python
Python
remove unnecessary argument in test
77cf2fb0f63a5520de3b8b3456ce4c9181b91d16
<ide><path>spacy/tests/regression/test_issue595.py <ide> def vocab(lemmatizer, tag_map): <ide> return Vocab(lemmatizer=lemmatizer, tag_map=tag_map) <ide> <ide> <del>def test_not_lemmatize_base_forms(vocab, lemmatizer): <add>def test_not_lemmatize_base_forms(vocab): <ide> doc = Doc(vocab, words=["Do", "n't", "feed", "the", "dog"]) <ide> feed = doc[2] <ide> feed.tag_ = u'VB'
1
Go
Go
use dockersuite for pullclientdisconnect test
9dd6c8822ddccff1952dfd611263ba551693ff7d
<ide><path>integration-cli/docker_cli_pull_test.go <ide> func (s *DockerTrustSuite) TestTrustedPullWithExpiredSnapshot(c *check.C) { <ide> } <ide> <ide> // Test that pull continues after client has disconnected. #15589 <del>func (s *DockerTrustSuite) TestPullClientDisconnect(c *check.C) { <add>func (s *DockerSuite) TestPullClientDisconnect(c *check.C) { <ide> testRequires(c, Network) <ide> <ide> repoName := "hello-world:latest"
1
Go
Go
use serf logger for memberlist log
2f03577ec8d39c77c94fbb58d439671973cfa375
<ide><path>libnetwork/drivers/overlay/ov_serf.go <ide> func (d *driver) serfInit() error { <ide> config.UserQuiescentPeriod = 50 * time.Millisecond <ide> <ide> config.LogOutput = &logWriter{} <add> config.MemberlistConfig.LogOutput = config.LogOutput <ide> <ide> s, err := serf.Create(config) <ide> if err != nil {
1
Python
Python
fix schedule_equal to handle none entry
40fd143ac1c48146f180a79b9ab87badeb68bc41
<ide><path>celery/beat.py <ide> def tick(self, event_t=event_t, min=min, heappop=heapq.heappop, <ide> return min(adjust(next_time_to_run) or max_interval, max_interval) <ide> <ide> def schedules_equal(self, old_schedules, new_schedules): <add> if old_schedules is new_schedules is None: <add> return True <add> if old_schedules is None or new_schedules is None: <add> return False <ide> if set(old_schedules.keys()) != set(new_schedules.keys()): <ide> return False <ide> for name, old_entry in old_schedules.items(): <ide><path>t/unit/app/test_beat.py <ide> def test_schedule_equal_task_vs_task_fail(self): <ide> b = {'a': self.create_schedule_entry(task='b')} <ide> assert not scheduler.schedules_equal(a, b) <ide> <add> def test_schedule_equal_none_entry_vs_entry(self): <add> scheduler = beat.Scheduler(app=self.app) <add> a = None <add> b = {'a': self.create_schedule_entry(task='b')} <add> assert not scheduler.schedules_equal(a, b) <add> <add> def test_schedule_equal_entry_vs_none_entry(self): <add> scheduler = beat.Scheduler(app=self.app) <add> a = {'a': self.create_schedule_entry(task='a')} <add> b = None <add> assert not scheduler.schedules_equal(a, b) <add> <add> def test_schedule_equal_none_entry_vs_none_entry(self): <add> scheduler = beat.Scheduler(app=self.app) <add> a = None <add> b = None <add> assert scheduler.schedules_equal(a, b) <add> <ide> <ide> def create_persistent_scheduler(shelv=None): <ide> if shelv is None:
2
Ruby
Ruby
remove unused require
7418469f3926b727d363527c6f6531f841edc980
<ide><path>activerecord/lib/active_record/scoping/named.rb <ide> # frozen_string_literal: true <ide> <ide> require "active_support/core_ext/array" <del>require "active_support/core_ext/hash/except" <ide> require "active_support/core_ext/kernel/singleton_class" <ide> <ide> module ActiveRecord
1
Ruby
Ruby
resolve error 4012. 18 errors => 13 errors
a919ba9ccd7c26179cd4490ce467ffa0f255ce80
<ide><path>Library/Homebrew/os/mac/xquartz.rb <ide> <ide> module OS <ide> module Mac <del> X11 = XQuartz = Module.new # rubocop:disable Style/MutableConstant <del> <del> module XQuartz <add> X11 = XQuartz = Module.new do # rubocop:disable Style/MutableConstant <ide> module_function <ide> <ide> DEFAULT_BUNDLE_PATH = Pathname.new("Applications/Utilities/XQuartz.app").freeze
1
Text
Text
unlock the assert api
797ec338ea34cbed0de70005c8c0c8b6e7111527
<ide><path>doc/api/assert.md <ide> # Assert <ide> <del>> Stability: 3 - Locked <add>> Stability: 2 - Stable <ide> <ide> The `assert` module provides a simple set of assertion tests that can be used to <ide> test invariants.
1
Javascript
Javascript
add some more info for browserstack sessions
b08427dde9307bb442e3632f24d7b7054a50350e
<ide><path>karma-shared.conf.js <ide> module.exports = function(config, specificOptions) { <ide> <ide> // BrowserStack config for Travis CI <ide> browserStack: { <del> startTunnel: false <add> startTunnel: false, <add> project: 'AngularJS', <add> name: specificOptions.testName, <add> build: process.env.TRAVIS_BUILD_NUMBER <ide> }, <ide> <ide> // For more browsers on Sauce Labs see:
1
Javascript
Javascript
fix typo in july
513a24e459b0460a6c16ea114722201ae771d8f6
<ide><path>src/locale/te.js <ide> import moment from '../moment'; <ide> <ide> export default moment.defineLocale('te', { <del> months : 'జనవరి_ఫిబ్రవరి_మార్చి_ఏప్రిల్_మే_జూన్_జూలై_ఆగస్టు_సెప్టెంబర్_అక్టోబర్_నవంబర్_డిసెంబర్'.split('_'), <del> monthsShort : 'జన._ఫిబ్ర._మార్చి_ఏప్రి._మే_జూన్_జూలై_ఆగ._సెప్._అక్టో._నవ._డిసె.'.split('_'), <add> months : 'జనవరి_ఫిబ్రవరి_మార్చి_ఏప్రిల్_మే_జూన్_జులై_ఆగస్టు_సెప్టెంబర్_అక్టోబర్_నవంబర్_డిసెంబర్'.split('_'), <add> monthsShort : 'జన._ఫిబ్ర._మార్చి_ఏప్రి._మే_జూన్_జులై_ఆగ._సెప్._అక్టో._నవ._డిసె.'.split('_'), <ide> monthsParseExact : true, <ide> weekdays : 'ఆదివారం_సోమవారం_మంగళవారం_బుధవారం_గురువారం_శుక్రవారం_శనివారం'.split('_'), <ide> weekdaysShort : 'ఆది_సోమ_మంగళ_బుధ_గురు_శుక్ర_శని'.split('_'), <ide><path>src/test/locale/te.js <ide> import moment from '../../moment'; <ide> localeModule('te'); <ide> <ide> test('parse', function (assert) { <del> var tests = 'జనవరి జన._ఫిబ్రవరి ఫిబ్ర._మార్చి మార్చి_ఏప్రిల్ ఏప్రి._మే మే_జూన్ జూన్_జూలై జూలై_ఆగస్టు ఆగ._సెప్టెంబర్ సెప్._అక్టోబర్ అక్టో._నవంబర్ నవ._డిసెంబర్ డిసె.'.split('_'), i; <add> var tests = 'జనవరి జన._ఫిబ్రవరి ఫిబ్ర._మార్చి మార్చి_ఏప్రిల్ ఏప్రి._మే మే_జూన్ జూన్_జులై జులై_ఆగస్టు ఆగ._సెప్టెంబర్ సెప్._అక్టోబర్ అక్టో._నవంబర్ నవ._డిసెంబర్ డిసె.'.split('_'), i; <ide> function equalTest(input, mmm, i) { <ide> assert.equal(moment(input, mmm).month(), i, input + ' should be month ' + (i + 1)); <ide> } <ide> test('format ordinal', function (assert) { <ide> }); <ide> <ide> test('format month', function (assert) { <del> var expected = 'జనవరి జన._ఫిబ్రవరి ఫిబ్ర._మార్చి మార్చి_ఏప్రిల్ ఏప్రి._మే మే_జూన్ జూన్_జూలై జూలై_ఆగస్టు ఆగ._సెప్టెంబర్ సెప్._అక్టోబర్ అక్టో._నవంబర్ నవ._డిసెంబర్ డిసె.'.split('_'), i; <add> var expected = 'జనవరి జన._ఫిబ్రవరి ఫిబ్ర._మార్చి మార్చి_ఏప్రిల్ ఏప్రి._మే మే_జూన్ జూన్_జులై జులై_ఆగస్టు ఆగ._సెప్టెంబర్ సెప్._అక్టోబర్ అక్టో._నవంబర్ నవ._డిసెంబర్ డిసె.'.split('_'), i; <ide> for (i = 0; i < expected.length; i++) { <ide> assert.equal(moment([2011, i, 1]).format('MMMM MMM'), expected[i], expected[i]); <ide> }
2
Text
Text
clarify translated title
93b23aca5d2c246f9e4323ecac99472caaa5dd54
<ide><path>curriculum/challenges/russian/03-front-end-libraries/front-end-libraries-projects/build-a-javascript-calculator.russian.md <ide> title: Build a JavaScript Calculator <ide> challengeType: 3 <ide> isRequired: true <ide> videoUrl: '' <del>localeTitle: Создайте калькулятор JavaScript <add>localeTitle: Создайте калькулятор на JavaScript <ide> --- <ide> <ide> ## Description
1
Text
Text
update simulate docs and reorg a little bit
99b80938af2fe9edce427f30f768bc88e3f929a8
<ide><path>docs/docs/09.4-test-utils.md <ide> next: clone-with-props.html <ide> <ide> `React.addons.TestUtils` makes it easy to test React components in the testing framework of your choice (we use [Jasmine](http://pivotal.github.io/jasmine/) with [jsdom](https://github.com/tmpvar/jsdom)). <ide> <del>#### ReactComponent renderIntoDocument(ReactComponent instance) <add>### Simulate <add> <add>```javascript <add>Simulate.{eventName}({ReactComponent|DOMElement} element, object eventData) <add>``` <add> <add>Simulate an event dispatch on a React component instance or browser DOM node with optional `eventData` event data. **This is possibly the single most useful utility in `ReactTestUtils`.** <add> <add>Example usage: <add> <add>```javascript <add>React.addons.TestUtils.Simulate.click(myComponent); <add>React.addons.TestUtils.Simulate.change(myComponent); <add>React.addons.TestUtils.Simulate.keydown(myComponent, {key: "Enter"}); <add>``` <add> <add>`Simulate` has a method for every event that React understands. <add> <add>### renderIntoDocument <add> <add>```javascript <add>ReactComponent renderIntoDocument(ReactComponent instance) <add>``` <ide> <ide> Render a component into a detached DOM node in the document. **This function requires a DOM.** <ide> <del>#### boolean isComponentOfType(ReactComponent instance, function componentClass) <add>### mockComponent <add> <add>```javascript <add>object mockComponent(function componentClass, string? tagName) <add>``` <add> <add>Pass a mocked component module to this method to augment it with useful methods that allow it to be used as a dummy React component. Instead of rendering as usual, the component will become a simple `<div>` (or other tag if `mockTagName` is provided) containing any provided children. <add> <add>### isComponentOfType <add> <add>```javascript <add>boolean isComponentOfType(ReactComponent instance, function componentClass) <add>``` <ide> <ide> Returns true if `instance` is an instance of a React `componentClass`. <ide> <del>#### boolean isDOMComponent(ReactComponent instance) <add>### isDOMComponent <add> <add>```javascript <add>boolean isDOMComponent(ReactComponent instance) <add>``` <ide> <ide> Returns true if `instance` is a DOM component (such as a `<div>` or `<span>`). <ide> <del>#### boolean isCompositeComponent(ReactComponent instance)` <add>### isCompositeComponent <add> <add>```javascript <add>boolean isCompositeComponent(ReactComponent instance)` <add>``` <ide> <ide> Returns true if `instance` is a composite component (created with `React.createClass()`) <ide> <del>#### boolean isCompositeComponentWithType(ReactComponent instance, function componentClass) <add>### isCompositeComponentWithType <add> <add>```javascript <add>boolean isCompositeComponentWithType(ReactComponent instance, function componentClass) <add>``` <ide> <ide> The combination of `isComponentOfType()` and `isCompositeComponent()`. <ide> <del>#### boolean isTextComponent(ReactComponent instance) <add>### isTextComponent <add> <add>```javascript <add>boolean isTextComponent(ReactComponent instance) <add>``` <ide> <ide> Returns true if `instance` is a plain text component. <ide> <del>#### array findAllInRenderedTree(ReactComponent tree, function test) <add>### findAllInRenderedTree <ide> <del>Traverse all components in `tree` and accumulate all components where `test(component)` is true. This is not that useful on its own, but it's used as a primitive for other test utils. 
<add>```javascript <add>array findAllInRenderedTree(ReactComponent tree, function test) <add>``` <ide> <del>#### array scryRenderedDOMComponentsWithClass(ReactCompoennt tree, string className) <add>Traverse all components in `tree` and accumulate all components where `test(component)` is true. This is not that useful on its own, but it's used as a primitive for other test utils. <ide> <del>Finds all instance of components in the rendered tree that are DOM components with the class name matching `className`. <add>### scryRenderedDOMComponentsWithClass <ide> <del>#### ReactComponent findRenderedDOMComponentWithClass(ReactComponent tree, string className) <add>```javascript <add>array scryRenderedDOMComponentsWithClass(ReactCompoennt tree, string className) <add>``` <ide> <del>Like `scryRenderedDOMComponentsWithClass()` but expects there to be one result, and returns that one result, or throws exception if there is any other number of matches besides one. <add>Finds all instance of components in the rendered tree that are DOM components with the class name matching `className`. <ide> <del>#### array scryRenderedDOMComponentsWithTag(ReactComponent tree, string tagName) <add>### findRenderedDOMComponentWithClass <ide> <del>Finds all instance of components in the rendered tree that are DOM components with the tag name matching `tagName`. <add>```javascript <add>ReactComponent findRenderedDOMComponentWithClass(ReactComponent tree, string className) <add>``` <ide> <del>#### ReactComponent findRenderedDOMComponentWithTag(ReactComponent tree, string tagName) <add>Like `scryRenderedDOMComponentsWithClass()` but expects there to be one result, and returns that one result, or throws exception if there is any other number of matches besides one. <ide> <del>Like `scryRenderedDOMComponentsWithTag()` but expects there to be one result, and returns that one result, or throws exception if there is any other number of matches besides one. <add>### scryRenderedDOMComponentsWithTag <ide> <del>#### array scryRenderedComponentsWithType(ReactComponent tree, function componentClass) <add>```javascript <add>array scryRenderedDOMComponentsWithTag(ReactComponent tree, string tagName) <add>``` <ide> <del>Finds all instances of components with type equal to `componentClass`. <add>Finds all instance of components in the rendered tree that are DOM components with the tag name matching `tagName`. <ide> <del>#### ReactComponent findRenderedComponentWithType(ReactComponent tree, function componentClass) <add>### findRenderedDOMComponentWithTag <ide> <del>Same as `scryRenderedComponentsWithType()` but expects there to be one result and returns that one result, or throws exception if there is any other number of matches besides one. <add>```javascript <add>ReactComponent findRenderedDOMComponentWithTag(ReactComponent tree, string tagName) <add>``` <ide> <del>#### object mockComponent(function componentClass, string? tagName) <add>Like `scryRenderedDOMComponentsWithTag()` but expects there to be one result, and returns that one result, or throws exception if there is any other number of matches besides one. <ide> <del>Pass a mocked component module to this method to augment it with useful methods that allow it to be used as a dummy React component. Instead of rendering as usual, the component will become a simple `<div>` (or other tag if `mockTagName` is provided) containing any provided children. 
<add>### scryRenderedComponentsWithType <ide> <del>#### Simulate.{eventName}({ReactComponent|DOMElement} element, object nativeEventData) <add>```javascript <add>array scryRenderedComponentsWithType(ReactComponent tree, function componentClass) <add>``` <ide> <del>Simulate an event dispatch on a React component instance or browser DOM node with optional `nativeEventData` event data. **This is possibly the single most useful utility in `ReactTestUtils`.** <add>Finds all instances of components with type equal to `componentClass`. <ide> <del>> Note: <del>> <del>> This helper is used to simulate browser events, so synthetic React events like `change` are not available. If you want to test `change`, simulate the underlying `input` browser event. <add>### findRenderedComponentWithType <ide> <del>Example usage: `React.addons.TestUtils.Simulate.click(myComponent)` <add>```javascript <add>ReactComponent findRenderedComponentWithType(ReactComponent tree, function componentClass) <add>``` <ide> <del>`Simulate` has a method for every event that React understands. <add>Same as `scryRenderedComponentsWithType()` but expects there to be one result and returns that one result, or throws exception if there is any other number of matches besides one.
1
Go
Go
remove obsolete comments
d5098fde9a64fdec2098c7a0c6bd777f88f7d4c9
<ide><path>trust/trusts.go <ide> func (t *TrustStore) fetch() { <ide> go func() { <ide> err := t.reload() <ide> if err != nil { <del> // TODO log <ide> log.Infof("Reload of trust graph failed: %s", err) <ide> } <ide> }()
1
Python
Python
fix inplace building of data sources
f0c7ba02e42d71d120cc782395f01acc6ae15db0
<ide><path>numpy/distutils/command/build_src.py <ide> def run(self): <ide> <ide> def build_sources(self): <ide> <add> if self.inplace: <add> self.get_package_dir = self.get_finalized_command('build_py')\ <add> .get_package_dir <add> <ide> self.build_py_modules_sources() <ide> <ide> for libname_info in self.libraries: <ide> def build_data_files_sources(self): <ide> elif isinstance(data,tuple): <ide> d,files = data <ide> if self.inplace: <del> build_dir = d <add> build_dir = self.get_package_dir('.'.join(d.split(os.sep))) <ide> else: <ide> build_dir = os.path.join(self.build_src,d) <ide> funcs = filter(callable,files) <ide> def build_py_modules_sources(self): <ide> return <ide> log.info('building py_modules sources') <ide> new_py_modules = [] <del> if self.inplace: <del> get_package_dir = self.get_finalized_command('build_py').get_package_dir <ide> for source in self.py_modules: <ide> if is_sequence(source) and len(source)==3: <ide> package, module_base, source = source <ide> if self.inplace: <del> build_dir = get_package_dir(package) <add> build_dir = self.get_package_dir(package) <ide> else: <ide> build_dir = os.path.join(self.build_src, <ide> os.path.join(*package.split('.'))) <ide> def build_extension_sources(self, ext): <ide> modpath = fullname.split('.') <ide> package = '.'.join(modpath[0:-1]) <ide> <del> <ide> if self.inplace: <del> build_py = self.get_finalized_command('build_py') <del> self.ext_target_dir = build_py.get_package_dir(package) <add> self.ext_target_dir = self.get_package_dir(package) <ide> <ide> sources = self.generate_sources(sources, ext) <ide>
1
Python
Python
apply patch from #953 to fix two doctests
de6264438e148882e58f5925f9176966216b940f
<ide><path>numpy/matlib.py <ide> def ones(shape, dtype=None, order='C'): <ide> [ 1., 1., 1.]]) <ide> <ide> >>> np.matlib.ones(2) <del> matrix([[ 1., 1.]] <add> matrix([[ 1., 1.]]) <ide> <ide> """ <ide> a = ndarray.__new__(matrix, shape, dtype, order=order) <ide> def zeros(shape, dtype=None, order='C'): <ide> [ 0., 0., 0.]]) <ide> <ide> >>> np.matlib.zeros(2) <del> matrix([[ 0., 0.]] <add> matrix([[ 0., 0.]]) <ide> <ide> """ <ide> a = ndarray.__new__(matrix, shape, dtype, order=order)
1
Javascript
Javascript
do math on numbers in compare.js, not strings
e7b8bad3d983e23e9b4981e6cfd05bba16d3e3b5
<ide><path>benchmark/compare.js <ide> function compare() { <ide> if (show === 'green' && !g || show === 'red' && !r) <ide> return; <ide> <del> var r0 = util.format('%s%s: %d%s', g, nodes[0], n0, g ? reset : ''); <del> var r1 = util.format('%s%s: %d%s', r, nodes[1], n1, r ? reset : ''); <add> var r0 = util.format('%s%s: %d%s', g, nodes[0], n0.toPrecision(5), g ? reset : ''); <add> var r1 = util.format('%s%s: %d%s', r, nodes[1], n1.toPrecision(5), r ? reset : ''); <ide> var pct = c + pct + '%' + reset; <ide> var l = util.format('%s: %s %s', bench, r0, r1); <ide> maxLen = Math.max(l.length + pct.length, maxLen); <ide> function avg(list) { <ide> var q = Math.floor(list.length / 4) || 1; <ide> list = list.slice(q, -q); <ide> } <del> return (list.reduce(function(a, b) { <add> return list.reduce(function(a, b) { <ide> return a + b; <del> }, 0) / list.length).toPrecision(5); <add> }, 0) / list.length; <ide> }
1
Go
Go
set permission on atomic file write
1cd74902810b0eefddd173239cd95b506b34e678
<ide><path>pkg/ioutils/fswriters.go <ide> func NewAtomicFileWriter(filename string, perm os.FileMode) (io.WriteCloser, err <ide> if err != nil { <ide> return nil, err <ide> } <add> <ide> abspath, err := filepath.Abs(filename) <ide> if err != nil { <ide> return nil, err <ide> } <ide> return &atomicFileWriter{ <del> f: f, <del> fn: abspath, <add> f: f, <add> fn: abspath, <add> perm: perm, <ide> }, nil <ide> } <ide> <ide> func AtomicWriteFile(filename string, data []byte, perm os.FileMode) error { <ide> n, err := f.Write(data) <ide> if err == nil && n < len(data) { <ide> err = io.ErrShortWrite <add> f.(*atomicFileWriter).writeErr = err <ide> } <ide> if err1 := f.Close(); err == nil { <ide> err = err1 <ide> type atomicFileWriter struct { <ide> f *os.File <ide> fn string <ide> writeErr error <add> perm os.FileMode <ide> } <ide> <ide> func (w *atomicFileWriter) Write(dt []byte) (int, error) { <ide> func (w *atomicFileWriter) Write(dt []byte) (int, error) { <ide> <ide> func (w *atomicFileWriter) Close() (retErr error) { <ide> defer func() { <del> if retErr != nil { <add> if retErr != nil || w.writeErr != nil { <ide> os.Remove(w.f.Name()) <ide> } <ide> }() <ide> func (w *atomicFileWriter) Close() (retErr error) { <ide> if err := w.f.Close(); err != nil { <ide> return err <ide> } <add> if err := os.Chmod(w.f.Name(), w.perm); err != nil { <add> return err <add> } <ide> if w.writeErr == nil { <ide> return os.Rename(w.f.Name(), w.fn) <ide> } <ide><path>pkg/ioutils/fswriters_test.go <ide> func TestAtomicWriteToFile(t *testing.T) { <ide> defer os.RemoveAll(tmpDir) <ide> <ide> expected := []byte("barbaz") <del> if err := AtomicWriteFile(filepath.Join(tmpDir, "foo"), expected, 0600); err != nil { <add> if err := AtomicWriteFile(filepath.Join(tmpDir, "foo"), expected, 0666); err != nil { <ide> t.Fatalf("Error writing to file: %v", err) <ide> } <ide> <ide> func TestAtomicWriteToFile(t *testing.T) { <ide> if bytes.Compare(actual, expected) != 0 { <ide> t.Fatalf("Data mismatch, expected %q, got %q", expected, actual) <ide> } <add> <add> st, err := os.Stat(filepath.Join(tmpDir, "foo")) <add> if err != nil { <add> t.Fatalf("Error statting file: %v", err) <add> } <add> if expected := os.FileMode(0666); st.Mode() != expected { <add> t.Fatalf("Mode mismatched, expected %o, got %o", expected, st.Mode()) <add> } <ide> }
2
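The Go patch above writes to a temporary file, applies the requested permissions, and renames into place. A hedged Python sketch of that atomic-write-with-chmod pattern (the atomic_write name and the 0o666 default are illustrative, not Docker's API):

```python
import os
import tempfile

def atomic_write(path, data, mode=0o666):
    """Temp file -> chmod -> rename; a sketch of the pattern, not Docker's code."""
    directory = os.path.dirname(os.path.abspath(path))
    fd, tmp_path = tempfile.mkstemp(dir=directory)
    try:
        with os.fdopen(fd, "wb") as tmp:
            tmp.write(data)
        os.chmod(tmp_path, mode)     # set the final permissions explicitly
        os.replace(tmp_path, path)   # atomic rename on the same filesystem
    except BaseException:
        os.unlink(tmp_path)          # drop the temp file if anything failed
        raise

atomic_write("/tmp/example-foo", b"barbaz", 0o666)
```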
PHP
PHP
add allowdynamicproperties attribute to shell
68a138a9b9c343d5cb68a4a67c5e5af83a8616e1
<ide><path>src/Console/Shell.php <ide> * @deprecated 3.6.0 ShellDispatcher and Shell will be removed in 5.0 <ide> * @method int|bool|null|void main(...$args) Main entry method for the shell. <ide> */ <add>#[\AllowDynamicProperties] <ide> class Shell <ide> { <ide> use LocatorAwareTrait;
1
Ruby
Ruby
add tests for `merge_bottle_spec`
4cbd4f296bd101ba17b94b1e9383fc5d7f5ba5fd
<ide><path>Library/Homebrew/dev-cmd/bottle.rb <ide> def merge_bottle_spec(old_keys, old_bottle_spec, new_bottle_hash) <ide> new_values = { <ide> root_url: new_bottle_hash["root_url"], <ide> prefix: new_bottle_hash["prefix"], <del> cellar: new_bottle_hash["cellar"].to_sym, <add> cellar: new_bottle_hash["cellar"], <ide> rebuild: new_bottle_hash["rebuild"], <ide> } <ide> <ide> old_keys.each do |key| <ide> next if key == :sha256 <ide> <del> old_value = old_bottle_spec.send(key) <del> new_value = new_values[key] <del> next if key == :cellar && old_value == :any && new_value == :any_skip_relocation <add> old_value = old_bottle_spec.send(key).to_s <add> new_value = new_values[key].to_s <add> next if key == :cellar && old_value == "any" && new_value == "any_skip_relocation" <ide> next if old_value.present? && new_value == old_value <ide> <ide> mismatches << "#{key}: old: #{old_value.inspect}, new: #{new_value.inspect}" <ide> end <ide> <add> return [mismatches, checksums] unless old_keys.include? :sha256 <add> <ide> old_bottle_spec.collector.each_key do |tag| <ide> old_value = old_bottle_spec.collector[tag].hexdigest <del> new_value = new_bottle_hash["tags"][tag.to_s] <add> new_value = new_bottle_hash.dig("tags", tag.to_s) <ide> if new_value.present? <ide> mismatches << "sha256 => #{tag}" <ide> else <ide><path>Library/Homebrew/test/dev-cmd/bottle_spec.rb <ide> def stub_hash(parameters) <ide> "d9cc50eec8ac243148a121049c236cba06af4a0b1156ab397d0a2850aa79c137", <ide> ) <ide> end <add> <add> describe "#merge_bottle_spec" do <add> it "allows new bottle hash to be empty" do <add> valid_keys = [:root_url, :prefix, :cellar, :rebuild, :sha256] <add> old_spec = BottleSpecification.new <add> old_spec.sha256("f59bc65c91e4e698f6f050e1efea0040f57372d4dcf0996cbb8f97ced320403b" => :big_sur) <add> expect { homebrew.merge_bottle_spec(valid_keys, old_spec, {}) }.not_to raise_error <add> end <add> <add> it "checks for conflicting root URL" do <add> old_spec = BottleSpecification.new <add> old_spec.root_url("https://failbrew.bintray.com/bottles") <add> new_hash = { "root_url" => "https://testbrew.bintray.com/bottles" } <add> expect(homebrew.merge_bottle_spec([:root_url], old_spec, new_hash)).to eq [ <add> ['root_url: old: "https://failbrew.bintray.com/bottles", new: "https://testbrew.bintray.com/bottles"'], <add> [], <add> ] <add> end <add> <add> it "checks for conflicting prefix" do <add> old_spec = BottleSpecification.new <add> old_spec.prefix("/opt/failbrew") <add> new_hash = { "prefix" => "/opt/testbrew" } <add> expect(homebrew.merge_bottle_spec([:prefix], old_spec, new_hash)).to eq [ <add> ['prefix: old: "/opt/failbrew", new: "/opt/testbrew"'], <add> [], <add> ] <add> end <add> <add> it "checks for conflicting cellar" do <add> old_spec = BottleSpecification.new <add> old_spec.cellar("/opt/failbrew/Cellar") <add> new_hash = { "cellar" => "/opt/testbrew/Cellar" } <add> expect(homebrew.merge_bottle_spec([:cellar], old_spec, new_hash)).to eq [ <add> ['cellar: old: "/opt/failbrew/Cellar", new: "/opt/testbrew/Cellar"'], <add> [], <add> ] <add> end <add> <add> it "checks for conflicting rebuild number" do <add> old_spec = BottleSpecification.new <add> old_spec.rebuild(1) <add> new_hash = { "rebuild" => 2 } <add> expect(homebrew.merge_bottle_spec([:rebuild], old_spec, new_hash)).to eq [ <add> ['rebuild: old: "1", new: "2"'], <add> [], <add> ] <add> end <add> <add> it "checks for conflicting checksums" do <add> old_spec = BottleSpecification.new <add> 
old_spec.sha256("109c0cb581a7b5d84da36d84b221fb9dd0f8a927b3044d82611791c9907e202e" => :catalina) <add> old_spec.sha256("7571772bf7a0c9fe193e70e521318b53993bee6f351976c9b6e01e00d13d6c3f" => :mojave) <add> new_hash = { "tags" => { "catalina" => "ec6d7f08412468f28dee2be17ad8cd8b883b16b34329efcecce019b8c9736428" } } <add> expect(homebrew.merge_bottle_spec([:sha256], old_spec, new_hash)).to eq [ <add> ["sha256 => catalina"], <add> [{ "7571772bf7a0c9fe193e70e521318b53993bee6f351976c9b6e01e00d13d6c3f" => :mojave }], <add> ] <add> end <add> end <ide> end <ide> <ide> describe "brew bottle --merge", :integration_test, :needs_linux do
2
Python
Python
remove stray func from test
c36122a7ba2cdc69f94f5732f26428329be54200
<ide><path>rest_framework/tests/test_pagination.py <ide> def count(self): <ide> # pretend like we don't know how many pages we have <ide> return None <ide> <del> def default_page_token(self): <del> return None <del> <ide> def page(self, token=None): <ide> if token: <ide> try:
1
Java
Java
remove redundant protectivelywrap method
0ab352f849dada233f287fcb27b009985a6c1fc1
<ide><path>rxjava-core/src/main/java/rx/Observable.java <ide> public final <R> Observable<R> parallel(final Func1<Observable<T>, Observable<R> <ide> return lift(new OperatorParallel<T, R>(f, s)); <ide> } <ide> <del> /** <del> * Protects against errors being thrown from Observer implementations and ensures <del> * onNext/onError/onCompleted contract compliance. <del> * <p> <del> * See https://github.com/Netflix/RxJava/issues/216 for a discussion on "Guideline 6.4: Protect calls to <del> * user code from within an Observer" <del> */ <del> private Subscription protectivelyWrapAndSubscribe(Subscriber<? super T> o) { <del> return subscribe(new SafeSubscriber<T>(o)); <del> } <del> <ide> /** <ide> * Returns a {@link ConnectableObservable}, which waits until its {@link ConnectableObservable#connect connect} method is called before it begins emitting items to those {@link Observer}s that <ide> * have subscribed to it. <ide> public final Observable<T> startWith(T[] values, Scheduler scheduler) { <ide> * if the Observable tries to call {@code onError} <ide> */ <ide> public final Subscription subscribe() { <del> return protectivelyWrapAndSubscribe(new Subscriber<T>() { <add> return subscribe(new Subscriber<T>() { <ide> <ide> @Override <ide> public final void onCompleted() { <ide> public final Subscription subscribe(final Action1<? super T> onNext) { <ide> throw new IllegalArgumentException("onNext can not be null"); <ide> } <ide> <del> /** <del> * Wrapping since raw functions provided by the user are being invoked. <del> * <del> * See https://github.com/Netflix/RxJava/issues/216 for discussion on "Guideline 6.4: Protect calls to <del> * user code from within an Observer" <del> */ <del> return protectivelyWrapAndSubscribe(new Subscriber<T>() { <add> return subscribe(new Subscriber<T>() { <ide> <ide> @Override <ide> public final void onCompleted() { <ide> public final Subscription subscribe(final Action1<? super T> onNext, final Actio <ide> throw new IllegalArgumentException("onError can not be null"); <ide> } <ide> <del> /** <del> * Wrapping since raw functions provided by the user are being invoked. <del> * <del> * See https://github.com/Netflix/RxJava/issues/216 for discussion on <del> * "Guideline 6.4: Protect calls to user code from within an Observer" <del> */ <del> return protectivelyWrapAndSubscribe(new Subscriber<T>() { <add> return subscribe(new Subscriber<T>() { <ide> <ide> @Override <ide> public final void onCompleted() { <ide> public final Subscription subscribe(final Action1<? super T> onNext, final Actio <ide> throw new IllegalArgumentException("onComplete can not be null"); <ide> } <ide> <del> /** <del> * Wrapping since raw functions provided by the user are being invoked. <del> * <del> * See https://github.com/Netflix/RxJava/issues/216 for discussion on "Guideline 6.4: Protect calls to user code from within an Observer" <del> */ <del> return protectivelyWrapAndSubscribe(new Subscriber<T>() { <add> return subscribe(new Subscriber<T>() { <ide> <ide> @Override <ide> public final void onCompleted() { <ide> public final Subscription unsafeSubscribe(Subscriber<? super T> subscriber) { <ide> * For more information see the <ide> * <a href="https://github.com/Netflix/RxJava/wiki/Observable">RxJava Wiki</a> <ide> * <del> * @param observer <add> * @param subscriber <ide> * the {@link Subscriber} <ide> * @return a {@link Subscription} reference with which Subscribers that are {@link Observer}s can <ide> * unsubscribe from the Observable <ide> public final Subscription unsafeSubscribe(Subscriber<? 
super T> subscriber) { <ide> * @throws RuntimeException <ide> * if the {@link Subscriber}'s {@code onError} method itself threw a {@code Throwable} <ide> */ <del> public final Subscription subscribe(Subscriber<? super T> observer) { <add> public final Subscription subscribe(Subscriber<? super T> subscriber) { <ide> // allow the hook to intercept and/or decorate <ide> OnSubscribe<T> onSubscribeFunction = hook.onSubscribeStart(this, onSubscribe); <ide> // validate and proceed <del> if (observer == null) { <add> if (subscriber == null) { <ide> throw new IllegalArgumentException("observer can not be null"); <ide> } <ide> if (onSubscribeFunction == null) { <ide> public final Subscription subscribe(Subscriber<? super T> observer) { <ide> * to user code from within an Observer" <ide> */ <ide> // if not already wrapped <del> if (!(observer instanceof SafeSubscriber)) { <add> if (!(subscriber instanceof SafeSubscriber)) { <ide> // assign to `observer` so we return the protected version <del> observer = new SafeSubscriber<T>(observer); <add> subscriber = new SafeSubscriber<T>(subscriber); <ide> } <del> onSubscribeFunction.call(observer); <del> final Subscription returnSubscription = hook.onSubscribeReturn(observer); <add> onSubscribeFunction.call(subscriber); <add> final Subscription returnSubscription = hook.onSubscribeReturn(subscriber); <ide> // we return it inside a Subscription so it can't be cast back to Subscriber <ide> return Subscriptions.create(new Action0() { <ide> <ide> public void call() { <ide> Exceptions.throwIfFatal(e); <ide> // if an unhandled error occurs executing the onSubscribe we will propagate it <ide> try { <del> observer.onError(hook.onSubscribeError(e)); <add> subscriber.onError(hook.onSubscribeError(e)); <ide> } catch (OnErrorNotImplementedException e2) { <ide> // special handling when onError is not implemented ... we just rethrow <ide> throw e2;
1
Javascript
Javascript
change tests to jest
1159935fe85f9a0014fb1045322d378d701a821c
<ide><path>test/Errors.test.js <ide> describe("Errors", () => { <ide> entry: "./entry-point-error-loader-required.js" <ide> }, <ide> (errors, warnings) => { <del> warnings.length.should.be.eql(1); <del> warnings[0] <del> .split("\n")[1] <del> .should.match(/^Module Warning \(from .\/emit-error-loader.js\):$/); <del> errors.length.should.be.eql(1); <del> errors[0] <del> .split("\n")[1] <del> .should.match(/^Module Error \(from .\/emit-error-loader.js\):$/); <add> expect(warnings).toHaveLength(1); <add> expect(warnings[0].split("\n")[1]).toMatch( <add> /^Module Warning \(from .\/emit-error-loader.js\):$/ <add> ); <add> expect(errors).toHaveLength(1); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module Error \(from .\/emit-error-loader.js\):$/ <add> ); <ide> } <ide> ), <ide> getErrorsPromise( <ide> describe("Errors", () => { <ide> entry: path.resolve(base, "./emit-error-loader") + "!./entry-point.js" <ide> }, <ide> (errors, warnings) => { <del> warnings.length.should.be.eql(1); <del> warnings[0] <del> .split("\n")[1] <del> .should.match(/^Module Warning \(from .\/emit-error-loader.js\):$/); <del> errors.length.should.be.eql(1); <del> errors[0] <del> .split("\n")[1] <del> .should.match(/^Module Error \(from .\/emit-error-loader.js\):$/); <add> expect(warnings).toHaveLength(1); <add> expect(warnings[0].split("\n")[1]).toMatch( <add> /^Module Warning \(from .\/emit-error-loader.js\):$/ <add> ); <add> expect(errors).toHaveLength(1); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module Error \(from .\/emit-error-loader.js\):$/ <add> ); <ide> } <ide> ), <ide> getErrorsPromise( <ide> describe("Errors", () => { <ide> } <ide> }, <ide> (errors, warnings) => { <del> warnings.length.should.be.eql(1); <del> warnings[0] <del> .split("\n")[1] <del> .should.match(/^Module Warning \(from .\/emit-error-loader.js\):$/); <del> errors.length.should.be.eql(2); <del> errors[0] <del> .split("\n")[1] <del> .should.match(/^Module Error \(from .\/emit-error-loader.js\):$/); <del> errors[1] <del> .split("\n")[1] <del> .should.match( <del> /^Module build failed \(from \(webpack\)\/node_modules\/json-loader\/index.js\):$/ <del> ); <add> expect(warnings).toHaveLength(1); <add> expect(warnings[0].split("\n")[1]).toMatch( <add> /^Module Warning \(from .\/emit-error-loader.js\):$/ <add> ); <add> expect(errors).toHaveLength(2); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module Error \(from .\/emit-error-loader.js\):$/ <add> ); <add> expect(errors[1].split("\n")[1]).toMatch( <add> /^Module build failed \(from \(webpack\)\/node_modules\/json-loader\/index.js\):$/ <add> ); <ide> } <ide> ), <ide> getErrorsPromise( <ide> describe("Errors", () => { <ide> } <ide> }, <ide> (errors, warnings) => { <del> errors.length.should.be.eql(1); <del> errors[0] <del> .split("\n")[1] <del> .should.match( <del> /^Module build failed \(from .\/async-error-loader.js\):$/ <del> ); <add> expect(errors).toHaveLength(1); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module build failed \(from .\/async-error-loader.js\):$/ <add> ); <ide> } <ide> ), <ide> getErrorsPromise( <ide> describe("Errors", () => { <ide> } <ide> }, <ide> (errors, warnings) => { <del> errors.length.should.be.eql(1); <del> errors[0] <del> .split("\n")[1] <del> .should.match( <del> /^Module build failed \(from .\/throw-error-loader.js\):$/ <del> ); <add> expect(errors).toHaveLength(1); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module build failed \(from .\/throw-error-loader.js\):$/ <add> ); <ide> } <ide> ), <ide> 
getErrorsPromise( <ide> describe("Errors", () => { <ide> } <ide> }, <ide> (errors, warnings) => { <del> warnings.length.should.be.eql(2); <del> warnings[0] <del> .split("\n")[1] <del> .should.match( <del> /^Module Warning \(from .\/irregular-error-loader.js\):$/ <del> ); <del> warnings[1] <del> .split("\n")[1] <del> .should.match( <del> /^Module Warning \(from .\/irregular-error-loader.js\):$/ <del> ); <add> expect(warnings).toHaveLength(2); <add> expect(warnings[0].split("\n")[1]).toMatch( <add> /^Module Warning \(from .\/irregular-error-loader.js\):$/ <add> ); <add> expect(warnings[1].split("\n")[1]).toMatch( <add> /^Module Warning \(from .\/irregular-error-loader.js\):$/ <add> ); <ide> <del> errors.length.should.be.eql(3); <del> errors[0] <del> .split("\n")[1] <del> .should.match( <del> /^Module Error \(from .\/irregular-error-loader.js\):$/ <del> ); <del> errors[1] <del> .split("\n")[1] <del> .should.match( <del> /^Module Error \(from .\/irregular-error-loader.js\):$/ <del> ); <del> errors[2] <del> .split("\n")[1] <del> .should.match( <del> /^Module build failed \(from .\/irregular-error-loader.js\):$/ <del> ); <add> expect(errors).toHaveLength(3); <add> expect(errors[0].split("\n")[1]).toMatch( <add> /^Module Error \(from .\/irregular-error-loader.js\):$/ <add> ); <add> expect(errors[1].split("\n")[1]).toMatch( <add> /^Module Error \(from .\/irregular-error-loader.js\):$/ <add> ); <add> expect(errors[2].split("\n")[1]).toMatch( <add> /^Module build failed \(from .\/irregular-error-loader.js\):$/ <add> ); <ide> } <ide> ) <ide> ]); <ide> describe("Errors", () => { <ide> entry: path.resolve(base, "./no-return-loader") + "!./entry-point.js" <ide> }, <ide> (errors, warnings) => { <del> errors.length.should.be.eql(1); <add> expect(errors).toHaveLength(1); <ide> const messages = errors[0].split("\n"); <del> messages[1].should.match(/^Module build failed:$/); <del> messages[2].should.match(/didn't return/); <add> expect(messages[1]).toMatch(/^Module build failed:$/); <add> expect(messages[2]).toMatch(/didn't return/); <ide> done(); <ide> } <ide> ); <ide><path>test/watchCases/scope-hoisting/caching-inner-source/0/index.js <ide> it("should not crash when scope-hoisted modules change", function() { <del> require("./module").default.should.be.eql(WATCH_STEP); <del>}) <add> expect(require("./module").default).toBe(WATCH_STEP); <add>});
2
Text
Text
add 16.3.2 changelog
01402f4ad922b5467812586567519e9e5bbd595f
<ide><path>CHANGELOG.md <ide> </summary> <ide> </details> <ide> <add>## 16.3.2 (April 16, 2018) <add> <add>### React <add> <add>* Improve the error message when passing `null` or `undefined` to `React.cloneElement`. ([@nicolevy](https://github.com/nicolevy) in [#12534](https://github.com/facebook/react/pull/12534)) <add> <add>### React DOM <add> <add>* Fix an IE crash in development when using `<StrictMode>`. ([@bvaughn](https://github.com/bvaughn) in [#12546](https://github.com/facebook/react/pull/12546)) <add>* Fix labels in User Timing measurements for new component types. ([@bvaughn](https://github.com/bvaughn) in [#12609](https://github.com/facebook/react/pull/12609)) <add>* Improve the warning about wrong component type casing. ([@nicolevy](https://github.com/nicolevy) in [#12533](https://github.com/facebook/react/pull/12533)) <add>* Improve general performance in development mode. ([@gaearon](https://github.com/gaearon) in [#12537](https://github.com/facebook/react/pull/12537)) <add>* Improve performance of the experimental `unstable_observedBits` API with nesting. ([@gaearon](https://github.com/gaearon) in [#12543](https://github.com/facebook/react/pull/12543)) <add> <add>### React Test Renderer <add> <add>* Add a UMD build. ([@bvaughn](https://github.com/bvaughn) in [#12594](https://github.com/facebook/react/pull/12594)) <add> <ide> ## 16.3.1 (April 3, 2018) <ide> <ide> ### React
1
PHP
PHP
use env value for redis queue name
4525f36cacc15cb82e8df4ef2e61f24423fcc639
<ide><path>config/queue.php <ide> 'redis' => [ <ide> 'driver' => 'redis', <ide> 'connection' => 'default', <del> 'queue' => 'default', <add> 'queue' => env('REDIS_QUEUE', 'default'), <ide> 'retry_after' => 90, <ide> 'block_for' => null, <ide> ],
1
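The config change above reads the queue name from the environment with a default. The same lookup in Python, for comparison (the variable name is just carried over from the patch):

```python
import os

# Equivalent of env('REDIS_QUEUE', 'default'): prefer the environment
# variable, otherwise fall back to the default queue name.
redis_queue = os.environ.get("REDIS_QUEUE", "default")
print(redis_queue)
```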
Text
Text
change language and make the command one line
335339ef6104f7740fbf499eca772f1b09039ac2
<ide><path>docs/build-instructions/linux.md <ide> have Node.js installed, or node isn't identified as Node.js on your machine. <ide> If it's the latter, entering `sudo ln -s /usr/bin/nodejs /usr/bin/node` into <ide> your terminal may fix the issue. <ide> <del>## You can also use Alternatives: <add>#### You can also use Alternatives <ide> <del>On some variants it's preferrable for you to use Alternatives so can easily <del>rollback and change the binary paths: <add>On some variants (mostly Debian based Distros) it's preferrable for you to use <add>Alternatives so that changes the binary paths can be fixed or alterered easily: <ide> <ide> ``` <del>sudo update-alternatives --install /usr/bin/node node /usr/bin/nodejs 1 \ <del> --slave /usr/bin/js js /usr/bin/nodejs <add>sudo update-alternatives --install /usr/bin/node node /usr/bin/nodejs 1 --slave /usr/bin/js js /usr/bin/nodejs <ide> ``` <ide> <ide> ### Linux build error reports in atom/atom
1
Javascript
Javascript
fix toggle wrap on dag code page
a1632edac783878cb82d9099f4f973c9a10b0d0f
<ide><path>airflow/www/static/js/dag_code.js <ide> * under the License. <ide> */ <ide> <del>/* global $ */ <add>/* global window, $ */ <ide> <ide> import getMetaValue from './meta_value'; <ide> <add>function toggleWrap() { <add> $('.code pre').toggleClass('wrap'); <add>} <add> <ide> const isWrapped = getMetaValue('wrapped'); <ide> <ide> // pygments generates the HTML so set wrap toggle via js <del>if (isWrapped) { <del> $('.code pre').toggleClass('wrap'); <add>if (isWrapped === 'True') { <add> toggleWrap(); <ide> } <add> <add>window.toggleWrap = toggleWrap;
1
Ruby
Ruby
join strings rather than resizing buffer
081e15f104b7da597114a8539f519953f166e096
<ide><path>actionpack/lib/action_view/helpers/number_helper.rb <ide> def number_to_phone(number, options = {}) <ide> number.slice!(0, 1) if number.starts_with?('-') <ide> end <ide> <del> str = "" <add> str = [] <ide> str << "+#{country_code}#{delimiter}" unless country_code.blank? <ide> str << number <ide> str << " x #{extension}" unless extension.blank? <del> ERB::Util.html_escape(str) <add> ERB::Util.html_escape(str.join) <ide> end <ide> <ide> # Formats a +number+ into a currency string (e.g., $13.65). You can customize the format
1
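The Ruby patch above builds the phone string by collecting the parts in an array and joining once. A small Python sketch of the same build-then-join approach (the function and argument names are made up for the example):

```python
def format_phone(number, country_code="", extension="", delimiter=" "):
    # Accumulate pieces in a list and join once at the end instead of
    # repeatedly appending to a growing string (same idea as the patch above).
    parts = []
    if country_code:
        parts.append(f"+{country_code}{delimiter}")
    parts.append(number)
    if extension:
        parts.append(f" x {extension}")
    return "".join(parts)

print(format_phone("555-1234", country_code="1", extension="22"))
# +1 555-1234 x 22
```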
Python
Python
add config resolver test
8d6448ccf7926f1d219a25b467173a231553db20
<ide><path>spacy/tests/serialize/test_serialize_config.py <ide> from spacy.ml.models import build_Tok2Vec_model, build_tb_parser_model <ide> from spacy.ml.models import MultiHashEmbed, MaxoutWindowEncoder <ide> from spacy.schemas import ConfigSchema, ConfigSchemaPretrain <add>from catalogue import RegistryError <ide> <ide> <ide> from ..util import make_tempdir <ide> def test_serialize_parser(parser_config_string): <ide> assert model.get_ref("lower").get_dim("nI") == 66 <ide> <ide> <del> <ide> def test_config_nlp_roundtrip(): <ide> """Test that a config prduced by the nlp object passes training config <ide> validation.""" <ide> def test_config_validate_literal(parser_config_string): <ide> nlp.add_pipe("parser", config=config) <ide> config["model"]["state_type"] = "ner" <ide> nlp.add_pipe("parser", config=config) <add> <add> <add>def test_config_only_resolve_relevant_blocks(): <add> """Test that only the relevant blocks are resolved in the different methods <add> and that invalid blocks are ignored if needed. For instance, the [initialize] <add> shouldn't be resolved at runtime. <add> """ <add> nlp = English() <add> config = nlp.config <add> config["training"]["before_to_disk"] = {"@misc": "nonexistent"} <add> config["initialize"]["lookups"] = {"@misc": "nonexistent"} <add> # This shouldn't resolve [training] or [initialize] <add> nlp = load_model_from_config(config, auto_fill=True) <add> # This will raise for nonexistent value <add> with pytest.raises(RegistryError): <add> nlp.initialize() <add> nlp.config["initialize"]["lookups"] = None <add> nlp.initialize()
1
Go
Go
improve wait for lxc and driver interface
8c9f62d037a1bc82742ea316adaaf658af56b7c3
<ide><path>container.go <ide> func (container *Container) monitor(callback execdriver.StartCallback) error { <ide> <ide> if container.process == nil { <ide> // This happends when you have a GHOST container with lxc <del> err = container.runtime.Wait(container, 0) <add> err = container.runtime.WaitGhost(container) <ide> } else { <ide> exitCode, err = container.runtime.Run(container, callback) <ide> } <ide><path>execdriver/chroot/driver.go <ide> package chroot <ide> import ( <ide> "github.com/dotcloud/docker/execdriver" <ide> "os/exec" <del> "time" <ide> ) <ide> <ide> type driver struct { <ide> func (d *driver) Kill(p *execdriver.Process, sig int) error { <ide> return p.Process.Kill() <ide> } <ide> <del>func (d *driver) Wait(id string, duration time.Duration) error { <del> panic("No Implemented") <add>func (d *driver) Wait(id string) error { <add> panic("Not Implemented") <ide> } <ide> <ide> func (d *driver) Version() string { <ide><path>execdriver/driver.go <ide> import ( <ide> "errors" <ide> "os/exec" <ide> "syscall" <del> "time" <ide> ) <ide> <ide> var ( <ide> type Driver interface { <ide> Run(c *Process, startCallback StartCallback) (int, error) // Run executes the process and blocks until the process exits and returns the exit code <ide> Kill(c *Process, sig int) error <ide> // TODO: @crosbymichael @creack wait should probably return the exit code <del> Wait(id string, duration time.Duration) error // Wait on an out of process...process - lxc ghosts <add> Wait(id string) error // Wait on an out of process...process - lxc ghosts <ide> Version() string <ide> Name() string <ide> } <ide><path>execdriver/lxc/driver.go <ide> func (d *driver) Kill(c *execdriver.Process, sig int) error { <ide> return d.kill(c, sig) <ide> } <ide> <del>func (d *driver) Wait(id string, duration time.Duration) error { <del> var ( <del> killer bool <del> done = d.waitLxc(id, &killer) <del> ) <del> <del> if duration > 0 { <del> select { <del> case err := <-done: <add>func (d *driver) Wait(id string) error { <add> for { <add> output, err := exec.Command("lxc-info", "-n", id).CombinedOutput() <add> if err != nil { <ide> return err <del> case <-time.After(duration): <del> killer = true <del> return execdriver.ErrWaitTimeoutReached <ide> } <del> } else { <del> return <-done <add> if !strings.Contains(string(output), "RUNNING") { <add> return nil <add> } <add> time.Sleep(500 * time.Millisecond) <ide> } <del> return nil <ide> } <ide> <ide> func (d *driver) Version() string { <ide> func (d *driver) waitForStart(c *execdriver.Process, waitLock chan struct{}) err <ide> return execdriver.ErrNotRunning <ide> } <ide> <del>func (d *driver) waitLxc(id string, kill *bool) <-chan error { <del> done := make(chan error) <del> go func() { <del> for *kill { <del> output, err := exec.Command("lxc-info", "-n", id).CombinedOutput() <del> if err != nil { <del> done <- err <del> return <del> } <del> if !strings.Contains(string(output), "RUNNING") { <del> done <- err <del> return <del> } <del> time.Sleep(500 * time.Millisecond) <del> } <del> }() <del> return done <del>} <del> <ide> func (d *driver) getInfo(c *execdriver.Process) ([]byte, error) { <ide> return exec.Command("lxc-info", "-s", "-n", c.ID).CombinedOutput() <ide> } <ide><path>runtime.go <ide> func (runtime *Runtime) Kill(c *Container, sig int) error { <ide> return runtime.execDriver.Kill(c.process, sig) <ide> } <ide> <del>func (runtime *Runtime) Wait(c *Container, duration time.Duration) error { <del> return runtime.execDriver.Wait(c.ID, duration) <add>func (runtime *Runtime) 
WaitGhost(c *Container) error { <add> return runtime.execDriver.Wait(c.ID) <ide> } <ide> <ide> // Nuke kills all containers then removes all content
5
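The rewritten Wait above polls lxc-info until the container stops reporting RUNNING. Roughly the same loop in Python, assuming the lxc-info command and the RUNNING marker behave as the patch suggests (not verified against any lxc version):

```python
import subprocess
import time

def wait_for_container(container_id, poll_seconds=0.5):
    # Poll `lxc-info -n <id>` and return once RUNNING disappears from the
    # output; an illustrative translation of the Go loop, not Docker code.
    while True:
        result = subprocess.run(
            ["lxc-info", "-n", container_id],
            capture_output=True, text=True, check=True,
        )
        if "RUNNING" not in result.stdout:
            return
        time.sleep(poll_seconds)
```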
Ruby
Ruby
implement api suggestions of pull request
db040cdf8ba832123bae68764189bbcb569d473a
<ide><path>activesupport/lib/active_support/message_encryptor.rb <ide> class MessageEncryptor <ide> class InvalidMessage < StandardError; end <ide> OpenSSLCipherError = OpenSSL::Cipher.const_defined?(:CipherError) ? OpenSSL::Cipher::CipherError : OpenSSL::CipherError <ide> <del> attr_accessor :serializer, :deserializer <add> attr_accessor :serializer <ide> <del> def initialize(secret, cipher = 'aes-256-cbc') <add> def initialize(secret, cipher = 'aes-256-cbc', serializer = Marshal) <ide> @secret = secret <ide> @cipher = cipher <del> @serializer = lambda { |value| Marshal.dump(value) } <del> @deserializer = lambda { |value| Marshal.load(value) } <add> @serializer = serializer <ide> end <ide> <ide> def encrypt(value) <ide> def encrypt(value) <ide> cipher.key = @secret <ide> cipher.iv = iv <ide> <del> encrypted_data = cipher.update(serializer.call(value)) <add> encrypted_data = cipher.update(serializer.dump(value)) <ide> encrypted_data << cipher.final <ide> <ide> [encrypted_data, iv].map {|v| ActiveSupport::Base64.encode64s(v)}.join("--") <ide> def decrypt(encrypted_message) <ide> decrypted_data = cipher.update(encrypted_data) <ide> decrypted_data << cipher.final <ide> <del> deserializer.call(decrypted_data) <add> serializer.load(decrypted_data) <ide> rescue OpenSSLCipherError, TypeError <ide> raise InvalidMessage <ide> end <ide><path>activesupport/lib/active_support/message_verifier.rb <ide> module ActiveSupport <ide> class MessageVerifier <ide> class InvalidSignature < StandardError; end <ide> <del> attr_accessor :serializer, :deserializer <add> attr_accessor :serializer <ide> <del> def initialize(secret, digest = 'SHA1') <add> def initialize(secret, digest = 'SHA1', serializer = Marshal) <ide> @secret = secret <ide> @digest = digest <del> @serializer = lambda { |value| Marshal.dump(value) } <del> @deserializer = lambda { |value| Marshal.load(value) } <add> @serializer = serializer <ide> end <ide> <ide> def verify(signed_message) <ide> raise InvalidSignature if signed_message.blank? <ide> <ide> data, digest = signed_message.split("--") <ide> if data.present? && digest.present? 
&& secure_compare(digest, generate_digest(data)) <del> deserializer.call(ActiveSupport::Base64.decode64(data)) <add> serializer.load(ActiveSupport::Base64.decode64(data)) <ide> else <ide> raise InvalidSignature <ide> end <ide> end <ide> <ide> def generate(value) <del> data = ActiveSupport::Base64.encode64s(serializer.call(value)) <add> data = ActiveSupport::Base64.encode64s(serializer.dump(value)) <ide> "#{data}--#{generate_digest(data)}" <ide> end <ide> <ide><path>activesupport/test/message_encryptor_test.rb <ide> require 'active_support/json' <ide> <ide> class MessageEncryptorTest < Test::Unit::TestCase <add> <add> class JSONSerializer <add> def dump(value) <add> ActiveSupport::JSON.encode(value) <add> end <add> <add> def load(value) <add> ActiveSupport::JSON.decode(value) <add> end <add> end <add> <ide> def setup <ide> @encryptor = ActiveSupport::MessageEncryptor.new(SecureRandom.hex(64)) <ide> @data = { :some => "data", :now => Time.local(2010) } <ide> def test_signed_round_tripping <ide> end <ide> <ide> def test_alternative_serialization_method <del> @encryptor.serializer = lambda { |value| ActiveSupport::JSON.encode(value) } <del> @encryptor.deserializer = lambda { |value| ActiveSupport::JSON.decode(value) } <del> <add> @encryptor.serializer = JSONSerializer.new <ide> message = @encryptor.encrypt_and_sign({ :foo => 123, 'bar' => Time.utc(2010) }) <ide> assert_equal @encryptor.decrypt_and_verify(message), { "foo" => 123, "bar" => "2010-01-01T00:00:00Z" } <ide> end <ide><path>activesupport/test/message_verifier_test.rb <ide> require 'active_support/json' <ide> <ide> class MessageVerifierTest < Test::Unit::TestCase <add> <add> class JSONSerializer <add> def dump(value) <add> ActiveSupport::JSON.encode(value) <add> end <add> <add> def load(value) <add> ActiveSupport::JSON.decode(value) <add> end <add> end <add> <ide> def setup <ide> @verifier = ActiveSupport::MessageVerifier.new("Hey, I'm a secret!") <ide> @data = { :some => "data", :now => Time.local(2010) } <ide> def test_tampered_data_raises <ide> end <ide> <ide> def test_alternative_serialization_method <del> @verifier.serializer = lambda { |value| ActiveSupport::JSON.encode(value) } <del> @verifier.deserializer = lambda { |value| ActiveSupport::JSON.decode(value) } <del> <add> @verifier.serializer = JSONSerializer.new <ide> message = @verifier.generate({ :foo => 123, 'bar' => Time.utc(2010) }) <ide> assert_equal @verifier.verify(message), { "foo" => 123, "bar" => "2010-01-01T00:00:00Z" } <ide> end
4
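The refactor above replaces separate serialize/deserialize lambdas with any object that responds to dump and load. The duck-typed equivalent in Python might look like this (JSONSerializer and MessageCodec are names invented for the sketch, not Rails classes):

```python
import json

class JSONSerializer:
    """Anything exposing dump/load can be swapped in, like Marshal or JSON."""
    def dump(self, value):
        return json.dumps(value)

    def load(self, data):
        return json.loads(data)

class MessageCodec:
    # Accept a serializer object instead of separate lambdas, mirroring the
    # MessageEncryptor/MessageVerifier change above (illustrative only).
    def __init__(self, serializer=None):
        self.serializer = serializer if serializer is not None else JSONSerializer()

    def encode(self, value):
        return self.serializer.dump(value)

    def decode(self, payload):
        return self.serializer.load(payload)

codec = MessageCodec(serializer=JSONSerializer())
print(codec.decode(codec.encode({"foo": 123})))  # {'foo': 123}
```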
Javascript
Javascript
add support for enter/shift for find
45fe76e752dc6042f0f6e3df346ccb8deaf3c36b
<ide><path>web/viewer.js <ide> var PDFFindBar = { <ide> self.dispatchEvent(''); <ide> }); <ide> <del> // TODO: Add keybindings like enter, shift-enter, CMD-G etc. to go to prev/ <add> // TODO: Add keybindings CMD-G etc. to go to prev/ <ide> // next match when the findField is selected. <ide> <add> this.findField.addEventListener('keydown', function(evt) { <add> switch (evt.keyCode) { <add> case 13: // Enter <add> self.dispatchEvent('again', evt.shiftKey); <add> break; <add> } <add> }); <add> <ide> document.getElementById('findPrevious').addEventListener('click', <ide> function() { self.dispatchEvent('again', true); } <ide> );
1
PHP
PHP
add more column types and wire up mocks better
04613949c11f29962e6b0c55c3979c2fec6ce199
<ide><path>lib/Cake/Database/Schema/MysqlSchema.php <ide> public function extraSchemaColumns() { <ide> ]; <ide> } <ide> <add>/** <add> * Generate the SQL to create a table. <add> * <add> * @param string $table The name of the table. <add> * @param array $lines The lines (columns + indexes) to go inside the table. <add> * @return string A complete CREATE TABLE statement <add> */ <ide> public function createTableSql($table, $lines) { <del> <add> $content = implode(",\n", $lines); <add> return sprintf("CREATE TABLE `%s` (\n%s\n);", $table, $content); <ide> } <ide> <ide> /** <ide> public function columnSql($name, $data) { <ide> } <ide> break; <ide> case 'integer': <del> $out .= ' INT'; <add> $out .= ' INTEGER'; <ide> break; <ide> case 'biginteger': <ide> $out .= ' BIGINT'; <ide> public function columnSql($name, $data) { <ide> case 'datetime': <ide> $out .= ' DATETIME'; <ide> break; <add> case 'timestamp': <add> $out .= ' TIMESTAMP'; <ide> break; <ide> } <ide> $hasLength = [ <ide> public function columnSql($name, $data) { <ide> if (isset($data['null']) && $data['null'] === false) { <ide> $out .= ' NOT NULL'; <ide> } <del> if (isset($data['default'])) { <add> if (isset($data['null']) && $data['null'] === true) { <add> $out .= $data['type'] === 'timestamp' ? ' NULL' : ' DEFAULT NULL'; <add> unset($data['default']); <add> } <add> if (isset($data['default']) && $data['type'] !== 'timestamp') { <ide> $out .= ' DEFAULT ' . $this->_value($data['default']); <ide> } <add> if ( <add> isset($data['default']) && <add> $data['type'] === 'timestamp' && <add> strtolower($data['default']) === 'current_timestamp' <add> ) { <add> $out .= ' DEFAULT CURRENT_TIMESTAMP'; <add> } <ide> if (isset($data['comment'])) { <ide> $out .= ' COMMENT ' . $this->_value($data['comment']); <ide> } <ide><path>lib/Cake/Test/TestCase/Database/Schema/MysqlSchemaTest.php <ide> public static function columnSqlProvider() { <ide> ['type' => 'string', 'length' => 25, 'null' => false], <ide> '`title` VARCHAR(25) NOT NULL' <ide> ], <add> [ <add> 'title', <add> ['type' => 'string', 'length' => 25, 'null' => true, 'default' => 'ignored'], <add> '`title` VARCHAR(25) DEFAULT NULL' <add> ], <ide> [ <ide> 'id', <ide> ['type' => 'string', 'length' => 32, 'fixed' => true, 'null' => false], <ide> public static function columnSqlProvider() { <ide> [ <ide> 'post_id', <ide> ['type' => 'integer', 'length' => 11], <del> '`post_id` INT(11)' <add> '`post_id` INTEGER(11)' <ide> ], <ide> [ <ide> 'post_id', <ide> public static function columnSqlProvider() { <ide> '`created` DATETIME COMMENT "Created timestamp"' <ide> ], <ide> // timestamps <del> // TODO add timestamps including CURRENT_TIMESTAMP <add> [ <add> 'created', <add> ['type' => 'timestamp', 'null' => true], <add> '`created` TIMESTAMP NULL' <add> ], <add> [ <add> 'created', <add> ['type' => 'timestamp', 'null' => false, 'default' => 'current_timestamp'], <add> '`created` TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP' <add> ], <ide> ]; <ide> } <ide> <ide> public static function columnSqlProvider() { <ide> */ <ide> public function testColumnSql($name, $data, $expected) { <ide> $driver = new \Cake\Database\Driver\Mysql(); <add> <add> $mock = $this->getMock('FakePdo', ['quote']); <add> $mock->expects($this->any()) <add> ->method('quote') <add> ->will($this->returnCallback(function ($value) { <add> return '"' . $value . 
'"'; <add> })); <add> <add> $driver->connection($mock); <ide> $dialect = new MysqlSchema($driver); <ide> $this->assertEquals($expected, $dialect->columnSql($name, $data)); <ide> } <ide> public function testCreateTableSql() { <ide> ]); <ide> <ide> $connection = $this->getMock('Cake\Database\Connection', array(), array(), '', false); <del> $driver = $this->getMock('Cake\Database\Driver\Mysql'); <add> $driver = new \Cake\Database\Driver\Mysql(); <add> $mock = $this->getMock('FakePdo', ['quote']); <add> $driver->connection($mock); <add> <ide> $dialect = new MysqlSchema($driver); <add> <ide> $connection->expects($this->any())->method('driver') <ide> ->will($this->returnValue($driver)); <del> $driver->expects($this->any()) <del> ->method('schemaDialect') <del> ->will($this->returnValue($dialect)); <add> <add> $mock->expects($this->any()) <add> ->method('quote') <add> ->will($this->returnCallback(function ($value) { <add> return '"' . $value . '"'; <add> })); <ide> <ide> $result = $table->createTableSql($connection); <ide> $expected = <<<SQL <ide> CREATE TABLE `posts` ( <del> `id` INTEGER NOT NULL AUTO_INCREMENT, <del> `title` VARCHAR(255) NOT NULL COMMENT 'The title', <del> `body` TEXT, <del> `created` DATETIME, <del> PRIMARY KEY (`id`) <add>`id` INTEGER NOT NULL AUTO_INCREMENT, <add>`title` VARCHAR(255) NOT NULL COMMENT "The title", <add>`body` TEXT, <add>`created` DATETIME, <add>PRIMARY KEY (`id`) <ide> ); <ide> SQL; <ide> $this->assertEquals($expected, $result); <ide> } <ide> <add> <ide> }
2
Javascript
Javascript
write font cmap using a string
f63af3e614cc78de1483800b5e965336d4b0effc
<ide><path>fonts.js <ide> var Font = (function () { <ide> var searchRange = FontsUtils.getMaxPower2(segCount) * 2; <ide> var searchEntry = Math.log(segCount) / Math.log(2); <ide> var rangeShift = 2 * segCount - searchRange; <del> var cmap = [].concat( <del> [ <del> 0x00, 0x00, // version <del> 0x00, 0x01, // numTables <del> 0x00, 0x03, // platformID <del> 0x00, 0x01, // encodingID <del> 0x00, 0x00, 0x00, 0x0C, // start of the table record <del> 0x00, 0x04 // format <del> ], <del> FontsUtils.integerToBytes(headerSize, 2), // length <del> [0x00, 0x00], // language <del> FontsUtils.integerToBytes(segCount2, 2), <del> FontsUtils.integerToBytes(searchRange, 2), <del> FontsUtils.integerToBytes(searchEntry, 2), <del> FontsUtils.integerToBytes(rangeShift, 2) <del> ); <add> <add> var cmap = "\x00\x00" + // version <add> "\x00\x01" + // numTables <add> "\x00\x03" + // platformID <add> "\x00\x01" + // encodingID <add> "\x00\x00\x00\x0C" + // start of the table record <add> "\x00\x04" + // format <add> s16(headerSize) + // length <add> "\x00\x00" + // languages <add> s16(segCount2) + <add> s16(searchRange) + <add> s16(searchEntry) + <add> s16(rangeShift); <add> cmap = s2a(cmap); <ide> <ide> // Fill up the 4 parallel arrays describing the segments. <ide> var startCount = [];
1
Ruby
Ruby
remove redundant reader method
b0138b9c9b8ac12deaebd83b0c665c46c16815c8
<ide><path>Library/Homebrew/requirements/python_dependency.rb <ide> class PythonInstalled < Requirement <ide> attr_reader :min_version <ide> attr_reader :if3then3 <del> attr_reader :site_packages <ide> attr_accessor :site_packages <ide> attr_accessor :binary # The python.rb formula needs to set the binary <ide>
1
Python
Python
return scalar losses instead of per-sample means
96d833b211a35bc48c3f9174042a796bb110a66b
<ide><path>src/transformers/modeling_tf_utils.py <ide> def hf_compute_loss(self, labels, logits): <ide> unmasked_loss = loss_fn(tf.nn.relu(labels), logits) <ide> # make sure only labels that are not equal to -100 affect the loss <ide> loss_mask = tf.cast(labels != -100, dtype=unmasked_loss.dtype) <del> # Avoid division by zero later <del> loss_denominator = tf.math.maximum(tf.cast(1, loss_mask.dtype), tf.reduce_sum(loss_mask, axis=1)) <ide> masked_loss = unmasked_loss * loss_mask <del> reduced_masked_loss = tf.reduce_sum(masked_loss, axis=1) / loss_denominator <del> return reduced_masked_loss <add> reduced_masked_loss = tf.reduce_sum(masked_loss) / tf.reduce_sum(loss_mask) <add> return tf.reshape(reduced_masked_loss, (1,)) <ide> <ide> <ide> class TFQuestionAnsweringLoss: <ide> def hf_compute_loss(self, labels, logits): <ide> # are taken into account as loss <ide> loss_mask = tf.cast(labels >= 0, dtype=unmasked_loss.dtype) <ide> # Avoid possible division by zero later <del> loss_denominator = tf.math.maximum(tf.cast(1, loss_mask.dtype), tf.reduce_sum(loss_mask, axis=1)) <ide> # Masked positions will have a loss of NaN because -100 and -1 are not valid labels <ide> masked_loss = unmasked_loss * loss_mask <del> reduced_masked_loss = tf.reduce_sum(masked_loss, axis=1) / loss_denominator <del> return reduced_masked_loss <add> reduced_masked_loss = tf.reduce_sum(masked_loss) / tf.reduce_sum(loss_mask) <add> return tf.reshape(reduced_masked_loss, (1,)) <ide> <ide> <ide> class TFSequenceClassificationLoss: <ide><path>src/transformers/models/albert/modeling_tf_albert.py <ide> def hf_compute_loss(self, labels: tf.Tensor, logits: tf.Tensor) -> tf.Tensor: <ide> # make sure only labels that are not equal to -100 <ide> # are taken into account for the loss computation <ide> lm_loss_mask = tf.cast(labels["labels"] != -100, dtype=unmasked_lm_losses.dtype) <del> # Avoid division by zero later <del> lm_loss_denominator = tf.math.maximum(tf.cast(1, lm_loss_mask.dtype), tf.reduce_sum(lm_loss_mask, axis=1)) <ide> masked_lm_losses = unmasked_lm_losses * lm_loss_mask <del> reduced_masked_lm_loss = tf.reduce_sum(masked_lm_losses, axis=1) / lm_loss_denominator <add> reduced_masked_lm_loss = tf.reduce_sum(masked_lm_losses) / tf.reduce_sum(lm_loss_mask) <ide> <ide> sop_logits = tf.reshape(logits[1], (-1, 2)) <ide> # Clip negative labels to zero here to avoid NaNs and errors - those positions will get masked later anyway <ide> unmasked_sop_loss = loss_fn(y_true=tf.nn.relu(labels["sentence_order_label"]), y_pred=sop_logits) <ide> sop_loss_mask = tf.cast(labels["sentence_order_label"] != -100, dtype=unmasked_sop_loss.dtype) <ide> <del> # No reduction because this already has shape (num_samples,) <ide> masked_sop_loss = unmasked_sop_loss * sop_loss_mask <add> reduced_masked_sop_loss = tf.reduce_sum(masked_sop_loss) / tf.reduce_sum(sop_loss_mask) <ide> <del> return reduced_masked_lm_loss + masked_sop_loss <add> return tf.reshape(reduced_masked_lm_loss + reduced_masked_sop_loss, (1,)) <ide> <ide> <ide> class TFAlbertEmbeddings(tf.keras.layers.Layer): <ide><path>src/transformers/models/bert/modeling_tf_bert.py <ide> def hf_compute_loss(self, labels: tf.Tensor, logits: tf.Tensor) -> tf.Tensor: <ide> # make sure only labels that are not equal to -100 <ide> # are taken into account for the loss computation <ide> lm_loss_mask = tf.cast(labels["labels"] != -100, dtype=unmasked_lm_losses.dtype) <del> # Avoid potential division by zero later <del> lm_loss_denominator = tf.math.maximum(tf.cast(1, lm_loss_mask.dtype), 
tf.reduce_sum(lm_loss_mask, axis=1)) <ide> masked_lm_losses = unmasked_lm_losses * lm_loss_mask <del> reduced_masked_lm_loss = tf.reduce_sum(masked_lm_losses, axis=1) / lm_loss_denominator <add> reduced_masked_lm_loss = tf.reduce_sum(masked_lm_losses) / tf.reduce_sum(lm_loss_mask) <ide> <ide> # Clip negative labels to zero here to avoid NaNs and errors - those positions will get masked later anyway <ide> unmasked_ns_loss = loss_fn(y_true=tf.nn.relu(labels["next_sentence_label"]), y_pred=logits[1]) <ide> ns_loss_mask = tf.cast(labels["next_sentence_label"] != -100, dtype=unmasked_ns_loss.dtype) <del> # Just zero out samples where label is -100, no reduction <ide> masked_ns_loss = unmasked_ns_loss * ns_loss_mask <ide> <del> return reduced_masked_lm_loss + masked_ns_loss <add> reduced_masked_ns_loss = tf.reduce_sum(masked_ns_loss) / tf.reduce_sum(ns_loss_mask) <add> <add> return tf.reshape(reduced_masked_lm_loss + reduced_masked_ns_loss, (1,)) <ide> <ide> <ide> class TFBertEmbeddings(tf.keras.layers.Layer): <ide><path>src/transformers/models/led/modeling_tf_led.py <ide> def hf_compute_loss(self, labels, logits): <ide> unmasked_loss = loss_fn(tf.nn.relu(labels), logits) <ide> # make sure only non-padding labels affect the loss <ide> loss_mask = tf.cast(labels != self.config.pad_token_id, dtype=unmasked_loss.dtype) <del> loss_denominator = tf.math.maximum(tf.cast(1, loss_mask.dtype), tf.reduce_sum(loss_mask, axis=1)) <ide> masked_loss = unmasked_loss * loss_mask <del> reduced_masked_loss = tf.reduce_sum(masked_loss, axis=1) / loss_denominator <del> return reduced_masked_loss <add> reduced_masked_loss = tf.reduce_sum(masked_loss) / tf.reduce_sum(loss_mask) <add> return tf.reshape(reduced_masked_loss, (1,)) <ide><path>src/transformers/models/rag/modeling_tf_rag.py <ide> def get_nll(self, seq_logits, doc_scores, target, reduce_loss=False, epsilon=0.0 <ide> # Adopted modeling_tf_bart + add smooth_loss to match with pytorch version <ide> def hf_compute_loss(self, labels, y_pred, smooth_epsilon=0.0, from_logits=True, reduce_loss=False): <ide> """CrossEntropyLoss that ignores pad tokens""" <del> if self.config.tf_legacy_loss: <del> loss_fn = tf.keras.losses.SparseCategoricalCrossentropy( <del> from_logits=True, <del> reduction=tf.keras.losses.Reduction.SUM, <del> ) <del> <del> if from_logits is False: # convert to logits <del> eps = 1e-9 <del> y_pred = tf.clip_by_value(y_pred, clip_value_min=eps, clip_value_max=1 - eps) <del> y_pred = tf.math.log(y_pred) <del> <del> logits = y_pred <del> melted_labels = tf.reshape(labels, (-1,)) <del> active_loss = tf.not_equal(melted_labels, self.config.generator.pad_token_id) <del> <del> reduced_logits = tf.boolean_mask(tf.reshape(logits, (-1, logits.shape[2])), active_loss) <del> labels = tf.boolean_mask(melted_labels, active_loss) <del> nll_loss = loss_fn(labels, reduced_logits) <del> <del> smooth_loss = -tf.reduce_sum(reduced_logits, axis=-1) <del> smooth_loss = tf.reduce_sum(smooth_loss) # sum and squeeze like torch <del> eps_i = smooth_epsilon / reduced_logits.shape[-1] <del> <del> loss = (1.0 - smooth_epsilon) * nll_loss + eps_i * smooth_loss <del> <del> return loss <del> <add> # Matt: As written, this loss is not XLA-compatible, but it's doing some very weird things <add> # and I don't feel comfortable converting it. 
<ide> loss_fn = tf.keras.losses.SparseCategoricalCrossentropy( <del> from_logits=from_logits, <del> reduction=tf.keras.losses.Reduction.NONE, <add> from_logits=True, <add> reduction=tf.keras.losses.Reduction.SUM, <ide> ) <ide> <del> unmasked_loss = loss_fn(labels, y_pred) <del> loss_mask = labels != self.config.generator.pad_token_id <del> nll_loss = tf.reduce_sum(unmasked_loss * loss_mask) <add> if from_logits is False: # convert to logits <add> eps = 1e-9 <add> y_pred = tf.clip_by_value(y_pred, clip_value_min=eps, clip_value_max=1 - eps) <add> y_pred = tf.math.log(y_pred) <add> <add> logits = y_pred <add> melted_labels = tf.reshape(labels, (-1,)) <add> active_loss = tf.not_equal(melted_labels, self.config.generator.pad_token_id) <add> <add> reduced_logits = tf.boolean_mask(tf.reshape(logits, (-1, logits.shape[2])), active_loss) <add> labels = tf.boolean_mask(melted_labels, active_loss) <add> nll_loss = loss_fn(labels, reduced_logits) <ide> <del> # Matt: This makes no sense to me, but I'm just copying the old loss in XLA-compatible form <del> smooth_loss = -tf.reduce_sum(y_pred * tf.expand_dims(labels, -1), axis=-1) <del> smooth_loss = tf.reduce_sum(smooth_loss) <del> eps_i = smooth_epsilon / y_pred.shape[-1] <add> smooth_loss = -tf.reduce_sum(reduced_logits, axis=-1) <add> smooth_loss = tf.reduce_sum(smooth_loss) # sum and squeeze like torch <add> eps_i = smooth_epsilon / reduced_logits.shape[-1] <ide> <ide> loss = (1.0 - smooth_epsilon) * nll_loss + eps_i * smooth_loss <ide> <ide><path>tests/models/xlnet/test_modeling_tf_xlnet.py <ide> def test_loss_computation(self): <ide> input_ids = prepared_for_class.pop(input_name) <ide> <ide> loss = model(input_ids, **prepared_for_class)[0] <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> # Test that model correctly compute the loss with a dict <ide> prepared_for_class = self._prepare_for_class(inputs_dict.copy(), model_class, return_labels=True) <ide> loss = model(prepared_for_class)[0] <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> # Test that model correctly compute the loss with a tuple <ide> prepared_for_class = self._prepare_for_class(inputs_dict.copy(), model_class, return_labels=True) <ide> def test_loss_computation(self): <ide> # Send to model <ide> loss = model(tuple_input[:-1])[0] <ide> <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> <ide> @require_tf <ide><path>tests/test_modeling_tf_common.py <ide> def test_loss_computation(self): <ide> model_input = prepared_for_class.pop(input_name) <ide> <ide> loss = model(model_input, **prepared_for_class)[0] <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> # Test that model correctly compute the loss when we mask some positions <ide> prepared_for_class = self._prepare_for_class(inputs_dict.copy(), model_class, return_labels=True) <ide> def test_loss_computation(self): <ide> labels[0] = -100 <ide> prepared_for_class["labels"] = tf.convert_to_tensor(labels) <ide> loss = model(model_input, **prepared_for_class)[0] <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> 
self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> self.assertTrue(not np.any(np.isnan(loss.numpy()))) <ide> <ide> # Test that model correctly compute the loss with a dict <ide> prepared_for_class = self._prepare_for_class(inputs_dict.copy(), model_class, return_labels=True) <ide> loss = model(prepared_for_class)[0] <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> # Test that model correctly compute the loss with a tuple <ide> prepared_for_class = self._prepare_for_class(inputs_dict.copy(), model_class, return_labels=True) <ide> def test_loss_computation(self): <ide> # Send to model <ide> loss = model(tuple_input[:-1])[0] <ide> <del> self.assertEqual(loss.shape.as_list(), expected_loss_size) <add> self.assertTrue(loss.shape.as_list() == expected_loss_size or loss.shape.as_list() == [1]) <ide> <ide> def test_keras_fit(self): <ide> config, inputs_dict = self.model_tester.prepare_config_and_inputs_for_common()
7
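The loss changes above all reduce to the same formula: zero out ignored positions, sum what remains, divide by the number of unmasked tokens, and return a (1,)-shaped scalar instead of a per-sample mean. A small NumPy sketch of that reduction (the sample numbers are made up; NumPy stands in for the TensorFlow ops):

```python
import numpy as np

def reduced_masked_loss(per_token_loss, labels, ignore_index=-100):
    # Zero out ignored positions, then sum / count over the whole batch and
    # reshape to (1,), as in the patch above.
    mask = (labels != ignore_index).astype(per_token_loss.dtype)
    reduced = np.sum(per_token_loss * mask) / np.sum(mask)
    return np.reshape(reduced, (1,))

losses = np.array([[0.5, 1.5, 2.0], [1.0, 3.0, 0.0]])
labels = np.array([[7, 3, -100], [2, -100, -100]])
print(reduced_masked_loss(losses, labels))  # [1.]  ==  (0.5 + 1.5 + 1.0) / 3
```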
PHP
PHP
throw exception if sqlite database doesn't exist
5f6a831bdae5eb1042e24ff033ade63a6af03afb
<ide><path>src/Illuminate/Database/Connectors/SQLiteConnector.php <ide> public function connect(array $config) <ide> <ide> $path = realpath($config['database']); <ide> <add> // Here we'll verify that the SQLite database exists before we gooing further <add> // as the developer probably wants to know if the database exists and this <add> // SQLite driver will not throw any exception if it does not by default. <add> if ($path === false) <add> { <add> throw new \InvalidArgumentException("Database does not exist."); <add> } <add> <ide> return $this->createConnection("sqlite:{$path}", $config, $options); <ide> } <ide>
1
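The connector change above fails fast when realpath() cannot resolve the database file. A comparable guard in Python, where sqlite3 would otherwise silently create an empty file (the function name and exception type are choices made for this sketch):

```python
import os
import sqlite3

def connect_existing(database_path):
    # Refuse to connect if the file is missing, mirroring the realpath check
    # added above; sqlite3.connect would otherwise create a new database.
    resolved = os.path.realpath(database_path)
    if not os.path.isfile(resolved):
        raise FileNotFoundError(f"Database does not exist: {database_path}")
    return sqlite3.connect(resolved)
```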
PHP
PHP
apply fixes from styleci
66e69a2f674515cd0aa18fc596d228e4d96a959e
<ide><path>src/Illuminate/Foundation/Providers/FoundationServiceProvider.php <ide> <ide> use Illuminate\Contracts\Foundation\MaintenanceMode as MaintenanceModeContract; <ide> use Illuminate\Foundation\FileBasedMaintenanceMode; <del>use Illuminate\Foundation\MaintenanceMode; <ide> use Illuminate\Http\Request; <ide> use Illuminate\Log\Events\MessageLogged; <ide> use Illuminate\Support\AggregateServiceProvider;
1
Text
Text
add dynamic routing keyword (#788)
8fa630d85574c47151c797947d035f1fc4e79d9c
<ide><path>examples/parameterized-routing/README.md <ide> <del># Parametrized routes example <add># Parametrized routes example (dynamic routing) <ide> <ide> ## How to use <ide>
1
Go
Go
improve udev unsupported error message
b8f38747e60eb76e19f08129ab27cb808d21c22a
<ide><path>daemon/graphdriver/devmapper/deviceset.go <ide> import ( <ide> "github.com/Sirupsen/logrus" <ide> <ide> "github.com/docker/docker/daemon/graphdriver" <add> "github.com/docker/docker/dockerversion" <ide> "github.com/docker/docker/pkg/devicemapper" <ide> "github.com/docker/docker/pkg/idtools" <ide> "github.com/docker/docker/pkg/loopback" <ide> func (devices *DeviceSet) initDevmapper(doInit bool) error { <ide> <ide> // https://github.com/docker/docker/issues/4036 <ide> if supported := devicemapper.UdevSetSyncSupport(true); !supported { <del> logrus.Errorf("devmapper: Udev sync is not supported. This will lead to data loss and unexpected behavior. Install a dynamic binary to use devicemapper or select a different storage driver. For more information, see https://docs.docker.com/engine/reference/commandline/daemon/#daemon-storage-driver-option") <add> if dockerversion.IAmStatic == "true" { <add> logrus.Errorf("devmapper: Udev sync is not supported. This will lead to data loss and unexpected behavior. Install a dynamic binary to use devicemapper or select a different storage driver. For more information, see https://docs.docker.com/engine/reference/commandline/daemon/#daemon-storage-driver-option") <add> } else { <add> logrus.Errorf("devmapper: Udev sync is not supported. This will lead to data loss and unexpected behavior. Install a more recent version of libdevmapper or select a different storage driver. For more information, see https://docs.docker.com/engine/reference/commandline/daemon/#daemon-storage-driver-option") <add> } <add> <ide> if !devices.overrideUdevSyncCheck { <ide> return graphdriver.ErrNotSupported <ide> }
1
Text
Text
fix typo in as guide [ci skip]
fb42520252477d83560f2e2a2550c7f377e07bc1
<ide><path>guides/source/active_support_core_extensions.md <ide> Extensions to `Marshal` <ide> <ide> Active Support adds constant autoloading support to `load`. <ide> <del>For example, the file cache store deserializes this way: <add>For example, the file cache store deserializes this way: <ide> <ide> ```ruby <ide> File.open(file_name) { |f| Marshal.load(f) } <ide> ``` <ide> <del>If the cached data refers to a constant that is unknown at that point, the autoloading mechanism is triggered and if it succeeds the desarialization is retried transparently. <add>If the cached data refers to a constant that is unknown at that point, the autoloading mechanism is triggered and if it succeeds the deserialization is retried transparently. <ide> <ide> WARNING. If the argument is an `IO` it needs to respond to `rewind` to be able to retry. Regular files respond to `rewind`. <ide>
1
PHP
PHP
fix postgres for bigint primary key
3433e10fbdb91d4dd84865dd8f0ee72b7e41507f
<ide><path>lib/Cake/Model/Datasource/Database/Postgres.php <ide> public function buildColumn($column) { <ide> ); <ide> <ide> $out = str_replace('integer serial', 'serial', $out); <add> $out = str_replace('bigint serial', 'bigserial', $out); <ide> if (strpos($out, 'timestamp DEFAULT')) { <ide> if (isset($column['null']) && $column['null']) { <ide> $out = str_replace('DEFAULT NULL', '', $out); <ide><path>lib/Cake/Test/Case/Model/Datasource/Database/PostgresTest.php <ide> public function testCakeSchema() { <ide> $db1->query('DROP TABLE ' . $db1->fullTableName('datatype_tests')); <ide> } <ide> <add>/** <add> * testCakeSchemaBegserial method <add> * <add> * Test that schema generated postgresql queries are valid. <add> * <add> * @return void <add> */ <add> public function testCakeSchemaBigserial() { <add> $db1 = ConnectionManager::getDataSource('test'); <add> $db1->cacheSources = false; <add> <add> $db1->rawQuery('CREATE TABLE ' . $db1->fullTableName('bigserial_tests') . ' ( <add> "id" bigserial NOT NULL, <add> "varchar" character varying(40) NOT NULL, <add> PRIMARY KEY ("id") <add> )'); <add> <add> $schema = new CakeSchema(array('connection' => 'test')); <add> $result = $schema->read(array( <add> 'connection' => 'test', <add> 'models' => array('BigserialTest') <add> )); <add> $schema->tables = array( <add> 'bigserial_tests' => $result['tables']['missing']['bigserial_tests'] <add> ); <add> $result = $db1->createSchema($schema, 'bigserial_tests'); <add> <add> $this->assertContains('"id" bigserial NOT NULL,', $result); <add> <add> $db1->query('DROP TABLE ' . $db1->fullTableName('bigserial_tests')); <add> } <add> <ide> /** <ide> * Test index generation from table info. <ide> *
2
Ruby
Ruby
add i18n tests to engines
5cd9aad4fdf55c591fe8e12657008e83315251d7
<ide><path>activesupport/lib/active_support/railtie.rb <ide> class Railtie < Rails::Railtie <ide> I18n.send("#{setting}=", value) <ide> end <ide> end <add> <add> I18n.reload! <ide> end <ide> end <ide> end <ide><path>railties/test/initializer/initialize_i18n_test.rb <del>require "isolation/abstract_unit" <del> <del>module InitializerTests <del> class InitializeI18nTest < Test::Unit::TestCase <del> include ActiveSupport::Testing::Isolation <del> <del> def setup <del> build_app <del> boot_rails <del> end <del> <del> # test_config_defaults_and_settings_should_be_added_to_i18n_defaults <del> test "i18n config defaults and settings should be added to i18n defaults" do <del> add_to_config <<-RUBY <del> config.root = "#{app_path}" <del> config.i18n.load_path << "my/other/locale.yml" <del> RUBY <del> <del> require "#{app_path}/config/environment" <del> <del> #{RAILS_FRAMEWORK_ROOT}/railties/test/fixtures/plugins/engines/engine/config/locales/en.yml <del> assert_equal %W( <del> #{RAILS_FRAMEWORK_ROOT}/activesupport/lib/active_support/locale/en.yml <del> #{RAILS_FRAMEWORK_ROOT}/activemodel/lib/active_model/locale/en.yml <del> #{RAILS_FRAMEWORK_ROOT}/activerecord/lib/active_record/locale/en.yml <del> #{RAILS_FRAMEWORK_ROOT}/actionpack/lib/action_view/locale/en.yml <del> #{RAILS_FRAMEWORK_ROOT}/railties/tmp/app/config/locales/en.yml <del> my/other/locale.yml <del> ).map { |path| File.expand_path(path) }, I18n.load_path.map { |path| File.expand_path(path) } <del> end <del> <del> test "i18n finds locale files in engines" do <del> # app_file "vendor/plugins/engine/init.rb", "" <del> # app_file "vendor/plugins/engine/app/models/hellos.rb", "class Hello ; end" <del> # app_file "vendor/plugins/engine/lib/omg.rb", "puts 'omg'" <del> # app_file "vendor/plugins/engine/config/locales/en.yml", "hello:" <del> # <del> # Rails::Initializer.run do |c| <del> # c.root = app_path <del> # c.i18n.load_path << "my/other/locale.yml" <del> # end <del> # Rails.initialize! 
<del> # <del> # #{RAILS_FRAMEWORK_ROOT}/railties/test/fixtures/plugins/engines/engine/config/locales/en.yml <del> # assert_equal %W( <del> # #{RAILS_FRAMEWORK_ROOT}/activesupport/lib/active_support/locale/en.yml <del> # #{RAILS_FRAMEWORK_ROOT}/activemodel/lib/active_model/locale/en.yml <del> # #{RAILS_FRAMEWORK_ROOT}/activerecord/lib/active_record/locale/en.yml <del> # #{RAILS_FRAMEWORK_ROOT}/actionpack/lib/action_view/locale/en.yml <del> # #{app_path}/config/locales/en.yml <del> # my/other/locale.yml <del> # #{app_path}/vendor/plugins/engine/config/locales/en.yml <del> # ).map { |path| File.expand_path(path) }, I18n.load_path.map { |path| File.expand_path(path) } <del> end <del> end <del>end <ide>\ No newline at end of file <ide><path>railties/test/plugins/vendored_test.rb <ide> def self.call(env) <ide> Rake::Task[:foo].invoke <ide> assert $executed <ide> end <add> <add> test "i18n files are added with lower priority than application ones" do <add> add_to_config <<-RUBY <add> config.i18n.load_path << "#{app_path}/app/locales/en.yml" <add> RUBY <add> <add> app_file 'app/locales/en.yml', <<-YAML <add>en: <add> bar: "1" <add>YAML <add> <add> app_file 'config/locales/en.yml', <<-YAML <add>en: <add> foo: "2" <add> bar: "2" <add>YAML <add> <add> @plugin.write 'config/locales/en.yml', <<-YAML <add>en: <add> foo: "3" <add>YAML <add> <add> boot_rails <add> require "#{app_path}/config/environment" <add> <add> assert_equal %W( <add> #{RAILS_FRAMEWORK_ROOT}/activesupport/lib/active_support/locale/en.yml <add> #{RAILS_FRAMEWORK_ROOT}/activemodel/lib/active_model/locale/en.yml <add> #{RAILS_FRAMEWORK_ROOT}/activerecord/lib/active_record/locale/en.yml <add> #{RAILS_FRAMEWORK_ROOT}/actionpack/lib/action_view/locale/en.yml <add> #{app_path}/vendor/plugins/bukkits/config/locales/en.yml <add> #{app_path}/config/locales/en.yml <add> #{app_path}/app/locales/en.yml <add> ).map { |path| File.expand_path(path) }, I18n.load_path.map { |path| File.expand_path(path) } <add> <add> assert_equal "2", I18n.t(:foo) <add> assert_equal "1", I18n.t(:bar) <add> end <ide> end <ide> <ide> class VendoredOrderingTest < Test::Unit::TestCase
3
Python
Python
replace arraytype, newaxis, and typecode=
de0ac347a7332b04ff35b21c87e7e1bdb0cb5bc4
<ide><path>numpy/lib/convertcode.py <ide> def replaceattr(astr): <ide> astr = astr.replace(".itemsize()",".itemsize") <ide> astr = astr.replace("matrixmultiply","dot") <ide> # preserve uses of flat that should be o.k. <del> tmpstr = flatindex_re.sub("\\2",astr) <add> tmpstr = flatindex_re.sub(r"\2",astr) <ide> # replace other uses of flat <ide> tmpstr = tmpstr.replace(".flat",".ravel()") <ide> # put back .flat where it was valid <ide> def replaceattr(astr): <ide> svspc3 = re.compile(r'(\S+[.]savespace[(].*[)])') <ide> #shpe = re.compile(r'(\S+\s*)[.]shape\s*=[^=]\s*(.+)') <ide> def replaceother(astr): <del> astr = astr.replace("typecode=","dtype=") <add> astr = re.sub(r'typecode\s*=', 'dtype=', astr) <ide> astr = astr.replace("UserArray","ndarray") <del> astr = svspc.sub('\\1)',astr) <add> astr = astr.replace('ArrayType', 'ndarray') <add> astr = astr.replace('NewAxis', 'newaxis') <add> astr = svspc.sub(r'\1)',astr) <ide> astr = svspc2.sub('True',astr) <del> astr = svspc3.sub('pass ## \\1', astr) <add> astr = svspc3.sub(r'pass ## \1', astr) <ide> #astr = shpe.sub('\\1=\\1.reshape(\\2)', astr) <ide> return astr <ide>
1
Go
Go
remove the pushimagerec and use iteration instead
55cf05835b6f55e7ec83c63bc69a570876722df5
<ide><path>registry.go <ide> func (graph *Graph) PullRepository(stdout io.Writer, remote, askedTag string, re <ide> return nil <ide> } <ide> <del>func pushImageRec(graph *Graph, stdout io.Writer, img *Image, registry string, token []string) error { <del> if parent, err := img.GetParent(); err != nil { <del> return err <del> } else if parent != nil { <del> if err := pushImageRec(graph, stdout, parent, registry, token); err != nil { <del> return err <del> } <del> } <add>// Push a local image to the registry <add>func (graph *Graph) PushImage(stdout io.Writer, img *Image, registry string, token []string) error { <add> registry = "https://" + registry + "/v1" <add> <ide> client := graph.getHttpClient() <ide> jsonRaw, err := ioutil.ReadFile(path.Join(graph.Root, img.Id, "json")) <ide> if err != nil { <ide> func pushImageRec(graph *Graph, stdout io.Writer, img *Image, registry string, t <ide> return nil <ide> } <ide> <del>// Push a local image to the registry with its history if needed <del>func (graph *Graph) PushImage(stdout io.Writer, imgOrig *Image, registry string, token []string) error { <del> registry = "https://" + registry + "/v1" <del> return pushImageRec(graph, stdout, imgOrig, registry, token) <del>} <del> <ide> // push a tag on the registry. <ide> // Remote has the format '<user>/<repo> <ide> func (graph *Graph) pushTag(remote, revision, tag, registry string, token []string) error { <ide> func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Re <ide> client.Jar = cookiejar.NewCookieJar() <ide> var imgList []*ImgListJson <ide> <add> fmt.Fprintf(stdout, "Processing checksums\n") <add> imageSet := make(map[string]struct{}) <ide> for _, id := range localRepo { <del> checksum, err := graph.getChecksum(id) <add> img, err := graph.Get(id) <ide> if err != nil { <ide> return err <ide> } <del> imgList = append(imgList, &ImgListJson{ <del> Id: id, <del> Checksum: checksum, <add> img.WalkHistory(func(img *Image) error { <add> if _, exists := imageSet[img.Id]; exists { <add> return nil <add> } <add> imageSet[img.Id] = struct{}{} <add> checksum, err := graph.getChecksum(img.Id) <add> if err != nil { <add> return err <add> } <add> imgList = append(imgList, &ImgListJson{ <add> Id: img.Id, <add> Checksum: checksum, <add> }) <add> return nil <ide> }) <ide> } <ide> <ide> func (graph *Graph) PushRepository(stdout io.Writer, remote string, localRepo Re <ide> <ide> Debugf("json sent: %s\n", imgListJson) <ide> <add> fmt.Fprintf(stdout, "Sending image list\n") <ide> req, err := http.NewRequest("PUT", INDEX_ENDPOINT+"/repositories/"+remote+"/", bytes.NewReader(imgListJson)) <ide> if err != nil { <ide> return err
1
Go
Go
move builder cli helper functions to own pkg
feaf5902f650f2326e1c41e82dfe28962f1ba46e
<ide><path>builder/utils_test.go <ide> func createTestTempFile(t *testing.T, dir, filename, contents string, perm os.Fi <ide> <ide> return filePath <ide> } <del> <del>// chdir changes current working directory to dir. <del>// It returns a function which changes working directory back to the previous one. <del>// This function is meant to be executed as a deferred call. <del>// When an error occurs, it terminates the test. <del>func chdir(t *testing.T, dir string) func() { <del> workingDirectory, err := os.Getwd() <del> <del> if err != nil { <del> t.Fatalf("Error when retrieving working directory: %s", err) <del> } <del> <del> err = os.Chdir(dir) <del> <del> if err != nil { <del> t.Fatalf("Error when changing directory to %s: %s", dir, err) <del> } <del> <del> return func() { <del> err = os.Chdir(workingDirectory) <del> <del> if err != nil { <del> t.Fatalf("Error when changing back to working directory (%s): %s", workingDirectory, err) <del> } <del> } <del>} <ide><path>cli/command/image/build.go <ide> import ( <ide> "github.com/docker/docker/api" <ide> "github.com/docker/docker/api/types" <ide> "github.com/docker/docker/api/types/container" <del> "github.com/docker/docker/builder" <ide> "github.com/docker/docker/builder/dockerignore" <ide> "github.com/docker/docker/cli" <ide> "github.com/docker/docker/cli/command" <add> "github.com/docker/docker/cli/command/image/build" <ide> "github.com/docker/docker/opts" <ide> "github.com/docker/docker/pkg/archive" <ide> "github.com/docker/docker/pkg/fileutils" <ide> import ( <ide> "github.com/docker/docker/pkg/urlutil" <ide> "github.com/docker/docker/reference" <ide> runconfigopts "github.com/docker/docker/runconfig/opts" <del> "github.com/docker/go-units" <add> units "github.com/docker/go-units" <ide> "github.com/spf13/cobra" <ide> ) <ide> <ide> func runBuild(dockerCli *command.DockerCli, options buildOptions) error { <ide> <ide> switch { <ide> case specifiedContext == "-": <del> buildCtx, relDockerfile, err = builder.GetContextFromReader(dockerCli.In(), options.dockerfileName) <add> buildCtx, relDockerfile, err = build.GetContextFromReader(dockerCli.In(), options.dockerfileName) <ide> case urlutil.IsGitURL(specifiedContext): <del> tempDir, relDockerfile, err = builder.GetContextFromGitURL(specifiedContext, options.dockerfileName) <add> tempDir, relDockerfile, err = build.GetContextFromGitURL(specifiedContext, options.dockerfileName) <ide> case urlutil.IsURL(specifiedContext): <del> buildCtx, relDockerfile, err = builder.GetContextFromURL(progBuff, specifiedContext, options.dockerfileName) <add> buildCtx, relDockerfile, err = build.GetContextFromURL(progBuff, specifiedContext, options.dockerfileName) <ide> default: <del> contextDir, relDockerfile, err = builder.GetContextFromLocalDir(specifiedContext, options.dockerfileName) <add> contextDir, relDockerfile, err = build.GetContextFromLocalDir(specifiedContext, options.dockerfileName) <ide> } <ide> <ide> if err != nil { <ide> func runBuild(dockerCli *command.DockerCli, options buildOptions) error { <ide> } <ide> } <ide> <del> if err := builder.ValidateContextDirectory(contextDir, excludes); err != nil { <add> if err := build.ValidateContextDirectory(contextDir, excludes); err != nil { <ide> return fmt.Errorf("Error checking context: '%s'.", err) <ide> } <ide> <add><path>cli/command/image/build/context.go <del><path>builder/context.go <del>package builder <add>package build <ide> <ide> import ( <ide> "bufio" <ide> import ( <ide> "github.com/docker/docker/pkg/streamformatter" <ide> ) <ide> <add>const ( 
<add> // DefaultDockerfileName is the Default filename with Docker commands, read by docker build <add> DefaultDockerfileName string = "Dockerfile" <add>) <add> <ide> // ValidateContextDirectory checks if all the contents of the directory <ide> // can be read and returns an error if some files can't be read <ide> // symlinks which point to non-existing files don't trigger an error <add><path>cli/command/image/build/context_test.go <del><path>builder/context_test.go <del>package builder <add>package build <ide> <ide> import ( <ide> "archive/tar" <ide> "bytes" <ide> "io" <ide> "io/ioutil" <add> "os" <ide> "path/filepath" <ide> "runtime" <ide> "strings" <ide> import ( <ide> "github.com/docker/docker/pkg/archive" <ide> ) <ide> <add>const dockerfileContents = "FROM busybox" <add> <ide> var prepareEmpty = func(t *testing.T) (string, func()) { <ide> return "", func() {} <ide> } <ide> func TestValidateContextDirectoryWithOneFile(t *testing.T) { <ide> func TestValidateContextDirectoryWithOneFileExcludes(t *testing.T) { <ide> testValidateContextDirectory(t, prepareOneFile, []string{DefaultDockerfileName}) <ide> } <add> <add>// createTestTempDir creates a temporary directory for testing. <add>// It returns the created path and a cleanup function which is meant to be used as deferred call. <add>// When an error occurs, it terminates the test. <add>func createTestTempDir(t *testing.T, dir, prefix string) (string, func()) { <add> path, err := ioutil.TempDir(dir, prefix) <add> <add> if err != nil { <add> t.Fatalf("Error when creating directory %s with prefix %s: %s", dir, prefix, err) <add> } <add> <add> return path, func() { <add> err = os.RemoveAll(path) <add> <add> if err != nil { <add> t.Fatalf("Error when removing directory %s: %s", path, err) <add> } <add> } <add>} <add> <add>// createTestTempSubdir creates a temporary directory for testing. <add>// It returns the created path but doesn't provide a cleanup function, <add>// so createTestTempSubdir should be used only for creating temporary subdirectories <add>// whose parent directories are properly cleaned up. <add>// When an error occurs, it terminates the test. <add>func createTestTempSubdir(t *testing.T, dir, prefix string) string { <add> path, err := ioutil.TempDir(dir, prefix) <add> <add> if err != nil { <add> t.Fatalf("Error when creating directory %s with prefix %s: %s", dir, prefix, err) <add> } <add> <add> return path <add>} <add> <add>// createTestTempFile creates a temporary file within dir with specific contents and permissions. <add>// When an error occurs, it terminates the test <add>func createTestTempFile(t *testing.T, dir, filename, contents string, perm os.FileMode) string { <add> filePath := filepath.Join(dir, filename) <add> err := ioutil.WriteFile(filePath, []byte(contents), perm) <add> <add> if err != nil { <add> t.Fatalf("Error when creating %s file: %s", filename, err) <add> } <add> <add> return filePath <add>} <add> <add>// chdir changes current working directory to dir. <add>// It returns a function which changes working directory back to the previous one. <add>// This function is meant to be executed as a deferred call. <add>// When an error occurs, it terminates the test. 
<add>func chdir(t *testing.T, dir string) func() { <add> workingDirectory, err := os.Getwd() <add> <add> if err != nil { <add> t.Fatalf("Error when retrieving working directory: %s", err) <add> } <add> <add> err = os.Chdir(dir) <add> <add> if err != nil { <add> t.Fatalf("Error when changing directory to %s: %s", dir, err) <add> } <add> <add> return func() { <add> err = os.Chdir(workingDirectory) <add> <add> if err != nil { <add> t.Fatalf("Error when changing back to working directory (%s): %s", workingDirectory, err) <add> } <add> } <add>} <add><path>cli/command/image/build/context_unix.go <del><path>builder/context_unix.go <ide> // +build !windows <ide> <del>package builder <add>package build <ide> <ide> import ( <ide> "path/filepath" <add><path>cli/command/image/build/context_windows.go <del><path>builder/context_windows.go <ide> // +build windows <ide> <del>package builder <add>package build <ide> <ide> import ( <ide> "path/filepath"
6
Python
Python
fix imports in diagnose and excise old crap
9596a895dd01c27b9984fe8ff20e151eda41b9b8
<ide><path>numpy/f2py/diagnose.py <ide> #!/usr/bin/env python <ide> <del>import os,sys,tempfile <add>import os <add>import sys <add>import tempfile <ide> <ide> def run_command(cmd): <ide> print 'Running %r:' % (cmd) <ide> def run(): <ide> print '------' <ide> print 'sys.path=%r' % (':'.join(sys.path)) <ide> print '------' <del> try: <del> import Numeric <del> has_Numeric = 1 <del> except ImportError: <del> print 'Failed to import Numeric:',sys.exc_value <del> has_Numeric = 0 <del> try: <del> import numarray <del> has_numarray = 1 <del> except ImportError: <del> print 'Failed to import numarray:',sys.exc_value <del> has_numarray = 0 <add> <ide> try: <ide> import numpy <ide> has_newnumpy = 1 <ide> except ImportError: <ide> print 'Failed to import new numpy:', sys.exc_value <ide> has_newnumpy = 0 <add> <ide> try: <del> import f2py2e <add> from numpy.f2py import f2py2e <ide> has_f2py2e = 1 <ide> except ImportError: <ide> print 'Failed to import f2py2e:',sys.exc_value <ide> has_f2py2e = 0 <add> <ide> try: <ide> import numpy.distutils <ide> has_numpy_distutils = 2 <ide> def run(): <ide> except ImportError: <ide> print 'Failed to import numpy_distutils:',sys.exc_value <ide> has_numpy_distutils = 0 <del> if has_Numeric: <del> try: <del> print 'Found Numeric version %r in %s' % \ <del> (Numeric.__version__,Numeric.__file__) <del> except Exception,msg: <del> print 'error:',msg <del> print '------' <del> if has_numarray: <del> try: <del> print 'Found numarray version %r in %s' % \ <del> (numarray.__version__,numarray.__file__) <del> except Exception,msg: <del> print 'error:',msg <del> print '------' <add> <ide> if has_newnumpy: <ide> try: <ide> print 'Found new numpy version %r in %s' % \ <ide> (numpy.__version__, numpy.__file__) <ide> except Exception,msg: <ide> print 'error:', msg <ide> print '------' <add> <ide> if has_f2py2e: <ide> try: <ide> print 'Found f2py2e version %r in %s' % \ <ide> (f2py2e.__version__.version,f2py2e.__file__) <ide> except Exception,msg: <ide> print 'error:',msg <ide> print '------' <add> <ide> if has_numpy_distutils: <ide> try: <del> if has_numpy_distutils==2: <add> if has_numpy_distutils == 2: <ide> print 'Found numpy.distutils version %r in %r' % (\ <ide> numpy.distutils.__version__, <ide> numpy.distutils.__file__) <ide> def run(): <ide> print 'error:',msg <ide> print '------' <ide> try: <del> if has_numpy_distutils==1: <add> if has_numpy_distutils == 1: <ide> print 'Importing numpy_distutils.command.build_flib ...', <ide> import numpy_distutils.command.build_flib as build_flib <ide> print 'ok' <ide> def run(): <ide> print 'error:',msg,'(ignore it, build_flib is obsolute for numpy.distutils 0.2.2 and up)' <ide> print '------' <ide> try: <del> if has_numpy_distutils==2: <add> if has_numpy_distutils == 2: <ide> print 'Importing numpy.distutils.fcompiler ...', <ide> import numpy.distutils.fcompiler as fcompiler <ide> else: <ide> def run(): <ide> print 'error:',msg <ide> print '------' <ide> try: <del> if has_numpy_distutils==2: <add> if has_numpy_distutils == 2: <ide> print 'Importing numpy.distutils.cpuinfo ...', <ide> from numpy.distutils.cpuinfo import cpuinfo <ide> print 'ok'
1
Javascript
Javascript
replace fixturesdir with the fixtures module
1a9f48a87591f3d1194c4051b9b867a38ba2bccc
<ide><path>test/parallel/test-http-default-port.js <ide> const common = require('../common'); <ide> if (!common.hasCrypto) <ide> common.skip('missing crypto'); <ide> <add>const fixtures = require('../common/fixtures'); <ide> const http = require('http'); <ide> const https = require('https'); <ide> const assert = require('assert'); <ide> const hostExpect = 'localhost'; <del>const fs = require('fs'); <del>const path = require('path'); <del>const fixtures = path.join(common.fixturesDir, 'keys'); <ide> const options = { <del> key: fs.readFileSync(`${fixtures}/agent1-key.pem`), <del> cert: fs.readFileSync(`${fixtures}/agent1-cert.pem`) <add> key: fixtures.readKey('agent1-key.pem'), <add> cert: fixtures.readKey('agent1-cert.pem') <ide> }; <ide> let gotHttpsResp = false; <ide> let gotHttpResp = false;
1
Mixed
Ruby
fix mounting engines inside a resources block
e6c602da9046a653747ce99c9cab7f08f572fa40
<ide><path>actionpack/CHANGELOG.md <add>* Fix generating a path for engine inside a resources block (#8533) <add> <add> *Piotr Sarnacki* <add> <ide> * Add Mime::Type.register "text/vcard", :vcf to the default list of mime types <ide> <ide> *DHH* <ide><path>actionpack/lib/action_dispatch/routing/mapper.rb <ide> def mount(app, options = nil) <ide> raise "A rack application must be specified" unless path <ide> <ide> options[:as] ||= app_name(app) <add> target_as = name_for_action(options[:as], path) <ide> options[:via] ||= :all <ide> <ide> match(path, options.merge(:to => app, :anchor => false, :format => false)) <ide> <del> define_generate_prefix(app, options[:as]) <add> define_generate_prefix(app, target_as) <ide> self <ide> end <ide> <ide><path>actionpack/test/dispatch/mount_test.rb <ide> class TestRoutingMount < ActionDispatch::IntegrationTest <ide> <ide> class FakeEngine <ide> def self.routes <del> Object.new <add> @routes ||= ActionDispatch::Routing::RouteSet.new <ide> end <ide> <ide> def self.call(env) <ide> def self.call(env) <ide> scope "/its_a" do <ide> mount SprocketsApp, :at => "/sprocket" <ide> end <add> <add> resources :users do <add> mount FakeEngine, :at => "/fakeengine", :as => :fake_mounted_at_resource <add> end <ide> end <ide> <ide> def app <ide> Router <ide> end <ide> <add> def test_app_name_is_properly_generated_when_engine_is_mounted_in_resources <add> assert Router.mounted_helpers.method_defined?(:user_fake_mounted_at_resource), <add> "A mounted helper should be defined with a parent's prefix" <add> assert Router.named_routes.routes[:user_fake_mounted_at_resource], <add> "A named route should be defined with a parent's prefix" <add> end <add> <ide> def test_trailing_slash_is_not_removed_from_path_info <ide> get "/sprockets/omg/" <ide> assert_equal "/sprockets -- /omg/", response.body
3
Go
Go
close the container's stdin when the process dies
47607494027be3d37cbb01a07e026e99f9c5151b
<ide><path>container.go <ide> func (container *Container) monitor() { <ide> if err := container.releaseNetwork(); err != nil { <ide> log.Printf("%v: Failed to release network: %v", container.Id, err) <ide> } <add> if container.Config.OpenStdin { <add> if err := container.stdin.Close(); err != nil { <add> Debugf("%s: Error close stdin: %s", container.Id, err) <add> } <add> } <ide> if err := container.stdout.Close(); err != nil { <ide> Debugf("%s: Error close stdout: %s", container.Id, err) <ide> }
1
PHP
PHP
use string-based accessor for schema facade
8059b393eb45749d7e8840a41f33c99b2f4acafd
<ide><path>src/Illuminate/Database/DatabaseServiceProvider.php <ide> protected function registerConnectionServices() <ide> return $app['db']->connection(); <ide> }); <ide> <add> $this->app->bind('db.schema', function ($app) { <add> return $app['db']->connection()->getSchemaBuilder(); <add> }); <add> <ide> $this->app->singleton('db.transactions', function ($app) { <ide> return new DatabaseTransactionsManager; <ide> }); <ide><path>src/Illuminate/Foundation/Application.php <ide> public function registerCoreContainerAliases() <ide> 'cookie' => [\Illuminate\Cookie\CookieJar::class, \Illuminate\Contracts\Cookie\Factory::class, \Illuminate\Contracts\Cookie\QueueingFactory::class], <ide> 'db' => [\Illuminate\Database\DatabaseManager::class, \Illuminate\Database\ConnectionResolverInterface::class], <ide> 'db.connection' => [\Illuminate\Database\Connection::class, \Illuminate\Database\ConnectionInterface::class], <add> 'db.schema' => [\Illuminate\Database\Schema\Builder::class], <ide> 'encrypter' => [\Illuminate\Encryption\Encrypter::class, \Illuminate\Contracts\Encryption\Encrypter::class, \Illuminate\Contracts\Encryption\StringEncrypter::class], <ide> 'events' => [\Illuminate\Events\Dispatcher::class, \Illuminate\Contracts\Events\Dispatcher::class], <ide> 'files' => [\Illuminate\Filesystem\Filesystem::class], <ide><path>src/Illuminate/Support/Facades/Facade.php <ide> abstract class Facade <ide> */ <ide> protected static $resolvedInstance; <ide> <add> /** <add> * Determine if the resolved facade should be cached. <add> * <add> * @var bool <add> */ <add> protected static $cached = true; <add> <ide> /** <ide> * Run a Closure when the facade has been resolved. <ide> * <ide> public static function shouldReceive() <ide> $name = static::getFacadeAccessor(); <ide> <ide> $mock = static::isMock() <del> ? static::$resolvedInstance[$name] <del> : static::createFreshMockInstance(); <add> ? static::$resolvedInstance[$name] <add> : static::createFreshMockInstance(); <ide> <ide> return $mock->shouldReceive(...func_get_args()); <ide> } <ide> protected static function getFacadeAccessor() <ide> /** <ide> * Resolve the facade root instance from the container. <ide> * <del> * @param object|string $name <add> * @param string $name <ide> * @return mixed <ide> */ <ide> protected static function resolveFacadeInstance($name) <ide> { <del> if (is_object($name)) { <del> return $name; <del> } <del> <ide> if (isset(static::$resolvedInstance[$name])) { <ide> return static::$resolvedInstance[$name]; <ide> } <del> <add> dump(array_keys(static::$resolvedInstance)); <ide> if (static::$app) { <del> return static::$resolvedInstance[$name] = static::$app[$name]; <add> if (static::$cached) { <add> return static::$resolvedInstance[$name] = static::$app[$name]; <add> } <add> <add> return static::$app[$name]; <ide> } <ide> } <ide> <ide><path>src/Illuminate/Support/Facades/RateLimiter.php <ide> class RateLimiter extends Facade <ide> */ <ide> protected static function getFacadeAccessor() <ide> { <del> return 'Illuminate\Cache\RateLimiter'; <add> return \Illuminate\Cache\RateLimiter::class; <ide> } <ide> } <ide><path>src/Illuminate/Support/Facades/Schema.php <ide> */ <ide> class Schema extends Facade <ide> { <add> /** <add> * Determine if the resolved facade should be cached. <add> * <add> * @var bool <add> */ <add> protected static $cached = false; <add> <ide> /** <ide> * Get a schema builder instance for a connection. 
<ide> * <ide> public static function connection($name) <ide> } <ide> <ide> /** <del> * Get a schema builder instance for the default connection. <add> * Get the registered name of the component. <ide> * <del> * @return \Illuminate\Database\Schema\Builder <add> * @return string <ide> */ <ide> protected static function getFacadeAccessor() <ide> { <del> return static::$app['db']->connection()->getSchemaBuilder(); <add> return 'db.schema'; <ide> } <ide> } <ide><path>tests/Database/DatabaseMigratorIntegrationTest.php <ide> protected function setUp(): void <ide> <ide> $container = new Container; <ide> $container->instance('db', $db->getDatabaseManager()); <add> $container->bind('db.schema', function ($app) { <add> return $app['db']->connection()->getSchemaBuilder(); <add> }); <ide> <ide> Facade::setFacadeApplication($container); <ide>
6
Javascript
Javascript
restore missing break
a0c68ec70b5a6e87e1ffcba0d61a63efc4b6aa60
<ide><path>src/text-editor.js <ide> class TextEditor { <ide> this.component.scheduleUpdate() <ide> } <ide> } <add> break <ide> <ide> case 'placeholderText': <ide> if (value !== this.placeholderText) {
1
Javascript
Javascript
use switch command from javascript
4eb8a951ffd60dfdb4b05145d9d36a8b493a4cd7
<ide><path>Libraries/Components/Switch/Switch.js <ide> const StyleSheet = require('../../StyleSheet/StyleSheet'); <ide> import AndroidSwitchNativeComponent, { <ide> Commands as AndroidSwitchCommands, <ide> } from './AndroidSwitchNativeComponent'; <add>import SwitchNativeComponent, { <add> Commands as SwitchCommands, <add>} from './SwitchNativeComponent'; <ide> <ide> import type {ColorValue} from '../../StyleSheet/StyleSheetTypes'; <ide> import type {SyntheticEvent} from '../../Types/CoreEventTypes'; <ide> import type {ViewProps} from '../View/ViewPropTypes'; <del>import SwitchNativeComponent from './SwitchNativeComponent'; <ide> <ide> type SwitchChangeEvent = SyntheticEvent< <ide> $ReadOnly<{| <ide> class Switch extends React.Component<Props> { <ide> const nativeProps = {}; <ide> const value = this.props.value === true; <ide> <del> if (this._lastNativeValue !== value && typeof value === 'boolean') { <add> if (this._lastNativeValue !== value) { <ide> nativeProps.value = value; <ide> } <ide> <ide> class Switch extends React.Component<Props> { <ide> nativeProps.value, <ide> ); <ide> } else { <del> this._nativeSwitchRef.setNativeProps(nativeProps); <add> SwitchCommands.setValue(this._nativeSwitchRef, nativeProps.value); <ide> } <ide> } <ide> } <ide><path>Libraries/Components/Switch/SwitchNativeComponent.js <ide> import type {BubblingEventHandler, WithDefault} from '../../Types/CodegenTypes'; <ide> import type {ColorValue} from '../../StyleSheet/StyleSheetTypes'; <ide> import type {ViewProps} from '../View/ViewPropTypes'; <add>import * as React from 'react'; <ide> <ide> import codegenNativeComponent from '../../Utilities/codegenNativeComponent'; <add>import codegenNativeCommands from 'react-native/Libraries/Utilities/codegenNativeCommands'; <ide> import type {HostComponent} from '../../Renderer/shims/ReactNativeTypes'; <ide> <ide> type SwitchChangeEvent = $ReadOnly<{| <ide> type NativeProps = $ReadOnly<{| <ide> onChange?: ?BubblingEventHandler<SwitchChangeEvent>, <ide> |}>; <ide> <add>type ComponentType = HostComponent<NativeProps>; <add> <add>interface NativeCommands { <add> +setValue: (viewRef: React.ElementRef<ComponentType>, value: boolean) => void; <add>} <add> <add>export const Commands: NativeCommands = codegenNativeCommands<NativeCommands>({ <add> supportedCommands: ['setValue'], <add>}); <add> <ide> export default (codegenNativeComponent<NativeProps>('Switch', { <ide> paperComponentName: 'RCTSwitch', <del>}): HostComponent<NativeProps>); <add> excludedPlatform: 'android', <add>}): ComponentType);
2
Javascript
Javascript
track parentid for reactperf
ab0ef89ec77962a39e3621ab4ce46cc073d3d7b2
<ide><path>src/isomorphic/devtools/ReactComponentTreeDevtool.js <ide> var ReactComponentTreeDevtool = { <ide> updateTree(id, item => item.displayName = displayName); <ide> }, <ide> <del> onSetChildren(id, childIDs) { <del> childIDs.forEach(childID => { <del> var childItem = tree[childID]; <del> expect(childItem).toBeDefined(); <del> expect(childItem.isComposite).toBeDefined(); <del> expect(childItem.displayName).toBeDefined(); <del> expect(childItem.childIDs || childItem.text).toBeDefined(); <add> onSetChildren(id, nextChildIDs) { <add> var prevChildIDs; <add> updateTree(id, item => { <add> prevChildIDs = item.childIDs || []; <add> item.childIDs = nextChildIDs; <ide> }); <ide> <del> updateTree(id, item => item.childIDs = childIDs); <add> prevChildIDs.forEach(prevChildID => { <add> if (tree[prevChildID] && nextChildIDs.indexOf(prevChildID) === -1) { <add> tree[prevChildID].parentID = null; <add> } <add> }); <add> <add> nextChildIDs.forEach(nextChildID => { <add> var item = tree[nextChildID]; <add> expect(item).toBeDefined(); <add> expect(item.isComposite).toBeDefined(); <add> expect(item.displayName).toBeDefined(); <add> expect(item.childIDs || item.text).toBeDefined(); <add> <add> if (tree[nextChildID] && prevChildIDs.indexOf(nextChildID) === -1) { <add> tree[nextChildID].parentID = id; <add> } <add> }); <ide> }, <ide> <del> onSetOwner(id, ownerDebugID) { <del> updateTree(id, item => item.ownerDebugID = ownerDebugID); <add> onSetOwner(id, ownerID) { <add> updateTree(id, item => item.ownerID = ownerID); <ide> }, <ide> <ide> onSetText(id, text) { <ide><path>src/isomorphic/devtools/__tests__/ReactComponentTreeDevtool-test.js <ide> describe('ReactComponentTreeDevtool', () => { <ide> ReactDebugTool.removeDevtool(ReactComponentTreeDevtool); <ide> }); <ide> <del> function denormalizeTree(tree, rootID, includeOwner) { <add> function denormalizeTree( <add> tree, <add> rootID, <add> includeOwner = false, <add> expectedParentID = null <add> ) { <ide> var item = tree[rootID]; <ide> var result = { <ide> isComposite: item.isComposite, <ide> displayName: item.displayName, <ide> }; <add> <add> if (expectedParentID) { <add> expect(item.parentID).toBe(expectedParentID); <add> } <add> <ide> if (item.childIDs) { <ide> result.children = item.childIDs.map(childID => <del> denormalizeTree(tree, childID, includeOwner) <add> denormalizeTree(tree, childID, includeOwner, rootID) <ide> ); <ide> } <ide> if (item.text != null) { <ide> result.text = item.text; <ide> } <del> if (includeOwner && item.ownerDebugID) { <del> result.ownerDisplayName = tree[item.ownerDebugID].displayName; <add> if (includeOwner && item.ownerID) { <add> result.ownerDisplayName = tree[item.ownerID].displayName; <ide> } <ide> return result; <ide> }
2
Javascript
Javascript
propagate originating parent
b8ce1da74d6ca01a7da2921b5911aa05aeaa6582
<ide><path>lib/buffer.js <ide> Buffer.dispose = function(obj) { <ide> }; <ide> <ide> <add>// pre-set for values that may exist in the future <add>Buffer.prototype.length = undefined; <add>Buffer.prototype.parent = undefined; <add> <add> <ide> // toString(encoding, start=0, end=buffer.length) <ide> Buffer.prototype.toString = function(encoding, start, end) { <ide> encoding = !!encoding ? (encoding + '').toLowerCase() : 'utf8'; <ide> Buffer.prototype.slice = function(start, end) { <ide> end = start; <ide> <ide> var buf = new Buffer(); <del> buf.parent = sliceOnto(this, buf, start, end); <add> sliceOnto(this, buf, start, end); <add> buf.parent = this.parent === undefined ? this : this.parent; <ide> buf.length = end - start; <ide> <ide> return buf; <ide><path>test/simple/test-buffer.js <ide> var common = require('../common'); <ide> var assert = require('assert'); <ide> <ide> var Buffer = require('buffer').Buffer; <add>var SlowBuffer = require('buffer').SlowBuffer; <ide> <ide> // counter to ensure unique value is always copied <ide> var cntr = 0; <ide> for (var j = 0; j < 100; j++) { <ide> } <ide> <ide> <add>// make sure only top level parent propagates from allocPool <add>var b = new Buffer(5); <add>var c = b.slice(0, 4); <add>var d = c.slice(0, 2); <add>assert.equal(b.parent, c.parent); <add>assert.equal(b.parent, d.parent); <add> <add>// also from a non-pooled instance <add>var b = new SlowBuffer(5); <add>var c = b.slice(0, 4); <add>var d = c.slice(0, 2); <add>assert.equal(b, c.parent); <add>assert.equal(b, d.parent); <add> <add> <ide> <ide> // Bug regression test <ide> var testValue = '\u00F6\u65E5\u672C\u8A9E'; // ö日本語
2
Ruby
Ruby
pass printer class to output_filename
01a4af4a74fdcda9a20db8de01fd7bb5d28d22cf
<ide><path>activesupport/lib/active_support/testing/performance.rb <ide> def record_benchmark(test_name, data, measure_mode) <ide> <ide> def record_profile(test_name, data, measure_mode) <ide> printer_classes.each do |printer_class| <del> fname = output_filename(test_name, printer, measure_mode) <add> fname = output_filename(test_name, printer_class, measure_mode) <ide> <ide> FileUtils.mkdir_p(File.dirname(fname)) <ide> File.open(fname, 'wb') do |file| <ide> def record_profile(test_name, data, measure_mode) <ide> end <ide> <ide> # The report filename is test_name + measure_mode + report_type <del> def output_filename(test_name, printer, measure_mode) <add> def output_filename(test_name, printer_class, measure_mode) <ide> suffix = <del> case printer <del> when RubyProf::FlatPrinter; 'flat.txt' <del> when RubyProf::GraphPrinter; 'graph.txt' <del> when RubyProf::GraphHtmlPrinter; 'graph.html' <del> when RubyProf::CallTreePrinter; 'tree.txt' <del> else printer.to_s.downcase <add> case printer_class.name.demodulize <add> when 'FlatPrinter'; 'flat.txt' <add> when 'GraphPrinter'; 'graph.txt' <add> when 'GraphHtmlPrinter'; 'graph.html' <add> when 'CallTreePrinter'; 'tree.txt' <add> else printer_class.name.sub(/Printer$/, '').underscore <ide> end <ide> <ide> "#{profile_options[:output]}/#{test_name}_#{ActiveSupport::Testing::Performance::Util.metric_name(measure_mode)}_#{suffix}"
1
Text
Text
update path to tf_record script
a8cc5eddbef8539c81c3f6166972019ed445f41f
<ide><path>research/object_detection/g3doc/running_pets.md <ide> Oxford-IIIT Pet dataset into TFRecords. Run the following commands from the <ide> <ide> ``` bash <ide> # From tensorflow/models/research/ <del>python object_detection/create_pet_tf_record.py \ <add>python object_detection/dataset_tools/create_pet_tf_record.py \ <ide> --label_map_path=object_detection/data/pet_label_map.pbtxt \ <ide> --data_dir=`pwd` \ <ide> --output_dir=`pwd`
1
Text
Text
add usage for updating examples
5e59d8a8b656bcd3aaa3ac27601ad7f98433c5d0
<ide><path>_SETUP.md <ide> or incremental (in watch mode) <ide> ```bash <ide> yarn type-lint --watch <ide> ``` <add> <add>### To update all examples use <add> <add>```bash <add>yarn build:examples <add>``` <add> <add>### To update some example use <add> <add>```bash <add>cd examples/(some example dir) <add>node build.js <add>```
1
PHP
PHP
fix bug and shorten exception message
85936b3c20cc06ea527e85e3389d7a20b696a50f
<ide><path>laravel/redis.php <ide> public function run($method, $parameters) <ide> { <ide> fwrite($this->connect(), $this->command($method, (array) $parameters)); <ide> <del> $ersponse = trim(fgets($this->connection, 512)); <add> $response = trim(fgets($this->connection, 512)); <ide> <del> switch (substr($ersponse, 0, 1)) <add> switch (substr($response, 0, 1)) <ide> { <ide> case '-': <del> throw new \RuntimeException('Redis error: '.substr(trim($ersponse), 4)); <add> throw new \RuntimeException('Redis error: '.substr(trim($response), 4)); <ide> <ide> case '+': <ide> case ':': <del> return $this->inline($ersponse); <add> return $this->inline($response); <ide> <ide> case '$': <del> return $this->bulk($ersponse); <add> return $this->bulk($response); <ide> <ide> case '*': <del> return $this->multibulk($ersponse); <add> return $this->multibulk($response); <ide> <ide> default: <del> throw new \UnexpectedValueException("Unknown response from Redis server: ".substr($ersponse, 0, 1)); <add> throw new \UnexpectedValueException("Unknown Redis response: ".substr($response, 0, 1)); <ide> } <ide> } <ide> <ide> public function __destruct() <ide> fclose($this->connection); <ide> } <ide> <del>} <add>} <ide>\ No newline at end of file
1
Mixed
Python
remove non-working --use-chars from train cli
5ceac425eeb02f9c5c6f952d8b4117022b3e649b
<ide><path>spacy/cli/train.py <ide> conv_depth=("Depth of CNN layers of Tok2Vec component", "option", "cd", int), <ide> cnn_window=("Window size for CNN layers of Tok2Vec component", "option", "cW", int), <ide> cnn_pieces=("Maxout size for CNN layers of Tok2Vec component. 1 for Mish", "option", "cP", int), <del> use_chars=("Whether to use character-based embedding of Tok2Vec component", "flag", "chr", bool), <ide> bilstm_depth=("Depth of BiLSTM layers of Tok2Vec component (requires PyTorch)", "option", "lstm", int), <ide> embed_rows=("Number of embedding rows of Tok2Vec component", "option", "er", int), <ide> n_iter=("Number of iterations", "option", "n", int), <ide> def train( <ide> conv_depth=4, <ide> cnn_window=1, <ide> cnn_pieces=3, <del> use_chars=False, <ide> bilstm_depth=0, <ide> embed_rows=2000, <ide> n_iter=30, <ide> def train( <ide> cfg["cnn_maxout_pieces"] = cnn_pieces <ide> cfg["embed_size"] = embed_rows <ide> cfg["conv_window"] = cnn_window <del> cfg["subword_features"] = not use_chars <ide> optimizer = nlp.begin_training(lambda: corpus.train_tuples, **cfg) <ide> <ide> nlp._optimizer = None <ide><path>website/docs/api/cli.md <ide> $ python -m spacy train [lang] [output_path] [train_path] [dev_path] <ide> | `--conv-depth`, `-cd` <Tag variant="new">2.2.4</Tag> | option | Depth of CNN layers of `Tok2Vec` component. | <ide> | `--cnn-window`, `-cW` <Tag variant="new">2.2.4</Tag> | option | Window size for CNN layers of `Tok2Vec` component. | <ide> | `--cnn-pieces`, `-cP` <Tag variant="new">2.2.4</Tag> | option | Maxout size for CNN layers of `Tok2Vec` component. | <del>| `--use-chars`, `-chr` <Tag variant="new">2.2.4</Tag> | flag | Whether to use character-based embedding of `Tok2Vec` component. | <ide> | `--bilstm-depth`, `-lstm` <Tag variant="new">2.2.4</Tag> | option | Depth of BiLSTM layers of `Tok2Vec` component (requires PyTorch). | <ide> | `--embed-rows`, `-er` <Tag variant="new">2.2.4</Tag> | option | Number of embedding rows of `Tok2Vec` component. | <ide> | `--noise-level`, `-nl` | option | Float indicating the amount of corruption for data augmentation. |
2
Javascript
Javascript
enhance test to catch bug reported in
fb74992454f9cfa1027523a3448036e07fc67f20
<ide><path>spec/tooltip-manager-spec.js <ide> describe('TooltipManager', () => { <ide> const element2 = document.createElement('div') <ide> jasmine.attachToDOM(element2) <ide> <del> const fakeJqueryWrapper = [element, element2] <del> fakeJqueryWrapper.jquery = 'any-version' <add> const fakeJqueryWrapper = { <add> 0: element, <add> 1: element2, <add> length: 2, <add> jquery: 'any-version' <add> } <ide> const disposable = manager.add(fakeJqueryWrapper, {title: 'Title'}) <ide> <ide> hover(element, () => expect(document.body.querySelector('.tooltip')).toHaveText('Title'))
1
Go
Go
add reference counting to aufs
5b6b8df0c1b5a54ae9a717810eedf9fc971e1321
<ide><path>daemon/graphdriver/aufs/aufs.go <ide> type Driver struct { <ide> root string <ide> uidMaps []idtools.IDMap <ide> gidMaps []idtools.IDMap <add> ctr *graphdriver.RefCounter <ide> pathCacheLock sync.Mutex <ide> pathCache map[string]string <ide> } <ide> func Init(root string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap <ide> uidMaps: uidMaps, <ide> gidMaps: gidMaps, <ide> pathCache: make(map[string]string), <add> ctr: graphdriver.NewRefCounter(graphdriver.NewFsChecker(graphdriver.FsMagicAufs)), <ide> } <ide> <ide> rootUID, rootGID, err := idtools.GetRootUIDGID(uidMaps, gidMaps) <ide> func (a *Driver) Get(id, mountLabel string) (string, error) { <ide> m = a.getMountpoint(id) <ide> } <ide> } <add> if count := a.ctr.Increment(m); count > 1 { <add> return m, nil <add> } <ide> <ide> // If a dir does not have a parent ( no layers )do not try to mount <ide> // just return the diff path to the data <ide> func (a *Driver) Put(id string) error { <ide> a.pathCache[id] = m <ide> } <ide> a.pathCacheLock.Unlock() <add> if count := a.ctr.Decrement(m); count > 0 { <add> return nil <add> } <ide> <ide> err := a.unmount(m) <ide> if err != nil { <ide><path>daemon/graphdriver/counter.go <ide> type RefCounter struct { <ide> <ide> // NewRefCounter returns a new RefCounter <ide> func NewRefCounter(c Checker) *RefCounter { <del> if c == nil { <del> c = &defaultChecker{} <del> } <ide> return &RefCounter{ <ide> checker: c, <ide> counts: make(map[string]*minfo), <ide><path>daemon/graphdriver/devmapper/driver.go <ide> func Init(home string, options []string, uidMaps, gidMaps []idtools.IDMap) (grap <ide> home: home, <ide> uidMaps: uidMaps, <ide> gidMaps: gidMaps, <del> ctr: graphdriver.NewRefCounter(nil), <add> ctr: graphdriver.NewRefCounter(graphdriver.NewDefaultChecker()), <ide> } <ide> <ide> return graphdriver.NewNaiveDiffDriver(d, uidMaps, gidMaps), nil <ide><path>daemon/graphdriver/driver.go <ide> type FileGetCloser interface { <ide> Close() error <ide> } <ide> <add>// Checker makes checks on specified filesystems. <add>type Checker interface { <add> // IsMounted returns true if the provided path is mounted for the specific checker <add> IsMounted(path string) bool <add>} <add> <ide> func init() { <ide> drivers = make(map[string]InitFunc) <ide> } <ide><path>daemon/graphdriver/driver_linux.go <ide> func GetFSMagic(rootpath string) (FsMagic, error) { <ide> return FsMagic(buf.Type), nil <ide> } <ide> <del>// Checker makes checks on specified filesystems. <del>type Checker interface { <del> // IsMounted returns true if the provided path is mounted for the specific checker <del> IsMounted(path string) bool <del>} <del> <ide> // NewFsChecker returns a checker configured for the provied FsMagic <ide> func NewFsChecker(t FsMagic) Checker { <ide> return &fsChecker{ <ide> func (c *fsChecker) IsMounted(path string) bool { <ide> return m <ide> } <ide> <add>// NewDefaultChecker returns a check that parses /proc/mountinfo to check <add>// if the specified path is mounted. <add>func NewDefaultChecker() Checker { <add> return &defaultChecker{} <add>} <add> <ide> type defaultChecker struct { <ide> } <ide>
5
Javascript
Javascript
remove unnecessary target assignment
ab72cce9946ac6812cdf5eb7fefe5d8753cef552
<ide><path>lib/LoaderTargetPlugin.js <ide> class LoaderTargetPlugin { <ide> } <ide> <ide> apply(compiler) { <del> let target = this.target; <ide> compiler.plugin("compilation", (compilation) => { <del> compilation.plugin("normal-module-loader", (loaderContext) => loaderContext.target = target); <add> compilation.plugin("normal-module-loader", (loaderContext) => loaderContext.target = this.target); <ide> }); <ide> } <ide> }
1