content_type (stringclasses, 8 values) | main_lang (stringclasses, 7 values) | message (stringlengths, 1-50) | sha (stringlengths, 40) | patch (stringlengths, 52-962k) | file_count (int64, 1-300)
---|---|---|---|---|---
Text | Text | update changelog for 1.7.0-beta.2 | 44b99c9714fc3e53b1d8cf433cbcdbe47d0eae84 | <ide><path>CHANGELOG.md
<ide> # Ember Changelog
<ide>
<add>### Ember 1.7.0-beta.2 (July, 16, 2014)
<add>
<add>* [BUGFIX] Wrap es3 keywords in quotes.
<add>* [BUGFIX] Use injected integration test helpers instead of local functions.
<add>* [BUGFIX] Add alias descriptor, and replace `Ember.computed.alias` with new descriptor.
<add>* [BUGFIX] Fix `{{#with view.foo as bar}}`.
<add>* [BUGFIX] Force remove `required` attribute for IE8.
<add>* [BUGFIX] Controller precendence for `Ember.Route.prototype.render` updated.
<add>* [BUGFIX] fixes variable argument passing to triggerEvent helper.
<add>* [BUGFIX] Use view:toplevel for {{view}} instead of view:default.
<add>* [BUGFIX] Do not throw uncaught errors mid-transition.
<add>* [BUGFIX] Don't assume that the router has a container.
<add>
<ide> ### Ember 1.7.0-beta.1 (July, 8, 2014)
<ide>
<ide> * Fix components inside group helper. | 1 |
Javascript | Javascript | improve offset test setup and labels | 9e121482a532d61aa36d7b314ee46dd1ac40f29e | <ide><path>test/data/iframeTest.js
<del>
<ide> window.startIframeTest = function() {
<ide> var args = Array.prototype.slice.call( arguments );
<ide>
<ide><path>test/data/testinit.js
<ide> this.testIframe = function( title, fileName, func ) {
<ide> var iframe;
<ide> var done = assert.async();
<ide>
<add> // Test iframes are expected to invoke this via startIframeTest (cf. iframeTest.js)
<ide> window.iframeCallback = function() {
<ide> var args = Array.prototype.slice.call( arguments );
<ide>
<ide><path>test/unit/offset.js
<ide> if ( !jQuery.fn.offset ) {
<ide> return;
<ide> }
<ide>
<del>var supportsScroll, supportsFixedPosition,
<del> forceScroll = jQuery( "<div/>" ).css( { width: 2000, height: 2000 } ),
<del> checkSupport = function() {
<add>var supportsFixedPosition, supportsScroll, alwaysScrollable,
<add> forceScroll = supportjQuery( "<div/>" ).css( { width: 2000, height: 2000 } ),
<add> checkSupport = function( assert ) {
<ide>
<ide> // Only run once
<ide> checkSupport = false;
<ide>
<del> var checkFixed = jQuery( "<div/>" ).css( { position: "fixed", top: "20px" } ).appendTo( "#qunit-fixture" );
<add> var checkFixed = supportjQuery( "<div/>" )
<add> .css( { position: "fixed", top: "20px" } )
<add> .appendTo( "#qunit-fixture" );
<add> supportsFixedPosition = checkFixed[ 0 ].offsetTop === 20;
<add> checkFixed.remove();
<ide>
<del> // Must append to body because #qunit-fixture is hidden and elements inside it don't have a scrollTop
<add> // Append forceScroll to the body instead of #qunit-fixture because the latter is hidden
<ide> forceScroll.appendTo( "body" );
<ide> window.scrollTo( 200, 200 );
<ide> supportsScroll = document.documentElement.scrollTop || document.body.scrollTop;
<ide> forceScroll.detach();
<ide>
<del> supportsFixedPosition = checkFixed[ 0 ].offsetTop === 20;
<del> checkFixed.remove();
<add> // Support: iOS <=7
<add> // Hijack the iframe test infrastructure to detect viewport scrollability
<add> // for pages with position:fixed document element
<add> var done = assert.async(),
<add> $iframe = supportjQuery( "<iframe/>" )
<add> .css( { position: "absolute", width: "50px", left: "-60px" } )
<add> .attr( "src", url( "./data/offset/boxes.html" ) );
<add> window.iframeCallback = function( $, win, doc ) {
<add> doc.documentElement.style.position = "fixed";
<add> alwaysScrollable = win.pageXOffset !== 0;
<add> window.iframeCallback = undefined;
<add> $iframe.remove();
<add> done();
<add> };
<add> $iframe.appendTo( document.body );
<ide> };
<ide>
<del>QUnit.module( "offset", { setup: function() {
<add>QUnit.module( "offset", { setup: function( assert ) {
<ide> if ( typeof checkSupport === "function" ) {
<del> checkSupport();
<add> checkSupport( assert );
<ide> }
<ide>
<ide> // Force a scroll value on the main window to ensure incorrect results
<ide> QUnit.test( "chaining", function( assert ) {
<ide> 512, 256, 1024, 512, 2048, 1024, position,
<ide> position !== "fixed" && "documentElement" );
<ide> },
<del> viewportScroll = { top: 2, left: 1 },
<del>
<del> alwaysScrollable = false;
<del>
<del> // Support: iOS <=7
<del> // Detect viewport scrollability for pages with position:fixed document element
<del> ( function() {
<del> var $iframe = jQuery( "<iframe/>" )
<del> .css( { position: "absolute", width: "50px", left: "-60px" } )
<del> .attr( "src", url( "./data/offset/boxes.html" ) );
<del>
<del> // Hijack the iframe test infrastructure
<del> window.iframeCallback = function( $, win, doc ) {
<del> doc.documentElement.style.position = "fixed";
<del> alwaysScrollable = win.pageXOffset !== 0;
<del> window.iframeCallback = undefined;
<del> $iframe.remove();
<del> return;
<del> };
<del>
<del> $iframe.appendTo( document.body );
<del> return;
<del> } )();
<add> viewportScroll = { top: 2, left: 1 };
<ide>
<ide> function getExpectations( htmlPos, bodyPos ) {
<ide>
<ide> QUnit.test( "chaining", function( assert ) {
<ide> assert.deepEqual(
<ide> supportjQuery.extend( {}, $( "#" + id ).offset() ),
<ide> descriptor.offset,
<del> "jQuery('#" + id + "').offset()" );
<add> "jQuery('#" + id + "').offset(): top " + descriptor.offset.top +
<add> ", left " + descriptor.offset.left );
<ide> } );
<ide>
<ide> // Verify expected relative position
<ide> supportjQuery.each( expectations, function( id, descriptor ) {
<ide> assert.deepEqual(
<ide> supportjQuery.extend( {}, $( "#" + id ).position() ),
<ide> descriptor.pos,
<del> "jQuery('#" + id + "').position()" );
<add> "jQuery('#" + id + "').position(): top " + descriptor.pos.top +
<add> ", left " + descriptor.pos.left );
<ide> } );
<ide>
<ide> // Verify that values round-trip | 3 |
Javascript | Javascript | avoid unnecessary instantiation of objects | 898020bebbf2c15a2d7734813d0c4aaa05f00f96 | <ide><path>examples/js/postprocessing/OutlinePass.js
<ide> THREE.OutlinePass = function ( resolution, scene, camera, selectedObjects ) {
<ide> var MAX_EDGE_GLOW = 4;
<ide>
<ide> this.separableBlurMaterial1 = this.getSeperableBlurMaterial( MAX_EDGE_THICKNESS );
<del> this.separableBlurMaterial1.uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
<add> this.separableBlurMaterial1.uniforms[ "texSize" ].value.set( resx, resy );
<ide> this.separableBlurMaterial1.uniforms[ "kernelRadius" ].value = 1;
<ide> this.separableBlurMaterial2 = this.getSeperableBlurMaterial( MAX_EDGE_GLOW );
<del> this.separableBlurMaterial2.uniforms[ "texSize" ].value = new THREE.Vector2( Math.round( resx / 2 ), Math.round( resy / 2 ) );
<add> this.separableBlurMaterial2.uniforms[ "texSize" ].value.set( Math.round( resx / 2 ), Math.round( resy / 2 ) );
<ide> this.separableBlurMaterial2.uniforms[ "kernelRadius" ].value = MAX_EDGE_GLOW;
<ide>
<ide> // Overlay material
<ide> THREE.OutlinePass.prototype = Object.assign( Object.create( THREE.Pass.prototype
<ide> this.renderTargetMaskDownSampleBuffer.setSize( resx, resy );
<ide> this.renderTargetBlurBuffer1.setSize( resx, resy );
<ide> this.renderTargetEdgeBuffer1.setSize( resx, resy );
<del> this.separableBlurMaterial1.uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
<add> this.separableBlurMaterial1.uniforms[ "texSize" ].value.set( resx, resy );
<ide>
<ide> resx = Math.round( resx / 2 );
<ide> resy = Math.round( resy / 2 );
<ide>
<ide> this.renderTargetBlurBuffer2.setSize( resx, resy );
<ide> this.renderTargetEdgeBuffer2.setSize( resx, resy );
<ide>
<del> this.separableBlurMaterial2.uniforms[ "texSize" ].value = new THREE.Vector2( resx, resy );
<add> this.separableBlurMaterial2.uniforms[ "texSize" ].value.set( resx, resy );
<ide>
<ide> },
<ide>
<ide> THREE.OutlinePass.prototype = Object.assign( Object.create( THREE.Pass.prototype
<ide> // Make non selected objects invisible, and draw only the selected objects, by comparing the depth buffer of non selected objects
<ide> this.changeVisibilityOfNonSelectedObjects( false );
<ide> this.renderScene.overrideMaterial = this.prepareMaskMaterial;
<del> this.prepareMaskMaterial.uniforms[ "cameraNearFar" ].value = new THREE.Vector2( this.renderCamera.near, this.renderCamera.far );
<add> this.prepareMaskMaterial.uniforms[ "cameraNearFar" ].value.set( this.renderCamera.near, this.renderCamera.far );
<ide> this.prepareMaskMaterial.uniforms[ "depthTexture" ].value = this.renderTargetDepthBuffer.texture;
<ide> this.prepareMaskMaterial.uniforms[ "textureMatrix" ].value = this.textureMatrix;
<ide> renderer.setRenderTarget( this.renderTargetMaskBuffer );
<ide> THREE.OutlinePass.prototype = Object.assign( Object.create( THREE.Pass.prototype
<ide> // 3. Apply Edge Detection Pass
<ide> this.fsQuad.material = this.edgeDetectionMaterial;
<ide> this.edgeDetectionMaterial.uniforms[ "maskTexture" ].value = this.renderTargetMaskDownSampleBuffer.texture;
<del> this.edgeDetectionMaterial.uniforms[ "texSize" ].value = new THREE.Vector2( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
<add> this.edgeDetectionMaterial.uniforms[ "texSize" ].value.set( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
<ide> this.edgeDetectionMaterial.uniforms[ "visibleEdgeColor" ].value = this.tempPulseColor1;
<ide> this.edgeDetectionMaterial.uniforms[ "hiddenEdgeColor" ].value = this.tempPulseColor2;
<ide> renderer.setRenderTarget( this.renderTargetEdgeBuffer1 );
<ide> THREE.OutlinePass.prototype = Object.assign( Object.create( THREE.Pass.prototype
<ide> uniforms: {
<ide> "depthTexture": { value: null },
<ide> "cameraNearFar": { value: new THREE.Vector2( 0.5, 0.5 ) },
<del> "textureMatrix": { value: new THREE.Matrix4() }
<add> "textureMatrix": { value: null }
<ide> },
<ide>
<ide> vertexShader: [
<ide><path>examples/jsm/postprocessing/OutlinePass.js
<ide> var OutlinePass = function ( resolution, scene, camera, selectedObjects ) {
<ide> var MAX_EDGE_GLOW = 4;
<ide>
<ide> this.separableBlurMaterial1 = this.getSeperableBlurMaterial( MAX_EDGE_THICKNESS );
<del> this.separableBlurMaterial1.uniforms[ "texSize" ].value = new Vector2( resx, resy );
<add> this.separableBlurMaterial1.uniforms[ "texSize" ].value.set( resx, resy );
<ide> this.separableBlurMaterial1.uniforms[ "kernelRadius" ].value = 1;
<ide> this.separableBlurMaterial2 = this.getSeperableBlurMaterial( MAX_EDGE_GLOW );
<del> this.separableBlurMaterial2.uniforms[ "texSize" ].value = new Vector2( Math.round( resx / 2 ), Math.round( resy / 2 ) );
<add> this.separableBlurMaterial2.uniforms[ "texSize" ].value.set( Math.round( resx / 2 ), Math.round( resy / 2 ) );
<ide> this.separableBlurMaterial2.uniforms[ "kernelRadius" ].value = MAX_EDGE_GLOW;
<ide>
<ide> // Overlay material
<ide> OutlinePass.prototype = Object.assign( Object.create( Pass.prototype ), {
<ide> this.renderTargetMaskDownSampleBuffer.setSize( resx, resy );
<ide> this.renderTargetBlurBuffer1.setSize( resx, resy );
<ide> this.renderTargetEdgeBuffer1.setSize( resx, resy );
<del> this.separableBlurMaterial1.uniforms[ "texSize" ].value = new Vector2( resx, resy );
<add> this.separableBlurMaterial1.uniforms[ "texSize" ].value.set( resx, resy );
<ide>
<ide> resx = Math.round( resx / 2 );
<ide> resy = Math.round( resy / 2 );
<ide>
<ide> this.renderTargetBlurBuffer2.setSize( resx, resy );
<ide> this.renderTargetEdgeBuffer2.setSize( resx, resy );
<ide>
<del> this.separableBlurMaterial2.uniforms[ "texSize" ].value = new Vector2( resx, resy );
<add> this.separableBlurMaterial2.uniforms[ "texSize" ].value.set( resx, resy );
<ide>
<ide> },
<ide>
<ide> OutlinePass.prototype = Object.assign( Object.create( Pass.prototype ), {
<ide> // Make non selected objects invisible, and draw only the selected objects, by comparing the depth buffer of non selected objects
<ide> this.changeVisibilityOfNonSelectedObjects( false );
<ide> this.renderScene.overrideMaterial = this.prepareMaskMaterial;
<del> this.prepareMaskMaterial.uniforms[ "cameraNearFar" ].value = new Vector2( this.renderCamera.near, this.renderCamera.far );
<add> this.prepareMaskMaterial.uniforms[ "cameraNearFar" ].value.set( this.renderCamera.near, this.renderCamera.far );
<ide> this.prepareMaskMaterial.uniforms[ "depthTexture" ].value = this.renderTargetDepthBuffer.texture;
<ide> this.prepareMaskMaterial.uniforms[ "textureMatrix" ].value = this.textureMatrix;
<ide> renderer.setRenderTarget( this.renderTargetMaskBuffer );
<ide> OutlinePass.prototype = Object.assign( Object.create( Pass.prototype ), {
<ide> // 3. Apply Edge Detection Pass
<ide> this.fsQuad.material = this.edgeDetectionMaterial;
<ide> this.edgeDetectionMaterial.uniforms[ "maskTexture" ].value = this.renderTargetMaskDownSampleBuffer.texture;
<del> this.edgeDetectionMaterial.uniforms[ "texSize" ].value = new Vector2( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
<add> this.edgeDetectionMaterial.uniforms[ "texSize" ].value.set( this.renderTargetMaskDownSampleBuffer.width, this.renderTargetMaskDownSampleBuffer.height );
<ide> this.edgeDetectionMaterial.uniforms[ "visibleEdgeColor" ].value = this.tempPulseColor1;
<ide> this.edgeDetectionMaterial.uniforms[ "hiddenEdgeColor" ].value = this.tempPulseColor2;
<ide> renderer.setRenderTarget( this.renderTargetEdgeBuffer1 );
<ide> OutlinePass.prototype = Object.assign( Object.create( Pass.prototype ), {
<ide> uniforms: {
<ide> "depthTexture": { value: null },
<ide> "cameraNearFar": { value: new Vector2( 0.5, 0.5 ) },
<del> "textureMatrix": { value: new Matrix4() }
<add> "textureMatrix": { value: null }
<ide> },
<ide>
<ide> vertexShader: [ | 2 |
Python | Python | fix string concatenation using `f-strings` | 266384a63f4693b667f308d49fcbed9a10a41fce | <ide><path>airflow/providers/amazon/aws/hooks/sns.py
<ide> def _get_message_attribute(o):
<ide> if hasattr(o, '__iter__'):
<ide> return {'DataType': 'String.Array', 'StringValue': json.dumps(o)}
<ide> raise TypeError(
<del> 'Values in MessageAttributes must be one of bytes, str, int, float, or iterable; ' f'got {type(o)}'
<add> f'Values in MessageAttributes must be one of bytes, str, int, float, or iterable; got {type(o)}'
<ide> )
<ide>
<ide>
<ide><path>airflow/providers/apache/druid/hooks/druid.py
<ide> def submit_indexing_job(self, json_index_spec: Dict[str, Any]) -> None:
<ide> if self.max_ingestion_time and sec > self.max_ingestion_time:
<ide> # ensure that the job gets killed if the max ingestion time is exceeded
<ide> requests.post(f"{url}/{druid_task_id}/shutdown", auth=self.get_auth())
<del> raise AirflowException('Druid ingestion took more than ' f'{self.max_ingestion_time} seconds')
<add> raise AirflowException(f'Druid ingestion took more than {self.max_ingestion_time} seconds')
<ide>
<ide> time.sleep(self.timeout)
<ide>
<ide><path>airflow/providers/dingding/hooks/dingding.py
<ide> def send(self) -> None:
<ide>
<ide> # Dingding success send message will with errcode equal to 0
<ide> if int(resp.json().get('errcode')) != 0:
<del> raise AirflowException('Send Dingding message failed, receive error ' f'message {resp.text}')
<add> raise AirflowException(f'Send Dingding message failed, receive error message {resp.text}')
<ide> self.log.info('Success Send Dingding message')
<ide><path>airflow/providers/jenkins/operators/jenkins_job_trigger.py
<ide> def execute(self, context: Mapping[Any, Any]) -> Optional[str]:
<ide> time.sleep(self.sleep_time)
<ide> except jenkins.NotFoundException as err:
<ide> # pylint: disable=no-member
<del> raise AirflowException(
<del> 'Jenkins job status check failed. Final error was: ' f'{err.resp.status}'
<del> )
<add> raise AirflowException(f'Jenkins job status check failed. Final error was: {err.resp.status}')
<ide> except jenkins.JenkinsException as err:
<ide> raise AirflowException(
<ide> f'Jenkins call failed with error : {err}, if you have parameters '
<ide><path>kubernetes_tests/test_kubernetes_executor.py
<ide> def ensure_dag_expected_state(self, host, execution_date, dag_id, expected_final
<ide> # Wait some time for the operator to complete
<ide> while tries < max_tries:
<ide> time.sleep(5)
<del> get_string = f'http://{host}/api/experimental/dags/{dag_id}/' f'dag_runs/{execution_date}'
<add> get_string = f'http://{host}/api/experimental/dags/{dag_id}/dag_runs/{execution_date}'
<ide> print(f"Calling {get_string}")
<ide> # Trigger a new dagrun
<ide> result = self.session.get(get_string)
<ide> def ensure_dag_expected_state(self, host, execution_date, dag_id, expected_final
<ide> # Maybe check if we can retrieve the logs, but then we need to extend the API
<ide>
<ide> def start_dag(self, dag_id, host):
<del> get_string = f'http://{host}/api/experimental/' f'dags/{dag_id}/paused/false'
<add> get_string = f'http://{host}/api/experimental/dags/{dag_id}/paused/false'
<ide> print(f"Calling [start_dag]#1 {get_string}")
<ide> result = self.session.get(get_string)
<ide> try:
<ide> def start_dag(self, dag_id, host):
<ide> result_json = str(result)
<ide> print(f"Received [start_dag]#1 {result_json}")
<ide> assert result.status_code == 200, f"Could not enable DAG: {result_json}"
<del> post_string = f'http://{host}/api/experimental/' f'dags/{dag_id}/dag_runs'
<add> post_string = f'http://{host}/api/experimental/dags/{dag_id}/dag_runs'
<ide> print(f"Calling [start_dag]#2 {post_string}")
<ide> # Trigger a new dagrun
<ide> result = self.session.post(post_string, json={})
<ide> def start_dag(self, dag_id, host):
<ide> get_string = f'http://{host}/api/experimental/latest_runs'
<ide> print(f"Calling [start_dag]#3 {get_string}")
<ide> result = self.session.get(get_string)
<del> assert result.status_code == 200, "Could not get the latest DAG-run:" " {result}".format(
<del> result=result.json()
<del> )
<add> assert result.status_code == 200, f"Could not get the latest DAG-run: {result.json()}"
<ide> result_json = result.json()
<ide> print(f"Received: [start_dag]#3 {result_json}")
<ide> return result_json | 5 |
PHP | PHP | revert the removal of a bc relevant part | 8e0f15b3d6819e05c4865306bf0c6e54e300fe3e | <ide><path>lib/Cake/Model/Datasource/DboSource.php
<ide> public function buildAssociationQuery(Model $Model, $queryData) {
<ide> * Builds a string containing an SQL statement template.
<ide> *
<ide> * @param Model $Model Primary Model object.
<del> * @param Model $LinkModel Linked model object.
<add> * @param Model|null $LinkModel Linked model object.
<ide> * @param string $type Association type, one of the model association types ie. hasMany.
<ide> * @param string $association Association name.
<ide> * @param array $assocData Association data.
<ide> public function buildAssociationQuery(Model $Model, $queryData) {
<ide> */
<ide> public function generateAssociationQuery(Model $Model, $LinkModel, $type, $association, $assocData, &$queryData, $external) {
<ide> $assocData = $this->_scrubQueryData($assocData);
<add> $queryData = $this->_scrubQueryData($queryData);
<ide>
<ide> if ($LinkModel === null) {
<del> return '';
<add> return $this->buildStatement(
<add> array(
<add> 'fields' => array_unique($queryData['fields']),
<add> 'table' => $this->fullTableName($Model),
<add> 'alias' => $Model->alias,
<add> 'limit' => $queryData['limit'],
<add> 'offset' => $queryData['offset'],
<add> 'joins' => $queryData['joins'],
<add> 'conditions' => $queryData['conditions'],
<add> 'order' => $queryData['order'],
<add> 'group' => $queryData['group']
<add> ),
<add> $Model
<add> );
<ide> }
<ide>
<ide> if ($external && !empty($assocData['finderQuery'])) {
<ide> public function generateAssociationQuery(Model $Model, $LinkModel, $type, $assoc
<ide> 'conditions' => trim($this->conditions($conditions, true, false, $Model))
<ide> );
<ide>
<del> $queryData = $this->_scrubQueryData($queryData);
<del>
<ide> $fields = array();
<ide> if ($assocData['fields'] !== false) {
<ide> $fields = $this->fields($LinkModel, $association, $assocData['fields']);
<ide><path>lib/Cake/Test/Case/Model/Datasource/DboSourceTest.php
<ide> class DboSourceTest extends CakeTestCase {
<ide> */
<ide> public function setUp() {
<ide> parent::setUp();
<del> $this->__config = $this->db->config;
<ide>
<ide> $this->testDb = new DboTestSource();
<ide> $this->testDb->cacheSources = false;
<ide> public function testReadOnlyCallingQueryAssociationWhenDefined() {
<ide> */
<ide> public function testQueryAssociationUnneededQueries() {
<ide> $this->loadFixtures('Article', 'User', 'Comment', 'Attachment', 'Tag', 'ArticlesTag');
<del> $Comment = new Comment;
<add> $Comment = ClassRegistry::init('Comment');
<ide>
<ide> $fullDebug = $this->db->fullDebug;
<ide> $this->db->fullDebug = true;
<ide> public function testQueryAssociationUnneededQueries() {
<ide> $this->db->fullDebug = $fullDebug;
<ide> }
<ide>
<add>/**
<add> * Tests that generation association queries without LinkModel still works.
<add> * Mainly BC.
<add> *
<add> * @return void
<add> */
<add> public function testGenerateAssociationQuery() {
<add> $this->loadFixtures('Article');
<add> $Article = ClassRegistry::init('Article');
<add>
<add> $queryData = array(
<add> 'conditions' => array(
<add> 'Article.id' => 1
<add> ),
<add> 'fields' => array(
<add> 'Article.id',
<add> 'Article.title',
<add> ),
<add> 'joins' => array(),
<add> 'limit' => 2,
<add> 'offset' => 2,
<add> 'order' => array('title'),
<add> 'page' => 2,
<add> 'group' => null,
<add> 'callbacks' => 1
<add> );
<add>
<add> $result = $this->db->generateAssociationQuery($Article, null, null, null, null, $queryData, false);
<add> $this->assertContains('SELECT', $result);
<add> $this->assertContains('FROM', $result);
<add> $this->assertContains('WHERE', $result);
<add> $this->assertContains('ORDER', $result);
<add> }
<add>
<ide> /**
<ide> * test that fields() is using methodCache()
<ide> *
<ide> public function testLastError() {
<ide> */
<ide> public function testTransactionLogging() {
<ide> $conn = $this->getMock('MockPDO');
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide> $db->setConnection($conn);
<ide> $conn->expects($this->exactly(2))->method('beginTransaction')
<ide> ->will($this->returnValue(true));
<ide> public function testBuildStatementDefaults() {
<ide> $conn->expects($this->at(0))
<ide> ->method('quote')
<ide> ->will($this->returnValue('foo bar'));
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide> $db->setConnection($conn);
<ide> $subQuery = $db->buildStatement(
<ide> array(
<ide> public static function joinStatementsWithPrefix($schema) {
<ide> * @return void
<ide> */
<ide> public function testBuildJoinStatementWithTablePrefix($join, $expected) {
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide> $db->config['prefix'] = 'pre_';
<ide> $result = $db->buildJoinStatement($join);
<ide> $this->assertEquals($expected, $result);
<ide> public function testBuildJoinStatementWithTablePrefix($join, $expected) {
<ide> public function testConditionKeysToString() {
<ide> $Article = ClassRegistry::init('Article');
<ide> $conn = $this->getMock('MockPDO', array('quote'));
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide> $db->setConnection($conn);
<ide>
<ide> $conn->expects($this->at(0))
<ide> public function testConditionKeysToStringVirtualField() {
<ide> 'extra' => 'something virtual'
<ide> );
<ide> $conn = $this->getMock('MockPDO', array('quote'));
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide> $db->setConnection($conn);
<ide>
<ide> $conn->expects($this->at(0))
<ide> public function testConditionKeysToStringVirtualField() {
<ide> * @return void
<ide> */
<ide> public function testLimit() {
<del> $db = new DboTestSource;
<add> $db = new DboTestSource();
<ide>
<ide> $result = $db->limit('0');
<ide> $this->assertNull($result); | 2 |
Python | Python | use uppercase for config and support any object | 35fd6eb22c4dec893770dd5720a51608a06fb8cd | <ide><path>flask.py
<ide> def from_pyfile(self, filename):
<ide> d = type(sys)('config')
<ide> d.__file__ = filename
<ide> execfile(filename, d.__dict__)
<del> self.from_module(d)
<add> self.from_object(d)
<ide>
<del> def from_module(self, module):
<del> """Updates the values from the given module. A module can be of one
<add> def from_object(self, obj):
<add> """Updates the values from the given object. An object can be of one
<ide> of the following two types:
<ide>
<del> - a string: in this case the module with that name will be imported
<del> - an actual module reference: that module is used directly
<add> - a string: in this case the object with that name will be imported
<add> - an actual object reference: that object is used directly
<ide>
<del> Just the uppercase variables in that module are stored in the config
<add> Objects are usually either modules or classes.
<add>
<add> Just the uppercase variables in that object are stored in the config
<ide> after lowercasing. Example usage::
<ide>
<del> app.config.from_module('yourapplication.default_config')
<add> app.config.from_object('yourapplication.default_config')
<ide> from yourapplication import default_config
<del> app.config.from_module(default_config)
<add> app.config.from_object(default_config)
<ide>
<ide> You should not use this function to load the actual configuration but
<ide> rather configuration defaults. The actual config should be loaded
<del> with :meth;`from_pyfile` and ideally from a location not within the
<add> with :meth:`from_pyfile` and ideally from a location not within the
<ide> package because the package might be installed system wide.
<ide>
<del> :param module: an import name or module
<add> :param obj: an import name or object
<ide> """
<del> if isinstance(module, basestring):
<del> d = import_string(module).__dict__
<del> else:
<del> d = module.__dict__
<del> for key, value in d.iteritems():
<add> if isinstance(obj, basestring):
<add> obj = import_string(obj)
<add> for key in dir(obj):
<ide> if key.isupper():
<del> self[key.lower()] = value
<add> self[key] = getattr(obj, key)
<ide>
<ide> def __repr__(self):
<ide> return '<%s %s>' % (self.__class__.__name__, dict.__repr__(self))
<ide> class Flask(_PackageBoundObject):
<ide> #: application. In debug mode the debugger will kick in when an unhandled
<ide> #: exception ocurrs and the integrated server will automatically reload
<ide> #: the application if changes in the code are detected.
<del> debug = ConfigAttribute('debug')
<add> debug = ConfigAttribute('DEBUG')
<ide>
<ide> #: if a secret key is set, cryptographic components can use this to
<ide> #: sign cookies and other things. Set this to a complex random value
<ide> #: when you want to use the secure cookie for instance.
<del> secret_key = ConfigAttribute('secret_key')
<add> secret_key = ConfigAttribute('SECRET_KEY')
<ide>
<ide> #: The secure cookie uses this for the name of the session cookie
<del> session_cookie_name = ConfigAttribute('session_cookie_name')
<add> session_cookie_name = ConfigAttribute('SESSION_COOKIE_NAME')
<ide>
<ide> #: A :class:`~datetime.timedelta` which is used to set the expiration
<ide> #: date of a permanent session. The default is 31 days which makes a
<ide> #: permanent session survive for roughly one month.
<del> permanent_session_lifetime = ConfigAttribute('permanent_session_lifetime')
<add> permanent_session_lifetime = ConfigAttribute('PERMANENT_SESSION_LIFETIME')
<ide>
<ide> #: Enable this if you want to use the X-Sendfile feature. Keep in
<ide> #: mind that the server has to support this. This only affects files
<ide> #: sent with the :func:`send_file` method.
<ide> #:
<ide> #: .. versionadded:: 0.2
<del> use_x_sendfile = ConfigAttribute('use_x_sendfile')
<add> use_x_sendfile = ConfigAttribute('USE_X_SENDFILE')
<ide>
<ide> #: the logging format used for the debug logger. This is only used when
<ide> #: the application is in debug mode, otherwise the attached logging
<ide> class Flask(_PackageBoundObject):
<ide>
<ide> #: default configuration parameters
<ide> default_config = ImmutableDict({
<del> 'debug': False,
<del> 'secret_key': None,
<del> 'session_cookie_name': 'session',
<del> 'permanent_session_lifetime': timedelta(days=31),
<del> 'use_x_sendfile': False
<add> 'DEBUG': False,
<add> 'SECRET_KEY': None,
<add> 'SESSION_COOKIE_NAME': 'session',
<add> 'PERMANENT_SESSION_LIFETIME': timedelta(days=31),
<add> 'USE_X_SENDFILE': False
<ide> })
<ide>
<ide> def __init__(self, import_name):
<ide><path>tests/flask_tests.py
<ide> def internal_server_error(e):
<ide>
<ide> class ConfigTestCase(unittest.TestCase):
<ide>
<del> def common_module_test(self, app):
<add> def common_object_test(self, app):
<ide> assert app.secret_key == 'devkey'
<del> assert app.config['test_key'] == 'foo'
<add> assert app.config['TEST_KEY'] == 'foo'
<ide> assert 'ConfigTestCase' not in app.config
<ide>
<ide> def test_config_from_file(self):
<ide> app = flask.Flask(__name__)
<ide> app.config.from_pyfile('flask_tests.py')
<del> self.common_module_test(app)
<add> self.common_object_test(app)
<ide>
<ide> def test_config_from_module(self):
<ide> app = flask.Flask(__name__)
<del> app.config.from_module(__name__)
<del> self.common_module_test(app)
<add> app.config.from_object(__name__)
<add> self.common_object_test(app)
<add>
<add> def test_config_from_class(self):
<add> class Base(object):
<add> TEST_KEY = 'foo'
<add> class Test(Base):
<add> SECRET_KEY = 'devkey'
<add> app = flask.Flask(__name__)
<add> app.config.from_object(Test)
<add> self.common_object_test(app)
<ide>
<ide>
<ide> def suite(): | 2 |
Java | Java | delete unused imports | d357ef706fca2cfcc970d51b5c582d58adb9cb0f | <ide><path>spring-aop/src/test/java/org/springframework/aop/aspectj/annotation/AbstractAspectJAdvisorFactoryTests.java
<ide> import java.lang.reflect.Method;
<ide> import java.lang.reflect.UndeclaredThrowableException;
<ide> import java.rmi.RemoteException;
<del>import java.util.Collections;
<ide> import java.util.LinkedList;
<ide> import java.util.List;
<ide>
<ide><path>spring-aop/src/test/java/org/springframework/aop/aspectj/autoproxy/AspectJPrecedenceComparatorTests.java
<ide> import org.springframework.aop.aspectj.AspectJMethodBeforeAdvice;
<ide> import org.springframework.aop.aspectj.AspectJPointcutAdvisor;
<ide> import org.springframework.aop.support.DefaultPointcutAdvisor;
<del>import org.springframework.lang.Nullable;
<ide>
<ide> import static org.assertj.core.api.Assertions.assertThat;
<ide>
<ide><path>spring-aop/src/test/java/org/springframework/aop/framework/MethodInvocationTests.java
<ide> import java.util.List;
<ide>
<ide> import org.aopalliance.intercept.MethodInterceptor;
<del>import org.aopalliance.intercept.MethodInvocation;
<ide> import org.junit.jupiter.api.Test;
<ide>
<ide> import org.springframework.tests.sample.beans.TestBean;
<ide><path>spring-aop/src/test/java/org/springframework/aop/framework/NullPrimitiveTests.java
<ide> package org.springframework.aop.framework;
<ide>
<ide> import org.aopalliance.intercept.MethodInterceptor;
<del>import org.aopalliance.intercept.MethodInvocation;
<ide> import org.junit.jupiter.api.Test;
<ide>
<ide> import org.springframework.aop.AopInvocationException; | 4 |
Python | Python | make views requiring session, keyword only args | d8dbdccef7cc14af7bacbfd4ebc48d8aabfaf7f0 | <ide><path>airflow/www/views.py
<ide> def rendered_templates(self, session):
<ide> )
<ide> @action_logging
<ide> @provide_session
<del> def rendered_k8s(self, session: Session = NEW_SESSION):
<add> def rendered_k8s(self, *, session: Session = NEW_SESSION):
<ide> """Get rendered k8s yaml."""
<ide> if not settings.IS_K8S_OR_K8SCELERY_EXECUTOR:
<ide> abort(404)
<ide> def gantt(self, dag_id, session=None):
<ide> )
<ide> @action_logging
<ide> @provide_session
<del> def extra_links(self, session: Session = NEW_SESSION):
<add> def extra_links(self, *, session: Session = NEW_SESSION):
<ide> """
<ide> A restful endpoint that returns external links for a given Operator
<ide> | 1 |
PHP | PHP | add support for returning decoded subjects | c694b47503f883ef46f9e0310290addec4d069ab | <ide><path>src/Mailer/Email.php
<ide> protected function _addEmail($varName, $email, $name)
<ide> * Get/Set Subject.
<ide> *
<ide> * @param string|null $subject Subject string.
<add> * @param bool $decode Whether to decode the subject.
<ide> * @return string|$this
<ide> */
<del> public function subject($subject = null)
<add> public function subject($subject = null, $decode = false)
<ide> {
<ide> if ($subject === null) {
<del> return $this->_subject;
<add> return ($decode) ? $this->_decode($this->_subject) : $this->_subject;
<ide> }
<ide> $this->_subject = $this->_encode((string)$subject);
<ide> return $this;
<ide> protected function _encode($text)
<ide> return $return;
<ide> }
<ide>
<add> /**
<add> * Decode the specified string
<add> *
<add> * @param string $text String to decode
<add> * @return string Decoded string
<add> */
<add> protected function _decode($text)
<add> {
<add> $restore = mb_internal_encoding();
<add> mb_internal_encoding($this->_appCharset);
<add> $return = mb_decode_mimeheader($text);
<add> mb_internal_encoding($restore);
<add> return $return;
<add> }
<add>
<ide> /**
<ide> * Translates a string for one charset to another if the App.encoding value
<ide> * differs and the mb_convert_encoding function exists
<ide><path>tests/TestCase/Mailer/EmailTest.php
<ide> public function encode($text)
<ide> return $this->_encode($text);
<ide> }
<ide>
<add> /**
<add> * Decode to protected method
<add> *
<add> * @return string
<add> */
<add> public function decode($text)
<add> {
<add> return $this->_decode($text);
<add> }
<add>
<ide> /**
<ide> * Render to protected method
<ide> *
<ide> public function testSubject()
<ide> $this->CakeEmail->subject(1);
<ide> $this->assertSame('1', $this->CakeEmail->subject());
<ide>
<del> $this->CakeEmail->subject('هذه رسالة بعنوان طويل مرسل للمستلم');
<add> $input = 'هذه رسالة بعنوان طويل مرسل للمستلم';
<add> $this->CakeEmail->subject($input);
<ide> $expected = '=?UTF-8?B?2YfYsNmHINix2LPYp9mE2Kkg2KjYudmG2YjYp9mGINi32YjZitmEINmF2LE=?=' . "\r\n" . ' =?UTF-8?B?2LPZhCDZhNmE2YXYs9iq2YTZhQ==?=';
<ide> $this->assertSame($expected, $this->CakeEmail->subject());
<add> $this->assertSame($input, $this->CakeEmail->subject(null, true));
<ide> }
<ide>
<ide> /**
<ide> public function testEncode()
<ide> $this->assertSame($expected, $result);
<ide> }
<ide>
<add> /**
<add> * Test CakeEmail::_decode function
<add> *
<add> * @return void
<add> */
<add> public function testDecode()
<add> {
<add> $this->CakeEmail->headerCharset = 'ISO-2022-JP';
<add> $result = $this->CakeEmail->decode('=?ISO-2022-JP?B?GyRCRnxLXDhsGyhC?=');
<add> $expected = '日本語';
<add> $this->assertSame($expected, $result);
<add>
<add> $this->CakeEmail->headerCharset = 'ISO-2022-JP';
<add> $result = $this->CakeEmail->decode("=?ISO-2022-JP?B?GyRCRDkkJEQ5JCREOSQkGyhCU3ViamVjdBskQiROPmw5ZyRPGyhCZm9s?=\r\n" .
<add> " =?ISO-2022-JP?B?ZGluZxskQiQ5JGskTiQsQDUkNyQkJHMkQCQxJEkkJCRDJD8kJCRJGyhC?=\r\n" .
<add> " =?ISO-2022-JP?B?GyRCJCYkSiRrJHMkQCRtJCYhKRsoQg==?=");
<add> $expected = '長い長い長いSubjectの場合はfoldingするのが正しいんだけどいったいどうなるんだろう?';
<add> $this->assertSame($expected, $result);
<add> }
<add>
<ide> /**
<ide> * Tests charset setter/getter
<ide> * | 2 |
PHP | PHP | fix current uri used by crawler in tests | 8964f375093904b05a426a664255ecaf73bdfd02 | <ide><path>src/Illuminate/Foundation/Testing/InteractsWithPages.php
<ide> protected function makeRequest($method, $uri, $parameters = [], $cookies = [], $
<ide>
<ide> $this->currentUri = $this->app->make('request')->fullUrl();
<ide>
<del> $this->crawler = new Crawler($this->response->getContent(), $uri);
<add> $this->crawler = new Crawler($this->response->getContent(), $this->currentUri);
<ide>
<ide> return $this;
<ide> } | 1 |
Python | Python | add missing symbol | b88c4193e7bbf02eaa2c026d5fa2518975a77bb0 | <ide><path>spacy/language_data/tag_map.py
<ide> from __future__ import unicode_literals
<ide>
<ide> from ..symbols import POS, ADV, NOUN, ADP, PRON, SCONJ, PROPN, DET, SYM, INTJ
<del>from ..symbols import PUNCT, NUM, AUX, X, CONJ, ADJ, VERB, PART, SPACE
<add>from ..symbols import PUNCT, NUM, AUX, X, CONJ, ADJ, VERB, PART, SPACE, CCONJ
<ide>
<ide>
<ide> TAG_MAP = { | 1 |
PHP | PHP | use environment options in database config | 83a5602df1eb4f1d58e9300da82ac6eef064b1b3 | <ide><path>config/database.php
<ide>
<ide> 'mysql' => [
<ide> 'driver' => 'mysql',
<del> 'host' => 'localhost',
<del> 'database' => 'forge',
<del> 'username' => 'forge',
<del> 'password' => '',
<add> 'host' => getenv('DB_HOST') ?: 'localhost',
<add> 'database' => getenv('DB_DATABASE') ?: 'forge',
<add> 'username' => getenv('DB_USERNAME') ?: 'forge',
<add> 'password' => getenv('DB_PASSWORD') ?: '',
<ide> 'charset' => 'utf8',
<ide> 'collation' => 'utf8_unicode_ci',
<ide> 'prefix' => '',
<ide> ],
<ide>
<ide> 'pgsql' => [
<ide> 'driver' => 'pgsql',
<del> 'host' => 'localhost',
<del> 'database' => 'forge',
<del> 'username' => 'forge',
<del> 'password' => '',
<add> 'host' => getenv('DB_HOST') ?: 'localhost',
<add> 'database' => getenv('DB_DATABASE') ?: 'forge',
<add> 'username' => getenv('DB_USERNAME') ?: 'forge',
<add> 'password' => getenv('DB_PASSWORD') ?: '',
<ide> 'charset' => 'utf8',
<ide> 'prefix' => '',
<ide> 'schema' => 'public',
<ide> ],
<ide>
<ide> 'sqlsrv' => [
<ide> 'driver' => 'sqlsrv',
<del> 'host' => 'localhost',
<del> 'database' => 'database',
<del> 'username' => 'root',
<del> 'password' => '',
<add> 'host' => getenv('DB_HOST') ?: 'localhost',
<add> 'database' => getenv('DB_DATABASE') ?: 'forge',
<add> 'username' => getenv('DB_USERNAME') ?: 'forge',
<add> 'password' => getenv('DB_PASSWORD') ?: '',
<ide> 'prefix' => '',
<ide> ],
<ide> | 1 |
Python | Python | fix bug train_batch_size not an int | 649e9774cdee5c074634fe2eb37d1c1ed9f27a81 | <ide><path>run_classifier.py
<ide> def main():
<ide> raise ValueError("Invalid accumulate_gradients parameter: {}, should be >= 1".format(
<ide> args.accumulate_gradients))
<ide>
<del> args.train_batch_size = args.train_batch_size / args.accumulate_gradients
<add> args.train_batch_size = int(args.train_batch_size / args.accumulate_gradients)
<ide>
<ide> random.seed(args.seed)
<ide> np.random.seed(args.seed)
<ide><path>run_squad.py
<ide> def main():
<ide> raise ValueError("Invalid accumulate_gradients parameter: {}, should be >= 1".format(
<ide> args.accumulate_gradients))
<ide>
<del> args.train_batch_size = args.train_batch_size / args.accumulate_gradients
<add> args.train_batch_size = int(args.train_batch_size / args.accumulate_gradients)
<ide>
<ide> random.seed(args.seed)
<ide> np.random.seed(args.seed) | 2 |
Ruby | Ruby | make logger a singleton on the class | a6fd462a8019f0be512bcba7ce5b9f9e482c7f8e | <ide><path>activesupport/lib/active_support/log_subscriber.rb
<ide> class LogSubscriber
<ide> mattr_accessor :colorize_logging
<ide> self.colorize_logging = true
<ide>
<del> class_attribute :logger
<del>
<ide> class << self
<del> remove_method :logger
<ide> def logger
<ide> @logger ||= Rails.logger if defined?(Rails)
<add> @logger
<ide> end
<ide>
<add> attr_writer :logger
<add>
<ide> def attach_to(namespace, log_subscriber=new, notifier=ActiveSupport::Notifications)
<ide> log_subscribers << log_subscriber
<ide> @@flushable_loggers = nil
<ide> def initialize
<ide> super
<ide> end
<ide>
<add> def logger
<add> LogSubscriber.logger
<add> end
<add>
<ide> def start(name, id, payload)
<ide> return unless logger
<ide> | 1 |
Javascript | Javascript | convert anonymous function to arrow function | fcc3910dd4578c65e24567b45a3a4097106c4625 | <ide><path>test/parallel/test-http-write-head-2.js
<ide> const http = require('http');
<ide> res.end();
<ide> }));
<ide>
<del> server.listen(0, common.mustCall(function() {
<add> server.listen(0, common.mustCall(() => {
<ide> http.get({ port: server.address().port }, common.mustCall((res) => {
<ide> assert.strictEqual(res.headers.test, '1');
<ide> assert.strictEqual(res.headers.test2, '2');
<ide> const http = require('http');
<ide> res.end();
<ide> }));
<ide>
<del> server.listen(0, common.mustCall(function() {
<add> server.listen(0, common.mustCall(() => {
<ide> http.get({ port: server.address().port }, common.mustCall((res) => {
<ide> res.resume().on('end', common.mustCall(() => {
<ide> server.close(); | 1 |
Go | Go | increase default connection timeout to 30s | f5e6f50a1ef193c1f3f5736829a0284c8f96a661 | <ide><path>registry/registry.go
<ide> func newClient(jar http.CookieJar, roots *x509.CertPool, certs []tls.Certificate
<ide> switch timeout {
<ide> case ConnectTimeout:
<ide> httpTransport.Dial = func(proto string, addr string) (net.Conn, error) {
<del> // Set the connect timeout to 5 seconds
<del> d := net.Dialer{Timeout: 5 * time.Second, DualStack: true}
<add> // Set the connect timeout to 30 seconds to allow for slower connection
<add> // times...
<add> d := net.Dialer{Timeout: 30 * time.Second, DualStack: true}
<ide>
<ide> conn, err := d.Dial(proto, addr)
<ide> if err != nil { | 1 |
Javascript | Javascript | use var for consistency | f3ed73d26ab9ff99e966d40707028222754dd6b5 | <ide><path>rollup-examples.config.js
<del>const path = require( 'path' );
<del>const fs = require( 'fs' );
<add>var path = require( 'path' );
<add>var fs = require( 'fs' );
<ide>
<ide> // Creates an rollup config object for the given file to
<ide> // be output to umd format
<ide> function createOutput( file ) {
<ide>
<del> const inputPath = path.resolve( file );
<del> const outputPath = inputPath.replace( /[\\\/]examples[\\\/]jsm[\\\/]/, '/examples/js/' );
<add> var inputPath = path.resolve( file );
<add> var outputPath = inputPath.replace( /[\\\/]examples[\\\/]jsm[\\\/]/, '/examples/js/' );
<ide>
<ide> // Every import is marked as external so the output is 1-to-1. We
<ide> // assume that that global object should be the THREE object so we
<ide> function createOutput( file ) {
<ide> // the callback for every file.
<ide> function walk( dir, cb ) {
<ide>
<del> const files = fs.readdirSync( dir );
<add> var files = fs.readdirSync( dir );
<ide> files.forEach( f => {
<ide>
<del> const p = path.join( dir, f );
<del> const stats = fs.statSync( p );
<add> var p = path.join( dir, f );
<add> var stats = fs.statSync( p );
<ide> if ( stats.isDirectory() ) {
<ide>
<ide> walk( p, cb );
<ide> function walk( dir, cb ) {
<ide> }
<ide>
<ide> // Gather up all the files
<del>const files = [];
<add>var files = [];
<ide> walk( 'examples/jsm/', p => files.push( p ) );
<ide>
<ide> // Create a rollup config for each module.js file | 1 |
Java | Java | fix typo in javadoc | 891d41c005d69f9016ccc35a52dc2e99c9da34a0 | <ide><path>spring-web/src/main/java/org/springframework/web/cors/CorsConfiguration.java
<ide> public void setMaxAge(Long maxAge) {
<ide> }
<ide>
<ide> /**
<del> * Return the configure maxAge value, possibly {@code null}.
<add> * Return the configured maxAge value, possibly {@code null}.
<ide> */
<ide> public Long getMaxAge() {
<ide> return maxAge;
<ide> }
<ide>
<del>
<ide> /**
<ide> * Check the origin of the request against the configured allowed origins.
<ide> * @param requestOrigin the origin to check. | 1 |
Javascript | Javascript | keep the parsed comments along with the ast | fe0c8cafb3a132064e5f0878d7cced39ce53ee8f | <ide><path>lib/Parser.js
<ide> var POSSIBLE_AST_OPTIONS = [{
<ide> }]
<ide>
<ide> Parser.prototype.parse = function parse(source, initialState) {
<del> var ast;
<add> var ast, comments = [];
<ide> for(var i = 0; i < POSSIBLE_AST_OPTIONS.length; i++) {
<ide> if(!ast) {
<ide> try {
<add> comments.length = 0;
<add> POSSIBLE_AST_OPTIONS[i].onComment = comments;
<ide> ast = acorn.parse(source, POSSIBLE_AST_OPTIONS[i]);
<ide> } catch(e) {
<ide> // ignore the error
<ide> Parser.prototype.parse = function parse(source, initialState) {
<ide> ranges: true,
<ide> locations: true,
<ide> ecmaVersion: 6,
<del> sourceType: "module"
<add> sourceType: "module",
<add> onComment: comments
<ide> });
<ide> }
<ide> if(!ast || typeof ast !== "object")
<ide> Parser.prototype.parse = function parse(source, initialState) {
<ide> renames: {}
<ide> };
<ide> var state = this.state = initialState || {};
<del> if(this.applyPluginsBailResult("program", ast) === undefined)
<add> if(this.applyPluginsBailResult("program", ast, comments) === undefined)
<ide> this.walkStatements(ast.body);
<ide> this.scope = oldScope;
<ide> this.state = oldState;
<ide><path>test/Parser.test.js
<ide> describe("Parser", function() {
<ide> });
<ide> });
<ide>
<add> it("should parse comments", function() {
<add> var source = "//comment1\n/*comment2*/";
<add> var state = [
<add> {
<add> type: 'Line',
<add> value: 'comment1'
<add> }, {
<add> type: 'Block',
<add> value: 'comment2'
<add> }
<add> ];
<add>
<add> var testParser = new Parser({});
<add>
<add> testParser.plugin("program", function(ast, comments) {
<add> if(!this.state.comments) this.state.comments = comments;
<add> return true;
<add> });
<add>
<add> var actual = testParser.parse(source);
<add> should.strictEqual(typeof actual, "object");
<add> should.strictEqual(typeof actual.comments, "object");
<add> actual.comments.forEach(function(element, index) {
<add> should.strictEqual(typeof element.type, "string");
<add> should.strictEqual(typeof element.value, "string");
<add> element.type.should.be.eql(state[index].type);
<add> element.value.should.be.eql(state[index].value);
<add> });
<add> });
<add>
<ide> describe("expression evaluation", function() {
<ide> function evaluateInParser(source) {
<ide> var parser = new Parser(); | 2 |
PHP | PHP | present tense and fix tests | 0ef29152f4fc8e0e768a847e60fb3850013bbb9f | <ide><path>src/TestSuite/Constraint/Response/CookieEncryptedEquals.php
<ide> public function matches($other)
<ide> */
<ide> public function toString()
<ide> {
<del> return sprintf('was encrypted in cookie \'%s\'', $this->cookieName);
<add> return sprintf('is encrypted in cookie \'%s\'', $this->cookieName);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/TestSuite/IntegrationTestTraitTest.php
<ide> public function assertionFailureMessagesProvider()
<ide> 'assertContentTypeVerbose' => ['assertContentType', 'Possibly related to Cake\Routing\Exception\MissingRouteException: "A route matching "/notfound" could not be found."', '/notfound', 'test'],
<ide> 'assertCookie' => ['assertCookie', 'Failed asserting that \'test\' is in cookie \'remember_me\'.', '/posts/index', 'test', 'remember_me'],
<ide> 'assertCookieVerbose' => ['assertCookie', 'Possibly related to Cake\Routing\Exception\MissingRouteException: "A route matching "/notfound" could not be found."', '/notfound', 'test', 'remember_me'],
<del> 'assertCookieEncrypted' => ['assertCookieEncrypted', 'Failed asserting that \'test\' was encrypted in cookie \'NameOfCookie\'.', '/cookie_component_test/set_cookie', 'test', 'NameOfCookie'],
<add> 'assertCookieEncrypted' => ['assertCookieEncrypted', 'Failed asserting that \'test\' is encrypted in cookie \'NameOfCookie\'.', '/cookie_component_test/set_cookie', 'test', 'NameOfCookie'],
<ide> 'assertCookieEncryptedVerbose' => ['assertCookieEncrypted', 'Possibly related to Cake\Routing\Exception\MissingRouteException: "A route matching "/notfound" could not be found."', '/notfound', 'test', 'NameOfCookie'],
<ide> 'assertCookieNotSet' => ['assertCookieNotSet', 'Failed asserting that \'remember_me\' cookie is not set.', '/posts/index', 'remember_me'],
<ide> 'assertFileResponse' => ['assertFileResponse', 'Failed asserting that \'test\' file was sent.', '/posts/file', 'test'], | 2 |
Go | Go | redact secret data on "secret create" | 3fbc352cbbce06cd3001d6b14b2b1ebcb4d42cd5 | <ide><path>api/server/middleware/debug.go
<ide> func DebugRequestMiddleware(handler func(ctx context.Context, w http.ResponseWri
<ide>
<ide> var postForm map[string]interface{}
<ide> if err := json.Unmarshal(b, &postForm); err == nil {
<del> maskSecretKeys(postForm)
<add> maskSecretKeys(postForm, r.RequestURI)
<ide> formStr, errMarshal := json.Marshal(postForm)
<ide> if errMarshal == nil {
<ide> logrus.Debugf("form data: %s", string(formStr))
<ide> func DebugRequestMiddleware(handler func(ctx context.Context, w http.ResponseWri
<ide> }
<ide> }
<ide>
<del>func maskSecretKeys(inp interface{}) {
<add>func maskSecretKeys(inp interface{}, path string) {
<add> // Remove any query string from the path
<add> idx := strings.Index(path, "?")
<add> if idx != -1 {
<add> path = path[:idx]
<add> }
<add> // Remove trailing / characters
<add> path = strings.TrimRight(path, "/")
<add>
<ide> if arr, ok := inp.([]interface{}); ok {
<ide> for _, f := range arr {
<del> maskSecretKeys(f)
<add> maskSecretKeys(f, path)
<ide> }
<ide> return
<ide> }
<add>
<ide> if form, ok := inp.(map[string]interface{}); ok {
<ide> loop0:
<ide> for k, v := range form {
<ide> func maskSecretKeys(inp interface{}) {
<ide> continue loop0
<ide> }
<ide> }
<del> maskSecretKeys(v)
<add> maskSecretKeys(v, path)
<add> }
<add>
<add> // Route-specific redactions
<add> if strings.HasSuffix(path, "/secrets/create") {
<add> for k := range form {
<add> if k == "Data" {
<add> form[k] = "*****"
<add> }
<add> }
<ide> }
<ide> }
<ide> }
<ide><path>api/server/middleware/debug_test.go
<add>package middleware
<add>
<add>import (
<add> "testing"
<add>
<add> "github.com/stretchr/testify/assert"
<add>)
<add>
<add>func TestMaskSecretKeys(t *testing.T) {
<add> tests := []struct {
<add> path string
<add> input map[string]interface{}
<add> expected map[string]interface{}
<add> }{
<add> {
<add> path: "/v1.30/secrets/create",
<add> input: map[string]interface{}{"Data": "foo", "Name": "name", "Labels": map[string]interface{}{}},
<add> expected: map[string]interface{}{"Data": "*****", "Name": "name", "Labels": map[string]interface{}{}},
<add> },
<add> {
<add> path: "/v1.30/secrets/create//",
<add> input: map[string]interface{}{"Data": "foo", "Name": "name", "Labels": map[string]interface{}{}},
<add> expected: map[string]interface{}{"Data": "*****", "Name": "name", "Labels": map[string]interface{}{}},
<add> },
<add>
<add> {
<add> path: "/secrets/create?key=val",
<add> input: map[string]interface{}{"Data": "foo", "Name": "name", "Labels": map[string]interface{}{}},
<add> expected: map[string]interface{}{"Data": "*****", "Name": "name", "Labels": map[string]interface{}{}},
<add> },
<add> {
<add> path: "/v1.30/some/other/path",
<add> input: map[string]interface{}{
<add> "password": "pass",
<add> "other": map[string]interface{}{
<add> "secret": "secret",
<add> "jointoken": "jointoken",
<add> "unlockkey": "unlockkey",
<add> "signingcakey": "signingcakey",
<add> },
<add> },
<add> expected: map[string]interface{}{
<add> "password": "*****",
<add> "other": map[string]interface{}{
<add> "secret": "*****",
<add> "jointoken": "*****",
<add> "unlockkey": "*****",
<add> "signingcakey": "*****",
<add> },
<add> },
<add> },
<add> }
<add>
<add> for _, testcase := range tests {
<add> maskSecretKeys(testcase.input, testcase.path)
<add> assert.Equal(t, testcase.expected, testcase.input)
<add> }
<add>} | 2 |
PHP | PHP | add "notexists" method to query builder | af4eebebc748f3f7e969d64b3e5342a16f0b6eaf | <ide><path>src/Illuminate/Database/Query/Builder.php
<ide> public function exists()
<ide> return false;
<ide> }
<ide>
<add> /**
<add> * Determine if no rows exist for the current query.
<add> *
<add> * @return bool
<add> */
<add> public function notExists()
<add> {
<add> return ! $this->exists();
<add> }
<add>
<ide> /**
<ide> * Retrieve the "count" result of the query.
<ide> *
<ide><path>tests/Database/DatabaseQueryBuilderTest.php
<ide> public function testAggregateFunctions()
<ide> $results = $builder->from('users')->exists();
<ide> $this->assertTrue($results);
<ide>
<add> $builder = $this->getBuilder();
<add> $builder->getConnection()->shouldReceive('select')->once()->with('select exists(select * from "users") as "exists"', [], true)->andReturn([['exists' => 0]]);
<add> $results = $builder->from('users')->notExists();
<add> $this->assertTrue($results);
<add>
<ide> $builder = $this->getBuilder();
<ide> $builder->getConnection()->shouldReceive('select')->once()->with('select max("id") as aggregate from "users"', [], true)->andReturn([['aggregate' => 1]]);
<ide> $builder->getProcessor()->shouldReceive('processSelect')->once()->andReturnUsing(function ($builder, $results) { | 2 |
Text | Text | update translation for chinese | 7bbe27b5d112582293b43720f0e6ed8a3d2d0f5d | <ide><path>guide/chinese/bash/bash-cat/index.md
<ide> ---
<ide> title: Bash Cat
<del>localeTitle: 猛击猫
<del>---
<ide>## 猛击猫
<del>
<del>Cat是Unix操作系统中最常用的命令之一。
<del>
<del>Cat用于按顺序读取文件并将其打印到标准输出。 这个名字是从它的功能,骗子**猫** enate文件导出。
<del>
<del>### 用法
<del>
<del>```bash
<del>cat [options] [file_names]
<del>```
<del>
<del>最常用的选项:
<del>
<del>* `-b` ,数字非空白输出行
<del>* `-n` ,对所有输出行进行编号
<del>* `-s` ,挤压多个相邻的空白行
<del>* `-v` ,显示非打印字符,标签和行尾字符除外
<del>
<del>### 例
<del>
<del>在终端中打印file.txt的内容:
<del>
<del>```bash
<del>cat file.txt
<del>```
<del>
<del>连接两个文件的内容并在终端中显示结果:
<del>
<del>```bash
<del>cat file1.txt file2.txt
<del>```
<del>
<del>#### 更多信息:
<del>
<del>* 维基百科:https://en.wikipedia.org/wiki/Cat\_(Unix)
<ide>\ No newline at end of file
<add>localeTitle: Bash Cat
<add>---
<add>
<add>## Cat
<add>
<add>Cat是Unix操作系统中最常用的命令之一。
<add>
<add>Cat用于按顺序读取文件并将其打印到标准输出。 这个名字是从它的功能(con**cat**enate files)而来。
<add>
<add>### 用法
<add>
<add>```bash
<add>cat [options] [file_names]
<add>```
<add>
<add>最常用的选项:
<add>
<add>* `-b` ,数字非空白输出行
<add>* `-n` ,对所有输出行进行编号
<add>* `-s` ,挤压多个相邻的空白行
<add>* `-v` ,显示非打印字符,标签和行尾字符除外
<add>
<add>### 例子
<add>
<add>在终端机打印file.txt的内容:
<add>
<add>```bash
<add>cat file.txt
<add>```
<add>
<add>连接两个文件的内容并在终端机中显示结果:
<add>
<add>```bash
<add>cat file1.txt file2.txt
<add>```
<add>
<add>#### 更多信息:
<add>
<add>* 维基百科:https://en.wikipedia.org/wiki/Cat\_(Unix) | 1 |
PHP | PHP | apply fixes from styleci | f3b7651bd0ea0fcd4261c4669aadf1d1d5d5382f | <ide><path>src/Illuminate/Collections/Collection.php
<ide> public function offsetSet($key, $value)
<ide> /**
<ide> * Unset the item at a given offset.
<ide> *
<del><<<<<<< HEAD
<add> * <<<<<<< HEAD
<add> *
<ide> * @param TKey $key
<del>=======
<add> * =======
<ide> * @param mixed $key
<del>>>>>>>> 8.x
<add> * >>>>>>> 8.x
<ide> * @return void
<ide> */
<ide> #[\ReturnTypeWillChange] | 1 |
Ruby | Ruby | remove some documentation cruft on has_many | ef1297d896f6ee12b8d6d31d44d2a45f61f1a667 | <ide><path>activerecord/lib/active_record/associations.rb
<ide> module ClassMethods
<ide> # 'FROM people p, post_subscriptions ps ' +
<ide> # 'WHERE ps.post_id = #{id} AND ps.person_id = p.id ' +
<ide> # 'ORDER BY p.first_name'
<del> #
<del> # Specifying the :through option
<del> #
<ide> def has_many(association_id, options = {}, &extension)
<ide> reflection = create_has_many_reflection(association_id, options, &extension)
<ide> | 1 |
PHP | PHP | fix failing tests | 83a54a91249e0eb3f81bd45bf5564e6f459753a5 | <ide><path>src/ORM/Table.php
<ide> public function __debugInfo() {
<ide> 'table' => $this->table(),
<ide> 'alias' => $this->alias(),
<ide> 'entityClass' => $this->entityClass(),
<del> 'associated' => $this->_associated->keys(),
<add> 'associations' => $this->_associations->keys(),
<ide> 'behaviors' => $this->_behaviors->loaded(),
<ide> 'defaultConnection' => $this->defaultConnectionName(),
<ide> 'connectionName' => $conn ? $conn->configName() : null
<ide><path>tests/TestCase/ORM/TableTest.php
<ide> public function testDebugInfo() {
<ide> 'table' => 'articles',
<ide> 'alias' => 'articles',
<ide> 'entityClass' => 'TestApp\Model\Entity\Article',
<del> 'associated' => ['authors', 'tags', 'articlestags'],
<add> 'associations' => ['authors', 'tags', 'articlestags'],
<ide> 'behaviors' => ['Timestamp'],
<ide> 'defaultConnection' => 'default',
<ide> 'connectionName' => 'test' | 2 |
Python | Python | prepare changes for v3.6.1 release | d98974ae140065def70c4bdca8297886a5146b0c | <ide><path>libcloud/__init__.py
<ide>
<ide> __all__ = ["__version__", "enable_debug"]
<ide>
<del>__version__ = "3.6.1-dev"
<add>__version__ = "3.6.1"
<ide>
<ide>
<ide> def enable_debug(fo): | 1 |
PHP | PHP | pass cachemetadata option through to collection | 463918b89dc0a06f8c2ec7aa78d1086c8657757d | <ide><path>src/Database/Schema/Collection.php
<ide> public function __construct(Connection $connection) {
<ide> $config = $this->_connection->config();
<ide>
<ide> if (!empty($config['cacheMetadata'])) {
<del> $this->cacheMetadata(true);
<add> $this->cacheMetadata($config['cacheMetadata']);
<ide> }
<ide> }
<ide> | 1 |
Python | Python | skip flaky test_tf_question_answering | 20fc18fbda3669c2f4a3510e0705b2acd54bff07 | <ide><path>tests/test_pipelines.py
<ide> def test_question_answering(self):
<ide> self._test_multicolumn_pipeline(nlp, valid_samples, invalid_samples, mandatory_output_keys)
<ide>
<ide> @require_tf
<add> @unittest.skip("This test is failing intermittently. Skipping it until we resolve.")
<ide> def test_tf_question_answering(self):
<ide> mandatory_output_keys = {"score", "answer", "start", "end"}
<ide> valid_samples = [ | 1 |
Go | Go | remove remaining registry methods from dockercli | 1dd46e06444d1a0c73fc88dfccec4ae0b7f10bf6 | <ide><path>cli/command/container/create.go
<ide> func pullImage(ctx context.Context, dockerCli *command.DockerCli, image string,
<ide> return err
<ide> }
<ide>
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, repoInfo.Index)
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
<ide> encodedAuth, err := command.EncodeAuthToBase64(authConfig)
<ide> if err != nil {
<ide> return err
<ide><path>cli/command/image/pull.go
<ide> func runPull(dockerCli *command.DockerCli, opts pullOptions) error {
<ide>
<ide> ctx := context.Background()
<ide>
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, repoInfo.Index)
<del> requestPrivilege := dockerCli.RegistryAuthenticationPrivilegedFunc(repoInfo.Index, "pull")
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
<add> requestPrivilege := command.RegistryAuthenticationPrivilegedFunc(dockerCli, repoInfo.Index, "pull")
<ide>
<ide> if command.IsTrusted() && !registryRef.HasDigest() {
<ide> // Check if tag is digest
<ide><path>cli/command/image/push.go
<ide> func runPush(dockerCli *command.DockerCli, remote string) error {
<ide> ctx := context.Background()
<ide>
<ide> // Resolve the Auth config relevant for this server
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, repoInfo.Index)
<del> requestPrivilege := dockerCli.RegistryAuthenticationPrivilegedFunc(repoInfo.Index, "push")
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
<add> requestPrivilege := command.RegistryAuthenticationPrivilegedFunc(dockerCli, repoInfo.Index, "push")
<ide>
<ide> if command.IsTrusted() {
<ide> return trustedPush(ctx, dockerCli, repoInfo, ref, authConfig, requestPrivilege)
<ide><path>cli/command/image/search.go
<ide> func runSearch(dockerCli *command.DockerCli, opts searchOptions) error {
<ide>
<ide> ctx := context.Background()
<ide>
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, indexInfo)
<del> requestPrivilege := dockerCli.RegistryAuthenticationPrivilegedFunc(indexInfo, "search")
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, indexInfo)
<add> requestPrivilege := command.RegistryAuthenticationPrivilegedFunc(dockerCli, indexInfo, "search")
<ide>
<ide> encodedAuth, err := command.EncodeAuthToBase64(authConfig)
<ide> if err != nil {
<ide><path>cli/command/image/trust.go
<ide> func TrustedReference(ctx context.Context, cli *command.DockerCli, ref reference
<ide> }
<ide>
<ide> // Resolve the Auth config relevant for this server
<del> authConfig := cli.ResolveAuthConfig(ctx, repoInfo.Index)
<add> authConfig := command.ResolveAuthConfig(ctx, cli, repoInfo.Index)
<ide>
<ide> notaryRepo, err := GetNotaryRepository(cli, repoInfo, authConfig, "pull")
<ide> if err != nil {
<ide><path>cli/command/plugin/install.go
<ide> func runInstall(dockerCli *command.DockerCli, opts pluginOptions) error {
<ide> return err
<ide> }
<ide>
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, repoInfo.Index)
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
<ide>
<ide> encodedAuth, err := command.EncodeAuthToBase64(authConfig)
<ide> if err != nil {
<ide> return err
<ide> }
<ide>
<del> registryAuthFunc := dockerCli.RegistryAuthenticationPrivilegedFunc(repoInfo.Index, "plugin install")
<add> registryAuthFunc := command.RegistryAuthenticationPrivilegedFunc(dockerCli, repoInfo.Index, "plugin install")
<ide>
<ide> options := types.PluginInstallOptions{
<ide> RegistryAuth: encodedAuth,
<ide><path>cli/command/plugin/push.go
<ide> func runPush(dockerCli *command.DockerCli, name string) error {
<ide> if err != nil {
<ide> return err
<ide> }
<del> authConfig := dockerCli.ResolveAuthConfig(ctx, repoInfo.Index)
<add> authConfig := command.ResolveAuthConfig(ctx, dockerCli, repoInfo.Index)
<ide>
<ide> encodedAuth, err := command.EncodeAuthToBase64(authConfig)
<ide> if err != nil {
<ide><path>cli/command/registry.go
<ide> import (
<ide> )
<ide>
<ide> // ElectAuthServer returns the default registry to use (by asking the daemon)
<del>// TODO: only used in registry package and from ResolveAuthConfig
<del>func (cli *DockerCli) ElectAuthServer(ctx context.Context) string {
<add>func ElectAuthServer(ctx context.Context, cli *DockerCli) string {
<ide> // The daemon `/info` endpoint informs us of the default registry being
<ide> // used. This is essential in cross-platforms environment, where for
<ide> // example a Linux client might be interacting with a Windows daemon, hence
<ide> // the default registry URL might be Windows specific.
<ide> serverAddress := registry.IndexServer
<del> if info, err := cli.client.Info(ctx); err != nil {
<del> fmt.Fprintf(cli.out, "Warning: failed to get default registry endpoint from daemon (%v). Using system default: %s\n", err, serverAddress)
<add> if info, err := cli.Client().Info(ctx); err != nil {
<add> fmt.Fprintf(cli.Out(), "Warning: failed to get default registry endpoint from daemon (%v). Using system default: %s\n", err, serverAddress)
<ide> } else {
<ide> serverAddress = info.IndexServerAddress
<ide> }
<ide> func EncodeAuthToBase64(authConfig types.AuthConfig) (string, error) {
<ide>
<ide> // RegistryAuthenticationPrivilegedFunc returns a RequestPrivilegeFunc from the specified registry index info
<ide> // for the given command.
<del>// TODO: image/plugin
<del>func (cli *DockerCli) RegistryAuthenticationPrivilegedFunc(index *registrytypes.IndexInfo, cmdName string) types.RequestPrivilegeFunc {
<add>func RegistryAuthenticationPrivilegedFunc(cli *DockerCli, index *registrytypes.IndexInfo, cmdName string) types.RequestPrivilegeFunc {
<ide> return func() (string, error) {
<del> fmt.Fprintf(cli.out, "\nPlease login prior to %s:\n", cmdName)
<add> fmt.Fprintf(cli.Out(), "\nPlease login prior to %s:\n", cmdName)
<ide> indexServer := registry.GetAuthConfigKey(index)
<del> isDefaultRegistry := indexServer == cli.ElectAuthServer(context.Background())
<del> authConfig, err := cli.ConfigureAuth("", "", indexServer, isDefaultRegistry)
<add> isDefaultRegistry := indexServer == ElectAuthServer(context.Background(), cli)
<add> authConfig, err := ConfigureAuth(cli, "", "", indexServer, isDefaultRegistry)
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide> func (cli *DockerCli) RegistryAuthenticationPrivilegedFunc(index *registrytypes.
<ide> // ResolveAuthConfig is like registry.ResolveAuthConfig, but if using the
<ide> // default index, it uses the default index name for the daemon's platform,
<ide> // not the client's platform.
<del>// TODO: plugin/image/container and from RetrieveAuthTokenFromImage
<del>func (cli *DockerCli) ResolveAuthConfig(ctx context.Context, index *registrytypes.IndexInfo) types.AuthConfig {
<add>func ResolveAuthConfig(ctx context.Context, cli *DockerCli, index *registrytypes.IndexInfo) types.AuthConfig {
<ide> configKey := index.Name
<ide> if index.Official {
<del> configKey = cli.ElectAuthServer(ctx)
<add> configKey = ElectAuthServer(ctx, cli)
<ide> }
<ide>
<ide> a, _ := cli.CredentialsStore().Get(configKey)
<ide> return a
<ide> }
<ide>
<ide> // ConfigureAuth returns an AuthConfig from the specified user, password and server.
<del>// TODO: only used in registry package
<del>func (cli *DockerCli) ConfigureAuth(flUser, flPassword, serverAddress string, isDefaultRegistry bool) (types.AuthConfig, error) {
<add>func ConfigureAuth(cli *DockerCli, flUser, flPassword, serverAddress string, isDefaultRegistry bool) (types.AuthConfig, error) {
<ide> // On Windows, force the use of the regular OS stdin stream. Fixes #14336/#14210
<ide> if runtime.GOOS == "windows" {
<ide> cli.in = NewInStream(os.Stdin)
<ide> func (cli *DockerCli) ConfigureAuth(flUser, flPassword, serverAddress string, is
<ide> if flUser = strings.TrimSpace(flUser); flUser == "" {
<ide> if isDefaultRegistry {
<ide> // if this is a default registry (docker hub), then display the following message.
<del> fmt.Fprintln(cli.out, "Login with your Docker ID to push and pull images from Docker Hub. If you don't have a Docker ID, head over to https://hub.docker.com to create one.")
<add> fmt.Fprintln(cli.Out(), "Login with your Docker ID to push and pull images from Docker Hub. If you don't have a Docker ID, head over to https://hub.docker.com to create one.")
<ide> }
<del> cli.promptWithDefault("Username", authconfig.Username)
<del> flUser = readInput(cli.in, cli.out)
<add> promptWithDefault(cli.Out(), "Username", authconfig.Username)
<add> flUser = readInput(cli.In(), cli.Out())
<ide> flUser = strings.TrimSpace(flUser)
<ide> if flUser == "" {
<ide> flUser = authconfig.Username
<ide> func (cli *DockerCli) ConfigureAuth(flUser, flPassword, serverAddress string, is
<ide> if err != nil {
<ide> return authconfig, err
<ide> }
<del> fmt.Fprintf(cli.out, "Password: ")
<add> fmt.Fprintf(cli.Out(), "Password: ")
<ide> term.DisableEcho(cli.In().FD(), oldState)
<ide>
<del> flPassword = readInput(cli.in, cli.out)
<del> fmt.Fprint(cli.out, "\n")
<add> flPassword = readInput(cli.In(), cli.Out())
<add> fmt.Fprint(cli.Out(), "\n")
<ide>
<ide> term.RestoreTerminal(cli.In().FD(), oldState)
<ide> if flPassword == "" {
<ide> func readInput(in io.Reader, out io.Writer) string {
<ide> return string(line)
<ide> }
<ide>
<del>func (cli *DockerCli) promptWithDefault(prompt string, configDefault string) {
<add>func promptWithDefault(out io.Writer, prompt string, configDefault string) {
<ide> if configDefault == "" {
<del> fmt.Fprintf(cli.out, "%s: ", prompt)
<add> fmt.Fprintf(out, "%s: ", prompt)
<ide> } else {
<del> fmt.Fprintf(cli.out, "%s (%s): ", prompt, configDefault)
<add> fmt.Fprintf(out, "%s (%s): ", prompt, configDefault)
<ide> }
<ide> }
<ide>
<ide> // RetrieveAuthTokenFromImage retrieves an encoded auth token given a complete image
<del>// TODO: used in service/stack packages
<del>func (cli *DockerCli) RetrieveAuthTokenFromImage(ctx context.Context, image string) (string, error) {
<add>func RetrieveAuthTokenFromImage(ctx context.Context, cli *DockerCli, image string) (string, error) {
<ide> // Retrieve encoded auth token from the image reference
<del> authConfig, err := cli.resolveAuthConfigFromImage(ctx, image)
<add> authConfig, err := resolveAuthConfigFromImage(ctx, cli, image)
<ide> if err != nil {
<ide> return "", err
<ide> }
<ide> func (cli *DockerCli) RetrieveAuthTokenFromImage(ctx context.Context, image stri
<ide> }
<ide>
<ide> // resolveAuthConfigFromImage retrieves that AuthConfig using the image string
<del>func (cli *DockerCli) resolveAuthConfigFromImage(ctx context.Context, image string) (types.AuthConfig, error) {
<add>func resolveAuthConfigFromImage(ctx context.Context, cli *DockerCli, image string) (types.AuthConfig, error) {
<ide> registryRef, err := reference.ParseNamed(image)
<ide> if err != nil {
<ide> return types.AuthConfig{}, err
<ide> func (cli *DockerCli) resolveAuthConfigFromImage(ctx context.Context, image stri
<ide> if err != nil {
<ide> return types.AuthConfig{}, err
<ide> }
<del> authConfig := cli.ResolveAuthConfig(ctx, repoInfo.Index)
<del> return authConfig, nil
<add> return ResolveAuthConfig(ctx, cli, repoInfo.Index), nil
<ide> }
<ide><path>cli/command/registry/login.go
<ide> func runLogin(dockerCli *command.DockerCli, opts loginOptions) error {
<ide>
<ide> var (
<ide> serverAddress string
<del> authServer = dockerCli.ElectAuthServer(ctx)
<add> authServer = command.ElectAuthServer(ctx, dockerCli)
<ide> )
<ide> if opts.serverAddress != "" {
<ide> serverAddress = opts.serverAddress
<ide> func runLogin(dockerCli *command.DockerCli, opts loginOptions) error {
<ide>
<ide> isDefaultRegistry := serverAddress == authServer
<ide>
<del> authConfig, err := dockerCli.ConfigureAuth(opts.user, opts.password, serverAddress, isDefaultRegistry)
<add> authConfig, err := command.ConfigureAuth(dockerCli, opts.user, opts.password, serverAddress, isDefaultRegistry)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>cli/command/registry/logout.go
<ide> func runLogout(dockerCli *command.DockerCli, serverAddress string) error {
<ide> var isDefaultRegistry bool
<ide>
<ide> if serverAddress == "" {
<del> serverAddress = dockerCli.ElectAuthServer(ctx)
<add> serverAddress = command.ElectAuthServer(ctx, dockerCli)
<ide> isDefaultRegistry = true
<ide> }
<ide>
<ide><path>cli/command/service/create.go
<ide> func runCreate(dockerCli *command.DockerCli, opts *serviceOptions) error {
<ide> // only send auth if flag was set
<ide> if opts.registryAuth {
<ide> // Retrieve encoded auth token from the image reference
<del> encodedAuth, err := dockerCli.RetrieveAuthTokenFromImage(ctx, opts.image)
<add> encodedAuth, err := command.RetrieveAuthTokenFromImage(ctx, dockerCli, opts.image)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>cli/command/service/update.go
<ide> func runUpdate(dockerCli *command.DockerCli, flags *pflag.FlagSet, serviceID str
<ide> // Retrieve encoded auth token from the image reference
<ide> // This would be the old image if it didn't change in this update
<ide> image := service.Spec.TaskTemplate.ContainerSpec.Image
<del> encodedAuth, err := dockerCli.RetrieveAuthTokenFromImage(ctx, image)
<add> encodedAuth, err := command.RetrieveAuthTokenFromImage(ctx, dockerCli, image)
<ide> if err != nil {
<ide> return err
<ide> }
<ide><path>cli/command/stack/deploy.go
<ide> func deployServices(
<ide> if sendAuth {
<ide> // Retrieve encoded auth token from the image reference
<ide> image := serviceSpec.TaskTemplate.ContainerSpec.Image
<del> encodedAuth, err = dockerCli.RetrieveAuthTokenFromImage(ctx, image)
<add> encodedAuth, err = command.RetrieveAuthTokenFromImage(ctx, dockerCli, image)
<ide> if err != nil {
<ide> return err
<ide> } | 13 |
Javascript | Javascript | fix var redeclarations in test-fs-* | 754bcff73ee06553f4183fd7871cf97e40e7b084 | <ide><path>test/parallel/test-fs-utimes.js
<ide> function stat_resource(resource) {
<ide> }
<ide>
<ide> function check_mtime(resource, mtime) {
<del> var mtime = fs._toUnixTimestamp(mtime);
<add> mtime = fs._toUnixTimestamp(mtime);
<ide> var stats = stat_resource(resource);
<ide> var real_mtime = fs._toUnixTimestamp(stats.mtime);
<ide> // check up to single-second precision
<ide><path>test/parallel/test-fs-write-file.js
<ide> fs.writeFile(filename3, n, { mode: m }, function(e) {
<ide>
<ide> // test that writeFile accepts file descriptors
<ide> var filename4 = join(common.tmpDir, 'test4.txt');
<del>var buf = new Buffer(s, 'utf8');
<ide>
<ide> fs.open(filename4, 'w+', function(e, fd) {
<ide> if (e) throw e; | 2 |
Python | Python | replace false parameter by a buffer | c8ed1b8b59d4b074c7cca8605b6be97636f54318 | <ide><path>src/transformers/models/m2m_100/modeling_m2m_100.py
<ide> def make_weights(self, num_embeddings: int, embedding_dim: int, padding_idx: Opt
<ide> # in forward put the weights on the correct dtype and device of the param
<ide> emb_weights = emb_weights.to(dtype=self.weights.dtype, device=self.weights.device)
<ide>
<del> self.weights = nn.Parameter(emb_weights)
<del> self.weights.requires_grad = False
<del> self.weights.detach_()
<add> self.register_buffer("weights", emb_weights)
<ide>
<ide> @staticmethod
<ide> def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None):
<ide><path>src/transformers/models/xglm/modeling_xglm.py
<ide> def make_weights(self, num_embeddings: int, embedding_dim: int, padding_idx: Opt
<ide> # in forward put the weights on the correct dtype and device of the param
<ide> emb_weights = emb_weights.to(dtype=self.weights.dtype, device=self.weights.device)
<ide>
<del> self.weights = nn.Parameter(emb_weights)
<del> self.weights.requires_grad = False
<del> self.weights.detach_()
<add> self.register_buffer("weights", emb_weights)
<ide>
<ide> @staticmethod
<ide> def get_embedding(num_embeddings: int, embedding_dim: int, padding_idx: Optional[int] = None): | 2 |
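The patch above swaps a frozen `nn.Parameter` for `register_buffer`. A minimal, hypothetical PyTorch module (not the transformers implementation itself) illustrating the practical difference: a buffer follows `.to()`/`.half()` and is saved in `state_dict`, but is excluded from `parameters()` and therefore from the optimizer.

```python
# Hypothetical minimal module illustrating register_buffer vs. a frozen
# nn.Parameter; not the transformers code from the patch above.
import torch
import torch.nn as nn

class SinusoidalPositions(nn.Module):
    def __init__(self, num_positions: int, dim: int):
        super().__init__()
        weights = torch.zeros(num_positions, dim)  # placeholder values
        # Stored as a buffer: moves with .to()/.half() and is serialized in
        # state_dict, but never appears in parameters()/the optimizer.
        self.register_buffer("weights", weights)

    def forward(self, positions: torch.Tensor) -> torch.Tensor:
        return self.weights[positions]

module = SinusoidalPositions(8, 4)
print(list(module.parameters()))         # [] -- nothing for the optimizer
print("weights" in module.state_dict())  # True -- still saved/loaded
```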
Text | Text | realign indentation and checkstyle of bracket | 30b630b5b7294354f26c2887b60fd9e40f05e0b2 | <ide><path>guide/english/cplusplus/vector/index.md
<ide> int main(){
<ide> }
<ide> ```
<ide> In C++11, you can also sort with lambda function, which can be useful.
<del>```cpp11
<del>#include<bits/stdc++.h>
<add>```cpp
<add>#include <bits/stdc++.h>
<ide> using namespace std;
<ide>
<del>int main()
<del>{
<add>int main(){
<ide> vector<int > v {3, 1, 2};
<ide> sort(v.begin(), v.end(), [] (int i, int j) -> bool {
<ide> return i < j;
<ide> int main()
<ide> cout << e << " ";
<ide> }
<ide>
<del> return 0;
<add> return 0;
<ide> }
<ide> ```
<ide> ### Sorting Vector In Descending Order | 1 |
Python | Python | handle inplace build option for numscons | 33e34091563f318ef373a2e48a05432f38081cef | <ide><path>numpy/distutils/command/scons.py
<ide> class scons(old_build_ext):
<ide> user_options = old_build_ext.user_options + \
<ide> [('jobs=', None,
<ide> "specify number of worker threads when executing scons"),
<add> ('inplace', 'i', 'If specified, build in place.'),
<ide> ('scons-tool-path=', None, 'specify additional path '\
<ide> '(absolute) to look for scons tools'),
<ide> ('silent=', None, 'specify whether scons output should less verbose'\
<ide> def initialize_options(self):
<ide> self.scons_fcompiler = None
<ide>
<ide> self.package_list = None
<add> self.inplace = 0
<ide>
<ide> # Only critical things
<ide> self.log_level = 50
<ide> def run(self):
<ide> cmd = [scons_exec, "-f", sconscript, '-I.']
<ide> if self.jobs:
<ide> cmd.append(" --jobs=%d" % int(self.jobs))
<add> if self.inplace:
<add> cmd.append("inplace=1")
<ide> cmd.append('scons_tool_path="%s"' % self.scons_tool_path)
<ide> cmd.append('src_dir="%s"' % pdirname(sconscript))
<ide> cmd.append('pkg_name="%s"' % pkg_name)
<ide> cmd.append('log_level=%s' % self.log_level)
<ide> #cmd.append('distutils_libdir=%s' % protect_path(pjoin(self.build_lib,
<ide> # pdirname(sconscript))))
<del> cmd.append('distutils_libdir=%s' %
<add> cmd.append('distutils_libdir=%s' %
<ide> protect_path(get_distutils_libdir(self, sconscript)))
<ide>
<ide> if not self._bypass_distutils_cc: | 1 |
Ruby | Ruby | add todos based on review call | 7362459d60b9070dce0a5dcbe67f26a5e511d5ca | <ide><path>app/controllers/action_mailroom/inbound_emails_controller.rb
<ide> # TODO: Add access protection using basic auth with verified tokens. Maybe coming from credentials by default?
<add># TODO: Spam/malware catching?
<add># TODO: Specific bounces for SMTP good citizenship: 200/404/400
<ide> class ActionMailroom::InboundEmailsController < ActionController::Base
<ide> skip_forgery_protection
<ide> before_action :require_rfc822_message
<ide><path>app/models/action_mailroom/inbound_email.rb
<ide> def mail_from_raw_content(raw_email_content)
<ide> private
<ide> def extract_message_id(raw_email)
<ide> mail_from_raw_content(raw_email.read).message_id
<add> rescue => e
<add> # TODO: Assign message id if it can't be extracted?
<ide> end
<ide> end
<ide> | 2 |
Python | Python | fix .iteritems() access in flask.sessions | 135c53a5f2f990512d2be348dc16ef719233a314 | <ide><path>flask/sessions.py
<ide> from werkzeug.http import http_date, parse_date
<ide> from werkzeug.datastructures import CallbackDict
<ide> from . import Markup, json
<add>from ._compat import iteritems, text_type
<ide>
<ide> from itsdangerous import URLSafeTimedSerializer, BadSignature
<del>import six
<ide>
<ide>
<ide> def total_seconds(td):
<ide> def _tag(value):
<ide> elif isinstance(value, uuid.UUID):
<ide> return {' u': value.hex}
<ide> elif callable(getattr(value, '__html__', None)):
<del> return {' m': six.text_type(value.__html__())}
<add> return {' m': text_type(value.__html__())}
<ide> elif isinstance(value, list):
<ide> return [_tag(x) for x in value]
<ide> elif isinstance(value, datetime):
<ide> return {' d': http_date(value)}
<ide> elif isinstance(value, dict):
<del> return dict((k, _tag(v)) for k, v in six.iteritems(value))
<add> return dict((k, _tag(v)) for k, v in iteritems(value))
<ide> elif isinstance(value, str):
<ide> try:
<del> return six.text_type(value)
<add> return text_type(value)
<ide> except UnicodeError:
<ide> raise UnexpectedUnicodeError(u'A byte string with '
<ide> u'non-ASCII data was passed to the session system '
<ide> def loads(self, value):
<ide> def object_hook(obj):
<ide> if len(obj) != 1:
<ide> return obj
<del> the_key, the_value = six.advance_iterator(obj.iteritems())
<add> the_key, the_value = next(iteritems(obj))
<ide> if the_key == ' t':
<ide> return tuple(the_value)
<ide> elif the_key == ' u': | 1 |
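The patch replaces `six` usage with helpers imported from `flask._compat`. A rough, simplified sketch of what such a py2/py3 compatibility shim typically looks like (assumed for illustration, not Flask's actual `_compat` module):

```python
# Assumed, simplified sketch of a compatibility shim of the kind imported
# above as flask._compat; not the real module.
import sys

PY2 = sys.version_info[0] == 2

if PY2:
    text_type = unicode  # noqa: F821  (only defined on Python 2)

    def iteritems(d):
        return d.iteritems()
else:
    text_type = str

    def iteritems(d):
        return iter(d.items())

print(next(iteritems({"a": 1})))  # ('a', 1)
```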
PHP | PHP | use dispatchesjobs in dispatchescommands | 9b0b8e87ce000336b1dad57bba61d2a946de916a | <ide><path>src/Illuminate/Foundation/Bus/DispatchesCommands.php
<ide> use ArrayAccess;
<ide>
<ide> /**
<del> * @deprecated since version 5.1. Use the DispatchesJobs trait instead.
<add> * @deprecated since version 5.1. Use the DispatchesJobs trait directly.
<ide> */
<ide> trait DispatchesCommands {
<ide>
<del> /**
<del> * Dispatch a command to its appropriate handler.
<del> *
<del> * @param mixed $command
<del> * @return mixed
<del> */
<del> protected function dispatch($command)
<del> {
<del> return app('Illuminate\Contracts\Bus\Dispatcher')->dispatch($command);
<del> }
<del>
<del> /**
<del> * Marshal a command and dispatch it to its appropriate handler.
<del> *
<del> * @param mixed $command
<del> * @param array $array
<del> * @return mixed
<del> */
<del> protected function dispatchFromArray($command, array $array)
<del> {
<del> return app('Illuminate\Contracts\Bus\Dispatcher')->dispatchFromArray($command, $array);
<del> }
<del>
<del> /**
<del> * Marshal a command and dispatch it to its appropriate handler.
<del> *
<del> * @param mixed $command
<del> * @param \ArrayAccess $source
<del> * @param array $extras
<del> * @return mixed
<del> */
<del> protected function dispatchFrom($command, ArrayAccess $source, $extras = [])
<del> {
<del> return app('Illuminate\Contracts\Bus\Dispatcher')->dispatchFrom($command, $source, $extras);
<del> }
<add> use DispatchesJobs;
<ide>
<ide> } | 1 |
Text | Text | add imyller to collaborators | 2804518174b806da345f0924642c3f04fc39c30e | <ide><path>README.md
<ide> more information about the governance of the Node.js project, see
<ide> **Wyatt Preul** <[email protected]>
<ide> * [iarna](https://github.com/iarna) -
<ide> **Rebecca Turner** <[email protected]>
<add>* [imyller](https://github.com/imyller) -
<add>**Ilkka Myller** <[email protected]>
<ide> * [isaacs](https://github.com/isaacs) -
<ide> **Isaac Z. Schlueter** <[email protected]>
<ide> * [iWuzHere](https://github.com/iWuzHere) - | 1 |
Text | Text | add a sample to clarify animation callback | 60828566a759dc579dbae1d76a8426e1e479166e | <ide><path>docs/animated.md
<ide> In most cases, you will be using `timing()`. By default, it uses a symmetric eas
<ide>
<ide> Animations are started by calling `start()` on your animation. `start()` takes a completion callback that will be called when the animation is done. If the animation finished running normally, the completion callback will be invoked with `{finished: true}`. If the animation is done because `stop()` was called on it before it could finish (e.g. because it was interrupted by a gesture or another animation), then it will receive `{finished: false}`.
<ide>
<add>```javascript
<add>this.animateValue.spring({}).start(({finished}) => {
<add> if (finished) {
<add> console.log('Animation was completed')
<add> } else {
<add> console.log('Animation was aborted')
<add> }
<add>})
<add>```
<add>
<ide> ### Using the native driver
<ide>
<ide> By using the native driver, we send everything about the animation to native before starting the animation, allowing native code to perform the animation on the UI thread without having to go through the bridge on every frame. Once the animation has started, the JS thread can be blocked without affecting the animation. | 1 |
Javascript | Javascript | remove `install` command | a21e226b68b1a7224cf6f8925dac6ac2bee80e40 | <ide><path>local-cli/__tests__/install-test.js
<del>'use strict';
<del>
<del>jest.dontMock('../install');
<del>jest.dontMock('fs');
<del>jest.dontMock('path');
<del>
<del>var install = require('../install.js');
<del>
<del>var openingReactTag = '#<React-Native>';
<del>var closingReactTag = '#</React-Native>';
<del>
<del>var fs = require.requireActual('fs');
<del>var path = require.requireActual('path');
<del>
<del>process.chdir(__dirname);
<del>
<del>describe('setup Podfile', function() {
<del>
<del> it('creates a Podfile if none exists', function() {
<del> try {
<del> fs.unlinkSync(path.resolve(__dirname, '../Podfile'));
<del> } catch(e) {}
<del>
<del> var setupPodfile = install.setupPodfile();
<del>
<del> expect(setupPodfile.created).toBe(true);
<del> });
<del>
<del> it('does not create Podfile if one exists', function() {
<del> var setupPodfile = install.setupPodfile();
<del> expect(setupPodfile.created).toBe(false);
<del> });
<del>
<del> it('includes React Native Tags', function() {
<del> var setupPodfile = install.setupPodfile();
<del>
<del> expect(setupPodfile.podfileText).toContain(openingReactTag);
<del> expect(setupPodfile.podfileText).toContain(closingReactTag);
<del>
<del> // cleanup
<del> try {
<del> fs.unlinkSync(path.resolve(__dirname, '../Podfile'));
<del> } catch(e) {
<del> throw new Error('failed to cleanup Podfile', e);
<del> }
<del> });
<del>});
<ide><path>local-cli/cli.js
<ide> var Config = require('../private-cli/src/util/Config');
<ide> var fs = require('fs');
<ide> var generateAndroid = require('./generate-android.js');
<ide> var init = require('./init.js');
<del>var install = require('./install.js');
<ide> var newLibrary = require('./new-library.js');
<ide> var runAndroid = require('./run-android.js');
<ide> var runPackager = require('./run-packager.js');
<ide> function printUsage() {
<ide> '',
<ide> 'Commands:',
<ide> ' start: starts the webserver',
<del> ' install: installs npm react components',
<ide> ' bundle: builds the javascript bundle for offline use',
<ide> ' new-library: generates a native library bridge',
<ide> ' android: generates an Android project for your app'
<ide> function run() {
<ide> case 'start':
<ide> runPackager();
<ide> break;
<del> case 'install':
<del> install.init();
<del> break;
<ide> case 'bundle':
<ide> bundle(args, Config.get(__dirname)).done();
<ide> // bundle_DEPRECATED.init(args);
<ide><path>local-cli/install.js
<del>/**
<del> * Copyright 2004-present Facebook. All Rights Reserved.
<del> */
<del>
<del> 'use strict';
<del>
<del>var fs = require('fs');
<del>var path = require('path');
<del>var exec = require('child_process').exec;
<del>
<del>var NODE_MODULE_PATH = path.resolve(__dirname, 'node_modules');
<del>
<del>var PODFILE_PATH = path.resolve(__dirname, 'Podfile');
<del>
<del>function addDependency(name, path) {
<del> console.log('Found dependency: ' + name);
<del>
<del> var podfileText;
<del> try {
<del> podfileText = fs.readFileSync(PODFILE_PATH, 'utf8');
<del> } catch(e) {}
<del>
<del> if (podfileText.indexOf('pod \'' + name + '\'') === -1) {
<del> var indexOfReactComponents = podfileText.indexOf('#</React-Native>') - 1;
<del>
<del> var insertedDependency = '\npod \'' + name + '\', :path => \'' + path + '\'\n';
<del> var newPodfileText = [podfileText.slice(0, indexOfReactComponents),
<del> insertedDependency,
<del> podfileText.slice(indexOfReactComponents)].join('');
<del>
<del> fs.writeFileSync(PODFILE_PATH, newPodfileText);
<del> console.log('Added ' + name + ' to Podfile.');
<del> } else {
<del> console.log(name + ' already in Podfile');
<del> }
<del>}
<del>
<del>function installDependecies() {
<del> console.log('Installing dependencies...');
<del> exec('pod install', function(error, stdout, stderr) {
<del> if (!stderr) {
<del> console.log('Installed Pod dependencies.');
<del> } else {
<del> console.error('Error installing Pod dependencies.', stderr);
<del> }
<del> process.exit(1);
<del> });
<del>}
<del>
<del>module.exports = {
<del> setupPodfile: function() {
<del> var returnArgs = {
<del> created: false
<del> };
<del>
<del> var podfileText;
<del> try {
<del> podfileText = fs.readFileSync(PODFILE_PATH, 'utf8');
<del> } catch(e) {}
<del>
<del> var openingReactTag = '#<React-Native>';
<del> var closingReactTag = '\n#</React-Native>';
<del> var reactPodfileBoilerplate = openingReactTag + closingReactTag;
<del>
<del> if (!podfileText) {
<del> returnArgs.created = true;
<del> fs.appendFileSync(PODFILE_PATH, reactPodfileBoilerplate);
<del> } else {
<del> if (podfileText.indexOf(openingReactTag) === -1 || podfileText.indexOf(closingReactTag) === -1) {
<del> fs.appendFileSync(PODFILE_PATH, reactPodfileBoilerplate);
<del> }
<del> }
<del>
<del> try {
<del> podfileText = fs.readFileSync(PODFILE_PATH, 'utf8');
<del> returnArgs.podfileText = podfileText;
<del> } catch(e) {}
<del>
<del> if (podfileText.indexOf('pod \'React\'') === -1) {
<del> var indexOfReactComponents = podfileText.indexOf(openingReactTag) + openingReactTag.length;
<del>
<del> var insertedReactDependency = '\npod \'React\', :path => \'node_modules/react-native\'\n';
<del> try {
<del> var newPodfileText = [podfileText.slice(0, indexOfReactComponents),
<del> insertedReactDependency,
<del> podfileText.slice(indexOfReactComponents)].join('');
<del>
<del> fs.writeFileSync(PODFILE_PATH, newPodfileText);
<del> returnArgs.podfileText = newPodfileText;
<del> } catch(e) {
<del> throw e;
<del> }
<del> }
<del>
<del> return returnArgs;
<del> },
<del> init: function(arguement) {
<del> // arguement is available for future arguement commands
<del> console.log('Searching for installable React Native components...');
<del> this.setupPodfile();
<del>
<del> var nodeModuleList = fs.readdirSync(NODE_MODULE_PATH);
<del>
<del> if (nodeModuleList.length > 0) {
<del> nodeModuleList.forEach(function(nodeModule) {
<del> // Module would not start with '.' hidden file identifier
<del> if (nodeModule.charAt(0) !== '.') {
<del> var modulePath = './node_modules/' + nodeModule;
<del>
<del> var nodeModulePackage;
<del> try {
<del> nodeModulePackage = fs.readFileSync(modulePath + '/package.json', 'utf8');
<del> } catch(error) {
<del> console.error('Error reading Node Module: `%s` package.json', nodeModule);
<del> throw error;
<del> }
<del>
<del> var packageJSON = JSON.parse(nodeModulePackage);
<del> console.log(packageJSON.hasOwnProperty('react-native-component'));
<del> if (packageJSON.hasOwnProperty('react-native-component')) {
<del> addDependency(nodeModule, modulePath);
<del> }
<del> }
<del> });
<del>
<del> installDependecies();
<del> } else {
<del> console.error('./node_modules directory contains 0 modules');
<del> console.log('No React Native components found.');
<del> process.exit(1);
<del> }
<del> }
<del>}; | 3 |
Go | Go | add integration tests for encrypted swarm | 8b1f72ad44f03d4786cb3e881b480c94a143885f | <ide><path>integration-cli/daemon.go
<ide> func (d *Daemon) getBaseDeviceSize(c *check.C) int64 {
<ide> // Cmd will execute a docker CLI command against this Daemon.
<ide> // Example: d.Cmd("version") will run docker -H unix://path/to/unix.sock version
<ide> func (d *Daemon) Cmd(args ...string) (string, error) {
<del> c := exec.Command(dockerBinary, d.prependHostArg(args)...)
<del> b, err := c.CombinedOutput()
<add> b, err := d.command(args...).CombinedOutput()
<ide> return string(b), err
<ide> }
<ide>
<add>func (d *Daemon) command(args ...string) *exec.Cmd {
<add> return exec.Command(dockerBinary, d.prependHostArg(args)...)
<add>}
<add>
<ide> func (d *Daemon) prependHostArg(args []string) []string {
<ide> for _, arg := range args {
<ide> if arg == "--host" || arg == "-H" {
<ide><path>integration-cli/docker_cli_swarm_test.go
<ide> package main
<ide>
<ide> import (
<add> "bytes"
<ide> "encoding/json"
<ide> "fmt"
<ide> "io/ioutil"
<ide> func (s *DockerSwarmSuite) TestDNSConfigUpdate(c *check.C) {
<ide> c.Assert(err, checker.IsNil)
<ide> c.Assert(strings.TrimSpace(out), checker.Equals, "{[1.2.3.4] [example.com] [timeout:3]}")
<ide> }
<add>
<add>func (s *DockerSwarmSuite) TestSwarmInitLocked(c *check.C) {
<add> d := s.AddDaemon(c, false, false)
<add>
<add> cmd := d.command("swarm", "init", "--lock-key")
<add> cmd.Stdin = bytes.NewBufferString("my-secret-key")
<add> out, err := cmd.CombinedOutput()
<add> c.Assert(err, checker.IsNil, check.Commentf("out: %v", string(out)))
<add>
<add> c.Assert(string(out), checker.Contains, "docker swarm unlock")
<add>
<add> info, err := d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive)
<add>
<add> c.Assert(d.Stop(), checker.IsNil)
<add> c.Assert(d.Start(), checker.IsNil)
<add>
<add> info, err = d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateLocked)
<add>
<add> cmd = d.command("swarm", "unlock")
<add> cmd.Stdin = bytes.NewBufferString("wrong-secret-key")
<add> out, err = cmd.CombinedOutput()
<add> c.Assert(err, checker.NotNil, check.Commentf("out: %v", string(out)))
<add> c.Assert(string(out), checker.Contains, "invalid key")
<add>
<add> cmd = d.command("swarm", "unlock")
<add> cmd.Stdin = bytes.NewBufferString("my-secret-key")
<add> out, err = cmd.CombinedOutput()
<add> c.Assert(err, checker.IsNil, check.Commentf("out: %v", string(out)))
<add>
<add> info, err = d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive)
<add>}
<add>
<add>func (s *DockerSwarmSuite) TestSwarmLeaveLocked(c *check.C) {
<add> d := s.AddDaemon(c, false, false)
<add>
<add> cmd := d.command("swarm", "init", "--lock-key")
<add> cmd.Stdin = bytes.NewBufferString("foobar")
<add> out, err := cmd.CombinedOutput()
<add> c.Assert(err, checker.IsNil, check.Commentf("out: %v", string(out)))
<add>
<add> c.Assert(d.Stop(), checker.IsNil)
<add> c.Assert(d.Start(), checker.IsNil)
<add>
<add> info, err := d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateLocked)
<add>
<add> outs, err := d.Cmd("swarm", "leave", "--force")
<add> c.Assert(err, checker.IsNil, check.Commentf("out: %v", outs))
<add>
<add> info, err = d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateInactive)
<add>
<add> outs, err = d.Cmd("swarm", "init")
<add> c.Assert(err, checker.IsNil, check.Commentf("out: %v", outs))
<add>
<add> info, err = d.info()
<add> c.Assert(err, checker.IsNil)
<add> c.Assert(info.LocalNodeState, checker.Equals, swarm.LocalNodeStateActive)
<add>} | 2 |
PHP | PHP | add hasany() method to request | bec5df0410516ceb62d5d4acd7db6927dbdca675 | <ide><path>src/Illuminate/Http/Concerns/InteractsWithInput.php
<ide> public function has($key)
<ide> return true;
<ide> }
<ide>
<add> /**
<add> * Determine if the request contains any of the given inputs.
<add> *
<add> * @param dynamic $key
<add> * @return bool
<add> */
<add> public function hasAny(...$keys)
<add> {
<add> $input = $this->all();
<add>
<add> foreach ($keys as $key) {
<add> if (Arr::has($input, $key)) {
<add> return true;
<add> }
<add> }
<add>
<add> return false;
<add> }
<add>
<ide> /**
<ide> * Determine if the request contains a non-empty value for an input item.
<ide> *
<ide><path>tests/Http/HttpRequestTest.php
<ide> public function testHasMethod()
<ide> $this->assertTrue($request->has('foo.baz'));
<ide> }
<ide>
<add> public function testHasAnyMethod()
<add> {
<add> $request = Request::create('/', 'GET', ['name' => 'Taylor', 'age' => '', 'city' => null]);
<add> $this->assertTrue($request->hasAny('name'));
<add> $this->assertTrue($request->hasAny('age'));
<add> $this->assertTrue($request->hasAny('city'));
<add> $this->assertFalse($request->hasAny('foo'));
<add> $this->assertTrue($request->hasAny('name', 'email'));
<add>
<add> $request = Request::create('/', 'GET', ['name' => 'Taylor', 'email' => 'foo']);
<add> $this->assertTrue($request->hasAny('name', 'email'));
<add> $this->assertFalse($request->hasAny('surname', 'password'));
<add>
<add> $request = Request::create('/', 'GET', ['foo' => ['bar' => null, 'baz' => '']]);
<add> $this->assertTrue($request->hasAny('foo.bar'));
<add> $this->assertTrue($request->hasAny('foo.baz'));
<add> $this->assertFalse($request->hasAny('foo.bax'));
<add> }
<add>
<ide> public function testFilledMethod()
<ide> {
<ide> $request = Request::create('/', 'GET', ['name' => 'Taylor', 'age' => '', 'city' => null]); | 2 |
PHP | PHP | add test cases | 66211ebed5a737e2c21fa94a43440ab456245e79 | <ide><path>src/View/Helper/HtmlHelper.php
<ide> public function link($title, $url = null, array $options = []): string
<ide> * over value of `escape`)
<ide> * - `confirm` JavaScript confirmation message.
<ide> *
<del> * @param string|array $title The content to be wrapped by `<a>` tags.
<del> * Can be an array if $url is null. If $url is null, $title will be used as both the URL and title.
<add> * @param string $title The content to be wrapped by `<a>` tags.
<ide> * @param string $path Cake-relative route path.
<ide> * @param array $options Array of options and HTML attributes.
<ide> * @return string An `<a />` element.
<add> * @see \Cake\Routing\Router::pathUrl()
<ide> * @link https://book.cakephp.org/3/en/views/helpers/html.html#creating-links
<ide> */
<del> public function linkFromPath($title, string $path, array $options = []): string
<add> public function linkFromPath(string $title, string $path, array $options = []): string
<ide> {
<ide> return $this->link($title, ['_path' => $path], $options);
<ide> }
<ide><path>src/View/Helper/UrlHelper.php
<ide> public function build($url = null, array $options = []): string
<ide> * @param string $path Cake-relative route path.
<ide> * @param array $options Array of options.
<ide> * @return string Full translated URL with base path.
<add> * @see \Cake\Routing\Router::pathUrl()
<ide> */
<ide> public function buildFromPath(string $path, array $options = []): string
<ide> {
<del> return $this->build($path, $options);
<add> return $this->build(['_path' => $path], $options);
<ide> }
<ide>
<ide> /**
<ide><path>tests/TestCase/View/Helper/HtmlHelperTest.php
<ide> use Cake\Core\Plugin;
<ide> use Cake\Filesystem\Filesystem;
<ide> use Cake\Http\ServerRequest;
<add>use Cake\Routing\Route\DashedRoute;
<ide> use Cake\Routing\RouteBuilder;
<ide> use Cake\Routing\Router;
<ide> use Cake\TestSuite\TestCase;
<ide> use Cake\View\Helper\HtmlHelper;
<add>use Cake\View\View;
<ide>
<ide> /**
<ide> * HtmlHelperTest class
<ide> public function setUp(): void
<ide> Router::reload();
<ide> Router::setRequest($request);
<ide>
<del> $this->View = $this->getMockBuilder('Cake\View\View')
<add> $this->View = $this->getMockBuilder(View::class)
<ide> ->setMethods(['append'])
<ide> ->setConstructorArgs([$request])
<ide> ->getMock();
<ide> public function setUp(): void
<ide> Configure::write('Asset.timestamp', false);
<ide>
<ide> Router::scope('/', function (RouteBuilder $routes) {
<del> $routes->fallbacks();
<add> $routes->fallbacks(DashedRoute::class);
<ide> });
<ide> }
<ide>
<ide> public function testLink()
<ide> $this->assertHtml($expected, $result);
<ide> }
<ide>
<add> /**
<add> * @return void
<add> */
<add> public function testLinkFromPath(): void
<add> {
<add> $expected = '<a href="/articles">Home</a>';
<add> $result = $this->Html->linkFromPath('Home', 'Articles::index');
<add> $this->assertSame($result, $expected);
<add> }
<add>
<ide> /**
<ide> * testImageTag method
<ide> *
<ide><path>tests/TestCase/View/Helper/UrlHelperTest.php
<ide>
<ide> use Cake\Core\Configure;
<ide> use Cake\Http\ServerRequest;
<add>use Cake\Routing\Route\DashedRoute;
<ide> use Cake\Routing\RouteBuilder;
<ide> use Cake\Routing\Router;
<ide> use Cake\TestSuite\TestCase;
<ide> public function setUp(): void
<ide> static::setAppNamespace();
<ide> $this->loadPlugins(['TestTheme']);
<ide> Router::scope('/', function (RouteBuilder $routes) {
<del> $routes->fallbacks();
<add> $routes->fallbacks(DashedRoute::class);
<ide> });
<ide> }
<ide>
<ide> public function testBuildUrlConversionUnescaped()
<ide> $this->assertSame('/posts/view?k=v&1=2¶m=%257Baround%2520here%257D%255Bthings%255D%255Bare%255D%2524%2524', $result);
<ide> }
<ide>
<add> /**
<add> * @return void
<add> */
<add> public function testBuildFromPath(): void
<add> {
<add> $expected = '/articles';
<add> $result = Router::pathUrl('Articles::index');
<add> $this->assertSame($result, $expected);
<add> }
<add>
<ide> /**
<ide> * test assetTimestamp application
<ide> * | 4 |
Go | Go | add benchmark tests for truncindex | 84a76f27962d4f4b3871abe925ca17024bdeea73 | <ide><path>utils/utils_test.go
<ide> func assertIndexGet(t *testing.T, index *TruncIndex, input, expectedResult strin
<ide> }
<ide> }
<ide>
<add>func BenchmarkTruncIndexAdd(b *testing.B) {
<add> ids := []string{"banana", "bananaa", "bananab"}
<add> b.ResetTimer()
<add> for i := 0; i < b.N; i++ {
<add> index := NewTruncIndex([]string{})
<add> for _, id := range ids {
<add> index.Add(id)
<add> }
<add> }
<add>}
<add>
<add>func BenchmarkTruncIndexNew(b *testing.B) {
<add> ids := []string{"banana", "bananaa", "bananab"}
<add> b.ResetTimer()
<add> for i := 0; i < b.N; i++ {
<add> NewTruncIndex(ids)
<add> }
<add>}
<add>
<ide> func assertKernelVersion(t *testing.T, a, b *KernelVersionInfo, result int) {
<ide> if r := CompareKernelVersion(a, b); r != result {
<ide> t.Fatalf("Unexpected kernel version comparison result. Found %d, expected %d", r, result) | 1 |
Javascript | Javascript | remove trailing whitespace in new test | 868a2c401fd75fd95c50dc25d531734d6443657e | <ide><path>test/jqLiteSpec.js
<ide> describe('jqLite', function() {
<ide> };
<ide> iframe_.src = "/base/test/fixtures/iframe.html";
<ide> jqLite(document).find('body').append(iframe);
<del>
<add>
<ide> // This test is potentially flaky on CI cloud instances, so there is a generous
<ide> // wait period...
<ide> waitsFor(function() { return tested; }, 2000); | 1 |
Python | Python | add test for gzip loader | 34f564a48d8530dc21fe59c13d35879fd6808bf7 | <ide><path>numpy/lib/tests/test_io.py
<ide> def test_recfromcsv(self):
<ide> self.failUnless(isinstance(test, np.recarray))
<ide> assert_equal(test, control)
<ide>
<add>def test_gzip_load():
<add> import gzip
<add> from StringIO import StringIO
<ide>
<add> a = np.random.random((5, 5))
<add>
<add> s = StringIO()
<add> f = gzip.GzipFile(fileobj=s, mode="w")
<add>
<add> np.save(f, a)
<add> f.close()
<add> s.seek(0)
<add>
<add> f = gzip.GzipFile(fileobj=s, mode="r")
<add> assert_array_equal(np.load(f), a)
<ide>
<ide>
<ide> if __name__ == "__main__": | 1 |
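The same round trip exercised by `test_gzip_load`, written for Python 3 with `io.BytesIO` in place of `StringIO` (a sketch assuming a reasonably recent NumPy):

```python
# Python 3 variant of the gzip save/load round trip tested above.
import gzip
import io

import numpy as np

a = np.random.random((5, 5))

buf = io.BytesIO()
with gzip.GzipFile(fileobj=buf, mode="w") as f:
    np.save(f, a)

buf.seek(0)
with gzip.GzipFile(fileobj=buf, mode="r") as f:
    assert np.array_equal(np.load(f), a)
```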
Ruby | Ruby | remove unnecessary setup and teardown | 4d9e4ea47ac2e48960bf901d9eeb6731ab39a507 | <ide><path>actionmailer/test/mail_layout_test.rb
<ide> def logout
<ide> end
<ide>
<ide> class LayoutMailerTest < ActiveSupport::TestCase
<del> def setup
<del> set_delivery_method :test
<del> ActionMailer::Base.perform_deliveries = true
<del> ActionMailer::Base.deliveries.clear
<del> end
<del>
<del> def teardown
<del> restore_delivery_method
<del> end
<del>
<ide> def test_should_pickup_default_layout
<ide> mail = AutoLayoutMailer.hello
<ide> assert_equal "Hello from layout Inside", mail.body.to_s.strip | 1 |
Python | Python | add inbetween print statement | 70a95045603827fa2ca582128450702c67a89ab9 | <ide><path>examples/pipeline/multi_processing.py
<ide> def main(output_dir, model='en_core_web_sm', n_jobs=4, batch_size=1000,
<ide> print("Loading IMDB data...")
<ide> data, _ = thinc.extra.datasets.imdb()
<ide> texts, _ = zip(*data[-limit:])
<add> print("Processing texts...")
<ide> partitions = partition_all(batch_size, texts)
<ide> items = ((i, [nlp(text) for text in texts], output_dir) for i, texts
<ide> in enumerate(partitions)) | 1 |
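For context on the batching in this example: `partition_all` (provided by the `toolz`/`cytoolz` package, assumed installed) chunks an iterable into fixed-size batches, with a shorter final batch.

```python
# partition_all splits an iterable into tuples of at most n items; the last
# chunk may be shorter. Assumes the toolz package is available.
from toolz import partition_all

batches = list(partition_all(2, range(5)))
print(batches)  # [(0, 1), (2, 3), (4,)]
```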
Text | Text | apply suggestions from code review | 0fed05a7522797f256086d0b042fc0702cec6a4e | <ide><path>docs/Homebrew-brew-Maintainer-Guide.md
<ide> reports are available to Homebrew maintainers on [buildpulse.io](https://buildpu
<ide> summaries are published to [`#buildpulse-health`](https://machomebrew.slack.com/archives/C0268BSJBJ8) in Slack.
<ide>
<ide> BuildPulse can be used as a guide to identify which flaky tests are causing the most disruption to the CI suite. To make
<del>the biggest improvements to the reliability of the build, we can focus on the most disruptive flaky tests first (i.e.,
<add>the biggest improvements to the reliability of the build, we can focus on the most disruptive flaky tests first (i.e.
<ide> the tests causing the most intermittent failures).
<ide>
<ide> To help find the root cause for a particular flaky test, buildpulse.io provides links to the most recent CI job and
<ide> commit where the test failed and then passed with no change to the underlying code. You may want to check out the code
<ide> at that commit to attempt to reproduce the failure locally. You can also see the list of recent failures on
<del>buildpulse.io to determine if the test always fails the same way.
<add>[buildpulse.io](https://buildpulse.io) to determine if the test always fails the same way.
<ide>
<ide> ## Manpages and shell completions
<ide> | 1 |
Text | Text | correct the typo "wether" | d9059ceb2482b29b91fe9cc0ad67bc5f3045a312 | <ide><path>guide/english/algorithms/flood-fill/index.md
<ide> next.
<ide> The problem is pretty simple and usually follows these steps:
<ide>
<ide> 1. Take the position of the starting point.
<del> 2. Decide wether you want to go in 4 directions (**N, S, W, E**) or 8 directions (**N, S, W, E, NW, NE, SW, SE**).
<add> 2. Decide whether you want to go in 4 directions (**N, S, W, E**) or 8 directions (**N, S, W, E, NW, NE, SW, SE**).
<ide> 3. Choose a replacement color and a target color.
<ide> 4. Travel in those directions.
<ide> 5. If the tile you land on is a target, replace it with the chosen color. | 1 |
Javascript | Javascript | correct function extensions | c6a52be3da2d035c149ef65ebc6ca92baab56fb9 | <ide><path>packages/ember-runtime/lib/ext/function.js
<ide>
<ide> import { ENV } from 'ember-environment';
<ide> import {
<add> on,
<ide> computed,
<ide> observer
<ide> } from 'ember-metal';
<ide> import { assert, deprecateFunc } from 'ember-debug';
<ide>
<del>const a_slice = Array.prototype.slice;
<ide> const FunctionPrototype = Function.prototype;
<ide>
<ide> if (ENV.EXTEND_PROTOTYPES.Function) {
<ide> if (ENV.EXTEND_PROTOTYPES.Function) {
<ide> @public
<ide> */
<ide> FunctionPrototype.property = function () {
<del> let ret = computed(this);
<del> // ComputedProperty.prototype.property expands properties; no need for us to
<del> // do so here.
<del> return ret.property(...arguments);
<add> return computed(...arguments, this);
<ide> };
<ide>
<ide> /**
<ide> if (ENV.EXTEND_PROTOTYPES.Function) {
<ide> @for Function
<ide> @public
<ide> */
<del> FunctionPrototype.observes = function(...args) {
<del> args.push(this);
<del> return observer.apply(this, args);
<add> FunctionPrototype.observes = function() {
<add> return observer(...arguments, this);
<ide> };
<ide>
<ide>
<ide> if (ENV.EXTEND_PROTOTYPES.Function) {
<ide> @public
<ide> */
<ide> FunctionPrototype.on = function () {
<del> let events = a_slice.call(arguments);
<del> this.__ember_listens__ = events;
<del>
<del> return this;
<add> return on(...arguments, this);
<ide> };
<ide> } | 1 |
Python | Python | add resnet56 cpu benchmark and accuracy tests. | f21337b1a88559f0866509aaf37bcb19716cd678 | <ide><path>official/resnet/keras/keras_cifar_benchmark.py
<ide> def benchmark_1_gpu(self):
<ide> FLAGS.enable_eager = True
<ide> self._run_and_report_benchmark()
<ide>
<add> def benchmark_cpu(self):
<add> """Test keras based model on CPU."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.data_dir = self.data_dir
<add> FLAGS.batch_size = 128
<add> FLAGS.train_epochs = 182
<add> FLAGS.model_dir = self._get_model_dir('benchmark_cpu')
<add> FLAGS.dtype = 'fp32'
<add> FLAGS.enable_eager = True
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_cpu_no_dist_strat(self):
<add> """Test keras based model on CPU without distribution strategies."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.data_dir = self.data_dir
<add> FLAGS.batch_size = 128
<add> FLAGS.train_epochs = 182
<add> FLAGS.model_dir = self._get_model_dir('benchmark_cpu_no_dist_strat')
<add> FLAGS.dtype = 'fp32'
<add> FLAGS.enable_eager = True
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_cpu_no_dist_strat_run_eagerly(self):
<add> """Test keras based model on CPU w/forced eager and no dist_strat."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.data_dir = self.data_dir
<add> FLAGS.batch_size = 128
<add> FLAGS.train_epochs = 182
<add> FLAGS.model_dir = self._get_model_dir(
<add> 'benchmark_cpu_no_dist_strat_run_eagerly')
<add> FLAGS.dtype = 'fp32'
<add> FLAGS.enable_eager = True
<add> FLAGS.run_eagerly = True
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<ide> def benchmark_1_gpu_no_dist_strat(self):
<ide> """Test keras based model with eager and no dist strat."""
<ide> self._setup()
<ide> def benchmark_1_gpu_no_dist_strat(self):
<ide> self._run_and_report_benchmark()
<ide>
<ide> def benchmark_1_gpu_no_dist_strat_run_eagerly(self):
<del> """Test keras based model with forced eager and no dist_strat."""
<add> """Test keras based model w/forced eager and no dist_strat."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.data_dir = self.data_dir
<ide> def _run_and_report_benchmark(self):
<ide> total_batch_size=FLAGS.batch_size,
<ide> log_steps=FLAGS.log_steps)
<ide>
<del> def benchmark_1_gpu_no_dist_strat(self):
<del> self._setup()
<del> FLAGS.num_gpus = 1
<del> FLAGS.enable_eager = True
<del> FLAGS.distribution_strategy = 'off'
<del> FLAGS.model_dir = self._get_model_dir('benchmark_1_gpu_no_dist_strat')
<del> FLAGS.batch_size = 128
<del> self._run_and_report_benchmark()
<del>
<del> def benchmark_1_gpu_no_dist_strat_tweaked(self):
<del> """Test no distribution strategy with manual config."""
<add> def benchmark_1_gpu(self):
<add> """Test 1 gpu."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.enable_eager = True
<del> FLAGS.explicit_gpu_placement = True
<del> FLAGS.distribution_strategy = 'off'
<del> FLAGS.set_learning_phase_to_train = False
<del> FLAGS.model_dir = self._get_model_dir(
<del> 'benchmark_1_gpu_no_dist_strat_tweaked')
<add> FLAGS.distribution_strategy = 'default'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_1_gpu')
<ide> FLAGS.batch_size = 128
<ide> self._run_and_report_benchmark()
<ide>
<del> def benchmark_graph_1_gpu_no_dist_strat(self):
<add> def benchmark_graph_1_gpu(self):
<add> """Test 1 gpu graph."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.enable_eager = False
<del> FLAGS.distribution_strategy = 'off'
<del> FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu_no_dist_strat')
<add> FLAGS.distribution_strategy = 'default'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu')
<ide> FLAGS.batch_size = 128
<ide> self._run_and_report_benchmark()
<ide>
<del> def benchmark_1_gpu(self):
<add> def benchmark_1_gpu_no_dist_strat(self):
<add> """Test 1 gpu without distribution strategies."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.enable_eager = True
<del> FLAGS.distribution_strategy = 'default'
<del> FLAGS.model_dir = self._get_model_dir('benchmark_1_gpu')
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_1_gpu_no_dist_strat')
<ide> FLAGS.batch_size = 128
<ide> self._run_and_report_benchmark()
<ide>
<del> def benchmark_graph_1_gpu(self):
<add> def benchmark_graph_1_gpu_no_dist_strat(self):
<add> """Test 1 gpu graph mode without distribution strategies."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.enable_eager = False
<del> FLAGS.distribution_strategy = 'default'
<del> FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu')
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_graph_1_gpu_no_dist_strat')
<ide> FLAGS.batch_size = 128
<ide> self._run_and_report_benchmark()
<ide>
<ide> def benchmark_1_gpu_no_dist_strat_run_eagerly(self):
<del> """Test keras based model with forced eager."""
<add> """Test 1 gpu without distribution strategy and forced eager."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 1
<ide> FLAGS.batch_size = 128
<ide> def benchmark_1_gpu_no_dist_strat_run_eagerly(self):
<ide> self._run_and_report_benchmark()
<ide>
<ide> def benchmark_2_gpu(self):
<add> """Test 2 gpu."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 2
<ide> FLAGS.enable_eager = True
<ide> def benchmark_2_gpu(self):
<ide> self._run_and_report_benchmark()
<ide>
<ide> def benchmark_graph_2_gpu(self):
<add> """Test 2 gpu graph mode."""
<ide> self._setup()
<ide> FLAGS.num_gpus = 2
<ide> FLAGS.enable_eager = False
<ide> def benchmark_graph_2_gpu(self):
<ide> FLAGS.batch_size = 128 * 2 # 2 GPUs
<ide> self._run_and_report_benchmark()
<ide>
<add> def benchmark_cpu(self):
<add> """Test cpu."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.enable_eager = True
<add> FLAGS.model_dir = self._get_model_dir('benchmark_cpu')
<add> FLAGS.batch_size = 128
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_graph_cpu(self):
<add> """Test cpu graph mode."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.enable_eager = False
<add> FLAGS.model_dir = self._get_model_dir('benchmark_graph_cpu')
<add> FLAGS.batch_size = 128
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_cpu_no_dist_strat_run_eagerly(self):
<add> """Test cpu without distribution strategy and forced eager."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.enable_eager = True
<add> FLAGS.run_eagerly = True
<add> FLAGS.model_dir = self._get_model_dir(
<add> 'benchmark_cpu_no_dist_strat_run_eagerly')
<add> FLAGS.batch_size = 128
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_cpu_no_dist_strat(self):
<add> """Test cpu without distribution strategies."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.enable_eager = True
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_cpu_no_dist_strat')
<add> FLAGS.batch_size = 128
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<add> def benchmark_graph_cpu_no_dist_strat(self):
<add> """Test cpu graph mode without distribution strategies."""
<add> self._setup()
<add> FLAGS.num_gpus = 0
<add> FLAGS.enable_eager = False
<add> FLAGS.distribution_strategy = 'off'
<add> FLAGS.model_dir = self._get_model_dir('benchmark_graph_cpu_no_dist_strat')
<add> FLAGS.batch_size = 128
<add> FLAGS.data_format = 'channels_last'
<add> self._run_and_report_benchmark()
<add>
<ide>
<ide> class Resnet56KerasBenchmarkSynth(Resnet56KerasBenchmarkBase):
<ide> """Synthetic benchmarks for ResNet56 and Keras.""" | 1 |
Text | Text | add 2.15.1 to changelog.md | 9a6b038d303ade2f643ad0499054457403f86294 | <ide><path>CHANGELOG.md
<ide> - [#15528](https://github.com/emberjs/ember.js/pull/15528) [DEPRECATION] Deprecate `Controller#content` alias.
<ide> - [#15552](https://github.com/emberjs/ember.js/pull/15552) [FEATURE] Update blueprints and tests to RFC #176.
<ide>
<add>### 2.15.1 (October 2, 2017)
<add>
<add>- [#15600](https://github.com/emberjs/ember.js/pull/15600) [BUGFIX] ensure “pause on exception” pauses in the right place.
<add>- [#15689](https://github.com/emberjs/ember.js/pull/15689) [BUGFIX] Mark error as handled before transition for error routes and substates.
<add>
<ide> ### 2.15.0 (August 31, 2017)
<ide>
<ide> - [#15577](https://github.com/emberjs/ember.js/pull/15577) [BUGFIX] Include missing sourcemaps in vendorTree. | 1 |
Javascript | Javascript | fix breakage on chrome 73 | 1aa70297edd063e397a250f6dcbfe53524abebd8 | <ide><path>examples/js/vr/HelioWebXRPolyfill.js
<ide> if ( /(Helio)/g.test( navigator.userAgent ) && "xr" in navigator ) {
<ide>
<ide> // WebXRManager - xrFrame.getPose() Polyfill - line 259
<ide>
<del> const tempGetPose = frame.getPose.bind( frame );
<add> const tempGetPose = (isHelio96 ? null : frame.getPose.bind( frame ));
<ide>
<ide> frame.getPose = function ( targetRaySpace, referenceSpace ) {
<ide> | 1 |
Ruby | Ruby | add gtk-doc 1.31 | 3a6a75ac0e5af4c9069fce67d260662e6784eb70 | <ide><path>Library/Homebrew/dev-cmd/audit.rb
<ide> def audit_specs
<ide> libart 2.3.21
<ide> pygtkglext 1.1.0
<ide> gtk-mac-integration 2.1.3
<add> gtk-doc 1.31
<ide> ].each_slice(2).to_a.map do |formula, version|
<ide> [formula, version.split(".")[0..1].join(".")]
<ide> end | 1 |
Text | Text | add readme for the daemonconfig directory | 8dfd4b677b60bc2b5de0214bb3cef1a04da12bed | <ide><path>daemonconfig/README.md
<add>This directory contains code pertaining to the configuration of the docker daemon
<add>
<add>These are the configuration settings that you pass to the docker daemon when you launch it with say: `docker -d -e lxc` | 1 |
Python | Python | fix issues in layer conversion interfaces | 7696a139956577fd0f5b527698341d2c4d9e90eb | <ide><path>keras/legacy/interfaces.py
<ide> def wrapper(*args, **kwargs):
<ide> raise TypeError('Layer `' + layer_name +
<ide> '` can accept only ' +
<ide> str(len(allowed_positional_args)) +
<del> ' positional arguments (' +
<del> str(allowed_positional_args) + '), but '
<add> ' positional arguments ' +
<add> str(tuple(allowed_positional_args)) + ', but '
<ide> 'you passed the following '
<ide> 'positional arguments: ' +
<del> str(args[1:]))
<add> str(list(args[1:])))
<ide> for key in value_conversions:
<ide> if key in kwargs:
<ide> old_value = kwargs[key]
<ide> def wrapper(*args, **kwargs):
<ide> converted.append((new_name, old_name))
<ide> if converted:
<ide> signature = '`' + layer_name + '('
<del> for value in args[1:]:
<add> for i, value in enumerate(args[1:]):
<ide> if isinstance(value, six.string_types):
<ide> signature += '"' + value + '"'
<ide> else:
<ide> signature += str(value)
<del> signature += ', '
<add> if i < len(args[1:]) - 1 or kwargs:
<add> signature += ', '
<ide> for i, (name, value) in enumerate(kwargs.items()):
<ide> signature += name + '='
<ide> if isinstance(value, six.string_types):
<ide> def lstm_args_preprocessor(args, kwargs):
<ide>
<ide>
<ide> def conv1d_args_preprocessor(args, kwargs):
<add> converted = []
<ide> if 'input_dim' in kwargs:
<ide> if 'input_length' in kwargs:
<ide> length = kwargs.pop('input_length')
<ide> else:
<ide> length = None
<ide> input_shape = (length, kwargs.pop('input_dim'))
<ide> kwargs['input_shape'] = input_shape
<del> return args, kwargs, [('input_shape', 'input_dim')]
<add> converted.append(('input_shape', 'input_dim'))
<add> return args, kwargs, converted
<ide>
<ide> legacy_conv1d_support = generate_legacy_interface(
<ide> allowed_positional_args=['filters', 'kernel_size'],
<ide> def conv1d_args_preprocessor(args, kwargs):
<ide>
<ide>
<ide> def conv2d_args_preprocessor(args, kwargs):
<add> converted = []
<ide> if len(args) > 4:
<ide> raise TypeError('Layer can receive at most 3 positional arguments.')
<ide> if len(args) == 4:
<ide> def conv2d_args_preprocessor(args, kwargs):
<ide> 'as a keyword argument instead.')
<ide> kernel_size = (args[2], args[3])
<ide> args = [args[0], args[1], kernel_size]
<add> converted.append(('kernel_size', 'nb_row/nb_col'))
<ide> elif len(args) == 3 and isinstance(args[2], int):
<ide> if 'nb_col' in kwargs:
<ide> kernel_size = (args[2], kwargs.pop('nb_col'))
<ide> args = [args[0], args[1], kernel_size]
<add> converted.append(('kernel_size', 'nb_row/nb_col'))
<ide> elif len(args) == 2:
<ide> if 'nb_row' in kwargs and 'nb_col' in kwargs:
<ide> kernel_size = (kwargs.pop('nb_row'), kwargs.pop('nb_col'))
<ide> args = [args[0], args[1], kernel_size]
<del> return args, kwargs, [('kernel_size', 'nb_row/nb_col')]
<add> converted.append(('kernel_size', 'nb_row/nb_col'))
<add> return args, kwargs, converted
<ide>
<ide> legacy_conv2d_support = generate_legacy_interface(
<ide> allowed_positional_args=['filters', 'kernel_size'], | 1 |
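The point of the fix above is that the legacy-interface preprocessors now build up the list of performed conversions branch by branch and always return it, instead of reporting a `kernel_size` conversion unconditionally. Below is a minimal sketch of what such a preprocessor does to a legacy `Convolution2D(64, 3, 3)`-style call; it is a simplified stand-in, not the Keras source (the real function also receives the layer instance as `args[0]`).

```python
# Illustrative sketch only: how legacy Conv2D arguments are rewritten into
# the Keras 2 signature, reporting a conversion only when one happened.
def convert_legacy_conv2d(args, kwargs):
    """Map (filters, nb_row, nb_col) onto (filters, kernel_size)."""
    converted = []
    args = list(args)
    if len(args) == 3:                                   # Convolution2D(64, 3, 3)
        kernel_size = (args[1], args[2])
        args = [args[0], kernel_size]
        converted.append(('kernel_size', 'nb_row/nb_col'))
    elif 'nb_row' in kwargs and 'nb_col' in kwargs:      # nb_row=3, nb_col=3
        kernel_size = (kwargs.pop('nb_row'), kwargs.pop('nb_col'))
        args = [args[0], kernel_size]
        converted.append(('kernel_size', 'nb_row/nb_col'))
    return args, kwargs, converted

print(convert_legacy_conv2d((64, 3, 3), {}))
# ([64, (3, 3)], {}, [('kernel_size', 'nb_row/nb_col')])
print(convert_legacy_conv2d((64,), {'nb_row': 3, 'nb_col': 3}))
# ([64, (3, 3)], {}, [('kernel_size', 'nb_row/nb_col')])
print(convert_legacy_conv2d((64, (3, 3)), {}))
# ([64, (3, 3)], {}, [])  -- already in the new style, nothing to report
```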
Ruby | Ruby | improve template#inspect output | dcb13470991539ab581e02670738900c39976ff4 | <ide><path>actionview/lib/action_view/template.rb
<ide> def refresh(view)
<ide> end
<ide> end
<ide>
<add> def short_identifier
<add> @short_identifier ||= defined?(Rails.root) ? identifier.sub("#{Rails.root}/", "") : identifier
<add> end
<add>
<ide> def inspect
<del> @inspect ||= defined?(Rails.root) ? identifier.sub("#{Rails.root}/", "") : identifier
<add> "#<#{self.class.name} #{short_identifier} locals=#{@locals.inspect}>"
<ide> end
<ide>
<ide> # This method is responsible for properly setting the encoding of the
<ide> def method_name
<ide> end
<ide>
<ide> def identifier_method_name
<del> inspect.tr("^a-z_", "_")
<add> short_identifier.tr("^a-z_", "_")
<ide> end
<ide>
<ide> def instrument(action, &block) # :doc:
<ide><path>actionview/test/template/template_test.rb
<ide> def with_external_encoding(encoding)
<ide> ensure
<ide> silence_warnings { Encoding.default_external = old }
<ide> end
<add>
<add> def test_short_identifier
<add> @template = new_template("hello")
<add> assert_equal "hello template", @template.short_identifier
<add> end
<add>
<add> def test_template_inspect
<add> @template = new_template("hello")
<add> assert_equal "#<ActionView::Template hello template locals=[]>", @template.inspect
<add> end
<ide> end | 2 |
Javascript | Javascript | ensure code sample is not escaped | 2b6c2c5fbd74fca14eb454b6f12f3e3e45be733c | <ide><path>src/ng/compile.js
<ide> function directiveNormalize(name) {
<ide> * element attributes. The values reflect current binding state `{{ }}`. The normalization is
<ide> * needed since all of these are treated as equivalent in Angular:
<ide> *
<add> * ```
<ide> * <span ng:bind="a" ng-bind="a" data-ng-bind="a" x-ng-bind="a">
<add> * ```
<ide> */
<ide>
<ide> /** | 1 |
Javascript | Javascript | add p() like in ruby | de6036669d37b16fc9d169ff6f0eabd0c3f9b5b5 | <ide><path>src/file.js
<ide> stdin.fd = File.STDIN_FILENO;
<ide> this.puts = function (data, callback) {
<ide> stdout.puts(data, callback);
<ide> }
<add>
<add>this.p = function (data, callback) {
<add> puts(JSON.stringify(data), callback);
<add>}
<ide><path>test/test_http.js
<del>puts(JSON.stringify({hello: "world"}));
<add>p({hello: "world"});
<ide> new node.http.Server(function (msg) {
<ide> setTimeout(function () {
<ide> msg.sendHeader(200, [["Content-Type", "text/plain"]]); | 2 |
Go | Go | fix filemode (staticcheck) | e92e0d358a98945dd9aa1d59e28257a056cc065c | <ide><path>pkg/filenotify/poller_test.go
<ide> func TestPollerEvent(t *testing.T) {
<ide> default:
<ide> }
<ide>
<del> if err := ioutil.WriteFile(f.Name(), []byte("hello"), 0644); err != nil {
<add> if err := ioutil.WriteFile(f.Name(), []byte("hello"), 0600); err != nil {
<ide> t.Fatal(err)
<ide> }
<add> assertFileMode(t, f.Name(), 0600)
<ide> if err := assertEvent(w, fsnotify.Write); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide>
<del> if err := os.Chmod(f.Name(), 600); err != nil {
<add> if err := os.Chmod(f.Name(), 0644); err != nil {
<ide> t.Fatal(err)
<ide> }
<add> assertFileMode(t, f.Name(), 0644)
<ide> if err := assertEvent(w, fsnotify.Chmod); err != nil {
<ide> t.Fatal(err)
<ide> }
<ide> func TestPollerClose(t *testing.T) {
<ide> }
<ide> }
<ide>
<add>func assertFileMode(t *testing.T, fileName string, mode uint32) {
<add> t.Helper()
<add> f, err := os.Stat(fileName)
<add> if err != nil {
<add> t.Fatal(err)
<add> }
<add> if f.Mode() != os.FileMode(mode) {
<add> t.Fatalf("expected file %s to have mode %#o, but got %#o", fileName, mode, f.Mode())
<add> }
<add>}
<add>
<ide> func assertEvent(w FileWatcher, eType fsnotify.Op) error {
<ide> var err error
<ide> select { | 1 |
Python | Python | make printed errors from approx_array_* better | c47e6cb2d885c753ece0667fa3f5fad74668e314 | <ide><path>numpy/testing/utils.py
<ide> def build_err_msg(arrays, err_msg, header='Items are not equal:',
<ide> r = repr(a)
<ide> except:
<ide> r = '[repr failed]'
<del> r = r[:100]
<add> if r.count('\n') > 3:
<add> r = '\n'.join(r.splitlines()[:3])
<add> r += '...'
<ide> msg.append(' %s: %s' % (names[i], r))
<ide> return '\n'.join(msg)
<ide> | 1 |
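The effect of the change is that a long, multi-line array repr in an assertion message is now cut after its first three lines rather than after 100 characters. A small self-contained illustration of the new truncation rule:

```python
import numpy as np

r = repr(np.zeros((20, 5)))          # a multi-line repr, standing in for a failing operand
if r.count('\n') > 3:
    r = '\n'.join(r.splitlines()[:3])
    r += '...'
print(r)   # only the first three lines of the repr, followed by '...'
```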
Text | Text | show love to contributors | eeb9635ffa8f04001f8c22b285feedcbd87f6e46 | <ide><path>CONTRIBUTING.md
<ide> Please note that *we only accept bug reports here*. No feature requests or quest
<ide> If you have a question about how to use Ruby on Rails, [ask the Rails mailing list](https://groups.google.com/forum/?fromgroups#!forum/rubyonrails-talk).
<ide>
<ide> If you have a change or new feature in mind, [suggest it on the Rails-core mailing list](https://groups.google.com/forum/?fromgroups#!forum/rubyonrails-core) and start writing code. Check out the [Contributing to Ruby on Rails guide](http://edgeguides.rubyonrails.org/contributing_to_ruby_on_rails.html) for a comprehensive introduction.
<add>
<add>Thanks! :heart: :heart: :heart: <br />
<add>Rails Team
<ide>\ No newline at end of file | 1 |
Python | Python | use make_tempdir instead | 0efb7413f9642608fcd295f1aad740154dc3744a | <ide><path>spacy/tests/test_misc.py
<ide>
<ide> from thinc.api import get_current_ops, NumpyOps, CupyOps
<ide>
<del>from .util import get_random_doc, make_named_tempfile
<add>from .util import get_random_doc, make_tempdir
<ide>
<ide>
<ide> @pytest.fixture
<ide> def make_dummy_component(
<ide> return DummyComponent(nlp.vocab, name)
<ide> """
<ide>
<del> with make_named_tempfile(mode="w", suffix=".py") as fileh:
<del> fileh.write(code_str)
<del> fileh.flush()
<add> with make_tempdir() as temp_dir:
<add> code_path = os.path.join(temp_dir, "code.py")
<add> with open(code_path, "w") as fileh:
<add> fileh.write(code_str)
<ide>
<del> import_file("python_code", fileh.name)
<add> import_file("python_code", code_path)
<ide> config = {"initialize": {"components": {"dummy_component": {"dummy_param": 1}}}}
<ide> nlp = English.from_config(config)
<ide> nlp.add_pipe("dummy_component")
<ide><path>spacy/tests/util.py
<ide> def make_tempfile(mode="r"):
<ide> f.close()
<ide>
<ide>
<del>@contextlib.contextmanager
<del>def make_named_tempfile(mode="r", suffix=None):
<del> f = tempfile.NamedTemporaryFile(mode=mode, suffix=suffix)
<del> yield f
<del> f.close()
<del>
<del>
<ide> def get_batch(batch_size):
<ide> vocab = Vocab()
<ide> docs = [] | 2 |
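The rewritten test writes the generated source into a shared temporary directory instead of a `NamedTemporaryFile`. A roughly equivalent standalone sketch of that pattern, using only the standard library (the helper here mimics the idea of spaCy's `make_tempdir`, not its exact implementation):

```python
import os
from contextlib import contextmanager
from pathlib import Path
from tempfile import TemporaryDirectory

@contextmanager
def make_tempdir():
    """Yield a temporary directory that is cleaned up on exit."""
    with TemporaryDirectory() as td:
        yield Path(td)

code_str = "def dummy():\n    return 1\n"

with make_tempdir() as temp_dir:
    code_path = os.path.join(temp_dir, "code.py")
    with open(code_path, "w") as fileh:
        fileh.write(code_str)
    # the written module can now be read or imported while the context is open
    print(Path(code_path).read_text())
```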
Python | Python | remove training arg | 0b395f650aa8c95afb219553e8ca654b493660e3 | <ide><path>official/nlp/modeling/models/seq2seq_transformer.py
<ide> def get_config(self):
<ide> "params": self.params,
<ide> }
<ide>
<del> def call(self, inputs, training):
<add> def call(self, inputs):
<ide> """Calculate target logits or inferred target sequences.
<ide>
<ide> Args:
<ide> def call(self, inputs, training):
<ide> pos_encoding = tf.cast(pos_encoding, self.params["dtype"])
<ide> encoder_inputs = embedded_inputs + pos_encoding
<ide>
<del> # if training:
<del> # encoder_inputs = tf.nn.dropout(
<del> # encoder_inputs, rate=self.params["layer_postprocess_dropout"])
<del>
<ide> encoder_inputs = self.encoder_dropout(encoder_inputs)
<ide>
<ide> encoder_outputs = self.encoder_layer(encoder_inputs,
<ide> def call(self, inputs, training):
<ide> self.params["dtype"])
<ide>
<ide> symbols_to_logits_fn = self._get_symbols_to_logits_fn(
<del> max_decode_length, training)
<add> max_decode_length)
<ide>
<ide> # Create initial set of IDs that will be passed to symbols_to_logits_fn.
<ide> initial_ids = tf.zeros([batch_size], dtype=tf.int32)
<ide> def call(self, inputs, training):
<ide> pos_encoding = tf.cast(pos_encoding, self.params["dtype"])
<ide> decoder_inputs += pos_encoding
<ide>
<del> # if training:
<del> # decoder_inputs = tf.nn.dropout(
<del> # decoder_inputs, rate=self.params["layer_postprocess_dropout"])
<del>
<ide> decoder_inputs = self.decoder_dropout(decoder_inputs)
<ide>
<ide> decoder_shape = tf_utils.get_shape_list(decoder_inputs,
<ide> def call(self, inputs, training):
<ide> return logits
<ide>
<ide>
<del> def _get_symbols_to_logits_fn(self, max_decode_length, training):
<add> def _get_symbols_to_logits_fn(self, max_decode_length):
<ide> """Returns a decoding function that calculates logits of the next tokens."""
<ide> timing_signal = self.position_embedding(
<ide> inputs=None, length=max_decode_length + 1) | 1 |
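Dropping the commented-out `tf.nn.dropout` calls works because the Keras `Dropout` layers (`self.encoder_dropout` / `self.decoder_dropout`) handle the training/inference switch themselves, so no explicit `training` flag needs to be threaded through these methods. A standalone illustration of that behaviour (not part of the model code):

```python
import tensorflow as tf

dropout = tf.keras.layers.Dropout(rate=0.5)
x = tf.ones((1, 4))

print(dropout(x, training=True))    # some elements zeroed, the rest scaled by 1/(1-rate)
print(dropout(x, training=False))   # returns x unchanged
print(dropout(x))                   # no explicit flag: Keras falls back to the current
                                    # learning phase (inference here, so x is unchanged)
```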
Ruby | Ruby | help autotools with 10.13 sdk on 10.12 | 733d485065e55ad1cf159eec89927c4990bbdfaf | <ide><path>Library/Homebrew/extend/os/mac/extend/ENV/super.rb
<ide> def setup_build_environment(formula = nil)
<ide> self["SDKROOT"] = MacOS.sdk_path
<ide> end
<ide>
<del> # Filter out symbols known not to be defined on 10.11 since GNU Autotools
<del> # can't reliably figure this out with Xcode 8 on its own yet.
<del> if MacOS.version == "10.11" && MacOS::Xcode.installed? && MacOS::Xcode.version >= "8.0"
<add> # Filter out symbols known not to be defined since GNU Autotools can't
<add> # reliably figure this out with Xcode 8 and above.
<add> if MacOS.version == "10.12" && MacOS::Xcode.installed? && MacOS::Xcode.version >= "9.0"
<add> ENV["ac_cv_func_futimens"] = "no"
<add> ENV["ac_cv_func_utimensat"] = "no"
<add> elsif MacOS.version == "10.11" && MacOS::Xcode.installed? && MacOS::Xcode.version >= "8.0"
<ide> %w[basename_r clock_getres clock_gettime clock_settime dirname_r
<ide> getentropy mkostemp mkostemps timingsafe_bcmp].each do |s|
<ide> ENV["ac_cv_func_#{s}"] = "no" | 1 |
Ruby | Ruby | remove nil as that is already expected behaviour | eca528ccccf2c73c6059fc821f756de7630c6251 | <ide><path>Library/Homebrew/utils/github.rb
<ide> def get_repo_license(user, repo)
<ide> res = GitHub.open_api("#{GitHub::API_URL}/repos/#{user}/#{repo}/license")
<ide> return unless res.key?("license")
<ide>
<del> res["license"]["spdx_id"] || nil
<add> res["license"]["spdx_id"]
<ide> rescue GitHub::HTTPNotFoundError
<ide> nil
<ide> end | 1 |
Python | Python | convert env variable flags and fix ldshared | 65687ff01c932b53d641fbe91b8d945d1a550744 | <ide><path>numpy/distutils/fcompiler/__init__.py
<ide> class FCompiler(CCompiler):
<ide> compiler_f90 = ('exe.compiler_f90', 'F90', 'f90exec', None, False),
<ide> compiler_fix = ('exe.compiler_fix', 'F90', 'f90exec', None, False),
<ide> version_cmd = ('exe.version_cmd', None, None, None, False),
<del> linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None, True),
<add> linker_so = ('exe.linker_so', 'LDSHARED', 'ldshared', None, False),
<ide> linker_exe = ('exe.linker_exe', 'LD', 'ld', None, False),
<ide> archiver = (None, 'AR', 'ar', None, False),
<ide> ranlib = (None, 'RANLIB', 'ranlib', None, False),
<ide><path>numpy/distutils/fcompiler/environment.py
<ide> def _get_var(self, name, conf_desc):
<ide> if envvar is not None:
<ide> envvar_contents = os.environ.get(envvar)
<ide> if envvar_contents is not None:
<add> if convert is not None:
<add> envvar_contents = convert(envvar_contents)
<add> else:
<add> envvar_contents = [envvar_contents]
<ide> if append and os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':
<ide> if var is None:
<ide> var = ''
<ide> try:
<del> var = ' '.join(var + [envvar_contents])
<add> var = ' '.join(var + envvar_contents)
<ide> except TypeError:
<del> var = ' '.join([var] + [envvar_contents])
<add> var = ' '.join([var] + envvar_contents)
<ide> else:
<ide> var = envvar_contents
<ide> if confvar is not None and self._conf: | 2 |
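The fix routes environment-variable values through the same `convert` step as configured values and only appends them when `NPY_DISTUTILS_APPEND_FLAGS=1`. A reduced sketch of that decision logic (the function name and the list-based return are illustrative; the real code joins the appended flags into a single string):

```python
import os

def resolve_flags(configured, envvar):
    """Decide between configured flags and an environment override."""
    env_value = os.environ.get(envvar)
    if env_value is None:
        return configured
    env_flags = env_value.split()                      # the 'convert' step
    if os.environ.get('NPY_DISTUTILS_APPEND_FLAGS', '0') == '1':
        return configured + env_flags                  # append behind the defaults
    return env_flags                                   # otherwise the env var wins outright

os.environ['FFLAGS'] = '-O2 -g'
print(resolve_flags(['-fPIC'], 'FFLAGS'))              # ['-O2', '-g']
os.environ['NPY_DISTUTILS_APPEND_FLAGS'] = '1'
print(resolve_flags(['-fPIC'], 'FFLAGS'))              # ['-fPIC', '-O2', '-g']
```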
Ruby | Ruby | use attr_accessor to suppress warning | 43c39afb8f2b8026ac8fbdb1f8aa570f29a6e477 | <ide><path>Library/Homebrew/superenv.rb
<ide> def [] key
<ide> fetch(key)
<ide> elsif %w{CPPFLAGS CFLAGS LDFLAGS}.include? key
<ide> class << (a = "")
<del> attr :key, true
<add> attr_accessor :key
<ide> def + value
<ide> ENV[key] = value
<ide> end | 1 |
Javascript | Javascript | allow eai_fail in test-http-dns-error.js | 50364d98d97afd9e7dc3947c270c45ef64944a6f | <ide><path>test/parallel/test-http-dns-error.js
<ide> const https = require('https');
<ide> const host = '*'.repeat(64);
<ide> const MAX_TRIES = 5;
<ide>
<del>let errCode = 'ENOTFOUND';
<del>if (common.isOpenBSD)
<del> errCode = 'EAI_FAIL';
<add>const errCodes = ['ENOTFOUND', 'EAI_FAIL'];
<ide>
<ide> function tryGet(mod, tries) {
<ide> // Bad host name should not throw an uncatchable exception.
<ide> function tryGet(mod, tries) {
<ide> tryGet(mod, ++tries);
<ide> return;
<ide> }
<del> assert.strictEqual(err.code, errCode);
<add> assert(errCodes.includes(err.code), err);
<ide> }));
<ide> // http.get() called req1.end() for us
<ide> }
<ide> function tryRequest(mod, tries) {
<ide> tryRequest(mod, ++tries);
<ide> return;
<ide> }
<del> assert.strictEqual(err.code, errCode);
<add> assert(errCodes.includes(err.code), err);
<ide> }));
<ide> req.end();
<ide> } | 1 |
Javascript | Javascript | fix lint errors for examples/todos-with-undo/ | 3dface09761895b6e925f824d3ef7f42788372c5 | <ide><path>examples/todos-with-undo/actions.js
<del>export const ADD_TODO = 'ADD_TODO';
<del>export const COMPLETE_TODO = 'COMPLETE_TODO';
<del>export const SET_VISIBILITY_FILTER = 'SET_VISIBILITY_FILTER';
<add>export const ADD_TODO = 'ADD_TODO'
<add>export const COMPLETE_TODO = 'COMPLETE_TODO'
<add>export const SET_VISIBILITY_FILTER = 'SET_VISIBILITY_FILTER'
<ide>
<ide> export const VisibilityFilters = {
<ide> SHOW_ALL: 'SHOW_ALL',
<ide> SHOW_COMPLETED: 'SHOW_COMPLETED',
<ide> SHOW_ACTIVE: 'SHOW_ACTIVE'
<del>};
<add>}
<ide>
<ide> export function addTodo(text) {
<del> return { type: ADD_TODO, text };
<add> return { type: ADD_TODO, text }
<ide> }
<ide>
<ide> export function completeTodo(index) {
<del> return { type: COMPLETE_TODO, index };
<add> return { type: COMPLETE_TODO, index }
<ide> }
<ide>
<ide> export function setVisibilityFilter(filter) {
<del> return { type: SET_VISIBILITY_FILTER, filter };
<add> return { type: SET_VISIBILITY_FILTER, filter }
<ide> }
<ide><path>examples/todos-with-undo/components/AddTodo.js
<del>import React, { Component, PropTypes } from 'react';
<add>import React, { Component, PropTypes } from 'react'
<ide>
<ide> export default class AddTodo extends Component {
<ide> handleSubmit(e) {
<del> e.preventDefault();
<del> const node = this.refs.input;
<del> const text = node.value.trim();
<add> e.preventDefault()
<add> const node = this.refs.input
<add> const text = node.value.trim()
<ide> if (text) {
<del> this.props.onAddSubmit(text);
<del> node.value = '';
<add> this.props.onAddSubmit(text)
<add> node.value = ''
<ide> }
<ide> }
<ide>
<ide> export default class AddTodo extends Component {
<ide> </button>
<ide> </form>
<ide> </div>
<del> );
<add> )
<ide> }
<ide> }
<ide>
<ide> AddTodo.propTypes = {
<ide> onAddSubmit: PropTypes.func.isRequired
<del>};
<add>}
<ide><path>examples/todos-with-undo/components/Footer.js
<del>import React, { Component, PropTypes } from 'react';
<add>import React, { Component, PropTypes } from 'react'
<ide>
<ide> export default class Footer extends Component {
<ide> renderFilter(filter, name) {
<ide> if (filter === this.props.filter) {
<del> return name;
<add> return name
<ide> }
<ide>
<ide> return (
<ide> <a href="#" onClick={e => {
<del> e.preventDefault();
<del> this.props.onFilterChange(filter);
<add> e.preventDefault()
<add> this.props.onFilterChange(filter)
<ide> }}>
<ide> {name}
<ide> </a>
<del> );
<add> )
<ide> }
<ide>
<ide> renderFilters() {
<ide> export default class Footer extends Component {
<ide> {this.renderFilter('SHOW_ACTIVE', 'Active')}
<ide> .
<ide> </p>
<del> );
<add> )
<ide> }
<ide>
<ide> renderUndo() {
<ide> export default class Footer extends Component {
<ide> <button onClick={this.props.onUndo} disabled={this.props.undoDisabled}>Undo</button>
<ide> <button onClick={this.props.onRedo} disabled={this.props.redoDisabled}>Redo</button>
<ide> </p>
<del> );
<add> )
<ide> }
<ide>
<ide> render() {
<ide> export default class Footer extends Component {
<ide> {this.renderFilters()}
<ide> {this.renderUndo()}
<ide> </div>
<del> );
<add> )
<ide> }
<ide> }
<ide>
<ide> Footer.propTypes = {
<ide> 'SHOW_COMPLETED',
<ide> 'SHOW_ACTIVE'
<ide> ]).isRequired
<del>};
<add>}
<ide><path>examples/todos-with-undo/components/Todo.js
<del>import React, { Component, PropTypes } from 'react';
<add>import React, { Component, PropTypes } from 'react'
<ide>
<ide> export default class Todo extends Component {
<ide> render() {
<ide> export default class Todo extends Component {
<ide> }}>
<ide> {this.props.text}
<ide> </li>
<del> );
<add> )
<ide> }
<ide> }
<ide>
<ide> Todo.propTypes = {
<ide> onClick: PropTypes.func.isRequired,
<ide> text: PropTypes.string.isRequired,
<ide> completed: PropTypes.bool.isRequired
<del>};
<add>}
<ide><path>examples/todos-with-undo/components/TodoList.js
<del>import React, { Component, PropTypes } from 'react';
<del>import Todo from './Todo';
<add>import React, { Component, PropTypes } from 'react'
<add>import Todo from './Todo'
<ide>
<ide> export default class TodoList extends Component {
<ide> render() {
<ide> export default class TodoList extends Component {
<ide> onClick={() => this.props.onTodoClick(index)} />
<ide> )}
<ide> </ul>
<del> );
<add> )
<ide> }
<ide> }
<ide>
<ide> TodoList.propTypes = {
<ide> text: PropTypes.string.isRequired,
<ide> completed: PropTypes.bool.isRequired
<ide> }).isRequired).isRequired
<del>};
<add>}
<ide><path>examples/todos-with-undo/containers/App.js
<del>import React, { Component, PropTypes } from 'react';
<del>import { connect } from 'react-redux';
<del>import { ActionCreators } from 'redux-undo';
<del>import { addTodo, completeTodo, setVisibilityFilter, VisibilityFilters } from '../actions';
<del>import AddTodo from '../components/AddTodo';
<del>import TodoList from '../components/TodoList';
<del>import Footer from '../components/Footer';
<add>import React, { Component, PropTypes } from 'react'
<add>import { connect } from 'react-redux'
<add>import { ActionCreators } from 'redux-undo'
<add>import { addTodo, completeTodo, setVisibilityFilter, VisibilityFilters } from '../actions'
<add>import AddTodo from '../components/AddTodo'
<add>import TodoList from '../components/TodoList'
<add>import Footer from '../components/Footer'
<ide>
<ide> class App extends Component {
<ide> render() {
<del> const { dispatch, visibleTodos, visibilityFilter } = this.props;
<add> const { dispatch, visibleTodos, visibilityFilter } = this.props
<ide> return (
<ide> <div>
<ide> <AddTodo
<ide> class App extends Component {
<ide> undoDisabled={this.props.undoDisabled}
<ide> redoDisabled={this.props.redoDisabled} />
<ide> </div>
<del> );
<add> )
<ide> }
<ide> }
<ide>
<ide> App.propTypes = {
<ide> ]).isRequired,
<ide> undoDisabled: PropTypes.bool.isRequired,
<ide> redoDisabled: PropTypes.bool.isRequired
<del>};
<add>}
<ide>
<ide> function selectTodos(todos, filter) {
<ide> switch (filter) {
<del> default:
<del> case VisibilityFilters.SHOW_ALL:
<del> return todos;
<del> case VisibilityFilters.SHOW_COMPLETED:
<del> return todos.filter(todo => todo.completed);
<del> case VisibilityFilters.SHOW_ACTIVE:
<del> return todos.filter(todo => !todo.completed);
<add> default:
<add> case VisibilityFilters.SHOW_ALL:
<add> return todos
<add> case VisibilityFilters.SHOW_COMPLETED:
<add> return todos.filter(todo => todo.completed)
<add> case VisibilityFilters.SHOW_ACTIVE:
<add> return todos.filter(todo => !todo.completed)
<ide> }
<ide> }
<ide>
<ide> function select(state) {
<ide> redoDisabled: state.todos.future.length === 0,
<ide> visibleTodos: selectTodos(state.todos.present, state.visibilityFilter),
<ide> visibilityFilter: state.visibilityFilter
<del> };
<add> }
<ide> }
<ide>
<del>export default connect(select)(App);
<add>export default connect(select)(App)
<ide><path>examples/todos-with-undo/index.js
<del>import React from 'react';
<del>import { render } from 'react-dom';
<del>import { createStore } from 'redux';
<del>import { Provider } from 'react-redux';
<del>import App from './containers/App';
<del>import todoApp from './reducers';
<add>import React from 'react'
<add>import { render } from 'react-dom'
<add>import { createStore } from 'redux'
<add>import { Provider } from 'react-redux'
<add>import App from './containers/App'
<add>import todoApp from './reducers'
<ide>
<del>const store = createStore(todoApp);
<add>const store = createStore(todoApp)
<ide>
<del>const rootElement = document.getElementById('root');
<add>const rootElement = document.getElementById('root')
<ide> render(
<ide> <Provider store={store}>
<ide> <App />
<ide> </Provider>,
<ide> rootElement
<del>);
<add>)
<ide><path>examples/todos-with-undo/reducers.js
<del>import { combineReducers } from 'redux';
<del>import undoable, { distinctState } from 'redux-undo';
<add>import { combineReducers } from 'redux'
<add>import undoable, { distinctState } from 'redux-undo'
<ide>
<del>import { ADD_TODO, COMPLETE_TODO, SET_VISIBILITY_FILTER, VisibilityFilters } from './actions';
<del>const { SHOW_ALL } = VisibilityFilters;
<add>import { ADD_TODO, COMPLETE_TODO, SET_VISIBILITY_FILTER, VisibilityFilters } from './actions'
<add>const { SHOW_ALL } = VisibilityFilters
<ide>
<ide> function visibilityFilter(state = SHOW_ALL, action) {
<ide> switch (action.type) {
<del> case SET_VISIBILITY_FILTER:
<del> return action.filter;
<del> default:
<del> return state;
<add> case SET_VISIBILITY_FILTER:
<add> return action.filter
<add> default:
<add> return state
<ide> }
<ide> }
<ide>
<ide> function todos(state = [], action) {
<ide> switch (action.type) {
<del> case ADD_TODO:
<del> return [...state, {
<del> text: action.text,
<del> completed: false
<del> }];
<del> case COMPLETE_TODO:
<del> return [
<del> ...state.slice(0, action.index),
<del> Object.assign({}, state[action.index], {
<del> completed: true
<del> }),
<del> ...state.slice(action.index + 1)
<del> ];
<del> default:
<del> return state;
<add> case ADD_TODO:
<add> return [
<add> ...state,
<add> {
<add> text: action.text,
<add> completed: false
<add> }
<add> ]
<add> case COMPLETE_TODO:
<add> return [
<add> ...state.slice(0, action.index),
<add> Object.assign({}, state[action.index], {
<add> completed: true
<add> }),
<add> ...state.slice(action.index + 1)
<add> ]
<add> default:
<add> return state
<ide> }
<ide> }
<ide>
<ide> const todoApp = combineReducers({
<ide> visibilityFilter,
<ide> todos: undoable(todos, { filter: distinctState() })
<del>});
<add>})
<ide>
<del>export default todoApp;
<add>export default todoApp
<ide><path>examples/todos-with-undo/server.js
<del>var webpack = require('webpack');
<del>var webpackDevMiddleware = require('webpack-dev-middleware');
<del>var webpackHotMiddleware = require('webpack-hot-middleware');
<del>var config = require('./webpack.config');
<add>var webpack = require('webpack')
<add>var webpackDevMiddleware = require('webpack-dev-middleware')
<add>var webpackHotMiddleware = require('webpack-hot-middleware')
<add>var config = require('./webpack.config')
<ide>
<del>var app = new require('express')();
<del>var port = 3000;
<add>var app = new require('express')()
<add>var port = 3000
<ide>
<del>var compiler = webpack(config);
<del>app.use(webpackDevMiddleware(compiler, { noInfo: true, publicPath: config.output.publicPath }));
<del>app.use(webpackHotMiddleware(compiler));
<add>var compiler = webpack(config)
<add>app.use(webpackDevMiddleware(compiler, { noInfo: true, publicPath: config.output.publicPath }))
<add>app.use(webpackHotMiddleware(compiler))
<ide>
<ide> app.get("/", function(req, res) {
<del> res.sendFile(__dirname + '/index.html');
<del>});
<add> res.sendFile(__dirname + '/index.html')
<add>})
<ide>
<ide> app.listen(port, function(error) {
<ide> if (error) {
<del> console.error(error);
<add> console.error(error)
<ide> } else {
<del> console.info("==> 🌎 Listening on port %s. Open up http://localhost:%s/ in your browser.", port, port);
<add> console.info("==> 🌎 Listening on port %s. Open up http://localhost:%s/ in your browser.", port, port)
<ide> }
<del>});
<add>})
<ide><path>examples/todos-with-undo/webpack.config.js
<del>var path = require('path');
<del>var webpack = require('webpack');
<add>var path = require('path')
<add>var webpack = require('webpack')
<ide>
<ide> module.exports = {
<ide> devtool: 'cheap-module-eval-source-map',
<ide> module.exports = {
<ide> include: __dirname
<ide> }]
<ide> }
<del>};
<add>}
<ide>
<ide>
<ide> // When inside Redux repo, prefer src to compiled version.
<ide> // You can safely delete these lines in your project.
<del>var reduxSrc = path.join(__dirname, '..', '..', 'src');
<del>var reduxNodeModules = path.join(__dirname, '..', '..', 'node_modules');
<del>var fs = require('fs');
<add>var reduxSrc = path.join(__dirname, '..', '..', 'src')
<add>var reduxNodeModules = path.join(__dirname, '..', '..', 'node_modules')
<add>var fs = require('fs')
<ide> if (fs.existsSync(reduxSrc) && fs.existsSync(reduxNodeModules)) {
<ide> // Resolve Redux to source
<del> module.exports.resolve = { alias: { 'redux': reduxSrc } };
<add> module.exports.resolve = { alias: { 'redux': reduxSrc } }
<ide> // Compile Redux from source
<ide> module.exports.module.loaders.push({
<ide> test: /\.js$/,
<ide> loaders: ['babel'],
<ide> include: reduxSrc
<del> });
<add> })
<ide> } | 10 |
Python | Python | add quotes to printing strings | 74b68e37a478ca5d6e515196af1108e9494a01f1 | <ide><path>numpy/core/arrayprint.py
<ide> def _array2string(a, max_line_width, precision, suppress_small, separator=' ',
<ide> data.imag, precision, suppress_small, sign=1)
<ide> format_function = lambda x: \
<ide> _formatComplex(x, real_format, imag_format)
<del> elif issubclass(dtype, _nt.unicode_):
<add> elif issubclass(dtype, _nt.unicode_) or \
<add> issubclass(dtype, _nt.string_):
<ide> format = "%s"
<ide> format_function = lambda x: repr(x)
<ide> else: | 1 |
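With string types now routed through `repr`, each element of a string array is printed with quotes, which makes element boundaries unambiguous:

```python
import numpy as np

a = np.array(['a b', 'c'])
print(a)   # ['a b' 'c'] -- the quotes show where each element ends,
           # so 'a b' reads as one element rather than two
```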
Python | Python | enable specifying api version | 141c177076f6ed7700344d0d65088fc2650e2c1a | <ide><path>libcloud/common/dimensiondata.py
<ide> class DimensionDataConnection(ConnectionUserAndKey):
<ide> allow_insecure = False
<ide>
<ide> def __init__(self, user_id, key, secure=True, host=None, port=None,
<del> url=None, timeout=None, proxy_url=None, **conn_kwargs):
<add> url=None, timeout=None, proxy_url=None,
<add> api_version=None, **conn_kwargs):
<ide> super(DimensionDataConnection, self).__init__(
<ide> user_id=user_id,
<ide> key=key,
<ide> def __init__(self, user_id, key, secure=True, host=None, port=None,
<ide> if conn_kwargs['region']:
<ide> self.host = conn_kwargs['region']['host']
<ide>
<add> if api_version:
<add> if api_version.startswith('2'):
<add> self.api_version_2 = api_version
<add>
<ide> def add_default_headers(self, headers):
<ide> headers['Authorization'] = \
<ide> ('Basic %s' % b64encode(b('%s:%s' % (self.user_id,
<ide><path>libcloud/compute/drivers/dimensiondata.py
<ide> def __init__(self, key, secret=None, secure=True, host=None, port=None,
<ide> if region is not None:
<ide> self.selected_region = API_ENDPOINTS[region]
<ide>
<add> if api_version is not None:
<add> self.api_version = api_version
<add>
<ide> super(DimensionDataNodeDriver, self).__init__(key=key, secret=secret,
<ide> secure=secure, host=host,
<ide> port=port,
<ide> def _ex_connection_class_kwargs(self):
<ide> kwargs = super(DimensionDataNodeDriver,
<ide> self)._ex_connection_class_kwargs()
<ide> kwargs['region'] = self.selected_region
<add> kwargs['api_version'] = self.api_version
<ide> return kwargs
<ide>
<ide> def _create_node_mcp1(self, name, image, auth, ex_description, | 2 |
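With this change the driver accepts an `api_version` keyword and forwards it to the connection via `_ex_connection_class_kwargs()`; the connection only records versions starting with `2`. A hedged usage sketch (the credentials are placeholders and `dd-na` is just an example region key):

```python
# Illustrative only: constructing the driver with the new api_version keyword.
from libcloud.compute.drivers.dimensiondata import DimensionDataNodeDriver

driver = DimensionDataNodeDriver(
    key='user',
    secret='password',
    region='dd-na',            # looked up in API_ENDPOINTS
    api_version='2.3',         # passed through to DimensionDataConnection
)
```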
Ruby | Ruby | improve command --help output | 4ace1af297b633b1297a57650f5ca85c5aa9becb | <ide><path>Library/Homebrew/help.rb
<ide> def help(cmd = nil, flags = {})
<ide>
<ide> def command_help(path)
<ide> # Let OptionParser generate help text for commands which have a parser defined
<add> cmd = path.basename(path.extname)
<add> cmd_args_method_name = "#{cmd.to_s.tr("-", "_")}_args".to_sym
<ide> begin
<del> cmd = path.basename(path.extname)
<del> return Homebrew.send("#{cmd.to_s.tr("-", "_")}_args".to_sym).generate_help_text
<del> rescue NoMethodError
<add> return Homebrew.send(cmd_args_method_name)
<add> .generate_help_text
<add> rescue NoMethodError => e
<add> raise if e.name != cmd_args_method_name
<ide> nil
<ide> end
<ide>
<ide> def command_help(path)
<ide> HOMEBREW_HELP
<ide> else
<ide> help_lines.map do |line|
<del> line.sub(/^ \* /, "#{Tty.bold}brew#{Tty.reset} ")
<add> line.gsub(/^ /, "")
<add> .sub(/^\* /, "#{Tty.bold}Usage: brew#{Tty.reset} ")
<ide> .gsub(/`(.*?)`/, "#{Tty.bold}\\1#{Tty.reset}")
<ide> .gsub(%r{<([^\s]+?://[^\s]+?)>}) { |url| Formatter.url(url) }
<ide> .gsub(/<(.*?)>/, "#{Tty.underline}\\1#{Tty.reset}")
<ide><path>Library/Homebrew/test/cmd/help_spec.rb
<ide>
<ide> it "prints help for a documented shell command" do
<ide> expect { brew "help", "update" }
<del> .to output(/^brew update/).to_stdout
<add> .to output(/^Usage: brew update/).to_stdout
<ide> .and be_a_success
<ide> end
<ide> | 2 |
Ruby | Ruby | fix relocation of frameworks" | f59ad1c9e91b0d9d39287a51d1a3b009b622cec0 | <ide><path>Library/Homebrew/keg_relocate.rb
<ide> class Keg
<ide> PREFIX_PLACEHOLDER = "".freeze
<ide> CELLAR_PLACEHOLDER = "".freeze
<ide>
<del> # Matches framework references like `XXX.framework/Versions/YYY/XXX` and
<del> # `XXX.framework/XXX`, both with or without a slash-delimited prefix.
<del> FRAMEWORK_RX = %r{(?:^|/)(?<suffix>(?<name>[^/]+)\.framework/(?:Versions/[^/]+/)?\k<name>)$}.freeze
<del>
<ide> def fix_install_names(options = {})
<ide> mach_o_files.each do |file|
<ide> file.ensure_writable do
<ide> def fixed_name(file, bad_name)
<ide> "@loader_path/#{bad_name}"
<ide> elsif file.mach_o_executable? && (lib + bad_name).exist?
<ide> "#{lib}/#{bad_name}"
<del> elsif (abs_name = find_dylib(bad_name)) && abs_name.exist?
<add> elsif (abs_name = find_dylib(Pathname.new(bad_name).basename)) && abs_name.exist?
<ide> abs_name.to_s
<ide> else
<ide> opoo "Could not fix #{bad_name} in #{file}"
<ide> def dylib_id_for(file, options)
<ide> opt_record.join(relative_dirname, basename).to_s
<ide> end
<ide>
<del> def find_dylib_suffix_from(bad_name)
<del> if (framework = bad_name.match(FRAMEWORK_RX))
<del> framework["suffix"]
<del> else
<del> File.basename(bad_name)
<del> end
<del> end
<del>
<del> def find_dylib(bad_name)
<del> return unless lib.directory?
<del> suffix = "/#{find_dylib_suffix_from(bad_name)}"
<del> lib.find { |pn| break pn if pn.to_s.end_with?(suffix) }
<add> def find_dylib(name)
<add> lib.find { |pn| break pn if pn.basename == name } if lib.directory?
<ide> end
<ide>
<ide> def mach_o_files | 1 |
Python | Python | use _umath_linalg for eigvals() | 15a9c3b25c0aff2799c927ea9e602fc3c134d3fc | <ide><path>numpy/linalg/linalg.py
<ide> def eigvals(a):
<ide>
<ide> Parameters
<ide> ----------
<del> a : (M, M) array_like
<add> a : (..., M, M) array_like
<ide> A complex- or real-valued matrix whose eigenvalues will be computed.
<ide>
<ide> Returns
<ide> -------
<del> w : (M,) ndarray
<add> w : (..., M,) ndarray
<ide> The eigenvalues, each repeated according to its multiplicity.
<ide> They are not necessarily ordered, nor are they necessarily
<ide> real for real matrices.
<ide> def eigvals(a):
<ide>
<ide> Notes
<ide> -----
<del> This is a simple interface to the LAPACK routines dgeev and zgeev
<del> that sets those routines' flags to return only the eigenvalues of
<del> general real and complex arrays, respectively.
<add> Broadcasting rules apply, see the `numpy.linalg` documentation for
<add> details.
<add>
<add> This is implemented using the _geev LAPACK routines which compute
<add> the eigenvalues and eigenvectors of general square arrays.
<ide>
<ide> Examples
<ide> --------
<ide> def eigvals(a):
<ide>
<ide> """
<ide> a, wrap = _makearray(a)
<del> _assertRank2(a)
<del> _assertSquareness(a)
<add> _assertNonEmpty(a)
<add> _assertRankAtLeast2(a)
<add> _assertNdSquareness(a)
<ide> _assertFinite(a)
<ide> t, result_t = _commonType(a)
<del> real_t = _linalgRealType(t)
<del> a = _fastCopyAndTranspose(t, a)
<del> a = _to_native_byte_order(a)
<del> n = a.shape[0]
<del> dummy = zeros((1,), t)
<del> if isComplexType(t):
<del> lapack_routine = lapack_lite.zgeev
<del> w = zeros((n,), t)
<del> rwork = zeros((n,), real_t)
<del> lwork = 1
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(_N, _N, n, a, n, w,
<del> dummy, 1, dummy, 1, work, -1, rwork, 0)
<del> lwork = int(abs(work[0]))
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(_N, _N, n, a, n, w,
<del> dummy, 1, dummy, 1, work, lwork, rwork, 0)
<del> else:
<del> lapack_routine = lapack_lite.dgeev
<del> wr = zeros((n,), t)
<del> wi = zeros((n,), t)
<del> lwork = 1
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(_N, _N, n, a, n, wr, wi,
<del> dummy, 1, dummy, 1, work, -1, 0)
<del> lwork = int(work[0])
<del> work = zeros((lwork,), t)
<del> results = lapack_routine(_N, _N, n, a, n, wr, wi,
<del> dummy, 1, dummy, 1, work, lwork, 0)
<del> if all(wi == 0.):
<del> w = wr
<add>
<add> extobj = get_linalg_error_extobj(
<add> _raise_linalgerror_eigenvalues_nonconvergence)
<add>
<add> w = _umath_linalg.eigvals(a.astype(t), extobj=extobj)
<add>
<add> if not isComplexType(t):
<add> if all(w.imag == 0):
<add> w = w.real
<ide> result_t = _realType(result_t)
<ide> else:
<del> w = wr+1j*wi
<ide> result_t = _complexType(result_t)
<del> if results['info'] > 0:
<del> raise LinAlgError('Eigenvalues did not converge')
<del> return w.astype(result_t)
<ide>
<add> return w.astype(result_t)
<ide>
<ide> def eigvalsh(a, UPLO='L'):
<ide> """ | 1 |
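Because `eigvals` now calls the `_umath_linalg` gufunc, it accepts stacks of matrices and broadcasts over the leading dimensions, as the updated `(..., M, M) -> (..., M)` docstring says. A quick check (eigenvalue ordering within each row is not guaranteed):

```python
import numpy as np

stack = np.array([[[2., 0.],
                   [0., 3.]],
                  [[1., 1.],
                   [0., 4.]]])          # shape (2, 2, 2): a stack of two 2x2 matrices

w = np.linalg.eigvals(stack)
print(w.shape)                           # (2, 2): one set of eigenvalues per matrix
print(w)                                 # [[2. 3.], [1. 4.]] up to ordering
```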
Go | Go | move user build test to integration-cli | 360fb3d4ea192e28a4d2579589cd16954bb959c1 | <ide><path>integration-cli/docker_cli_build_test.go
<ide> func TestBuildMaintainer(t *testing.T) {
<ide> logDone("build - maintainer")
<ide> }
<ide>
<add>func TestBuildUser(t *testing.T) {
<add> checkSimpleBuild(t,
<add> `
<add> FROM busybox
<add> RUN echo 'dockerio:x:1001:1001::/bin:/bin/false' >> /etc/passwd
<add> USER dockerio
<add> RUN [ $(whoami) = 'dockerio' ]
<add> `,
<add> "testbuildimg",
<add> "{{json .config.User}}",
<add> `"dockerio"`)
<add>
<add> deleteImages("testbuildimg")
<add> logDone("build - user")
<add>}
<add>
<ide> // TODO: TestCaching
<ide>
<ide> // TODO: TestADDCacheInvalidation
<ide><path>integration/buildfile_test.go
<ide> func buildImage(context testContextTemplate, t *testing.T, eng *engine.Engine, u
<ide> return image, err
<ide> }
<ide>
<del>func TestBuildUser(t *testing.T) {
<del> img, err := buildImage(testContextTemplate{`
<del> from {IMAGE}
<del> user dockerio
<del> `, nil, nil}, t, nil, true)
<del> if err != nil {
<del> t.Fatal(err)
<del> }
<del>
<del> if img.Config.User != "dockerio" {
<del> t.Fail()
<del> }
<del>}
<del>
<ide> func TestBuildRelativeWorkdir(t *testing.T) {
<ide> img, err := buildImage(testContextTemplate{`
<ide> FROM {IMAGE} | 2 |
Python | Python | fix occasional test failure | abbce640d974049f6c2a2c701e80c2cf8c680f59 | <ide><path>libcloud/test/compute/test_ec2.py
<ide> def _RunInstances(self, method, url, body, headers):
<ide>
<ide> def _ex_user_data_RunInstances(self, method, url, body, headers):
<ide> # test_create_node_with_ex_userdata
<add> if url.startswith('/'):
<add> url = url[1:]
<add>
<add> if url.startswith('?'):
<add> url = url[1:]
<add>
<ide> params = parse_qs(url)
<ide>
<del> self.assertTrue('UserData' in params)
<add> self.assertTrue('UserData' in params, 'UserData not in params, actual params: %s' % (str(params)))
<ide> user_data = base64.b64decode(b(params['UserData'][0])).decode('utf-8')
<ide> self.assertEqual(user_data, 'foo\nbar\foo')
<ide>
<ide> def test_create_node_with_ex_userdata(self):
<ide>
<ide> result = self.driver.create_node(name='foo', image=image, size=size,
<ide> ex_userdata='foo\nbar\foo')
<add> self.assertTrue(result)
<ide>
<ide>
<ide> class FCUMockHttp(EC2MockHttp): | 1 |
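The flakiness came from the mocked URL sometimes arriving with a leading `/` or `?`, which `parse_qs` keeps as part of the first key, so the `'UserData'` lookup failed. The standard-library `parse_qs` shows the same behaviour:

```python
from urllib.parse import parse_qs

print('UserData' in parse_qs('?UserData=Zm9v'))    # False -- the key is '?UserData'
print('UserData' in parse_qs('/?UserData=Zm9v'))   # False -- the key is '/?UserData'

url = '/?UserData=Zm9v'
if url.startswith('/'):
    url = url[1:]
if url.startswith('?'):
    url = url[1:]
print('UserData' in parse_qs(url))                 # True, after stripping as in the fix
```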
Ruby | Ruby | initialize the right variable | 7f7e2f12ab835526c6914843b983619ed12c9b68 | <ide><path>railties/test/railties/railtie_test.rb
<ide> class Foo < Rails::Railtie ; config.to_prepare { $to_prepare = true } ; end
<ide> end
<ide>
<ide> test "railtie have access to application in before_configuration callbacks" do
<del> $after_initialize = false
<add> $before_configuration = false
<ide> class Foo < Rails::Railtie ; config.before_configuration { $before_configuration = Rails.root.to_path } ; end
<ide> assert_not $before_configuration
<ide> require "#{app_path}/config/environment" | 1 |
PHP | PHP | add test for multiple=false input | 3427600fb598559d80b9a78c56a58f12be8969d9 | <ide><path>src/View/Helper/FormHelper.php
<ide> protected function _magicOptions($fieldName, $options, $allowOverride)
<ide>
<ide> if ($allowOverride && substr($fieldName, -5) === '._ids') {
<ide> $options['type'] = 'select';
<del> if ( (!isset($options['multiple']) || ($options['multiple'] && $options['multiple'] != 'checkbox')) ) {
<add> if ((!isset($options['multiple']) || ($options['multiple'] && $options['multiple'] != 'checkbox'))) {
<ide> $options['multiple'] = true;
<ide> }
<ide> }
<ide><path>tests/TestCase/View/Helper/FormHelperTest.php
<ide> public function testSelectMultiple()
<ide> ['multiple' => 'multiple', 'form' => 'my-form']
<ide> );
<ide> $this->assertHtml($expected, $result);
<add>
<add> $result = $this->Form->select(
<add> 'Model.multi_field',
<add> $options,
<add> ['form' => 'my-form', 'multiple' => false]
<add> );
<add> $this->assertNotContains('multiple', $result);
<ide> }
<ide>
<ide> /** | 2 |
Python | Python | remove comma that caused list to wrap in tuple! | 06c25a888244e8520d5eeb2df8d5d86499325f48 | <ide><path>spacy/lang/ga/tokenizer_exceptions.py
<ide>
<ide> "led'": [
<ide> {ORTH: "le", LEMMA: "le", NORM: "le", POS: ADP},
<del> {ORTH: "d'", LEMMA: "mo", NORM: "do", POS: DET}],
<del>
<add> {ORTH: "d'", LEMMA: "mo", NORM: "do", POS: DET}]
<ide> }
<ide>
<ide> for exc_data in [
<ide> {ORTH: "Uas.", LEMMA: "Uasal", POS: NOUN},
<ide> {ORTH: "uimh.", LEMMA: "uimhir", POS: NOUN},
<ide> {ORTH: "Uimh.", LEMMA: "uimhir", POS: NOUN}]:
<del> _exc[exc_data[ORTH]] = [dict(exc_data)],
<add> _exc[exc_data[ORTH]] = [exc_data]
<ide>
<ide> for orth in [
<ide> "d'", "D'"]:
<ide> _exc[orth] = [{ORTH: orth}]
<ide>
<ide>
<del>TOKENIZER_EXCEPTIONS = dict(_exc)
<add>TOKENIZER_EXCEPTIONS = _exc | 1 |
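The removed comma is the classic trailing-comma pitfall: an expression followed by a comma is a one-element tuple, so each exception entry was being stored as `([...],)` instead of `[...]`. Minimal reproduction:

```python
exc = {}
exc["Uas."] = [{"ORTH": "Uas."}],     # trailing comma: the stored value is a tuple
print(type(exc["Uas."]))              # <class 'tuple'>
print(exc["Uas."])                    # ([{'ORTH': 'Uas.'}],)

exc["Uas."] = [{"ORTH": "Uas."}]      # fixed: the stored value is the list itself
print(type(exc["Uas."]))              # <class 'list'>
```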
Text | Text | add right model and tokenizer path in example | b88bda6af36cf3a22dfe69abb81fd3590062ac11 | <ide><path>model_cards/mrm8488/xlm-multi-finetuned-xquadv1/README.md
<ide> from transformers import pipeline
<ide>
<ide> qa_pipeline = pipeline(
<ide> "question-answering",
<del> model="mrm8488/bert-multi-uncased-finetuned-xquadv1",
<del> tokenizer="bert-multi-uncased-finetuned-xquadv1"
<add> model="mrm8488/xlm-multi-finetuned-xquadv1",
<add> tokenizer="mrm8488/xlm-multi-finetuned-xquadv1"
<ide> )
<ide>
<ide> # English
<ide> qa_pipeline({
<ide>
<ide> #Output: {'answer': 'работал в репозитории hugginface /','end': 76, 'score': 0.00012340750456964894, 'start': 42}
<ide> ```
<del>Try it on a Colab:
<add>Try it on a Colab (*Do not forget to change the model and tokenizer path in the Colab if necessary*):
<ide>
<ide> <a href="https://colab.research.google.com/github/mrm8488/shared_colab_notebooks/blob/master/Try_mrm8488_xquad_finetuned_uncased_model.ipynb" target="_parent"><img src="https://camo.githubusercontent.com/52feade06f2fecbf006889a904d221e6a730c194/68747470733a2f2f636f6c61622e72657365617263682e676f6f676c652e636f6d2f6173736574732f636f6c61622d62616467652e737667" alt="Open In Colab" data-canonical-src="https://colab.research.google.com/assets/colab-badge.svg"></a>
<ide> | 1 |
Text | Text | add more info for timer.setinterval | 4fc6ef53bc1e48d54eb885e7d4f12fdc0ab60718 | <ide><path>doc/api/timers.md
<ide> added: v15.9.0
<ide> -->
<ide>
<ide> Returns an async iterator that generates values in an interval of `delay` ms.
<add>If `ref` is `true`, you need to call `next()` of async iterator explicitly
<add>or implicitly to keep the event loop alive.
<ide>
<ide> * `delay` {number} The number of milliseconds to wait between iterations.
<ide> **Default:** `1`. | 1 |
Mixed | Python | update nel examples and documentation | f67343295de38be3f88360f009e99de7eb2e199c | <ide><path>bin/wiki_entity_linking/README.md
<del>## Entity Linking with Wikipedia and Wikidata
<del>
<del>### Step 1: Create a Knowledge Base (KB) and training data
<del>
<del>Run `wikidata_pretrain_kb.py`
<del>* This takes as input the locations of a **Wikipedia and a Wikidata dump**, and produces a **KB directory** + **training file**
<del> * WikiData: get `latest-all.json.bz2` from https://dumps.wikimedia.org/wikidatawiki/entities/
<del> * Wikipedia: get `enwiki-latest-pages-articles-multistream.xml.bz2` from https://dumps.wikimedia.org/enwiki/latest/ (or for any other language)
<del>* You can set the filtering parameters for KB construction:
<del> * `max_per_alias` (`-a`): (max) number of candidate entities in the KB per alias/synonym
<del> * `min_freq` (`-f`): threshold of number of times an entity should occur in the corpus to be included in the KB
<del> * `min_pair` (`-c`): threshold of number of times an entity+alias combination should occur in the corpus to be included in the KB
<del>* Further parameters to set:
<del> * `descriptions_from_wikipedia` (`-wp`): whether to parse descriptions from Wikipedia (`True`) or Wikidata (`False`)
<del> * `entity_vector_length` (`-v`): length of the pre-trained entity description vectors
<del> * `lang` (`-la`): language for which to fetch Wikidata information (as the dump contains all languages)
<del>
<del>Quick testing and rerunning:
<del>* When trying out the pipeline for a quick test, set `limit_prior` (`-lp`), `limit_train` (`-lt`) and/or `limit_wd` (`-lw`) to read only parts of the dumps instead of everything.
<del> * e.g. set `-lt 20000 -lp 2000 -lw 3000 -f 1`
<del>* If you only want to (re)run certain parts of the pipeline, just remove the corresponding files and they will be recalculated or reparsed.
<del>
<del>
<del>### Step 2: Train an Entity Linking model
<del>
<del>Run `wikidata_train_entity_linker.py`
<del>* This takes the **KB directory** produced by Step 1, and trains an **Entity Linking model**
<del>* Specify the output directory (`-o`) in which the final, trained model will be saved
<del>* You can set the learning parameters for the EL training:
<del> * `epochs` (`-e`): number of training iterations
<del> * `dropout` (`-p`): dropout rate
<del> * `lr` (`-n`): learning rate
<del> * `l2` (`-r`): L2 regularization
<del>* Specify the number of training and dev testing articles with `train_articles` (`-t`) and `dev_articles` (`-d`) respectively
<del> * If not specified, the full dataset will be processed - this may take a LONG time !
<del>* Further parameters to set:
<del> * `labels_discard` (`-l`): NER label types to discard during training
<ide><path>bin/wiki_entity_linking/__init__.py
<del>TRAINING_DATA_FILE = "gold_entities.jsonl"
<del>KB_FILE = "kb"
<del>KB_MODEL_DIR = "nlp_kb"
<del>OUTPUT_MODEL_DIR = "nlp"
<del>
<del>PRIOR_PROB_PATH = "prior_prob.csv"
<del>ENTITY_DEFS_PATH = "entity_defs.csv"
<del>ENTITY_FREQ_PATH = "entity_freq.csv"
<del>ENTITY_ALIAS_PATH = "entity_alias.csv"
<del>ENTITY_DESCR_PATH = "entity_descriptions.csv"
<del>
<del>LOG_FORMAT = '%(asctime)s - %(levelname)s - %(name)s - %(message)s'
<ide><path>bin/wiki_entity_linking/entity_linker_evaluation.py
<del># coding: utf-8
<del>from __future__ import unicode_literals
<del>
<del>import logging
<del>import random
<del>from tqdm import tqdm
<del>from collections import defaultdict
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>class Metrics(object):
<del> true_pos = 0
<del> false_pos = 0
<del> false_neg = 0
<del>
<del> def update_results(self, true_entity, candidate):
<del> candidate_is_correct = true_entity == candidate
<del>
<del> # Assume that we have no labeled negatives in the data (i.e. cases where true_entity is "NIL")
<del> # Therefore, if candidate_is_correct then we have a true positive and never a true negative.
<del> self.true_pos += candidate_is_correct
<del> self.false_neg += not candidate_is_correct
<del> if candidate and candidate not in {"", "NIL"}:
<del> # A wrong prediction (e.g. Q42 != Q3) counts both as a FP as well as a FN.
<del> self.false_pos += not candidate_is_correct
<del>
<del> def calculate_precision(self):
<del> if self.true_pos == 0:
<del> return 0.0
<del> else:
<del> return self.true_pos / (self.true_pos + self.false_pos)
<del>
<del> def calculate_recall(self):
<del> if self.true_pos == 0:
<del> return 0.0
<del> else:
<del> return self.true_pos / (self.true_pos + self.false_neg)
<del>
<del> def calculate_fscore(self):
<del> p = self.calculate_precision()
<del> r = self.calculate_recall()
<del> if p + r == 0:
<del> return 0.0
<del> else:
<del> return 2 * p * r / (p + r)
<del>
<del>
<del>class EvaluationResults(object):
<del> def __init__(self):
<del> self.metrics = Metrics()
<del> self.metrics_by_label = defaultdict(Metrics)
<del>
<del> def update_metrics(self, ent_label, true_entity, candidate):
<del> self.metrics.update_results(true_entity, candidate)
<del> self.metrics_by_label[ent_label].update_results(true_entity, candidate)
<del>
<del> def report_metrics(self, model_name):
<del> model_str = model_name.title()
<del> recall = self.metrics.calculate_recall()
<del> precision = self.metrics.calculate_precision()
<del> fscore = self.metrics.calculate_fscore()
<del> return (
<del> "{}: ".format(model_str)
<del> + "F-score = {} | ".format(round(fscore, 3))
<del> + "Recall = {} | ".format(round(recall, 3))
<del> + "Precision = {} | ".format(round(precision, 3))
<del> + "F-score by label = {}".format(
<del> {k: v.calculate_fscore() for k, v in sorted(self.metrics_by_label.items())}
<del> )
<del> )
<del>
<del>
<del>class BaselineResults(object):
<del> def __init__(self):
<del> self.random = EvaluationResults()
<del> self.prior = EvaluationResults()
<del> self.oracle = EvaluationResults()
<del>
<del> def report_performance(self, model):
<del> results = getattr(self, model)
<del> return results.report_metrics(model)
<del>
<del> def update_baselines(
<del> self,
<del> true_entity,
<del> ent_label,
<del> random_candidate,
<del> prior_candidate,
<del> oracle_candidate,
<del> ):
<del> self.oracle.update_metrics(ent_label, true_entity, oracle_candidate)
<del> self.prior.update_metrics(ent_label, true_entity, prior_candidate)
<del> self.random.update_metrics(ent_label, true_entity, random_candidate)
<del>
<del>
<del>def measure_performance(dev_data, kb, el_pipe, baseline=True, context=True, dev_limit=None):
<del> counts = dict()
<del> baseline_results = BaselineResults()
<del> context_results = EvaluationResults()
<del> combo_results = EvaluationResults()
<del>
<del> for doc, gold in tqdm(dev_data, total=dev_limit, leave=False, desc='Processing dev data'):
<del> if len(doc) > 0:
<del> correct_ents = dict()
<del> for entity, kb_dict in gold.links.items():
<del> start, end = entity
<del> for gold_kb, value in kb_dict.items():
<del> if value:
<del> # only evaluating on positive examples
<del> offset = _offset(start, end)
<del> correct_ents[offset] = gold_kb
<del>
<del> if baseline:
<del> _add_baseline(baseline_results, counts, doc, correct_ents, kb)
<del>
<del> if context:
<del> # using only context
<del> el_pipe.cfg["incl_context"] = True
<del> el_pipe.cfg["incl_prior"] = False
<del> _add_eval_result(context_results, doc, correct_ents, el_pipe)
<del>
<del> # measuring combined accuracy (prior + context)
<del> el_pipe.cfg["incl_context"] = True
<del> el_pipe.cfg["incl_prior"] = True
<del> _add_eval_result(combo_results, doc, correct_ents, el_pipe)
<del>
<del> if baseline:
<del> logger.info("Counts: {}".format({k: v for k, v in sorted(counts.items())}))
<del> logger.info(baseline_results.report_performance("random"))
<del> logger.info(baseline_results.report_performance("prior"))
<del> logger.info(baseline_results.report_performance("oracle"))
<del>
<del> if context:
<del> logger.info(context_results.report_metrics("context only"))
<del> logger.info(combo_results.report_metrics("context and prior"))
<del>
<del>
<del>def _add_eval_result(results, doc, correct_ents, el_pipe):
<del> """
<del> Evaluate the ent.kb_id_ annotations against the gold standard.
<del> Only evaluate entities that overlap between gold and NER, to isolate the performance of the NEL.
<del> """
<del> try:
<del> doc = el_pipe(doc)
<del> for ent in doc.ents:
<del> ent_label = ent.label_
<del> start = ent.start_char
<del> end = ent.end_char
<del> offset = _offset(start, end)
<del> gold_entity = correct_ents.get(offset, None)
<del> # the gold annotations are not complete so we can't evaluate missing annotations as 'wrong'
<del> if gold_entity is not None:
<del> pred_entity = ent.kb_id_
<del> results.update_metrics(ent_label, gold_entity, pred_entity)
<del>
<del> except Exception as e:
<del> logging.error("Error assessing accuracy " + str(e))
<del>
<del>
<del>def _add_baseline(baseline_results, counts, doc, correct_ents, kb):
<del> """
<del> Measure 3 performance baselines: random selection, prior probabilities, and 'oracle' prediction for upper bound.
<del> Only evaluate entities that overlap between gold and NER, to isolate the performance of the NEL.
<del> """
<del> for ent in doc.ents:
<del> ent_label = ent.label_
<del> start = ent.start_char
<del> end = ent.end_char
<del> offset = _offset(start, end)
<del> gold_entity = correct_ents.get(offset, None)
<del>
<del> # the gold annotations are not complete so we can't evaluate missing annotations as 'wrong'
<del> if gold_entity is not None:
<del> candidates = kb.get_candidates(ent.text)
<del> oracle_candidate = ""
<del> prior_candidate = ""
<del> random_candidate = ""
<del> if candidates:
<del> scores = []
<del>
<del> for c in candidates:
<del> scores.append(c.prior_prob)
<del> if c.entity_ == gold_entity:
<del> oracle_candidate = c.entity_
<del>
<del> best_index = scores.index(max(scores))
<del> prior_candidate = candidates[best_index].entity_
<del> random_candidate = random.choice(candidates).entity_
<del>
<del> current_count = counts.get(ent_label, 0)
<del> counts[ent_label] = current_count+1
<del>
<del> baseline_results.update_baselines(
<del> gold_entity,
<del> ent_label,
<del> random_candidate,
<del> prior_candidate,
<del> oracle_candidate,
<del> )
<del>
<del>
<del>def _offset(start, end):
<del> return "{}_{}".format(start, end)
<ide><path>bin/wiki_entity_linking/kb_creator.py
<del># coding: utf-8
<del>from __future__ import unicode_literals
<del>
<del>import logging
<del>
<del>from spacy.kb import KnowledgeBase
<del>
<del>from bin.wiki_entity_linking.train_descriptions import EntityEncoder
<del>from bin.wiki_entity_linking import wiki_io as io
<del>
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>def create_kb(
<del> nlp,
<del> max_entities_per_alias,
<del> min_entity_freq,
<del> min_occ,
<del> entity_def_path,
<del> entity_descr_path,
<del> entity_alias_path,
<del> entity_freq_path,
<del> prior_prob_path,
<del> entity_vector_length,
<del>):
<del> # Create the knowledge base from Wikidata entries
<del> kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=entity_vector_length)
<del> entity_list, filtered_title_to_id = _define_entities(nlp, kb, entity_def_path, entity_descr_path, min_entity_freq, entity_freq_path, entity_vector_length)
<del> _define_aliases(kb, entity_alias_path, entity_list, filtered_title_to_id, max_entities_per_alias, min_occ, prior_prob_path)
<del> return kb
<del>
<del>
<del>def _define_entities(nlp, kb, entity_def_path, entity_descr_path, min_entity_freq, entity_freq_path, entity_vector_length):
<del> # read the mappings from file
<del> title_to_id = io.read_title_to_id(entity_def_path)
<del> id_to_descr = io.read_id_to_descr(entity_descr_path)
<del>
<del> # check the length of the nlp vectors
<del> if "vectors" in nlp.meta and nlp.vocab.vectors.size:
<del> input_dim = nlp.vocab.vectors_length
<del> logger.info("Loaded pretrained vectors of size %s" % input_dim)
<del> else:
<del> raise ValueError(
<del> "The `nlp` object should have access to pretrained word vectors, "
<del> " cf. https://spacy.io/usage/models#languages."
<del> )
<del>
<del> logger.info("Filtering entities with fewer than {} mentions".format(min_entity_freq))
<del> entity_frequencies = io.read_entity_to_count(entity_freq_path)
<del> # filter the entities for in the KB by frequency, because there's just too much data (8M entities) otherwise
<del> filtered_title_to_id, entity_list, description_list, frequency_list = get_filtered_entities(
<del> title_to_id,
<del> id_to_descr,
<del> entity_frequencies,
<del> min_entity_freq
<del> )
<del> logger.info("Kept {} entities from the set of {}".format(len(description_list), len(title_to_id.keys())))
<del>
<del> logger.info("Training entity encoder")
<del> encoder = EntityEncoder(nlp, input_dim, entity_vector_length)
<del> encoder.train(description_list=description_list, to_print=True)
<del>
<del> logger.info("Getting entity embeddings")
<del> embeddings = encoder.apply_encoder(description_list)
<del>
<del> logger.info("Adding {} entities".format(len(entity_list)))
<del> kb.set_entities(
<del> entity_list=entity_list, freq_list=frequency_list, vector_list=embeddings
<del> )
<del> return entity_list, filtered_title_to_id
<del>
<del>
<del>def _define_aliases(kb, entity_alias_path, entity_list, filtered_title_to_id, max_entities_per_alias, min_occ, prior_prob_path):
<del> logger.info("Adding aliases from Wikipedia and Wikidata")
<del> _add_aliases(
<del> kb,
<del> entity_list=entity_list,
<del> title_to_id=filtered_title_to_id,
<del> max_entities_per_alias=max_entities_per_alias,
<del> min_occ=min_occ,
<del> prior_prob_path=prior_prob_path,
<del> )
<del>
<del>
<del>def get_filtered_entities(title_to_id, id_to_descr, entity_frequencies,
<del> min_entity_freq: int = 10):
<del> filtered_title_to_id = dict()
<del> entity_list = []
<del> description_list = []
<del> frequency_list = []
<del> for title, entity in title_to_id.items():
<del> freq = entity_frequencies.get(title, 0)
<del> desc = id_to_descr.get(entity, None)
<del> if desc and freq > min_entity_freq:
<del> entity_list.append(entity)
<del> description_list.append(desc)
<del> frequency_list.append(freq)
<del> filtered_title_to_id[title] = entity
<del> return filtered_title_to_id, entity_list, description_list, frequency_list
<del>
<del>
<del>def _add_aliases(kb, entity_list, title_to_id, max_entities_per_alias, min_occ, prior_prob_path):
<del> wp_titles = title_to_id.keys()
<del>
<del> # adding aliases with prior probabilities
<del> # we can read this file sequentially, it's sorted by alias, and then by count
<del> logger.info("Adding WP aliases")
<del> with prior_prob_path.open("r", encoding="utf8") as prior_file:
<del> # skip header
<del> prior_file.readline()
<del> line = prior_file.readline()
<del> previous_alias = None
<del> total_count = 0
<del> counts = []
<del> entities = []
<del> while line:
<del> splits = line.replace("\n", "").split(sep="|")
<del> new_alias = splits[0]
<del> count = int(splits[1])
<del> entity = splits[2]
<del>
<del> if new_alias != previous_alias and previous_alias:
<del> # done reading the previous alias --> output
<del> if len(entities) > 0:
<del> selected_entities = []
<del> prior_probs = []
<del> for ent_count, ent_string in zip(counts, entities):
<del> if ent_string in wp_titles:
<del> wd_id = title_to_id[ent_string]
<del> p_entity_givenalias = ent_count / total_count
<del> selected_entities.append(wd_id)
<del> prior_probs.append(p_entity_givenalias)
<del>
<del> if selected_entities:
<del> try:
<del> kb.add_alias(
<del> alias=previous_alias,
<del> entities=selected_entities,
<del> probabilities=prior_probs,
<del> )
<del> except ValueError as e:
<del> logger.error(e)
<del> total_count = 0
<del> counts = []
<del> entities = []
<del>
<del> total_count += count
<del>
<del> if len(entities) < max_entities_per_alias and count >= min_occ:
<del> counts.append(count)
<del> entities.append(entity)
<del> previous_alias = new_alias
<del>
<del> line = prior_file.readline()
<del>
<del>
<del>def read_kb(nlp, kb_file):
<del> kb = KnowledgeBase(vocab=nlp.vocab)
<del> kb.load_bulk(kb_file)
<del> return kb
<ide><path>bin/wiki_entity_linking/train_descriptions.py
<del># coding: utf-8
<del>from random import shuffle
<del>
<del>import logging
<del>import numpy as np
<del>
<del>from spacy._ml import zero_init, create_default_optimizer
<del>from spacy.cli.pretrain import get_cossim_loss
<del>
<del>from thinc.v2v import Model
<del>from thinc.api import chain
<del>from thinc.neural._classes.affine import Affine
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>class EntityEncoder:
<del> """
<del> Train the embeddings of entity descriptions to fit a fixed-size entity vector (e.g. 64D).
<del> This entity vector will be stored in the KB, for further downstream use in the entity model.
<del> """
<del>
<del> DROP = 0
<del> BATCH_SIZE = 1000
<del>
<del> # Set min. acceptable loss to avoid a 'mean of empty slice' warning by numpy
<del> MIN_LOSS = 0.01
<del>
<del> # Reasonable default to stop training when things are not improving
<del> MAX_NO_IMPROVEMENT = 20
<del>
<del> def __init__(self, nlp, input_dim, desc_width, epochs=5):
<del> self.nlp = nlp
<del> self.input_dim = input_dim
<del> self.desc_width = desc_width
<del> self.epochs = epochs
<del>
<del> def apply_encoder(self, description_list):
<del> if self.encoder is None:
<del> raise ValueError("Can not apply encoder before training it")
<del>
<del> batch_size = 100000
<del>
<del> start = 0
<del> stop = min(batch_size, len(description_list))
<del> encodings = []
<del>
<del> while start < len(description_list):
<del> docs = list(self.nlp.pipe(description_list[start:stop]))
<del> doc_embeddings = [self._get_doc_embedding(doc) for doc in docs]
<del> enc = self.encoder(np.asarray(doc_embeddings))
<del> encodings.extend(enc.tolist())
<del>
<del> start = start + batch_size
<del> stop = min(stop + batch_size, len(description_list))
<del> logger.info("Encoded: {} entities".format(stop))
<del>
<del> return encodings
<del>
<del> def train(self, description_list, to_print=False):
<del> processed, loss = self._train_model(description_list)
<del> if to_print:
<del> logger.info(
<del> "Trained entity descriptions on {} ".format(processed) +
<del> "(non-unique) descriptions across {} ".format(self.epochs) +
<del> "epochs"
<del> )
<del> logger.info("Final loss: {}".format(loss))
<del>
<del> def _train_model(self, description_list):
<del> best_loss = 1.0
<del> iter_since_best = 0
<del> self._build_network(self.input_dim, self.desc_width)
<del>
<del> processed = 0
<del> loss = 1
<del> # copy this list so that shuffling does not affect other functions
<del> descriptions = description_list.copy()
<del> to_continue = True
<del>
<del> for i in range(self.epochs):
<del> shuffle(descriptions)
<del>
<del> batch_nr = 0
<del> start = 0
<del> stop = min(self.BATCH_SIZE, len(descriptions))
<del>
<del> while to_continue and start < len(descriptions):
<del> batch = []
<del> for descr in descriptions[start:stop]:
<del> doc = self.nlp(descr)
<del> doc_vector = self._get_doc_embedding(doc)
<del> batch.append(doc_vector)
<del>
<del> loss = self._update(batch)
<del> if batch_nr % 25 == 0:
<del> logger.info("loss: {} ".format(loss))
<del> processed += len(batch)
<del>
<del> # in general, continue training if we haven't reached our ideal min yet
<del> to_continue = loss > self.MIN_LOSS
<del>
<del> # store the best loss and track how long it's been
<del> if loss < best_loss:
<del> best_loss = loss
<del> iter_since_best = 0
<del> else:
<del> iter_since_best += 1
<del>
<del> # stop learning if we haven't seen improvement since the last few iterations
<del> if iter_since_best > self.MAX_NO_IMPROVEMENT:
<del> to_continue = False
<del>
<del> batch_nr += 1
<del> start = start + self.BATCH_SIZE
<del> stop = min(stop + self.BATCH_SIZE, len(descriptions))
<del>
<del> return processed, loss
<del>
<del> @staticmethod
<del> def _get_doc_embedding(doc):
<del> indices = np.zeros((len(doc),), dtype="i")
<del> for i, word in enumerate(doc):
<del> if word.orth in doc.vocab.vectors.key2row:
<del> indices[i] = doc.vocab.vectors.key2row[word.orth]
<del> else:
<del> indices[i] = 0
<del> word_vectors = doc.vocab.vectors.data[indices]
<del> doc_vector = np.mean(word_vectors, axis=0)
<del> return doc_vector
<del>
<del> def _build_network(self, orig_width, hidden_with):
<del> with Model.define_operators({">>": chain}):
<del> # very simple encoder-decoder model
<del> self.encoder = Affine(hidden_with, orig_width)
<del> self.model = self.encoder >> zero_init(
<del> Affine(orig_width, hidden_with, drop_factor=0.0)
<del> )
<del> self.sgd = create_default_optimizer(self.model.ops)
<del>
<del> def _update(self, vectors):
<del> predictions, bp_model = self.model.begin_update(
<del> np.asarray(vectors), drop=self.DROP
<del> )
<del> loss, d_scores = self._get_loss(scores=predictions, golds=np.asarray(vectors))
<del> bp_model(d_scores, sgd=self.sgd)
<del> return loss / len(vectors)
<del>
<del> @staticmethod
<del> def _get_loss(golds, scores):
<del> loss, gradients = get_cossim_loss(scores, golds)
<del> return loss, gradients
<ide><path>bin/wiki_entity_linking/wiki_io.py
<del># coding: utf-8
<del>from __future__ import unicode_literals
<del>
<del>import sys
<del>import csv
<del>
<del># min() needed to prevent error on windows, cf https://stackoverflow.com/questions/52404416/
<del>csv.field_size_limit(min(sys.maxsize, 2147483646))
<del>
<del>""" This class provides reading/writing methods for temp files """
<del>
<del>
<del># Entity definition: WP title -> WD ID #
<del>def write_title_to_id(entity_def_output, title_to_id):
<del> with entity_def_output.open("w", encoding="utf8") as id_file:
<del> id_file.write("WP_title" + "|" + "WD_id" + "\n")
<del> for title, qid in title_to_id.items():
<del> id_file.write(title + "|" + str(qid) + "\n")
<del>
<del>
<del>def read_title_to_id(entity_def_output):
<del> title_to_id = dict()
<del> with entity_def_output.open("r", encoding="utf8") as id_file:
<del> csvreader = csv.reader(id_file, delimiter="|")
<del> # skip header
<del> next(csvreader)
<del> for row in csvreader:
<del> title_to_id[row[0]] = row[1]
<del> return title_to_id
<del>
<del>
<del># Entity aliases from WD: WD ID -> WD alias #
<del>def write_id_to_alias(entity_alias_path, id_to_alias):
<del> with entity_alias_path.open("w", encoding="utf8") as alias_file:
<del> alias_file.write("WD_id" + "|" + "alias" + "\n")
<del> for qid, alias_list in id_to_alias.items():
<del> for alias in alias_list:
<del> alias_file.write(str(qid) + "|" + alias + "\n")
<del>
<del>
<del>def read_id_to_alias(entity_alias_path):
<del> id_to_alias = dict()
<del> with entity_alias_path.open("r", encoding="utf8") as alias_file:
<del> csvreader = csv.reader(alias_file, delimiter="|")
<del> # skip header
<del> next(csvreader)
<del> for row in csvreader:
<del> qid = row[0]
<del> alias = row[1]
<del> alias_list = id_to_alias.get(qid, [])
<del> alias_list.append(alias)
<del> id_to_alias[qid] = alias_list
<del> return id_to_alias
<del>
<del>
<del>def read_alias_to_id_generator(entity_alias_path):
<del> """ Read (aliases, qid) tuples """
<del>
<del> with entity_alias_path.open("r", encoding="utf8") as alias_file:
<del> csvreader = csv.reader(alias_file, delimiter="|")
<del> # skip header
<del> next(csvreader)
<del> for row in csvreader:
<del> qid = row[0]
<del> alias = row[1]
<del> yield alias, qid
<del>
<del>
<del># Entity descriptions from WD: WD ID -> WD alias #
<del>def write_id_to_descr(entity_descr_output, id_to_descr):
<del> with entity_descr_output.open("w", encoding="utf8") as descr_file:
<del> descr_file.write("WD_id" + "|" + "description" + "\n")
<del> for qid, descr in id_to_descr.items():
<del> descr_file.write(str(qid) + "|" + descr + "\n")
<del>
<del>
<del>def read_id_to_descr(entity_desc_path):
<del> id_to_desc = dict()
<del> with entity_desc_path.open("r", encoding="utf8") as descr_file:
<del> csvreader = csv.reader(descr_file, delimiter="|")
<del> # skip header
<del> next(csvreader)
<del> for row in csvreader:
<del> id_to_desc[row[0]] = row[1]
<del> return id_to_desc
<del>
<del>
<del># Entity counts from WP: WP title -> count #
<del>def write_entity_to_count(prior_prob_input, count_output):
<del> # Write entity counts for quick access later
<del> entity_to_count = dict()
<del> total_count = 0
<del>
<del> with prior_prob_input.open("r", encoding="utf8") as prior_file:
<del> # skip header
<del> prior_file.readline()
<del> line = prior_file.readline()
<del>
<del> while line:
<del> splits = line.replace("\n", "").split(sep="|")
<del> # alias = splits[0]
<del> count = int(splits[1])
<del> entity = splits[2]
<del>
<del> current_count = entity_to_count.get(entity, 0)
<del> entity_to_count[entity] = current_count + count
<del>
<del> total_count += count
<del>
<del> line = prior_file.readline()
<del>
<del> with count_output.open("w", encoding="utf8") as entity_file:
<del> entity_file.write("entity" + "|" + "count" + "\n")
<del> for entity, count in entity_to_count.items():
<del> entity_file.write(entity + "|" + str(count) + "\n")
<del>
<del>
<del>def read_entity_to_count(count_input):
<del> entity_to_count = dict()
<del> with count_input.open("r", encoding="utf8") as csvfile:
<del> csvreader = csv.reader(csvfile, delimiter="|")
<del> # skip header
<del> next(csvreader)
<del> for row in csvreader:
<del> entity_to_count[row[0]] = int(row[1])
<del>
<del> return entity_to_count
<ide><path>bin/wiki_entity_linking/wiki_namespaces.py
<del># coding: utf8
<del>from __future__ import unicode_literals
<del>
<del># List of meta pages in Wikidata, should be kept out of the Knowledge base
<del>WD_META_ITEMS = [
<del> "Q163875",
<del> "Q191780",
<del> "Q224414",
<del> "Q4167836",
<del> "Q4167410",
<del> "Q4663903",
<del> "Q11266439",
<del> "Q13406463",
<del> "Q15407973",
<del> "Q18616576",
<del> "Q19887878",
<del> "Q22808320",
<del> "Q23894233",
<del> "Q33120876",
<del> "Q42104522",
<del> "Q47460393",
<del> "Q64875536",
<del> "Q66480449",
<del>]
<del>
<del>
<del># TODO: add more cases from non-English WP's
<del>
<del># List of prefixes that refer to Wikipedia "file" pages
<del>WP_FILE_NAMESPACE = ["Bestand", "File"]
<del>
<del># List of prefixes that refer to Wikipedia "category" pages
<del>WP_CATEGORY_NAMESPACE = ["Kategori", "Category", "Categorie"]
<del>
<del># List of prefixes that refer to Wikipedia "meta" pages
<del># these will/should be matched ignoring case
<del>WP_META_NAMESPACE = (
<del> WP_FILE_NAMESPACE
<del> + WP_CATEGORY_NAMESPACE
<del> + [
<del> "b",
<del> "betawikiversity",
<del> "Book",
<del> "c",
<del> "Commons",
<del> "d",
<del> "dbdump",
<del> "download",
<del> "Draft",
<del> "Education",
<del> "Foundation",
<del> "Gadget",
<del> "Gadget definition",
<del> "Gebruiker",
<del> "gerrit",
<del> "Help",
<del> "Image",
<del> "Incubator",
<del> "m",
<del> "mail",
<del> "mailarchive",
<del> "media",
<del> "MediaWiki",
<del> "MediaWiki talk",
<del> "Mediawikiwiki",
<del> "MediaZilla",
<del> "Meta",
<del> "Metawikipedia",
<del> "Module",
<del> "mw",
<del> "n",
<del> "nost",
<del> "oldwikisource",
<del> "otrs",
<del> "OTRSwiki",
<del> "Overleg gebruiker",
<del> "outreach",
<del> "outreachwiki",
<del> "Portal",
<del> "phab",
<del> "Phabricator",
<del> "Project",
<del> "q",
<del> "quality",
<del> "rev",
<del> "s",
<del> "spcom",
<del> "Special",
<del> "species",
<del> "Strategy",
<del> "sulutil",
<del> "svn",
<del> "Talk",
<del> "Template",
<del> "Template talk",
<del> "Testwiki",
<del> "ticket",
<del> "TimedText",
<del> "Toollabs",
<del> "tools",
<del> "tswiki",
<del> "User",
<del> "User talk",
<del> "v",
<del> "voy",
<del> "w",
<del> "Wikibooks",
<del> "Wikidata",
<del> "wikiHow",
<del> "Wikinvest",
<del> "wikilivres",
<del> "Wikimedia",
<del> "Wikinews",
<del> "Wikipedia",
<del> "Wikipedia talk",
<del> "Wikiquote",
<del> "Wikisource",
<del> "Wikispecies",
<del> "Wikitech",
<del> "Wikiversity",
<del> "Wikivoyage",
<del> "wikt",
<del> "wiktionary",
<del> "wmf",
<del> "wmania",
<del> "WP",
<del> ]
<del>)
<ide><path>bin/wiki_entity_linking/wikidata_pretrain_kb.py
<del># coding: utf-8
<del>"""Script to process Wikipedia and Wikidata dumps and create a knowledge base (KB)
<del>with specific parameters. Intermediate files are written to disk.
<del>
<del>Running the full pipeline on a standard laptop, may take up to 13 hours of processing.
<del>Use the -p, -d and -s options to speed up processing using the intermediate files
<del>from a previous run.
<del>
<del>For the Wikidata dump: get the latest-all.json.bz2 from https://dumps.wikimedia.org/wikidatawiki/entities/
<del>For the Wikipedia dump: get enwiki-latest-pages-articles-multistream.xml.bz2
<del>from https://dumps.wikimedia.org/enwiki/latest/
<del>
<del>"""
<del>from __future__ import unicode_literals
<del>
<del>import logging
<del>from pathlib import Path
<del>import plac
<del>
<del>from bin.wiki_entity_linking import wikipedia_processor as wp, wikidata_processor as wd
<del>from bin.wiki_entity_linking import wiki_io as io
<del>from bin.wiki_entity_linking import kb_creator
<del>from bin.wiki_entity_linking import TRAINING_DATA_FILE, KB_FILE, ENTITY_DESCR_PATH, KB_MODEL_DIR, LOG_FORMAT
<del>from bin.wiki_entity_linking import ENTITY_FREQ_PATH, PRIOR_PROB_PATH, ENTITY_DEFS_PATH, ENTITY_ALIAS_PATH
<del>import spacy
<del>from bin.wiki_entity_linking.kb_creator import read_kb
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>@plac.annotations(
<del> wd_json=("Path to the downloaded WikiData JSON dump.", "positional", None, Path),
<del> wp_xml=("Path to the downloaded Wikipedia XML dump.", "positional", None, Path),
<del> output_dir=("Output directory", "positional", None, Path),
<del> model=("Model name or path, should include pretrained vectors.", "positional", None, str),
<del> max_per_alias=("Max. # entities per alias (default 10)", "option", "a", int),
<del> min_freq=("Min. count of an entity in the corpus (default 20)", "option", "f", int),
<del> min_pair=("Min. count of entity-alias pairs (default 5)", "option", "c", int),
<del> entity_vector_length=("Length of entity vectors (default 64)", "option", "v", int),
<del> loc_prior_prob=("Location to file with prior probabilities", "option", "p", Path),
<del> loc_entity_defs=("Location to file with entity definitions", "option", "d", Path),
<del> loc_entity_desc=("Location to file with entity descriptions", "option", "s", Path),
<del> descr_from_wp=("Flag for using descriptions from WP instead of WD (default False)", "flag", "wp"),
<del> limit_prior=("Threshold to limit lines read from WP for prior probabilities", "option", "lp", int),
<del> limit_train=("Threshold to limit lines read from WP for training set", "option", "lt", int),
<del> limit_wd=("Threshold to limit lines read from WD", "option", "lw", int),
<del> lang=("Optional language for which to get Wikidata titles. Defaults to 'en'", "option", "la", str),
<del>)
<del>def main(
<del> wd_json,
<del> wp_xml,
<del> output_dir,
<del> model,
<del> max_per_alias=10,
<del> min_freq=20,
<del> min_pair=5,
<del> entity_vector_length=64,
<del> loc_prior_prob=None,
<del> loc_entity_defs=None,
<del> loc_entity_alias=None,
<del> loc_entity_desc=None,
<del> descr_from_wp=False,
<del> limit_prior=None,
<del> limit_train=None,
<del> limit_wd=None,
<del> lang="en",
<del>):
<del> entity_defs_path = loc_entity_defs if loc_entity_defs else output_dir / ENTITY_DEFS_PATH
<del> entity_alias_path = loc_entity_alias if loc_entity_alias else output_dir / ENTITY_ALIAS_PATH
<del> entity_descr_path = loc_entity_desc if loc_entity_desc else output_dir / ENTITY_DESCR_PATH
<del> entity_freq_path = output_dir / ENTITY_FREQ_PATH
<del> prior_prob_path = loc_prior_prob if loc_prior_prob else output_dir / PRIOR_PROB_PATH
<del> training_entities_path = output_dir / TRAINING_DATA_FILE
<del> kb_path = output_dir / KB_FILE
<del>
<del> logger.info("Creating KB with Wikipedia and WikiData")
<del>
<del> # STEP 0: set up IO
<del> if not output_dir.exists():
<del> output_dir.mkdir(parents=True)
<del>
<del> # STEP 1: Load the NLP object
<del> logger.info("STEP 1: Loading NLP model {}".format(model))
<del> nlp = spacy.load(model)
<del>
<del> # check the length of the nlp vectors
<del> if "vectors" not in nlp.meta or not nlp.vocab.vectors.size:
<del> raise ValueError(
<del> "The `nlp` object should have access to pretrained word vectors, "
<del> " cf. https://spacy.io/usage/models#languages."
<del> )
<del>
<del> # STEP 2: create prior probabilities from WP
<del> if not prior_prob_path.exists():
<del> # It takes about 2h to process 1000M lines of Wikipedia XML dump
<del> logger.info("STEP 2: Writing prior probabilities to {}".format(prior_prob_path))
<del> if limit_prior is not None:
<del> logger.warning("Warning: reading only {} lines of Wikipedia dump".format(limit_prior))
<del> wp.read_prior_probs(wp_xml, prior_prob_path, limit=limit_prior)
<del> else:
<del> logger.info("STEP 2: Reading prior probabilities from {}".format(prior_prob_path))
<del>
<del> # STEP 3: calculate entity frequencies
<del> if not entity_freq_path.exists():
<del> logger.info("STEP 3: Calculating and writing entity frequencies to {}".format(entity_freq_path))
<del> io.write_entity_to_count(prior_prob_path, entity_freq_path)
<del> else:
<del> logger.info("STEP 3: Reading entity frequencies from {}".format(entity_freq_path))
<del>
<del> # STEP 4: reading definitions and (possibly) descriptions from WikiData or from file
<del> if (not entity_defs_path.exists()) or (not descr_from_wp and not entity_descr_path.exists()):
<del> # It takes about 10h to process 55M lines of Wikidata JSON dump
<del> logger.info("STEP 4: Parsing and writing Wikidata entity definitions to {}".format(entity_defs_path))
<del> if limit_wd is not None:
<del> logger.warning("Warning: reading only {} lines of Wikidata dump".format(limit_wd))
<del> title_to_id, id_to_descr, id_to_alias = wd.read_wikidata_entities_json(
<del> wd_json,
<del> limit_wd,
<del> to_print=False,
<del> lang=lang,
<del> parse_descr=(not descr_from_wp),
<del> )
<del> io.write_title_to_id(entity_defs_path, title_to_id)
<del>
<del> logger.info("STEP 4b: Writing Wikidata entity aliases to {}".format(entity_alias_path))
<del> io.write_id_to_alias(entity_alias_path, id_to_alias)
<del>
<del> if not descr_from_wp:
<del> logger.info("STEP 4c: Writing Wikidata entity descriptions to {}".format(entity_descr_path))
<del> io.write_id_to_descr(entity_descr_path, id_to_descr)
<del> else:
<del> logger.info("STEP 4: Reading entity definitions from {}".format(entity_defs_path))
<del> logger.info("STEP 4b: Reading entity aliases from {}".format(entity_alias_path))
<del> if not descr_from_wp:
<del> logger.info("STEP 4c: Reading entity descriptions from {}".format(entity_descr_path))
<del>
<del> # STEP 5: Getting gold entities from Wikipedia
<del> if (not training_entities_path.exists()) or (descr_from_wp and not entity_descr_path.exists()):
<del> logger.info("STEP 5: Parsing and writing Wikipedia gold entities to {}".format(training_entities_path))
<del> if limit_train is not None:
<del> logger.warning("Warning: reading only {} lines of Wikipedia dump".format(limit_train))
<del> wp.create_training_and_desc(wp_xml, entity_defs_path, entity_descr_path,
<del> training_entities_path, descr_from_wp, limit_train)
<del> if descr_from_wp:
<del> logger.info("STEP 5b: Parsing and writing Wikipedia descriptions to {}".format(entity_descr_path))
<del> else:
<del> logger.info("STEP 5: Reading gold entities from {}".format(training_entities_path))
<del> if descr_from_wp:
<del> logger.info("STEP 5b: Reading entity descriptions from {}".format(entity_descr_path))
<del>
<del> # STEP 6: creating the actual KB
<del> # It takes ca. 30 minutes to pretrain the entity embeddings
<del> if not kb_path.exists():
<del> logger.info("STEP 6: Creating the KB at {}".format(kb_path))
<del> kb = kb_creator.create_kb(
<del> nlp=nlp,
<del> max_entities_per_alias=max_per_alias,
<del> min_entity_freq=min_freq,
<del> min_occ=min_pair,
<del> entity_def_path=entity_defs_path,
<del> entity_descr_path=entity_descr_path,
<del> entity_alias_path=entity_alias_path,
<del> entity_freq_path=entity_freq_path,
<del> prior_prob_path=prior_prob_path,
<del> entity_vector_length=entity_vector_length,
<del> )
<del> kb.dump(kb_path)
<del> logger.info("kb entities: {}".format(kb.get_size_entities()))
<del> logger.info("kb aliases: {}".format(kb.get_size_aliases()))
<del> nlp.to_disk(output_dir / KB_MODEL_DIR)
<del> else:
<del> logger.info("STEP 6: KB already exists at {}".format(kb_path))
<del>
<del> logger.info("Done!")
<del>
<del>
<del>if __name__ == "__main__":
<del> logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
<del> plac.call(main)
<ide><path>bin/wiki_entity_linking/wikidata_processor.py
<del># coding: utf-8
<del>from __future__ import unicode_literals
<del>
<del>import bz2
<del>import json
<del>import logging
<del>
<del>from bin.wiki_entity_linking.wiki_namespaces import WD_META_ITEMS
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>def read_wikidata_entities_json(wikidata_file, limit=None, to_print=False, lang="en", parse_descr=True):
<del> # Read the JSON wiki data and parse out the entities. Takes about 7-10h to parse 55M lines.
<del> # get latest-all.json.bz2 from https://dumps.wikimedia.org/wikidatawiki/entities/
<del>
<del> site_filter = '{}wiki'.format(lang)
<del>
<del> # filter: currently defined as OR: one hit suffices to be removed from further processing
<del> exclude_list = WD_META_ITEMS
<del>
<del> # punctuation
<del> exclude_list.extend(["Q1383557", "Q10617810"])
<del>
<del> # letters etc
<del> exclude_list.extend(["Q188725", "Q19776628", "Q3841820", "Q17907810", "Q9788", "Q9398093"])
<del>
<del> neg_prop_filter = {
<del> 'P31': exclude_list, # instance of
<del> 'P279': exclude_list # subclass
<del> }
<del>
<del> title_to_id = dict()
<del> id_to_descr = dict()
<del> id_to_alias = dict()
<del>
<del> # parse appropriate fields - depending on what we need in the KB
<del> parse_properties = False
<del> parse_sitelinks = True
<del> parse_labels = False
<del> parse_aliases = True
<del> parse_claims = True
<del>
<del> with bz2.open(wikidata_file, mode='rb') as file:
<del> for cnt, line in enumerate(file):
<del> if limit and cnt >= limit:
<del> break
<del> if cnt % 500000 == 0 and cnt > 0:
<del> logger.info("processed {} lines of WikiData JSON dump".format(cnt))
<del> clean_line = line.strip()
<del> if clean_line.endswith(b","):
<del> clean_line = clean_line[:-1]
<del> if len(clean_line) > 1:
<del> obj = json.loads(clean_line)
<del> entry_type = obj["type"]
<del>
<del> if entry_type == "item":
<del> keep = True
<del>
<del> claims = obj["claims"]
<del> if parse_claims:
<del> for prop, value_set in neg_prop_filter.items():
<del> claim_property = claims.get(prop, None)
<del> if claim_property:
<del> for cp in claim_property:
<del> cp_id = (
<del> cp["mainsnak"]
<del> .get("datavalue", {})
<del> .get("value", {})
<del> .get("id")
<del> )
<del> cp_rank = cp["rank"]
<del> if cp_rank != "deprecated" and cp_id in value_set:
<del> keep = False
<del>
<del> if keep:
<del> unique_id = obj["id"]
<del>
<del> if to_print:
<del> print("ID:", unique_id)
<del> print("type:", entry_type)
<del>
<del> # parsing all properties that refer to other entities
<del> if parse_properties:
<del> for prop, claim_property in claims.items():
<del> cp_dicts = [
<del> cp["mainsnak"]["datavalue"].get("value")
<del> for cp in claim_property
<del> if cp["mainsnak"].get("datavalue")
<del> ]
<del> cp_values = [
<del> cp_dict.get("id")
<del> for cp_dict in cp_dicts
<del> if isinstance(cp_dict, dict)
<del> if cp_dict.get("id") is not None
<del> ]
<del> if cp_values:
<del> if to_print:
<del> print("prop:", prop, cp_values)
<del>
<del> found_link = False
<del> if parse_sitelinks:
<del> site_value = obj["sitelinks"].get(site_filter, None)
<del> if site_value:
<del> site = site_value["title"]
<del> if to_print:
<del> print(site_filter, ":", site)
<del> title_to_id[site] = unique_id
<del> found_link = True
<del>
<del> if parse_labels:
<del> labels = obj["labels"]
<del> if labels:
<del> lang_label = labels.get(lang, None)
<del> if lang_label:
<del> if to_print:
<del> print(
<del> "label (" + lang + "):", lang_label["value"]
<del> )
<del>
<del> if found_link and parse_descr:
<del> descriptions = obj["descriptions"]
<del> if descriptions:
<del> lang_descr = descriptions.get(lang, None)
<del> if lang_descr:
<del> if to_print:
<del> print(
<del> "description (" + lang + "):",
<del> lang_descr["value"],
<del> )
<del> id_to_descr[unique_id] = lang_descr["value"]
<del>
<del> if parse_aliases:
<del> aliases = obj["aliases"]
<del> if aliases:
<del> lang_aliases = aliases.get(lang, None)
<del> if lang_aliases:
<del> for item in lang_aliases:
<del> if to_print:
<del> print(
<del> "alias (" + lang + "):", item["value"]
<del> )
<del> alias_list = id_to_alias.get(unique_id, [])
<del> alias_list.append(item["value"])
<del> id_to_alias[unique_id] = alias_list
<del>
<del> if to_print:
<del> print()
<del>
<del> # log final number of lines processed
<del> logger.info("Finished. Processed {} lines of WikiData JSON dump".format(cnt))
<del> return title_to_id, id_to_descr, id_to_alias
<del>
<del>
<ide><path>bin/wiki_entity_linking/wikidata_train_entity_linker.py
<del># coding: utf-8
<del>"""Script that takes a previously created Knowledge Base and trains an entity linking
<del>pipeline. The provided KB directory should hold the kb, the original nlp object and
<del>its vocab used to create the KB, and a few auxiliary files such as the entity definitions,
<del>as created by the script `wikidata_create_kb`.
<del>
<del>For the Wikipedia dump: get enwiki-latest-pages-articles-multistream.xml.bz2
<del>from https://dumps.wikimedia.org/enwiki/latest/
<del>"""
<del>from __future__ import unicode_literals
<del>
<del>import random
<del>import logging
<del>import spacy
<del>from pathlib import Path
<del>import plac
<del>from tqdm import tqdm
<del>
<del>from bin.wiki_entity_linking import wikipedia_processor
<del>from bin.wiki_entity_linking import TRAINING_DATA_FILE, KB_MODEL_DIR, KB_FILE, LOG_FORMAT, OUTPUT_MODEL_DIR
<del>from bin.wiki_entity_linking.entity_linker_evaluation import measure_performance
<del>from bin.wiki_entity_linking.kb_creator import read_kb
<del>
<del>from spacy.util import minibatch, compounding
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>
<del>@plac.annotations(
<del> dir_kb=("Directory with KB, NLP and related files", "positional", None, Path),
<del> output_dir=("Output directory", "option", "o", Path),
<del> loc_training=("Location to training data", "option", "k", Path),
<del> epochs=("Number of training iterations (default 10)", "option", "e", int),
<del> dropout=("Dropout to prevent overfitting (default 0.5)", "option", "p", float),
<del> lr=("Learning rate (default 0.005)", "option", "n", float),
<del> l2=("L2 regularization", "option", "r", float),
<del> train_articles=("# training articles (default 90% of all)", "option", "t", int),
<del> dev_articles=("# dev test articles (default 10% of all)", "option", "d", int),
<del> labels_discard=("NER labels to discard (default None)", "option", "l", str),
<del>)
<del>def main(
<del> dir_kb,
<del> output_dir=None,
<del> loc_training=None,
<del> epochs=10,
<del> dropout=0.5,
<del> lr=0.005,
<del> l2=1e-6,
<del> train_articles=None,
<del> dev_articles=None,
<del> labels_discard=None
<del>):
<del> if not output_dir:
<del> logger.warning("No output dir specified so no results will be written, are you sure about this ?")
<del>
<del> logger.info("Creating Entity Linker with Wikipedia and WikiData")
<del>
<del> output_dir = Path(output_dir) if output_dir else dir_kb
<del> training_path = loc_training if loc_training else dir_kb / TRAINING_DATA_FILE
<del> nlp_dir = dir_kb / KB_MODEL_DIR
<del> kb_path = dir_kb / KB_FILE
<del> nlp_output_dir = output_dir / OUTPUT_MODEL_DIR
<del>
<del> # STEP 0: set up IO
<del> if not output_dir.exists():
<del> output_dir.mkdir()
<del>
<del> # STEP 1 : load the NLP object
<del> logger.info("STEP 1a: Loading model from {}".format(nlp_dir))
<del> nlp = spacy.load(nlp_dir)
<del> logger.info("Original NLP pipeline has following pipeline components: {}".format(nlp.pipe_names))
<del>
<del> # check that there is a NER component in the pipeline
<del> if "ner" not in nlp.pipe_names:
<del> raise ValueError("The `nlp` object should have a pretrained `ner` component.")
<del>
<del> logger.info("STEP 1b: Loading KB from {}".format(kb_path))
<del> kb = read_kb(nlp, kb_path)
<del>
<del> # STEP 2: read the training dataset previously created from WP
<del> logger.info("STEP 2: Reading training & dev dataset from {}".format(training_path))
<del> train_indices, dev_indices = wikipedia_processor.read_training_indices(training_path)
<del> logger.info("Training set has {} articles, limit set to roughly {} articles per epoch"
<del> .format(len(train_indices), train_articles if train_articles else "all"))
<del> logger.info("Dev set has {} articles, limit set to rougly {} articles for evaluation"
<del> .format(len(dev_indices), dev_articles if dev_articles else "all"))
<del> if dev_articles:
<del> dev_indices = dev_indices[0:dev_articles]
<del>
<del> # STEP 3: create and train an entity linking pipe
<del> logger.info("STEP 3: Creating and training an Entity Linking pipe for {} epochs".format(epochs))
<del> if labels_discard:
<del> labels_discard = [x.strip() for x in labels_discard.split(",")]
<del> logger.info("Discarding {} NER types: {}".format(len(labels_discard), labels_discard))
<del> else:
<del> labels_discard = []
<del>
<del> el_pipe = nlp.create_pipe(
<del> name="entity_linker", config={"pretrained_vectors": nlp.vocab.vectors.name,
<del> "labels_discard": labels_discard}
<del> )
<del> el_pipe.set_kb(kb)
<del> nlp.add_pipe(el_pipe, last=True)
<del>
<del> other_pipes = [pipe for pipe in nlp.pipe_names if pipe != "entity_linker"]
<del> with nlp.disable_pipes(*other_pipes): # only train Entity Linking
<del> optimizer = nlp.begin_training()
<del> optimizer.learn_rate = lr
<del> optimizer.L2 = l2
<del>
<del> logger.info("Dev Baseline Accuracies:")
<del> dev_data = wikipedia_processor.read_el_docs_golds(nlp=nlp, entity_file_path=training_path,
<del> dev=True, line_ids=dev_indices,
<del> kb=kb, labels_discard=labels_discard)
<del>
<del> measure_performance(dev_data, kb, el_pipe, baseline=True, context=False, dev_limit=len(dev_indices))
<del>
<del> for itn in range(epochs):
<del> random.shuffle(train_indices)
<del> losses = {}
<del> batches = minibatch(train_indices, size=compounding(8.0, 128.0, 1.001))
<del> batchnr = 0
<del> articles_processed = 0
<del>
<del> # we either process the whole training file, or just a part each epoch
<del> bar_total = len(train_indices)
<del> if train_articles:
<del> bar_total = train_articles
<del>
<del> with tqdm(total=bar_total, leave=False, desc='Epoch ' + str(itn)) as pbar:
<del> for batch in batches:
<del> if not train_articles or articles_processed < train_articles:
<del> with nlp.disable_pipes("entity_linker"):
<del> train_batch = wikipedia_processor.read_el_docs_golds(nlp=nlp, entity_file_path=training_path,
<del> dev=False, line_ids=batch,
<del> kb=kb, labels_discard=labels_discard)
<del> docs, golds = zip(*train_batch)
<del> try:
<del> with nlp.disable_pipes(*other_pipes):
<del> nlp.update(
<del> docs=docs,
<del> golds=golds,
<del> sgd=optimizer,
<del> drop=dropout,
<del> losses=losses,
<del> )
<del> batchnr += 1
<del> articles_processed += len(docs)
<del> pbar.update(len(docs))
<del> except Exception as e:
<del> logger.error("Error updating batch:" + str(e))
<del> if batchnr > 0:
<del> logging.info("Epoch {} trained on {} articles, train loss {}"
<del> .format(itn, articles_processed, round(losses["entity_linker"] / batchnr, 2)))
<del> # re-read the dev_data (data is returned as a generator)
<del> dev_data = wikipedia_processor.read_el_docs_golds(nlp=nlp, entity_file_path=training_path,
<del> dev=True, line_ids=dev_indices,
<del> kb=kb, labels_discard=labels_discard)
<del> measure_performance(dev_data, kb, el_pipe, baseline=False, context=True, dev_limit=len(dev_indices))
<del>
<del> if output_dir:
<del> # STEP 4: write the NLP pipeline (now including an EL model) to file
<del> logger.info("Final NLP pipeline has following pipeline components: {}".format(nlp.pipe_names))
<del> logger.info("STEP 4: Writing trained NLP to {}".format(nlp_output_dir))
<del> nlp.to_disk(nlp_output_dir)
<del>
<del> logger.info("Done!")
<del>
<del>
<del>if __name__ == "__main__":
<del> logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
<del> plac.call(main)
<ide><path>bin/wiki_entity_linking/wikipedia_processor.py
<del># coding: utf-8
<del>from __future__ import unicode_literals
<del>
<del>import re
<del>import bz2
<del>import logging
<del>import random
<del>import json
<del>
<del>from spacy.gold import GoldParse
<del>from bin.wiki_entity_linking import wiki_io as io
<del>from bin.wiki_entity_linking.wiki_namespaces import (
<del> WP_META_NAMESPACE,
<del> WP_FILE_NAMESPACE,
<del> WP_CATEGORY_NAMESPACE,
<del>)
<del>
<del>"""
<del>Process a Wikipedia dump to calculate entity frequencies and prior probabilities in combination with certain mentions.
<del>Write these results to file for downstream KB and training data generation.
<del>
<del>Process Wikipedia interlinks to generate a training dataset for the EL algorithm.
<del>"""
<del>
<del>ENTITY_FILE = "gold_entities.csv"
<del>
<del>map_alias_to_link = dict()
<del>
<del>logger = logging.getLogger(__name__)
<del>
<del>title_regex = re.compile(r"(?<=<title>).*(?=</title>)")
<del>id_regex = re.compile(r"(?<=<id>)\d*(?=</id>)")
<del>text_tag_regex = re.compile(r"(?<=<text).*?(?=>)")
<del>text_regex = re.compile(r"(?<=<text>).*(?=</text)")
<del>info_regex = re.compile(r"{[^{]*?}")
<del>html_regex = re.compile(r"<!--[^-]*-->")
<del>ref_regex = re.compile(r"<ref.*?>") # non-greedy
<del>ref_2_regex = re.compile(r"</ref.*?>") # non-greedy
<del>
<del># find the links
<del>link_regex = re.compile(r"\[\[[^\[\]]*\]\]")
<del>
<del># match on interwiki links, e.g. `en:` or `:fr:`
<del>ns_regex = r":?" + "[a-z][a-z]" + ":"
<del># match on Namespace: optionally preceded by a :
<del>for ns in WP_META_NAMESPACE:
<del> ns_regex += "|" + ":?" + ns + ":"
<del>ns_regex = re.compile(ns_regex, re.IGNORECASE)
<del>
<del>files = r""
<del>for f in WP_FILE_NAMESPACE:
<del> files += "\[\[" + f + ":[^[\]]+]]" + "|"
<del>files = files[0 : len(files) - 1]
<del>file_regex = re.compile(files)
<del>
<del>cats = r""
<del>for c in WP_CATEGORY_NAMESPACE:
<del> cats += "\[\[" + c + ":[^\[]*]]" + "|"
<del>cats = cats[0 : len(cats) - 1]
<del>category_regex = re.compile(cats)
<del>
<del>
<del>def read_prior_probs(wikipedia_input, prior_prob_output, limit=None):
<del> """
<del> Read the XML wikipedia data and parse out intra-wiki links to estimate prior probabilities.
<del> The full file takes about 2-3h to parse 1100M lines.
<del> It works relatively fast because it runs line by line, irrelevant of which article the intrawiki is from,
<del> though dev test articles are excluded in order not to get an artificially strong baseline.
<del> """
<del> cnt = 0
<del> read_id = False
<del> current_article_id = None
<del> with bz2.open(wikipedia_input, mode="rb") as file:
<del> line = file.readline()
<del> while line and (not limit or cnt < limit):
<del> if cnt % 25000000 == 0 and cnt > 0:
<del> logger.info("processed {} lines of Wikipedia XML dump".format(cnt))
<del> clean_line = line.strip().decode("utf-8")
<del>
<del> # we attempt at reading the article's ID (but not the revision or contributor ID)
<del> if "<revision>" in clean_line or "<contributor>" in clean_line:
<del> read_id = False
<del> if "<page>" in clean_line:
<del> read_id = True
<del>
<del> if read_id:
<del> ids = id_regex.search(clean_line)
<del> if ids:
<del> current_article_id = ids[0]
<del>
<del> # only processing prior probabilities from true training (non-dev) articles
<del> if not is_dev(current_article_id):
<del> aliases, entities, normalizations = get_wp_links(clean_line)
<del> for alias, entity, norm in zip(aliases, entities, normalizations):
<del> _store_alias(
<del> alias, entity, normalize_alias=norm, normalize_entity=True
<del> )
<del>
<del> line = file.readline()
<del> cnt += 1
<del> logger.info("processed {} lines of Wikipedia XML dump".format(cnt))
<del> logger.info("Finished. processed {} lines of Wikipedia XML dump".format(cnt))
<del>
<del> # write all aliases and their entities and count occurrences to file
<del> with prior_prob_output.open("w", encoding="utf8") as outputfile:
<del> outputfile.write("alias" + "|" + "count" + "|" + "entity" + "\n")
<del> for alias, alias_dict in sorted(map_alias_to_link.items(), key=lambda x: x[0]):
<del> s_dict = sorted(alias_dict.items(), key=lambda x: x[1], reverse=True)
<del> for entity, count in s_dict:
<del> outputfile.write(alias + "|" + str(count) + "|" + entity + "\n")
<del>
<del>
<del>def _store_alias(alias, entity, normalize_alias=False, normalize_entity=True):
<del> alias = alias.strip()
<del> entity = entity.strip()
<del>
<del> # remove everything after # as this is not part of the title but refers to a specific paragraph
<del> if normalize_entity:
<del> # wikipedia titles are always capitalized
<del> entity = _capitalize_first(entity.split("#")[0])
<del> if normalize_alias:
<del> alias = alias.split("#")[0]
<del>
<del> if alias and entity:
<del> alias_dict = map_alias_to_link.get(alias, dict())
<del> entity_count = alias_dict.get(entity, 0)
<del> alias_dict[entity] = entity_count + 1
<del> map_alias_to_link[alias] = alias_dict
<del>
<del>
<del>def get_wp_links(text):
<del> aliases = []
<del> entities = []
<del> normalizations = []
<del>
<del> matches = link_regex.findall(text)
<del> for match in matches:
<del> match = match[2:][:-2].replace("_", " ").strip()
<del>
<del> if ns_regex.match(match):
<del> pass # ignore the entity if it points to a "meta" page
<del>
<del> # this is a simple [[link]], with the alias the same as the mention
<del> elif "|" not in match:
<del> aliases.append(match)
<del> entities.append(match)
<del> normalizations.append(True)
<del>
<del> # in wiki format, the link is written as [[entity|alias]]
<del> else:
<del> splits = match.split("|")
<del> entity = splits[0].strip()
<del> alias = splits[1].strip()
<del> # specific wiki format [[alias (specification)|]]
<del> if len(alias) == 0 and "(" in entity:
<del> alias = entity.split("(")[0]
<del> aliases.append(alias)
<del> entities.append(entity)
<del> normalizations.append(False)
<del> else:
<del> aliases.append(alias)
<del> entities.append(entity)
<del> normalizations.append(False)
<del>
<del> return aliases, entities, normalizations
<del>
<del>
<del>def _capitalize_first(text):
<del> if not text:
<del> return None
<del> result = text[0].capitalize()
<del> if len(result) > 0:
<del> result += text[1:]
<del> return result
<del>
<del>
<del>def create_training_and_desc(
<del> wp_input, def_input, desc_output, training_output, parse_desc, limit=None
<del>):
<del> wp_to_id = io.read_title_to_id(def_input)
<del> _process_wikipedia_texts(
<del> wp_input, wp_to_id, desc_output, training_output, parse_desc, limit
<del> )
<del>
<del>
<del>def _process_wikipedia_texts(
<del> wikipedia_input, wp_to_id, output, training_output, parse_descriptions, limit=None
<del>):
<del> """
<del> Read the XML wikipedia data to parse out training data:
<del> raw text data + positive instances
<del> """
<del>
<del> read_ids = set()
<del>
<del> with output.open("a", encoding="utf8") as descr_file, training_output.open(
<del> "w", encoding="utf8"
<del> ) as entity_file:
<del> if parse_descriptions:
<del> _write_training_description(descr_file, "WD_id", "description")
<del> with bz2.open(wikipedia_input, mode="rb") as file:
<del> article_count = 0
<del> article_text = ""
<del> article_title = None
<del> article_id = None
<del> reading_text = False
<del> reading_revision = False
<del>
<del> for line in file:
<del> clean_line = line.strip().decode("utf-8")
<del>
<del> if clean_line == "<revision>":
<del> reading_revision = True
<del> elif clean_line == "</revision>":
<del> reading_revision = False
<del>
<del> # Start reading new page
<del> if clean_line == "<page>":
<del> article_text = ""
<del> article_title = None
<del> article_id = None
<del> # finished reading this page
<del> elif clean_line == "</page>":
<del> if article_id:
<del> clean_text, entities = _process_wp_text(
<del> article_title, article_text, wp_to_id
<del> )
<del> if clean_text is not None and entities is not None:
<del> _write_training_entities(
<del> entity_file, article_id, clean_text, entities
<del> )
<del>
<del> if article_title in wp_to_id and parse_descriptions:
<del> description = " ".join(
<del> clean_text[:1000].split(" ")[:-1]
<del> )
<del> _write_training_description(
<del> descr_file, wp_to_id[article_title], description
<del> )
<del> article_count += 1
<del> if article_count % 10000 == 0 and article_count > 0:
<del> logger.info(
<del> "Processed {} articles".format(article_count)
<del> )
<del> if limit and article_count >= limit:
<del> break
<del> article_text = ""
<del> article_title = None
<del> article_id = None
<del> reading_text = False
<del> reading_revision = False
<del>
<del> # start reading text within a page
<del> if "<text" in clean_line:
<del> reading_text = True
<del>
<del> if reading_text:
<del> article_text += " " + clean_line
<del>
<del> # stop reading text within a page (we assume a new page doesn't start on the same line)
<del> if "</text" in clean_line:
<del> reading_text = False
<del>
<del> # read the ID of this article (outside the revision portion of the document)
<del> if not reading_revision:
<del> ids = id_regex.search(clean_line)
<del> if ids:
<del> article_id = ids[0]
<del> if article_id in read_ids:
<del> logger.info(
<del> "Found duplicate article ID", article_id, clean_line
<del> ) # This should never happen ...
<del> read_ids.add(article_id)
<del>
<del> # read the title of this article (outside the revision portion of the document)
<del> if not reading_revision:
<del> titles = title_regex.search(clean_line)
<del> if titles:
<del> article_title = titles[0].strip()
<del> logger.info("Finished. Processed {} articles".format(article_count))
<del>
<del>
<del>def _process_wp_text(article_title, article_text, wp_to_id):
<del> # ignore meta Wikipedia pages
<del> if ns_regex.match(article_title):
<del> return None, None
<del>
<del> # remove the text tags
<del> text_search = text_tag_regex.sub("", article_text)
<del> text_search = text_regex.search(text_search)
<del> if text_search is None:
<del> return None, None
<del> text = text_search.group(0)
<del>
<del> # stop processing if this is a redirect page
<del> if text.startswith("#REDIRECT"):
<del> return None, None
<del>
<del> # get the raw text without markup etc, keeping only interwiki links
<del> clean_text, entities = _remove_links(_get_clean_wp_text(text), wp_to_id)
<del> return clean_text, entities
<del>
<del>
<del>def _get_clean_wp_text(article_text):
<del> clean_text = article_text.strip()
<del>
<del> # remove bolding & italic markup
<del> clean_text = clean_text.replace("'''", "")
<del> clean_text = clean_text.replace("''", "")
<del>
<del> # remove nested {{info}} statements by removing the inner/smallest ones first and iterating
<del> try_again = True
<del> previous_length = len(clean_text)
<del> while try_again:
<del> clean_text = info_regex.sub(
<del> "", clean_text
<del> ) # non-greedy match excluding a nested {
<del> if len(clean_text) < previous_length:
<del> try_again = True
<del> else:
<del> try_again = False
<del> previous_length = len(clean_text)
<del>
<del> # remove HTML comments
<del> clean_text = html_regex.sub("", clean_text)
<del>
<del> # remove Category and File statements
<del> clean_text = category_regex.sub("", clean_text)
<del> clean_text = file_regex.sub("", clean_text)
<del>
<del> # remove multiple =
<del> while "==" in clean_text:
<del> clean_text = clean_text.replace("==", "=")
<del>
<del> clean_text = clean_text.replace(". =", ".")
<del> clean_text = clean_text.replace(" = ", ". ")
<del> clean_text = clean_text.replace("= ", ".")
<del> clean_text = clean_text.replace(" =", "")
<del>
<del> # remove refs (non-greedy match)
<del> clean_text = ref_regex.sub("", clean_text)
<del> clean_text = ref_2_regex.sub("", clean_text)
<del>
<del> # remove additional wikiformatting
<del> clean_text = re.sub(r"<blockquote>", "", clean_text)
<del> clean_text = re.sub(r"</blockquote>", "", clean_text)
<del>
<del> # change special characters back to normal ones
<del> clean_text = clean_text.replace(r"<", "<")
<del> clean_text = clean_text.replace(r">", ">")
<del> clean_text = clean_text.replace(r""", '"')
<del> clean_text = clean_text.replace(r"&nbsp;", " ")
<del> clean_text = clean_text.replace(r"&", "&")
<del>
<del> # remove multiple spaces
<del> while " " in clean_text:
<del> clean_text = clean_text.replace(" ", " ")
<del>
<del> return clean_text.strip()
<del>
<del>
<del>def _remove_links(clean_text, wp_to_id):
<del> # read the text char by char to get the right offsets for the interwiki links
<del> entities = []
<del> final_text = ""
<del> open_read = 0
<del> reading_text = True
<del> reading_entity = False
<del> reading_mention = False
<del> reading_special_case = False
<del> entity_buffer = ""
<del> mention_buffer = ""
<del> for index, letter in enumerate(clean_text):
<del> if letter == "[":
<del> open_read += 1
<del> elif letter == "]":
<del> open_read -= 1
<del> elif letter == "|":
<del> if reading_text:
<del> final_text += letter
<del> # switch from reading entity to mention in the [[entity|mention]] pattern
<del> elif reading_entity:
<del> reading_text = False
<del> reading_entity = False
<del> reading_mention = True
<del> else:
<del> reading_special_case = True
<del> else:
<del> if reading_entity:
<del> entity_buffer += letter
<del> elif reading_mention:
<del> mention_buffer += letter
<del> elif reading_text:
<del> final_text += letter
<del> else:
<del> raise ValueError("Not sure at point", clean_text[index - 2 : index + 2])
<del>
<del> if open_read > 2:
<del> reading_special_case = True
<del>
<del> if open_read == 2 and reading_text:
<del> reading_text = False
<del> reading_entity = True
<del> reading_mention = False
<del>
<del> # we just finished reading an entity
<del> if open_read == 0 and not reading_text:
<del> if "#" in entity_buffer or entity_buffer.startswith(":"):
<del> reading_special_case = True
<del> # Ignore cases with nested structures like File: handles etc
<del> if not reading_special_case:
<del> if not mention_buffer:
<del> mention_buffer = entity_buffer
<del> start = len(final_text)
<del> end = start + len(mention_buffer)
<del> qid = wp_to_id.get(entity_buffer, None)
<del> if qid:
<del> entities.append((mention_buffer, qid, start, end))
<del> final_text += mention_buffer
<del>
<del> entity_buffer = ""
<del> mention_buffer = ""
<del>
<del> reading_text = True
<del> reading_entity = False
<del> reading_mention = False
<del> reading_special_case = False
<del> return final_text, entities
<del>
<del>
<del>def _write_training_description(outputfile, qid, description):
<del> if description is not None:
<del> line = str(qid) + "|" + description + "\n"
<del> outputfile.write(line)
<del>
<del>
<del>def _write_training_entities(outputfile, article_id, clean_text, entities):
<del> entities_data = [
<del> {"alias": ent[0], "entity": ent[1], "start": ent[2], "end": ent[3]}
<del> for ent in entities
<del> ]
<del> line = (
<del> json.dumps(
<del> {
<del> "article_id": article_id,
<del> "clean_text": clean_text,
<del> "entities": entities_data,
<del> },
<del> ensure_ascii=False,
<del> )
<del> + "\n"
<del> )
<del> outputfile.write(line)
<del>
<del>
<del>def read_training_indices(entity_file_path):
<del> """ This method creates two lists of indices into the training file: one with indices for the
<del> training examples, and one for the dev examples."""
<del> train_indices = []
<del> dev_indices = []
<del>
<del> with entity_file_path.open("r", encoding="utf8") as file:
<del> for i, line in enumerate(file):
<del> example = json.loads(line)
<del> article_id = example["article_id"]
<del> clean_text = example["clean_text"]
<del>
<del> if is_valid_article(clean_text):
<del> if is_dev(article_id):
<del> dev_indices.append(i)
<del> else:
<del> train_indices.append(i)
<del>
<del> return train_indices, dev_indices
<del>
<del>
<del>def read_el_docs_golds(nlp, entity_file_path, dev, line_ids, kb, labels_discard=None):
<del> """ This method provides training/dev examples that correspond to the entity annotations found by the nlp object.
<del> For training, it will include both positive and negative examples by using the candidate generator from the kb.
<del> For testing (kb=None), it will include all positive examples only."""
<del> if not labels_discard:
<del> labels_discard = []
<del>
<del> max_index = max(line_ids)
<del>
<del> with entity_file_path.open("r", encoding="utf8") as _file:
<del> line = _file.readline()
<del> i = 0
<del> while line and i < max_index:
<del> if i in line_ids:
<del> example = json.loads(line)
<del> article_id = example["article_id"]
<del> clean_text = example["clean_text"]
<del> entities = example["entities"]
<del>
<del> if dev != is_dev(article_id) or not is_valid_article(clean_text):
<del> continue
<del>
<del> doc = nlp(clean_text)
<del> gold = _get_gold_parse(doc, entities, dev=dev, kb=kb, labels_discard=labels_discard)
<del> if gold and len(gold.links) > 0:
<del> yield doc, gold
<del> i += 1
<del> line = _file.readline()
<del>
<del>
<del>def _get_gold_parse(doc, entities, dev, kb, labels_discard):
<del> gold_entities = {}
<del> tagged_ent_positions = {
<del> (ent.start_char, ent.end_char): ent
<del> for ent in doc.ents
<del> if ent.label_ not in labels_discard
<del> }
<del>
<del> for entity in entities:
<del> entity_id = entity["entity"]
<del> alias = entity["alias"]
<del> start = entity["start"]
<del> end = entity["end"]
<del>
<del> candidate_ids = []
<del> if kb and not dev:
<del> candidates = kb.get_candidates(alias)
<del> candidate_ids = [cand.entity_ for cand in candidates]
<del>
<del> tagged_ent = tagged_ent_positions.get((start, end), None)
<del> if tagged_ent:
<del> # TODO: check that alias == doc.text[start:end]
<del> should_add_ent = (dev or entity_id in candidate_ids) and is_valid_sentence(
<del> tagged_ent.sent.text
<del> )
<del>
<del> if should_add_ent:
<del> value_by_id = {entity_id: 1.0}
<del> if not dev:
<del> random.shuffle(candidate_ids)
<del> value_by_id.update(
<del> {kb_id: 0.0 for kb_id in candidate_ids if kb_id != entity_id}
<del> )
<del> gold_entities[(start, end)] = value_by_id
<del>
<del> return GoldParse(doc, links=gold_entities)
<del>
<del>
<del>def is_dev(article_id):
<del> if not article_id:
<del> return False
<del> return article_id.endswith("3")
<del>
<del>
<del>def is_valid_article(doc_text):
<del> # custom length cut-off
<del> return 10 < len(doc_text) < 30000
<del>
<del>
<del>def is_valid_sentence(sent_text):
<del> if not 10 < len(sent_text) < 3000:
<del> # custom length cut-off
<del> return False
<del>
<del> if sent_text.strip().startswith("*") or sent_text.strip().startswith("#"):
<del> # remove 'enumeration' sentences (occurs often on Wikipedia)
<del> return False
<del>
<del> return True
<add><path>examples/training/create_kb.py
<del><path>examples/training/pretrain_kb.py
<ide> #!/usr/bin/env python
<ide> # coding: utf8
<ide>
<del>"""Example of defining and (pre)training spaCy's knowledge base,
<add>"""Example of defining a knowledge base in spaCy,
<ide> which is needed to implement entity linking functionality.
<ide>
<ide> For more details, see the documentation:
<ide> * Knowledge base: https://spacy.io/api/kb
<ide> * Entity Linking: https://spacy.io/usage/linguistic-features#entity-linking
<ide>
<del>Compatible with: spaCy v2.2.3
<del>Last tested with: v2.2.3
<add>Compatible with: spaCy v2.2.4
<add>Last tested with: v2.2.4
<ide> """
<ide> from __future__ import unicode_literals, print_function
<ide>
<ide> import spacy
<ide> from spacy.kb import KnowledgeBase
<ide>
<del>from bin.wiki_entity_linking.train_descriptions import EntityEncoder
<del>
<ide>
<ide> # Q2146908 (Russ Cochran): American golfer
<ide> # Q7381115 (Russ Cochran): publisher
<ide> ENTITIES = {"Q2146908": ("American golfer", 342), "Q7381115": ("publisher", 17)}
<ide>
<del>INPUT_DIM = 300 # dimension of pretrained input vectors
<del>DESC_WIDTH = 64 # dimension of output entity vectors
<del>
<ide>
<ide> @plac.annotations(
<ide> model=("Model name, should have pretrained word embeddings", "positional", None, str),
<ide> output_dir=("Optional output directory", "option", "o", Path),
<del> n_iter=("Number of training iterations", "option", "n", int),
<ide> )
<del>def main(model=None, output_dir=None, n_iter=50):
<del> """Load the model, create the KB and pretrain the entity encodings.
<add>def main(model=None, output_dir=None):
<add> """Load the model and create the KB with pre-defined entity encodings.
<ide> If an output_dir is provided, the KB will be stored there in a file 'kb'.
<ide> The updated vocab will also be written to a directory in the output_dir."""
<ide>
<ide> def main(model=None, output_dir=None, n_iter=50):
<ide> " cf. https://spacy.io/usage/models#languages."
<ide> )
<ide>
<del> kb = KnowledgeBase(vocab=nlp.vocab)
<add> # The KB entity vectors may have a different dimension than the word vectors,
<add> # e.g. when the entity descriptions are run through an encoder that reduces dimensionality.
<add> # For simplicity, we reuse the original word vector dimension here.
<add> vectors_dim = nlp.vocab.vectors.shape[1]
<add> kb = KnowledgeBase(vocab=nlp.vocab, entity_vector_length=vectors_dim)
<ide>
<ide> # set up the data
<ide> entity_ids = []
<del> descriptions = []
<add> descr_embeddings = []
<ide> freqs = []
<ide> for key, value in ENTITIES.items():
<ide> desc, freq = value
<ide> entity_ids.append(key)
<del> descriptions.append(desc)
<add> descr_embeddings.append(nlp(desc).vector)
<ide> freqs.append(freq)
<ide>
<del> # training entity description encodings
<del> # this part can easily be replaced with a custom entity encoder
<del> encoder = EntityEncoder(
<del> nlp=nlp,
<del> input_dim=INPUT_DIM,
<del> desc_width=DESC_WIDTH,
<del> epochs=n_iter,
<del> )
<del> encoder.train(description_list=descriptions, to_print=True)
<del>
<del> # get the pretrained entity vectors
<del> embeddings = encoder.apply_encoder(descriptions)
<del>
<ide> # set the entities, can also be done by calling `kb.add_entity` for each entity
<del> kb.set_entities(entity_list=entity_ids, freq_list=freqs, vector_list=embeddings)
<add> kb.set_entities(entity_list=entity_ids, freq_list=freqs, vector_list=descr_embeddings)
<ide>
<ide> # adding aliases, the entities need to be defined in the KB beforehand
<ide> kb.add_alias(
<ide> def main(model=None, output_dir=None, n_iter=50):
<ide> vocab2 = Vocab().from_disk(vocab_path)
<ide> kb2 = KnowledgeBase(vocab=vocab2)
<ide> kb2.load_bulk(kb_path)
<del> _print_kb(kb2)
<ide> print()
<add> _print_kb(kb2)
<ide>
<ide>
<ide> def _print_kb(kb):
<ide> def _print_kb(kb):
<ide> plac.call(main)
<ide>
<ide> # Expected output:
<del>
<ide> # 2 kb entities: ['Q2146908', 'Q7381115']
<ide> # 1 kb aliases: ['Russ Cochran']
<ide><path>examples/training/train_entity_linker.py
<ide> #!/usr/bin/env python
<ide> # coding: utf8
<ide>
<del>"""Example of training spaCy's entity linker, starting off with an
<del>existing model and a pre-defined knowledge base.
<add>"""Example of training spaCy's entity linker, starting off with a predefined
<add>knowledge base and corresponding vocab, and a blank English model.
<ide>
<ide> For more details, see the documentation:
<ide> * Training: https://spacy.io/usage/training
<ide> * Entity Linking: https://spacy.io/usage/linguistic-features#entity-linking
<ide>
<del>Compatible with: spaCy v2.2.3
<del>Last tested with: v2.2.3
<add>Compatible with: spaCy v2.2.4
<add>Last tested with: v2.2.4
<ide> """
<ide> from __future__ import unicode_literals, print_function
<ide>
<ide> import plac
<ide> import random
<ide> from pathlib import Path
<ide>
<del>from spacy.symbols import PERSON
<ide> from spacy.vocab import Vocab
<ide>
<ide> import spacy
<ide> from spacy.kb import KnowledgeBase
<ide> from spacy.pipeline import EntityRuler
<del>from spacy.tokens import Span
<ide> from spacy.util import minibatch, compounding
<ide>
<ide>
<ide><path>spacy/tests/regression/test_issue5314.py
<del>import pytest
<del>
<del>from bin.wiki_entity_linking.wikipedia_processor import _process_wp_text
<del>
<del>old_format_text = """<text bytes="11456" xml:space="preserve">[[Fil:Archäologie schichtengrabung.jpg|thumb|Arkæologisk [[udgravning]] med profil.]] '''Arkæologi''' er studiet af tidligere tiders [[menneske]]lige [[aktivitet]], primært gennem studiet af menneskets materielle levn.</text>"""
<del>new_format_text = """<text xml:space="preserve">[[Fil:Archäologie schichtengrabung.jpg|thumb|Arkæologisk [[udgravning]] med profil.]] '''Arkæologi''' er studiet af tidligere tiders [[menneske]]lige [[aktivitet]], primært gennem studiet af menneskets materielle levn.</text>"""
<del>potential_future_format = """<text bytes="11456" xml:space="preserve">[[Fil:Archäologie schichtengrabung.jpg|thumb|Arkæologisk [[udgravning]] med profil.]] '''Arkæologi''' er studiet af tidligere tiders [[menneske]]lige [[aktivitet]], primært gennem studiet af menneskets materielle levn.</text>"""
<del>
<del>
<del>@pytest.mark.parametrize(
<del> "text", [old_format_text, new_format_text, potential_future_format]
<del>)
<del>def test_issue5314(text):
<del> title = "Arkæologi"
<del> clean_text, _ = _process_wp_text(title, text, {})
<del>
<del> expected_text = "Arkæologi er studiet af tidligere tiders menneskelige aktivitet, primært gennem studiet af menneskets materielle levn."
<del> assert clean_text.strip() == expected_text
<ide><path>website/docs/usage/examples.md
<ide> start.
<ide> https://github.com/explosion/spaCy/tree/master/examples/training/train_new_entity_type.py
<ide> ```
<ide>
<add>### Creating a Knowledge Base for Named Entity Linking {#kb}
<add>
<add>This example shows how to create a knowledge base in spaCy,
<add>which is needed to implement entity linking functionality.
<add>It requires as input a spaCy model with pretrained word vectors,
<add>and it stores the KB to a file (if an `output_dir` is provided).
<add>
<add>```python
<add>https://github.com/explosion/spaCy/tree/master/examples/training/create_kb.py
<add>```
<add>
<add>### Training spaCy's Named Entity Linker {#nel}
<add>
<add>This example shows how to train spaCy's entity linker with your own custom
<add>examples, starting off with a predefined knowledge base and its vocab,
<add>and using a blank `English` class.
<add>
<add>```python
<add>https://github.com/explosion/spaCy/tree/master/examples/training/train_entity_linker.py
<add>```
<add>
<ide> ### Training spaCy's Dependency Parser {#parser}
<ide>
<ide> This example shows how to update spaCy's dependency parser, starting off with an
<ide><path>website/docs/usage/linguistic-features.md
<ide> import DisplacyEntHtml from 'images/displacy-ent2.html'
<ide>
<ide> To ground the named entities into the "real world", spaCy provides functionality
<ide> to perform entity linking, which resolves a textual entity to a unique
<del>identifier from a knowledge base (KB). The
<del>[processing scripts](https://github.com/explosion/spaCy/tree/master/bin/wiki_entity_linking)
<del>we provide use WikiData identifiers, but you can create your own
<add>identifier from a knowledge base (KB). You can create your own
<ide> [`KnowledgeBase`](/api/kb) and
<ide> [train a new Entity Linking model](/usage/training#entity-linker) using that
<ide> custom-made KB.
<ide><path>website/docs/usage/training.md
<ide> your data** to find a solution that works best for you.
<ide> ### Updating the Named Entity Recognizer {#example-train-ner}
<ide>
<ide> This example shows how to update spaCy's entity recognizer with your own
<del>examples, starting off with an existing, pretrained model, or from scratch
<del>using a blank `Language` class. To do this, you'll need **example texts** and
<del>the **character offsets** and **labels** of each entity contained in the texts.
<add>examples, starting off with an existing, pretrained model, or from scratch using
<add>a blank `Language` class. To do this, you'll need **example texts** and the
<add>**character offsets** and **labels** of each entity contained in the texts.
<ide>
<ide> ```python
<ide> https://github.com/explosion/spaCy/tree/master/examples/training/train_ner.py
<ide> https://github.com/explosion/spaCy/tree/master/examples/training/train_parser.py
<ide> training the parser.
<ide> 2. **Add the dependency labels** to the parser using the
<ide> [`add_label`](/api/dependencyparser#add_label) method. If you're starting off
<del> with a pretrained spaCy model, this is usually not necessary – but it
<del> doesn't hurt either, just to be safe.
<add> with a pretrained spaCy model, this is usually not necessary – but it doesn't
<add> hurt either, just to be safe.
<ide> 3. **Shuffle and loop over** the examples. For each example, **update the
<ide> model** by calling [`nlp.update`](/api/language#update), which steps through
<ide> the words of the input. At each word, it makes a **prediction**. It then
<ide> To train an entity linking model, you first need to define a knowledge base
<ide>
<ide> A KB consists of a list of entities with unique identifiers. Each such entity
<ide> has an entity vector that will be used to measure similarity with the context in
<del>which an entity is used. These vectors are pretrained and stored in the KB
<del>before the entity linking model will be trained.
<add>which an entity is used. These vectors have a fixed length and are stored in the
<add>KB.
<ide>
<ide> The following example shows how to build a knowledge base from scratch, given a
<del>list of entities and potential aliases. The script further demonstrates how to
<del>pretrain and store the entity vectors. To run this example, the script needs
<del>access to a `vocab` instance or an `nlp` model with pretrained word embeddings.
<add>list of entities and potential aliases. The script requires an `nlp` model with
<add>pretrained word vectors to obtain an encoding of an entity's description as its
<add>vector.
<ide>
<ide> ```python
<del>https://github.com/explosion/spaCy/tree/master/examples/training/pretrain_kb.py
<add>https://github.com/explosion/spaCy/tree/master/examples/training/create_kb.py
<ide> ```
<ide>
<ide> #### Step by step guide {#step-by-step-kb} | 17 |
Java | Java | change methodnotallowedexception to use httpmethod | 290e9bea14ee017519194254f763ea3572310734 | <ide><path>spring-web/src/main/java/org/springframework/web/server/MethodNotAllowedException.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2017 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import java.util.HashSet;
<ide> import java.util.Set;
<ide>
<add>import org.springframework.http.HttpMethod;
<ide> import org.springframework.http.HttpStatus;
<ide> import org.springframework.util.Assert;
<ide>
<ide> @SuppressWarnings("serial")
<ide> public class MethodNotAllowedException extends ResponseStatusException {
<ide>
<del> private String method;
<add> private final String method;
<ide>
<del> private Set<String> supportedMethods;
<add> private final Set<HttpMethod> supportedMethods;
<ide>
<ide>
<del> public MethodNotAllowedException(String method, Collection<String> supportedMethods) {
<add> public MethodNotAllowedException(HttpMethod method, Collection<HttpMethod> supportedMethods) {
<add> this(method.name(), supportedMethods);
<add> }
<add>
<add> public MethodNotAllowedException(String method, Collection<HttpMethod> supportedMethods) {
<ide> super(HttpStatus.METHOD_NOT_ALLOWED, "Request method '" + method + "' not supported");
<ide> Assert.notNull(method, "'method' is required");
<add> if (supportedMethods == null) {
<add> supportedMethods = Collections.emptySet();
<add> }
<ide> this.method = method;
<ide> this.supportedMethods = Collections.unmodifiableSet(new HashSet<>(supportedMethods));
<ide> }
<ide> public String getHttpMethod() {
<ide> /**
<ide> * Return the list of supported HTTP methods.
<ide> */
<del> public Set<String> getSupportedMethods() {
<add> public Set<HttpMethod> getSupportedMethods() {
<ide> return supportedMethods;
<ide> }
<ide> }
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/resource/ResourceWebHandler.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2017 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import java.net.URLDecoder;
<ide> import java.time.Instant;
<ide> import java.util.ArrayList;
<del>import java.util.Arrays;
<ide> import java.util.Collections;
<add>import java.util.EnumSet;
<ide> import java.util.HashMap;
<del>import java.util.LinkedHashSet;
<ide> import java.util.List;
<ide> import java.util.Map;
<ide> import java.util.Optional;
<ide> implements WebHandler, InitializingBean, SmartInitializingSingleton {
<ide>
<ide> /** Set of supported HTTP methods */
<del> private static final Set<String> SUPPORTED_METHODS = new LinkedHashSet<>(2);
<add> private static final Set<HttpMethod> SUPPORTED_METHODS = EnumSet.of(HttpMethod.GET, HttpMethod.HEAD);
<ide>
<ide> private static final Log logger = LogFactory.getLog(ResourceWebHandler.class);
<ide>
<del> static {
<del> SUPPORTED_METHODS.addAll(Arrays.asList("GET", "HEAD"));
<del> }
<del>
<ide>
<ide> private final List<Resource> locations = new ArrayList<>(4);
<ide>
<ide> public Mono<Void> handle(ServerWebExchange exchange) {
<ide> }
<ide>
<ide> // Supported methods and required session
<del> String httpMehtod = exchange.getRequest().getMethod().name();
<del> if (!SUPPORTED_METHODS.contains(httpMehtod)) {
<del> return Mono.error(new MethodNotAllowedException(httpMehtod, SUPPORTED_METHODS));
<add> HttpMethod httpMethod = exchange.getRequest().getMethod();
<add> if (!SUPPORTED_METHODS.contains(httpMethod)) {
<add> return Mono.error(new MethodNotAllowedException(httpMethod, SUPPORTED_METHODS));
<ide> }
<ide>
<ide> // Header phase
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/result/method/RequestMappingInfoHandlerMapping.java
<ide> /*
<del> * Copyright 2002-2016 the original author or authors.
<add> * Copyright 2002-2017 the original author or authors.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide> import java.util.ArrayList;
<ide> import java.util.Collections;
<ide> import java.util.Comparator;
<add>import java.util.EnumSet;
<ide> import java.util.LinkedHashMap;
<ide> import java.util.LinkedHashSet;
<ide> import java.util.List;
<ide> protected HandlerMethod handleNoMatch(Set<RequestMappingInfo> infos, String look
<ide>
<ide> if (helper.hasMethodsMismatch()) {
<ide> HttpMethod httpMethod = request.getMethod();
<del> Set<String> methods = helper.getAllowedMethods();
<add> Set<HttpMethod> methods = helper.getAllowedMethods();
<ide> if (HttpMethod.OPTIONS.matches(httpMethod.name())) {
<ide> HttpOptionsHandler handler = new HttpOptionsHandler(methods);
<ide> return new HandlerMethod(handler, HTTP_OPTIONS_HANDLE_METHOD);
<ide> }
<del> throw new MethodNotAllowedException(httpMethod.name(), methods);
<add> throw new MethodNotAllowedException(httpMethod, methods);
<ide> }
<ide>
<ide> if (helper.hasConsumesMismatch()) {
<ide> public boolean isEmpty() {
<ide> * Any partial matches for "methods"?
<ide> */
<ide> public boolean hasMethodsMismatch() {
<del> return !this.partialMatches.stream().
<del> filter(PartialMatch::hasMethodsMatch).findAny().isPresent();
<add> return this.partialMatches.stream().
<add> noneMatch(PartialMatch::hasMethodsMatch);
<ide> }
<ide>
<ide> /**
<ide> * Any partial matches for "methods" and "consumes"?
<ide> */
<ide> public boolean hasConsumesMismatch() {
<del> return !this.partialMatches.stream().
<del> filter(PartialMatch::hasConsumesMatch).findAny().isPresent();
<add> return this.partialMatches.stream().
<add> noneMatch(PartialMatch::hasConsumesMatch);
<ide> }
<ide>
<ide> /**
<ide> * Any partial matches for "methods", "consumes", and "produces"?
<ide> */
<ide> public boolean hasProducesMismatch() {
<del> return !this.partialMatches.stream().
<del> filter(PartialMatch::hasProducesMatch).findAny().isPresent();
<add> return this.partialMatches.stream().
<add> noneMatch(PartialMatch::hasProducesMatch);
<ide> }
<ide>
<ide> /**
<ide> * Any partial matches for "methods", "consumes", "produces", and "params"?
<ide> */
<ide> public boolean hasParamsMismatch() {
<del> return !this.partialMatches.stream().
<del> filter(PartialMatch::hasParamsMatch).findAny().isPresent();
<add> return this.partialMatches.stream().
<add> noneMatch(PartialMatch::hasParamsMatch);
<ide> }
<ide>
<ide> /**
<ide> * Return declared HTTP methods.
<ide> */
<del> public Set<String> getAllowedMethods() {
<add> public Set<HttpMethod> getAllowedMethods() {
<ide> return this.partialMatches.stream().
<ide> flatMap(m -> m.getInfo().getMethodsCondition().getMethods().stream()).
<del> map(requestMethod -> requestMethod.name()).
<del> collect(Collectors.toCollection(LinkedHashSet::new));
<add> map(requestMethod -> HttpMethod.resolve(requestMethod.name())).
<add> collect(Collectors.toSet());
<ide> }
<ide>
<ide> /**
<ide> private static class HttpOptionsHandler {
<ide> private final HttpHeaders headers = new HttpHeaders();
<ide>
<ide>
<del> public HttpOptionsHandler(Set<String> declaredMethods) {
<add> public HttpOptionsHandler(Set<HttpMethod> declaredMethods) {
<ide> this.headers.setAllow(initAllowedHttpMethods(declaredMethods));
<ide> }
<ide>
<del> private static Set<HttpMethod> initAllowedHttpMethods(Set<String> declaredMethods) {
<del> Set<HttpMethod> result = new LinkedHashSet<>(declaredMethods.size());
<add> private static Set<HttpMethod> initAllowedHttpMethods(Set<HttpMethod> declaredMethods) {
<ide> if (declaredMethods.isEmpty()) {
<del> for (HttpMethod method : HttpMethod.values()) {
<del> if (!HttpMethod.TRACE.equals(method)) {
<del> result.add(method);
<del> }
<del> }
<add> return EnumSet.allOf(HttpMethod.class).stream()
<add> .filter(method -> !method.equals(HttpMethod.TRACE))
<add> .collect(Collectors.toSet());
<ide> }
<ide> else {
<del> boolean hasHead = declaredMethods.contains("HEAD");
<del> for (String method : declaredMethods) {
<del> result.add(HttpMethod.valueOf(method));
<del> if (!hasHead && "GET".equals(method)) {
<del> result.add(HttpMethod.HEAD);
<del> }
<add> Set<HttpMethod> result = new LinkedHashSet<>(declaredMethods);
<add> if (result.contains(HttpMethod.GET)) {
<add> result.add(HttpMethod.HEAD);
<ide> }
<add> return result;
<ide> }
<del> return result;
<ide> }
<ide>
<ide> @SuppressWarnings("unused")
<ide><path>spring-webflux/src/main/java/org/springframework/web/reactive/socket/server/support/HandshakeWebSocketService.java
<ide> public Mono<Void> handleRequest(ServerWebExchange exchange, WebSocketHandler han
<ide> }
<ide>
<ide> if (HttpMethod.GET != method) {
<del> return Mono.error(new MethodNotAllowedException(method.name(), Collections.singleton("GET")));
<add> return Mono.error(new MethodNotAllowedException(method, Collections.singleton(HttpMethod.GET)));
<ide> }
<ide>
<ide> if (!"WebSocket".equalsIgnoreCase(headers.getUpgrade())) {
<ide><path>spring-webflux/src/test/java/org/springframework/web/reactive/result/method/RequestMappingInfoHandlerMappingTests.java
<ide> import java.net.URI;
<ide> import java.util.Arrays;
<ide> import java.util.Collections;
<add>import java.util.EnumSet;
<ide> import java.util.HashSet;
<ide> import java.util.Map;
<ide> import java.util.Optional;
<ide> import org.springframework.web.server.support.HttpRequestPathHelper;
<ide>
<ide> import static org.hamcrest.CoreMatchers.containsString;
<del>import static org.junit.Assert.assertEquals;
<del>import static org.junit.Assert.assertNotNull;
<del>import static org.junit.Assert.assertNull;
<del>import static org.junit.Assert.assertThat;
<del>import static org.junit.Assert.assertTrue;
<add>import static org.junit.Assert.*;
<ide> import static org.springframework.mock.http.server.reactive.test.MockServerHttpRequest.get;
<ide> import static org.springframework.web.bind.annotation.RequestMethod.GET;
<ide> import static org.springframework.web.bind.annotation.RequestMethod.HEAD;
<ide> public void getHandlerRequestMethodNotAllowed() throws Exception {
<ide> Mono<Object> mono = this.handlerMapping.getHandler(exchange);
<ide>
<ide> assertError(mono, MethodNotAllowedException.class,
<del> ex -> assertEquals(new HashSet<>(Arrays.asList("GET", "HEAD")), ex.getSupportedMethods()));
<add> ex -> assertEquals(EnumSet.of(HttpMethod.GET, HttpMethod.HEAD), ex.getSupportedMethods()));
<ide> }
<ide>
<ide> @Test // SPR-9603
<ide> public void getHandlerTestRequestParamMismatch() throws Exception {
<ide>
<ide> @Test
<ide> public void getHandlerHttpOptions() throws Exception {
<del> testHttpOptions("/foo", "GET,HEAD");
<del> testHttpOptions("/person/1", "PUT");
<del> testHttpOptions("/persons", "GET,HEAD,POST,PUT,PATCH,DELETE,OPTIONS");
<del> testHttpOptions("/something", "PUT,POST");
<add> testHttpOptions("/foo", EnumSet.of(HttpMethod.GET, HttpMethod.HEAD));
<add> testHttpOptions("/person/1", EnumSet.of(HttpMethod.PUT));
<add> testHttpOptions("/persons", EnumSet.of(HttpMethod.GET, HttpMethod.HEAD, HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH, HttpMethod.DELETE, HttpMethod.OPTIONS));
<add> testHttpOptions("/something", EnumSet.of(HttpMethod.PUT, HttpMethod.POST));
<ide> }
<ide>
<ide> @Test
<ide> private void testHttpMediaTypeNotSupportedException(String url) throws Exception
<ide> ex.getSupportedMediaTypes()));
<ide> }
<ide>
<del> private void testHttpOptions(String requestURI, String allowHeader) throws Exception {
<add> private void testHttpOptions(String requestURI, Set<HttpMethod> allowedMethods) throws Exception {
<ide> ServerWebExchange exchange = MockServerHttpRequest.options(requestURI).toExchange();
<ide> HandlerMethod handlerMethod = (HandlerMethod) this.handlerMapping.getHandler(exchange).block();
<ide>
<ide> private void testHttpOptions(String requestURI, String allowHeader) throws Excep
<ide> Optional<Object> value = result.getReturnValue();
<ide> assertTrue(value.isPresent());
<ide> assertEquals(HttpHeaders.class, value.get().getClass());
<del> assertEquals(allowHeader, ((HttpHeaders) value.get()).getFirst("Allow"));
<add> assertEquals(allowedMethods, ((HttpHeaders) value.get()).getAllow());
<ide> }
<ide>
<ide> private void testMediaTypeNotAcceptable(String url) throws Exception { | 5 |
Text | Text | fix arxiv links in readme of neural_gpu model | c015f6962e82379d7d4b887ece19be85066928b0 | <ide><path>neural_gpu/README.md
<ide> # NeuralGPU
<del>Code for the Neural GPU model described in [[http://arxiv.org/abs/1511.08228]].
<del>The extended version was described in [[https://arxiv.org/abs/1610.08613]].
<add>Code for the Neural GPU model described in http://arxiv.org/abs/1511.08228.
<add>The extended version was described in https://arxiv.org/abs/1610.08613.
<ide>
<ide> Requirements:
<ide> * TensorFlow (see tensorflow.org for how to install) | 1 |
Ruby | Ruby | remove duplicated test | 69b1716d2366fe425a21efaecace64c73fa4a5fc | <ide><path>railties/test/application/assets_test.rb
<ide> class ::PostsController < ActionController::Base ; end
<ide> assert_no_match(/<script src="\/assets\/xmlhr-([0-z]+)\.js"><\/script>/, last_response.body)
<ide> end
<ide>
<del> test "assets aren't concatened when compile is true is on and debug_assets params is true" do
<del> app_with_assets_in_view
<del> add_to_env_config "production", "config.assets.compile = true"
<del> add_to_env_config "production", "config.assets.allow_debugging = true"
<del>
<del> ENV["RAILS_ENV"] = "production"
<del> require "#{app_path}/config/environment"
<del>
<del> class ::PostsController < ActionController::Base ; end
<del>
<del> get '/posts?debug_assets=true'
<del> assert_match(/<script src="\/assets\/application-([0-z]+)\.js\?body=1"><\/script>/, last_response.body)
<del> assert_match(/<script src="\/assets\/xmlhr-([0-z]+)\.js\?body=1"><\/script>/, last_response.body)
<del> end
<del>
<ide> test "assets can access model information when precompiling" do
<ide> app_file "app/models/post.rb", "class Post; end"
<ide> app_file "app/assets/javascripts/application.js", "//= require_tree ." | 1 |
Javascript | Javascript | update copyright headers for 2015 | 3e0750a4ad2444c2df708b144ff0c8af7628881d | <ide><path>docs/_js/html-jsx.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>grunt/config/browserify.js
<ide> var LICENSE_TEMPLATE =
<ide> '/**\n\
<ide> * @PACKAGE@ v@VERSION@\n\
<ide> *\n\
<del> * Copyright 2013-2014, Facebook, Inc.\n\
<add> * Copyright 2013-2015, Facebook, Inc.\n\
<ide> * All rights reserved.\n\
<ide> *\n\
<ide> * This source code is licensed under the BSD-style license found in the\n\
<ide><path>src/addons/ReactComponentWithPureRenderMixin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/ReactRAFBatchingStrategy.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/__tests__/update-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/link/LinkedStateMixin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/link/ReactLink.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/link/__tests__/LinkedStateMixin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/link/__tests__/ReactLinkPropTypes-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/ReactCSSTransitionGroup.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/ReactCSSTransitionGroupChild.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/ReactTransitionChildMapping.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/ReactTransitionEvents.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/ReactTransitionGroup.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/__tests__/ReactCSSTransitionGroup-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/__tests__/ReactTransitionChildMapping-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/transitions/__tests__/ReactTransitionGroup-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/addons/update.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ClientReactRootIndex.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ReactBrowserEventEmitter.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ReactDOM.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ReactPutListenerQueue.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ReactReconcileTransaction.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ReactWithAddons.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/__tests__/ReactBrowserEventEmitter-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/__tests__/ReactDOM-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/__tests__/ReactWebWorker-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/__tests__/findDOMNode-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/AnalyticsEventPluginFactory.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/BeforeInputEventPlugin.js
<ide> /**
<del> * Copyright 2013 Facebook, Inc.
<add> * Copyright 2013-2015 Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/ChangeEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/DefaultEventPluginOrder.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/EnterLeaveEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/FallbackCompositionState.js
<ide> /**
<del> * Copyright 2013 Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/MobileSafariClickEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/ResponderEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/SelectEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/SimpleEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/TapEventPlugin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/__tests__/AnalyticsEventPlugin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/__tests__/EnterLeaveEventPlugin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/__tests__/FallbackCompositionState-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/eventPlugins/__tests__/ResponderEventPlugin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/findDOMNode.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/server/ReactMarkupChecksum.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/server/ReactServerRendering.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/server/ReactServerRenderingTransaction.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/server/ServerReactRootIndex.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/server/__tests__/ReactServerRendering-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticClipboardEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticCompositionEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticDragEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticFocusEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticInputEvent.js
<ide> /**
<del> * Copyright 2013 Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticKeyboardEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticMouseEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticTouchEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticUIEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/SyntheticWheelEvent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/__tests__/SyntheticEvent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/syntheticEvents/__tests__/SyntheticWheelEvent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/React.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactBrowserComponentMixin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactComponentBrowserEnvironment.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactDOMComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactDOMIDOperations.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactDOMSelection.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactDOMTextComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactDefaultInjection.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactEventListener.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactInjection.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactInputSelection.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/ReactMount.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactDOMComponent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactDOMIDOperations-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactDOMTextComponent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactEventListener-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactMount-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactMountDestruction-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/__tests__/ReactRenderDocument-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/CSSProperty.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/CSSPropertyOperations.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/DOMChildrenOperations.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/DOMProperty.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/DOMPropertyOperations.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/Danger.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/HTMLDOMPropertyConfig.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/SVGDOMPropertyConfig.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/ViewportMetrics.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/__tests__/CSSProperty-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/__tests__/CSSPropertyOperations-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/__tests__/DOMPropertyOperations-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/__tests__/Danger-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/__tests__/getNodeForCharacterOffset-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/AutoFocusMixin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/LinkedValueUtils.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/LocalEventTrapMixin.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMButton.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMForm.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMImg.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMInput.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMOption.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMSelect.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/ReactDOMTextarea.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/__tests__/LocalEventTrapMixin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/__tests__/ReactDOMButton-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/__tests__/ReactDOMInput-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/__tests__/ReactDOMSelect-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/__tests__/ReactDOMTextarea-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/components/createFullPageComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/dangerousStyleValue.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getEventCharCode.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getEventKey.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getEventModifierState.js
<ide> /**
<del> * Copyright 2013 Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getEventTarget.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getNodeForCharacterOffset.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/getTextContentAccessor.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/isEventSupported.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/dom/setInnerHTML.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/browser/ui/getReactRootElementInContainer.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/__tests__/ReactContextValidator-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/class/ReactClass.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/class/ReactDoNotBindDeprecated.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/class/__tests__/ReactBind-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/class/__tests__/ReactClass-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/class/__tests__/ReactClassMixin-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/element/ReactElement.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/element/ReactElementValidator.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/element/__tests__/ReactElement-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/element/__tests__/ReactElementValidator-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/types/ReactPropTypeLocationNames.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/types/ReactPropTypeLocations.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/types/ReactPropTypes.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/classic/types/__tests__/ReactPropTypes-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactChildReconciler.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactComponentEnvironment.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactCompositeComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactContext.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactCurrentOwner.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactDefaultBatchingStrategy.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactEmptyComponent.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactEventEmitterMixin.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactInstanceHandles.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactInstanceMap.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactMultiChild.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactMultiChildUpdateTypes.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactNativeComponent.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactOwner.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactPropTransferer.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactReconciler.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactRef.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactRootIndex.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactStateSetters.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/ReactUpdates.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactComponent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactComponentLifeCycle-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactCompositeComponent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactCompositeComponentDOMMinimalism-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactCompositeComponentError-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactCompositeComponentState-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactEmptyComponent-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactIdentity-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactInstanceHandles-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactMockedComponent-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactMultiChild-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactMultiChildReconcile-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactMultiChildText-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactStateSetters-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/ReactUpdates-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/refs-destruction-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/__tests__/refs-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/instantiateReactComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/core/shouldUpdateReactComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/EventConstants.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/EventPluginHub.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/EventPluginRegistry.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/EventPluginUtils.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/EventPropagators.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/__tests__/EventPluginHub-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/event/__tests__/EventPluginRegistry-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/class/ReactComponent.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/class/__tests__/ReactES6Class-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/element/__tests__/ReactJSXElement-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/element/__tests__/ReactJSXElementValidator-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/types/__tests__/ReactFlowPropTypes-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/modern/types/__tests__/ReactTypeScriptPropTypes-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/stubs/Object.assign.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/ReactDefaultPerf.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/ReactDefaultPerfAnalysis.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/ReactPerf.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/ReactTestUtils.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/__tests__/ReactTestUtils-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/__tests__/reactComponentExpect-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/createHierarchyRenderer.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/getTestDocument.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/mock-modules.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/mock-timers.js
<ide> var mockTimers = {
<ide> clearInterval: _clearInterval,
<ide>
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/mocks.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/test/reactComponentExpect.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/CallbackQueue.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/OrderedMap.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/PooledClass.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/ReactChildren.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/Transaction.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/OrderedMap-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/PooledClass-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/ReactChildren-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/Transaction-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/accumulateInto-test.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/cloneWithProps-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/escapeTextForBrowser-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/joinClasses-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/keyMirror-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/memoizeStringOnly-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/onlyChild-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/sliceChildren-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/__tests__/traverseAllChildren-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/accumulate.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/accumulateInto.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/adler32.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/cloneWithProps.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/deprecated.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/escapeTextForBrowser.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/flattenChildren.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/forEachAccumulated.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/getIteratorFn.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/isTextInputElement.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/joinClasses.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/keyMirror.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/memoizeStringOnly.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/onlyChild.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/shallowEqual.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/sliceChildren.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/utils/traverseAllChildren.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/CSSCore.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/ExecutionEnvironment.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/TouchEventUtils.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/camelize.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/camelizeStyleName.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/createArrayFrom.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/createNodesFromMarkup.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/containsNode.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/focusNode.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/getActiveElement.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/getUnboundedScrollPosition.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/isNode.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/dom/isTextNode.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/emptyFunction.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/emptyObject.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/getMarkupWrap.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/hyphenate.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/hyphenateStyleName.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/invariant.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/keyOf.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/mapObject.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/mergeDeep.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/mergeDeepInto.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/mergeHelpers.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/monitorCodeUse.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/nativeRequestAnimationFrame.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/requestAnimationFrame.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/toArray.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/core/warning.js
<ide> /**
<del> * Copyright 2014, Facebook, Inc.
<add> * Copyright 2014-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/immutable/Immutable.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/immutable/ImmutableObject.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/performance/performance.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/performance/performanceNow.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/stubs/EventListener.js
<ide> /**
<del> * Copyright 2013-2014 Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> *
<ide> * Licensed under the Apache License, Version 2.0 (the "License");
<ide> * you may not use this file except in compliance with the License.
<ide><path>src/vendor/stubs/ReactErrorUtils.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor/stubs/cx.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor_deprecated/core/copyProperties.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor_deprecated/core/merge.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>src/vendor_deprecated/core/mergeInto.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/browser-transforms.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/constants.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/fbtransform/transforms/__tests__/react-displayName-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/fbtransform/transforms/__tests__/react-test.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/fbtransform/transforms/react.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/fbtransform/transforms/reactDisplayName.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/fbtransform/transforms/xjs.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/jasmine/console.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
<ide><path>vendor/jasmine/phantom.js
<ide> /**
<del> * Copyright 2013-2014, Facebook, Inc.
<add> * Copyright 2013-2015, Facebook, Inc.
<ide> * All rights reserved.
<ide> *
<ide> * This source code is licensed under the BSD-style license found in the
| 283
Javascript | Javascript | replace var with let/const | 00e22766d171f9914d2cff4608bffdd317c50e23 | <ide><path>lib/internal/encoding.js
<ide> const {
<ide> encodeUtf8String
<ide> } = internalBinding('buffer');
<ide>
<del>var Buffer;
<add>let Buffer;
<ide> function lazyBuffer() {
<ide> if (Buffer === undefined)
<ide> Buffer = require('buffer').Buffer;
<ide> const encodings = new Map([
<ide> // Unfortunately, String.prototype.trim also removes non-ascii whitespace,
<ide> // so we have to do this manually
<ide> function trimAsciiWhitespace(label) {
<del> var s = 0;
<del> var e = label.length;
<add> let s = 0;
<add> let e = label.length;
<ide> while (s < e && (
<ide> label[s] === '\u0009' ||
<ide> label[s] === '\u000a' ||
<ide> function makeTextDecoderICU() {
<ide> if (enc === undefined)
<ide> throw new ERR_ENCODING_NOT_SUPPORTED(encoding);
<ide>
<del> var flags = 0;
<add> let flags = 0;
<ide> if (options !== null) {
<ide> flags |= options.fatal ? CONVERTER_FLAGS_FATAL : 0;
<ide> flags |= options.ignoreBOM ? CONVERTER_FLAGS_IGNORE_BOM : 0;
<ide> function makeTextDecoderICU() {
<ide> }
<ide> validateArgument(options, 'object', 'options', 'Object');
<ide>
<del> var flags = 0;
<add> let flags = 0;
<ide> if (options !== null)
<ide> flags |= options.stream ? 0 : CONVERTER_FLAGS_FLUSH;
<ide>
<ide> function makeTextDecoderICU() {
<ide> }
<ide>
<ide> function makeTextDecoderJS() {
<del> var StringDecoder;
<add> let StringDecoder;
<ide> function lazyStringDecoder() {
<ide> if (StringDecoder === undefined)
<ide> ({ StringDecoder } = require('string_decoder'));
<ide> function makeTextDecoderJS() {
<ide> if (enc === undefined || !hasConverter(enc))
<ide> throw new ERR_ENCODING_NOT_SUPPORTED(encoding);
<ide>
<del> var flags = 0;
<add> let flags = 0;
<ide> if (options !== null) {
<ide> if (options.fatal) {
<ide> throw new ERR_NO_ICU('"fatal" option');
| 1
Go | Go | add test coverage for pkg/stringutils | 6c36572e8b77bd7a4e8c1afa5be00fb4e7618c12 | <ide><path>pkg/stringutils/stringutils_test.go
<ide> func TestInSlice(t *testing.T) {
<ide> t.Fatalf("Expected string notinslice not to be in slice")
<ide> }
<ide> }
<add>
<add>func TestShellQuoteArgumentsEmpty(t *testing.T) {
<add> actual := ShellQuoteArguments([]string{})
<add> expected := ""
<add> if actual != expected {
<add> t.Fatalf("Expected an empty string")
<add> }
<add>}
<add>
<add>func TestShellQuoteArguments(t *testing.T) {
<add> simpleString := "simpleString"
<add> complexString := "This is a 'more' complex $tring with some special char *"
<add> actual := ShellQuoteArguments([]string{simpleString, complexString})
<add> expected := "simpleString 'This is a '\\''more'\\'' complex $tring with some special char *'"
<add> if actual != expected {
<add> t.Fatalf("Expected \"%v\", got \"%v\"", expected, actual)
<add> }
<add>}
| 1
Python | Python | improve tokenization for ud dutch corpora | f4ef64a5264d4cd7f57059150cebeda388dd202d | <ide><path>spacy/lang/nl/__init__.py
<ide> from .lex_attrs import LEX_ATTRS
<ide> from .tag_map import TAG_MAP
<ide> from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS
<del>from .punctuation import TOKENIZER_INFIXES, TOKENIZER_SUFFIXES
<add>from .punctuation import TOKENIZER_PREFIXES, TOKENIZER_INFIXES
<add>from .punctuation import TOKENIZER_SUFFIXES
<ide> from .lemmatizer import DutchLemmatizer
<ide> from ..tokenizer_exceptions import BASE_EXCEPTIONS
<ide> from ..norm_exceptions import BASE_NORMS
<ide> class DutchDefaults(Language.Defaults):
<ide> tokenizer_exceptions = update_exc(BASE_EXCEPTIONS, TOKENIZER_EXCEPTIONS)
<ide> stop_words = STOP_WORDS
<ide> tag_map = TAG_MAP
<add> prefixes = TOKENIZER_PREFIXES
<ide> infixes = TOKENIZER_INFIXES
<ide> suffixes = TOKENIZER_SUFFIXES
<ide>
<ide><path>spacy/lang/nl/punctuation.py
<ide> # coding: utf8
<ide> from __future__ import unicode_literals
<ide>
<del>from ..char_classes import LIST_ELLIPSES, LIST_ICONS
<add>from ..char_classes import LIST_ELLIPSES, LIST_ICONS, LIST_UNITS, merge_chars
<add>from ..char_classes import LIST_PUNCT, LIST_QUOTES, CURRENCY, PUNCT
<ide> from ..char_classes import CONCAT_QUOTES, ALPHA, ALPHA_LOWER, ALPHA_UPPER
<ide>
<del>from ..punctuation import TOKENIZER_SUFFIXES as DEFAULT_TOKENIZER_SUFFIXES
<add>from ..punctuation import TOKENIZER_PREFIXES as BASE_TOKENIZER_PREFIXES
<add>
<add>
<add>_prefixes = [",,"] + BASE_TOKENIZER_PREFIXES
<ide>
<ide>
<ide> # Copied from `de` package. Main purpose is to ensure that hyphens are not
<ide> r"(?<=[{a}]),(?=[{a}])".format(a=ALPHA),
<ide> r"(?<=[{a}])([{q}\)\]\(\[])(?=[{a}])".format(a=ALPHA, q=_quotes),
<ide> r"(?<=[{a}])--(?=[{a}])".format(a=ALPHA),
<del> r"(?<=[0-9])-(?=[0-9])",
<ide> ]
<ide> )
<ide>
<ide>
<del># Remove "'s" suffix from suffix list. In Dutch, "'s" is a plural ending when
<del># it occurs as a suffix and a clitic for "eens" in standalone use. To avoid
<del># ambiguity it's better to just leave it attached when it occurs as a suffix.
<del>default_suffix_blacklist = ("'s", "'S", "’s", "’S")
<del>_suffixes = [
<del> suffix
<del> for suffix in DEFAULT_TOKENIZER_SUFFIXES
<del> if suffix not in default_suffix_blacklist
<del>]
<add>_list_units = [u for u in LIST_UNITS if u != "%"]
<add>_units = merge_chars(" ".join(_list_units))
<add>
<add>_suffixes = (
<add> ["''"]
<add> + LIST_PUNCT
<add> + LIST_ELLIPSES
<add> + LIST_QUOTES
<add> + LIST_ICONS
<add> + ["—", "–"]
<add> + [
<add> r"(?<=[0-9])\+",
<add> r"(?<=°[FfCcKk])\.",
<add> r"(?<=[0-9])(?:{c})".format(c=CURRENCY),
<add> r"(?<=[0-9])(?:{u})".format(u=_units),
<add> r"(?<=[0-9{al}{e}{p}(?:{q})])\.".format(
<add> al=ALPHA_LOWER, e=r"%²\-\+", q=CONCAT_QUOTES, p=PUNCT
<add> ),
<add> r"(?<=[{au}][{au}])\.".format(au=ALPHA_UPPER),
<add> ]
<add>)
<add>
<ide>
<add>TOKENIZER_PREFIXES = _prefixes
<ide> TOKENIZER_INFIXES = _infixes
<ide> TOKENIZER_SUFFIXES = _suffixes
<ide><path>spacy/lang/nl/tokenizer_exceptions.py
<ide> # are extremely domain-specific. Tokenizer performance may benefit from some
<ide> # slight pruning, although no performance regression has been observed so far.
<ide>
<del># fmt: off
<del>abbrevs = ['a.2d.', 'a.a.', 'a.a.j.b.', 'a.f.t.', 'a.g.j.b.',
<del> 'a.h.v.', 'a.h.w.', 'a.hosp.', 'a.i.', 'a.j.b.', 'a.j.t.',
<del> 'a.m.', 'a.m.r.', 'a.p.m.', 'a.p.r.', 'a.p.t.', 'a.s.',
<del> 'a.t.d.f.', 'a.u.b.', 'a.v.a.', 'a.w.', 'aanbev.',
<del> 'aanbev.comm.', 'aant.', 'aanv.st.', 'aanw.', 'vnw.',
<del> 'aanw.vnw.', 'abd.', 'abm.', 'abs.', 'acc.act.',
<del> 'acc.bedr.m.', 'acc.bedr.t.', 'achterv.', 'act.dr.',
<del> 'act.dr.fam.', 'act.fisc.', 'act.soc.', 'adm.akk.',
<del> 'adm.besl.', 'adm.lex.', 'adm.onderr.', 'adm.ov.', 'adv.',
<del> 'adv.', 'gen.', 'adv.bl.', 'afd.', 'afl.', 'aggl.verord.',
<del> 'agr.', 'al.', 'alg.', 'alg.richts.', 'amén.', 'ann.dr.',
<del> 'ann.dr.lg.', 'ann.dr.sc.pol.', 'ann.ét.eur.',
<del> 'ann.fac.dr.lg.', 'ann.jur.créd.',
<del> 'ann.jur.créd.règl.coll.', 'ann.not.', 'ann.parl.',
<del> 'ann.prat.comm.', 'app.', 'arb.', 'aud.', 'arbbl.',
<del> 'arbh.', 'arbit.besl.', 'arbrb.', 'arr.', 'arr.cass.',
<del> 'arr.r.v.st.', 'arr.verbr.', 'arrondrb.', 'art.', 'artw.',
<del> 'aud.', 'b.', 'b.', 'b.&w.', 'b.a.', 'b.a.s.', 'b.b.o.',
<del> 'b.best.dep.', 'b.br.ex.', 'b.coll.fr.gem.comm.',
<del> 'b.coll.vl.gem.comm.', 'b.d.cult.r.', 'b.d.gem.ex.',
<del> 'b.d.gem.reg.', 'b.dep.', 'b.e.b.', 'b.f.r.',
<del> 'b.fr.gem.ex.', 'b.fr.gem.reg.', 'b.i.h.', 'b.inl.j.d.',
<del> 'b.inl.s.reg.', 'b.j.', 'b.l.', 'b.o.z.', 'b.prov.r.',
<del> 'b.r.h.', 'b.s.', 'b.sr.', 'b.stb.', 'b.t.i.r.',
<del> 'b.t.s.z.', 'b.t.w.rev.', 'b.v.',
<del> 'b.ver.coll.gem.gem.comm.', 'b.verg.r.b.', 'b.versl.',
<del> 'b.vl.ex.', 'b.voorl.reg.', 'b.w.', 'b.w.gew.ex.',
<del> 'b.z.d.g.', 'b.z.v.', 'bab.', 'bedr.org.', 'begins.',
<del> 'beheersov.', 'bekendm.comm.', 'bel.', 'bel.besch.',
<del> 'bel.w.p.', 'beleidsov.', 'belg.', 'grondw.', 'ber.',
<del> 'ber.w.', 'besch.', 'besl.', 'beslagr.', 'bestuurswet.',
<del> 'bet.', 'betr.', 'betr.', 'vnw.', 'bevest.', 'bew.',
<del> 'bijbl.', 'ind.', 'eig.', 'bijbl.n.bijdr.', 'bijl.',
<del> 'bijv.', 'bijw.', 'bijz.decr.', 'bin.b.', 'bkh.', 'bl.',
<del> 'blz.', 'bm.', 'bn.', 'rh.', 'bnw.', 'bouwr.', 'br.parl.',
<del> 'bs.', 'bull.', 'bull.adm.pénit.', 'bull.ass.',
<del> 'bull.b.m.m.', 'bull.bel.', 'bull.best.strafinr.',
<del> 'bull.bmm.', 'bull.c.b.n.', 'bull.c.n.c.', 'bull.cbn.',
<del> 'bull.centr.arb.', 'bull.cnc.', 'bull.contr.',
<del> 'bull.doc.min.fin.', 'bull.f.e.b.', 'bull.feb.',
<del> 'bull.fisc.fin.r.', 'bull.i.u.m.',
<del> 'bull.inf.ass.secr.soc.', 'bull.inf.i.e.c.',
<del> 'bull.inf.i.n.a.m.i.', 'bull.inf.i.r.e.', 'bull.inf.iec.',
<del> 'bull.inf.inami.', 'bull.inf.ire.', 'bull.inst.arb.',
<del> 'bull.ium.', 'bull.jur.imm.', 'bull.lég.b.', 'bull.off.',
<del> 'bull.trim.b.dr.comp.', 'bull.us.', 'bull.v.b.o.',
<del> 'bull.vbo.', 'bv.', 'bw.', 'bxh.', 'byz.', 'c.', 'c.a.',
<del> 'c.a.-a.', 'c.a.b.g.', 'c.c.', 'c.c.i.', 'c.c.s.',
<del> 'c.conc.jur.', 'c.d.e.', 'c.d.p.k.', 'c.e.', 'c.ex.',
<del> 'c.f.', 'c.h.a.', 'c.i.f.', 'c.i.f.i.c.', 'c.j.', 'c.l.',
<del> 'c.n.', 'c.o.d.', 'c.p.', 'c.pr.civ.', 'c.q.', 'c.r.',
<del> 'c.r.a.', 'c.s.', 'c.s.a.', 'c.s.q.n.', 'c.v.', 'c.v.a.',
<del> 'c.v.o.', 'ca.', 'cadeaust.', 'cah.const.',
<del> 'cah.dr.europ.', 'cah.dr.immo.', 'cah.dr.jud.', 'cal.',
<del> '2d.', 'cal.', '3e.', 'cal.', 'rprt.', 'cap.', 'carg.',
<del> 'cass.', 'cass.', 'verw.', 'cert.', 'cf.', 'ch.', 'chron.',
<del> 'chron.d.s.', 'chron.dr.not.', 'cie.', 'cie.',
<del> 'verz.schr.', 'cir.', 'circ.', 'circ.z.', 'cit.',
<del> 'cit.loc.', 'civ.', 'cl.et.b.', 'cmt.', 'co.',
<del> 'cognoss.v.', 'coll.', 'v.', 'b.', 'colp.w.', 'com.',
<del> 'com.', 'cas.', 'com.v.min.', 'comm.', 'comm.', 'v.',
<del> 'comm.bijz.ov.', 'comm.erf.', 'comm.fin.', 'comm.ger.',
<del> 'comm.handel.', 'comm.pers.', 'comm.pub.', 'comm.straf.',
<del> 'comm.v.', 'comm.venn.', 'comm.verz.', 'comm.voor.',
<del> 'comp.', 'compt.w.', 'computerr.', 'con.m.', 'concl.',
<del> 'concr.', 'conf.', 'confl.w.', 'confl.w.huwbetr.', 'cons.',
<del> 'conv.', 'coöp.', 'ver.', 'corr.', 'corr.bl.',
<del> 'cour.fisc.', 'cour.immo.', 'cridon.', 'crim.', 'cur.',
<del> 'cur.', 'crt.', 'curs.', 'd.', 'd.-g.', 'd.a.', 'd.a.v.',
<del> 'd.b.f.', 'd.c.', 'd.c.c.r.', 'd.d.', 'd.d.p.', 'd.e.t.',
<del> 'd.gem.r.', 'd.h.', 'd.h.z.', 'd.i.', 'd.i.t.', 'd.j.',
<del> 'd.l.r.', 'd.m.', 'd.m.v.', 'd.o.v.', 'd.parl.', 'd.w.z.',
<del> 'dact.', 'dat.', 'dbesch.', 'dbesl.', 'decr.', 'decr.d.',
<del> 'decr.fr.', 'decr.vl.', 'decr.w.', 'def.', 'dep.opv.',
<del> 'dep.rtl.', 'derg.', 'desp.', 'det.mag.', 'deurw.regl.',
<del> 'dez.', 'dgl.', 'dhr.', 'disp.', 'diss.', 'div.',
<del> 'div.act.', 'div.bel.', 'dl.', 'dln.', 'dnotz.', 'doc.',
<del> 'hist.', 'doc.jur.b.', 'doc.min.fin.', 'doc.parl.',
<del> 'doctr.', 'dpl.', 'dpl.besl.', 'dr.', 'dr.banc.fin.',
<del> 'dr.circ.', 'dr.inform.', 'dr.mr.', 'dr.pén.entr.',
<del> 'dr.q.m.', 'drs.', 'dtp.', 'dwz.', 'dyn.', 'e.', 'e.a.',
<del> 'e.b.', 'tek.mod.', 'e.c.', 'e.c.a.', 'e.d.', 'e.e.',
<del> 'e.e.a.', 'e.e.g.', 'e.g.', 'e.g.a.', 'e.h.a.', 'e.i.',
<del> 'e.j.', 'e.m.a.', 'e.n.a.c.', 'e.o.', 'e.p.c.', 'e.r.c.',
<del> 'e.r.f.', 'e.r.h.', 'e.r.o.', 'e.r.p.', 'e.r.v.',
<del> 'e.s.r.a.', 'e.s.t.', 'e.v.', 'e.v.a.', 'e.w.', 'e&o.e.',
<del> 'ec.pol.r.', 'econ.', 'ed.', 'ed(s).', 'eff.', 'eig.',
<del> 'eig.mag.', 'eil.', 'elektr.', 'enmb.', 'enz.', 'err.',
<del> 'etc.', 'etq.', 'eur.', 'parl.', 'eur.t.s.', 'ev.', 'evt.',
<del> 'ex.', 'ex.crim.', 'exec.', 'f.', 'f.a.o.', 'f.a.q.',
<del> 'f.a.s.', 'f.i.b.', 'f.j.f.', 'f.o.b.', 'f.o.r.', 'f.o.s.',
<del> 'f.o.t.', 'f.r.', 'f.supp.', 'f.suppl.', 'fa.', 'facs.',
<del> 'fasc.', 'fg.', 'fid.ber.', 'fig.', 'fin.verh.w.', 'fisc.',
<del> 'fisc.', 'tijdschr.', 'fisc.act.', 'fisc.koer.', 'fl.',
<del> 'form.', 'foro.', 'it.', 'fr.', 'fr.cult.r.', 'fr.gem.r.',
<del> 'fr.parl.', 'fra.', 'ft.', 'g.', 'g.a.', 'g.a.v.',
<del> 'g.a.w.v.', 'g.g.d.', 'g.m.t.', 'g.o.', 'g.omt.e.', 'g.p.',
<del> 'g.s.', 'g.v.', 'g.w.w.', 'geb.', 'gebr.', 'gebrs.',
<del> 'gec.', 'gec.decr.', 'ged.', 'ged.st.', 'gedipl.',
<del> 'gedr.st.', 'geh.', 'gem.', 'gem.', 'gem.',
<del> 'gem.gem.comm.', 'gem.st.', 'gem.stem.', 'gem.w.',
<del> 'gemeensch.optr.', 'gemeensch.standp.', 'gemeensch.strat.',
<del> 'gemeent.', 'gemeent.b.', 'gemeent.regl.',
<del> 'gemeent.verord.', 'geol.', 'geopp.', 'gepubl.',
<del> 'ger.deurw.', 'ger.w.', 'gerekw.', 'gereq.', 'gesch.',
<del> 'get.', 'getr.', 'gev.m.', 'gev.maatr.', 'gew.', 'ghert.',
<del> 'gir.eff.verk.', 'gk.', 'gr.', 'gramm.', 'grat.w.',
<del> 'grootb.w.', 'grs.', 'grvm.', 'grw.', 'gst.', 'gw.',
<del> 'h.a.', 'h.a.v.o.', 'h.b.o.', 'h.e.a.o.', 'h.e.g.a.',
<del> 'h.e.geb.', 'h.e.gestr.', 'h.l.', 'h.m.', 'h.o.', 'h.r.',
<del> 'h.t.l.', 'h.t.m.', 'h.w.geb.', 'hand.', 'handelsn.w.',
<del> 'handelspr.', 'handelsr.w.', 'handelsreg.w.', 'handv.',
<del> 'harv.l.rev.', 'hc.', 'herald.', 'hert.', 'herz.',
<del> 'hfdst.', 'hfst.', 'hgrw.', 'hhr.', 'hist.', 'hooggel.',
<del> 'hoogl.', 'hosp.', 'hpw.', 'hr.', 'hr.', 'ms.', 'hr.ms.',
<del> 'hregw.', 'hrg.', 'hst.', 'huis.just.', 'huisv.w.',
<del> 'huurbl.', 'hv.vn.', 'hw.', 'hyp.w.', 'i.b.s.', 'i.c.',
<del> 'i.c.m.h.', 'i.e.', 'i.f.', 'i.f.p.', 'i.g.v.', 'i.h.',
<del> 'i.h.a.', 'i.h.b.', 'i.l.pr.', 'i.o.', 'i.p.o.', 'i.p.r.',
<del> 'i.p.v.', 'i.pl.v.', 'i.r.d.i.', 'i.s.m.', 'i.t.t.',
<del> 'i.v.', 'i.v.m.', 'i.v.s.', 'i.w.tr.', 'i.z.', 'ib.',
<del> 'ibid.', 'icip-ing.cons.', 'iem.', 'indic.soc.', 'indiv.',
<del> 'inf.', 'inf.i.d.a.c.', 'inf.idac.', 'inf.r.i.z.i.v.',
<del> 'inf.riziv.', 'inf.soc.secr.', 'ing.', 'ing.', 'cons.',
<del> 'ing.cons.', 'inst.', 'int.', 'int.', 'rechtsh.',
<del> 'strafz.', 'interm.', 'intern.fisc.act.',
<del> 'intern.vervoerr.', 'inv.', 'inv.', 'f.', 'inv.w.',
<del> 'inv.wet.', 'invord.w.', 'inz.', 'ir.', 'irspr.', 'iwtr.',
<del> 'j.', 'j.-cl.', 'j.c.b.', 'j.c.e.', 'j.c.fl.', 'j.c.j.',
<del> 'j.c.p.', 'j.d.e.', 'j.d.f.', 'j.d.s.c.', 'j.dr.jeun.',
<del> 'j.j.d.', 'j.j.p.', 'j.j.pol.', 'j.l.', 'j.l.m.b.',
<del> 'j.l.o.', 'j.p.a.', 'j.r.s.', 'j.t.', 'j.t.d.e.',
<del> 'j.t.dr.eur.', 'j.t.o.', 'j.t.t.', 'jaarl.', 'jb.hand.',
<del> 'jb.kred.', 'jb.kred.c.s.', 'jb.l.r.b.', 'jb.lrb.',
<del> 'jb.markt.', 'jb.mens.', 'jb.t.r.d.', 'jb.trd.',
<del> 'jeugdrb.', 'jeugdwerkg.w.', 'jg.', 'jis.', 'jl.',
<del> 'journ.jur.', 'journ.prat.dr.fisc.fin.', 'journ.proc.',
<del> 'jrg.', 'jur.', 'jur.comm.fl.', 'jur.dr.soc.b.l.n.',
<del> 'jur.f.p.e.', 'jur.fpe.', 'jur.niv.', 'jur.trav.brux.',
<del> 'jurambt.', 'jv.cass.', 'jv.h.r.j.', 'jv.hrj.', 'jw.',
<del> 'k.', 'k.', 'k.b.', 'k.g.', 'k.k.', 'k.m.b.o.', 'k.o.o.',
<del> 'k.v.k.', 'k.v.v.v.', 'kadasterw.', 'kaderb.', 'kador.',
<del> 'kbo-nr.', 'kg.', 'kh.', 'kiesw.', 'kind.bes.v.', 'kkr.',
<del> 'koopv.', 'kr.', 'krankz.w.', 'ksbel.', 'kt.', 'ktg.',
<del> 'ktr.', 'kvdm.', 'kw.r.', 'kymr.', 'kzr.', 'kzw.', 'l.',
<del> 'l.b.', 'l.b.o.', 'l.bas.', 'l.c.', 'l.gew.', 'l.j.',
<del> 'l.k.', 'l.l.', 'l.o.', 'l.r.b.', 'l.u.v.i.', 'l.v.r.',
<del> 'l.v.w.', 'l.w.', "l'exp.-compt.b..", 'l’exp.-compt.b.',
<del> 'landinr.w.', 'landscrt.', 'lat.', 'law.ed.', 'lett.',
<del> 'levensverz.', 'lgrs.', 'lidw.', 'limb.rechtsl.', 'lit.',
<del> 'litt.', 'liw.', 'liwet.', 'lk.', 'll.', 'll.(l.)l.r.',
<del> 'loonw.', 'losbl.', 'ltd.', 'luchtv.', 'luchtv.w.', 'm.',
<del> 'm.', 'not.', 'm.a.v.o.', 'm.a.w.', 'm.b.', 'm.b.o.',
<del> 'm.b.r.', 'm.b.t.', 'm.d.g.o.', 'm.e.a.o.', 'm.e.r.',
<del> 'm.h.', 'm.h.d.', 'm.i.v.', 'm.j.t.', 'm.k.', 'm.m.',
<del> 'm.m.a.', 'm.m.h.h.', 'm.m.v.', 'm.n.', 'm.not.fisc.',
<del> 'm.nt.', 'm.o.', 'm.r.', 'm.s.a.', 'm.u.p.', 'm.v.a.',
<del> 'm.v.h.n.', 'm.v.t.', 'm.z.', 'maatr.teboekgest.luchtv.',
<del> 'maced.', 'mand.', 'max.', 'mbl.not.', 'me.', 'med.',
<del> 'med.', 'v.b.o.', 'med.b.u.f.r.', 'med.bufr.', 'med.vbo.',
<del> 'meerv.', 'meetbr.w.', 'mém.adm.', 'mgr.', 'mgrs.', 'mhd.',
<del> 'mi.verantw.', 'mil.', 'mil.bed.', 'mil.ger.', 'min.',
<del> 'min.', 'aanbev.', 'min.', 'circ.', 'min.', 'fin.',
<del> 'min.j.omz.', 'min.just.circ.', 'mitt.', 'mnd.', 'mod.',
<del> 'mon.', 'mouv.comm.', 'mr.', 'ms.', 'muz.', 'mv.', 'n.',
<del> 'chr.', 'n.a.', 'n.a.g.', 'n.a.v.', 'n.b.', 'n.c.',
<del> 'n.chr.', 'n.d.', 'n.d.r.', 'n.e.a.', 'n.g.', 'n.h.b.c.',
<del> 'n.j.', 'n.j.b.', 'n.j.w.', 'n.l.', 'n.m.', 'n.m.m.',
<del> 'n.n.', 'n.n.b.', 'n.n.g.', 'n.n.k.', 'n.o.m.', 'n.o.t.k.',
<del> 'n.rapp.', 'n.tijd.pol.', 'n.v.', 'n.v.d.r.', 'n.v.d.v.',
<del> 'n.v.o.b.', 'n.v.t.', 'nat.besch.w.', 'nat.omb.',
<del> 'nat.pers.', 'ned.cult.r.', 'neg.verkl.', 'nhd.', 'wisk.',
<del> 'njcm-bull.', 'nl.', 'nnd.', 'no.', 'not.fisc.m.',
<del> 'not.w.', 'not.wet.', 'nr.', 'nrs.', 'nste.', 'nt.',
<del> 'numism.', 'o.', 'o.a.', 'o.b.', 'o.c.', 'o.g.', 'o.g.v.',
<del> 'o.i.', 'o.i.d.', 'o.m.', 'o.o.', 'o.o.d.', 'o.o.v.',
<del> 'o.p.', 'o.r.', 'o.regl.', 'o.s.', 'o.t.s.', 'o.t.t.',
<del> 'o.t.t.t.', 'o.t.t.z.', 'o.tk.t.', 'o.v.t.', 'o.v.t.t.',
<del> 'o.v.tk.t.', 'o.v.v.', 'ob.', 'obsv.', 'octr.',
<del> 'octr.gem.regl.', 'octr.regl.', 'oe.', 'off.pol.', 'ofra.',
<del> 'ohd.', 'omb.', 'omnil.', 'omz.', 'on.ww.', 'onderr.',
<del> 'onfrank.', 'onteig.w.', 'ontw.', 'b.w.', 'onuitg.',
<del> 'onz.', 'oorl.w.', 'op.cit.', 'opin.pa.', 'opm.', 'or.',
<del> 'ord.br.', 'ord.gem.', 'ors.', 'orth.', 'os.', 'osm.',
<del> 'ov.', 'ov.w.i.', 'ov.w.ii.', 'ov.ww.', 'overg.w.',
<del> 'overw.', 'ovkst.', 'oz.', 'p.', 'p.a.', 'p.a.o.',
<del> 'p.b.o.', 'p.e.', 'p.g.', 'p.j.', 'p.m.', 'p.m.a.', 'p.o.',
<del> 'p.o.j.t.', 'p.p.', 'p.v.', 'p.v.s.', 'pachtw.', 'pag.',
<del> 'pan.', 'pand.b.', 'pand.pér.', 'parl.gesch.',
<del> 'parl.gesch.', 'inv.', 'parl.st.', 'part.arb.', 'pas.',
<del> 'pasin.', 'pat.', 'pb.c.', 'pb.l.', 'pens.',
<del> 'pensioenverz.', 'per.ber.i.b.r.', 'per.ber.ibr.', 'pers.',
<del> 'st.', 'pft.', 'pk.', 'pktg.', 'plv.', 'po.', 'pol.',
<del> 'pol.off.', 'pol.r.', 'pol.w.', 'postbankw.', 'postw.',
<del> 'pp.', 'pr.', 'preadv.', 'pres.', 'prf.', 'prft.', 'prg.',
<del> 'prijz.w.', 'proc.', 'procesregl.', 'prof.', 'prot.',
<del> 'prov.', 'prov.b.', 'prov.instr.h.m.g.', 'prov.regl.',
<del> 'prov.verord.', 'prov.w.', 'publ.', 'pun.', 'pw.',
<del> 'q.b.d.', 'q.e.d.', 'q.q.', 'q.r.', 'r.', 'r.a.b.g.',
<del> 'r.a.c.e.', 'r.a.j.b.', 'r.b.d.c.', 'r.b.d.i.', 'r.b.s.s.',
<del> 'r.c.', 'r.c.b.', 'r.c.d.c.', 'r.c.j.b.', 'r.c.s.j.',
<del> 'r.cass.', 'r.d.c.', 'r.d.i.', 'r.d.i.d.c.', 'r.d.j.b.',
<del> 'r.d.j.p.', 'r.d.p.c.', 'r.d.s.', 'r.d.t.i.', 'r.e.',
<del> 'r.f.s.v.p.', 'r.g.a.r.', 'r.g.c.f.', 'r.g.d.c.', 'r.g.f.',
<del> 'r.g.z.', 'r.h.a.', 'r.i.c.', 'r.i.d.a.', 'r.i.e.j.',
<del> 'r.i.n.', 'r.i.s.a.', 'r.j.d.a.', 'r.j.i.', 'r.k.', 'r.l.',
<del> 'r.l.g.b.', 'r.med.', 'r.med.rechtspr.', 'r.n.b.', 'r.o.',
<del> 'r.ov.', 'r.p.', 'r.p.d.b.', 'r.p.o.t.', 'r.p.r.j.',
<del> 'r.p.s.', 'r.r.d.', 'r.r.s.', 'r.s.', 'r.s.v.p.',
<del> 'r.stvb.', 'r.t.d.f.', 'r.t.d.h.', 'r.t.l.',
<del> 'r.trim.dr.eur.', 'r.v.a.', 'r.verkb.', 'r.w.', 'r.w.d.',
<del> 'rap.ann.c.a.', 'rap.ann.c.c.', 'rap.ann.c.e.',
<del> 'rap.ann.c.s.j.', 'rap.ann.ca.', 'rap.ann.cass.',
<del> 'rap.ann.cc.', 'rap.ann.ce.', 'rap.ann.csj.', 'rapp.',
<del> 'rb.', 'rb.kh.', 'rdn.', 'rdnr.', 're.pers.', 'rec.',
<del> 'rec.c.i.j.', 'rec.c.j.c.e.', 'rec.cij.', 'rec.cjce.',
<del> 'rec.gén.enr.not.', 'rechtsk.t.', 'rechtspl.zeem.',
<del> 'rechtspr.arb.br.', 'rechtspr.b.f.e.', 'rechtspr.bfe.',
<del> 'rechtspr.soc.r.b.l.n.', 'recl.reg.', 'rect.', 'red.',
<del> 'reg.', 'reg.huiz.bew.', 'reg.w.', 'registr.w.', 'regl.',
<del> 'regl.', 'r.v.k.', 'regl.besl.', 'regl.onderr.',
<del> 'regl.r.t.', 'rep.', 'rép.fisc.', 'rép.not.', 'rep.r.j.',
<del> 'rep.rj.', 'req.', 'res.', 'resp.', 'rev.', 'rev.',
<del> 'comp.', 'rev.', 'trim.', 'civ.', 'rev.', 'trim.', 'comm.',
<del> 'rev.acc.trav.', 'rev.adm.', 'rev.b.compt.',
<del> 'rev.b.dr.const.', 'rev.b.dr.intern.', 'rev.b.séc.soc.',
<del> 'rev.banc.fin.', 'rev.comm.', 'rev.cons.prud.',
<del> 'rev.dr.b.', 'rev.dr.commun.', 'rev.dr.étr.',
<del> 'rev.dr.fam.', 'rev.dr.intern.comp.', 'rev.dr.mil.',
<del> 'rev.dr.min.', 'rev.dr.pén.', 'rev.dr.pén.mil.',
<del> 'rev.dr.rur.', 'rev.dr.u.l.b.', 'rev.dr.ulb.', 'rev.exp.',
<del> 'rev.faill.', 'rev.fisc.', 'rev.gd.', 'rev.hist.dr.',
<del> 'rev.i.p.c.', 'rev.ipc.', 'rev.not.b.',
<del> 'rev.prat.dr.comm.', 'rev.prat.not.b.', 'rev.prat.soc.',
<del> 'rev.rec.', 'rev.rw.', 'rev.trav.', 'rev.trim.d.h.',
<del> 'rev.trim.dr.fam.', 'rev.urb.', 'richtl.', 'riv.dir.int.',
<del> 'riv.dir.int.priv.proc.', 'rk.', 'rln.', 'roln.', 'rom.',
<del> 'rondz.', 'rov.', 'rtl.', 'rubr.', 'ruilv.wet.',
<del> 'rv.verdr.', 'rvkb.', 's.', 's.', 's.a.', 's.b.n.',
<del> 's.ct.', 's.d.', 's.e.c.', 's.e.et.o.', 's.e.w.',
<del> 's.exec.rept.', 's.hrg.', 's.j.b.', 's.l.', 's.l.e.a.',
<del> 's.l.n.d.', 's.p.a.', 's.s.', 's.t.', 's.t.b.', 's.v.',
<del> 's.v.p.', 'samenw.', 'sc.', 'sch.', 'scheidsr.uitspr.',
<del> 'schepel.besl.', 'secr.comm.', 'secr.gen.', 'sect.soc.',
<del> 'sess.', 'cas.', 'sir.', 'soc.', 'best.', 'soc.', 'handv.',
<del> 'soc.', 'verz.', 'soc.act.', 'soc.best.', 'soc.kron.',
<del> 'soc.r.', 'soc.sw.', 'soc.weg.', 'sofi-nr.', 'somm.',
<del> 'somm.ann.', 'sp.c.c.', 'sr.', 'ss.', 'st.doc.b.c.n.a.r.',
<del> 'st.doc.bcnar.', 'st.vw.', 'stagever.', 'stas.', 'stat.',
<del> 'stb.', 'stbl.', 'stcrt.', 'stud.dipl.', 'su.', 'subs.',
<del> 'subst.', 'succ.w.', 'suppl.', 'sv.', 'sw.', 't.', 't.a.',
<del> 't.a.a.', 't.a.n.', 't.a.p.', 't.a.s.n.', 't.a.v.',
<del> 't.a.v.w.', 't.aann.', 't.acc.', 't.agr.r.', 't.app.',
<del> 't.b.b.r.', 't.b.h.', 't.b.m.', 't.b.o.', 't.b.p.',
<del> 't.b.r.', 't.b.s.', 't.b.v.', 't.bankw.', 't.belg.not.',
<del> 't.desk.', 't.e.m.', 't.e.p.', 't.f.r.', 't.fam.',
<del> 't.fin.r.', 't.g.r.', 't.g.t.', 't.g.v.', 't.gem.',
<del> 't.gez.', 't.huur.', 't.i.n.', 't.j.k.', 't.l.l.',
<del> 't.l.v.', 't.m.', 't.m.r.', 't.m.w.', 't.mil.r.',
<del> 't.mil.strafr.', 't.not.', 't.o.', 't.o.r.b.', 't.o.v.',
<del> 't.ontv.', 't.p.r.', 't.pol.', 't.r.', 't.r.g.',
<del> 't.r.o.s.', 't.r.v.', 't.s.r.', 't.strafr.', 't.t.',
<del> 't.u.', 't.v.c.', 't.v.g.', 't.v.m.r.', 't.v.o.', 't.v.v.',
<del> 't.v.v.d.b.', 't.v.w.', 't.verz.', 't.vred.', 't.vreemd.',
<del> 't.w.', 't.w.k.', 't.w.v.', 't.w.v.r.', 't.wrr.', 't.z.',
<del> 't.z.t.', 't.z.v.', 'taalk.', 'tar.burg.z.', 'td.',
<del> 'techn.', 'telecomm.', 'toel.', 'toel.st.v.w.', 'toep.',
<del> 'toep.regl.', 'tom.', 'top.', 'trans.b.', 'transp.r.',
<del> 'trb.', 'trib.', 'trib.civ.', 'trib.gr.inst.', 'ts.',
<del> 'ts.', 'best.', 'ts.', 'verv.', 'turnh.rechtsl.', 'tvpol.',
<del> 'tvpr.', 'tvrechtsgesch.', 'tw.', 'u.', 'u.a.', 'u.a.r.',
<del> 'u.a.v.', 'u.c.', 'u.c.c.', 'u.g.', 'u.p.', 'u.s.',
<del> 'u.s.d.c.', 'uitdr.', 'uitl.w.', 'uitv.besch.div.b.',
<del> 'uitv.besl.', 'uitv.besl.', 'succ.w.', 'uitv.besl.bel.rv.',
<del> 'uitv.besl.l.b.', 'uitv.reg.', 'inv.w.', 'uitv.reg.bel.d.',
<del> 'uitv.reg.afd.verm.', 'uitv.reg.lb.', 'uitv.reg.succ.w.',
<del> 'univ.', 'univ.verkl.', 'v.', 'v.', 'chr.', 'v.a.',
<del> 'v.a.v.', 'v.c.', 'v.chr.', 'v.h.', 'v.huw.verm.', 'v.i.',
<del> 'v.i.o.', 'v.k.a.', 'v.m.', 'v.o.f.', 'v.o.n.',
<del> 'v.onderh.verpl.', 'v.p.', 'v.r.', 'v.s.o.', 'v.t.t.',
<del> 'v.t.t.t.', 'v.tk.t.', 'v.toep.r.vert.', 'v.v.b.',
<del> 'v.v.g.', 'v.v.t.', 'v.v.t.t.', 'v.v.tk.t.', 'v.w.b.',
<del> 'v.z.m.', 'vb.', 'vb.bo.', 'vbb.', 'vc.', 'vd.', 'veldw.',
<del> 'ver.k.', 'ver.verg.gem.', 'gem.comm.', 'verbr.', 'verd.',
<del> 'verdr.', 'verdr.v.', 'tek.mod.', 'verenw.', 'verg.',
<del> 'verg.fr.gem.', 'comm.', 'verkl.', 'verkl.herz.gw.',
<del> 'verl.', 'deelw.', 'vern.', 'verord.', 'vers.r.',
<del> 'versch.', 'versl.c.s.w.', 'versl.csw.', 'vert.', 'verw.',
<del> 'verz.', 'verz.w.', 'verz.wett.besl.',
<del> 'verz.wett.decr.besl.', 'vgl.', 'vid.', 'viss.w.',
<del> 'vl.parl.', 'vl.r.', 'vl.t.gez.', 'vl.w.reg.',
<del> 'vl.w.succ.', 'vlg.', 'vn.', 'vnl.', 'vnw.', 'vo.',
<del> 'vo.bl.', 'voegw.', 'vol.', 'volg.', 'volt.', 'deelw.',
<del> 'voorl.', 'voorz.', 'vord.w.', 'vorst.d.', 'vr.', 'vred.',
<del> 'vrg.', 'vnw.', 'vrijgrs.', 'vs.', 'vt.', 'vw.', 'vz.',
<del> 'vzngr.', 'vzr.', 'w.', 'w.a.', 'w.b.r.', 'w.c.h.',
<del> 'w.conf.huw.', 'w.conf.huwelijksb.', 'w.consum.kr.',
<del> 'w.f.r.', 'w.g.', 'w.gew.r.', 'w.ident.pl.', 'w.just.doc.',
<del> 'w.kh.', 'w.l.r.', 'w.l.v.', 'w.mil.straf.spr.', 'w.n.',
<del> 'w.not.ambt.', 'w.o.', 'w.o.d.huurcomm.', 'w.o.d.k.',
<del> 'w.openb.manif.', 'w.parl.', 'w.r.', 'w.reg.', 'w.succ.',
<del> 'w.u.b.', 'w.uitv.pl.verord.', 'w.v.', 'w.v.k.',
<del> 'w.v.m.s.', 'w.v.r.', 'w.v.w.', 'w.venn.', 'wac.', 'wd.',
<del> 'wetb.', 'n.v.h.', 'wgb.', 'winkelt.w.', 'wisk.',
<del> 'wka-verkl.', 'wnd.', 'won.w.', 'woningw.', 'woonr.w.',
<del> 'wrr.', 'wrr.ber.', 'wrsch.', 'ws.', 'wsch.', 'wsr.',
<del> 'wtvb.', 'ww.', 'x.d.', 'z.a.', 'z.g.', 'z.i.', 'z.j.',
<del> 'z.o.z.', 'z.p.', 'z.s.m.', 'zg.', 'zgn.', 'zn.', 'znw.',
<del> 'zr.', 'zr.', 'ms.', 'zr.ms.']
<del># fmt: on
<add>abbrevs = [
<add> "a.2d.",
<add> "a.a.",
<add> "a.a.j.b.",
<add> "a.f.t.",
<add> "a.g.j.b.",
<add> "a.h.v.",
<add> "a.h.w.",
<add> "a.hosp.",
<add> "a.i.",
<add> "a.j.b.",
<add> "a.j.t.",
<add> "a.m.",
<add> "a.m.r.",
<add> "a.p.m.",
<add> "a.p.r.",
<add> "a.p.t.",
<add> "a.s.",
<add> "a.t.d.f.",
<add> "a.u.b.",
<add> "a.v.a.",
<add> "a.w.",
<add> "aanbev.",
<add> "aanbev.comm.",
<add> "aant.",
<add> "aanv.st.",
<add> "aanw.",
<add> "vnw.",
<add> "aanw.vnw.",
<add> "abd.",
<add> "abm.",
<add> "abs.",
<add> "acc.act.",
<add> "acc.bedr.m.",
<add> "acc.bedr.t.",
<add> "achterv.",
<add> "act.dr.",
<add> "act.dr.fam.",
<add> "act.fisc.",
<add> "act.soc.",
<add> "adm.akk.",
<add> "adm.besl.",
<add> "adm.lex.",
<add> "adm.onderr.",
<add> "adm.ov.",
<add> "adv.",
<add> "adv.",
<add> "gen.",
<add> "adv.bl.",
<add> "afd.",
<add> "afl.",
<add> "aggl.verord.",
<add> "agr.",
<add> "al.",
<add> "alg.",
<add> "alg.richts.",
<add> "amén.",
<add> "ann.dr.",
<add> "ann.dr.lg.",
<add> "ann.dr.sc.pol.",
<add> "ann.ét.eur.",
<add> "ann.fac.dr.lg.",
<add> "ann.jur.créd.",
<add> "ann.jur.créd.règl.coll.",
<add> "ann.not.",
<add> "ann.parl.",
<add> "ann.prat.comm.",
<add> "app.",
<add> "arb.",
<add> "aud.",
<add> "arbbl.",
<add> "arbh.",
<add> "arbit.besl.",
<add> "arbrb.",
<add> "arr.",
<add> "arr.cass.",
<add> "arr.r.v.st.",
<add> "arr.verbr.",
<add> "arrondrb.",
<add> "art.",
<add> "artw.",
<add> "aud.",
<add> "b.",
<add> "b.",
<add> "b.&w.",
<add> "b.a.",
<add> "b.a.s.",
<add> "b.b.o.",
<add> "b.best.dep.",
<add> "b.br.ex.",
<add> "b.coll.fr.gem.comm.",
<add> "b.coll.vl.gem.comm.",
<add> "b.d.cult.r.",
<add> "b.d.gem.ex.",
<add> "b.d.gem.reg.",
<add> "b.dep.",
<add> "b.e.b.",
<add> "b.f.r.",
<add> "b.fr.gem.ex.",
<add> "b.fr.gem.reg.",
<add> "b.i.h.",
<add> "b.inl.j.d.",
<add> "b.inl.s.reg.",
<add> "b.j.",
<add> "b.l.",
<add> "b.o.z.",
<add> "b.prov.r.",
<add> "b.r.h.",
<add> "b.s.",
<add> "b.sr.",
<add> "b.stb.",
<add> "b.t.i.r.",
<add> "b.t.s.z.",
<add> "b.t.w.rev.",
<add> "b.v.",
<add> "b.ver.coll.gem.gem.comm.",
<add> "b.verg.r.b.",
<add> "b.versl.",
<add> "b.vl.ex.",
<add> "b.voorl.reg.",
<add> "b.w.",
<add> "b.w.gew.ex.",
<add> "b.z.d.g.",
<add> "b.z.v.",
<add> "bab.",
<add> "bedr.org.",
<add> "begins.",
<add> "beheersov.",
<add> "bekendm.comm.",
<add> "bel.",
<add> "bel.besch.",
<add> "bel.w.p.",
<add> "beleidsov.",
<add> "belg.",
<add> "grondw.",
<add> "ber.",
<add> "ber.w.",
<add> "besch.",
<add> "besl.",
<add> "beslagr.",
<add> "bestuurswet.",
<add> "bet.",
<add> "betr.",
<add> "betr.",
<add> "vnw.",
<add> "bevest.",
<add> "bew.",
<add> "bijbl.",
<add> "ind.",
<add> "eig.",
<add> "bijbl.n.bijdr.",
<add> "bijl.",
<add> "bijv.",
<add> "bijw.",
<add> "bijz.decr.",
<add> "bin.b.",
<add> "bkh.",
<add> "bl.",
<add> "blz.",
<add> "bm.",
<add> "bn.",
<add> "rh.",
<add> "bnw.",
<add> "bouwr.",
<add> "br.parl.",
<add> "bs.",
<add> "bull.",
<add> "bull.adm.pénit.",
<add> "bull.ass.",
<add> "bull.b.m.m.",
<add> "bull.bel.",
<add> "bull.best.strafinr.",
<add> "bull.bmm.",
<add> "bull.c.b.n.",
<add> "bull.c.n.c.",
<add> "bull.cbn.",
<add> "bull.centr.arb.",
<add> "bull.cnc.",
<add> "bull.contr.",
<add> "bull.doc.min.fin.",
<add> "bull.f.e.b.",
<add> "bull.feb.",
<add> "bull.fisc.fin.r.",
<add> "bull.i.u.m.",
<add> "bull.inf.ass.secr.soc.",
<add> "bull.inf.i.e.c.",
<add> "bull.inf.i.n.a.m.i.",
<add> "bull.inf.i.r.e.",
<add> "bull.inf.iec.",
<add> "bull.inf.inami.",
<add> "bull.inf.ire.",
<add> "bull.inst.arb.",
<add> "bull.ium.",
<add> "bull.jur.imm.",
<add> "bull.lég.b.",
<add> "bull.off.",
<add> "bull.trim.b.dr.comp.",
<add> "bull.us.",
<add> "bull.v.b.o.",
<add> "bull.vbo.",
<add> "bv.",
<add> "bw.",
<add> "bxh.",
<add> "byz.",
<add> "c.",
<add> "c.a.",
<add> "c.a.-a.",
<add> "c.a.b.g.",
<add> "c.c.",
<add> "c.c.i.",
<add> "c.c.s.",
<add> "c.conc.jur.",
<add> "c.d.e.",
<add> "c.d.p.k.",
<add> "c.e.",
<add> "c.ex.",
<add> "c.f.",
<add> "c.h.a.",
<add> "c.i.f.",
<add> "c.i.f.i.c.",
<add> "c.j.",
<add> "c.l.",
<add> "c.n.",
<add> "c.o.d.",
<add> "c.p.",
<add> "c.pr.civ.",
<add> "c.q.",
<add> "c.r.",
<add> "c.r.a.",
<add> "c.s.",
<add> "c.s.a.",
<add> "c.s.q.n.",
<add> "c.v.",
<add> "c.v.a.",
<add> "c.v.o.",
<add> "ca.",
<add> "cadeaust.",
<add> "cah.const.",
<add> "cah.dr.europ.",
<add> "cah.dr.immo.",
<add> "cah.dr.jud.",
<add> "cal.",
<add> "2d.",
<add> "cal.",
<add> "3e.",
<add> "cal.",
<add> "rprt.",
<add> "cap.",
<add> "carg.",
<add> "cass.",
<add> "cass.",
<add> "verw.",
<add> "cert.",
<add> "cf.",
<add> "ch.",
<add> "chron.",
<add> "chron.d.s.",
<add> "chron.dr.not.",
<add> "cie.",
<add> "cie.",
<add> "verz.schr.",
<add> "cir.",
<add> "circ.",
<add> "circ.z.",
<add> "cit.",
<add> "cit.loc.",
<add> "civ.",
<add> "cl.et.b.",
<add> "cmt.",
<add> "co.",
<add> "cognoss.v.",
<add> "coll.",
<add> "v.",
<add> "b.",
<add> "colp.w.",
<add> "com.",
<add> "com.",
<add> "cas.",
<add> "com.v.min.",
<add> "comm.",
<add> "comm.",
<add> "v.",
<add> "comm.bijz.ov.",
<add> "comm.erf.",
<add> "comm.fin.",
<add> "comm.ger.",
<add> "comm.handel.",
<add> "comm.pers.",
<add> "comm.pub.",
<add> "comm.straf.",
<add> "comm.v.",
<add> "comm.venn.",
<add> "comm.verz.",
<add> "comm.voor.",
<add> "comp.",
<add> "compt.w.",
<add> "computerr.",
<add> "con.m.",
<add> "concl.",
<add> "concr.",
<add> "conf.",
<add> "confl.w.",
<add> "confl.w.huwbetr.",
<add> "cons.",
<add> "conv.",
<add> "coöp.",
<add> "ver.",
<add> "corr.",
<add> "corr.bl.",
<add> "cour.fisc.",
<add> "cour.immo.",
<add> "cridon.",
<add> "crim.",
<add> "cur.",
<add> "cur.",
<add> "crt.",
<add> "curs.",
<add> "d.",
<add> "d.-g.",
<add> "d.a.",
<add> "d.a.v.",
<add> "d.b.f.",
<add> "d.c.",
<add> "d.c.c.r.",
<add> "d.d.",
<add> "d.d.p.",
<add> "d.e.t.",
<add> "d.gem.r.",
<add> "d.h.",
<add> "d.h.z.",
<add> "d.i.",
<add> "d.i.t.",
<add> "d.j.",
<add> "d.l.r.",
<add> "d.m.",
<add> "d.m.v.",
<add> "d.o.v.",
<add> "d.parl.",
<add> "d.w.z.",
<add> "dact.",
<add> "dat.",
<add> "dbesch.",
<add> "dbesl.",
<add> "dec.",
<add> "decr.",
<add> "decr.d.",
<add> "decr.fr.",
<add> "decr.vl.",
<add> "decr.w.",
<add> "def.",
<add> "dep.opv.",
<add> "dep.rtl.",
<add> "derg.",
<add> "desp.",
<add> "det.mag.",
<add> "deurw.regl.",
<add> "dez.",
<add> "dgl.",
<add> "dhr.",
<add> "disp.",
<add> "diss.",
<add> "div.",
<add> "div.act.",
<add> "div.bel.",
<add> "dl.",
<add> "dln.",
<add> "dnotz.",
<add> "doc.",
<add> "hist.",
<add> "doc.jur.b.",
<add> "doc.min.fin.",
<add> "doc.parl.",
<add> "doctr.",
<add> "dpl.",
<add> "dpl.besl.",
<add> "dr.",
<add> "dr.banc.fin.",
<add> "dr.circ.",
<add> "dr.inform.",
<add> "dr.mr.",
<add> "dr.pén.entr.",
<add> "dr.q.m.",
<add> "drs.",
<add> "ds.",
<add> "dtp.",
<add> "dwz.",
<add> "dyn.",
<add> "e.",
<add> "e.a.",
<add> "e.b.",
<add> "tek.mod.",
<add> "e.c.",
<add> "e.c.a.",
<add> "e.d.",
<add> "e.e.",
<add> "e.e.a.",
<add> "e.e.g.",
<add> "e.g.",
<add> "e.g.a.",
<add> "e.h.a.",
<add> "e.i.",
<add> "e.j.",
<add> "e.m.a.",
<add> "e.n.a.c.",
<add> "e.o.",
<add> "e.p.c.",
<add> "e.r.c.",
<add> "e.r.f.",
<add> "e.r.h.",
<add> "e.r.o.",
<add> "e.r.p.",
<add> "e.r.v.",
<add> "e.s.r.a.",
<add> "e.s.t.",
<add> "e.v.",
<add> "e.v.a.",
<add> "e.w.",
<add> "e&o.e.",
<add> "ec.pol.r.",
<add> "econ.",
<add> "ed.",
<add> "ed(s).",
<add> "eff.",
<add> "eig.",
<add> "eig.mag.",
<add> "eil.",
<add> "elektr.",
<add> "enmb.",
<add> "enz.",
<add> "err.",
<add> "etc.",
<add> "etq.",
<add> "eur.",
<add> "parl.",
<add> "eur.t.s.",
<add> "ev.",
<add> "evt.",
<add> "ex.",
<add> "ex.crim.",
<add> "exec.",
<add> "f.",
<add> "f.a.o.",
<add> "f.a.q.",
<add> "f.a.s.",
<add> "f.i.b.",
<add> "f.j.f.",
<add> "f.o.b.",
<add> "f.o.r.",
<add> "f.o.s.",
<add> "f.o.t.",
<add> "f.r.",
<add> "f.supp.",
<add> "f.suppl.",
<add> "fa.",
<add> "facs.",
<add> "fasc.",
<add> "fg.",
<add> "fid.ber.",
<add> "fig.",
<add> "fin.verh.w.",
<add> "fisc.",
<add> "fisc.",
<add> "tijdschr.",
<add> "fisc.act.",
<add> "fisc.koer.",
<add> "fl.",
<add> "form.",
<add> "foro.",
<add> "it.",
<add> "fr.",
<add> "fr.cult.r.",
<add> "fr.gem.r.",
<add> "fr.parl.",
<add> "fra.",
<add> "ft.",
<add> "g.",
<add> "g.a.",
<add> "g.a.v.",
<add> "g.a.w.v.",
<add> "g.g.d.",
<add> "g.m.t.",
<add> "g.o.",
<add> "g.omt.e.",
<add> "g.p.",
<add> "g.s.",
<add> "g.v.",
<add> "g.w.w.",
<add> "geb.",
<add> "gebr.",
<add> "gebrs.",
<add> "gec.",
<add> "gec.decr.",
<add> "ged.",
<add> "ged.st.",
<add> "gedipl.",
<add> "gedr.st.",
<add> "geh.",
<add> "gem.",
<add> "gem.",
<add> "gem.",
<add> "gem.gem.comm.",
<add> "gem.st.",
<add> "gem.stem.",
<add> "gem.w.",
<add> "gemeensch.optr.",
<add> "gemeensch.standp.",
<add> "gemeensch.strat.",
<add> "gemeent.",
<add> "gemeent.b.",
<add> "gemeent.regl.",
<add> "gemeent.verord.",
<add> "geol.",
<add> "geopp.",
<add> "gepubl.",
<add> "ger.deurw.",
<add> "ger.w.",
<add> "gerekw.",
<add> "gereq.",
<add> "gesch.",
<add> "get.",
<add> "getr.",
<add> "gev.m.",
<add> "gev.maatr.",
<add> "gew.",
<add> "ghert.",
<add> "gir.eff.verk.",
<add> "gk.",
<add> "gr.",
<add> "gramm.",
<add> "grat.w.",
<add> "grootb.w.",
<add> "grs.",
<add> "grvm.",
<add> "grw.",
<add> "gst.",
<add> "gw.",
<add> "h.a.",
<add> "h.a.v.o.",
<add> "h.b.o.",
<add> "h.e.a.o.",
<add> "h.e.g.a.",
<add> "h.e.geb.",
<add> "h.e.gestr.",
<add> "h.l.",
<add> "h.m.",
<add> "h.o.",
<add> "h.r.",
<add> "h.t.l.",
<add> "h.t.m.",
<add> "h.w.geb.",
<add> "hand.",
<add> "handelsn.w.",
<add> "handelspr.",
<add> "handelsr.w.",
<add> "handelsreg.w.",
<add> "handv.",
<add> "harv.l.rev.",
<add> "hc.",
<add> "herald.",
<add> "hert.",
<add> "herz.",
<add> "hfdst.",
<add> "hfst.",
<add> "hgrw.",
<add> "hhr.",
<add> "hist.",
<add> "hooggel.",
<add> "hoogl.",
<add> "hosp.",
<add> "hpw.",
<add> "hr.",
<add> "hr.",
<add> "ms.",
<add> "hr.ms.",
<add> "hregw.",
<add> "hrg.",
<add> "hst.",
<add> "huis.just.",
<add> "huisv.w.",
<add> "huurbl.",
<add> "hv.vn.",
<add> "hw.",
<add> "hyp.w.",
<add> "i.b.s.",
<add> "i.c.",
<add> "i.c.m.h.",
<add> "i.e.",
<add> "i.f.",
<add> "i.f.p.",
<add> "i.g.v.",
<add> "i.h.",
<add> "i.h.a.",
<add> "i.h.b.",
<add> "i.l.pr.",
<add> "i.o.",
<add> "i.p.o.",
<add> "i.p.r.",
<add> "i.p.v.",
<add> "i.pl.v.",
<add> "i.r.d.i.",
<add> "i.s.m.",
<add> "i.t.t.",
<add> "i.v.",
<add> "i.v.m.",
<add> "i.v.s.",
<add> "i.w.tr.",
<add> "i.z.",
<add> "ib.",
<add> "ibid.",
<add> "icip-ing.cons.",
<add> "iem.",
<add> "inc.",
<add> "indic.soc.",
<add> "indiv.",
<add> "inf.",
<add> "inf.i.d.a.c.",
<add> "inf.idac.",
<add> "inf.r.i.z.i.v.",
<add> "inf.riziv.",
<add> "inf.soc.secr.",
<add> "ing.",
<add> "ing.",
<add> "cons.",
<add> "ing.cons.",
<add> "inst.",
<add> "int.",
<add> "int.",
<add> "rechtsh.",
<add> "strafz.",
<add> "interm.",
<add> "intern.fisc.act.",
<add> "intern.vervoerr.",
<add> "inv.",
<add> "inv.",
<add> "f.",
<add> "inv.w.",
<add> "inv.wet.",
<add> "invord.w.",
<add> "inz.",
<add> "ir.",
<add> "irspr.",
<add> "iwtr.",
<add> "j.",
<add> "j.-cl.",
<add> "j.c.b.",
<add> "j.c.e.",
<add> "j.c.fl.",
<add> "j.c.j.",
<add> "j.c.p.",
<add> "j.d.e.",
<add> "j.d.f.",
<add> "j.d.s.c.",
<add> "j.dr.jeun.",
<add> "j.j.d.",
<add> "j.j.p.",
<add> "j.j.pol.",
<add> "j.l.",
<add> "j.l.m.b.",
<add> "j.l.o.",
<add> "j.p.a.",
<add> "j.r.s.",
<add> "j.t.",
<add> "j.t.d.e.",
<add> "j.t.dr.eur.",
<add> "j.t.o.",
<add> "j.t.t.",
<add> "jaarl.",
<add> "jb.hand.",
<add> "jb.kred.",
<add> "jb.kred.c.s.",
<add> "jb.l.r.b.",
<add> "jb.lrb.",
<add> "jb.markt.",
<add> "jb.mens.",
<add> "jb.t.r.d.",
<add> "jb.trd.",
<add> "jeugdrb.",
<add> "jeugdwerkg.w.",
<add> "jhr.",
<add> "jg.",
<add> "jis.",
<add> "jl.",
<add> "journ.jur.",
<add> "journ.prat.dr.fisc.fin.",
<add> "journ.proc.",
<add> "jr.",
<add> "jrg.",
<add> "jur.",
<add> "jur.comm.fl.",
<add> "jur.dr.soc.b.l.n.",
<add> "jur.f.p.e.",
<add> "jur.fpe.",
<add> "jur.niv.",
<add> "jur.trav.brux.",
<add> "jurambt.",
<add> "jv.cass.",
<add> "jv.h.r.j.",
<add> "jv.hrj.",
<add> "jw.",
<add> "k.",
<add> "k.",
<add> "k.b.",
<add> "k.g.",
<add> "k.k.",
<add> "k.m.b.o.",
<add> "k.o.o.",
<add> "k.v.k.",
<add> "k.v.v.v.",
<add> "kadasterw.",
<add> "kaderb.",
<add> "kador.",
<add> "kbo-nr.",
<add> "kg.",
<add> "kh.",
<add> "kiesw.",
<add> "kind.bes.v.",
<add> "kkr.",
<add> "kon.",
<add> "koopv.",
<add> "kr.",
<add> "krankz.w.",
<add> "ksbel.",
<add> "kt.",
<add> "ktg.",
<add> "ktr.",
<add> "kvdm.",
<add> "kw.r.",
<add> "kymr.",
<add> "kzr.",
<add> "kzw.",
<add> "l.",
<add> "l.b.",
<add> "l.b.o.",
<add> "l.bas.",
<add> "l.c.",
<add> "l.gew.",
<add> "l.j.",
<add> "l.k.",
<add> "l.l.",
<add> "l.o.",
<add> "l.p.",
<add> "l.r.b.",
<add> "l.u.v.i.",
<add> "l.v.r.",
<add> "l.v.w.",
<add> "l.w.",
<add> "l'exp.-compt.b..",
<add> "l’exp.-compt.b.",
<add> "landinr.w.",
<add> "landscrt.",
<add> "lat.",
<add> "law.ed.",
<add> "lett.",
<add> "levensverz.",
<add> "lgrs.",
<add> "lidw.",
<add> "limb.rechtsl.",
<add> "lit.",
<add> "litt.",
<add> "liw.",
<add> "liwet.",
<add> "lk.",
<add> "ll.",
<add> "ll.(l.)l.r.",
<add> "loonw.",
<add> "losbl.",
<add> "ltd.",
<add> "luchtv.",
<add> "luchtv.w.",
<add> "m.",
<add> "m.",
<add> "not.",
<add> "m.a.v.o.",
<add> "m.a.w.",
<add> "m.b.",
<add> "m.b.o.",
<add> "m.b.r.",
<add> "m.b.t.",
<add> "m.d.g.o.",
<add> "m.e.a.o.",
<add> "m.e.r.",
<add> "m.h.",
<add> "m.h.d.",
<add> "m.i.v.",
<add> "m.j.t.",
<add> "m.k.",
<add> "m.m.",
<add> "m.m.a.",
<add> "m.m.h.h.",
<add> "m.m.v.",
<add> "m.n.",
<add> "m.not.fisc.",
<add> "m.nt.",
<add> "m.o.",
<add> "m.r.",
<add> "m.s.a.",
<add> "m.u.p.",
<add> "m.v.a.",
<add> "m.v.h.n.",
<add> "m.v.t.",
<add> "m.z.",
<add> "maatr.teboekgest.luchtv.",
<add> "maced.",
<add> "mand.",
<add> "max.",
<add> "mbl.not.",
<add> "me.",
<add> "med.",
<add> "med.",
<add> "v.b.o.",
<add> "med.b.u.f.r.",
<add> "med.bufr.",
<add> "med.vbo.",
<add> "meerv.",
<add> "meetbr.w.",
<add> "mej.",
<add> "mevr.",
<add> "mém.adm.",
<add> "mgr.",
<add> "mgrs.",
<add> "mhd.",
<add> "mi.verantw.",
<add> "mil.",
<add> "mil.bed.",
<add> "mil.ger.",
<add> "min.",
<add> "min.",
<add> "aanbev.",
<add> "min.",
<add> "circ.",
<add> "min.",
<add> "fin.",
<add> "min.j.omz.",
<add> "min.just.circ.",
<add> "mitt.",
<add> "mln.",
<add> "mnd.",
<add> "mod.",
<add> "mon.",
<add> "mouv.comm.",
<add> "mr.",
<add> "ms.",
<add> "muz.",
<add> "mv.",
<add> "n.",
<add> "chr.",
<add> "n.a.",
<add> "n.a.g.",
<add> "n.a.v.",
<add> "n.b.",
<add> "n.c.",
<add> "n.chr.",
<add> "n.d.",
<add> "n.d.r.",
<add> "n.e.a.",
<add> "n.g.",
<add> "n.h.b.c.",
<add> "n.j.",
<add> "n.j.b.",
<add> "n.j.w.",
<add> "n.l.",
<add> "n.m.",
<add> "n.m.m.",
<add> "n.n.",
<add> "n.n.b.",
<add> "n.n.g.",
<add> "n.n.k.",
<add> "n.o.m.",
<add> "n.o.t.k.",
<add> "n.rapp.",
<add> "n.tijd.pol.",
<add> "n.v.",
<add> "n.v.d.r.",
<add> "n.v.d.v.",
<add> "n.v.o.b.",
<add> "n.v.t.",
<add> "nat.besch.w.",
<add> "nat.omb.",
<add> "nat.pers.",
<add> "ned.",
<add> "ned.cult.r.",
<add> "neg.verkl.",
<add> "nhd.",
<add> "wisk.",
<add> "njcm-bull.",
<add> "nl.",
<add> "nnd.",
<add> "no.",
<add> "not.fisc.m.",
<add> "not.w.",
<add> "not.wet.",
<add> "nr.",
<add> "nrs.",
<add> "nste.",
<add> "nt.",
<add> "numism.",
<add> "o.",
<add> "o.a.",
<add> "o.b.",
<add> "o.c.",
<add> "o.g.",
<add> "o.g.v.",
<add> "o.i.",
<add> "o.i.d.",
<add> "o.m.",
<add> "o.o.",
<add> "o.o.d.",
<add> "o.o.v.",
<add> "o.p.",
<add> "o.r.",
<add> "o.regl.",
<add> "o.s.",
<add> "o.t.s.",
<add> "o.t.t.",
<add> "o.t.t.t.",
<add> "o.t.t.z.",
<add> "o.tk.t.",
<add> "o.v.t.",
<add> "o.v.t.t.",
<add> "o.v.tk.t.",
<add> "o.v.v.",
<add> "ob.",
<add> "obsv.",
<add> "octr.",
<add> "octr.gem.regl.",
<add> "octr.regl.",
<add> "oe.",
<add> "off.pol.",
<add> "ofra.",
<add> "ohd.",
<add> "omb.",
<add> "omnil.",
<add> "omz.",
<add> "on.ww.",
<add> "onderr.",
<add> "onfrank.",
<add> "onteig.w.",
<add> "ontw.",
<add> "b.w.",
<add> "onuitg.",
<add> "onz.",
<add> "oorl.w.",
<add> "op.cit.",
<add> "opin.pa.",
<add> "opm.",
<add> "or.",
<add> "ord.br.",
<add> "ord.gem.",
<add> "ors.",
<add> "orth.",
<add> "os.",
<add> "osm.",
<add> "ov.",
<add> "ov.w.i.",
<add> "ov.w.ii.",
<add> "ov.ww.",
<add> "overg.w.",
<add> "overw.",
<add> "ovkst.",
<add> "oz.",
<add> "p.",
<add> "p.a.",
<add> "p.a.o.",
<add> "p.b.o.",
<add> "p.e.",
<add> "p.g.",
<add> "p.j.",
<add> "p.m.",
<add> "p.m.a.",
<add> "p.o.",
<add> "p.o.j.t.",
<add> "p.p.",
<add> "p.v.",
<add> "p.v.s.",
<add> "pachtw.",
<add> "pag.",
<add> "pan.",
<add> "pand.b.",
<add> "pand.pér.",
<add> "parl.gesch.",
<add> "parl.gesch.",
<add> "inv.",
<add> "parl.st.",
<add> "part.arb.",
<add> "pas.",
<add> "pasin.",
<add> "pat.",
<add> "pb.c.",
<add> "pb.l.",
<add> "pct.",
<add> "pens.",
<add> "pensioenverz.",
<add> "per.ber.i.b.r.",
<add> "per.ber.ibr.",
<add> "pers.",
<add> "st.",
<add> "pft.",
<add> "pk.",
<add> "pktg.",
<add> "plv.",
<add> "po.",
<add> "pol.",
<add> "pol.off.",
<add> "pol.r.",
<add> "pol.w.",
<add> "postbankw.",
<add> "postw.",
<add> "pp.",
<add> "pr.",
<add> "preadv.",
<add> "pres.",
<add> "prf.",
<add> "prft.",
<add> "prg.",
<add> "prijz.w.",
<add> "proc.",
<add> "procesregl.",
<add> "prof.",
<add> "prot.",
<add> "prov.",
<add> "prov.b.",
<add> "prov.instr.h.m.g.",
<add> "prov.regl.",
<add> "prov.verord.",
<add> "prov.w.",
<add> "publ.",
<add> "pun.",
<add> "pw.",
<add> "q.b.d.",
<add> "q.e.d.",
<add> "q.q.",
<add> "q.r.",
<add> "r.",
<add> "r.a.b.g.",
<add> "r.a.c.e.",
<add> "r.a.j.b.",
<add> "r.b.d.c.",
<add> "r.b.d.i.",
<add> "r.b.s.s.",
<add> "r.c.",
<add> "r.c.b.",
<add> "r.c.d.c.",
<add> "r.c.j.b.",
<add> "r.c.s.j.",
<add> "r.cass.",
<add> "r.d.c.",
<add> "r.d.i.",
<add> "r.d.i.d.c.",
<add> "r.d.j.b.",
<add> "r.d.j.p.",
<add> "r.d.p.c.",
<add> "r.d.s.",
<add> "r.d.t.i.",
<add> "r.e.",
<add> "r.f.s.v.p.",
<add> "r.g.a.r.",
<add> "r.g.c.f.",
<add> "r.g.d.c.",
<add> "r.g.f.",
<add> "r.g.z.",
<add> "r.h.a.",
<add> "r.i.c.",
<add> "r.i.d.a.",
<add> "r.i.e.j.",
<add> "r.i.n.",
<add> "r.i.s.a.",
<add> "r.j.d.a.",
<add> "r.j.i.",
<add> "r.k.",
<add> "r.l.",
<add> "r.l.g.b.",
<add> "r.med.",
<add> "r.med.rechtspr.",
<add> "r.n.b.",
<add> "r.o.",
<add> "r.ov.",
<add> "r.p.",
<add> "r.p.d.b.",
<add> "r.p.o.t.",
<add> "r.p.r.j.",
<add> "r.p.s.",
<add> "r.r.d.",
<add> "r.r.s.",
<add> "r.s.",
<add> "r.s.v.p.",
<add> "r.stvb.",
<add> "r.t.d.f.",
<add> "r.t.d.h.",
<add> "r.t.l.",
<add> "r.trim.dr.eur.",
<add> "r.v.a.",
<add> "r.verkb.",
<add> "r.w.",
<add> "r.w.d.",
<add> "rap.ann.c.a.",
<add> "rap.ann.c.c.",
<add> "rap.ann.c.e.",
<add> "rap.ann.c.s.j.",
<add> "rap.ann.ca.",
<add> "rap.ann.cass.",
<add> "rap.ann.cc.",
<add> "rap.ann.ce.",
<add> "rap.ann.csj.",
<add> "rapp.",
<add> "rb.",
<add> "rb.kh.",
<add> "rdn.",
<add> "rdnr.",
<add> "re.pers.",
<add> "rec.",
<add> "rec.c.i.j.",
<add> "rec.c.j.c.e.",
<add> "rec.cij.",
<add> "rec.cjce.",
<add> "rec.gén.enr.not.",
<add> "rechtsk.t.",
<add> "rechtspl.zeem.",
<add> "rechtspr.arb.br.",
<add> "rechtspr.b.f.e.",
<add> "rechtspr.bfe.",
<add> "rechtspr.soc.r.b.l.n.",
<add> "recl.reg.",
<add> "rect.",
<add> "red.",
<add> "reg.",
<add> "reg.huiz.bew.",
<add> "reg.w.",
<add> "registr.w.",
<add> "regl.",
<add> "regl.",
<add> "r.v.k.",
<add> "regl.besl.",
<add> "regl.onderr.",
<add> "regl.r.t.",
<add> "rep.",
<add> "rép.fisc.",
<add> "rép.not.",
<add> "rep.r.j.",
<add> "rep.rj.",
<add> "req.",
<add> "res.",
<add> "resp.",
<add> "rev.",
<add> "rev.",
<add> "comp.",
<add> "rev.",
<add> "trim.",
<add> "civ.",
<add> "rev.",
<add> "trim.",
<add> "comm.",
<add> "rev.acc.trav.",
<add> "rev.adm.",
<add> "rev.b.compt.",
<add> "rev.b.dr.const.",
<add> "rev.b.dr.intern.",
<add> "rev.b.séc.soc.",
<add> "rev.banc.fin.",
<add> "rev.comm.",
<add> "rev.cons.prud.",
<add> "rev.dr.b.",
<add> "rev.dr.commun.",
<add> "rev.dr.étr.",
<add> "rev.dr.fam.",
<add> "rev.dr.intern.comp.",
<add> "rev.dr.mil.",
<add> "rev.dr.min.",
<add> "rev.dr.pén.",
<add> "rev.dr.pén.mil.",
<add> "rev.dr.rur.",
<add> "rev.dr.u.l.b.",
<add> "rev.dr.ulb.",
<add> "rev.exp.",
<add> "rev.faill.",
<add> "rev.fisc.",
<add> "rev.gd.",
<add> "rev.hist.dr.",
<add> "rev.i.p.c.",
<add> "rev.ipc.",
<add> "rev.not.b.",
<add> "rev.prat.dr.comm.",
<add> "rev.prat.not.b.",
<add> "rev.prat.soc.",
<add> "rev.rec.",
<add> "rev.rw.",
<add> "rev.trav.",
<add> "rev.trim.d.h.",
<add> "rev.trim.dr.fam.",
<add> "rev.urb.",
<add> "richtl.",
<add> "riv.dir.int.",
<add> "riv.dir.int.priv.proc.",
<add> "rk.",
<add> "rln.",
<add> "roln.",
<add> "rom.",
<add> "rondz.",
<add> "rov.",
<add> "rtl.",
<add> "rubr.",
<add> "ruilv.wet.",
<add> "rv.verdr.",
<add> "rvkb.",
<add> "s.",
<add> "s.",
<add> "s.a.",
<add> "s.b.n.",
<add> "s.ct.",
<add> "s.d.",
<add> "s.e.c.",
<add> "s.e.et.o.",
<add> "s.e.w.",
<add> "s.exec.rept.",
<add> "s.hrg.",
<add> "s.j.b.",
<add> "s.l.",
<add> "s.l.e.a.",
<add> "s.l.n.d.",
<add> "s.p.a.",
<add> "s.s.",
<add> "s.t.",
<add> "s.t.b.",
<add> "s.v.",
<add> "s.v.p.",
<add> "samenw.",
<add> "sc.",
<add> "sch.",
<add> "scheidsr.uitspr.",
<add> "schepel.besl.",
<add> "sec.",
<add> "secr.comm.",
<add> "secr.gen.",
<add> "sect.soc.",
<add> "sess.",
<add> "cas.",
<add> "sir.",
<add> "soc.",
<add> "best.",
<add> "soc.",
<add> "handv.",
<add> "soc.",
<add> "verz.",
<add> "soc.act.",
<add> "soc.best.",
<add> "soc.kron.",
<add> "soc.r.",
<add> "soc.sw.",
<add> "soc.weg.",
<add> "sofi-nr.",
<add> "somm.",
<add> "somm.ann.",
<add> "sp.c.c.",
<add> "sr.",
<add> "ss.",
<add> "st.doc.b.c.n.a.r.",
<add> "st.doc.bcnar.",
<add> "st.vw.",
<add> "stagever.",
<add> "stas.",
<add> "stat.",
<add> "stb.",
<add> "stbl.",
<add> "stcrt.",
<add> "stud.dipl.",
<add> "su.",
<add> "subs.",
<add> "subst.",
<add> "succ.w.",
<add> "suppl.",
<add> "sv.",
<add> "sw.",
<add> "t.",
<add> "t.a.",
<add> "t.a.a.",
<add> "t.a.n.",
<add> "t.a.p.",
<add> "t.a.s.n.",
<add> "t.a.v.",
<add> "t.a.v.w.",
<add> "t.aann.",
<add> "t.acc.",
<add> "t.agr.r.",
<add> "t.app.",
<add> "t.b.b.r.",
<add> "t.b.h.",
<add> "t.b.m.",
<add> "t.b.o.",
<add> "t.b.p.",
<add> "t.b.r.",
<add> "t.b.s.",
<add> "t.b.v.",
<add> "t.bankw.",
<add> "t.belg.not.",
<add> "t.desk.",
<add> "t.e.m.",
<add> "t.e.p.",
<add> "t.f.r.",
<add> "t.fam.",
<add> "t.fin.r.",
<add> "t.g.r.",
<add> "t.g.t.",
<add> "t.g.v.",
<add> "t.gem.",
<add> "t.gez.",
<add> "t.huur.",
<add> "t.i.n.",
<add> "t.j.k.",
<add> "t.l.l.",
<add> "t.l.v.",
<add> "t.m.",
<add> "t.m.r.",
<add> "t.m.w.",
<add> "t.mil.r.",
<add> "t.mil.strafr.",
<add> "t.not.",
<add> "t.o.",
<add> "t.o.r.b.",
<add> "t.o.v.",
<add> "t.ontv.",
<add> "t.p.r.",
<add> "t.pol.",
<add> "t.r.",
<add> "t.r.g.",
<add> "t.r.o.s.",
<add> "t.r.v.",
<add> "t.s.r.",
<add> "t.strafr.",
<add> "t.t.",
<add> "t.u.",
<add> "t.v.c.",
<add> "t.v.g.",
<add> "t.v.m.r.",
<add> "t.v.o.",
<add> "t.v.v.",
<add> "t.v.v.d.b.",
<add> "t.v.w.",
<add> "t.verz.",
<add> "t.vred.",
<add> "t.vreemd.",
<add> "t.w.",
<add> "t.w.k.",
<add> "t.w.v.",
<add> "t.w.v.r.",
<add> "t.wrr.",
<add> "t.z.",
<add> "t.z.t.",
<add> "t.z.v.",
<add> "taalk.",
<add> "tar.burg.z.",
<add> "td.",
<add> "techn.",
<add> "telecomm.",
<add> "th.",
<add> "toel.",
<add> "toel.st.v.w.",
<add> "toep.",
<add> "toep.regl.",
<add> "tom.",
<add> "top.",
<add> "trans.b.",
<add> "transp.r.",
<add> "trb.",
<add> "trib.",
<add> "trib.civ.",
<add> "trib.gr.inst.",
<add> "ts.",
<add> "ts.",
<add> "best.",
<add> "ts.",
<add> "verv.",
<add> "turnh.rechtsl.",
<add> "tvpol.",
<add> "tvpr.",
<add> "tvrechtsgesch.",
<add> "tw.",
<add> "u.",
<add> "u.a.",
<add> "u.a.r.",
<add> "u.a.v.",
<add> "u.c.",
<add> "u.c.c.",
<add> "u.g.",
<add> "u.p.",
<add> "u.s.",
<add> "u.s.d.c.",
<add> "uitdr.",
<add> "uitl.w.",
<add> "uitv.besch.div.b.",
<add> "uitv.besl.",
<add> "uitv.besl.",
<add> "succ.w.",
<add> "uitv.besl.bel.rv.",
<add> "uitv.besl.l.b.",
<add> "uitv.reg.",
<add> "inv.w.",
<add> "uitv.reg.bel.d.",
<add> "uitv.reg.afd.verm.",
<add> "uitv.reg.lb.",
<add> "uitv.reg.succ.w.",
<add> "univ.",
<add> "univ.verkl.",
<add> "v.",
<add> "v.",
<add> "chr.",
<add> "v.a.",
<add> "v.a.v.",
<add> "v.c.",
<add> "v.C.",
<add> "v.Chr.",
<add> "v.chr.",
<add> "v.d.",
<add> "v.h.",
<add> "v.huw.verm.",
<add> "v.i.",
<add> "v.i.o.",
<add> "v.j.",
<add> "v.k.a.",
<add> "v.m.",
<add> "v.o.f.",
<add> "v.o.n.",
<add> "v.onderh.verpl.",
<add> "v.p.",
<add> "v.r.",
<add> "v.s.o.",
<add> "v.t.t.",
<add> "v.t.t.t.",
<add> "v.tk.t.",
<add> "v.toep.r.vert.",
<add> "v.v.b.",
<add> "v.v.g.",
<add> "v.v.t.",
<add> "v.v.t.t.",
<add> "v.v.tk.t.",
<add> "v.w.b.",
<add> "v.z.m.",
<add> "vb.",
<add> "vb.bo.",
<add> "vbb.",
<add> "vc.",
<add> "vd.",
<add> "veldw.",
<add> "ver.k.",
<add> "ver.verg.gem.",
<add> "gem.comm.",
<add> "verbr.",
<add> "verd.",
<add> "verdr.",
<add> "verdr.v.",
<add> "tek.mod.",
<add> "verenw.",
<add> "verg.",
<add> "verg.fr.gem.",
<add> "comm.",
<add> "verkl.",
<add> "verkl.herz.gw.",
<add> "verl.",
<add> "deelw.",
<add> "vern.",
<add> "verord.",
<add> "vers.r.",
<add> "versch.",
<add> "versl.c.s.w.",
<add> "versl.csw.",
<add> "vert.",
<add> "verw.",
<add> "verz.",
<add> "verz.w.",
<add> "verz.wett.besl.",
<add> "verz.wett.decr.besl.",
<add> "vgl.",
<add> "vid.",
<add> "viss.w.",
<add> "vl.parl.",
<add> "vl.r.",
<add> "vl.t.gez.",
<add> "vl.w.reg.",
<add> "vl.w.succ.",
<add> "vlg.",
<add> "vn.",
<add> "vnl.",
<add> "vnw.",
<add> "vo.",
<add> "vo.bl.",
<add> "voegw.",
<add> "vol.",
<add> "volg.",
<add> "volt.",
<add> "deelw.",
<add> "voorl.",
<add> "voorz.",
<add> "vord.w.",
<add> "vorst.d.",
<add> "vr.",
<add> "vred.",
<add> "vrg.",
<add> "vnw.",
<add> "vrijgrs.",
<add> "vs.",
<add> "vt.",
<add> "vw.",
<add> "vz.",
<add> "vzngr.",
<add> "vzr.",
<add> "w.",
<add> "w.a.",
<add> "w.b.r.",
<add> "w.c.h.",
<add> "w.conf.huw.",
<add> "w.conf.huwelijksb.",
<add> "w.consum.kr.",
<add> "w.f.r.",
<add> "w.g.",
<add> "w.gew.r.",
<add> "w.ident.pl.",
<add> "w.just.doc.",
<add> "w.kh.",
<add> "w.l.r.",
<add> "w.l.v.",
<add> "w.mil.straf.spr.",
<add> "w.n.",
<add> "w.not.ambt.",
<add> "w.o.",
<add> "w.o.d.huurcomm.",
<add> "w.o.d.k.",
<add> "w.openb.manif.",
<add> "w.parl.",
<add> "w.r.",
<add> "w.reg.",
<add> "w.succ.",
<add> "w.u.b.",
<add> "w.uitv.pl.verord.",
<add> "w.v.",
<add> "w.v.k.",
<add> "w.v.m.s.",
<add> "w.v.r.",
<add> "w.v.w.",
<add> "w.venn.",
<add> "wac.",
<add> "wd.",
<add> "wetb.",
<add> "n.v.h.",
<add> "wgb.",
<add> "winkelt.w.",
<add> "wisk.",
<add> "wka-verkl.",
<add> "wnd.",
<add> "won.w.",
<add> "woningw.",
<add> "woonr.w.",
<add> "wrr.",
<add> "wrr.ber.",
<add> "wrsch.",
<add> "ws.",
<add> "wsch.",
<add> "wsr.",
<add> "wtvb.",
<add> "ww.",
<add> "x.d.",
<add> "z.a.",
<add> "z.g.",
<add> "z.i.",
<add> "z.j.",
<add> "z.o.z.",
<add> "z.p.",
<add> "z.s.m.",
<add> "zg.",
<add> "zgn.",
<add> "zn.",
<add> "znw.",
<add> "zr.",
<add> "zr.",
<add> "ms.",
<add> "zr.ms.",
<add> "'m",
<add> "'n",
<add> "'ns",
<add> "'s",
<add> "'t",
<add>]
<ide>
<ide> _exc = {}
<ide> for orth in abbrevs: | 3 |
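The hunk above ends just before the loop body, so for orientation here is a minimal sketch of how spaCy tokenizer-exception lists of this kind are usually consumed, assuming the `abbrevs` list defined above; the use of ORTH and the one-token mapping are assumptions based on spaCy's conventions, not part of this diff.

from spacy.symbols import ORTH

_exc = {}
for orth in abbrevs:
    # Map each abbreviation to a single token whose surface form is kept
    # as-is, so the trailing period is not split off by the tokenizer.
    _exc[orth] = [{ORTH: orth}]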
Python | Python | resolve issue in init file | 8852ee4d1bfd3f0eacf06679ce52dccf9d9a262a | <ide><path>glances/__init__.py
<ide> import sys
<ide>
<ide> # Global name
<del><<<<<<< HEAD
<del>__version__ = '3.0.1_beta'
<del>=======
<del>__version__ = '3.0.1'
<del>>>>>>>> hotfix/issue1314
<add>__version__ = '3.1.0_beta'
<ide> __author__ = 'Nicolas Hennion <[email protected]>'
<ide> __license__ = 'LGPLv3'
<ide> | 1 |
Javascript | Javascript | change streams to always emit close by default | f0d2df41f8716670435b284e987b2fcc23221947 | <ide><path>lib/internal/fs/streams.js
<ide> function ReadStream(path, options) {
<ide> if (options.highWaterMark === undefined)
<ide> options.highWaterMark = 64 * 1024;
<ide>
<del> // For backwards compat do not emit close on destroy.
<del> if (options.emitClose === undefined) {
<del> options.emitClose = false;
<del> }
<ide> if (options.autoDestroy === undefined) {
<ide> options.autoDestroy = false;
<ide> }
<ide> ReadStream.prototype._destroy = function(err, cb) {
<ide>
<ide> function closeFsStream(stream, cb, err) {
<ide> stream[kFs].close(stream.fd, (er) => {
<del> er = er || err;
<del> cb(er);
<ide> stream.closed = true;
<del> const s = stream._writableState || stream._readableState;
<del> if (!er && !s.emitClose)
<del> stream.emit('close');
<add> cb(er || err);
<ide> });
<ide>
<ide> stream.fd = null;
<ide> function WriteStream(path, options) {
<ide> // Only buffers are supported.
<ide> options.decodeStrings = true;
<ide>
<del> // For backwards compat do not emit close on destroy.
<del> if (options.emitClose === undefined) {
<del> options.emitClose = false;
<del> }
<ide> if (options.autoDestroy === undefined) {
<ide> options.autoDestroy = false;
<ide> }
<ide><path>test/parallel/test-fs-stream-destroy-emit-error.js
<ide> tmpdir.refresh();
<ide>
<ide> {
<ide> const stream = fs.createReadStream(__filename);
<del> stream.on('close', common.mustNotCall());
<add> stream.on('close', common.mustCall());
<ide> test(stream);
<ide> }
<ide>
<ide> {
<ide> const stream = fs.createWriteStream(`${tmpdir.path}/dummy`);
<del> stream.on('close', common.mustNotCall());
<add> stream.on('close', common.mustCall());
<ide> test(stream);
<ide> }
<ide> | 2 |
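A minimal sketch of the behaviour the updated test asserts, assuming a Node.js build that includes this patch: a file stream now emits 'close' after being destroyed, without passing emitClose explicitly.

'use strict';
const fs = require('fs');

const stream = fs.createReadStream(__filename);

// With emitClose now defaulting to true for fs streams, this listener
// fires once the stream is destroyed and its fd has been closed.
stream.on('close', () => {
  console.log('read stream closed');
});

stream.destroy();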
PHP | PHP | add methods to change unfurl options | d923504dc66a3f88759cab2023abfaf715596d28 | <ide><path>src/Illuminate/Notifications/Messages/SlackMessage.php
<ide> class SlackMessage
<ide> */
<ide> public $linkNames = 0;
<ide>
<add> /**
<add> * Indicates if you want a preview of links inlined in the message.
<add> *
<add> * @var bool
<add> */
<add> public $unfurlLinks = true;
<add>
<add> /**
<add> * Indicates if you want a preview of links to media inlined in the message.
<add> *
<add> * @var bool
<add> */
<add> public $unfurlMedia = true;
<add>
<ide> /**
<ide> * The message's attachments.
<ide> *
<ide> public function linkNames()
<ide> return $this;
<ide> }
<ide>
<add> /**
<add> * Find and link channel names and usernames.
<add> *
<add> * @return $this
<add> */
<add> public function unfurlLinks($unfurl = true)
<add> {
<add> $this->unfurlLinks = $unfurl;
<add>
<add> return $this;
<add> }
<add>
<add> /**
<add> * Find and link channel names and usernames.
<add> *
<add> * @return $this
<add> */
<add> public function unfurlMedia($unfurl = true)
<add> {
<add> $this->unfurlMedia = $unfurl;
<add>
<add> return $this;
<add> }
<add>
<ide> /**
<ide> * Set additional request options for the Guzzle HTTP client.
<ide> * | 1 |
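A minimal usage sketch of the new fluent setters, assuming a Laravel application where Illuminate\Notifications is autoloaded; the message content and URL are illustrative only.

<?php

use Illuminate\Notifications\Messages\SlackMessage;

// Build a message that suppresses both link and media previews.
$message = (new SlackMessage)
    ->content('Release notes: https://example.com/releases/1.0')
    ->unfurlLinks(false)   // no inline preview for plain links
    ->unfurlMedia(false);  // no inline preview for media links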
Ruby | Ruby | handle uncommitted formulae." | e793e526611ca4cbc6d0155af8c596c4e826fc41 | <ide><path>Library/Homebrew/formula_versions.rb
<ide> def initialize(formula, options = {})
<ide> @repository = formula.tap.path
<ide> @entry_name = @path.relative_path_from(repository).to_s
<ide> @max_depth = options[:max_depth]
<del> @current_formula = formula
<ide> end
<ide>
<ide> def rev_list(branch)
<ide> def version_attributes_map(attributes, branch)
<ide>
<ide> attributes.each do |attribute|
<ide> attributes_map[attribute] ||= {}
<del> # Set the attributes for the current formula in case it's not been
<del> # committed yet.
<del> set_attribute_map(attributes_map[attribute], @current_formula, attribute)
<ide> end
<ide>
<ide> rev_list(branch) do |rev|
<ide> formula_at_revision(rev) do |f|
<ide> attributes.each do |attribute|
<del> set_attribute_map(attributes_map[attribute], f, attribute)
<add> map = attributes_map[attribute]
<add> if f.stable
<add> map[:stable] ||= {}
<add> map[:stable][f.stable.version] ||= []
<add> map[:stable][f.stable.version] << f.send(attribute)
<add> end
<add> next unless f.devel
<add> map[:devel] ||= {}
<add> map[:devel][f.devel.version] ||= []
<add> map[:devel][f.devel.version] << f.send(attribute)
<ide> end
<ide> end
<ide> end
<ide>
<ide> attributes_map
<ide> end
<del>
<del> private
<del>
<del> def set_attribute_map(map, f, attribute)
<del> if f.stable
<del> map[:stable] ||= {}
<del> map[:stable][f.stable.version] ||= []
<del> map[:stable][f.stable.version] << f.send(attribute)
<del> end
<del> return unless f.devel
<del> map[:devel] ||= {}
<del> map[:devel][f.devel.version] ||= []
<del> map[:devel][f.devel.version] << f.send(attribute)
<del> end
<ide> end | 1 |
Text | Text | fix freebsd development dependencies | ce17492101b54dab8f3fa5934618679c9671f582 | <ide><path>guides/source/development_dependencies_install.md
<ide> use MariaDB instead (see [this announcement](https://www.archlinux.org/news/mari
<ide> To install all run:
<ide>
<ide> ```bash
<del>$ pkg install sqlite3 mysql80-client mysql80-server postgresql11-client postgresql11-server memcached imagemagick ffmpeg mupdf yarn libxml2 vips poppler-utils
<add>$ sudo pkg install sqlite3 mysql80-client mysql80-server postgresql11-client postgresql11-server memcached imagemagick6 ffmpeg mupdf yarn libxml2 vips poppler-utils
<ide> # portmaster databases/redis
<ide> ```
<ide> | 1 |
Javascript | Javascript | add option data to event callbacks | e4339bc51239fbef3d7bbd1b090a3e3cc0b2ef22 | <ide><path>src/ngAnimate/animateQueue.js
<ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate
<ide> join: []
<ide> };
<ide>
<add> function getEventData(options) {
<add> return {
<add> addClass: options.addClass,
<add> removeClass: options.removeClass,
<add> from: options.from,
<add> to: options.to
<add> };
<add> }
<add>
<ide> function makeTruthyCssClassMap(classString) {
<ide> if (!classString) {
<ide> return null;
<ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate
<ide>
<ide> if (skipAnimations) {
<ide> // Callbacks should fire even if the document is hidden (regression fix for issue #14120)
<del> if (documentHidden) notifyProgress(runner, event, 'start');
<add> if (documentHidden) notifyProgress(runner, event, 'start', getEventData(options));
<ide> close();
<del> if (documentHidden) notifyProgress(runner, event, 'close');
<add> if (documentHidden) notifyProgress(runner, event, 'close', getEventData(options));
<ide> return runner;
<ide> }
<ide>
<ide> var $$AnimateQueueProvider = ['$animateProvider', /** @this */ function($animate
<ide> // this will update the runner's flow-control events based on
<ide> // the `realRunner` object.
<ide> runner.setHost(realRunner);
<del> notifyProgress(runner, event, 'start', {});
<add> notifyProgress(runner, event, 'start', getEventData(options));
<ide>
<ide> realRunner.done(function(status) {
<ide> close(!status);
<ide> var animationDetails = activeAnimationsLookup.get(node);
<ide> if (animationDetails && animationDetails.counter === counter) {
<ide> clearElementAnimationState(node);
<ide> }
<del> notifyProgress(runner, event, 'close', {});
<add> notifyProgress(runner, event, 'close', getEventData(options));
<ide> });
<ide> });
<ide>
<ide><path>test/ngAnimate/animateSpec.js
<ide> describe('animations', function() {
<ide>
<ide> });
<ide>
<add> describe('event data', function() {
<add>
<add> it('should be included for enter',
<add> inject(function($animate, $rootScope, $rootElement, $document) {
<add> var eventData;
<add>
<add> $animate.on('enter', jqLite($document[0].body), function(element, phase, data) {
<add> eventData = data;
<add> });
<add>
<add> element = jqLite('<div></div>');
<add> $animate.enter(element, $rootElement, null, {
<add> addClass: 'red blue',
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> $rootScope.$digest();
<add>
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> }));
<add>
<add>
<add> it('should be included for leave',
<add> inject(function($animate, $rootScope, $rootElement, $document) {
<add> var eventData;
<add>
<add> $animate.on('leave', jqLite($document[0].body), function(element, phase, data) {
<add> eventData = data;
<add> });
<add>
<add> var outerContainer = jqLite('<div></div>');
<add> element = jqLite('<div></div>');
<add> outerContainer.append(element);
<add> $rootElement.append(outerContainer);
<add>
<add> $animate.leave(element, {
<add> addClass: 'red blue',
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add>
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> })
<add> );
<add>
<add>
<add> it('should be included for move',
<add> inject(function($animate, $rootScope, $rootElement, $document) {
<add> var eventData;
<add>
<add> $animate.on('move', jqLite($document[0].body), function(element, phase, data) {
<add> eventData = data;
<add> });
<add>
<add> var parent = jqLite('<div></div>');
<add> var parent2 = jqLite('<div></div>');
<add> element = jqLite('<div></div>');
<add> parent.append(element);
<add> $rootElement.append(parent);
<add> $rootElement.append(parent2);
<add>
<add> $animate.move(element, parent2, null, {
<add> addClass: 'red blue',
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add>
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> })
<add> );
<add>
<add>
<add> it('should be included for addClass', inject(function($animate, $rootElement) {
<add> var eventData;
<add>
<add> element = jqLite('<div class="purple"></div>');
<add> $animate.on('addClass', element, function(element, phase, data) {
<add> eventData = data;
<add> });
<add>
<add> $rootElement.append(element);
<add> $animate.addClass(element, 'red blue', {
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> }));
<add>
<add>
<add> it('should be included for removeClass', inject(function($animate, $rootElement) {
<add> var eventData;
<add>
<add> element = jqLite('<div class="red blue purple"></div>');
<add> $animate.on('removeClass', element, function(element, phase, data) {
<add> eventData = data;
<add> });
<add>
<add> $rootElement.append(element);
<add> $animate.removeClass(element, 'red blue', {
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> removeClass: 'red blue',
<add> addClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> }));
<add>
<add>
<add> it('should be included for setClass', inject(function($animate, $rootElement) {
<add> var eventData;
<add>
<add> element = jqLite('<div class="yellow green purple"></div>');
<add>
<add> $animate.on('setClass', element, function(element, phase, data) {
<add>
<add> eventData = data;
<add> });
<add>
<add> $rootElement.append(element);
<add> $animate.setClass(element, 'red blue', 'yellow green', {
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> $animate.flush();
<add>
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> }));
<add>
<add> it('should be included for animate', inject(function($animate, $rootElement) {
<add> // The event for animate changes to 'setClass' if both addClass and removeClass
<add> // are definded, because the operations are merged. However, it is still 'animate'
<add> // and not 'addClass' if only 'addClass' is defined.
<add> // Ideally, we would make this consistent, but it's a BC
<add> var eventData, eventName;
<add>
<add> element = jqLite('<div class="yellow green purple"></div>');
<add>
<add> $animate.on('setClass', element, function(element, phase, data) {
<add> eventData = data;
<add> eventName = 'setClass';
<add> });
<add>
<add> $animate.on('animate', element, function(element, phase, data) {
<add> eventData = data;
<add> eventName = 'animate';
<add> });
<add>
<add> $rootElement.append(element);
<add> var runner = $animate.animate(element, {opacity: 0}, {opacity: 1}, null, {
<add> addClass: 'red blue',
<add> removeClass: 'yellow green'
<add> });
<add> $animate.flush();
<add> runner.end();
<add>
<add> expect(eventName).toBe('setClass');
<add> expect(eventData).toEqual({
<add> addClass: 'red blue',
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add>
<add> eventData = eventName = null;
<add> runner = $animate.animate(element, {opacity: 0}, {opacity: 1}, null, {
<add> addClass: 'yellow green'
<add> });
<add>
<add> $animate.flush();
<add> runner.end();
<add>
<add> expect(eventName).toBe('animate');
<add> expect(eventData).toEqual({
<add> addClass: 'yellow green',
<add> removeClass: null,
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add>
<add> eventData = eventName = null;
<add> runner = $animate.animate(element, {opacity: 0}, {opacity: 1}, null, {
<add> removeClass: 'yellow green'
<add> });
<add>
<add> $animate.flush();
<add> runner.end();
<add>
<add> expect(eventName).toBe('animate');
<add> expect(eventData).toEqual({
<add> addClass: null,
<add> removeClass: 'yellow green',
<add> from: {opacity: 0},
<add> to: {opacity: 1}
<add> });
<add> }));
<add> });
<add>
<ide> they('should trigger a callback for a $prop animation if the listener is on the document',
<ide> ['enter', 'leave'], function($event) {
<ide> module(function($provide) { | 2 |
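A minimal sketch of the new third callback argument, assuming AngularJS with ngAnimate is loaded and `$animate` plus an `element` attached to the DOM are available (for example inside a directive or an injected test, as in the specs above).

$animate.on('addClass', element, function(animatedElement, phase, data) {
  // `data` is the event-data object added by this commit.
  console.log(phase, data.addClass, data.from, data.to);
});

$animate.addClass(element, 'red blue', {
  from: { opacity: 0 },
  to: { opacity: 1 }
});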
Javascript | Javascript | destroy the instance since we don’t expose it | aad49ed20ba01f8235140977acf07e6a23a4789e | <ide><path>packages/ember-application/lib/system/application.js
<ide> const Application = Engine.extend({
<ide> */
<ide> visit(url, options) {
<ide> return this.boot().then(() => {
<del> return this.buildInstance().boot(options).then((instance) => {
<del> return instance.visit(url);
<del> });
<add> let instance = this.buildInstance();
<add>
<add> return instance.boot(options)
<add> .then(() => instance.visit(url))
<add> .catch(error => {
<add> run(instance, 'destroy');
<add> throw error;
<add> });
<ide> });
<ide> }
<ide> }); | 1 |
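A minimal calling-pattern sketch, assuming a hypothetical `App = Ember.Application.create({ autoboot: false })`: if booting or visiting fails, the internally built instance is now destroyed before the rejection reaches the caller.

App.visit('/posts').then(function() {
  // resolved: the route was rendered
}).catch(function(error) {
  // rejected: the internally built ApplicationInstance has already been torn down
  console.error(error);
});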
Text | Text | add space after period | c28466a2a440eb10d8dd8f1fb9467eb3e1b1a0d9 | <ide><path>doc/api/child_process.md
<ide> spawn('prg', [], { stdio: ['pipe', null, null, null, 'pipe'] });
<ide> parent and child processes, and the child is a Node.js process, the child
<ide> is launched with the IPC channel unreferenced (using `unref()`) until the
<ide> child registers an event handler for the [`process.on('disconnect')`][] event
<del>or the [`process.on('message')`][] event.This allows the child to exit normally
<del>without the process being held open by the open IPC channel.*
<add>or the [`process.on('message')`][] event. This allows the child to exit
<add>normally without the process being held open by the open IPC channel.*
<ide>
<ide> See also: [`child_process.exec()`][] and [`child_process.fork()`][]
<ide> | 1 |
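A minimal sketch of the behaviour that paragraph describes, assuming this file is started from a parent via child_process.fork(): registering a 'message' handler is what references the otherwise-unreferenced IPC channel and keeps the child available to receive messages.

'use strict';

// child.js — started via: require('child_process').fork('./child.js')
process.on('message', (msg) => {
  // Registering this handler refs the IPC channel; without a 'message' or
  // 'disconnect' listener the channel stays unref'd and the child may exit
  // normally once its own work is done.
  process.send({ echoed: msg });
});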
Text | Text | remove duplication section [ci skip] | b24b20c80ef55096d268bb49b8d4a3faf05843da | <ide><path>guides/source/upgrading_ruby_on_rails.md
<ide> Upgrading from Rails 4.2 to Rails 5.0
<ide>
<ide> ToDo...
<ide>
<del>### Ruby 2.2.2+
<del>
<del>ToDo...
<del>
<ide> ### Active Record models now inherit from ApplicationRecord by default
<ide>
<ide> In Rails 4.2 an Active Record model inherits from `ActiveRecord::Base`. In Rails 5.0, all models inherit from `ApplicationRecord`.
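A minimal sketch of the new default described above, assuming a freshly generated Rails 5 application (the generator creates the abstract base class; `Post` is an illustrative model name).

# app/models/application_record.rb
class ApplicationRecord < ActiveRecord::Base
  self.abstract_class = true
end

# app/models/post.rb
class Post < ApplicationRecord
end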