Dataset columns:
hash: string (length 40-40)
diff: string (length 131-114k)
message: string (length 7-980)
project: string (length 5-67)
split: string (1 class)
d4b6c298c453a9ed2169f3aec0f8bce816b9c8bf
diff --git a/src/lang-yaml.js b/src/lang-yaml.js index <HASH>..<HASH> 100644 --- a/src/lang-yaml.js +++ b/src/lang-yaml.js @@ -10,16 +10,18 @@ PR.registerLangHandler( PR.createSimpleLexer( [ - [PR.PR_PUNCTUATION, /^[:|>?]/, null, ':|>?'], + [PR.PR_PUNCTUATION, /^[:|>?]+/, null, ':|>?'], [PR.PR_DECLARATION, /^%(?:YAML|TAG)[^#\r\n]+/, null, '%'], [PR.PR_TYPE, /^[&]\S+/, null, '&'], [PR.PR_TYPE, /^!\S*/, null, '!'], [PR.PR_STRING, /^"(?:[^\\"]|\\.)*(?:"|$)/, null, '"'], [PR.PR_STRING, /^'(?:[^']|'')*(?:'|$)/, null, "'"], - [PR.PR_COMMENT, /^#[^\r\n]*/, null, '#'] + [PR.PR_COMMENT, /^#[^\r\n]*/, null, '#'], + [PR.PR_PLAIN, /^\s+/, null, ' \t\r\n'] ], [ [PR.PR_DECLARATION, /^(?:---|\.\.\.)(?:[\r\n]|$)/], [PR.PR_PUNCTUATION, /^-/], - [PR.PR_KEYWORD, /^\w+:[ \r\n]/] + [PR.PR_KEYWORD, /^\w+:[ \r\n]/], + [PR.PR_PLAIN, /^\w+/] ]), ['yaml', 'yml']);
reduced the number of tokens generated by YAML mode by coalescing tokens
google_code-prettify
train
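The commit above coalesces runs of YAML punctuation into single tokens by adding a `+` quantifier to the lexer pattern. A minimal Python sketch of the effect (illustrative, not the prettify lexer itself):

```python
import re

per_char = re.compile(r'[:|>?]')    # old pattern: one token per character
coalesced = re.compile(r'[:|>?]+')  # new pattern: one token per run

text = ">>>???::"
print(per_char.findall(text))   # ['>', '>', '>', '?', '?', '?', ':', ':'] - 8 tokens
print(coalesced.findall(text))  # ['>>>', '???', '::'] - 3 tokens
```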
2f5e6c88227acbeb11102b7b3ec3fc11f2c62028
diff --git a/nosedjango/__init__.py b/nosedjango/__init__.py index <HASH>..<HASH> 100644 --- a/nosedjango/__init__.py +++ b/nosedjango/__init__.py @@ -1 +1,2 @@ # Just a place holder for Windows. +__version__ = (0, 6)
added a version code to nosedjango
nosedjango_nosedjango
train
20690b918a78b8ae67b5fd693dda202cb5ae0799
diff --git a/src/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepository.php b/src/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepository.php index <HASH>..<HASH> 100644 --- a/src/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepository.php +++ b/src/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepository.php @@ -65,7 +65,8 @@ class UserConstraintsReadRepository implements UserConstraintsReadRepositoryInte ->from($this->userRolesTableName->toNative()) ->where(PermissionsSchemaConfigurator::USER_ID_COLUMN . ' = :userId'); - $userConstraintsQuery = $this->connection->createQueryBuilder() + $queryBuilder = $this->connection->createQueryBuilder(); + $userConstraintsQuery = $queryBuilder ->select('rs.' . SearchSchemaConfigurator::CONSTRAINT_COLUMN) ->from($this->rolesSearchTableName, 'rs') ->innerJoin( @@ -81,6 +82,9 @@ class UserConstraintsReadRepository implements UserConstraintsReadRepositoryInte 'rs.' . SearchSchemaConfigurator::UUID_COLUMN . ' = rp.' . PermissionsSchemaConfigurator::ROLE_ID_COLUMN ) ->where(PermissionsSchemaConfigurator::PERMISSION_COLUMN . ' = :permission') + ->andWhere($queryBuilder->expr()->isNotNull( + 'rs.' . SearchSchemaConfigurator::CONSTRAINT_COLUMN + )) ->setParameter('userId', $userId->toNative()) ->setParameter('permission', $permission->toNative()); diff --git a/test/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepositoryTest.php b/test/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepositoryTest.php index <HASH>..<HASH> 100644 --- a/test/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepositoryTest.php +++ b/test/Role/ReadModel/Constraints/Doctrine/UserConstraintsReadRepositoryTest.php @@ -41,7 +41,7 @@ class UserConstraintsReadRepositoryTest extends \PHPUnit_Framework_TestCase protected function setUp() { - $this->roleIds = [new UUID(), new UUID(), new UUID()]; + $this->roleIds = [new UUID(), new UUID(), new UUID(), new UUID()]; $this->userRolesTableName = new StringLiteral('user_roles'); $this->rolePermissionsTableName = new StringLiteral('role_permissions'); @@ -131,6 +131,7 @@ class UserConstraintsReadRepositoryTest extends \PHPUnit_Framework_TestCase $this->insertUserRole(new StringLiteral('user1'), $this->roleIds[1]); $this->insertUserRole(new StringLiteral('user1'), $this->roleIds[2]); $this->insertUserRole(new StringLiteral('user2'), $this->roleIds[2]); + $this->insertUserRole(new StringLiteral('user1'), $this->roleIds[3]); } private function seedRolePermissions() @@ -143,6 +144,9 @@ class UserConstraintsReadRepositoryTest extends \PHPUnit_Framework_TestCase $this->insertUserPermission($this->roleIds[1], Permission::GEBRUIKERS_BEHEREN()); $this->insertUserPermission($this->roleIds[2], Permission::AANBOD_MODEREREN()); + + $this->insertUserPermission($this->roleIds[3], Permission::AANBOD_VERWIJDEREN()); + $this->insertUserPermission($this->roleIds[3], Permission::AANBOD_MODEREREN()); } private function seedRolesSearch() @@ -150,6 +154,7 @@ class UserConstraintsReadRepositoryTest extends \PHPUnit_Framework_TestCase $this->insertRole($this->roleIds[0], new StringLiteral('Brussel Validatoren'), new StringLiteral('zipCode:1000')); $this->insertRole($this->roleIds[1], new StringLiteral('Antwerpen Validatoren'), new StringLiteral('zipCode:2000')); $this->insertRole($this->roleIds[2], new StringLiteral('Leuven Validatoren'), new StringLiteral('zipCode:3000')); + $this->insertRole($this->roleIds[3], new StringLiteral('Geen constraint'), null); } /** @@ -187,14 +192,17 @@ class 
UserConstraintsReadRepositoryTest extends \PHPUnit_Framework_TestCase * @param StringLiteral $roleName * @param StringLiteral $constraint */ - private function insertRole(UUID $roleId, StringLiteral $roleName, StringLiteral $constraint) - { + private function insertRole( + UUID $roleId, + StringLiteral $roleName, + StringLiteral $constraint = null + ) { $this->getConnection()->insert( $this->rolesSearchTableName, [ SearchSchemaConfigurator::UUID_COLUMN => $roleId->toNative(), SearchSchemaConfigurator::NAME_COLUMN => $roleName->toNative(), - SearchSchemaConfigurator::CONSTRAINT_COLUMN => $constraint->toNative(), + SearchSchemaConfigurator::CONSTRAINT_COLUMN => $constraint ? $constraint->toNative() : null, ] ); }
III-<I> Make sure to only return user constraints with non-empty constraint values when getting those constraints by user ID and permission.
cultuurnet_udb3-php
train
76cfd77b3a88ce17508471bf335829eb0628abcf
diff --git a/mmcv/utils/config.py b/mmcv/utils/config.py index <HASH>..<HASH> 100644 --- a/mmcv/utils/config.py +++ b/mmcv/utils/config.py @@ -121,7 +121,7 @@ class Config: regexp = r'\{\{\s*' + str(key) + r'\s*\}\}' value = value.replace('\\', '/') config_file = re.sub(regexp, value, config_file) - with open(temp_config_name, 'w') as tmp_config_file: + with open(temp_config_name, 'w', encoding='utf-8') as tmp_config_file: tmp_config_file.write(config_file) @staticmethod @@ -139,7 +139,7 @@ class Config: base_var_dict[randstr] = base_var regexp = r'\{\{\s*' + BASE_KEY + r'\.' + base_var + r'\s*\}\}' config_file = re.sub(regexp, f'"{randstr}"', config_file) - with open(temp_config_name, 'w') as tmp_config_file: + with open(temp_config_name, 'w', encoding='utf-8') as tmp_config_file: tmp_config_file.write(config_file) return base_var_dict @@ -353,7 +353,8 @@ class Config: warnings.warn( 'Please check "file_format", the file format may be .py') with tempfile.NamedTemporaryFile( - 'w', suffix=file_format, delete=False) as temp_file: + 'w', encoding='utf-8', suffix=file_format, + delete=False) as temp_file: temp_file.write(cfg_str) # on windows, previous implementation cause error # see PR 1077 for details @@ -536,7 +537,7 @@ class Config: if file is None: return self.pretty_text else: - with open(file, 'w') as f: + with open(file, 'w', encoding='utf-8') as f: f.write(self.pretty_text) else: import mmcv
Fix config parsing error caused by non-ASCII characters (#<I>) * explicitly set encoding to 'utf-8'
open-mmlab_mmcv
train
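The fix above pins the file encoding instead of trusting the platform default. A self-contained Python sketch of the failure mode and the fix (assuming a non-UTF-8 default locale, such as cp1252 on Windows):

```python
import tempfile

cfg_str = "label = '配置'  # non-ASCII characters in a config value"

# Without encoding='utf-8', open() uses the platform default encoding,
# which can raise UnicodeEncodeError for non-ASCII config content.
with tempfile.NamedTemporaryFile(
        'w', encoding='utf-8', suffix='.py', delete=False) as temp_file:
    temp_file.write(cfg_str)

# Read it back with the same explicit encoding.
with open(temp_file.name, encoding='utf-8') as f:
    assert f.read() == cfg_str
```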
d8e270ba9868f29edf9ff1ce384d548d93a3bea4
diff --git a/lib/shopify_theme/version.rb b/lib/shopify_theme/version.rb index <HASH>..<HASH> 100644 --- a/lib/shopify_theme/version.rb +++ b/lib/shopify_theme/version.rb @@ -1,3 +1,3 @@ module ShopifyTheme - VERSION = "0.0.4" + VERSION = "0.0.5" end
Packaging for version <I> release
Shopify_shopify_theme
train
12760b9d44650914a3c087547b1b0f760b29e16d
diff --git a/devassistant/package_managers.py b/devassistant/package_managers.py index <HASH>..<HASH> 100644 --- a/devassistant/package_managers.py +++ b/devassistant/package_managers.py @@ -70,11 +70,6 @@ class PackageManager(object): raise NotImplementedError() @classmethod - def is_installed(cls, *args, **kwargs): - """Is this manager available?""" - raise NotImplementedError() - - @classmethod def works(cls, *args, **kwargs): """Returns True if this package manager is usable, False otherwise.""" raise NotImplementedError() @@ -173,14 +168,6 @@ class YUMPackageManager(PackageManager): return False @classmethod - def is_installed(cls, dep): - try: - ClHelper('which rpm') - return True - except exceptions.ClException: - return False - - @classmethod def works(cls): try: import yum @@ -302,14 +289,6 @@ class PIPPackageManager(PackageManager): return False @classmethod - def is_installed(cls): - try: - ClHelper.run_command('which pip') - return True - except exceptions.ClException: - return False - - @classmethod def works(cls): try: ClHelper.run_command('pip') @@ -368,14 +347,6 @@ class NPMPackageManager(PackageManager): return False @classmethod - def is_installed(cls): - try: - ClHelper.run_command('which npm') - return True - except exceptions.ClException: - return False - - @classmethod def works(cls): try: ClHelper.run_command('npm')
Remove extraneous is_installed method from package managers; we are OK with just works()
devassistant_devassistant
train
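The commit above drops the `which`-based `is_installed` probes in favour of `works()`, which exercises the tool directly. A hedged Python sketch of the surviving idiom (the subprocess call is illustrative; the real managers shell out through devassistant's ClHelper):

```python
import subprocess

class PIPPackageManager:
    @classmethod
    def works(cls):
        # Probe the tool itself rather than asking `which` whether it exists:
        # a single check covers both "installed" and "usable".
        try:
            subprocess.run(['pip', '--version'], check=True, capture_output=True)
            return True
        except (OSError, subprocess.CalledProcessError):
            return False

print(PIPPackageManager.works())
```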
76e2b55996bc5994b0fb7c0f17c45e7915686dba
diff --git a/src/ossos-pipeline/ossos/gui/image.py b/src/ossos-pipeline/ossos/gui/image.py index <HASH>..<HASH> 100644 --- a/src/ossos-pipeline/ossos/gui/image.py +++ b/src/ossos-pipeline/ossos/gui/image.py @@ -5,8 +5,6 @@ import tempfile from astropy.io import fits -from ossos import daophot - class DownloadedFitsImage(object): """ @@ -119,6 +117,10 @@ class DownloadedFitsImage(object): raise ValueError("Apcor data is required in order to calculate " "observed magnitude.") + # NOTE: this import is only here so that we don't load up IRAF + # unnecessarily (ex: for candidates processing). + from ossos import daophot + # TODO refactor: associate SourceReadings here? Don't want to pass # in maxcount like this... return daophot.phot_mag(self.as_file().name, x, y,
Moved daophot import to the method where it is used so that IRAF is not loaded unless needed. This improves startup time for processing candidates.
OSSOS_MOP
train
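The commit above defers a heavy import into the one method that needs it, so startup stays fast for code paths that never touch it. A generic Python sketch of the pattern (here `statistics` merely stands in for a heavy dependency like IRAF):

```python
def process_candidates():
    # No heavy imports at module scope: this path never pays for them.
    print('fast startup')

def observed_magnitude(values):
    # Deferred import: loaded only when this method actually runs.
    import statistics
    return statistics.mean(values)

process_candidates()
print(observed_magnitude([1.0, 2.0, 3.0]))
```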
22988308f842c14af7ee505f9461fc7c832d6ab4
diff --git a/lib/karafka/process.rb b/lib/karafka/process.rb index <HASH>..<HASH> 100644 --- a/lib/karafka/process.rb +++ b/lib/karafka/process.rb @@ -6,7 +6,7 @@ module Karafka # Signal types that we handle HANDLED_SIGNALS = %i( - SIGINT SIGQUIT + SIGINT SIGQUIT SIGTERM ).freeze HANDLED_SIGNALS.each do |signal| diff --git a/lib/karafka/server.rb b/lib/karafka/server.rb index <HASH>..<HASH> 100644 --- a/lib/karafka/server.rb +++ b/lib/karafka/server.rb @@ -11,6 +11,7 @@ module Karafka @consumers = Concurrent::Array.new bind_on_sigint bind_on_sigquit + bind_on_sigterm start_supervised end @@ -39,6 +40,15 @@ module Karafka end end + # What should happen when we decide to quit with sigterm + def bind_on_sigterm + process.on_sigterm do + Karafka::App.stop! + consumers.map(&:stop) + exit + end + end + # Starts Karafka with a supervision # @note We don't need to sleep because Karafka::Fetcher is locking and waiting to # finish loop (and it won't happen until we explicitily want to stop) diff --git a/spec/lib/karafka/server_spec.rb b/spec/lib/karafka/server_spec.rb index <HASH>..<HASH> 100644 --- a/spec/lib/karafka/server_spec.rb +++ b/spec/lib/karafka/server_spec.rb @@ -11,6 +11,7 @@ RSpec.describe Karafka::Server do expect(Karafka::Process.instance).to receive(:supervise).and_yield expect(Karafka::Process.instance).to receive(:on_sigint) expect(Karafka::Process.instance).to receive(:on_sigquit) + expect(Karafka::Process.instance).to receive(:on_sigterm) server_class.run end @@ -19,6 +20,7 @@ RSpec.describe Karafka::Server do expect(Karafka::Process.instance).to receive(:supervise) expect(Karafka::Process.instance).to receive(:on_sigint).and_yield expect(Karafka::Process.instance).to receive(:on_sigquit) + expect(Karafka::Process.instance).to receive(:on_sigterm) expect(Karafka::App).to receive(:stop!) expect(server_class).to receive(:exit) @@ -29,6 +31,18 @@ RSpec.describe Karafka::Server do expect(Karafka::Process.instance).to receive(:supervise) expect(Karafka::Process.instance).to receive(:on_sigint) expect(Karafka::Process.instance).to receive(:on_sigquit).and_yield + expect(Karafka::Process.instance).to receive(:on_sigterm) + expect(Karafka::App).to receive(:stop!) + expect(server_class).to receive(:exit) + + server_class.run + end + + it 'defines a proper action for sigterm' do + expect(Karafka::Process.instance).to receive(:supervise) + expect(Karafka::Process.instance).to receive(:on_sigint) + expect(Karafka::Process.instance).to receive(:on_sigquit) + expect(Karafka::Process.instance).to receive(:on_sigterm).and_yield expect(Karafka::App).to receive(:stop!) expect(server_class).to receive(:exit)
for running on Heroku, allow server to respond to SIGTERM and shut down accordingly (#<I>)
karafka_karafka
train
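The commit above teaches the server to shut down cleanly on SIGTERM, the signal Heroku sends when stopping a dyno. The same pattern in Python on a Unix-like system (the handler body is illustrative):

```python
import signal
import sys

def shutdown(signum, frame):
    # Mirror of bind_on_sigterm: stop the app, stop consumers, then exit.
    print('stopping consumers...')
    sys.exit(0)

# Without a SIGTERM handler, the process is killed with no chance
# to close its consumers cleanly.
for sig in (signal.SIGINT, signal.SIGQUIT, signal.SIGTERM):
    signal.signal(sig, shutdown)

signal.raise_signal(signal.SIGTERM)  # simulate a platform-initiated shutdown
```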
7addf6a62c04b4211efd0c0ae18eefe503b9ccc5
diff --git a/src/Client.php b/src/Client.php index <HASH>..<HASH> 100644 --- a/src/Client.php +++ b/src/Client.php @@ -76,7 +76,7 @@ class Client */ public function send($zpl) { - if (!@socket_write($this->socket, $zpl)) { + if (false === @socket_write($this->socket, $zpl)) { $error = $this->getLastError(); throw new CommunicationException($error['message'], $error['code']); }
don't throw error on empty response
robgridley_zebra
train
ecb2477c77f95db0517e37702071b94a8d6b22dd
diff --git a/lib/puppet/parser/ast/leaf.rb b/lib/puppet/parser/ast/leaf.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/parser/ast/leaf.rb +++ b/lib/puppet/parser/ast/leaf.rb @@ -113,10 +113,10 @@ class Puppet::Parser::AST # not include syntactical constructs, like '$' and '{}'). def evaluate(scope) parsewrap do - if ! scope.include?(@value) - :undef - else + if scope.include?(@value) scope[@value, {:file => file, :line => line}] + else + :undef end end end
ast: code cleanup for variable nodes. This just rearranges an if statement so that it doesn't have double negatives, which add mental load when evaluating the code as written.
puppetlabs_puppet
train
b7145eb849788fee6f5715b4beb66965f9aca690
diff --git a/lib/how_is/sources/ci/travis.rb b/lib/how_is/sources/ci/travis.rb index <HASH>..<HASH> 100644 --- a/lib/how_is/sources/ci/travis.rb +++ b/lib/how_is/sources/ci/travis.rb @@ -30,8 +30,7 @@ module HowIs validate_default_branch_response!(response) branches = response["branches"] - # Fail if +branches+ isn't an Array of Hashes. - unless branches.is_a?(Array) && branches.all? { |branch| branch.is_a?(Hash) } + unless array_of_hashes?(branches) raise BadResponseError, "expected `branches' to be Array of Hashes." end @@ -50,6 +49,10 @@ module HowIs private + def array_of_hashes?(ary) + ary.is_a?(Array) && ary.all? { |obj| obj.is_a?(Hash) } + end + def validate_default_branch_response!(response) # Fail if +response+ isn't a Hash. unless response.is_a?(Hash)
clean up ci/travis.rb some more.
duckinator_inq
train
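The cleanup above just names an inline compound check. The same extraction in Python terms (dicts standing in for Ruby hashes):

```python
def array_of_hashes(value):
    # The extracted predicate: a list whose elements are all dicts.
    return isinstance(value, list) and all(isinstance(obj, dict) for obj in value)

response = {'branches': [{'name': 'main'}, {'name': 'gh-pages'}]}
branches = response['branches']
if not array_of_hashes(branches):
    raise ValueError("expected `branches' to be a list of dicts")
```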
1e2eb5565be04878d4f40b61d363a10d78bcc255
diff --git a/resources/src/main/java/org/robolectric/res/android/ResTable_config.java b/resources/src/main/java/org/robolectric/res/android/ResTable_config.java index <HASH>..<HASH> 100644 --- a/resources/src/main/java/org/robolectric/res/android/ResTable_config.java +++ b/resources/src/main/java/org/robolectric/res/android/ResTable_config.java @@ -896,8 +896,8 @@ public class ResTable_config { static ResTable_config fromDtoH(final ResTable_config o) { return new ResTable_config( 0 /*sizeof(ResTable_config)*/, - dtohs((short) o.mcc), - dtohs((short) o.mnc), + dtohs((short) o.mcc) & 0xFFFF, + dtohs((short) o.mnc) & 0xFFFF, o.language, o.country, o.orientation, @@ -906,15 +906,15 @@ public class ResTable_config { o.keyboard, o.navigation, o.inputFlags, - dtohs((short) o.screenWidth), - dtohs((short) o.screenHeight), - dtohs((short) o.sdkVersion), - dtohs((short) o.minorVersion), + dtohs((short) o.screenWidth) & 0xFFFF, + dtohs((short) o.screenHeight) & 0xFFFF, + dtohs((short) o.sdkVersion) & 0xFFFF, + dtohs((short) o.minorVersion) & 0xFFFF, o.screenLayout, o.uiMode, - dtohs((short) o.smallestScreenWidthDp), - dtohs((short) o.screenWidthDp), - dtohs((short) o.screenHeightDp), + dtohs((short) o.smallestScreenWidthDp) & 0xFFFF, + dtohs((short) o.screenWidthDp) & 0xFFFF, + dtohs((short) o.screenHeightDp) & 0xFFFF, o.localeScript, o.localeVariant, o.screenLayout2, @@ -2386,4 +2386,4 @@ public class ResTable_config { } return false; } -} \ No newline at end of file +} diff --git a/resources/src/test/java/org/robolectric/res/android/ResTable_configTest.java b/resources/src/test/java/org/robolectric/res/android/ResTable_configTest.java index <HASH>..<HASH> 100644 --- a/resources/src/test/java/org/robolectric/res/android/ResTable_configTest.java +++ b/resources/src/test/java/org/robolectric/res/android/ResTable_configTest.java @@ -42,4 +42,12 @@ public class ResTable_configTest { assertThat(resTable_config.getBcp47Locale(/* canonicalize= */ true)).isEqualTo("fil-ph"); } + + @Test + public void fromDtoH_preservesMnc() { + ResTable_config config = new ResTable_config(); + config.mnc = 0xFFFF; + + assertThat(ResTable_config.fromDtoH(config).mnc).isEqualTo(0xFFFF); + } }
Fix sign extension after byte order conversion. Android's ResTable_config uses uint<I>, but Robolectric uses Java short+int. When converting byte order for 0xFFFF, we need to ensure that it stays that value. This primarily applies to the resource qualifier "mnc<I>", which gets encoded as 0xFFFF. PiperOrigin-RevId: <I>
robolectric_robolectric
train
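The fix above masks each 16-bit field with `0xFFFF` after widening, because Java's signed `short` sign-extends `0xFFFF` to `-1` when converted to `int`. The arithmetic, demonstrated in Python via `struct`:

```python
import struct

# Decode a 16-bit field holding 0xFFFF as a *signed* short (Java's short):
(raw,) = struct.unpack('<h', b'\xff\xff')
print(raw)           # -1: the value was sign-extended on widening

# Masking after widening recovers the unsigned uint16 value, which is what
# the commit applies to mnc, screenWidth, sdkVersion, and friends.
print(raw & 0xFFFF)  # 65535, i.e. 0xFFFF preserved
```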
c051f209e8bf680e319c309439eaaac7aa81345f
diff --git a/lib/punchblock/translator/asterisk/call.rb b/lib/punchblock/translator/asterisk/call.rb index <HASH>..<HASH> 100644 --- a/lib/punchblock/translator/asterisk/call.rb +++ b/lib/punchblock/translator/asterisk/call.rb @@ -28,6 +28,10 @@ module Punchblock send_pb_event offer_event end + def to_s + "#<#{self.class}:#{id} Channel: #{channel.inspect}>" + end + def process_ami_event(ami_event) pb_logger.trace "Processing AMI event #{ami_event.inspect}" case ami_event.name
[FEATURE] Translator::Asterisk::Call#to_s should give call ID and channel name
adhearsion_punchblock
train
add8b5caff157c0b3fd2c9c319d69f27f9ed013b
diff --git a/octokit/gist_comments.go b/octokit/gist_comments.go index <HASH>..<HASH> 100644 --- a/octokit/gist_comments.go +++ b/octokit/gist_comments.go @@ -20,6 +20,8 @@ type GistCommentsService struct { } // Get a list of all gist comments +// +// https://developer.github.com/v3/gists/comments/#list-comments-on-a-gist func (c *GistCommentsService) All(uri *Hyperlink, uriParams M) (comments []GistComment, result *Result) { url, err := ExpandWithDefault(uri, &GistCommentsURL, uriParams) if err != nil { @@ -31,6 +33,8 @@ func (c *GistCommentsService) All(uri *Hyperlink, uriParams M) (comments []GistC } // Get a single comment by id +// +// https://developer.github.com/v3/gists/comments/#get-a-single-comment func (c *GistCommentsService) One(uri *Hyperlink, uriParams M) (comment *GistComment, result *Result) { url, err := ExpandWithDefault(uri, &GistCommentsURL, uriParams) if err != nil { @@ -42,6 +46,8 @@ func (c *GistCommentsService) One(uri *Hyperlink, uriParams M) (comment *GistCom } // Creates a comment on a gist +// +// https://developer.github.com/v3/gists/comments/#create-a-comment func (c *GistCommentsService) Create(uri *Hyperlink, uriParams M, requestParams interface{}) (comment *GistComment, result *Result) { url, err := ExpandWithDefault(uri, &GistCommentsURL, uriParams) if err != nil { @@ -53,6 +59,8 @@ func (c *GistCommentsService) Create(uri *Hyperlink, uriParams M, requestParams } // Updates a comment on a gist +// +// https://developer.github.com/v3/gists/comments/#edit-a-comment func (c *GistCommentsService) Update(uri *Hyperlink, uriParams M, requestParams interface{}) (comment *GistComment, result *Result) { url, err := ExpandWithDefault(uri, &GistCommentsURL, uriParams) if err != nil { @@ -64,6 +72,8 @@ func (c *GistCommentsService) Update(uri *Hyperlink, uriParams M, requestParams } // Deletes a comment on a gist +// +// https://developer.github.com/v3/gists/comments/#delete-a-comment func (c *GistCommentsService) Delete(uri *Hyperlink, uriParams M) (success bool, result *Result) { url, err := ExpandWithDefault(uri, &GistCommentsURL, uriParams) if err != nil {
Add doc urls gist_comments
octokit_go-octokit
train
0b7bb62da983d62fbbfafbc6bde95c1eecc46649
diff --git a/lib/netflix2.class.js b/lib/netflix2.class.js index <HASH>..<HASH> 100644 --- a/lib/netflix2.class.js +++ b/lib/netflix2.class.js @@ -54,19 +54,12 @@ class Netflix { } /** - * login credentials - * @typedef {Object} credentials - * @property {string} email - your email address - * @property {string} password - your password - */ - - /** * Sets cookies, API endpoints, and the authURL that must be used to * make API calls * * This must be called before using any other functions * - * @param {...credentials} credentials + * @param {{email: string, password: string}} credentials * */ async login(credentials) { @@ -113,8 +106,21 @@ class Netflix { const response = await this.__apiRequest(endpoint, options) return response.body } + /** * @typedef {Object} Profile + * @property {string} firstName + * @property {string} rawFirstName + * @property {string} guid + * @property {boolean} isAccountOwner + * @property {boolean} isActive + * @property {boolean} defaultKidsProfile + * @property {string} experience + * @property {boolean} isAutoCreated + * @property {string} avatarName + * @property {{32: string, 50: string, 64: string, 80: string, 100: string, 112: string, 160: string, 200: string, 320: string, }} avatarImages + * @property {boolean} canEdit + * @property {boolean} isDefault */ /** @@ -149,9 +155,21 @@ class Netflix { } } +/** + * + * @typedef {Object} rating + * @property {"thumb"|"star"} ratingType + * @property {string} title + * @property {number} movieID + * @property {number} rating + * @property {string} date + * @property {number} timestamp + * @property {number} comparableDate + */ + /** * - * @returns {Promise<Object[]>} + * @returns {Promise<rating[]>} */ async getRatingHistory() { let ratingItems = [] @@ -169,9 +187,31 @@ class Netflix { } /** + * @typedef {Object} viewingHistoryItem + * @property {string} title + * @property {string} videoTitle + * @property {number} movieID + * @property {string} country + * @property {number} bookmark - Amount of seconds the user has already seen + * @property {number} duration - Total duration of episode/movie in seconds + * @property {number} date + * @property {number} deviceType + * @property {string} dateStr + * @property {number} index + * @property {string} topNodeId + * @property {string} rating + * @property {number} series + * @property {string} seriesTitle + * @property {string} seasonDescriptor + * @property {string} episodeTitle + */ + + /** * Downloads the whole list of viewed movies. * The Netflix endpoint is paged. * This structure is copied from getRatingHistory. + * + * @returns viewingHistoryItem[] */ async getViewingHistory() { let viewedItems = [] @@ -204,6 +244,11 @@ class Netflix { return await this.__hideSpecificViewingHistory(movieID, true) } + /** + * + * @param {number} movieID + * @param {boolean} seriesAll + */ async __hideSpecificViewingHistory(movieID, seriesAll) { const options = { body: { @@ -242,6 +287,11 @@ class Netflix { return result.body } + /** + * + * @param {number} page + * @returns {Object} + */ async __getViewingHistory(page) { const options = { qs: { @@ -254,6 +304,12 @@ class Netflix { return response.body } + /** + * + * @param {boolean} isThumbRating + * @param {number} titleId + * @param {number} rating + */ async __setRating(isThumbRating, titleId, rating) { const endpoint = isThumbRating ? 
constants.setThumbRatingEndpointUrl : constants.setVideoRatindEndpointUrl let options = { @@ -277,6 +333,11 @@ class Netflix { } } + /** + * + * @param {number} titleId + * @param {number} rating + */ async setStarRating(titleId, rating) { await this.__setRating(false, titleId, rating) } @@ -322,6 +383,11 @@ class Netflix { return response.body } + /** + * + * @param {{email: string, password: string}} credentials + * @returns {Object} + */ async __getLoginForm(credentials) { const options = { url: constants.loginUrl, @@ -349,6 +415,10 @@ class Netflix { } } + /** + * + * @param {Object} form + */ async __postLoginForm(form) { const options = { url: constants.loginUrl, @@ -365,6 +435,10 @@ class Netflix { } } + /** + * + * @param {number} page + */ async __getRatingHistory(page) { const options = { qs: { @@ -377,6 +451,12 @@ class Netflix { return response.body } + /** + * + * @param {string} endpoint + * @param {Object} options + * @returns {Object} + */ async __apiRequest(endpoint, options) { const extendedOptions = extend(true, options, { baseUrl: this.apiRoot, @@ -392,6 +472,10 @@ class Netflix { } } + /** + * + * @param {string} url + */ async __getContextData(url) { const options = { url: url, @@ -449,6 +533,10 @@ class Netflix { } } + /** + * + * @param {...string} urls + */ async __getContextDataFromUrls(urls) { for (const url of urls) { await this.__getContextData(url)
Add JsDoc for other methods (#<I>)
LBBO_node-netflix2
train
3050565568354f6e4a319a611a4f8ee020b4f0c0
diff --git a/bcbio/structural/prioritize.py b/bcbio/structural/prioritize.py index <HASH>..<HASH> 100644 --- a/bcbio/structural/prioritize.py +++ b/bcbio/structural/prioritize.py @@ -148,10 +148,12 @@ def _cnvkit_prioritize(sample, genes, allele_file, metrics_file): """ mdf = pd.read_table(metrics_file) mdf.columns = [x.lower() for x in mdf.columns] - mdf = mdf[mdf["gene"].str.contains("|".join(genes))] + if len(genes) > 0: + mdf = mdf[mdf["gene"].str.contains("|".join(genes))] mdf = mdf[["chromosome", "start", "end", "gene", "log2", "ci_hi", "ci_lo"]] adf = pd.read_table(allele_file) - adf = adf[adf["gene"].str.contains("|".join(genes))] + if len(genes) > 0: + adf = adf[adf["gene"].str.contains("|".join(genes))] adf = adf[["chromosome", "start", "end", "cn", "cn1", "cn2"]] df = pd.merge(mdf, adf, on=["chromosome", "start", "end"]) df = df[df["cn"] != 2]
CNV prioritization: only filter genes if present. Avoids an error when no gene names are available for prioritization.
bcbio_bcbio-nextgen
train
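The guard above skips gene filtering when the list is empty, since `"|".join([])` yields an empty pattern that no longer expresses a real constraint. A small pandas sketch of the pattern (column names mirror the diff; the data is made up):

```python
import pandas as pd

mdf = pd.DataFrame({'gene': ['BRCA1', 'TP53'], 'log2': [0.4, -1.2]})

genes = []  # no gene names available for prioritization
if len(genes) > 0:  # '|'.join([]) == '', a degenerate pattern, so skip
    mdf = mdf[mdf['gene'].str.contains('|'.join(genes))]
print(mdf)  # unfiltered frame when there is nothing to filter on
```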
841ff590ea6f4b195016b6a176876461b7af94e3
diff --git a/command/hook_ui.go b/command/hook_ui.go index <HASH>..<HASH> 100644 --- a/command/hook_ui.go +++ b/command/hook_ui.go @@ -60,6 +60,7 @@ const ( uiResourceCreate uiResourceModify uiResourceDestroy + uiResourceRead ) func (h *UiHook) PreApply(addr addrs.AbsResourceInstance, gen states.Generation, action plans.Action, priorState, plannedNewState cty.Value) (terraform.HookAction, error) { @@ -83,6 +84,9 @@ func (h *UiHook) PreApply(addr addrs.AbsResourceInstance, gen states.Generation, case plans.Update: operation = "Modifying..." op = uiResourceModify + case plans.Read: + operation = "Reading..." + op = uiResourceRead default: // We don't expect any other actions in here, so anything else is a // bug in the caller but we'll ignore it in order to be robust. @@ -196,6 +200,8 @@ func (h *UiHook) stillApplying(state uiResourceState) { msg = "Still destroying..." case uiResourceCreate: msg = "Still creating..." + case uiResourceRead: + msg = "Still reading..." case uiResourceUnknown: return } @@ -241,6 +247,8 @@ func (h *UiHook) PostApply(addr addrs.AbsResourceInstance, gen states.Generation msg = "Destruction complete" case uiResourceCreate: msg = "Creation complete" + case uiResourceRead: + msg = "Read complete" case uiResourceUnknown: return terraform.HookActionContinue, nil }
command: Add UI hooks for read actions
hashicorp_terraform
train
de796263898826698b30f4a75f0035ea57fd2336
diff --git a/res/generators/templates/application/app/javascript_index.js b/res/generators/templates/application/app/javascript_index.js index <HASH>..<HASH> 100644 --- a/res/generators/templates/application/app/javascript_index.js +++ b/res/generators/templates/application/app/javascript_index.js @@ -99,7 +99,9 @@ var App = { {action: "refresh"} ], {}); - Rho.RhoConnectClient.setNotification("*", that.syncNotify); + if (Rho.RhoConnectClient != null) { + Rho.RhoConnectClient.setNotification("*", that.syncNotify); + } $("#logoutBtn").on("click", function () { Rho.RhoConnectClient.logout(); @@ -169,7 +171,9 @@ var App = { $("#loginItem").toggleClass("hidden", loggedIn); }; this.init = function () { - Rho.RhoConnectClient.setNotification("*", that.syncNotify); + if (Rho.RhoConnectClient != null) { + Rho.RhoConnectClient.setNotification("*", that.syncNotify); + } that.updateUI(); $("#back").on("click", function () {
JS app generation fix: Rho.RhoConnectClient is now checked for existence before setting the notifications
rhomobile_rhodes
train
7ebc6078e34504541dfbb528ab80dea00cc80963
diff --git a/features/step_definitions/manage_patients_steps.rb b/features/step_definitions/manage_patients_steps.rb index <HASH>..<HASH> 100644 --- a/features/step_definitions/manage_patients_steps.rb +++ b/features/step_definitions/manage_patients_steps.rb @@ -15,7 +15,7 @@ Given(/^some patients who need renal treatment$/) do :forename => "Roger", :birth_date => "01/01/1947", :paediatric_patient_indicator => "1", - :sex => 1, + :sex => "Male", :ethnicity_id => Renalware::Ethnicity.first.id, :hosp_centre_code => "888" ) @@ -27,7 +27,7 @@ Given(/^some patients who need renal treatment$/) do :forename => "Doris", :birth_date => "24/06/1970", :paediatric_patient_indicator => "1", - :sex => 2, + :sex => "Female", :ethnicity_id => Renalware::Ethnicity.second.id, :hosp_centre_code => "888" ) @@ -39,7 +39,7 @@ Given(/^some patients who need renal treatment$/) do :forename => "Ghost", :birth_date => "28/02/1930", :paediatric_patient_indicator => "1", - :sex => 1, + :sex => "Male", :ethnicity_id => Renalware::Ethnicity.third.id, :hosp_centre_code => "999" )
Amended patient sex attribute to use string value instead of integer for factory instance.
airslie_renalware-core
train
ede323e2b3481b4a9bf0cc1fc905dbafa73ce1af
diff --git a/course/lib.php b/course/lib.php index <HASH>..<HASH> 100644 --- a/course/lib.php +++ b/course/lib.php @@ -2930,6 +2930,8 @@ function delete_course_module($id) { // very quick on an empty table) $DB->delete_records('course_modules_completion', array('coursemoduleid' => $cm->id)); $DB->delete_records('course_modules_availability', array('coursemoduleid'=> $cm->id)); + $DB->delete_records('course_completion_criteria', array('moduleinstance' => $cm->id, + 'criteriatype' => COMPLETION_CRITERIA_TYPE_ACTIVITY)); delete_context(CONTEXT_MODULE, $cm->id); return $DB->delete_records('course_modules', array('id'=>$cm->id)); diff --git a/lib/db/upgrade.php b/lib/db/upgrade.php index <HASH>..<HASH> 100644 --- a/lib/db/upgrade.php +++ b/lib/db/upgrade.php @@ -402,5 +402,25 @@ function xmldb_main_upgrade($oldversion) { upgrade_main_savepoint(true, 2012042300.00); } + if ($oldversion < 2012042300.02) { + require_once($CFG->libdir . '/completion/completion_criteria.php'); + // Delete orphaned criteria which were left when modules were removed + if ($DB->get_dbfamily() === 'mysql') { + $sql = "DELETE cc FROM {course_completion_criteria} cc + LEFT JOIN {course_modules} cm ON cm.id = cc.moduleinstance + WHERE cm.id IS NULL AND cc.criteriatype = ".COMPLETION_CRITERIA_TYPE_ACTIVITY; + } else { + $sql = "DELETE FROM {course_completion_criteria} + WHERE NOT EXISTS ( + SELECT 'x' FROM {course_modules} + WHERE {course_modules}.id = {course_completion_criteria}.moduleinstance) + AND cc.criteriatype = ".COMPLETION_CRITERIA_TYPE_ACTIVITY; + } + $DB->execute($sql); + + // Main savepoint reached + upgrade_main_savepoint(true, 2012042300.02); + } + return true; } diff --git a/version.php b/version.php index <HASH>..<HASH> 100644 --- a/version.php +++ b/version.php @@ -30,7 +30,7 @@ defined('MOODLE_INTERNAL') || die(); -$version = 2012042300.01; // YYYYMMDD = weekly release date of this DEV branch +$version = 2012042300.02; // YYYYMMDD = weekly release date of this DEV branch // RR = release increments - 00 in DEV branches // .XX = incremental changes
MDL-<I> Ensure that completion criteria are deleted when removing a course module Conflicts: lib/db/upgrade.php version.php
moodle_moodle
train
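The upgrade step above deletes completion criteria whose course module has disappeared, using a MySQL-specific multi-table DELETE and a portable NOT EXISTS form for other databases. The portable form, runnable against SQLite (the criteria-type code 4 stands in for Moodle's COMPLETION_CRITERIA_TYPE_ACTIVITY constant):

```python
import sqlite3

conn = sqlite3.connect(':memory:')
conn.executescript("""
    CREATE TABLE course_modules (id INTEGER PRIMARY KEY);
    CREATE TABLE course_completion_criteria (
        id INTEGER PRIMARY KEY, moduleinstance INTEGER, criteriatype INTEGER);
    INSERT INTO course_modules (id) VALUES (1);
    INSERT INTO course_completion_criteria VALUES (1, 1, 4), (2, 99, 4);
""")
# Delete orphaned activity criteria: their module no longer exists.
conn.execute("""
    DELETE FROM course_completion_criteria
    WHERE criteriatype = 4
      AND NOT EXISTS (SELECT 1 FROM course_modules
                      WHERE course_modules.id = course_completion_criteria.moduleinstance)
""")
print(conn.execute('SELECT id FROM course_completion_criteria').fetchall())  # [(1,)]
```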
67d9f83f1552e2bcdeb20f12e572b27e9546f5ba
diff --git a/lib/build.js b/lib/build.js index <HASH>..<HASH> 100644 --- a/lib/build.js +++ b/lib/build.js @@ -30,9 +30,26 @@ module.exports = { return path.join(this.path, this.basename) }, + complete: function () { + return Promise.resolve(this) + }, + exists: function (dirs) { return dirs.some(function (dir) { return fs.existsSync(path.join(dir, this.basename)) }.bind(this)) + }, + + write: function (build) { + build = build || this + + return new Promise(function (resolve, reject) { + fs.writeFile(build.filename, build.definition, function (err) { + if (err) return reject(err) + + console.log(build.packageName + ' written') + resolve(build) + }) + }) } }
add post-processing hook and file writer to Build
nodenv_node-build-update-defs
train
e4bc212c824129bbc5394fe9b02b47466ea18499
diff --git a/java/client/test/org/openqa/selenium/CorrectEventFiringTest.java b/java/client/test/org/openqa/selenium/CorrectEventFiringTest.java index <HASH>..<HASH> 100644 --- a/java/client/test/org/openqa/selenium/CorrectEventFiringTest.java +++ b/java/client/test/org/openqa/selenium/CorrectEventFiringTest.java @@ -344,7 +344,7 @@ public class CorrectEventFiringTest extends AbstractDriverTestCase { uploadElement.sendKeys(file.getAbsolutePath()); // Shift focus to something else because send key doesn't make the focus leave - driver.findElement(By.tagName("body")).click(); + driver.findElement(By.tagName("id-name1")).click(); assertThat(result.getText(), equalTo("changed")); }
JimEvans: Changing Java CorrectEventFiringTest.testUploadingFileShouldFireOnChangeEvent to click on a specific element after upload to force event to fire. Previously, it was clicking on the <body> element. r<I>
SeleniumHQ_selenium
train
2e2e2f9d6455b8ad290748bee6cfae101253baaf
diff --git a/tests/spec/fs.truncate.spec.js b/tests/spec/fs.truncate.spec.js index <HASH>..<HASH> 100644 --- a/tests/spec/fs.truncate.spec.js +++ b/tests/spec/fs.truncate.spec.js @@ -223,3 +223,18 @@ describe('fs.truncate', function() { }); }); }); + + +describe('fsPromises.truncate', function () { + beforeEach(util.setup); + afterEach(util.cleanup); + it('should error when path does not exist (with promises)', () => { + var fsPromises = util.fs().promises; + + return fsPromises.truncate('/NonExistingPath', 0) + .catch(error => { + expect(error).to.exist; + expect(error.code).to.equal('ENOENT'); + }); + }); +}); \ No newline at end of file
Fixed issue#<I> using promises to test fs.truncate when path does not exist (#<I>) * added a test for issue#<I> using promises to test fs.truncate * Fixed issue#<I> testing fs.truncate using promises when path does not exist
filerjs_filer
train
25d7ee286d4a964076ef0a9b53642c4cb580b3af
diff --git a/revision_store.py b/revision_store.py index <HASH>..<HASH> 100644 --- a/revision_store.py +++ b/revision_store.py @@ -58,7 +58,6 @@ class AbstractRevisionStore(object): serializer = self.repo._format._serializer search_key_name = serializer.search_key_name maximum_size = serializer.maximum_size - parent_id_basename_index = serializer.parent_id_basename_index # Maybe the rest of this ought to be part of the CHKInventory API? inv = inventory.CHKInventory(search_key_name) @@ -67,12 +66,11 @@ class AbstractRevisionStore(object): search_key_func = chk_map.search_key_registry.get(search_key_name) inv.id_to_entry = chk_map.CHKMap(chk_store, None, search_key_func) inv.id_to_entry._root_node.set_maximum_size(maximum_size) - if parent_id_basename_index: - inv.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store, - None, search_key_func) - inv.parent_id_basename_to_file_id._root_node.set_maximum_size( - maximum_size) - inv.parent_id_basename_to_file_id._root_node._key_width = 2 + inv.parent_id_basename_to_file_id = chk_map.CHKMap(chk_store, + None, search_key_func) + inv.parent_id_basename_to_file_id._root_node.set_maximum_size( + maximum_size) + inv.parent_id_basename_to_file_id._root_node._key_width = 2 return inv def get_inventory(self, revision_id):
parent_id_basename_index is no longer a serializer attribute - always required now
jelmer_python-fastimport
train
5721e75a599da008af80a06a951f54ffa39fbbfc
diff --git a/pythainlp/ner/__init__.py b/pythainlp/ner/__init__.py index <HASH>..<HASH> 100644 --- a/pythainlp/ner/__init__.py +++ b/pythainlp/ner/__init__.py @@ -5,6 +5,7 @@ Named-entity recognizer from pythainlp.corpus import download, get_file, stopwords from pythainlp.tag import pos_tag from pythainlp.tokenize import word_tokenize +from pythainlp.util import is_thaiword try: import sklearn_crfsuite @@ -22,20 +23,6 @@ _WORD_TOKENIZER = "newmm" # ตัวตัดคำ _STOPWORDS = stopwords.words("thai") -def _is_thaichar(ch): # เป็นอักษรไทยหรือไม่ - ch_val = ord(ch) - if ch_val >= 3584 and ch_val <= 3711: - return True - return False - - -def _is_thaiword(word): # เป็นคำที่มีแต่อักษรไทยหรือไม่ - for ch in word: - if ch != "." and not _is_thaichar(ch): - return False - return True - - def _is_stopword(word): # เช็คว่าเป็นคำฟุ่มเฟือย return word in _STOPWORDS @@ -47,7 +34,7 @@ def _doc2features(doc, i): features = { "word.word": word, "word.stopword": _is_stopword(word), - "word.isthai": _is_thaiword(word), + "word.isthai": is_thaiword(word), "word.isspace": word.isspace(), "postag": postag, "word.isdigit()": word.isdigit(), @@ -61,7 +48,7 @@ def _doc2features(doc, i): postag1 = doc[i - 1][1] features["word.prevword"] = prevword features["word.previsspace"] = prevword.isspace() - features["word.previsthai"] = _is_thaiword(prevword) + features["word.previsthai"] = is_thaiword(prevword) features["word.prevstopword"] = _is_stopword(prevword) features["word.prepostag"] = postag1 features["word.prevwordisdigit"] = prevword.isdigit() @@ -75,7 +62,7 @@ def _doc2features(doc, i): features["word.nextword"] = nextword features["word.nextisspace"] = nextword.isspace() features["word.nextpostag"] = postag1 - features["word.nextisthai"] = _is_thaiword(nextword) + features["word.nextisthai"] = is_thaiword(nextword) features["word.nextstopword"] = _is_stopword(nextword) features["word.nextwordisdigit"] = nextword.isdigit() else: diff --git a/pythainlp/spell/pn.py b/pythainlp/spell/pn.py index <HASH>..<HASH> 100644 --- a/pythainlp/spell/pn.py +++ b/pythainlp/spell/pn.py @@ -7,8 +7,27 @@ Based on Peter Norvig's Python code from http://norvig.com/spell-correct.html from collections import Counter from pythainlp.corpus import tnc +from pythainlp.util import is_thaichar -_WORDS = Counter(dict(tnc.get_word_frequency_all())) + +def _keep(word): + for ch in word: + if ch != "." and not is_thaichar(ch): + return False + if ch in "๐๑๒๓๔๕๖๗๘๙": + return False + return True + + +# get word frequency from TNC then filter out non-Thai words and low frequency words +word_freqs = tnc.get_word_frequency_all() +word_freqs = [ + word_freq + for word_freq in word_freqs + if word_freq[1] > 2 and len(word_freq[0]) <= 40 and _keep(word_freq[0]) +] + +_WORDS = Counter(dict(word_freqs)) _WORDS_TOTAL = sum(_WORDS.values()) diff --git a/pythainlp/util/__init__.py b/pythainlp/util/__init__.py index <HASH>..<HASH> 100644 --- a/pythainlp/util/__init__.py +++ b/pythainlp/util/__init__.py @@ -7,6 +7,20 @@ import re from nltk.util import ngrams as ngramsdata +def is_thaichar(ch): # เป็นอักษรไทยหรือไม่ + ch_val = ord(ch) + if ch_val >= 3584 and ch_val <= 3711: + return True + return False + + +def is_thaiword(word): # เป็นคำที่มีแต่อักษรไทยหรือไม่ + for ch in word: + if ch != "." 
and not is_thaichar(ch): + return False + return True + + def ngrams(token, num): """ ngrams สร้าง ngrams @@ -34,7 +48,7 @@ def trigram(token): return ngrams(token, 3) -RULE1 = [ +_NORMALIZE_RULE1 = [ "ะ", "ั", "็", @@ -61,7 +75,7 @@ RULE1 = [ ] # เก็บพวกสระ วรรณยุกต์ที่ซ้ำกันแล้วมีปัญหา -RULE2 = [ +_NORMALIZE_RULE2 = [ ("เเ", "แ"), # เ เ -> แ ("ํ(t)า", "\\1ำ"), ("ํา(t)", "\\1ำ"), @@ -81,9 +95,9 @@ def normalize(text): >>> print(normalize("เเปลก")=="แปลก") # เ เ ป ล ก กับ แปลก True """ - for data in RULE2: + for data in _NORMALIZE_RULE2: text = re.sub(data[0].replace("t", "[่้๊๋]"), data[1], text) - for data in list(zip(RULE1, RULE1)): + for data in list(zip(_NORMALIZE_RULE1, _NORMALIZE_RULE1)): text = re.sub(data[0].replace("t", "[่้๊๋]") + "+", data[1], text) return text
Filter out non-Thai words and low frequency words from word frequency list for spell checker
PyThaiNLP_pythainlp
train
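The change above moves the Thai-character helpers into `pythainlp.util` and filters the TNC frequency list before building the spell checker's Counter. A condensed sketch of the filter with toy frequencies (the Thai block is U+0E00 to U+0E7F, matching ord 3584 to 3711 in the removed helper):

```python
from collections import Counter

word_freqs = [('แมว', 120), ('the', 45), ('แปลก', 2), ('กิน', 15)]

def _keep(word):
    # Thai-script characters only (a '.' is allowed), and no Thai digits.
    return all(ch == '.' or '\u0e00' <= ch <= '\u0e7f' for ch in word) \
        and not any(ch in '๐๑๒๓๔๕๖๗๘๙' for ch in word)

# Drop non-Thai words, very long words, and low-frequency noise.
_WORDS = Counter({w: f for w, f in word_freqs
                  if f > 2 and len(w) <= 40 and _keep(w)})
print(_WORDS)  # Counter({'แมว': 120, 'กิน': 15})
```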
f7a82e8d9a7698e05dc623c74f246d5bebc8a74d
diff --git a/metrics-core/src/main/java/com/codahale/metrics/ScheduledReporter.java b/metrics-core/src/main/java/com/codahale/metrics/ScheduledReporter.java index <HASH>..<HASH> 100644 --- a/metrics-core/src/main/java/com/codahale/metrics/ScheduledReporter.java +++ b/metrics-core/src/main/java/com/codahale/metrics/ScheduledReporter.java @@ -1,7 +1,12 @@ package com.codahale.metrics; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + import java.io.Closeable; +import java.util.Collections; import java.util.Locale; +import java.util.Set; import java.util.SortedMap; import java.util.concurrent.ExecutionException; import java.util.concurrent.Executors; @@ -12,9 +17,6 @@ import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.concurrent.atomic.AtomicInteger; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - /** * The abstract base class for all scheduled reporters (i.e., reporters which process a registry's * metrics periodically). @@ -58,6 +60,7 @@ public abstract class ScheduledReporter implements Closeable, Reporter { private final MetricRegistry registry; private final ScheduledExecutorService executor; private final boolean shutdownExecutorOnStop; + private final Set<MetricType> disabledMetricTypes; private ScheduledFuture<?> scheduledFuture; private final MetricFilter filter; private final double durationFactor; @@ -118,6 +121,18 @@ public abstract class ScheduledReporter implements Closeable, Reporter { TimeUnit durationUnit, ScheduledExecutorService executor, boolean shutdownExecutorOnStop) { + this(registry, name, filter, rateUnit, durationUnit, executor, shutdownExecutorOnStop, + Collections.<MetricType>emptySet()); + } + + protected ScheduledReporter(MetricRegistry registry, + String name, + MetricFilter filter, + TimeUnit rateUnit, + TimeUnit durationUnit, + ScheduledExecutorService executor, + boolean shutdownExecutorOnStop, + Set<MetricType> disabledMetricTypes) { this.registry = registry; this.filter = filter; this.executor = executor == null? createDefaultExecutor(name) : executor; @@ -126,6 +141,8 @@ public abstract class ScheduledReporter implements Closeable, Reporter { this.rateUnit = calculateRateUnit(rateUnit); this.durationFactor = 1.0 / durationUnit.toNanos(1); this.durationUnit = durationUnit.toString().toLowerCase(Locale.US); + this.disabledMetricTypes = disabledMetricTypes != null ? disabledMetricTypes : + Collections.<MetricType>emptySet(); } /** @@ -274,6 +291,10 @@ public abstract class ScheduledReporter implements Closeable, Reporter { return shutdownExecutorOnStop; } + protected Set<MetricType> getDisabledMetricTypes() { + return disabledMetricTypes; + } + private String calculateRateUnit(TimeUnit unit) { final String s = unit.toString().toLowerCase(Locale.US); return s.substring(0, s.length() - 1);
Add support for disabling some metrics from being reported. Add a facility to disable a set of metrics which should not be reported to a remote system. `ScheduledReporter` only stores the set. The decision of how to filter the metrics is delegated to concrete reporters.
dropwizard_metrics
train
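The commit above threads an optional set of disabled metric types through `ScheduledReporter`, defaulting to an empty set so existing constructors keep working. A compact Python analogue (here the base class filters directly, purely for brevity; in the commit that decision is delegated to concrete reporters):

```python
class ScheduledReporter:
    def __init__(self, registry, disabled_metric_types=None):
        # Default to an empty set so older call sites need no changes.
        self.disabled_metric_types = frozenset(disabled_metric_types or ())
        self.registry = registry

    def report(self, metrics):
        for kind, values in metrics.items():
            if kind in self.disabled_metric_types:
                continue  # still collected, just not sent to the remote system
            print(kind, values)

reporter = ScheduledReporter(registry=None, disabled_metric_types={'gauges'})
reporter.report({'gauges': [1], 'counters': [2]})  # only counters printed
```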
3c57ee74c73884ea3c0a24d0ec40796453657823
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -237,7 +237,7 @@ install_requires = [ 'rtree>=0.9.7', 'setuptools>=39.0.0', 'sphinx>=0.6.6', - 'spyder-kernels>=2.0.3,<2.1.0', + 'spyder-kernels>=2.0.4,<2.1.0', 'textdistance>=4.2.0', 'three-merge>=0.1.1', 'watchdog>=0.10.3' diff --git a/spyder/dependencies.py b/spyder/dependencies.py index <HASH>..<HASH> 100644 --- a/spyder/dependencies.py +++ b/spyder/dependencies.py @@ -65,7 +65,7 @@ QTPY_REQVER = '>=1.5.0' RTREE_REQVER = '>=0.9.7' SETUPTOOLS_REQVER = '>=39.0.0' SPHINX_REQVER = '>=0.6.6' -SPYDER_KERNELS_REQVER = '>=2.0.3;<2.1.0' +SPYDER_KERNELS_REQVER = '>=2.0.4;<2.1.0' TEXTDISTANCE_REQVER = '>=4.2.0' THREE_MERGE_REQVER = '>=0.1.1' # None for pynsist install for now
Update spyder-kernels dependency
spyder-ide_spyder
train
09141d2a8f8cb0096c9ff9d079786f056fb4de8c
diff --git a/superset-frontend/webpack.config.js b/superset-frontend/webpack.config.js index <HASH>..<HASH> 100644 --- a/superset-frontend/webpack.config.js +++ b/superset-frontend/webpack.config.js @@ -340,6 +340,9 @@ const config = { exclude: [/superset-ui.*\/node_modules\//, /\.test.jsx?$/], include: [ new RegExp(`${APP_DIR}/(src|.storybook|plugins|packages)`), + ...['./src', './.storybook', './plugins', './packages'].map(p => + path.resolve(__dirname, p), + ), // redundant but required for windows /@encodable/, ], use: [babelLoader],
fix(build): update webpack jsx paths (#<I>)
apache_incubator-superset
train
8ba38b8efadf3a222a3721610cf8d163cf9834b3
diff --git a/pygmsh/geometry.py b/pygmsh/geometry.py index <HASH>..<HASH> 100644 --- a/pygmsh/geometry.py +++ b/pygmsh/geometry.py @@ -257,7 +257,18 @@ class Geometry(object): ) else: raise RuntimeError('Specify at least translation or rotation.') - return name + + # From <http://www.manpagez.com/info/gmsh/gmsh-2.4.0/gmsh_66.php>: + # + # > In this last extrusion command we retrieved the volume number + # > programatically by saving the output of the command into a + # > list. This list will contain the "top" of the extruded surface (in + # > out[0]) as well as the newly created volume (in out[1]). + # + top = '%s[0]' % name + extruded = '%s[1]' % name + + return top, extruded def add_boundary_layer( self, @@ -680,23 +691,20 @@ class Geometry(object): # is the surface that was created by the extrusion. previous = c angle = '2*Pi/3' - all_names = [] + all_surfaces = [] for i in range(3): self.add_comment('Round no. %s' % (i+1)) for k in range(len(previous)): # ts1[] = Extrude {{0,0,1}, {0,0,0}, 2*Pi/3}{Line{tc1};}; # ... - tmp_name = self.extrude( + top, surf = self.extrude( 'Line{%s}' % previous[k], rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle ) - all_names.append(tmp_name) - previous[k] = tmp_name + '[0]' - - # Now build surface loop and volume. - all_surfaces = [name + '[1]' for name in all_names] + all_surfaces.append(surf) + previous[k] = top # compound_surface = CompoundSurface(all_surfaces) @@ -753,19 +761,18 @@ class Geometry(object): # for the following Extrude() step. The second [1] entry of the array # is the surface that was created by the extrusion. previous = s - all_names = [] + all_volumes = [] num_steps = 3 for _ in range(num_steps): - tmp_name = self.extrude( + top, vol = self.extrude( 'Surface{%s}' % previous, rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle='2*Pi/%d' % num_steps ) - previous = tmp_name + '[0]' - all_names.append(tmp_name) + previous = top + all_volumes.append(vol) - all_volumes = [name + '[1]' for name in all_names] vol = self.add_compound_volume(all_volumes) if label: self.add_physical_volume(vol, label) @@ -840,29 +847,28 @@ class Geometry(object): # Extrude all edges three times by 2*Pi/3. previous = e angle = '2*Pi/3' - all_names = [] + all_surfaces = [] # com = [] self.add_comment('Extrude in 3 steps.') for i in range(3): self.add_comment('Step %s' % (i+1)) for k in range(len(previous)): # ts1[] = Extrude {{0,0,1}, {0,0,0}, 2*Pi/3}{Line{tc1};}; - name = self.extrude( + top, surf = self.extrude( 'Line{%s}' % previous[k], rotation_axis=rot_axis, point_on_axis=point_on_rot_axis, angle=angle ) # if k==0: - # com.append(name+'[1]') + # com.append(surf) # else: - # all_names.append(name+'[1]') - all_names.append(name+'[1]') - previous[k] = name + '[0]' + # all_names.appends(surf) + all_surfaces.append(surf) + previous[k] = top # # cs = CompoundSurface(com) # Now just add surface loop and volume. - all_surfaces = all_names # all_surfaces = all_names + [cs] surface_loop = self.add_surface_loop(all_surfaces) vol = self.add_volume(surface_loop) @@ -903,11 +909,10 @@ class Geometry(object): surf = self.add_plane_surface(','.join([ll_outer, ll_inner])) # Now Extrude the ring surface. - name = self.extrude( + top, vol = self.extrude( 'Surface{%s}' % surf, translation_axis=numpy.dot(R, [length, 0, 0]) ) - vol = name + '[0]' if label: self.add_physical_volume(vol, label) return vol
return top and extruded entity separately from extrude()
nschloe_pygmsh
train
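The refactor above stops returning the raw Gmsh array name and instead returns the two indices callers actually want: `name[0]` (the top of the extrusion) and `name[1]` (the newly created entity). A stripped-down sketch of the new contract (the generated variable name is illustrative):

```python
def extrude(entity, angle):
    name = 'ex1'  # stands in for the generated Gmsh array variable
    # Gmsh convention: out[0] is the "top" of the extruded entity,
    # out[1] the newly created surface/volume.
    return '%s[0]' % name, '%s[1]' % name

top, extruded = extrude('Line{l1}', '2*Pi/3')
print(top, extruded)  # ex1[0] ex1[1]
```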
d824225d2bf54fce99ddcbfa0804dcc24c3a2b30
diff --git a/core/typechecker/src/main/java/org/overture/typechecker/visitor/TypeCheckerDefinitionVisitor.java b/core/typechecker/src/main/java/org/overture/typechecker/visitor/TypeCheckerDefinitionVisitor.java index <HASH>..<HASH> 100644 --- a/core/typechecker/src/main/java/org/overture/typechecker/visitor/TypeCheckerDefinitionVisitor.java +++ b/core/typechecker/src/main/java/org/overture/typechecker/visitor/TypeCheckerDefinitionVisitor.java @@ -35,6 +35,7 @@ import org.overture.ast.definitions.SClassDefinition; import org.overture.ast.definitions.traces.AApplyExpressionTraceCoreDefinition; import org.overture.ast.definitions.traces.ABracketedExpressionTraceCoreDefinition; import org.overture.ast.definitions.traces.ALetBeStBindingTraceDefinition; +import org.overture.ast.definitions.traces.ALetDefBindingTraceDefinition; import org.overture.ast.definitions.traces.ARepeatTraceDefinition; import org.overture.ast.definitions.traces.ATraceDefinitionTerm; import org.overture.ast.definitions.traces.PTraceDefinition; @@ -1511,6 +1512,23 @@ public class TypeCheckerDefinitionVisitor extends } @Override + public PType caseALetDefBindingTraceDefinition(ALetDefBindingTraceDefinition node, TypeCheckInfo question) throws AnalysisException + { + Environment local = question.env; + for (PDefinition d: node.getLocalDefs()) + { + PDefinitionAssistantTC.typeResolve(d, rootVisitor, question); + d.apply(rootVisitor, question); + local = new FlatCheckedEnvironment(d, local, question.scope); + } + + node.getBody().apply(rootVisitor, new TypeCheckInfo(local, question.scope)); + local.unusedCheck(question.env); + + return null; + } + + @Override public PType caseALetBeStBindingTraceDefinition( ALetBeStBindingTraceDefinition node, TypeCheckInfo question) throws AnalysisException {
Typecheck of LetDefBindingTraceDefinition was being completely neglected. This commit adds the typecheck.
overturetool_overture
train
38e2909a12712c4ecd01008f0ea6eb79cf569ed3
diff --git a/test/test_variant.py b/test/test_variant.py index <HASH>..<HASH> 100644 --- a/test/test_variant.py +++ b/test/test_variant.py @@ -151,6 +151,44 @@ def test_serialization(): reconstituted = Variant.from_json(serialized) eq_(original, reconstituted) +def test_deserialization_old_keywords(): + old_variant_representation_json = """ + { + "ref": "T", + "contig": "22", + "start": 23230319, + "__class__": { + "__name__": "Variant", + "__module__": "varcode.variant" + }, + "normalize_contig_name": true, + "alt": "G", + "allow_extended_nucleotides": false, + "ensembl": { + "__class__": { + "__name__": "EnsemblRelease", + "__module__": "pyensembl.ensembl_release" + }, + "release": 75, + "server": "ftp://ftp.ensembl.org", + "species": { + "__class__": { + "__name__": "Species", + "__module__": "pyensembl.species" + }, + "latin_name": "homo_sapiens" + } + } + } + """ + variant = Variant.from_json(old_variant_representation_json) + eq_(variant.contig, "22") + eq_(variant.ref, "T") + eq_(variant.alt, "G") + eq_(variant.reference_name, "GRCh37") + eq_(variant.normalize_contig_names, True) + eq_(variant.allow_extended_nucleotides, False) + def test_hg19_chromosome_names(): # trimming of mithochondrial name eq_(Variant("M", 1, "A", "G", "hg19", convert_ucsc_contig_names=True).contig, "MT") diff --git a/varcode/__init__.py b/varcode/__init__.py index <HASH>..<HASH> 100644 --- a/varcode/__init__.py +++ b/varcode/__init__.py @@ -31,11 +31,13 @@ __all__ = [ "Variant", "EffectCollection", "VariantCollection", + # effects "effect_priority", "top_priority_effect", "MutationEffect", "NonsilentCodingMutation", + # file loading "load_maf", "load_maf_dataframe", diff --git a/varcode/variant.py b/varcode/variant.py index <HASH>..<HASH> 100644 --- a/varcode/variant.py +++ b/varcode/variant.py @@ -54,6 +54,13 @@ class Variant(Serializable): "_gene_names", ) + # any keywords which are renamed or removed should be added to this + # dictionary to preserve the ability to deserialize old representations + # of Variant objects + _KEYWORD_ALIASES = { + "normalize_contig_name": "normalize_contig_names" + } + def __init__( self, contig,
added keyword aliases to Variant
openvax_varcode
train
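The commit above adds a `_KEYWORD_ALIASES` map so old serialized representations (using the renamed `normalize_contig_name` keyword) still deserialize. A self-contained sketch of the mechanism (`from_dict` is a simplified stand-in for the real Serializable machinery):

```python
class Serializable:
    _KEYWORD_ALIASES = {}

    @classmethod
    def from_dict(cls, data):
        # Rewrite old keyword names to their current equivalents
        # before constructing the object.
        kwargs = {cls._KEYWORD_ALIASES.get(k, k): v for k, v in data.items()}
        return cls(**kwargs)

class Variant(Serializable):
    _KEYWORD_ALIASES = {'normalize_contig_name': 'normalize_contig_names'}

    def __init__(self, contig, normalize_contig_names=True):
        self.contig = contig
        self.normalize_contig_names = normalize_contig_names

v = Variant.from_dict({'contig': '22', 'normalize_contig_name': False})
print(v.normalize_contig_names)  # False
```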
77bc344c5536aeaa4970fd2626f4ff22106ed841
diff --git a/de.tudarmstadt.ukp.statistics/src/main/java/de/tudarmstadt/ukp/dkpro/statistics/unitizing/UnitizingStudy.java b/de.tudarmstadt.ukp.statistics/src/main/java/de/tudarmstadt/ukp/dkpro/statistics/unitizing/UnitizingStudy.java index <HASH>..<HASH> 100644 --- a/de.tudarmstadt.ukp.statistics/src/main/java/de/tudarmstadt/ukp/dkpro/statistics/unitizing/UnitizingStudy.java +++ b/de.tudarmstadt.ukp.statistics/src/main/java/de/tudarmstadt/ukp/dkpro/statistics/unitizing/UnitizingStudy.java @@ -299,6 +299,8 @@ public class UnitizingStudy /** * Returns the length of the continuum. * + * Zero by default. + * * @return the length of the continuum */ public int getContinuumLength()
Remark on initial continuum length (=0)
dkpro_dkpro-statistics
train
35b08f1edfed0dc5205415d117b462ed99658fc5
diff --git a/ontrack-web/src/app/service/service.event.js b/ontrack-web/src/app/service/service.event.js index <HASH>..<HASH> 100644 --- a/ontrack-web/src/app/service/service.event.js +++ b/ontrack-web/src/app/service/service.event.js @@ -14,67 +14,74 @@ angular.module('ot.service.event', [ ); }; - function replacementFunction(event) { return function (match, expression) { + var entity; if (expression.charAt(0) == ':') { // We want a value var valueKey = expression.substring(1); var value = event.values[valueKey]; if (!value) { - return replacementValueError(event, valueKey); + return "#ERROR:" + valueKey; } // Rendering return $interpolate('<span class="ot-event-value">{{value.value}}</span>')({ valueKey: valueKey, value: value }); + } else if (expression == 'REF') { + if (event.ref) { + entity = event.entities[event.ref]; + if (!entity) { + return "#ERROR:REF:" + event.ref; + } else { + return renderEntity(event.ref, entity); + } + } else { + return "#ERROR:REF"; + } } else { // We want an entity reference - var entity = event.entities[expression]; + entity = event.entities[expression]; if (!entity) { - return replacementEntityError(event, expression); - } - // Link definition - var link = { - cls: '' - }; - if (expression == 'PROJECT') { - link.uri = "#/project/" + entity.id; - link.name = entity.name; - } - else if (expression == 'BRANCH') { - link.uri = "#/branch/" + entity.id; - link.name = entity.name; - } - else if (expression == 'BUILD') { - link.uri = "#/build/" + entity.id; - link.name = entity.name; - } - else if (expression == 'PROMOTION_LEVEL') { - link.uri = "#/promotionLevel/" + entity.id; - link.name = entity.name; + return "#ERROR:" + expression; } - else if (expression == 'VALIDATION_STAMP') { - link.uri = "#/validationStamp/" + entity.id; - link.name = entity.name; - } - else if (expression == 'VALIDATION_RUN') { - link.uri = "#/validationRun/" + entity.id; - link.name = '#' + entity.runOrder; - } - // Link rendering - return $interpolate('<a href="{{uri}}" class="{{cls}}">{{name}}</a>')(link); + return renderEntity(expression, entity); } }; } - function replacementEntityError (event, expression) { - return "#ERROR"; - } - - function replacementValueError (event, key) { - return "#ERROR"; + function renderEntity(expression, entity) { + // Link definition + var link = { + cls: '' + }; + if (expression == 'PROJECT') { + link.uri = "#/project/" + entity.id; + link.name = entity.name; + } + else if (expression == 'BRANCH') { + link.uri = "#/branch/" + entity.id; + link.name = entity.name; + } + else if (expression == 'BUILD') { + link.uri = "#/build/" + entity.id; + link.name = entity.name; + } + else if (expression == 'PROMOTION_LEVEL') { + link.uri = "#/promotionLevel/" + entity.id; + link.name = entity.name; + } + else if (expression == 'VALIDATION_STAMP') { + link.uri = "#/validationStamp/" + entity.id; + link.name = entity.name; + } + else if (expression == 'VALIDATION_RUN') { + link.uri = "#/validationRun/" + entity.id; + link.name = '#' + entity.runOrder; + } + // Link rendering + return $interpolate('<a href="{{uri}}" class="{{cls}}">{{name}}</a>')(link); } return self;
#<I> Property event at client side
nemerosa_ontrack
train
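The ontrack refactor above extracts entity-link rendering into a shared renderEntity helper and adds a REF expression that resolves indirectly through event.ref. A minimal Python sketch of that placeholder-resolution flow — the ${...} token syntax, dict shapes, and link format here are assumptions for illustration, not ontrack's actual template format:

import re

def render_entity(kind, entity):
    # Simplified link construction; ontrack varies the URI and label per kind.
    return f'<a href="#/{kind.lower()}/{entity["id"]}">{entity["name"]}</a>'

def render_event(template, event):
    def replace(match):
        expression = match.group(1)
        if expression.startswith(":"):              # value lookup
            value = event["values"].get(expression[1:])
            return value if value else f"#ERROR{expression}"
        if expression == "REF":                     # indirect entity lookup
            ref = event.get("ref")
            if not ref:
                return "#ERROR:REF"
            entity = event["entities"].get(ref)
            return render_entity(ref, entity) if entity else f"#ERROR:REF:{ref}"
        entity = event["entities"].get(expression)  # direct entity lookup
        return render_entity(expression, entity) if entity else f"#ERROR:{expression}"
    return re.sub(r"\$\{(.+?)\}", replace, template)

event = {"ref": "BRANCH", "values": {},
         "entities": {"BRANCH": {"id": 7, "name": "main"}}}
print(render_event("Deleted ${REF}", event))  # Deleted <a href="#/branch/7">main</a>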
1cd1a21666eae100f1b4041d8f17b0d56908ae94
diff --git a/spec/support/matchers/perform_queries.rb b/spec/support/matchers/perform_queries.rb index <HASH>..<HASH> 100644 --- a/spec/support/matchers/perform_queries.rb +++ b/spec/support/matchers/perform_queries.rb @@ -15,4 +15,8 @@ RSpec::Matchers.define :perform_queries do |expected| @counter.query_count end + + def supports_block_expectations? + true + end end
RSpec: Support block argument for #perform_queries
ledermann_rails-settings
train
a9d47ba0be0ee44b672b2eee6a4056b189e5f5ea
diff --git a/samples/detect-intent-sentiment.v2.js b/samples/detect-intent-sentiment.v2.js index <HASH>..<HASH> 100644 --- a/samples/detect-intent-sentiment.v2.js +++ b/samples/detect-intent-sentiment.v2.js @@ -72,9 +72,7 @@ async function main( ` Score: ${result.sentimentAnalysisResult.queryTextSentiment.score}` ); console.log( - ` Magnitude: ${ - result.sentimentAnalysisResult.queryTextSentiment.magnitude - }` + ` Magnitude: ${result.sentimentAnalysisResult.queryTextSentiment.magnitude}` ); } else { console.log(`No sentiment Analysis Found`); diff --git a/samples/detect.v2beta1.js b/samples/detect.v2beta1.js index <HASH>..<HASH> 100644 --- a/samples/detect.v2beta1.js +++ b/samples/detect.v2beta1.js @@ -309,9 +309,7 @@ async function detectIntentandSentiment( ` Score: ${result.sentimentAnalysisResult.queryTextSentiment.score}` ); console.log( - ` Magnitude: ${ - result.sentimentAnalysisResult.queryTextSentiment.magnitude - }` + ` Magnitude: ${result.sentimentAnalysisResult.queryTextSentiment.magnitude}` ); } else { console.log(`No sentiment Analysis Found`); diff --git a/synth.metadata b/synth.metadata index <HASH>..<HASH> 100644 --- a/synth.metadata +++ b/synth.metadata @@ -1,5 +1,5 @@ { - "updateTime": "2019-06-05T14:17:01.048307Z", + "updateTime": "2019-06-07T11:12:31.751887Z", "sources": [ { "generator": { @@ -12,8 +12,8 @@ "git": { "name": "googleapis", "remote": "https://github.com/googleapis/googleapis.git", - "sha": "47c142a7cecc6efc9f6f8af804b8be55392b795b", - "internalRef": "251635729" + "sha": "15fdbe57306e3a56069af5e2595e9b1bb33b6123", + "internalRef": "251960694" } }, {
refactor: changes formatting of various statements
googleapis_nodejs-dialogflow
train
67d7757e9f285539decc2b94e519fcfc576d4cc5
diff --git a/cmd/tusd/main.go b/cmd/tusd/main.go index <HASH>..<HASH> 100644 --- a/cmd/tusd/main.go +++ b/cmd/tusd/main.go @@ -89,7 +89,7 @@ func main() { handler, err := tusd.NewHandler(tusd.Config{ MaxSize: maxSize, - BasePath: "files/", + BasePath: basepath, DataStore: store, NotifyCompleteUploads: true, })
fix hardcoded basePath
tus_tusd
train
19c739962dcb41a27c01b85b7609cd8bd19290a5
diff --git a/lib/events/index.js b/lib/events/index.js index <HASH>..<HASH> 100644 --- a/lib/events/index.js +++ b/lib/events/index.js @@ -455,6 +455,14 @@ export default function (self) { if (self.contextMenu || self.input) { return; } + + // Cancel dragging action if user ventures outside grid + if (self.draggingItem && e.which === 0) { + self.stopFreezeMove(e); + self.mouseup(e); + return; + } + self.mouse = overridePos || self.getLayerPos(e); var ctrl = (e.ctrlKey || e.metaKey || self.attributes.persistantSelectionMode) &&
Cancel moving when cursor exits grid (#<I>, fixes #<I>) * fix stuck issue on Move-selection * Add explanatory comment for change
TonyGermaneri_canvas-datagrid
train
1d86c412d6b71e1779ea00d220e9335100625646
diff --git a/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java b/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java index <HASH>..<HASH> 100644 --- a/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java +++ b/karyon-core/src/main/java/com/netflix/karyon/server/KaryonServer.java @@ -162,6 +162,8 @@ public class KaryonServer implements Closeable { } PhaseInterceptorRegistry.notifyInterceptors(InitializationPhaseInterceptor.Phase.InitBootstrap); + + serverBootstrap.initialize(); } /** diff --git a/karyon-core/src/main/java/com/netflix/karyon/server/ServerBootstrap.java b/karyon-core/src/main/java/com/netflix/karyon/server/ServerBootstrap.java index <HASH>..<HASH> 100644 --- a/karyon-core/src/main/java/com/netflix/karyon/server/ServerBootstrap.java +++ b/karyon-core/src/main/java/com/netflix/karyon/server/ServerBootstrap.java @@ -98,11 +98,11 @@ public class ServerBootstrap { private static final Logger logger = LoggerFactory.getLogger(ServerBootstrap.class); - private final Set<String> allBasePackages; - private final ClasspathScanner classpathScanner; + private Set<String> allBasePackages; + private ClasspathScanner classpathScanner; - protected ServerBootstrap() { - allBasePackages = readBasePackages(); + void initialize() { + readBasePackages(); List<Class<? extends Annotation>> annotations = Lists.newArrayList(); annotations.add(Application.class); @@ -249,7 +249,7 @@ public class ServerBootstrap { return classpathScanner; } - private Set<String> readBasePackages() { + private void readBasePackages() { Set<String> _allBasePackages = new HashSet<String>(); _allBasePackages.add("com.netflix"); @@ -258,7 +258,7 @@ public class ServerBootstrap { _allBasePackages.addAll(basePackages); } - return _allBasePackages; + allBasePackages = _allBasePackages; } protected class KaryonBootstrapModule implements BootstrapModule {
reverted from constructor to initialize method per Nitesh's suggestion
Netflix_karyon
train
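The karyon commit above moves classpath scanning out of ServerBootstrap's constructor into an explicit initialize() that KaryonServer calls only after the bootstrap-phase interceptors have fired. A small Python sketch of that two-phase construction pattern (class and method names are hypothetical stand-ins):

class ServerBootstrap:
    def __init__(self):
        # Keep construction trivial so subclasses can be created before
        # any expensive work (classpath scanning, I/O) happens.
        self.base_packages = None

    def initialize(self):
        # Deferred, explicit initialization, safe to run after the
        # server has notified its bootstrap interceptors.
        self.base_packages = {"com.netflix"}

class KaryonServer:
    def __init__(self, bootstrap):
        self.bootstrap = bootstrap

    def start(self):
        self.notify_interceptors("InitBootstrap")  # interceptors run first
        self.bootstrap.initialize()                # then the heavy lifting

    def notify_interceptors(self, phase):
        print(f"phase: {phase}")

KaryonServer(ServerBootstrap()).start()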
9ec35b74ec5109a3fc7d4398c91aa82028819b3b
diff --git a/packages/mdc-icon-toggle/foundation.js b/packages/mdc-icon-toggle/foundation.js index <HASH>..<HASH> 100644 --- a/packages/mdc-icon-toggle/foundation.js +++ b/packages/mdc-icon-toggle/foundation.js @@ -89,6 +89,7 @@ class MDCIconToggleFoundation extends MDCFoundation { init() { this.refreshToggleData(); + this.savedTabIndex_ = this.adapter_.getTabIndex(); this.adapter_.registerInteractionHandler('click', this.clickHandler_); this.adapter_.registerInteractionHandler('keydown', this.keydownHandler_); this.adapter_.registerInteractionHandler('keyup', this.keyupHandler_); diff --git a/test/unit/mdc-icon-toggle/mdc-icon-toggle.test.js b/test/unit/mdc-icon-toggle/mdc-icon-toggle.test.js index <HASH>..<HASH> 100644 --- a/test/unit/mdc-icon-toggle/mdc-icon-toggle.test.js +++ b/test/unit/mdc-icon-toggle/mdc-icon-toggle.test.js @@ -25,7 +25,7 @@ import {MDCIconToggle, MDCIconToggleFoundation} from '../../../packages/mdc-icon import {MDCRipple} from '../../../packages/mdc-ripple'; import {cssClasses} from '../../../packages/mdc-ripple/constants'; -function setupTest({useInnerIconElement = false} = {}) { +function setupTest({tabIndex = undefined, useInnerIconElement = false} = {}) { const root = document.createElement(useInnerIconElement ? 'span' : 'i'); if (useInnerIconElement) { const icon = document.createElement('i'); @@ -33,6 +33,9 @@ function setupTest({useInnerIconElement = false} = {}) { root.dataset.iconInnerSelector = `#${icon.id}`; root.appendChild(icon); } + if (tabIndex !== undefined) { + root.tabIndex = tabIndex; + } const component = new MDCIconToggle(root); return {root, component}; } @@ -74,17 +77,40 @@ test('set/get on', () => { assert.equal(root.getAttribute('aria-pressed'), 'false'); }); -test('set/get disabled', () => { - const {root, component} = setupTest(); +test('set/get disabled to true', () => { + const {root, component} = setupTest({tabIndex: 0}); + component.disabled = true; assert.isOk(component.disabled); assert.equal(root.getAttribute('aria-disabled'), 'true'); assert.isOk(root.classList.contains(MDCIconToggleFoundation.cssClasses.DISABLED)); + assert.equal(root.tabIndex, -1); +}); + +test('set/get disabled to false', () => { + const {root, component} = setupTest({tabIndex: 0}); component.disabled = false; assert.isNotOk(component.disabled); assert.isNotOk(root.hasAttribute('aria-disabled')); assert.isNotOk(root.classList.contains(MDCIconToggleFoundation.cssClasses.DISABLED)); + assert.equal(root.tabIndex, 0, 'element\'s tabIndex should be the same value it already had'); +}); + +test('set/get disabled to true, then false', () => { + const {root, component} = setupTest({tabIndex: 0}); + + component.disabled = true; + assert.isOk(component.disabled); + assert.equal(root.getAttribute('aria-disabled'), 'true'); + assert.isOk(root.classList.contains(MDCIconToggleFoundation.cssClasses.DISABLED)); + assert.equal(root.tabIndex, -1); + + component.disabled = false; + assert.isNotOk(component.disabled); + assert.isNotOk(root.hasAttribute('aria-disabled')); + assert.isNotOk(root.classList.contains(MDCIconToggleFoundation.cssClasses.DISABLED)); + assert.equal(root.tabIndex, 0, 'element\'s tabIndex should be the same value it originally had'); }); test('#refreshToggleData proxies to foundation.refreshToggleData()', () => { @@ -174,14 +200,12 @@ test('#adapter.setText sets the text content of the inner icon element when used }); test('#adapter.getTabIndex returns the tabIndex of the element', () => { - const {root, component} = setupTest(); - root.tabIndex = 4; + const 
{component} = setupTest({tabIndex: 4}); assert.equal(component.getDefaultFoundation().adapter_.getTabIndex(), 4); }); test('#adapter.setTabIndex sets the tabIndex of the element', () => { - const {root, component} = setupTest(); - root.tabIndex = 4; + const {root, component} = setupTest({tabIndex: 4}); component.getDefaultFoundation().adapter_.setTabIndex(2); assert.equal(root.tabIndex, 2); });
fix(icon-toggle): Don't nuke tabindex if initializing disabled to false (#<I>)
material-components_material-components-web
train
a937b29e105026392a40c23fb3ebdc940d4e96db
diff --git a/setup.py b/setup.py index <HASH>..<HASH> 100644 --- a/setup.py +++ b/setup.py @@ -65,7 +65,16 @@ class TestCommand(Command): import django if django.VERSION[:2] >= (1, 7): django.setup() - call_command('test', 'bakery') + + # With Django 1.6, the way tests were discovered changed (see + # https://docs.djangoproject.com/en/1.7/releases/1.6/#new-test-runner) + # Set the argument to the test management command appropriately + # depending on the Django version + test_module = 'bakery.tests' + if django.VERSION[:2] < (1, 6): + test_module = 'bakery' + + call_command('test', test_module) setup( @@ -98,4 +107,4 @@ setup( 'boto>=2.28', ], cmdclass={'test': TestCommand} -) \ No newline at end of file +)
Discover tests in Django <I> Django's test runner changed in Django <I>, and the way tests were discovered changed as well. Consequently `call_command('test', 'bakery')` won't work for Django >= <I>. Set the test module name to `bakery.tests` in more recent versions of Django so the tests will get discovered. Addresses <URL>
datadesk_django-bakery
train
7db3f67f9bd60b0e76c4978f120ca53e4e248918
diff --git a/simuvex/plugins/symbolic_memory.py b/simuvex/plugins/symbolic_memory.py index <HASH>..<HASH> 100644 --- a/simuvex/plugins/symbolic_memory.py +++ b/simuvex/plugins/symbolic_memory.py @@ -256,6 +256,11 @@ class SimSymbolicMemory(SimMemory): #pylint:disable=abstract-method if len(missing) > 0: name = "%s_%x" % (self.id, addr) b = self.state.se.Unconstrained(name, num_bytes*8) + if self.id == 'reg' and self.state.arch.register_endness == 'Iend_LE': + b = b.reversed + if self.id == 'mem' and self.state.arch.memory_endness == 'Iend_LE': + b = b.reversed + self.state.log.add_event('uninitialized', memory_id=self.id, addr=addr, size=num_bytes) default_mo = SimMemoryObject(b, addr) for m in missing:
pre-reverse ASTs for little-endian memory and registers
angr_angr
train
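The simuvex patch above pre-reverses freshly created unconstrained bitvectors when the architecture stores memory or registers little-endian, so later reads see the bytes in the expected order. A concrete Python illustration of the underlying byte-order issue, using plain integers in place of symbolic ASTs:

def store_le(value: int, num_bytes: int) -> bytes:
    # Little-endian storage keeps the least significant byte first.
    return value.to_bytes(num_bytes, "little")

def load_be(raw: bytes) -> int:
    # A reader that assumes big-endian order sees a byte-swapped value...
    return int.from_bytes(raw, "big")

raw = store_le(0x11223344, 4)
assert load_be(raw) == 0x44332211        # bytes appear reversed
assert load_be(raw[::-1]) == 0x11223344  # reversing once restores the value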
8a4d49500031b3528836e01d5486246ba0149b86
diff --git a/app/mixins/slmodal.js b/app/mixins/slmodal.js index <HASH>..<HASH> 100755 --- a/app/mixins/slmodal.js +++ b/app/mixins/slmodal.js @@ -6,7 +6,7 @@ export default Ember.Mixin.create({ classNames: [ 'modal', 'fade' ], - attributeBindings: [ 'aria-hidden', 'tabindex', 'role' ], + attributeBindings: [ 'aria-hidden', 'tabindex', 'role', 'aria-labelledby', 'aria-describedby' ], /** * `aria-hidden` attribute to inform assistive technologies to skip the modal's DOM elements @@ -33,6 +33,23 @@ export default Ember.Mixin.create({ role: 'dialog', /** + * `aria-labelledby` attribute value + * + * @property {string} aria-labelledby + */ + 'aria-labelledby': function() { + return 'modalTitle-' + Math.random(); + }.property(), + + /** + * `aria-describedby` attribute value + * + * @property {string} aria-describedby + * @default null + */ + 'aria-describedby': null, + + /** * Overridable method stub * * Triggered by Twitter Bootstrap 3 modal's `show.bs.modal` event
Added support for the aria-labelledby and aria-describedby attributes
softlayer_sl-ember-components
train
e3dda9a68d848c80e8221b3dc124229c8093d9eb
diff --git a/salt/runner.py b/salt/runner.py index <HASH>..<HASH> 100644 --- a/salt/runner.py +++ b/salt/runner.py @@ -42,7 +42,6 @@ class RunnerClient(object): Execute a runner with the given arguments ''' self._verify_fun(fun) - # pylint: disable-msg=W0142 return self.functions[fun](*arg) def low(self, fun, low):
`W<I>` is now disabled in `.pylintrc`; no need to disable it in-line.
saltstack_salt
train
f25306d7eee0d9b028d9b20d4b53ceeff5f88707
diff --git a/tests/Tasks/CleanupTest.php b/tests/Tasks/CleanupTest.php index <HASH>..<HASH> 100644 --- a/tests/Tasks/CleanupTest.php +++ b/tests/Tasks/CleanupTest.php @@ -30,11 +30,15 @@ class CleanupTest extends RocketeerTestCase }); }); + ob_start(); + $this->assertTaskOutput('Cleanup', 'Removing <info>2 releases</info> from the server', $this->getCommand(array(), array( 'clean-all' => true, 'verbose' => true, 'pretend' => false, ))); + + ob_end_clean(); } public function testPrintsMessageIfNoCleanup()
Suppress output during the tests This should solve the issue where blank lines are dumped while the test suite is running.
rocketeers_rocketeer
train
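The Rocketeer fix wraps the assertion in ob_start()/ob_end_clean() so the command's console output is swallowed instead of polluting the test run. The Python counterpart captures stdout around the call; here is a sketch with a hypothetical noisy task under test:

import contextlib
import io
import unittest

def run_cleanup():
    print("Removing 2 releases from the server")  # noisy task output
    return 2

class CleanupTest(unittest.TestCase):
    def test_cleanup_is_silent(self):
        buffer = io.StringIO()
        with contextlib.redirect_stdout(buffer):  # capture instead of printing
            removed = run_cleanup()
        self.assertEqual(removed, 2)
        self.assertIn("2 releases", buffer.getvalue())

if __name__ == "__main__":
    unittest.main()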
90a6aa7ec0937c068b8d8ae5dd3900685dbb8990
diff --git a/src/Composer/Command/ShowCommand.php b/src/Composer/Command/ShowCommand.php index <HASH>..<HASH> 100644 --- a/src/Composer/Command/ShowCommand.php +++ b/src/Composer/Command/ShowCommand.php @@ -1227,7 +1227,12 @@ EOT $targetVersion = '^' . $package->getVersion(); } - return $versionSelector->findBestCandidate($name, $targetVersion, $bestStability); + $candidate = $versionSelector->findBestCandidate($name, $targetVersion, $bestStability); + while ($candidate instanceof AliasPackage) { + $candidate = $candidate->getAliasOf(); + } + + return $candidate; } private function getRepositorySet(Composer $composer)
Make sure we compare packages to their latest without aliases, fixes #<I>
composer_composer
train
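The Composer change walks an AliasPackage chain with a while loop until it reaches the underlying package, since an alias can itself wrap another alias. The same unwrapping idiom in Python (classes are hypothetical stand-ins for Composer's):

class Package:
    def __init__(self, name, version):
        self.name, self.version = name, version

class AliasPackage:
    def __init__(self, alias_of, alias_version):
        self.alias_of, self.alias_version = alias_of, alias_version

def unwrap(candidate):
    # Keep unwrapping: aliases may be nested.
    while isinstance(candidate, AliasPackage):
        candidate = candidate.alias_of
    return candidate

real = Package("vendor/lib", "1.2.3")
wrapped = AliasPackage(AliasPackage(real, "1.2.x-dev"), "dev-main")
assert unwrap(wrapped) is real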
e008fddad45c6c721237ffbd5d2e2bd977fc2ee2
diff --git a/lib/migration_comments/active_record/connection_adapters/abstract_adapter.rb b/lib/migration_comments/active_record/connection_adapters/abstract_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/migration_comments/active_record/connection_adapters/abstract_adapter.rb +++ b/lib/migration_comments/active_record/connection_adapters/abstract_adapter.rb @@ -19,10 +19,16 @@ module MigrationComments::ActiveRecord::ConnectionAdapters false end + # SQLite style - embedded comments def inline_comments? false end + # PostgreSQL style - comment specific commands + def independent_comments? + false + end + # Remove a comment on a table (if set) def remove_table_comment(table_name) set_table_comment(table_name, nil) diff --git a/lib/migration_comments/active_record/connection_adapters/abstract_adapter/schema_creation.rb b/lib/migration_comments/active_record/connection_adapters/abstract_adapter/schema_creation.rb index <HASH>..<HASH> 100644 --- a/lib/migration_comments/active_record/connection_adapters/abstract_adapter/schema_creation.rb +++ b/lib/migration_comments/active_record/connection_adapters/abstract_adapter/schema_creation.rb @@ -3,6 +3,7 @@ module MigrationComments::ActiveRecord::ConnectionAdapters::AbstractAdapter def self.included(base) base.class_eval do alias_method_chain :column_options, :migration_comments + alias_method_chain :add_column_options!, :migration_comments alias_method_chain :visit_TableDefinition, :migration_comments alias_method_chain :visit_ColumnDefinition, :migration_comments end @@ -14,6 +15,14 @@ module MigrationComments::ActiveRecord::ConnectionAdapters::AbstractAdapter column_options end + def add_column_options_with_migration_comments!(sql, options) + sql = add_column_options_without_migration_comments!(sql, options) + if options.keys.include?(:comment) && [email protected]_comments? + sql << MigrationComments::ActiveRecord::ConnectionAdapters::CommentDefinition.new(@conn, nil, nil, options[:comment]).to_sql + end + sql + end + def visit_TableDefinition_with_migration_comments(o) if @conn.inline_comments? create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE " diff --git a/lib/migration_comments/active_record/connection_adapters/mysql_adapter.rb b/lib/migration_comments/active_record/connection_adapters/mysql_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/migration_comments/active_record/connection_adapters/mysql_adapter.rb +++ b/lib/migration_comments/active_record/connection_adapters/mysql_adapter.rb @@ -61,10 +61,14 @@ module MigrationComments::ActiveRecord::ConnectionAdapters def add_column_options!(sql, options) super(sql, options) if options.keys.include?(:comment) - sql << " COMMENT #{escaped_comment(options[:comment])}" + sql << CommentDefinition.new(self, nil, nil, options[:comment]).to_sql end end + def comment_sql(comment_definition) + " COMMENT #{escaped_comment(comment_definition.comment_text)}" + end + def execute_comment(comment_definition) if comment_definition.table_comment? set_table_comment comment_definition.table_name, comment_definition.comment_text diff --git a/lib/migration_comments/active_record/connection_adapters/postgresql_adapter.rb b/lib/migration_comments/active_record/connection_adapters/postgresql_adapter.rb index <HASH>..<HASH> 100644 --- a/lib/migration_comments/active_record/connection_adapters/postgresql_adapter.rb +++ b/lib/migration_comments/active_record/connection_adapters/postgresql_adapter.rb @@ -12,6 +12,10 @@ module MigrationComments::ActiveRecord::ConnectionAdapters true end + def independent_comments? 
+ true + end + # Set a comment on a table def set_table_comment(table_name, comment_text) execute CommentDefinition.new(self, table_name, nil, comment_text).to_sql
override column options injector method for SchemaCreation introduced in Rails <I>
pinnymz_migration_comments
train
aad4c61b9e1407c8f0a16863e605fcd9639a78bf
diff --git a/merb-gen/lib/generators/templates/application/merb/config/environments/development.rb b/merb-gen/lib/generators/templates/application/merb/config/environments/development.rb index <HASH>..<HASH> 100644 --- a/merb-gen/lib/generators/templates/application/merb/config/environments/development.rb +++ b/merb-gen/lib/generators/templates/application/merb/config/environments/development.rb @@ -4,7 +4,12 @@ Merb::Config.use { |c| c[:reload_templates] = true c[:reload_classes] = true c[:reload_time] = 0.5 - c[:log_auto_flush ] = true c[:ignore_tampered_cookies] = true + c[:log_auto_flush ] = true c[:log_level] = :debug -} \ No newline at end of file + + c[:log_stream] = STDOUT + c[:log_file] = nil + # Or redirect logging into a file: + # c[:log_file] = Merb.root / "log" / "development.log" +} diff --git a/merb-gen/lib/generators/templates/application/merb/config/environments/production.rb b/merb-gen/lib/generators/templates/application/merb/config/environments/production.rb index <HASH>..<HASH> 100644 --- a/merb-gen/lib/generators/templates/application/merb/config/environments/production.rb +++ b/merb-gen/lib/generators/templates/application/merb/config/environments/production.rb @@ -3,5 +3,8 @@ Merb::Config.use { |c| c[:exception_details] = false c[:reload_classes] = false c[:log_level] = :error - c[:log_file] = Merb.log_path + "/production.log" -} \ No newline at end of file + + c[:log_file] = Merb.root / "log" / "production.log" + # or redirect logger using IO handle + # c[:log_stream] = STDOUT +} diff --git a/merb-gen/lib/generators/templates/application/merb/config/environments/rake.rb b/merb-gen/lib/generators/templates/application/merb/config/environments/rake.rb index <HASH>..<HASH> 100644 --- a/merb-gen/lib/generators/templates/application/merb/config/environments/rake.rb +++ b/merb-gen/lib/generators/templates/application/merb/config/environments/rake.rb @@ -3,5 +3,9 @@ Merb::Config.use { |c| c[:exception_details] = true c[:reload_classes] = false c[:log_auto_flush ] = true - c[:log_file] = Merb.log_path / 'merb_rake.log' -} \ No newline at end of file + + c[:log_stream] = STDOUT + c[:log_file] = nil + # Or redirect logging into a file: + # c[:log_file] = Merb.root / "log" / "development.log" +} diff --git a/merb-gen/lib/generators/templates/application/merb/config/environments/staging.rb b/merb-gen/lib/generators/templates/application/merb/config/environments/staging.rb index <HASH>..<HASH> 100644 --- a/merb-gen/lib/generators/templates/application/merb/config/environments/staging.rb +++ b/merb-gen/lib/generators/templates/application/merb/config/environments/staging.rb @@ -1,7 +1,10 @@ Merb.logger.info("Loaded STAGING Environment...") Merb::Config.use { |c| c[:exception_details] = false - c[:reload_classes] = false - c[:log_level] = :error - c[:log_file] = Merb.log_path + "/staging.log" -} \ No newline at end of file + c[:reload_classes] = false + c[:log_level] = :error + + c[:log_file] = Merb.root / "log" / "staging.log" + # or redirect logger using IO handle + # c[:log_stream] = STDOUT +} diff --git a/merb-gen/lib/generators/templates/application/merb/config/environments/test.rb b/merb-gen/lib/generators/templates/application/merb/config/environments/test.rb index <HASH>..<HASH> 100644 --- a/merb-gen/lib/generators/templates/application/merb/config/environments/test.rb +++ b/merb-gen/lib/generators/templates/application/merb/config/environments/test.rb @@ -1,7 +1,12 @@ Merb.logger.info("Loaded TEST Environment...") Merb::Config.use { |c| - c[:testing] = true + c[:testing] 
= true c[:exception_details] = true - c[:log_auto_flush ] = true - c[:log_level] = :fatal + c[:log_auto_flush ] = true + # log less in testing environment + c[:log_level] = :error + + c[:log_file] = Merb.root / "log" / "test.log" + # or redirect logger using IO handle + # c[:log_stream] = STDOUT }
Add notes about :log_stream and :log_file options.
wycats_merb
train
4470415807b08151a1b0667348c2a6b9b4692701
diff --git a/nanocomp/NanoComp.py b/nanocomp/NanoComp.py index <HASH>..<HASH> 100644 --- a/nanocomp/NanoComp.py +++ b/nanocomp/NanoComp.py @@ -174,10 +174,10 @@ def validate_split_runs_file(split_runs_file): if content[0].upper().split('\t') == ['NAME', 'RUN_ID']: return {c.split('\t')[1]: c.split('\t')[0] for c in content[1:] if c} else: - sysexit("ERROR: Mandatory header of --split_runs tsv file not found: 'NAME', 'RUN_ID'") + sys.exit("ERROR: Mandatory header of --split_runs tsv file not found: 'NAME', 'RUN_ID'") logging.error("Mandatory header of --split_runs tsv file not found: 'NAME', 'RUN_ID'") except IndexError: - sysexit("ERROR: Format of --split_runs tab separated file not as expected") + sys.exit("ERROR: Format of --split_runs tab separated file not as expected") logging.error("ERROR: Format of --split_runs tab separated file not as expected")
fixed undefined name sysexit
wdecoster_nanocomp
train
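The NanoComp fix corrects the undefined sysexit to sys.exit; note that the logging.error calls placed after sys.exit in the patched code remain unreachable, since sys.exit raises SystemExit immediately. The conventional ordering logs first, then exits — a short sketch:

import logging
import sys

def validate_header(header):
    if header != ["NAME", "RUN_ID"]:
        # Log first: nothing after sys.exit ever runs.
        logging.error("Mandatory header of --split_runs tsv file not found")
        sys.exit("ERROR: Mandatory header of --split_runs tsv file not found")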
8a1bdcd4b9fa2b8993409141fa8ee8b887cff6d6
diff --git a/js/admin/src/main.js b/js/admin/src/main.js index <HASH>..<HASH> 100644 --- a/js/admin/src/main.js +++ b/js/admin/src/main.js @@ -6,7 +6,7 @@ app.initializers.add('approval', () => { extend(PermissionGrid.prototype, 'replyItems', items => { items.add('replyWithoutApproval', { icon: 'check', - label: 'Reply without approval', + label: app.translator.trans('flarum-approval.admin.permissions.reply_without_approval_label'), permission: 'discussion.replyWithoutApproval' }, 95); }); @@ -14,7 +14,7 @@ app.initializers.add('approval', () => { extend(PermissionGrid.prototype, 'moderateItems', items => { items.add('approvePosts', { icon: 'check', - label: 'Approve posts', + label: app.translator.trans('flarum-approval.admin.permissions.approve_posts_label'), permission: 'discussion.approvePosts' }, 65); }); diff --git a/js/forum/src/main.js b/js/forum/src/main.js index <HASH>..<HASH> 100644 --- a/js/forum/src/main.js +++ b/js/forum/src/main.js @@ -27,13 +27,13 @@ app.initializers.add('flarum-approval', () => { extend(CommentPost.prototype, 'headerItems', function(items) { if (!this.props.post.isApproved() && !this.props.post.isHidden()) { - items.add('unapproved', 'Awaiting Approval'); + items.add('unapproved', app.translator.trans('flarum-approval.forum.post.awaiting_approval_text')); } }); override(CommentPost.prototype, 'flagReason', function(original, flag) { if (flag.type() === 'approval') { - return 'Awaiting approval'; + return app.translator.trans('flarum-approval.forum.post.awaiting_approval_text'); } return original(flag); @@ -43,7 +43,7 @@ app.initializers.add('flarum-approval', () => { if (!post.isApproved() && post.canApprove()) { items.add('approve', <Button icon="check" onclick={PostControls.approveAction.bind(post)}> - Approve + {app.translator.trans('flarum-approval.forum.post_controls.approve_button')} </Button>, 10 );
Extract translations - Adds app.translator calls for forum and admin interfaces. - Locale file to follow later.
flarum_approval
train
be70dae33751ddc3e0ae5a55b5cdbf2002a42932
diff --git a/samples/downloadEncryptedFile.js b/samples/downloadEncryptedFile.js index <HASH>..<HASH> 100644 --- a/samples/downloadEncryptedFile.js +++ b/samples/downloadEncryptedFile.js @@ -56,7 +56,7 @@ function main( destination: destFileName, }; - // Descrypts and downloads the file. This can only be done with the key used + // Decrypts and downloads the file. This can only be done with the key used // to encrypt and upload the file. await storage .bucket(bucketName) diff --git a/samples/fileSetMetadata.js b/samples/fileSetMetadata.js index <HASH>..<HASH> 100644 --- a/samples/fileSetMetadata.js +++ b/samples/fileSetMetadata.js @@ -42,7 +42,7 @@ function main(bucketName = 'my-bucket', fileName = 'file.txt') { .bucket(bucketName) .file(fileName) .setMetadata({ - // Predefinded metadata for server e.g. 'cacheControl', 'contentDisposition', + // Predefined metadata for server e.g. 'cacheControl', 'contentDisposition', // 'contentEncoding', 'contentLanguage', 'contentType' contentDisposition: 'attachment; filename*=utf-8\'\'"anotherImage.jpg"', contentType: 'image/jpeg', diff --git a/samples/system-test/iam.test.js b/samples/system-test/iam.test.js index <HASH>..<HASH> 100644 --- a/samples/system-test/iam.test.js +++ b/samples/system-test/iam.test.js @@ -36,7 +36,7 @@ const expression = before(async () => { await bucket.create(); - // UniformBucketLevelAccess must be enabled to add a condiitonal binding. + // UniformBucketLevelAccess must be enabled to add a conditional binding. await bucket.setMetadata({ iamConfiguration: { uniformBucketLevelAccess: { diff --git a/samples/viewBucketIamMembers.js b/samples/viewBucketIamMembers.js index <HASH>..<HASH> 100644 --- a/samples/viewBucketIamMembers.js +++ b/samples/viewBucketIamMembers.js @@ -47,7 +47,7 @@ function main(bucketName = 'my-bucket') { const condition = binding.condition; if (condition) { - console.log(' Condiiton:'); + console.log(' Condition:'); console.log(` Title: ${condition.title}`); console.log(` Description: ${condition.description}`); console.log(` Expression: ${condition.expression}`); diff --git a/src/iam.ts b/src/iam.ts index <HASH>..<HASH> 100644 --- a/src/iam.ts +++ b/src/iam.ts @@ -288,7 +288,7 @@ class Iam { * @throws {Error} If no policy is provided. * * @param {Policy} policy The policy. - * @param {SetPolicyOptions} [options] Configuration opbject. + * @param {SetPolicyOptions} [options] Configuration options. * @param {SetPolicyCallback} callback Callback function. * @returns {Promise<SetPolicyResponse>} * diff --git a/src/storage.ts b/src/storage.ts index <HASH>..<HASH> 100644 --- a/src/storage.ts +++ b/src/storage.ts @@ -1085,7 +1085,7 @@ export class Storage extends Service { /** * @typedef {array} GetBucketsResponse * @property {Bucket[]} 0 Array of {@link Bucket} instances. - * @property {objcet} 1 nextQuery A query object to receive more results. + * @property {object} 1 nextQuery A query object to receive more results. * @property {object} 2 The full API response. */ /** @@ -1413,7 +1413,7 @@ export class Storage extends Service { * supply the project's ID as `projectId` in the `options` argument. * * @param {string} accessId The HMAC key's access ID. - * @param {HmacKeyOptions} options HmacKey constructor owptions. + * @param {HmacKeyOptions} options HmacKey constructor options. * @returns {HmacKey} * @see HmacKey *
fix: fixed typo (#<I>)
googleapis_nodejs-storage
train
e1ad642da0030b38b4e3e9640d3db2fa13e36a6c
diff --git a/pkg/util/procfs/procfs.go b/pkg/util/procfs/procfs.go index <HASH>..<HASH> 100644 --- a/pkg/util/procfs/procfs.go +++ b/pkg/util/procfs/procfs.go @@ -65,6 +65,9 @@ func (pfs *ProcFS) GetFullContainerName(pid int) (string, error) { func PidOf(name string) []int { pids := []int{} filepath.Walk("/proc", func(path string, info os.FileInfo, err error) error { + if err != nil { + return err + } base := filepath.Base(path) // Traverse only the directories we are interested in if info.IsDir() && path != "/proc" {
Fix intermittent failures in TestPidOf {procfs} Bail out if WalkFunc is called with an error. Fixes #<I>
kubernetes_kubernetes
train
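The Kubernetes patch makes the filepath.Walk callback return as soon as it is handed an error, instead of proceeding to dereference a possibly-nil info. Python's os.walk has the analogous pitfall — read errors are silently ignored unless an onerror callback is supplied. A loose sketch of the idea, not a faithful port of PidOf:

import os

def list_proc_pids():
    # Collect numeric /proc entries while recording, rather than
    # swallowing, any directory-read errors along the way.
    errors, pids = [], []
    for root, dirs, files in os.walk("/proc", onerror=errors.append):
        base = os.path.basename(root)
        if base.isdigit():
            pids.append(int(base))
            dirs[:] = []  # no need to descend into a process directory
    return pids, errors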
fa67cf71b1a47182d7a0a3a883a8209928f0fd6f
diff --git a/omnic/__init__.py b/omnic/__init__.py index <HASH>..<HASH> 100644 --- a/omnic/__init__.py +++ b/omnic/__init__.py @@ -7,3 +7,5 @@ from omnic.utils.singleton import SingletonManager singletons = SingletonManager() # Ensure settings, at least, gets registered +from omnic import config +singletons.register('settings', config.Settings) diff --git a/omnic/config.py b/omnic/config.py index <HASH>..<HASH> 100644 --- a/omnic/config.py +++ b/omnic/config.py @@ -2,7 +2,6 @@ import os import importlib from omnic import default_settings -from omnic import singletons class Settings: @@ -42,32 +41,3 @@ class Settings: pass raise AttributeError('Invalid settings: "%s"' % key) - - -singletons.register('settings', Settings) - - -def load_settings(): - from omnic import default_settings - custom_settings_path = os.environ.get('OMNIC_SETTINGS') - if custom_settings_path: - # TODO import here - pass - return default_settings - - -def override_settings(new_settings): - global settings - old_settings = settings - settings = new_settings - return old_settings - - -def get_settings(): - global settings - if not settings: - settings = load_settings() - return settings - - -settings = load_settings() diff --git a/omnic/conversion/converter.py b/omnic/conversion/converter.py index <HASH>..<HASH> 100644 --- a/omnic/conversion/converter.py +++ b/omnic/conversion/converter.py @@ -13,9 +13,6 @@ class Converter: def __init__(self): self.config = singletons.settings - def configure(self, config): - pass - class ExecConverter(Converter): def get_arguments(self, resource): diff --git a/omnic/conversion/utils.py b/omnic/conversion/utils.py index <HASH>..<HASH> 100644 --- a/omnic/conversion/utils.py +++ b/omnic/conversion/utils.py @@ -15,7 +15,6 @@ async def convert_local(path, to_type): ''' Given an absolute path to a local file, convert to a given to_type ''' - settings = singletons.settings # Now find path between types typed_foreign_res = TypedLocalResource(path) original_ts = typed_foreign_res.typestring @@ -25,7 +24,7 @@ async def convert_local(path, to_type): # Loop through each step in graph path and convert for is_first, is_last, path_step in first_last_iterator(conversion_path): converter_class, from_ts, to_ts = path_step - converter = converter_class(settings) + converter = converter_class() in_resource = TypedLocalResource(path, from_ts) if is_first: # Ensure first resource is just the source one in_resource = typed_foreign_res @@ -41,7 +40,6 @@ def enqueue_conversion_path(url_string, to_type, enqueue_convert): Given a URL string that has already been downloaded, enqueue necessary conversion to get to target type ''' - settings = singletons.settings target_ts = TypeString(to_type) foreign_res = ForeignResource(url_string) @@ -59,7 +57,7 @@ def enqueue_conversion_path(url_string, to_type, enqueue_convert): # Loop through each step in graph path and convert is_first = True for converter_class, from_ts, to_ts in path: - converter = converter_class(settings) + converter = converter_class() in_resource = TypedResource(url_string, from_ts) if is_first: # Ensure first resource is just the source one in_resource = TypedForeignResource(url_string, from_ts)
cleaning up imports of config properly
michaelpb_omnic
train
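The omnic change registers the Settings class with the SingletonManager at import time and deletes the ad-hoc module-level loader functions. A minimal sketch of such a registry, assuming the semantics are register-a-class, instantiate lazily on first attribute access:

class SingletonManager:
    def __init__(self):
        self._classes = {}
        self._instances = {}

    def register(self, name, cls):
        self._classes[name] = cls

    def __getattr__(self, name):
        # Instantiate on first access, then reuse the cached instance.
        if name not in self._instances:
            self._instances[name] = self._classes[name]()
        return self._instances[name]

class Settings:
    DEBUG = False

singletons = SingletonManager()
singletons.register("settings", Settings)
assert singletons.settings is singletons.settings  # one cached instance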
53250552fe22980a5e343d9df687606bf6faef30
diff --git a/panels/_version.py b/panels/_version.py index <HASH>..<HASH> 100644 --- a/panels/_version.py +++ b/panels/_version.py @@ -1,2 +1,2 @@ # Versions compliant with PEP 440 https://www.python.org/dev/peps/pep-0440 -__version__ = "0.0.15" +__version__ = "0.0.16"
Update version number to <I>
chaoss_grimoirelab-sigils
train
b715eedbf1af4bf370b0cd6527b73372f51c5194
diff --git a/lib/express_templates/components/forms/option_support.rb b/lib/express_templates/components/forms/option_support.rb index <HASH>..<HASH> 100644 --- a/lib/express_templates/components/forms/option_support.rb +++ b/lib/express_templates/components/forms/option_support.rb @@ -5,6 +5,12 @@ module ExpressTemplates # on the field and an means of loading the collection for supplying # options to the user. module OptionSupport + + def has_many_through_association + reflection = resource_class.constantize.reflect_on_association(field_name.to_sym) + return reflection if reflection && reflection.macro.eql?(:has_many) && reflection.options.keys.include?(:through) + end + # Reflect on any association and return it if the association type # is :belongs_to. Returns false if the association is not :belongs_to. # Returns nil if there was a problem reflecting. @@ -19,7 +25,7 @@ module ExpressTemplates # Provide ActiveRecord code to load the associated collection as # options for display. def related_collection - reflection = belongs_to_association + reflection = belongs_to_association || has_many_through_association if reflection && !reflection.polymorphic? "#{reflection.klass}.all.select(:#{option_value_method}, :#{option_name_method}).order(:#{option_name_method})" end @@ -32,7 +38,7 @@ module ExpressTemplates end def option_name_method - cols = belongs_to_association.klass.columns + cols = (belongs_to_association||has_many_through_association).klass.columns @option_name_method ||= if cols.detect {|column| column.name.eql?('name') } :name diff --git a/lib/express_templates/components/forms/select.rb b/lib/express_templates/components/forms/select.rb index <HASH>..<HASH> 100644 --- a/lib/express_templates/components/forms/select.rb +++ b/lib/express_templates/components/forms/select.rb @@ -46,6 +46,8 @@ module ExpressTemplates else "{{options_from_collection_for_select(#{related_collection}, :id, :#{option_name_method}, @#{resource_name}.#{field_name})}}" end + elsif has_many_through_association + "{{options_from_collection_for_select(#{related_collection}, :id, :#{option_name_method}, @#{resource_name}.#{field_name})}}" else if selection = field_options.delete(:selected) "{{options_for_select(#{options}, \"#{selection}\")}}" diff --git a/test/components/forms/select_test.rb b/test/components/forms/select_test.rb index <HASH>..<HASH> 100644 --- a/test/components/forms/select_test.rb +++ b/test/components/forms/select_test.rb @@ -52,16 +52,31 @@ class SelectTest < ActiveSupport::TestCase [OpenStruct.new(name: 'id'), OpenStruct.new(name: 'name')] end end + class ::Tagging + def self.columns + [OpenStruct.new(name: 'id'), OpenStruct.new(name: 'name')] + end + end class ::Person def self.reflect_on_association(name) if name.eql? :gender - dummy_association = Object.new - class << dummy_association + dummy_belongs_to_association = Object.new + class << dummy_belongs_to_association def macro ; :belongs_to ; end def klass ; ::Gender ; end def polymorphic? ; false ; end end - return dummy_association + return dummy_belongs_to_association + end + if name.eql? :taggings + dummy_has_many_through_association = Object.new + class << dummy_has_many_through_association + def macro ; :has_many ; end + def klass ; ::Tagging ; end + def options ; {:through => :peron_tags} ; end + def polymorphic? 
; false ; end + end + return dummy_has_many_through_association end end end @@ -99,10 +114,20 @@ class SelectTest < ActiveSupport::TestCase test "select multiple: true if passed multiple true" do fragment = -> { express_form(:person) { - select :gender, nil, include_blank: false, multiple: true + select :taggings, nil, include_blank: false, multiple: true } } assert_match 'multiple: true', ExpressTemplates.compile(&fragment) end + test "select multiple gets options from associated has_many_through collection" do + fragment = -> { + express_form(:person) { + select :taggings, nil, include_blank: false, multiple: true + } + } + assert_match 'options_from_collection_for_select(Tagging.all.select(:id, :name).order(:name), :id, :name, @person.taggings)', + ExpressTemplates.compile(&fragment) + end + end \ No newline at end of file
[#<I>] add support for has_many :through to select
aelogica_express_templates
train
15d4384e041bcc458ae6443a6becab1d995be60c
diff --git a/src/Illuminate/Contracts/Cache/Store.php b/src/Illuminate/Contracts/Cache/Store.php index <HASH>..<HASH> 100644 --- a/src/Illuminate/Contracts/Cache/Store.php +++ b/src/Illuminate/Contracts/Cache/Store.php @@ -26,7 +26,7 @@ interface Store * Store an item in the cache for a given number of minutes. * * @param string $key - * @param mixed $value + * @param mixed $value * @param float|int $minutes * @return void */ @@ -45,7 +45,7 @@ interface Store * Increment the value of an item in the cache. * * @param string $key - * @param mixed $value + * @param mixed $value * @return int|bool */ public function increment($key, $value = 1); @@ -54,7 +54,7 @@ interface Store * Decrement the value of an item in the cache. * * @param string $key - * @param mixed $value + * @param mixed $value * @return int|bool */ public function decrement($key, $value = 1); @@ -63,7 +63,7 @@ interface Store * Store an item in the cache indefinitely. * * @param string $key - * @param mixed $value + * @param mixed $value * @return void */ public function forever($key, $value);
Fix @param directives for cache store Some of these docblocks were using 3 spaces instead of the recommended 2 in the contributions page.
laravel_framework
train
218ec889d2fbbf2d75c24b5a8a38dfd582b5cfad
diff --git a/blocks/rss_client/db/access.php b/blocks/rss_client/db/access.php index <HASH>..<HASH> 100644 --- a/blocks/rss_client/db/access.php +++ b/blocks/rss_client/db/access.php @@ -75,7 +75,7 @@ $block_rss_client_capabilities = array( ) ), - 'block/rss_client:managesharedfeeds' => array( + 'block/rss_client:manageanyfeeds' => array( 'captype' => 'write', 'contextlevel' => CONTEXT_SYSTEM,
Replaced managesharedfeeds with manageanyfeeds
moodle_moodle
train
9f08fcc976c3a80310ea7d66499c9e58332afeab
diff --git a/lib/joint/instance_methods.rb b/lib/joint/instance_methods.rb index <HASH>..<HASH> 100644 --- a/lib/joint/instance_methods.rb +++ b/lib/joint/instance_methods.rb @@ -19,7 +19,7 @@ module Joint next unless io.respond_to?(:read) io.rewind if io.respond_to?(:rewind) grid.delete(send(name).id) - grid.put(io.read, { + grid.put(io, { :_id => send(name).id, :filename => send(name).name, :content_type => send(name).type, diff --git a/lib/joint/io.rb b/lib/joint/io.rb index <HASH>..<HASH> 100644 --- a/lib/joint/io.rb +++ b/lib/joint/io.rb @@ -1,3 +1,5 @@ +require 'stringio' + module Joint class IO attr_accessor :name, :content, :type, :size @@ -5,10 +7,17 @@ module Joint def initialize(attrs={}) attrs.each { |key, value| send("#{key}=", value) } @type ||= 'plain/text' - @size ||= @content.size unless @content.nil? + end + + def content=(value) + @io = StringIO.new(value || nil) + @size = value ? value.size : 0 + end + + def read(*args) + @io.read(*args) end alias path name - alias read content end end \ No newline at end of file
leave reading the IO to Mongo::Grid
jnunemaker_joint
train
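The joint commit stops materializing attachment content with io.read and instead hands Mongo::Grid the IO object itself, wrapping assigned strings in a StringIO so the grid can stream them. A Python rendition of the same adapter idea (class name and attributes are illustrative):

import io

class AttachmentIO:
    # Wraps raw content in a stream so consumers can read() it lazily.
    def __init__(self, name=None, content=None, mime_type="plain/text"):
        self.name = name
        self.mime_type = mime_type
        self.content = content  # routes through the setter below

    @property
    def content(self):
        return self._io.getvalue()

    @content.setter
    def content(self, value):
        value = value or ""
        self._io = io.StringIO(value)
        self.size = len(value)

    def read(self, *args):
        # Delegate to the underlying stream, like Joint::IO#read.
        return self._io.read(*args)

att = AttachmentIO(name="notes.txt", content="hello grid")
assert att.read(5) == "hello"
assert att.size == 10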
c3ae9c70bda2b01b95554bcaa4b9f521a4cb8f70
diff --git a/tests/parser/types/numbers/test_uint256.py b/tests/parser/types/numbers/test_uint256.py index <HASH>..<HASH> 100644 --- a/tests/parser/types/numbers/test_uint256.py +++ b/tests/parser/types/numbers/test_uint256.py @@ -1,17 +1,44 @@ -# from ethereum.abi import ValueOutOfBounds +from vyper.exceptions import ( + InvalidLiteralException +) -def test_convert_bytes_to_uint256(get_contract_with_gas_estimation): - test_contract = """ +def test_convert_bytes_to_uint256(assert_compile_failed, get_contract_with_gas_estimation): + # Test valid bytes input for conversion + test_success = """ @public def foo(bar: bytes[5]) -> uint256: return convert(bar, uint256) """ - c = get_contract_with_gas_estimation(test_contract) + c = get_contract_with_gas_estimation(test_success) assert c.foo(b'\x00\x00\x00\x00\x00') == 0 assert c.foo(b'\x00\x07\x5B\xCD\x15') == 123456789 + # Test overflow bytes input for conversion + test_fail = """ +@public +def foo(bar: bytes[40]) -> uint256: + return convert(bar, uint256) + """ + + assert_compile_failed( + lambda: get_contract_with_gas_estimation(test_fail), + InvalidLiteralException + ) + + test_fail = """ +@public +def foobar() -> uint256: + barfoo: bytes[63] = "Hello darkness, my old friend I've come to talk with you again." + return convert(barfoo, uint256) + """ + + assert_compile_failed( + lambda: get_contract_with_gas_estimation(test_fail), + InvalidLiteralException + ) + def test_uint256_code(assert_tx_failed, get_contract_with_gas_estimation): uint256_code = """ diff --git a/vyper/types/convert.py b/vyper/types/convert.py index <HASH>..<HASH> 100644 --- a/vyper/types/convert.py +++ b/vyper/types/convert.py @@ -61,6 +61,8 @@ def to_uint256(expr, args, kwargs, context): return LLLnode(value=in_node.value, args=in_node.args, typ=BaseType('uint256'), pos=getpos(expr)) elif isinstance(in_node, LLLnode) and input_type is 'bytes': + if in_node.typ.maxlen > 32: + raise InvalidLiteralException("Cannot convert bytes array of max length {} to uint256".format(in_node.value), expr) return byte_array_to_num(in_node, expr, 'uint256') else:
Check that the converted bytes array is no larger than the storage capacity of uint<I>
ethereum_vyper
train
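The Vyper fix rejects byte arrays whose declared maximum length exceeds 32 before converting to uint256, because a uint256 holds exactly 32 bytes (256 bits). The underlying arithmetic in plain Python:

def bytes_to_uint256(data: bytes) -> int:
    # Anything longer than 32 bytes cannot fit in 256 bits.
    if len(data) > 32:
        raise ValueError(
            f"Cannot convert bytes array of length {len(data)} to uint256"
        )
    return int.from_bytes(data, "big")

assert bytes_to_uint256(b"\x00\x07\x5b\xcd\x15") == 123456789  # the test fixture's value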
0143ab6449190e2ab134959b114732b0a273ef8e
diff --git a/src/index.js b/src/index.js index <HASH>..<HASH> 100644 --- a/src/index.js +++ b/src/index.js @@ -152,14 +152,25 @@ function Swarm (peerInfo) { var peerIdForConn muxedConn.on('stream', (conn) => { - if (peerIdForConn) { - conn.peerId = peerIdForConn + function gotId () { + if (peerIdForConn) { + conn.peerId = peerIdForConn + connHandler(conn) + } else { + setTimeout(gotId, 100) + } + } + + if (this.identify) { + return gotId() } + connHandler(conn) }) // if identify is enabled, attempt to do it for muxer reuse if (this.identify) { + console.log('exec identify') identify.exec(conn, muxedConn, peerInfo, (err, pi) => { if (err) { return console.log('Identify exec failed', err) diff --git a/test/09-swarm-with-muxing.node.js b/test/09-swarm-with-muxing.node.js index <HASH>..<HASH> 100644 --- a/test/09-swarm-with-muxing.node.js +++ b/test/09-swarm-with-muxing.node.js @@ -190,6 +190,7 @@ describe('high level API - with everything mixed all together!', function () { it('dial from tcp+ws to tcp+ws', (done) => { swarmC.handle('/mamao/1.0.0', (conn) => { + expect(conn.peerId).to.exist conn.pipe(conn) }) @@ -204,21 +205,6 @@ describe('high level API - with everything mixed all together!', function () { }) }) - it('again, so that identify had time', (done) => { - swarmC.handle('/mamao/1.0.0', (conn) => { - expect(conn.peerId).to.exist - conn.pipe(conn) - }) - - swarmA.dial(peerC, '/mamao/1.0.0', (err, conn) => { - expect(err).to.not.exist - expect(conn.peerId).to.exist - conn.end() - conn.on('data', () => {}) // let it flow.. let it flooooow - conn.on('end', done) - }) - }) - it('close a muxer emits event', (done) => { swarmC.close(() => {}) swarmA.once('peer-mux-closed', (peerInfo) => {
freeze handling connections until identify is finished on the incoming multiplexed streams
libp2p_js-libp2p-switch
train
3027871161409f84ecac029231cb444dd6a42c69
diff --git a/pandas/core/frame.py b/pandas/core/frame.py index <HASH>..<HASH> 100644 --- a/pandas/core/frame.py +++ b/pandas/core/frame.py @@ -389,6 +389,9 @@ class DataFrame(NDFrame): copy=copy) elif isinstance(data, list): if len(data) > 0: + if index is None and isinstance(data[0], Series): + index = _get_names_from_index(data) + if isinstance(data[0], (list, tuple, dict, Series)): conv_data, columns = _to_sdict(data, columns) if isinstance(conv_data, dict): @@ -4758,6 +4761,22 @@ def _convert_object_array(content, columns, coerce_float=False): for c, vals in zip(columns, content)) return sdict, columns +def _get_names_from_index(data): + index = range(len(data)) + has_some_name = any([s.name is not None for s in data]) + if not has_some_name: + return index + + count = 0 + for i, s in enumerate(data): + n = s.name + if n is not None: + index[i] = n + else: + index[i] = 'Unnamed %d' % count + count += 1 + + return index def _homogenize(data, index, columns, dtype=None): from pandas.core.series import _sanitize_array diff --git a/pandas/tests/test_frame.py b/pandas/tests/test_frame.py index <HASH>..<HASH> 100644 --- a/pandas/tests/test_frame.py +++ b/pandas/tests/test_frame.py @@ -1871,6 +1871,28 @@ class TestDataFrame(unittest.TestCase, CheckIndexing, assert_frame_equal(result, expected) def test_constructor_list_of_series(self): + data = [{'a': 1.5, 'b': 3.0, 'c':4.0}, + {'a': 1.5, 'b': 3.0, 'c':6.0}] + sdict = dict(zip(['x', 'y'], data)) + idx = Index(['a', 'b', 'c']) + + # all named + data2 = [Series([1.5, 3, 4], idx, dtype='O', name='x'), + Series([1.5, 3, 6], idx, name='y')] + result = DataFrame(data2) + expected = DataFrame.from_dict(sdict, orient='index') + assert_frame_equal(result, expected) + + # some unnamed + data2 = [Series([1.5, 3, 4], idx, dtype='O', name='x'), + Series([1.5, 3, 6], idx)] + result = DataFrame(data2) + + sdict = dict(zip(['x', 'Unnamed 0'], data)) + expected = DataFrame.from_dict(sdict, orient='index') + assert_frame_equal(result.sort_index(), expected) + + # none named data = [{'a': 1.5, 'b': 3, 'c':4, 'd':6}, {'a': 1.5, 'b': 3, 'd':6}, {'a': 1.5, 'd':6},
ENH: retain Series names when constructing DataFrame from list of Series #<I>
pandas-dev_pandas
train
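With the pandas change, a DataFrame built from a list of Series takes each Series' name as the row label and falls back to a positional 'Unnamed N' label for unnamed entries. A usage sketch of the behavior the commit's tests describe — note that modern pandas versions label unnamed rows by integer position instead:

import pandas as pd

idx = ["a", "b", "c"]
rows = [
    pd.Series([1.5, 3.0, 4.0], index=idx, name="x"),
    pd.Series([1.5, 3.0, 6.0], index=idx),  # unnamed -> fallback label
]
df = pd.DataFrame(rows)
print(df.index)  # per this commit: ['x', 'Unnamed 0']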
d75a895fb36b6728bac3918019b1589abdc2e129
diff --git a/SwatDB/SwatDBRecordsetWrapper.php b/SwatDB/SwatDBRecordsetWrapper.php index <HASH>..<HASH> 100644 --- a/SwatDB/SwatDBRecordsetWrapper.php +++ b/SwatDB/SwatDBRecordsetWrapper.php @@ -143,7 +143,7 @@ abstract class SwatDBRecordsetWrapper extends SwatObject $this->objects_by_index[$index] = $object; } } - } while ($rs->nextResult()); + } while ($recordset->nextResult()); } }
Fix typo that broke all recordsets. svn commit r<I>
silverorange_swat
train
82c9f51486b4e44b179bd1437595b080f98ff81e
diff --git a/lib/sensu/client/process.rb b/lib/sensu/client/process.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/client/process.rb +++ b/lib/sensu/client/process.rb @@ -233,25 +233,36 @@ module Sensu end end + # Determine the Sensu transport subscribe options for a + # subscription. If a subscription begins with a transport pipe + # type, either "direct:" or "roundrobin:", the subscription uses + # a direct transport pipe, and the subscription name is uses for + # both the pipe and the funnel names. If a subscription does not + # specify a transport pipe type, a fanout transport pipe is + # used, the subscription name is used for the pipe, and a unique + # funnel is created for the Sensu client. The unique funnel name + # for the Sensu client is created using a combination of the + # client name, the Sensu version, and the process start time + # (epoch). + # + # @param subscription [String] + # @return [Array] containing the transport subscribe options: + # the transport pipe type, pipe, and funnel. def transport_subscribe_options(subscription) _, raw_type = subscription.split(":", 2).reverse case raw_type when "direct", "roundrobin" [:direct, subscription, subscription] else - funnel = [@settings[:client][:name], VERSION, Time.now.to_i].join("-") + funnel = [@settings[:client][:name], VERSION, start_time].join("-") [:fanout, subscription, funnel] end end # Set up Sensu client subscriptions. Subscriptions determine the - # kinds of check requests the client will receive. A unique - # transport funnel is created for the Sensu client, using a - # combination of it's name, the Sensu version, and the current - # timestamp (epoch). The unique funnel is bound to each - # transport pipe, named after the client subscription. The Sensu + # kinds of check requests the client will receive. The Sensu # client will receive JSON serialized check requests from its - # funnel, that get parsed and processed. + # subscriptions, that get parsed and processed. def setup_subscriptions @logger.debug("subscribing to client subscriptions") @settings[:client][:subscriptions].each do |subscription| diff --git a/lib/sensu/daemon.rb b/lib/sensu/daemon.rb index <HASH>..<HASH> 100644 --- a/lib/sensu/daemon.rb +++ b/lib/sensu/daemon.rb @@ -31,13 +31,16 @@ module Sensu module Daemon include Utilities - # Initialize the Sensu process. Set the initial service state, set - # up the logger, load settings, load extensions, and optionally - # daemonize the process and/or create a PID file. A subclass may - # override this method. + attr_reader :start_time + + # Initialize the Sensu process. Set the start time, initial + # service state, set up the logger, load settings, load + # extensions, and optionally daemonize the process and/or create a + # PID file. A subclass may override this method. # # @param options [Hash] def initialize(options={}) + @start_time = Time.now.to_i @state = :initializing @timers = {:run => []} setup_logger(options)
[roundrobin] use process start time in unique client funnel
sensu_sensu
train
8577b19f4b12b6c43baed68a5d4aed9f87803ed9
diff --git a/lib/puppet/ssl/oids.rb b/lib/puppet/ssl/oids.rb index <HASH>..<HASH> 100644 --- a/lib/puppet/ssl/oids.rb +++ b/lib/puppet/ssl/oids.rb @@ -70,12 +70,18 @@ module Puppet::SSL::Oids ["1.3.6.1.4.1.34380.1.3.13", 'pp_auth_role', 'Puppet Node Role Name for Authorization'], ] + @did_register_puppet_oids = false + # Register our custom Puppet OIDs with OpenSSL so they can be used as CSR # extensions. Without registering these OIDs, OpenSSL will fail when it # encounters such an extension in a CSR. def self.register_puppet_oids() - PUPPET_OIDS.each do |oid_defn| - OpenSSL::ASN1::ObjectId.register(*oid_defn) + if !@did_register_puppet_oids + PUPPET_OIDS.each do |oid_defn| + OpenSSL::ASN1::ObjectId.register(*oid_defn) + end + + @did_register_puppet_oids = true end end diff --git a/spec/unit/util/monkey_patches_spec.rb b/spec/unit/util/monkey_patches_spec.rb index <HASH>..<HASH> 100644 --- a/spec/unit/util/monkey_patches_spec.rb +++ b/spec/unit/util/monkey_patches_spec.rb @@ -42,12 +42,16 @@ describe OpenSSL::SSL::SSLContext do it 'explicitly disable SSLv2 ciphers using the ! prefix so they cannot be re-added' do cipher_str = OpenSSL::SSL::SSLContext::DEFAULT_PARAMS[:ciphers] - expect(cipher_str.split(':')).to include('!SSLv2') + if cipher_str + expect(cipher_str.split(':')).to include('!SSLv2') + end end it 'does not exclude SSLv3 ciphers shared with TLSv1' do cipher_str = OpenSSL::SSL::SSLContext::DEFAULT_PARAMS[:ciphers] - expect(cipher_str.split(':')).not_to include('!SSLv3') + if cipher_str + expect(cipher_str.split(':')).not_to include('!SSLv3') + end end it 'sets parameters on initialization' do
(maint) Fix tests for OpenSSL <I>
puppetlabs_puppet
train
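The Puppet fix guards OID registration behind an instance flag so repeated calls are no-ops, since re-registering an OID raises under newer OpenSSL. The guard pattern sketched in Python, with a fake registry standing in for OpenSSL:

_did_register = False

def register_oids(registry, oids):
    global _did_register
    if _did_register:          # idempotent: only the first call registers
        return
    for oid_defn in oids:
        registry.register(*oid_defn)
    _did_register = True

class FakeRegistry:
    def __init__(self):
        self.seen = []

    def register(self, *oid_defn):
        if oid_defn in self.seen:
            raise ValueError("already registered")  # mimics the OpenSSL failure
        self.seen.append(oid_defn)

reg = FakeRegistry()
oids = [("1.3.6.1.4.1.34380.1.1.1", "pp_uuid", "Puppet Node UUID")]
register_oids(reg, oids)
register_oids(reg, oids)  # second call is a no-op, no error raised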
c4bae8f4c2d1cdb9f9c66b9df251357aa3fdb3a9
diff --git a/fs/torrentfs_test.go b/fs/torrentfs_test.go index <HASH>..<HASH> 100644 --- a/fs/torrentfs_test.go +++ b/fs/torrentfs_test.go @@ -11,6 +11,7 @@ import ( "os" "path/filepath" "strconv" + "strings" "testing" "time" @@ -93,11 +94,15 @@ func TestUnmountWedged(t *testing.T) { DisableTrackers: true, NoDHT: true, }) + defer client.Stop() log.Printf("%+v", *layout.Metainfo) client.AddTorrent(layout.Metainfo) fs := New(client) fuseConn, err := fuse.Mount(layout.MountDir) if err != nil { + if strings.Contains(err.Error(), "fuse") { + t.Skip(err) + } t.Fatal(err) } go func() {
Ignore failures due to FUSE not being available
anacrolix_torrent
train
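The torrent test now skips rather than fails when fuse.Mount errors because FUSE is missing on the host. pytest offers the same escape hatch via pytest.skip; a sketch with a stand-in mount function:

import pytest

def mount_fuse(mount_dir):
    # Stand-in for fuse.Mount on a host without FUSE installed.
    raise OSError("fuse: device not found")

def test_unmount_wedged(tmp_path):
    try:
        mount_fuse(tmp_path)
    except OSError as err:
        if "fuse" in str(err).lower():
            pytest.skip(f"FUSE unavailable: {err}")  # skip, don't fail
        raise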
9e74cf11fa0f8f07f1a93ced28cc7d49a2010c31
diff --git a/chemlab/graphics/qtviewer.py b/chemlab/graphics/qtviewer.py index <HASH>..<HASH> 100644 --- a/chemlab/graphics/qtviewer.py +++ b/chemlab/graphics/qtviewer.py @@ -71,8 +71,9 @@ class QtViewer(QMainWindow): # functions without having to show the window first... context = QGLContext(QGLFormat()) widget = QChemlabWidget(context) + widget.setParent(self) context.makeCurrent() - + self.setCentralWidget(widget) self.resize(1000, 800) self.widget = widget diff --git a/chemlab/ipython.py b/chemlab/ipython.py index <HASH>..<HASH> 100644 --- a/chemlab/ipython.py +++ b/chemlab/ipython.py @@ -56,6 +56,10 @@ def showmol(mol, style='ball-and-stick', image.save(b, format='png') data = b.getvalue() + # Cleanup + del v + del w + # Save as png return ipy_Image(data=data) @@ -84,14 +88,17 @@ def showsys(sys, width=400, height=400): w.paintGL() # Make sure to finish everything - data = glReadPixels(0, 0, width, height, GL_RGB, GL_UNSIGNED_BYTE) + # Make pil image to save as png image = pil_Image.fromstring('RGB', (width, height), data) b = BytesIO() image.save(b, format='png') data = b.getvalue() + # Cleanup + del v + del w # Save as png return ipy_Image(data=data)
Adding cleanup for Qt objects
chemlab_chemlab
train
dfdb7515259d90c0ab7b86f433084b2e98569a7e
diff --git a/main/core/API/Serializer/User/UserSerializer.php b/main/core/API/Serializer/User/UserSerializer.php index <HASH>..<HASH> 100644 --- a/main/core/API/Serializer/User/UserSerializer.php +++ b/main/core/API/Serializer/User/UserSerializer.php @@ -313,6 +313,7 @@ class UserSerializer } $fieldFacetValue = $serializer->deserialize('Claroline\CoreBundle\Entity\Facet\FieldFacetValue', $fieldFacetValue); + $fieldFacetValue->setUser($user); $user->addFieldFacet($fieldFacetValue); } }
Fix user creation with facets (#<I>)
claroline_Distribution
train
334873d3784e2baa2b19f8f69b5aade36715ba03
diff --git a/packages/interface-ipfs-core/src/add-all.js b/packages/interface-ipfs-core/src/add-all.js index <HASH>..<HASH> 100644 --- a/packages/interface-ipfs-core/src/add-all.js +++ b/packages/interface-ipfs-core/src/add-all.js @@ -171,6 +171,28 @@ module.exports = (common, options) => { expect(root.cid.toString()).to.equal(fixtures.directory.cid) }) + it('should receive progress path as empty string when adding content without paths', async function () { + const content = (name) => fixtures.directory.files[name] + const progressSizes = {} + + const dirs = [ + content('pp.txt'), + content('holmes.txt'), + content('jungle.txt') + ] + + const total = { + '': dirs.reduce((acc, curr) => acc + curr.length, 0) + } + + const handler = (bytes, path) => { + progressSizes[path] = bytes + } + + await drain(ipfs.addAll(dirs, { progress: handler })) + expect(progressSizes).to.deep.equal(total) + }) + it('should receive file name from progress event', async () => { const receivedNames = [] function handler (p, name) { diff --git a/packages/ipfs-http-server/src/api/resources/files-regular.js b/packages/ipfs-http-server/src/api/resources/files-regular.js index <HASH>..<HASH> 100644 --- a/packages/ipfs-http-server/src/api/resources/files-regular.js +++ b/packages/ipfs-http-server/src/api/resources/files-regular.js @@ -260,7 +260,7 @@ exports.add = { multipart(request), async function * (source) { for await (const entry of source) { - currentFileName = entry.name || 'unknown' + currentFileName = entry.name || '' if (entry.type === 'file') { filesParsed = true
fix: align behaviour between go and js for content without paths (#<I>) Aligns behaviour between js and go when no path is present during an import - we now pass an empty string instead of `'unknown'`.
ipfs_js-ipfs
train
57e18824e8de6d504e94a84a71b53a8b200a067c
diff --git a/drizzlepac/haputils/catalog_utils.py b/drizzlepac/haputils/catalog_utils.py index <HASH>..<HASH> 100644 --- a/drizzlepac/haputils/catalog_utils.py +++ b/drizzlepac/haputils/catalog_utils.py @@ -496,6 +496,10 @@ class HAPCatalogBase: gain_values = [g for g in gain_keys if g > 0.0] self.gain = self.image.keyword_dict['exptime'] * np.mean(gain_values) + # Set the gain for ACS/SBC and WFC3/IR to 1.0 + if self.image.keyword_dict["detector"] in ["IR", "SBC"]: + self.gain = 1.0 + # Convert photometric aperture radii from arcsec to pixels self.aper_radius_arcsec = [self.param_dict['aperture_1'], self.param_dict['aperture_2']] self.aper_radius_list_pixels = [] @@ -766,7 +770,8 @@ class HAPPointCatalog(HAPCatalogBase): # Create the list of photometric apertures to measure phot_apers = [CircularAperture(pos_xy, r=r) for r in self.aper_radius_list_pixels] - # Perform aperture photometry + + # Perform aperture photometry - the input data should NOT be background subtracted photometry_tbl = photometry_tools.iraf_style_photometry(phot_apers, bg_apers, data=image,
Set the gain to a value of <I> when processing ACS/SBC data (#<I>) * Ensure the photometry routine has a value of <I> when the input data is ACS/SBC to avoid values being set to nan and causing all of the sources to be trimmed from the output catalogs. * Set the gain for both ACS/SBC and WFC3/IR and relocate the setting of the gain to the HAPCatalogBase.
spacetelescope_drizzlepac
train
6814694a16d7fc8653c12dfbe3729a2501b2a338
diff --git a/src/main/java/com/spotify/docker/client/messages/swarm/SwarmInit.java b/src/main/java/com/spotify/docker/client/messages/swarm/SwarmInit.java index <HASH>..<HASH> 100644 --- a/src/main/java/com/spotify/docker/client/messages/swarm/SwarmInit.java +++ b/src/main/java/com/spotify/docker/client/messages/swarm/SwarmInit.java @@ -71,16 +71,11 @@ public abstract class SwarmInit { @JsonProperty("AdvertiseAddr") final String advertiseAddr, @JsonProperty("ForceNewCluster") final Boolean forceNewCluster, @JsonProperty("Spec") final SwarmSpec swarmSpec) { - final Builder builder = builder() + return builder() .listenAddr(listenAddr) .advertiseAddr(advertiseAddr) - .forceNewCluster(forceNewCluster); - - if (swarmSpec != null) { - builder.swarmSpec(swarmSpec); - } - - return builder.build(); - + .forceNewCluster(forceNewCluster) + .swarmSpec(swarmSpec) + .build(); } }
Remove unnecessary null checks for nullables in SwarmInit
spotify_docker-client
train
579eac13d1abd2dbb8bb060fe7bb028a517e705c
diff --git a/pythran/passes.py b/pythran/passes.py index <HASH>..<HASH> 100644 --- a/pythran/passes.py +++ b/pythran/passes.py @@ -368,11 +368,18 @@ class NormalizeIdentifiers(ast.NodeVisitor): node.name=self.rename(node.name) self.visit(node.args) [ self.visit(n) for n in node.body ] + def visit_alias(self, node): if node.asname: if node.asname in cxx_keywords: node.asname=self.rename(node.name) + def visit_Attribute(self, node): + self.visit(node.value) + if node.attr in cxx_keywords: + node.attr += "_" # let us only hope the considered class does not have this attribute + # Always true as long as we don't have custom classes. + def normalize_identifiers(node): '''Prevents naming conflict with c++ keywords by appending extra '_' to conflicting names.''' ni=NormalizeIdentifiers(identifiers(node), dict())
Fix bug in normalize_identifiers. Previous implementation did not dive into attribute attr.
serge-sans-paille_pythran
train
7dbaaea87fbda0630c71fc283fc663dcbd0049ba
diff --git a/dingo/core/__init__.py b/dingo/core/__init__.py index <HASH>..<HASH> 100644 --- a/dingo/core/__init__.py +++ b/dingo/core/__init__.py @@ -71,6 +71,7 @@ class NetworkDingo: self._pf_config = kwargs.get('pf_config', None) self._static_data = kwargs.get('static_data', {}) + self.import_pf_config() self.import_static_data() def mv_grid_districts(self): diff --git a/examples/example.py b/examples/example.py index <HASH>..<HASH> 100755 --- a/examples/example.py +++ b/examples/example.py @@ -25,7 +25,7 @@ cfg_dingo.load_config('config_misc.cfg') start = time.time() # database connection -conn = db.connection(section='oedb') +conn = db.connection(section='oedb_remote') # instantiate dingo network object nd = NetworkDingo(name='network') @@ -35,14 +35,13 @@ nd = NetworkDingo(name='network') #mv_grid_districts=[1328] #mv_grid_districts=[1294] #mv_grid_districts=[419] +mv_grid_districts=[480] #mv_grid_districts = [359, 415, 424, 447, 402, 406, 489, 480, 371] #mv_grid_districts=[359] #mv_grid_districts = [386,372,406,371,402,415,480,424,489,367]#,569,359,591] #mv_grid_districts=[3087, 2990, 3080, 3034, 3088] #mv_grid_districts=[3080]#, 3080]#, 3080] -mv_grid_districts = list(range(1250,1351)) - -nd.import_pf_config() +#mv_grid_districts = list(range(1250,1351)) nd.import_mv_grid_districts(conn, mv_grid_districts)
move nd's PF import to init of nd
openego_ding0
train
6677e0abccf9bbb229e301b666c65059b8a5b101
diff --git a/cake/tests/cases/libs/model/model.test.php b/cake/tests/cases/libs/model/model.test.php index <HASH>..<HASH> 100644 --- a/cake/tests/cases/libs/model/model.test.php +++ b/cake/tests/cases/libs/model/model.test.php @@ -1635,6 +1635,32 @@ class ModelTest extends CakeTestCase { $result = $this->model->validates(); $this->assertFalse($result); + $this->model->validate['modified'] = array('allowEmpty' => false, 'rule' => 'date'); + + $data = array('TestValidate' => array('modified' => null)); + $result = $this->model->create($data); + $this->assertTrue($result); + $result = $this->model->validates(); + $this->assertFalse($result); + + $data = array('TestValidate' => array('modified' => false)); + $result = $this->model->create($data); + $this->assertTrue($result); + $result = $this->model->validates(); + $this->assertFalse($result); + + $data = array('TestValidate' => array('modified' => '')); + $result = $this->model->create($data); + $this->assertTrue($result); + $result = $this->model->validates(); + $this->assertFalse($result); + + $data = array('TestValidate' => array('modified' => '2007-05-01')); + $result = $this->model->create($data); + $this->assertTrue($result); + $result = $this->model->validates(); + $this->assertTrue($result); + $this->model->validate['slug'] = array('allowEmpty' => false, 'rule' => array('maxLength', 45)); $data = array('TestValidate' => array('user_id' => '1', 'title' => 0, 'body' => 'body', 'slug' => ''));
Adding test I forgot to commit with [<I>] git-svn-id: <URL>
cakephp_cakephp
train
2a0657e89b8c9e9d81cb94dab4fc252e16470678
diff --git a/system/Database/Forge.php b/system/Database/Forge.php index <HASH>..<HASH> 100644 --- a/system/Database/Forge.php +++ b/system/Database/Forge.php @@ -69,7 +69,7 @@ class Forge * @var array */ protected $primaryKeys = []; - + /** * List of foreign keys. * @@ -337,7 +337,7 @@ class Forge } //-------------------------------------------------------------------- - + /** * Add Foreign Key * @@ -347,25 +347,25 @@ class Forge */ public function addForeignKey($fieldName= '',$tableName = '', $tableField = '', $onUpdate = false, $onDelete = false) { - + if( ! isset($this->fields[$fieldName])) { throw new \RuntimeException('Field "'.$fieldName.'" not exist'); - } - + } + $this->foreignKeys[$fieldName] = [ - 'table' => $tableName, - 'field' => $tableField, - 'onDelete' => $onDelete, - 'onUpdate' => $onUpdate + 'table' => $tableName, + 'field' => $tableField, + 'onDelete' => strtoupper($onDelete), + 'onUpdate' => strtoupper($onUpdate) ]; - - + + return $this; } //-------------------------------------------------------------------- - + /** * Foreign Key Drop * @@ -376,9 +376,9 @@ class Forge */ public function dropForeignKey($table, $foreign_name) { - + $sql = sprintf($this->dropConstraintStr,$this->db->escapeIdentifiers($this->db->DBPrefix.$table),$this->db->escapeIdentifiers($this->db->DBPrefix.$foreign_name)); - + if ($sql === false) { if ($this->db->DBDebug) @@ -393,7 +393,7 @@ class Forge } //-------------------------------------------------------------------- - + /** * Create Table * @@ -554,7 +554,7 @@ class Forge return false; } - + // If the prefix is already starting the table name, remove it... if (! empty($this->db->DBPrefix) && strpos($table_name, $this->db->DBPrefix) === 0) { @@ -614,7 +614,7 @@ class Forge } $sql = $sql . ' ' . $this->db->escapeIdentifiers($table); - + return $sql; } @@ -1157,22 +1157,22 @@ class Forge $sql = ''; $allowActions = array('CASCADE','SET NULL','NO ACTION','RESTRICT','SET DEFAULT'); - + if (count($this->foreignKeys) > 0){ foreach ($this->foreignKeys as $field => $fkey) { $name_index = $table.'_'.$field.'_foreign'; - + $sql .= ",\n\tCONSTRAINT " . $this->db->escapeIdentifiers($name_index) . ' FOREIGN KEY(' . $this->db->escapeIdentifiers($field) . ') REFERENCES '.$this->db->escapeIdentifiers($this->db->DBPrefix.$fkey['table']).' ('.$this->db->escapeIdentifiers($fkey['field']).')'; - + if($fkey['onDelete'] !== false && in_array($fkey['onDelete'], $allowActions)){ $sql .= " ON DELETE ".$fkey['onDelete']; } - + if($fkey['onUpdate'] !== false && in_array($fkey['onUpdate'], $allowActions)){ $sql .= " ON UPDATE ".$fkey['onUpdate']; } - + } }
FK: Recognize ON UPDATE and ON DELETE commands even if they are not in uppercase
codeigniter4_CodeIgniter4
train
8d02d364d5f0675fb1a0fda4a2a3dbf67f9ae8df
diff --git a/src/Leevel/Database/Condition.php b/src/Leevel/Database/Condition.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Database/Condition.php +++ b/src/Leevel/Database/Condition.php @@ -2546,10 +2546,9 @@ class Condition * * @param array|\Closure|\Leevel\Database\Condition|\Leevel\Database\Select|string $names * @param mixed $cols - * @param mixed - * @param null|mixed $cond + * @param null|mixed $cond * - * @throws \InvalidArgumentException $cond + * @throws \InvalidArgumentException * * @return \Leevel\Database\Condition */ diff --git a/src/Leevel/Database/Ddd/Entity.php b/src/Leevel/Database/Ddd/Entity.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Database/Ddd/Entity.php +++ b/src/Leevel/Database/Ddd/Entity.php @@ -1941,8 +1941,6 @@ abstract class Entity implements IArray, IJson, JsonSerializable, ArrayAccess /** * 整理黑白名单变更数据. - * - * @param array $type */ protected function normalizeWhiteAndBlackChangedData(string $type): array { @@ -2173,9 +2171,7 @@ abstract class Entity implements IArray, IJson, JsonSerializable, ArrayAccess $white = $black = $whiteAndBlack = []; } - $value = $value->toArray($white, $black, $whiteAndBlack); - - return $value; + return $value->toArray($white, $black, $whiteAndBlack); } /** diff --git a/src/Leevel/Database/Ddd/UnitOfWork.php b/src/Leevel/Database/Ddd/UnitOfWork.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Database/Ddd/UnitOfWork.php +++ b/src/Leevel/Database/Ddd/UnitOfWork.php @@ -256,7 +256,7 @@ class UnitOfWork /** * 创建一个事务工作单元. * - * @return \Leevel\Database\Ddd\IUnitOfWork + * @return \Leevel\Database\Ddd\UnitOfWork */ public static function make(): self { @@ -1185,7 +1185,7 @@ class UnitOfWork * * @param \Leevel\Database\Ddd\Entity $entity * - * @throws \InvalieletentException + * @throws \InvalidArgumentException */ protected function validateDeleteAlreadyExists(Entity $entity, string $type): void { diff --git a/src/Leevel/Database/Proxy/Db.php b/src/Leevel/Database/Proxy/Db.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Database/Proxy/Db.php +++ b/src/Leevel/Database/Proxy/Db.php @@ -608,8 +608,6 @@ class Db /** * 获得查询字符串. - * - * @param $withLogicGroup */ public static function makeSql(bool $withLogicGroup = false): string { diff --git a/src/Leevel/Database/Select.php b/src/Leevel/Database/Select.php index <HASH>..<HASH> 100644 --- a/src/Leevel/Database/Select.php +++ b/src/Leevel/Database/Select.php @@ -784,8 +784,6 @@ class Select /** * 获得查询字符串. - * - * @param $withLogicGroup */ public function makeSql(bool $withLogicGroup = false): string {
fix(database): fix for phpstan level 2
hunzhiwange_framework
train
f99bad03b9f35e9242b3f0077255f998b91ed543
diff --git a/pyatv/protocols/companion/opack.py b/pyatv/protocols/companion/opack.py index <HASH>..<HASH> 100644 --- a/pyatv/protocols/companion/opack.py +++ b/pyatv/protocols/companion/opack.py @@ -8,10 +8,19 @@ from datetime import datetime # pylint: disable=too-many-branches,too-many-return-statements,too-many-statements import struct -from typing import Tuple +from typing import NamedTuple, Tuple from uuid import UUID +class UID(NamedTuple): + """Type for UID encoded data. + + Either 1, 2, 3 or 4 bytes (bit endian). + """ + + value: int + + def pack(data: object) -> bytes: """Pack data structure with OPACK and return bytes.""" return _pack(data, []) @@ -91,6 +100,15 @@ def _pack(data, object_list): ) if len(data) >= 0xF: packed_bytes += b"\x03" + elif isinstance(data, UID): + if data.value <= 0xFF: + packed_bytes = bytes([0xC1]) + data.value.to_bytes(1, byteorder="big") + elif data.value <= 0xFFFF: + packed_bytes = bytes([0xC2]) + data.value.to_bytes(2, byteorder="big") + elif data.value <= 0xFFFFFF: + packed_bytes = bytes([0xC3]) + data.value.to_bytes(3, byteorder="big") + elif data.value <= 0xFFFFFFFF: + packed_bytes = bytes([0xC4]) + data.value.to_bytes(4, byteorder="big") else: raise TypeError(str(type(data))) @@ -194,6 +212,12 @@ def _unpack(data, object_list): add_to_object_list = False elif 0xA0 <= data[0] <= 0xBF: value, remaining = object_list[data[0] - 0xA0], data[1:] + elif 0xC1 <= data[0] <= 0xC4: + length = data[0] - 0xC0 + value, remaining = ( + UID(int.from_bytes(data[1 : 1 + length], byteorder="big")), + data[1 + length :], + ) else: raise TypeError(hex(data[0])) diff --git a/tests/protocols/companion/test_opack.py b/tests/protocols/companion/test_opack.py index <HASH>..<HASH> 100644 --- a/tests/protocols/companion/test_opack.py +++ b/tests/protocols/companion/test_opack.py @@ -8,7 +8,9 @@ from uuid import UUID from deepdiff import DeepDiff import pytest -from pyatv.protocols.companion.opack import pack, unpack +from pyatv.protocols.companion.opack import UID, pack, unpack + +# pack def test_pack_unsupported_type(): @@ -110,6 +112,16 @@ def test_pack_ptr(): ) +def test_pack_uid(): + assert pack(UID(0x01)) == b"\xC1\x01" + assert pack(UID(0x0102)) == b"\xC2\x01\x02" + assert pack(UID(0x010203)) == b"\xC3\x01\x02\x03" + assert pack(UID(0x01020304)) == b"\xC4\x01\x02\x03\x04" + + +# unpack + + def test_unpack_unsupported_type(): with pytest.raises(TypeError): unpack(b"\x00") @@ -216,6 +228,13 @@ def test_unpack_ptr(): ) +def test_unpack_uid(): + assert unpack(b"\xC1\x01") == (UID(0x01), b"") + assert unpack(b"\xC2\x01\x02") == (UID(0x0102), b"") + assert unpack(b"\xC3\x01\x02\x03") == (UID(0x010203), b"") + assert unpack(b"\xC4\x01\x02\x03\x04") == (UID(0x01020304), b"") + + def test_golden(): data = { "_i": "_systemInfo", @@ -246,6 +265,7 @@ def test_golden(): "_sf": 256, "model": "iPhone10,6", "name": "iPhone", + "uid": UID(0x11223344), }, "_t": 2, }
companion: Add support for UID in OPACK Relates to #<I>
postlund_pyatv
train
eaa69496d266735c6385653de93a238125bacee3
diff --git a/spec/spec_helper.rb b/spec/spec_helper.rb index <HASH>..<HASH> 100644 --- a/spec/spec_helper.rb +++ b/spec/spec_helper.rb @@ -21,5 +21,5 @@ module Minitest end def run_all_threads - sleep 0.001 + Thread.list.each(&:run) end
spec_helper: suddenly, this seems to work #teamnoidea
chastell_kamerling
train
797bab33325f9a4d85bcb66517504e9c4379a62e
diff --git a/src/sap.ui.core/src/sap/ui/test/actions/Action.js b/src/sap.ui.core/src/sap/ui/test/actions/Action.js index <HASH>..<HASH> 100644 --- a/src/sap.ui.core/src/sap/ui/test/actions/Action.js +++ b/src/sap.ui.core/src/sap/ui/test/actions/Action.js @@ -127,24 +127,24 @@ function ($, ManagedObject, QUnitUtils, Opa5, Device) { var bFireArtificialEvents; var oDomRef = $DomRef[0]; - if (!isAlreadyFocused) { + if (isAlreadyFocused || (Device.browser.msie && (Device.browser.version < 12))) { + // If the event is already focused, make sure onfocusin event of the control will be properly fired when executing this action, + // otherwise the next blur will not be able to safely remove the focus. + // In IE11, if the focus action fails and focusin is dispatched, onfocusin will be called twice + // to avoid this, directly dispatch the artificial events + bFireArtificialEvents = true; + } else { $DomRef.focus(); // This check will only return false if you have the focus in the dev tools console, // or a background tab, or the browser is not focused at all. We still want onfocusin to work var bWasFocused = $DomRef.is(":focus"); // do not fire the artificial events in this case since we would recieve onfocusin twice bFireArtificialEvents = !bWasFocused; - } else { - // This makes sure the onfocusin event of the control will be properly fired when executing this action, - // since blur cannot safely remove the focus - bFireArtificialEvents = true; } if (bFireArtificialEvents) { $.sap.log.debug("Control " + oControl + " could not be focused - maybe you are debugging?", this._sLogPrefix); - } - if (bFireArtificialEvents) { this._createAndDispatchFocusEvent("focusin", oDomRef); this._createAndDispatchFocusEvent("focus", oDomRef); this._createAndDispatchFocusEvent("activate", oDomRef);
[FIX] Opa: fix action focus when IE<I> browser is not focused Change-Id: If7e<I>a9a4eda8b5e<I>bd<I>f9fe<I>d<I>
SAP_openui5
train
c51a3b6fc7c0f779a1572a9af965d148e55af650
diff --git a/lib/field.js b/lib/field.js index <HASH>..<HASH> 100644 --- a/lib/field.js +++ b/lib/field.js @@ -86,7 +86,6 @@ module.exports = State.extend({ }, probability: { deps: ['count', 'parent.count'], - cache: false, fn: function () { if (!this.parent) return null; return this.count / this.parent.count; diff --git a/lib/type.js b/lib/type.js index <HASH>..<HASH> 100644 --- a/lib/type.js +++ b/lib/type.js @@ -32,7 +32,7 @@ var Type = exports.Type = State.extend({ cache: false, fn: function () { if (!this.parent) return null; - return this.count / this.parent.count; + return this.count / (this.parent.total_count || this.parent.count); } }, }, @@ -228,8 +228,10 @@ exports.Document = Type.extend({ // parse sub-document and add to this.fields assert(_.isPlainObject(obj), format('value must be object, got `%s`', obj)); - _.each(obj, function(val, key) { - this.fields.addToField(key, val); + // make sure to handle undefined values too + var union = _.union(_.keys(obj), this.fields.pluck('name')); + _.each(union, function(key) { + this.fields.addToField(key, obj[key]); }.bind(this)); this.count += 1; @@ -254,10 +256,28 @@ module.exports.Array = Type.extend({ } }, derived: { + total_count: { + cache: false, + fn: function() { + return _.sum(this.lengths); + } + }, average_length: { - deps: ['lengths'], + deps: ['count'], + fn: function() { + return this.total_count / this.count; + } + }, + /** + * Convenience alias to access sub-fields. Returns + * null if this Field does not have a 'Document' type. + * @returns {FieldCollection} + */ + fields: { + deps: ['types.length'], fn: function() { - return _.sum(this.lengths) / this.lengths.length; + var objType = this.types.get('Document'); + return objType ? objType.fields : null; + } } }, @@ -268,6 +288,7 @@ module.exports.Array = Type.extend({ this.types.addToType(val); }.bind(this)); + this.lengths.push(arr.length); this.count += 1; }, collections: { diff --git a/test/array-object-types.test.js b/test/array-object-types.test.js index <HASH>..<HASH> 100644 --- a/test/array-object-types.test.js +++ b/test/array-object-types.test.js @@ -10,7 +10,7 @@ describe('arrays and objects as type (INT-203 restructuring)', function () { { x: { b: 1 } }, { x: [ "bar", null, false ] }, { x: [ {c: 1, d: 1}, {c: 2 } ] }, - // { e: 1 } + { e: 1 } ]; var schema; @@ -32,10 +32,10 @@ describe('arrays and objects as type (INT-203 restructuring)', function () { x.types.pluck('probability') ); assert.deepEqual(dist, { - 'Array': 3/5, - 'String': 1/5, - 'Document': 1/5, - // 'Undefined': 1/6 + 'Array': 3/6, + 'String': 1/6, + 'Document': 1/6, + 'Undefined': 1/6 }); }); @@ -56,10 +56,18 @@ describe('arrays and objects as type (INT-203 restructuring)', function () { arr = schema.fields.get('x').types.get('Array'); }); - it('should return the lengths of all encountered arrays', function () { + it('should return the lengths of all encountered arrays', function() { assert.deepEqual(arr.lengths, [3, 3, 2]); }); + it('should return the probability of x being an array', function(){ + assert.equal(arr.probability, 3/6); + }); + + it('should return the total count of all containing values', function() { + assert.equal(arr.total_count, 8); + }); + it('should return the type distribution inside an array', function () { var arrDist = _.zipObject( arr.types.pluck('name'), @@ -74,14 +82,6 @@ describe('arrays and objects as type (INT-203 restructuring)', function () { }); }); - it('should return the correct count inside the array', function () { - assert.equal(arr.count, 8); - }); - it('should contain the basic values inside the array', function () { - assert.deepEqual(arr.values.serialize(), [1, 2, 3, 'foo', null, false]); - }); - it('should have a `.fields` alias for convenience', function () { assert.deepEqual(arr.fields, arr.types.get('Document').fields); });
added "Undefined" handling, total_count
mongodb-js_mongodb-schema
train
b33ad0c5fc7e24abb2c8309a217af5f8ff1318a1
diff --git a/org/postgresql/jdbc2/PreparedStatement.java b/org/postgresql/jdbc2/PreparedStatement.java index <HASH>..<HASH> 100644 --- a/org/postgresql/jdbc2/PreparedStatement.java +++ b/org/postgresql/jdbc2/PreparedStatement.java @@ -65,14 +65,6 @@ public class PreparedStatement extends Statement implements java.sql.PreparedSta this.sql = sql; this.connection = connection; - // might just as well create it here, so we don't take the hit later - - SimpleDateFormat df = new SimpleDateFormat("''yyyy-MM-dd''"); - tl_df.set(df); - - df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); - tl_tsdf.set(df); - for (i = 0; i < sql.length(); ++i) { int c = sql.charAt(i); @@ -95,17 +87,6 @@ public class PreparedStatement extends Statement implements java.sql.PreparedSta templateStrings[i] = (String)v.elementAt(i); } - /** - * New in 7.1 - overides Statement.close() to dispose of a few local objects - */ - public void close() throws SQLException - { - // free the ThreadLocal caches - tl_df.set(null); - tl_tsdf.set(null); - super.close(); - } - /** * A Prepared SQL query is executed and its ResultSet is returned * @@ -343,6 +324,10 @@ public class PreparedStatement extends Statement implements java.sql.PreparedSta public void setDate(int parameterIndex, java.sql.Date x) throws SQLException { SimpleDateFormat df = (SimpleDateFormat) tl_df.get(); + if(df==null) { + df = new SimpleDateFormat("''yyyy-MM-dd''"); + tl_df.set(df); + } set(parameterIndex, df.format(x)); @@ -382,6 +367,10 @@ public class PreparedStatement extends Statement implements java.sql.PreparedSta public void setTimestamp(int parameterIndex, Timestamp x) throws SQLException { SimpleDateFormat df = (SimpleDateFormat) tl_tsdf.get(); + if(df==null) { + df = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss"); + tl_tsdf.set(df); + } df.setTimeZone(TimeZone.getTimeZone("GMT")); // Use the shared StringBuffer
SimpleDateFormat performance improvement, thread-safe. Barry Lind
pgjdbc_pgjdbc
train
586579152cff10a86d03346ff77e30ad65542a94
diff --git a/www/nativescript-webview-interface.js b/www/nativescript-webview-interface.js index <HASH>..<HASH> 100644 --- a/www/nativescript-webview-interface.js +++ b/www/nativescript-webview-interface.js @@ -37,7 +37,7 @@ var NSWebViewinterface = (function () { } }; - /** + /** * Handles JS function calls by android/ios. This function is called from nativescript. * Result value of JS function call can be promise or any other data. * @param {number} reqId - Internal communication id @@ -46,8 +46,9 @@ var NSWebViewinterface = (function () { */ NSWebViewinterface.prototype._callJSFunction = function (reqId, functionName, args) { var _this = this; - if (functionName && window[functionName]) { - var retnVal = window[functionName].apply(window, args); + var resolvedFn = _this._getResolvedFunction(functionName); + if(resolvedFn){ + var retnVal = resolvedFn.apply(window, args); if (retnVal && retnVal.then) { retnVal.then(function (value) { _this._sendJSCallResponse(reqId, value); @@ -57,7 +58,29 @@ var NSWebViewinterface = (function () { this._sendJSCallResponse(reqId, retnVal); } } - }; + } + + /** + * Resolves a function, if the function to be executed is in deep object chain. + * e.g If we want to execute a function 'parent.child.child.fn' from native app, + * this function will extract fn from the object chain. + * We can do it by using eval also, but as there is a way, why to invite unknown security risks? + * + */ + NSWebViewinterface.prototype._getResolvedFunction = function(functionName){ + if(functionName && (functionName = functionName.trim()).length){ + functionName = functionName.indexOf('window.') === 0 ? functionName.replace('window.', '') : functionName; + var arrFnPath = functionName.split('.'); + var fn = window; + for(var i =0; i < arrFnPath.length; i++){ + if(!fn[arrFnPath[i]]){ + break; + } + fn = fn[arrFnPath[i]]; + } + return fn; + } + } /** * Returns JS Call response by emitting internal _jsCallRespone event
Added provision to execute a function in a deep object chain in the webview when called via the callJSFunction API from the native app.
shripalsoni04_nativescript-webview-interface
train
6f0e0d7149cd2c1e3e96eef8211c0c9516813659
diff --git a/Readme.md b/Readme.md index <HASH>..<HASH> 100644 --- a/Readme.md +++ b/Readme.md @@ -364,6 +364,11 @@ or doctype html +doctypes are case-insensitive, so the following are equivalent: + + doctype Basic + doctype basic + Will output the _html 5_ doctype. Below are the doctypes defined by default, which can easily be extended: diff --git a/lib/compiler.js b/lib/compiler.js index <HASH>..<HASH> 100644 --- a/lib/compiler.js +++ b/lib/compiler.js @@ -82,7 +82,7 @@ Compiler.prototype = { */ setDoctype: function(name){ - var doctype = doctypes[name || 'default']; + var doctype = doctypes[(name || 'default').toLowerCase()]; if (!doctype) throw new Error('unknown doctype "' + name + '"'); this.doctype = doctype; this.terse = '5' == name || 'html' == name; diff --git a/test/jade.test.js b/test/jade.test.js index <HASH>..<HASH> 100644 --- a/test/jade.test.js +++ b/test/jade.test.js @@ -35,6 +35,7 @@ module.exports = { 'test doctypes': function(assert){ assert.equal('<?xml version="1.0" encoding="utf-8" ?>', render('!!! xml')); assert.equal('<!DOCTYPE html>', render('doctype html')); + assert.equal('<!DOCTYPE html>', render('doctype HTML')); assert.equal('<!DOCTYPE html>', render('!!! 5')); assert.equal('<!DOCTYPE html>', render('!!!', { doctype:'html' })); assert.equal('<!DOCTYPE html>', render('!!! html', { doctype:'xml' }));
Added; doctype value is now case-insensitive. Should still be defined lowercase
pugjs_then-pug
train
1dd9578d065b0c15e37e6207ab4a947c7b8a5a65
diff --git a/mysql/toolkit/components/advanced.py b/mysql/toolkit/components/advanced.py index <HASH>..<HASH> 100644 --- a/mysql/toolkit/components/advanced.py +++ b/mysql/toolkit/components/advanced.py @@ -97,6 +97,6 @@ class Advanced: self._printer('\t' + str(len(tables)), 'tables truncated') return tables - def execute_script(self, sql_script, commands=None, split_func=True, split_char=';'): + def execute_script(self, sql_script, commands=None, split_func=True, split_char=';', dump_fails=True): """Wrapper method for ExecuteScript class.""" - ExecuteScript(self, sql_script, commands, split_func, split_char) + ExecuteScript(self, sql_script, commands, split_func, split_char, dump_fails) diff --git a/mysql/toolkit/components/execute.py b/mysql/toolkit/components/execute.py index <HASH>..<HASH> 100644 --- a/mysql/toolkit/components/execute.py +++ b/mysql/toolkit/components/execute.py @@ -5,7 +5,7 @@ from tqdm import tqdm class ExecuteScript: - def __init__(self, mysql_instance, sql_script, commands=None, split_func=True, split_char=';'): + def __init__(self, mysql_instance, sql_script, commands=None, split_func=True, split_char=';', dump_fails=True): """Execute a sql file one command at a time.""" # Pass MySQL instance from execute_script method to ExecuteScript class self.MySQL = mysql_instance @@ -27,7 +27,7 @@ class ExecuteScript: self.execute_commands() # Dump failed commands to text file - if len(self.fail) > 1: + if len(self.fail) > 1 and dump_fails: self.dump_fails() def _get_commands(self, sql_script): @@ -46,7 +46,9 @@ class ExecuteScript: # Remove 'DROP' commands commands_with_drops = len(self.commands) self.commands = [c for c in self.commands if not c.startswith('DROP')] - print("\tDROP commands removed", commands_with_drops - len(self.commands)) + removed = commands_with_drops - len(self.commands) + if removed > 0: + print("\tDROP commands removed", removed) # Execute every command from the input file print('\t' + str(len(self.commands)), 'commands')
added conditional print statement to only print number of drops removed if more than 0
mrstephenneal_mysql-toolkit
train
bd5c64379cfd1de17ae71a0e734248da0cd03684
diff --git a/board.py b/board.py index <HASH>..<HASH> 100644 --- a/board.py +++ b/board.py @@ -43,8 +43,7 @@ time. "calendar_rows_on_screen": "integer DEFAULT 240", "calendar_scrolled_to": "integer DEFAULT 0"}, ("dimension",), - {"dimension": ("dimension", "name"), - "wallpaper": ("image", "name")}, + {"wallpaper": ("image", "name")}, ["calendar_rows_on_screen > 0", "calendar_scrolled_to >= 0"])] def __init__(self, dimension, width, height, wallpaper, diff --git a/item.py b/item.py index <HASH>..<HASH> 100644 --- a/item.py +++ b/item.py @@ -10,6 +10,7 @@ from event import ( IrrelevantEvent, ImpracticalEvent, PortalTravelEvent) +from location import Location from effect import Effect, EffectDeck import re diff --git a/pawn.py b/pawn.py index <HASH>..<HASH> 100644 --- a/pawn.py +++ b/pawn.py @@ -23,18 +23,18 @@ class Pawn: tables = [ ("pawn", - {"board": "text", + {"dimension": "text", "thing": "text", "img": "text", "visible": "boolean", "interactive": "boolean"}, - ("board", "thing"), + ("dimension", "thing"), {"img": ("img", "name"), - "board, thing": ("thing", "dimension, name")}, + "dimension, thing": ("thing", "dimension, name")}, [])] - def __init__(self, board, thing, img, visible, interactive, db=None): - self.board = board + def __init__(self, dimension, thing, img, visible, interactive, db=None): + self.dimension = dimension self.thing = thing self.img = img self.visible = visible @@ -48,13 +48,10 @@ class Pawn: if db is not None: dimname = None thingname = None - if stringlike(self.board): - dimname = self.board + if stringlike(self.dimension): + dimname = self.dimension else: - if stringlike(self.board.dimension): - dimname = self.board.dimension - else: - dimname = self.board.dimension.name + dimname = self.dimension.name if stringlike(self.thing): thingname = self.thing else: @@ -75,8 +72,9 @@ class Pawn: def unravel(self, db): # Invariant: things have already been unraveled - if stringlike(self.board): - self.board = db.boarddict[self.board] + if stringlike(self.dimension): + self.dimension = db.dimensiondict[self.dimension] + self.board = db.boarddict[self.dimension.name] if stringlike(self.thing): self.thing = db.itemdict[self.board.dimension.name][self.thing] self.thing.pawn = self
dimension doesn't really need its own table
LogicalDash_LiSE
train
085c60af49f17751476ee42b51ca72d0ace6efa3
diff --git a/src/ol/renderer/canvas/TileLayer.js b/src/ol/renderer/canvas/TileLayer.js index <HASH>..<HASH> 100644 --- a/src/ol/renderer/canvas/TileLayer.js +++ b/src/ol/renderer/canvas/TileLayer.js @@ -143,7 +143,7 @@ class CanvasTileLayerRenderer extends CanvasLayerRenderer { const tileSource = tileLayer.getSource(); const sourceRevision = tileSource.getRevision(); const tileGrid = tileSource.getTileGridForProjection(projection); - const zDirection = tileSource.getZDirection() === undefined ? this.zDirection : tileSource.getZDirection(); + const zDirection = tileSource.zDirection === undefined ? this.zDirection : tileSource.zDirection; const z = tileGrid.getZForResolution(viewResolution, zDirection); const tileResolution = tileGrid.getResolution(z); let extent = frameState.extent; diff --git a/src/ol/source/Tile.js b/src/ol/source/Tile.js index <HASH>..<HASH> 100644 --- a/src/ol/source/Tile.js +++ b/src/ol/source/Tile.js @@ -326,18 +326,6 @@ class TileSource extends Source { */ useTile(z, x, y, projection) {} - /** - * Indicate which resolution should be used by a renderer if the views resolution - * does not match any resolution of the tile source. - * If 0, the nearest resolution will be used. If 1, the nearest lower resolution - * will be used. If -1, the nearest higher resolution will be used. If undefined, - * the decision is left to the renderer. - * @return {number|undefined} Prefered zDirection for source resolution selection. - */ - getZDirection() { - return undefined; - } - } diff --git a/src/ol/source/Zoomify.js b/src/ol/source/Zoomify.js index <HASH>..<HASH> 100644 --- a/src/ol/source/Zoomify.js +++ b/src/ol/source/Zoomify.js @@ -258,20 +258,12 @@ class Zoomify extends TileImage { }); /** - * @protected * @type {number|undefined} */ this.zDirection = options.zDirection; } - /** - * @inheritDoc - */ - getZDirection() { - return this.zDirection; - } - } export default Zoomify;
Remove tile source zDirection getter The renderer now accesses the zDirection member directly if it exists. This also has the advantage of not advertising any object methods that have no use for most tile sources.
openlayers_openlayers
train
fbe6b93e386ecbb165429213c613c489d7df8852
diff --git a/src/Draggable/Draggable.js b/src/Draggable/Draggable.js index <HASH>..<HASH> 100644 --- a/src/Draggable/Draggable.js +++ b/src/Draggable/Draggable.js @@ -622,7 +622,7 @@ export default class Draggable { } else if (typeof appendTo === 'function') { return appendTo(source); } else { - return document.body; + return source.parentNode; } } }
Source parent container as appendable container default
Shopify_draggable
train
24e0bd7767d09acee176108b2c6da49733544509
diff --git a/packages/mdc-chips/addon/components/mdc-chip-link-to.js b/packages/mdc-chips/addon/components/mdc-chip-link-to.js index <HASH>..<HASH> 100644 --- a/packages/mdc-chips/addon/components/mdc-chip-link-to.js +++ b/packages/mdc-chips/addon/components/mdc-chip-link-to.js @@ -9,5 +9,7 @@ export default LinkComponent.extend (ChipMixin, { text: computed ('params.[]', function () { return this.get ('params')[0]; - }) + }), + + activeClass: 'mdc-chip--active', });
feat: Specialized the activeClass on chip links
onehilltech_ember-cli-mdc
train
bb0af9f85a09e280501fdef4e08c5b1c5f9c5caa
diff --git a/grails-core/src/test/groovy/grails/util/GrailsUtilTests.java b/grails-core/src/test/groovy/grails/util/GrailsUtilTests.java index <HASH>..<HASH> 100644 --- a/grails-core/src/test/groovy/grails/util/GrailsUtilTests.java +++ b/grails-core/src/test/groovy/grails/util/GrailsUtilTests.java @@ -25,7 +25,7 @@ import junit.framework.TestCase; public class GrailsUtilTests extends TestCase { public void testGrailsVersion() { - assertEquals("3.3.13-SNAPSHOT", GrailsUtil.getGrailsVersion()); + assertEquals("3.3.13.BUILD-SNAPSHOT", GrailsUtil.getGrailsVersion()); } @Override
Corrected GrailsUtilTests to BUILD-SNAPSHOT
grails_grails-core
train
9def1e807e1f4cb5475874e00d2770c25f066497
diff --git a/templates/js/ui.atk4_loader.js b/templates/js/ui.atk4_loader.js index <HASH>..<HASH> 100644 --- a/templates/js/ui.atk4_loader.js +++ b/templates/js/ui.atk4_loader.js @@ -250,8 +250,9 @@ $.widget('ui.atk4_loader', { if(!f.hasClass('nofocus'))f.focus(); }); },function(){ // second callback, which is always called, when loading is completed - self.loader.hide(); + self.loader.hide(); self.loading=false; + el.trigger('after_html_loaded'); }); }, /*
Update templates/js/ui.atk4_loader.js trigger after html has been changed with ajax
atk4_atk4
train
fc9198a84aac1eefbdd7e64a4f3e8a3c18793c00
diff --git a/rtv/content.py b/rtv/content.py index <HASH>..<HASH> 100644 --- a/rtv/content.py +++ b/rtv/content.py @@ -431,19 +431,19 @@ class SubredditContent(Content): raise exceptions.SubredditError('Unrecognized order "%s"' % order) if query: - loc = None - if listing == 'r' and name != 'front': - loc = name - - elif listing == 'domain': - query = 'site:{0} {1}'.format(name, query) - - elif listing in ['u', 'user']: + if listing in ['u', 'user'] and '/m/' not in name: + reddit.config.API_PATHS['search'] = 'r/{subreddit}/search' query = 'author:{0} {1}'.format(name, query) + location = None + else: + reddit.config.API_PATHS['search'] = \ + '{}/{{subreddit}}/search'.format(listing) + location = None if name == 'front' else name - submissions = reddit.search(query, subreddit=loc, sort=order, + submissions = reddit.search(query, subreddit=location, sort=order, period=period) + elif listing == 'domain': submissions = reddit.get_domain_listing(name, sort=(order or 'hot'), period=period)
Make search work for different types of reddit pages
michael-lazar_rtv
train
db622c3ea4f7746471c7888303badf8215a12877
diff --git a/lib/validates_lengths_from_database/version.rb b/lib/validates_lengths_from_database/version.rb index <HASH>..<HASH> 100644 --- a/lib/validates_lengths_from_database/version.rb +++ b/lib/validates_lengths_from_database/version.rb @@ -1,3 +1,3 @@ module ValidatesLengthsFromDatabase - VERSION = "0.1.1" + VERSION = "0.1.2" end
Bumped version for new gem release.
rubiety_validates_lengths_from_database
train
08534b28a2195d30bb3c45dce0b834f3696028b8
diff --git a/internal/goofys_test.go b/internal/goofys_test.go index <HASH>..<HASH> 100644 --- a/internal/goofys_test.go +++ b/internal/goofys_test.go @@ -4200,10 +4200,12 @@ func (s *GoofysTest) testReadMyOwnWriteFuse(t *C, externalUpdate bool) { if !externalUpdate { // we flushed and ttl expired, next lookup should // realize nothing is changed and NOT invalidate the - // cache. Except ADLv1 because PUT there doesn't + // cache. Except ADLv1,GCS because PUT there doesn't // return the mtime, so the open above will think the // file is updated and not re-use cache - if _, adlv1 := s.cloud.(*ADLv1); !adlv1 { + _, adlv1 := s.cloud.(*ADLv1) + _, isGCS := s.cloud.(*GCSBackend) + if !adlv1 && !isGCS { cloud.err = fuse.EINVAL } } else {
Fix TestReadMyOwnWriteFuse for GCS
kahing_goofys
train
2cfd54f05447bd5627bfd9315bd36f4eda4b82a6
diff --git a/lib/torquespec/server.rb b/lib/torquespec/server.rb index <HASH>..<HASH> 100644 --- a/lib/torquespec/server.rb +++ b/lib/torquespec/server.rb @@ -13,11 +13,11 @@ module TorqueSpec def start(opts={}) if ready? if TorqueSpec.lazy - puts "Using running JBoss (try lazy=false if you get errors)" + RSpec.configuration.reporter.message "Using running JBoss (try lazy=false if you get errors)" if TorqueSpec.verbose? return else stop - puts "Waiting for running JBoss to shutdown" + RSpec.configuration.reporter.message "Waiting for running JBoss to shutdown" if TorqueSpec.verbose? sleep(5) sleep(1) while ready? self.stopped = false @@ -30,19 +30,18 @@ module TorqueSpec return if stopped self.stopped = true if TorqueSpec.lazy - puts "JBoss won't be stopped (lazy=true)" + RSpec.configuration.reporter.message "JBoss won't be stopped (lazy=true)" if TorqueSpec.verbose? else shutdown - puts "Shutdown message sent to JBoss" + RSpec.configuration.reporter.message "Shutdown message sent to JBoss" if TorqueSpec.verbose? end end def deploy(url) t0 = Time.now - print "deploy #{url} " - $stdout.flush + RSpec.configuration.reporter.message "deploy #{url} " if TorqueSpec.verbose? _deploy(url) - puts "in #{(Time.now - t0).to_i}s" + RSpec.configuration.reporter.message "in #{(Time.now - t0).to_i}s" if TorqueSpec.verbose? end def undeploy(url) @@ -55,11 +54,11 @@ module TorqueSpec end def wait_for_ready(timeout) - puts "Waiting up to #{timeout}s for JBoss to boot" + RSpec.configuration.reporter.message "Waiting up to #{timeout}s for JBoss to boot" if TorqueSpec.verbose? t0 = Time.now while (Time.now - t0 < timeout && !stopped) do if ready? - puts "JBoss started in #{(Time.now - t0).to_i}s" + RSpec.configuration.reporter.message "JBoss started in #{(Time.now - t0).to_i}s" if TorqueSpec.verbose? return true end sleep(1) @@ -77,7 +76,7 @@ module TorqueSpec self.server_pid = process.pid Thread.new(process) { |console| while(console.gets); end } %w{ INT TERM KILL }.each { |signal| trap(signal) { stop } } - puts "#{cmd}\npid=#{process.pid}" + RSpec.configuration.reporter.message "#{cmd}\npid=#{process.pid}" if TorqueSpec.verbose? wait > 0 ? wait_for_ready(wait) : process.pid end diff --git a/lib/torquespec/torquespec.rb b/lib/torquespec/torquespec.rb index <HASH>..<HASH> 100644 --- a/lib/torquespec/torquespec.rb +++ b/lib/torquespec/torquespec.rb @@ -20,7 +20,7 @@ module TorqueSpec end class << self - attr_accessor :knob_root, :jboss_home, :jvm_args, :max_heap, :lazy, :drb_port, :spec_dir, :domain_mode + attr_accessor :knob_root, :jboss_home, :jvm_args, :max_heap, :lazy, :drb_port, :spec_dir, :domain_mode, :verbose def configure yield self end @@ -45,6 +45,9 @@ module TorqueSpec rescue Exception $stderr.puts "WARN: Unable to determine JBoss install location; set either TorqueSpec.jboss_home or ENV['JBOSS_HOME']" end + def verbose? + @verbose + end end def self.on_windows? @@ -93,5 +96,7 @@ TorqueSpec.configure do |config| config.knob_root = ".torquespec" config.domain_mode = %w(yes true 1).include?(java.lang.System.getProperty('domain.mode') || ENV['DOMAIN_MODE']) config.jvm_args = "-Xms64m -Xmx1024m -XX:MaxPermSize=512m -XX:+UseConcMarkSweepGC -XX:+UseParNewGC -XX:+CMSClassUnloadingEnabled -Djruby.home=#{config.jruby_home}" + config.verbose = false end +
Adds the option to make TorqueSpec quieter. * Logs through RSpec messages instead of farting out of $stdout all the time. * Adds the option to make it silent, in which case the output should look similar to vanilla RSpec (without torquespec). * Verbosity is ON by default (to behave as closely to the original as is reasonably possible). Note that this isn't exactly identical to the original, which was outputting through $stdout, not RSpec messages.
torquebox_torquespec
train
ca07251938cde89d23090f3b4382e1ca9f9ac03d
diff --git a/code/forms/CancelOrderForm.php b/code/forms/CancelOrderForm.php index <HASH>..<HASH> 100644 --- a/code/forms/CancelOrderForm.php +++ b/code/forms/CancelOrderForm.php @@ -5,12 +5,11 @@ * @package shop * @subpackage forms */ -class Order_CancelForm extends Form { +class CancelOrderForm extends Form { static $email_notification = false; function __construct($controller, $name, $orderID) { - $fields = new FieldSet( new HiddenField('OrderID', '', $orderID) ); @@ -18,10 +17,11 @@ class Order_CancelForm extends Form { new FormAction('doCancel', _t('Order.CANCELORDER','Cancel this order')) ); parent::__construct($controller, $name, $fields, $actions); + $this->extend("updateForm"); } /** - * Form action handler for Order_CancelForm. + * Form action handler for CancelOrderForm. * * Take the order that this was to be change on, * and set the status that was requested from @@ -35,18 +35,15 @@ class Order_CancelForm extends Form { $order = DataObject::get_by_id('Order', $SQL_data['OrderID']); $order->Status = 'MemberCancelled'; $order->write(); - //TODO: notify people via email?? Make it optional. if(self::$email_notification){ $email = new Email(Email::getAdminEmail(),Email::getAdminEmail(),sprintf(_t('Order.CANCELSUBJECT','Order #%d cancelled by member'),$order->ID),$order->renderWith('Order')); $email->send(); } - if(Member::currentUser() && $link = AccountPage::find_link()){ //TODO: set session message "order successfully cancelled". Director::redirect($link); //TODO: can't redirect to account page when not logged in }else{ - $form->Controller()->setSessionMessage(_t("OrderForm.ORDERCANCELLED", "Order sucessfully cancelled"),'warning'); //assumes controller has OrderManipulation extension Director::redirectBack(); } diff --git a/code/forms/OutstandingPaymentForm.php b/code/forms/OutstandingPaymentForm.php index <HASH>..<HASH> 100644 --- a/code/forms/OutstandingPaymentForm.php +++ b/code/forms/OutstandingPaymentForm.php @@ -24,7 +24,7 @@ class OutstandingPaymentForm extends Form { $requiredFields = array_merge($requiredFields, $paymentRequiredFields); } $actions = new FieldSet( - new FormAction('dopayment', _t('OrderForm.PAYORDER','Pay outstanding balance')) + new FormAction('dopayment', _t('OrderForm.PAYORDER','Pay outstanding balance')) ); parent::__construct($controller, $name, $fields, $actions, $requiredFields); $this->extend('updateForm'); diff --git a/code/model/OrderManipulation.php b/code/model/OrderManipulation.php index <HASH>..<HASH> 100644 --- a/code/model/OrderManipulation.php +++ b/code/model/OrderManipulation.php @@ -108,7 +108,7 @@ class OrderManipulation extends Extension{ * checking to see if they can cancel their order * first of all. * - * @return Order_CancelForm + * @return CancelOrderForm */ function CancelForm() { if(self::$allow_cancelling && $order = $this->orderfromid()) {
Renamed Order_CancelForm to CancelOrderForm
silvershop_silvershop-core
train
035085d3831d3a080b1491f04888112ff77a0dee
diff --git a/templar/link.py b/templar/link.py index <HASH>..<HASH> 100644 --- a/templar/link.py +++ b/templar/link.py @@ -2,6 +2,7 @@ import argparse import os import re import sys +import textwrap from collections import OrderedDict from templar.markdown import convert @@ -126,9 +127,6 @@ def scrape_headers(text, builder): # Linker # ########## -def add_whitespace(text, whitespace): - return whitespace + ('\n' + whitespace).join(line for line in text.split('\n')) - re_include = re.compile(r""" (?:(?<=\n)|(?<=\A)) ([ \t]*) # \1 is leading whitespace @@ -165,12 +163,17 @@ def substitute_links(text, cache, base_dir): regex = match.group(3) if not file_exists(filename): filename = os.path.join(base_dir, filename) + if not file_exists(filename): + print("Warning: could not find file", match.group(2), + "or", filename) + return '' + text = retrieve_and_link(filename, cache) if not regex: result = cache[filename + ':all'] else: result = resolve_include_regex(regex, cache, filename) - return add_whitespace(result, match.group(1)) + return textwrap.indent(result, match.group(1)) return re_include.sub(link_sub, text) def retrieve_and_link(filename, cache): diff --git a/tests/test_link/test_include.py b/tests/test_link/test_include.py index <HASH>..<HASH> 100644 --- a/tests/test_link/test_include.py +++ b/tests/test_link/test_include.py @@ -358,6 +358,16 @@ class IncludeTest(LinkTest): """ self.assertLink('text', expect, files) + def testNonexistentIncludeFile(self): + files = { + 'text': """ + <include does-not-exist.md> + """, + } + expect = """ + """ + self.assertLink('text', expect, files) + if __name__ == '__main__': main()
Add warning when linker cannot find an included filepath (fixes #<I>)
albert12132_templar
train
68642fc2b9f62504fc8bc1f60dce27c508db6916
diff --git a/hawk/src/main/java/com/orhanobut/hawk/HawkBuilder.java b/hawk/src/main/java/com/orhanobut/hawk/HawkBuilder.java index <HASH>..<HASH> 100644 --- a/hawk/src/main/java/com/orhanobut/hawk/HawkBuilder.java +++ b/hawk/src/main/java/com/orhanobut/hawk/HawkBuilder.java @@ -167,17 +167,10 @@ public class HawkBuilder { case NO_ENCRYPTION: encryption = new Base64Encryption(); break; - case HIGHEST: - encryption = new AesEncryption(getStorage(), getPassword()); - if (!getEncryption().init()) { - getInfoStorage().put(KEY_NO_CRYPTO, true); - encryption = new Base64Encryption(); - } - break; case MEDIUM: - encryption = new AesEncryption(getStorage(), null); + case HIGHEST: + encryption = new ConcealEncryption(context); if (!getEncryption().init()) { - //fallback to no encryption getInfoStorage().put(KEY_NO_CRYPTO, true); encryption = new Base64Encryption(); }
Replace custom AES crypto implementation with Conceal as default
orhanobut_hawk
train
5c3916d8be950a1c3c0b8c370ae915cf63eacc6b
diff --git a/src/server/worker/serverworkermanager.js b/src/server/worker/serverworkermanager.js index <HASH>..<HASH> 100644 --- a/src/server/worker/serverworkermanager.js +++ b/src/server/worker/serverworkermanager.js @@ -51,18 +51,21 @@ function ServerWorkerManager(_parameters) { _workers[worker.pid] = {worker: worker, state: CONSTANTS.workerStates.initializing, type: null, cb: null}; logger.debug('workerPid forked ' + worker.pid); worker.on('message', messageHandling); + worker.on('exit', function(code, signal) { + logger.debug('worker has exited: ' + worker.pid); + if (code !== null && !signal) { + logger.warn('worker ' + worker.pid + ' has exited abnormally with code ' + code); + } + delete _workers[worker.pid]; + reserveWorkerIfNecessary(); + }); } } function freeWorker(workerPid) { - //FIXME it would be better if we would have a global function that listens to all close events of the children - //because that way we could be able to get child-freeze and reuse the slot if (_workers[workerPid]) { - _workers[workerPid].worker.on('close', function (/*code, signal*/) { - logger.debug('worker have been freed: ' + workerPid); - delete _workers[workerPid]; - }); _workers[workerPid].worker.kill('SIGINT'); + delete _workers[workerPid]; } } @@ -71,6 +74,7 @@ function ServerWorkerManager(_parameters) { var len = Object.keys(_workers).length; logger.debug('there are ' + len + ' worker to close'); Object.keys(_workers).forEach(function (workerPid) { + _workers[workerPid].worker.removeAllListeners('exit'); _workers[workerPid].worker.on('close', function (/*code, signal*/) { logger.debug('workerPid closed: ' + workerPid); delete _workers[workerPid]; @@ -185,6 +189,10 @@ function ServerWorkerManager(_parameters) { function request(parameters, callback) { _waitingRequests.push({request: parameters, cb: callback}); + reserveWorkerIfNecessary(); + } + + function reserveWorkerIfNecessary() { var workerIds = Object.keys(_workers || {}), i, initializingWorkers = 0, freeWorkers = 0; @@ -197,7 +205,7 @@ function ServerWorkerManager(_parameters) { } } - if (_waitingRequests.length > initializingWorkers + freeWorkers && + if (_waitingRequests.length + 1 /* keep a spare */ > initializingWorkers + freeWorkers && workerIds.length < gmeConfig.server.maxWorkers) { reserveWorker(); } @@ -259,7 +267,7 @@ function ServerWorkerManager(_parameters) { if (i < workerPids.length) { assignRequest(workerPids[i]); - } else if (_waitingRequests.length > initializingWorkers && + } else if (_waitingRequests.length + 1 /* keep a spare */ > initializingWorkers && Object.keys(_workers || {}).length < gmeConfig.server.maxWorkers) { reserveWorker(); } @@ -280,7 +288,7 @@ function ServerWorkerManager(_parameters) { if (_managerId === null) { _managerId = setInterval(queueManager, 10); } - reserveWorker(); + reserveWorkerIfNecessary(); } return { diff --git a/src/server/worker/simpleworker.js b/src/server/worker/simpleworker.js index <HASH>..<HASH> 100644 --- a/src/server/worker/simpleworker.js +++ b/src/server/worker/simpleworker.js @@ -294,7 +294,7 @@ var WEBGME = require(__dirname + '/../../../webgme'), if (closeErr) { logger.error('error closing storage', closeErr); } - callback(err, result.serialize()); + callback(err, result ? result.serialize() : null); }); } ); @@ -811,4 +811,4 @@ process.on('SIGINT', function () { //console.error('child was killed without initialization'); process.exit(1); } -}); \ No newline at end of file +});
serverworkermanager: handle when a worker exits unexpectedly Former-commit-id: 4b<I>f5a<I>b<I>b<I>fb<I>c<I>bb<I>
webgme_webgme-engine
train
bd57416c0ae99411f112ba56aab75943df876306
diff --git a/zinnia_wymeditor/admin.py b/zinnia_wymeditor/admin.py index <HASH>..<HASH> 100644 --- a/zinnia_wymeditor/admin.py +++ b/zinnia_wymeditor/admin.py @@ -10,6 +10,7 @@ from django.contrib.staticfiles.storage import staticfiles_storage from zinnia.models import Entry from zinnia.admin.entry import EntryAdmin +from zinnia.settings import ENTRY_BASE_MODEL class EntryAdminWYMEditorMixin(object): @@ -64,5 +65,7 @@ class EntryAdminWYMEditor(EntryAdminWYMEditorMixin, """ pass -admin.site.unregister(Entry) -admin.site.register(Entry, EntryAdminWYMEditor) + +if ENTRY_BASE_MODEL == 'zinnia.models_bases.entry.AbstractEntry': + admin.site.unregister(Entry) + admin.site.register(Entry, EntryAdminWYMEditor)
Only register the EntryAdminWYMEditor if Entry model is based on AbstractEntry
django-blog-zinnia_zinnia-wysiwyg-wymeditor
train
bfd24867a9729f7400d44fdb632676b319b4f7d2
diff --git a/lib/spaceship/tunes/app_version.rb b/lib/spaceship/tunes/app_version.rb index <HASH>..<HASH> 100644 --- a/lib/spaceship/tunes/app_version.rb +++ b/lib/spaceship/tunes/app_version.rb @@ -199,6 +199,11 @@ module Spaceship client.update_app_version!(application.apple_id, is_live?, raw_data) end + # @return (String) An URL to this specific resource. You can enter this URL into your browser + def url + "https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/ra/ng/app/#{self.application.apple_id}/" + (self.is_live? ? "cur" : "") + end + # Private methods def setup diff --git a/lib/spaceship/tunes/application.rb b/lib/spaceship/tunes/application.rb index <HASH>..<HASH> 100644 --- a/lib/spaceship/tunes/application.rb +++ b/lib/spaceship/tunes/application.rb @@ -119,6 +119,11 @@ module Spaceship edit_version || live_version end + # @return (String) An URL to this specific resource. You can enter this URL into your browser + def url + "https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/ra/ng/app/#{self.apple_id}" + end + # @return (Hash) Contains the reason for rejection. # if everything is alright, the result will be # `{"sectionErrorKeys"=>[], "sectionInfoKeys"=>[], "sectionWarningKeys"=>[], "replyConstraints"=>{"minLength"=>1, "maxLength"=>4000}, "appNotes"=>{"threads"=>[]}, "betaNotes"=>{"threads"=>[]}, "appMessages"=>{"threads"=>[]}}` diff --git a/spec/tunes/app_version_spec.rb b/spec/tunes/app_version_spec.rb index <HASH>..<HASH> 100644 --- a/spec/tunes/app_version_spec.rb +++ b/spec/tunes/app_version_spec.rb @@ -43,6 +43,17 @@ describe Spaceship::AppVersion do expect(version.release_notes['English']).to eq('Also News') end + describe "#url" do + let (:app) { Spaceship::Application.all.first } + it "live version" do + expect(app.live_version.url).to eq('https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/ra/ng/app/898536088/cur') + end + + it "edit version" do + expect(app.edit_version.url).to eq('https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/ra/ng/app/898536088/') + end + end + describe "App Status" do it "parses readyForSale" do version = Spaceship::Application.all.first.live_version diff --git a/spec/tunes/application_spec.rb b/spec/tunes/application_spec.rb index <HASH>..<HASH> 100644 --- a/spec/tunes/application_spec.rb +++ b/spec/tunes/application_spec.rb @@ -24,6 +24,10 @@ describe Spaceship::Application do expect(app.raw_data['versions'].count).to eq(2) end + it "#url" do + expect(Spaceship::Application.all.first.url).to eq('https://itunesconnect.apple.com/WebObjects/iTunesConnect.woa/ra/ng/app/898536088/') + end + describe "#find" do describe "find using bundle identifier" do it "returns the application if available" do
Added url method to app_version and application
fastlane_fastlane
train
19479ebdfb3a658e1e959f6cc779c6b3166a0d75
diff --git a/backup/restorelib.php b/backup/restorelib.php index <HASH>..<HASH> 100644 --- a/backup/restorelib.php +++ b/backup/restorelib.php @@ -551,6 +551,16 @@ $course->hiddensections = addslashes($course_header->course_hiddensections); $course->timecreated = addslashes($course_header->course_timecreated); $course->timemodified = addslashes($course_header->course_timemodified); + //Calculate sortorder field + $sortmax = get_record_sql('SELECT MAX(sortorder) AS max + FROM ' . $CFG->prefix . 'course + WHERE category=' . $course->category); + if (!empty($sortmax->max)) { + $course->sortorder = $sortmax->max + 1; + unset($sortmax); + } else { + $course->sortorder = 100; + } //Now insert the record $newid = insert_record("course",$course); if ($newid) {
Calculate NEXT course->sortorder in restore to avoid sortorder always growing (via fix_course_sortorder()). Merged from MOODLE_<I>_STABLE
moodle_moodle
train
0e2eeb7e3315e51881b5bd941165dbad1ca0a533
diff --git a/deep.js b/deep.js index <HASH>..<HASH> 100644 --- a/deep.js +++ b/deep.js @@ -212,6 +212,9 @@ define([ deep.client = {}; require("./lib/stores/chain"); + deep.delay = function(ms){ + return deep({}).delay(ms); + } //_________________________________________________________________________________
add deep.delay (front API)
deepjs_deepjs
train