Columns: message (string, length 13-484), diff (string, length 38-4.63k)
Code block: support the "pycon" language specifier It's used for code copied from the Python REPL.
@@ -12,7 +12,7 @@ from bot.utils import has_lines log = logging.getLogger(__name__) BACKTICK = "`" -PY_LANG_CODES = ("python", "py") # Order is important; "py" is second cause it's a subset. +PY_LANG_CODES = ("python", "pycon", "py") # Order is important; "py" is last cause it's a subset. _TICKS = { BACKTICK, "'",
Spin/sleep a few times before raising an exception on job not found. Helps with rare but persistent workflow failures with CWL on an NFS jobStore
@@ -29,6 +29,7 @@ import stat import errno import time import traceback +import time try: import cPickle as pickle except ImportError: @@ -143,6 +144,31 @@ class FileJobStore(AbstractJobStore): self.update(jobGraph) self._batchedJobGraphs = None + def waitForExists(self, jobStoreID, maxTries=35, sleepTime=1): + """Spin-wait and block for a file to appear before returning False if it does not. + + The total max wait time is maxTries * sleepTime. The current default is + tuned to match Linux NFS defaults where the client's cache of the directory + listing on the server is supposed to become coherent within 30 sec. + Delayes beyond that would probably indicate a pathologically slow file system + that just should not be used for the jobStore. + + The warning will be sent to the log only on the first retry. + + In practice, the need for retries happens rarely, but it does happen + over the course of large workflows with a jobStore on a busy NFS.""" + for iTry in range(1,maxTries+1): + jobFile = self._getJobFileName(jobStoreID) + if os.path.exists(jobFile): + return True + if iTry >= maxTries: + return False + elif iTry == 1: + logger.warning(("Job file `{}` for job `{}` does not exist (yet). We will try #{} more times with {}s " + "intervals.").format(jobFile, jobStoreID, maxTries - iTry, sleepTime)) + time.sleep(sleepTime) + return False + def exists(self, jobStoreID): return os.path.exists(self._getJobFileName(jobStoreID)) @@ -459,7 +485,7 @@ class FileJobStore(AbstractJobStore): """ Raises a NoSuchJobException if the jobStoreID does not exist. """ - if not self.exists(jobStoreID): + if not self.waitForExists(jobStoreID,30): raise NoSuchJobException(jobStoreID) def _checkJobStoreFileID(self, jobStoreFileID):
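To make the retry logic above easier to follow in isolation, here is a minimal, hypothetical sketch of the same spin-wait pattern (the function name and defaults are assumptions, not the Toil API): poll for a path, warn once on the first miss, and give up after a bounded number of sleeps.

import os
import time

def wait_for_path(path, max_tries=35, sleep_time=1.0):
    """Poll for path to appear; True once it exists, False after max_tries."""
    for attempt in range(1, max_tries + 1):
        if os.path.exists(path):
            return True
        if attempt == 1:
            print("Path %r not visible yet; will retry %d more times at %ss intervals."
                  % (path, max_tries - attempt, sleep_time))
        if attempt < max_tries:
            time.sleep(sleep_time)
    return False

The worst-case wait is max_tries * sleep_time seconds, matching the NFS directory-cache coherence window described in the docstring above.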
Moderation: add creation date & duration to expired infraction log Closes
@@ -311,6 +311,11 @@ class InfractionScheduler(Scheduler): user_id = infraction["user"] type_ = infraction["type"] id_ = infraction["id"] + inserted_at = infraction["inserted_at"] + expiry = infraction["expires_at"] + + expiry = dateutil.parser.isoparse(expiry).replace(tzinfo=None) if expiry else None + created = time.format_infraction_with_duration(inserted_at, expiry) log.info(f"Marking infraction #{id_} as inactive (expired).") @@ -318,7 +323,8 @@ class InfractionScheduler(Scheduler): log_text = { "Member": str(user_id), "Actor": str(self.bot.user), - "Reason": infraction["reason"] + "Reason": infraction["reason"], + "Created": created, } try:
modify cache label in CircleCI config Original attempt to include Python version did not work. We need to use context information (https://circleci.com/docs/2.0/contexts/) and the {{ .Environment.variableName }} key template (https://circleci.com/docs/2.0/caching/)
@@ -11,11 +11,21 @@ workflows: version: 2.1 test: jobs: - - test-3_10 - - test-3_9 - - test-3_8 - - test-3_7 - - test-3_6 + - test-3_10: + context: + - pyani + - test-3_9: + context: + - pyani + - test-3_8: + context: + - pyani + - test-3_7: + context: + - pyani + - test-3_6: + context: + - pyani weekly: triggers: - schedule: @@ -25,11 +35,21 @@ workflows: only: - master jobs: - - test-3_10 - - test-3_9 - - test-3_8 - - test-3_7 - - test-3_6 + - test-3_10: + context: + - pyani + - test-3_9: + context: + - pyani + - test-3_8: + context: + - pyani + - test-3_7: + context: + - pyani + - test-3_6: + context: + - pyani jobs: test-3_8: &test-template @@ -41,9 +61,14 @@ jobs: steps: - checkout + - run: + name: set_environment_variables + command: | + export PYTHONVER=`python --version | sed -r 's/ /_/g'` + - restore_cache: keys: - - pyani-dependencies-pip-{{ python --version | sed -r 's/ /_/g' }}-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "requirements-pip.txt" }}-{{ checksum "requirements-thirdparty.txt" }}-{{ checksum "requirements-fastani.txt" }}-{{ checksum "requirements-pyqt-conda.txt" }}-{{ checksum "requirements-pyqt-pip.txt" }} + - pyani-dependencies-pip-{{ .Environment.PYTHONVER }}-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "requirements-pip.txt" }}-{{ checksum "requirements-thirdparty.txt" }}-{{ checksum "requirements-fastani.txt" }}-{{ checksum "requirements-pyqt-conda.txt" }}-{{ checksum "requirements-pyqt-pip.txt" }} - pyani-dependencies-pip- - run: @@ -84,7 +109,7 @@ jobs: - save_cache: paths: - .pyenv - key: pyani-dependencies-pip-{{ python --version | sed -r 's/ /_/g' }}-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "requirements-pip.txt" }}-{{ checksum "requirements-thirdparty.txt" }}-{{ checksum "requirements-fastani.txt" }}-{{ checksum "requirements-pyqt-conda.txt" }}-{{ checksum "requirements-pyqt-pip.txt" }} + key: pyani-dependencies-pip-{{ .Environment.PYTHONVER }}-{{ .Branch }}-{{ checksum "requirements.txt" }}-{{ checksum "requirements-dev.txt" }}-{{ checksum "requirements-pip.txt" }}-{{ checksum "requirements-thirdparty.txt" }}-{{ checksum "requirements-fastani.txt" }}-{{ checksum "requirements-pyqt-conda.txt" }}-{{ checksum "requirements-pyqt-pip.txt" }} - run: name: install pyani
Add Infraction converter This adds the Infraction converter to be used in infraction_edit and infraction_append.
@@ -549,6 +549,36 @@ def _snowflake_from_regex(pattern: t.Pattern, arg: str) -> int: return int(match.group(1)) +class Infraction(Converter): + """ + Attempts to convert a given infraction ID into an infraction. + + Alternatively, `l`, `last`, or `recent` can be passed in order to + obtain the most recent infraction by the actor. + """ + + async def convert(self, ctx: Context, arg: str) -> t.Optional[dict]: + """Attempts to convert `arg` into an infraction `dict`.""" + if arg in ("l", "last", "recent"): + params = { + "actor__id": ctx.author.id, + "ordering": "-inserted_at" + } + + infractions = await ctx.bot.api_client.get("bot/infractions", params=params) + + if not infractions: + await ctx.send( + ":x: Couldn't find most recent infraction; you have never given an infraction." + ) + return None + + return infractions[0] + + else: + return ctx.bot.api_client.get(f"bot/infractions/{arg}") + + Expiry = t.Union[Duration, ISODateTime] FetchedMember = t.Union[discord.Member, FetchedUser] UserMention = partial(_snowflake_from_regex, RE_USER_MENTION)
Update hpgp-data.yaml Add Shasta publication and tags
@@ -6,8 +6,12 @@ UpdateFrequency: Data will be added and updated as technologies improve or new d Tags: - aws-pds - genomic + - genetic - life sciences -License: Human PanGenomics Project data are licensed under the Creative Commons CC0 1.0 Universal license. + - fastq + - fast5 + - cram +License: "[Creative Commons CC0 1.0 Universal](https://creativecommons.org/publicdomain/zero/1.0/deed.en)" Resources: - Description: Human PanGenomics Project ARN: arn:aws:s3:::human-pangenomics @@ -19,3 +23,6 @@ DataAtWork: Tutorials: Tools & Applications: Publications: + - Title: Nanopore sequencing and the Shasta toolkit enable efficient de novo assembly of eleven human genomes + URL: https://www.nature.com/articles/s41587-020-0503-6 + AuthorName: Shafin et al (2020)
Update I2cMux.py Cleanup for testing
-webgui = Runtime.createAndStart("WebGui","WebGui") +port="COM3" +# +if ('virtual' in globals() and virtual): + virtualArduino = Runtime.start("virtualArduino", "VirtualArduino") + virtualArduino.connect(port) ard = Runtime.createAndStart("Arduino","Arduino") -ard.connect("COM3") +ard.connect(port) # i2cmux = Runtime.createAndStart("i2cMux","I2cMux") # From version 1.0.2316 use attach instead of setController -i2cmux.setController(ard,"1","0x70") +# i2cmux.setController(ard,"1","0x70") i2cmux.attach(ard,"1","0x70") # mpu6050_0 = Runtime.createAndStart("Mpu6050-0","Mpu6050")
Update CUDA version on GPU tests PyTorch core is dropping support for 11.6.
@@ -17,7 +17,7 @@ jobs: strategy: matrix: python_version: ["3.8"] - cuda_arch_version: ["11.6"] + cuda_arch_version: ["11.7"] fail-fast: false uses: pytorch/test-infra/.github/workflows/linux_job.yml@main with:
Update metrics.rst Fixed grammar
@@ -167,7 +167,7 @@ Process Metrics: Search Metrics: - - ``mattermost_search_posts_searches_duration_seconds``: The total duration in seconds of search query requests. + - ``mattermost_search_posts_searches_duration_seconds``: The total duration, in seconds, of search query requests. - ``mattermost_search_posts_searches_total``: The total number of search query requests. WebSocket Metrics:
Events/logs: allow passing in timestamp `timestamp` is actually passed in normally by the client. `reported_timestamp` is the time of inserting into the db. Is it named the wrong way around? Perhaps.
@@ -42,7 +42,7 @@ class Events(v2_Events): raw_events = request_dict.get('events') or [] raw_logs = request_dict.get('logs') or [] if any( - item.get('timestamp') or item.get('reported_timestamp') + item.get('reported_timestamp') for item in itertools.chain(raw_events, raw_logs) ): check_user_action_allowed('set_timestamp')
Translate `<None>` in namespace view See
@@ -139,7 +139,7 @@ def _set_text(column, cell, model, iter, data): if element is RELATIONSHIPS: text = gettext("<Relationships>") else: - text = format(element) or "<None>" + text = format(element) or gettext("<None>") cell.set_property("text", text)
Add sos archive spec - lsblk_pairs Newly added "lsblk -O -P" command output in sosreport. (https://github.com/sosreport/sos/commit/a8dbdd2143f693758b4df76a615d06c85d8638fd)
@@ -120,6 +120,7 @@ class SosSpecs(Specs): libvirtd_qemu_log = glob_file(r"/var/log/libvirt/qemu/*.log") locale = simple_file("sos_commands/i18n/locale") lsblk = first_file(["sos_commands/block/lsblk", "sos_commands/filesys/lsblk"]) + lsblk_pairs = simple_file("sos_commands/block/lsblk_-O_-P") ls_boot = simple_file("sos_commands/boot/ls_-lanR_.boot") ls_sys_firmware = simple_file("sos_commands/boot/ls_-lanR_.sys.firmware") lscpu = simple_file("sos_commands/processor/lscpu")
fix standard_field to shadow_root. fix screenshot in log_error.
@@ -1473,7 +1473,7 @@ class WebappInternal(Base): else: self.wait_element(term) # find element - element = self.get_field(term,name_attr).find_parent() + element = self.get_field(term,name_attr).find_parent() if not self.webapp_shadowroot() else self.get_field(term,name_attr) if not(element): raise Exception("Couldn't find element") @@ -1484,7 +1484,7 @@ class WebappInternal(Base): container = self.get_current_container() self.send_keys(input_field(), Keys.F3) else: - icon = next(iter(element.select("img[src*=fwskin_icon_lookup], img[src*=btpesq_mdi]")),None) + icon = next(iter(element.select("img[src*=fwskin_icon_lookup], img[src*=btpesq_mdi], [style*=fwskin_icon_lookup]")),None) icon_s = self.soup_to_selenium(icon) container = self.get_current_container() self.click(icon_s) @@ -6626,12 +6626,14 @@ class WebappInternal(Base): if self.config.new_log: self.execution_flow() - if self.config.screenshot: + proceed_action = lambda: ((stack_item != "setUpClass") or (stack_item == "setUpClass" and self.restart_counter == 3)) + + if self.config.screenshot and proceed_action(): self.log.take_screenshot_log(self.driver, stack_item, test_number) if new_log_line: self.log.new_line(False, log_message) - if ((stack_item != "setUpClass") or (stack_item == "setUpClass" and self.restart_counter == 3)): + if proceed_action(): self.log.save_file() if not self.config.skip_restart and len(self.log.list_of_testcases()) > 1 and self.config.initial_program != '': self.restart() @@ -6659,7 +6661,7 @@ class WebappInternal(Base): if stack_item != "setUpClass": self.restart_counter = 0 - if ((stack_item != "setUpClass") or (stack_item == "setUpClass" and self.restart_counter == 3)): + if proceed_action(): if self.restart_counter >= 3: self.restart_counter = 0 self.assertTrue(False, log_message)
[elasticsearch] Add instructions for how to enable snapshots Closes
@@ -190,6 +190,13 @@ There are a couple reasons we recommend this. subPath: elasticsearch.keystore ``` +#### How to enable snapshotting? + +1. Install your [snapshot plugin](https://www.elastic.co/guide/en/elasticsearch/plugins/current/repository.html) into a custom docker image following the [how to install plugins guide](/elasticsearch/README.md#how-to-install-plugins) +2. Add any required secrets or credentials into an Elasticsearch keystore following the [how to use the keystore guide](/elasticsearch/README.md#how-to-use-the-keystore) +3. Configure the [snapshot repository](https://www.elastic.co/guide/en/elasticsearch/reference/current/modules-snapshots.html) as you normally would. +4. To automate snapshots you can use a tool like [curator](https://www.elastic.co/guide/en/elasticsearch/client/curator/current/snapshot.html). In the future there are plans to have Elasticsearch manage automated snapshots with [Snapshot Lifecycle Management](https://github.com/elastic/elasticsearch/issues/38461). + ### Local development environments
make sure Response is pickleable Ref
+import pickle from datetime import date from pytest import raises, fixture @@ -9,6 +10,19 @@ from elasticsearch_dsl.response.aggs import AggData, BucketData, Bucket def agg_response(aggs_search, aggs_data): return response.Response(aggs_search, aggs_data) +def test_agg_response_is_pickleable(agg_response): + agg_response.hits + r = pickle.loads(pickle.dumps(agg_response)) + + assert r == agg_response + +def test_response_is_pickleable(dummy_response): + res = response.Response(Search(), dummy_response) + res.hits + r = pickle.loads(pickle.dumps(res)) + + assert r == res + def test_response_stores_search(dummy_response): s = Search() r = response.Response(s, dummy_response)
1. Add is_launched column 2. Default thr image count to 0
@@ -22,7 +22,8 @@ SELECT "awc_location_months"."aww_name" AS "aww_name", "awc_location_months"."contact_phone_number" AS "contact_phone_number", "awc_location_months"."aggregation_level" AS "aggregation_level", -agg_awc.thr_distribution_image_count, +COALESCE(agg_awc.thr_distribution_image_count,0) as thr_distribution_image_count, +agg_awc.is_launched, agg_awc.month as month, COALESCE(SUM(agg_child_health.rations_21_plus_distributed),0) + COALESCE(ccr.mother_thr,0) as thr_given_21_days, COALESCE(SUM(agg_child_health.thr_eligible),0) + COALESCE(ccr.mother_thr_eligible,0) as total_thr_candidates @@ -92,6 +93,7 @@ GROUP BY "awc_location_months"."contact_phone_number", "awc_location_months"."aggregation_level", agg_awc.month, + agg_awc.is_launched, agg_awc.thr_distribution_image_count, ccr.mother_thr, ccr.mother_thr_eligible
Remove catch check Summary: Pull Request resolved:
@@ -457,7 +457,6 @@ class build_deps(PytorchCommand): check_file(os.path.join(third_party_path, "gloo", "CMakeLists.txt")) check_file(os.path.join(third_party_path, "pybind11", "CMakeLists.txt")) check_file(os.path.join(third_party_path, 'cpuinfo', 'CMakeLists.txt')) - check_file(os.path.join(third_party_path, 'catch', 'CMakeLists.txt')) check_file(os.path.join(third_party_path, 'onnx', 'CMakeLists.txt')) check_file(os.path.join(third_party_path, 'QNNPACK', 'CMakeLists.txt')) check_file(os.path.join(third_party_path, 'fbgemm', 'CMakeLists.txt'))
Make test_env_bot.py compatible with python3 on windows. Change `tp` to be read in text mode and not binary mode to make f.read() str in both python3 and in python2.
@@ -43,7 +43,7 @@ def setup_test_env(): tp = os.path.join(BOT_DIR, 'third_party') if sys.platform == 'win32': # third_party is a symlink. - with open(tp, 'rb') as f: + with open(tp, 'r') as f: tp = os.path.join(BOT_DIR, f.read()) sys.path.insert(0, tp)
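To see why this one-character mode change matters, here is a minimal standalone sketch (the temporary file stands in for the third_party symlink; names are illustrative): binary mode yields bytes on Python 3, while text mode yields str on both Python 2 and 3, so the result can be joined directly with a str base directory.

import os
import tempfile

with tempfile.NamedTemporaryFile("w", suffix=".txt", delete=False) as f:
    f.write("third_party_real")
    name = f.name

with open(name, "rb") as f:
    raw = f.read()    # Python 3: b'third_party_real' (bytes)

with open(name, "r") as f:
    text = f.read()   # Python 2 and 3: 'third_party_real' (str)

print(os.path.join("/bot_dir", text))   # works on both versions
# os.path.join("/bot_dir", raw) would raise TypeError on Python 3.
os.remove(name)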
m1n1.hv: Do map low RAM Apparently this is still necessary
@@ -1267,7 +1267,7 @@ class HV(Reloadable): print(f"Mapping guest physical memory...") ram_base = self.u.ba.phys_base & ~0xffffffff - #self.map_hw(ram_base, ram_base, self.u.ba.phys_base - ram_base) + self.map_hw(ram_base, ram_base, self.u.ba.phys_base - ram_base) self.map_hw(phys_base, phys_base, self.u.ba.mem_size_actual - phys_base + ram_base) print(f"Loading kernel image (0x{len(image):x} bytes)...")
Fix(ci): reduce the CI load by only installing lmdb in tests
@@ -10,17 +10,13 @@ class CustomBuildConfig(BuildConfig): class WorkWithCustomDeps(LightningWork): def __init__(self, cloud_compute: CloudCompute = CloudCompute(), **kwargs): - build_config = CustomBuildConfig(requirements=["numpy", "pandas", "py"]) + build_config = CustomBuildConfig(requirements=["py"]) super().__init__(parallel=True, **kwargs, cloud_compute=cloud_compute, cloud_build_config=build_config) def run(self): # installed by the build commands and by requirements in the build config import lmdb - import numpy as np - import pandas as pd - print("installed numpy version:", np.__version__) - print("installed pandas version:", pd.__version__) print("installed lmdb version:", lmdb.__version__)
Update important-upgrade-notes.rst Added reason & consequence
@@ -21,7 +21,8 @@ Important Upgrade Notes | | | | | This change was made because ``Update.Props == nil`` unintentionally cleared all ``Props``, such as the profile picture, instead of preserving them. | +----------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------+ -| v5.10.0 | ``SupportedTimezonesPath`` setting in config.json was removed. // What's the reason and or consequence? | +| v5.10.0 | ``SupportedTimezonesPath`` setting in config.json and changes to timezones in the UI based on the timezones.json file was removed. This was made to support | +| | `storing configurations in the database <https://docs.mattermost.com/administration/config-in-database.html#configuration-in-the-mattermost-database>`_. | +----------------------------------------------------+------------------------------------------------------------------------------------------------------------------------------------------------------------------+ | v5.9.0 | If **DisableLegacyMfa** setting in ``config.json`` is set to ``true`` and `multi-factor authentication <https://docs.mattermost.com/deployment/auth.html>`_ is | | | enabled, ensure your users have upgraded to mobile app version 1.17 or later. Otherwise, users who have MFA enabled may not be able to log in successfully. |
Add test about associating a floating IP to a VM Only one floating IP address can be allocated to an instance which has one port.
# License for the specific language governing permissions and limitations # under the License. +import testtools + from tempest.api.compute.floating_ips import base from tempest.common.utils import data_utils from tempest import config @@ -99,3 +101,27 @@ class FloatingIPsNegativeTestJSON(base.BaseFloatingIPsTest): self.assertRaises((lib_exc.NotFound, lib_exc.BadRequest), self.client.associate_floating_ip_to_server, '', self.server_id) + + @decorators.attr(type=['negative']) + @decorators.idempotent_id('58a80596-ffb2-11e6-9393-fa163e4fa634') + @test.services('network') + @testtools.skipUnless(CONF.network.public_network_id, + 'The public_network_id option must be specified.') + def test_associate_ip_to_server_with_floating_ip(self): + # The VM have one port + # Associate floating IP A to the VM + # Associate floating IP B which is from same pool with floating IP A + # to the VM, should raise BadRequest exception + body = self.client.create_floating_ip( + pool=CONF.network.public_network_id)['floating_ip'] + self.addCleanup(self.client.delete_floating_ip, body['id']) + self.client.associate_floating_ip_to_server(body['ip'], self.server_id) + self.addCleanup(self.client.disassociate_floating_ip_from_server, + body['ip'], self.server_id) + + body = self.client.create_floating_ip( + pool=CONF.network.public_network_id)['floating_ip'] + self.addCleanup(self.client.delete_floating_ip, body['id']) + self.assertRaises(lib_exc.BadRequest, + self.client.associate_floating_ip_to_server, + body['ip'], self.server_id)
Check mode before going to OBJECT. For a linked object that is already in object mode, you can't set the mode
@@ -29,6 +29,7 @@ from io_scene_gltf2.io.exp import gltf2_io_draco_compression_extension def save(context, export_settings): """Start the glTF 2.0 export and saves to content either to a .gltf or .glb file.""" if bpy.context.active_object is not None: + if bpy.context.active_object.mode != "OBJECT": bpy.ops.object.mode_set(mode='OBJECT') original_frame = bpy.context.scene.frame_current
tests: test_directories Fixed test_directories test in tests/func/test_diff.py
@@ -112,7 +112,7 @@ def test_directories(tmp_dir, scm, dvc): (tmp_dir / "dir" / "2").unlink() dvc.add("dir") - scm.add("dir.dvc") + scm.add(["dir.dvc"]) scm.commit("delete a file") # The ":/<text>" format is a way to specify revisions by commit message:
Strip newline when ingesting `version.txt` Summary: Pull Request resolved: Test Plan: Run cmake and observe there are no warning in stdout nor in `CMakeCache.txt`
@@ -364,6 +364,8 @@ include(cmake/public/utils.cmake) # ---[ Version numbers for generated libraries file(READ version.txt TORCH_DEFAULT_VERSION) +# Strip trailing newline +string(REGEX REPLACE "\n$" "" TORCH_DEFAULT_VERSION "${TORCH_DEFAULT_VERSION}") if("${TORCH_DEFAULT_VERSION} " STREQUAL " ") message(WARNING "Could not get version from base 'version.txt'") # If we can't get the version from the version file we should probably
budgets: also show budget for currencies not in balance Ref
</ol> {% endmacro %} -{% macro balance_with_budget(amount, budget) %} +{% macro render_budget(budget, currency, number=0) %} {% if budget %} - {% if amount.currency in budget %} - {% set diff = budget[amount.currency] - amount.number %} + {% if currency in budget %} + {% set diff = budget[currency] - number %} <span class="budget budget-{% if diff > 0 %}positive{% else %}{% if diff < 0 %}negative{% else %}zero{% endif %}{% endif %}"> - ({{ diff|format_currency(currency, show_if_zero=True) }}) + ({{ diff|format_currency(currency, show_if_zero=True) }}{{ ' '+currency if not number else '' }}) </span> {% else %} <span class="budget no-budget"></span> {% endif %} {% endif %} -<span class="number">{{ amount|format_amount }}</span> {% endmacro %} {% macro account_tree(account_name, interval_balances, dates, accumulate) %} <a href="{{ url_for('account', name=account.account, time=begin_date|string + ' - ' + end_date|string) }}"> {% for pos in balance %} <span class="balance"> - {{ balance_with_budget(pos.units, budget) }} + {{ render_budget(budget, pos.units.currency, pos.units.number) }} + <span class="number">{{ pos.units|format_amount }}</span> </span> {% endfor %} + {% if budget %} + {% for currency, number in budget.items() if currency not in balance.currencies() %} + {{ render_budget(budget, currency) }} + {% endfor %} + {% endif %} {% for pos in balance_children %} <span class="balance-children"> - {{ balance_with_budget(pos.units, budget) }} + <span class="number">{{ pos.units|format_amount }}</span> </span> {% endfor %} </a>
Simplify check for negotiated protocol negotiatedProtocol's type is Optional[bytes] See and Note that OpenSSL.SSL.Connection.get_next_proto_negotiated is deprecated:
@@ -233,13 +233,11 @@ class H2ClientProtocol(Protocol, TimeoutMixin): def handshakeCompleted(self) -> None: """We close the connection with InvalidNegotiatedProtocol exception when the connection was not made via h2 protocol""" - negotiated_protocol = self.transport.negotiatedProtocol - if isinstance(negotiated_protocol, bytes): - negotiated_protocol = str(self.transport.negotiatedProtocol, 'utf-8') - if negotiated_protocol != 'h2': + protocol = self.transport.negotiatedProtocol + if protocol is not None and protocol != b"h2": # Here we have not initiated the connection yet # So, no need to send a GOAWAY frame to the remote - self._lose_connection_with_error([InvalidNegotiatedProtocol(negotiated_protocol)]) + self._lose_connection_with_error([InvalidNegotiatedProtocol(protocol.decode("utf-8"))]) def _check_received_data(self, data: bytes) -> None: """Checks for edge cases where the connection to remote fails
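As a standalone illustration of the simplified check (a minimal sketch that does not use Twisted; the helper name is made up): negotiatedProtocol is Optional[bytes], so comparing it directly against b"h2" removes the decode step, and None (no ALPN result) is handled separately.

from typing import Optional

def is_invalid_negotiated_protocol(protocol: Optional[bytes]) -> bool:
    # Only a concrete, non-h2 ALPN result counts as invalid.
    return protocol is not None and protocol != b"h2"

assert is_invalid_negotiated_protocol(b"http/1.1") is True
assert is_invalid_negotiated_protocol(b"h2") is False
assert is_invalid_negotiated_protocol(None) is False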
ci: add support for python 3.10 experimental builds Resolves:
@@ -19,10 +19,16 @@ jobs: tests: name: ${{ matrix.os }} / ${{ matrix.python-version }} runs-on: ${{ matrix.os }}-latest + continue-on-error: ${{ matrix.experimental }} strategy: matrix: os: [Ubuntu, MacOS, Windows] python-version: [3.6, 3.7, 3.8, 3.9] + experimental: [false] + include: + - os: Ubuntu + python-version: "3.10.0-alpha - 3.10.0" + experimental: true fail-fast: false steps: - uses: actions/checkout@v2
Fix nogil status for error handling in line tracing code of with/try-finally statements. See
@@ -7539,12 +7539,14 @@ class TryFinallyStatNode(StatNode): code.funcstate.in_try_finally = was_in_try_finally code.putln("}") - code.set_all_labels(old_labels) temps_to_clean_up = code.funcstate.all_free_managed_temps() code.mark_pos(self.finally_clause.pos) code.putln("/*finally:*/ {") + # Reset labels only after writing out a potential line trace call for correct nogil error handling. + code.set_all_labels(old_labels) + def fresh_finally_clause(_next=[self.finally_clause]): # generate the original subtree once and always keep a fresh copy node = _next[0]
fix typo in resnet50_trainer.py Summary: Pull Request resolved:
@@ -497,7 +497,7 @@ def Train(args): test_model = None if (args.test_data is not None): log.info("----- Create test net ----") - if use_ideep: + if args.use_ideep: test_arg_scope = { 'use_cudnn': False, 'cudnn_exhaustive_search': False,
Update changelog.md Included the improvement with channel name sorting that was brought up in this GitHub issue: The poster of the issue asked for this improvement to be noted in the changelog.
@@ -46,6 +46,7 @@ Also see [changelog in progress](http://bit.ly/2nK3cVf) for the next release. - Added focus on the text box after hitting "Edit" on Account Settings options. - Improved formatting of quotes in the channel header. - Added a date separator for search results. + - Channel names are now sorted correctly in the left-hand-side by taking non-alphabetical characters into consideration (e.g. brackets, hash sign, etc.) #### Integrations - Added username and profile picture to incoming webhook set up pages.
analytics: Eliminate slider-focused text selection in Firefox. Fixes
@@ -43,6 +43,13 @@ hr { border-width: 2px; } +.rangeslider-container { + -webkit-user-select: none; + -moz-user-select: none; + -ms-user-select: none; + user-select: none; +} + .rangeselector text { font-weight: 400; }
Fix boring app test: `debug=True` when running on the cloud debug=True for boring_app (dynamic app also has debug=True)
@@ -13,7 +13,7 @@ def test_boring_app_example_cloud() -> None: with run_app_in_cloud( os.path.join(_PROJECT_ROOT, "examples/app_boring/"), app_name="app_dynamic.py", - debug=False, + debug=True, ) as ( _, view_page,
DOC: added release note for `isfinite` support for `datetime64` and `timedelta64`
@@ -197,6 +197,10 @@ The boolean and integer types are incapable of storing ``np.nan`` and ``np.inf`` which allows us to provide specialized ufuncs that are up to 250x faster than the current approach. +``np.isfinite`` ufunc supports ``datetime64`` and ``timedelta64`` types +----------------------------------------------------------------------- +Previously, `np.isfinite` used to raise a ``TypeError`` on being used on these +two types. Changes =======
Fix: role bluebanquise after repositories_client Except for the first management node where we install ansible interactively, we need to configure the repositories before installing BlueBanquise's dependencies.
hosts: "mg_managements" roles: - - role: bluebanquise - tags: bluebanquise - role: set_hostname tags: set_hostname - role: nic tags: repositories_server - role: repositories_client tags: repositories_client + - role: bluebanquise + tags: bluebanquise - role: hosts_file tags: hosts_file - role: ssh_master
Split extra chrome args on whitespace This is in case multiple args are used.
@@ -179,7 +179,7 @@ class Chrome: extra_chrome_args = os.environ.get('BROZZLER_EXTRA_CHROME_ARGS') if extra_chrome_args: - chrome_args.append(extra_chrome_args) + chrome_args.extend(extra_chrome_args.split()) if disk_cache_dir: chrome_args.append('--disk-cache-dir=%s' % disk_cache_dir) if disk_cache_size:
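A minimal sketch of the difference this makes (the flag values are made up): append() hands Chrome one argument that still contains spaces, while extend() with split() passes each flag separately.

extra_chrome_args = "--no-sandbox --disable-gpu"

args_with_append = ["chromium-browser"]
args_with_append.append(extra_chrome_args)
print(args_with_append)   # ['chromium-browser', '--no-sandbox --disable-gpu']

args_with_extend = ["chromium-browser"]
args_with_extend.extend(extra_chrome_args.split())
print(args_with_extend)   # ['chromium-browser', '--no-sandbox', '--disable-gpu']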
Unbreak original mpi implementation in dials.stills_process 1) Sync interface change to do_work method. 2) Add quick_parse to the OptionParser. On the node I tried at LCLS, reading from stdin seemed to break the MPI subsystem.
@@ -244,7 +244,7 @@ def run(self): import copy # Parse the command line - params, options, all_paths = self.parser.parse_args(show_diff_phil=False, return_unhandled=True) + params, options, all_paths = self.parser.parse_args(show_diff_phil=False, return_unhandled=True, quick_parse=True) # Check we have some filenames if not all_paths: @@ -373,7 +373,7 @@ def do_work(i, item_list): size = comm.Get_size() # size: number of processes running in this job subset = [item for i, item in enumerate(iterable) if (i+rank)%size == 0] - do_work((rank, subset)) + do_work(rank, subset) else: from dxtbx.command_line.image_average import splitit if params.mp.nproc == 1:
Optimization: Avoid error check for operations that cannot raise. * This is not done a lot yet, and we still miss a dedicated query for whether the operation, but not the arguments, can raise. * Added because it was noted missing as part of the conversion error check needed for Ctypes to work.
@@ -111,8 +111,6 @@ def getOperationCode(to_name, operator, arg_names, in_place, needs_check, # This needs to have one case per operation of Python, and there are many # of these, pylint: disable=too-many-branches,too-many-statements - # TODO: Use "needs_check" too. - prefix_args = () ref_count = 1 @@ -195,6 +193,7 @@ def getOperationCode(to_name, operator, arg_names, in_place, needs_check, getErrorExitBoolCode( condition = "%s == false" % res_name, release_names = arg_names, + needs_check = needs_check, emit = emit, context = context ) @@ -223,6 +222,7 @@ def getOperationCode(to_name, operator, arg_names, in_place, needs_check, getErrorExitCode( check_name = value_name, release_names = arg_names, + needs_check = needs_check, emit = emit, context = context )
Update message.py Second attempt to fix reply_markup
@@ -1569,7 +1569,7 @@ class Message(base.TelegramObject): chat_id: typing.Union[str, int], disable_notification: typing.Optional[bool] = None, reply_to_message_id: typing.Optional[int] = None, - reply_markup: typing.Union[InlineKeyboardMarkup, ReplyKeyboardMarkup, None] = self.reply_markup, + reply_markup: typing.Union[InlineKeyboardMarkup, ReplyKeyboardMarkup, None] = None, parse_mode: typing.Union[base.String, None] = None, ) -> Message: """ @@ -1582,7 +1582,7 @@ class Message(base.TelegramObject): :param parse_mode: :return: """ - kwargs = {"chat_id": chat_id} + kwargs = {"chat_id": chat_id, "reply_markup": self.reply_markup} text = self.text or self.caption if disable_notification is not None:
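For context on why the default value moves into the method body above: a default argument is evaluated when the def statement runs, inside the class body, where self does not exist yet, so a default of self.reply_markup raises NameError at import time. A minimal sketch with made-up names:

class Fixed:
    def __init__(self):
        self.reply_markup = {"keyboard": []}

    def send_copy(self, chat_id, reply_markup=None):
        # Read the instance attribute at call time instead of in the signature;
        # 'def send_copy(self, chat_id, reply_markup=self.reply_markup)' would
        # fail with NameError when the class is defined.
        kwargs = {"chat_id": chat_id, "reply_markup": self.reply_markup}
        if reply_markup is not None:
            kwargs["reply_markup"] = reply_markup
        return kwargs

print(Fixed().send_copy(42))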
analytics: Remove unused CSS styling for `hr` tags. There are no `<hr>` HTML tags in the stats / analytics page, so removes the unused CSS rules for these elements.
@@ -3,10 +3,6 @@ body { background-color: hsl(0, 0%, 98%); } -hr { - border-width: 2px; -} - p { margin-bottom: 0; } @@ -72,10 +68,6 @@ p { top: -30px; } - &.pie-chart hr { - margin-bottom: 8px; - } - .button-container { position: relative; z-index: 1;
Add empty methods to screen class, for Gracefully ignores features that don't make sense in live coding mode.
@@ -99,8 +99,32 @@ class MockTurtle(RawTurtle): return super().tracer(n) # Leave tracing disabled. + def title(self, title): + pass + + def setup(self, width=None, height=None, startx=None, starty=None): + pass + + def textinput(self, title, prompt): + pass + + def numinput(self, title, prompt, default=None, minval=None, maxval=None): + pass + + def mainloop(self): + pass + + def done(self): + pass + + def bye(self): + pass + + def exitonclick(self): + pass + _Stamp = namedtuple('Stamp', 'pos heading color') - _screen = _pen = OriginalTurtle = original_mainloop = None + _screen = _pen = OriginalTurtle = None instances = [] @classmethod @@ -110,8 +134,6 @@ class MockTurtle(RawTurtle): turtle_module = sys.modules['turtle'] cls.OriginalTurtle = turtle_module.Turtle turtle_module.Turtle = MockTurtle - cls.original_mainloop = turtle_module.mainloop - turtle_module.mainloop = turtle_module.done = lambda: None # noinspection PyProtectedMember MockTurtle._screen = MockTurtle._Screen(canvas) MockTurtle._pen = MockTurtle() @@ -125,8 +147,7 @@ class MockTurtle(RawTurtle): if cls.OriginalTurtle is not None: turtle_module = sys.modules['turtle'] turtle_module.Turtle = cls.OriginalTurtle - turtle_module.mainloop = turtle_module.done = cls.original_mainloop - MockTurtle._pen = cls.OriginalTurtle = cls.original_mainloop = None + MockTurtle._pen = cls.OriginalTurtle = None MockTurtle._screen = None @classmethod
Update setup.py xgboost library needed.
@@ -39,7 +39,6 @@ setup( "flask", "pandas>=1.0.5", "numpy<1.19.0,>=1.16.0", - "matplotlib==3.3.1", "requests", "flask_cors", "flask_wtf", @@ -48,10 +47,12 @@ setup( "psutil", "gunicorn", "six>=1.14.0", + "matplotlib==3.3.1", "tensorflow", "keras==2.3.1", "sklearn", - "scikit-image" + "scikit-image", + "xgboost" ], extras_require={ "dev": [
GraphComponentBinding : Replace __nonzero__ with __bool__ for Python 3 See
@@ -213,7 +213,7 @@ int length( GraphComponent &g ) return g.children().size(); } -bool nonZero( GraphComponent &g ) +bool toBool( GraphComponent &g ) { return true; } @@ -303,7 +303,15 @@ void GafferModule::bindGraphComponent() .def( "__delitem__", (void (*)( GraphComponent &, long ))&delItem ) .def( "__contains__", contains ) .def( "__len__", &length ) - .def( "__nonzero__", &nonZero ) +// The default conversion to bool uses `__len__`, which trips a lot of +// people up as they expect `if graphComponent` to be equivalent to +// `if graphComponent is not None`. So we provide a more specific conversion +// which is always true. +#if PY_MAJOR_VERSION > 2 + .def( "__bool__", &toBool ) +#else + .def( "__nonzero__", &toBool ) +#endif .def( "__repr__", &repr ) .def( "items", &items ) .def( "keys", &keys )
Correct JMC download link Mission Control is available as an OpenJDK project now, so we refer to that location for the download link.
@@ -35,7 +35,7 @@ jfr The ``jfr`` telemetry device enables the `Java Flight Recorder <http://docs.oracle.com/javacomponents/jmc-5-5/jfr-runtime-guide/index.html>`_ on the benchmark candidate. Up to JDK 11, Java flight recorder ships only with Oracle JDK, so Rally assumes that Oracle JDK is used for benchmarking. If you run benchmarks on JDK 11 or later, Java flight recorder is also available on OpenJDK. -To enable ``jfr``, invoke Rally with ``esrally --telemetry jfr``. ``jfr`` will then write a flight recording file which can be opened in `Java Mission Control <http://www.oracle.com/technetwork/java/javaseproducts/mission-control/java-mission-control-1998576.html>`_. Rally prints the location of the flight recording file on the command line. +To enable ``jfr``, invoke Rally with ``esrally --telemetry jfr``. ``jfr`` will then write a flight recording file which can be opened in `Java Mission Control <https://jdk.java.net/jmc/>`_. Rally prints the location of the flight recording file on the command line. .. image:: jfr-es.png :alt: Sample Java Flight Recording
svtplay: this happens when a video has been unpublished; the page is up but the video is gone
@@ -91,6 +91,9 @@ class Svtplay(Service, MetadataThumbMixin): except json.decoder.JSONDecodeError: yield ServiceError(f"Can't decode api request: {res.request.url}") return + if res.status_code >= 400: + yield ServiceError("Can't find any videos. its removed?") + return videos = self._get_video(janson) yield from videos
Update Readme steps to run examples This documentation update is to address
@@ -114,7 +114,7 @@ Save the file as **.splunkrc** in the current user's home directory. #### Run the examples -Examples are located in the **/splunk-sdk-python/examples** directory. To run the examples at the command line, use the Python interpreter and include any arguments that are required by the example: +Examples are located in the **/splunk-sdk-python/examples** directory. To run the examples at the command line, use the Python interpreter and include any arguments that are required by the example. In the commands below, replace "examplename" with the name of the specific example in the directory that you want to run: python examplename.py --username="admin" --password="changeme"
Update for new version of plugin To be cross-platform, the nonce file has been moved to the home dir. Looking in tmp as well means we can support both versions
@@ -2,6 +2,7 @@ import os import os.path import requests import time +from pathlib import Path from talon import ctrl, ui, Module, Context, actions, clip # Courtesy of https://github.com/anonfunc/talon-user/blob/master/apps/jetbrains.py @@ -79,7 +80,16 @@ def _get_nonce(port): try: with open(os.path.join("/tmp", "vcidea_" + str(port)), "r") as fh: return fh.read() + except FileNotFoundError as e: + try: + home = str(Path.home()) + with open(os.path.join(home, "vcidea_" + str(port)), "r") as fh: + return fh.read() except IOError: + print("Could not find nonce in tmp or home") + return None + except IOError as e: + print(e) return None
Bump source versions for v11.0-rc1 Problem: Tezos announced a new release. Revisions used have to be bumped. Solution: updated versions for Tezos sources.
"url_template": "https://github.com/<owner>/<repo>/archive/<rev>.tar.gz" }, "tezos": { - "ref": "refs/tags/v10.2", + "ref": "refs/tags/v11.0-rc1", "repo": "https://gitlab.com/tezos/tezos", - "rev": "5bfd311b701015381338e73a30c74415fa493c10", + "rev": "36055190bb560997f377ab7ee7bc6b66fe61835f", "type": "git" } }
STY: fixed PEP8 errors Removed whitespace and fixed indentation.
@@ -368,10 +368,10 @@ class TestConstellationBasics(object): 'kwargs': {'dkey': 'mlt'}, 'apply_inst': False}, {'function': mult_data, 'args': self.custom_args, 'apply_inst': False}] - testConst2 = pysat.Constellation(instruments=[ - pysat.Instrument('pysat', 'testing', num_samples=10, - clean_level='clean') for i in range(5)], - custom=custom) + testConst2 = pysat.Constellation( + instruments=[pysat.Instrument('pysat', 'testing', num_samples=10, + clean_level='clean') + for i in range(5)], custom=custom) # Ensure both constellations have the same custom_* attributes assert self.testConst.custom_functions == testConst2.custom_functions
Use gevent for celery worker Increase concurrency to 100 (light threads)
@@ -11,4 +11,4 @@ fi sleep 10 # Wait for migrations echo "==> $(date +%H:%M:%S) ==> Running Celery worker <==" -exec celery -A safe_transaction_service.taskapp worker --loglevel $log_level -c 4 +exec celery -A safe_transaction_service.taskapp worker --loglevel $log_level --pool=gevent --concurrency=100
Bugfix Authorization header parsing Werkzeug expects None if header isn't present, rather than an empty string. Fixes
@@ -248,8 +248,7 @@ class BaseRequestWebsocket(_BaseRequestResponse): @property def authorization(self) -> Optional[Authorization]: - header = self.headers.get("Authorization", "") - return parse_authorization_header(header) + return parse_authorization_header(self.headers.get("Authorization")) @property def cache_control(self) -> RequestCacheControl:
add missing import The missing `argparse` import worked before because py3tester included it, but it should have been explicitly imported anyway
"""Unit tests for covidcast_meta_cache_updater.py.""" # standard library +import argparse import json import unittest from unittest.mock import MagicMock
GL Renderer : Reduce repetition This will be more useful when we add additional options using the same pattern.
@@ -182,6 +182,20 @@ T *reportedCast( const IECore::RunTimeTyped *v, const char *type, const IECore:: return nullptr; } +template<typename T> +T option( const IECore::Object *v, const IECore::InternedString &name, const T &defaultValue ) +{ + if( !v ) + { + return defaultValue; + } + if( auto d = reportedCast<const IECore::TypedData<T>>( v, "option", name ) ) + { + return d->readable(); + } + return defaultValue; +} + template<typename T> T parameter( const IECore::CompoundDataMap &parameters, const IECore::InternedString &name, const T &defaultValue ) { @@ -724,33 +738,15 @@ class OpenGLRenderer final : public IECoreScenePreview::Renderer if( name == "camera" ) { - if( value == nullptr ) - { - m_camera = ""; - } - else if( const IECore::StringData *d = reportedCast<const IECore::StringData>( value, "option", name ) ) - { - m_camera = d->readable(); - - } - return; + m_camera = ::option<string>( value, name, "" ); } else if( name == "frame" || name == "sampleMotion" ) { // We know what these mean, we just have no use for them. - return; } else if( name == "gl:selection" ) { - if( value == nullptr ) - { - m_selection.clear(); - } - else if( auto d = reportedCast<const IECore::PathMatcherData>( value, "option", name ) ) - { - m_selection = d->readable(); - } - return; + m_selection = ::option<IECore::PathMatcher>( value, name, IECore::PathMatcher() ); } else if( boost::starts_with( name.string(), "gl:primitive:" ) || @@ -768,16 +764,16 @@ class OpenGLRenderer final : public IECoreScenePreview::Renderer m_baseStateOptions->members().erase( name ); } m_baseState = nullptr; // We'll update it lazily in `baseState()` - return; } else if( boost::contains( name.string(), ":" ) && !boost::starts_with( name.string(), "gl:" ) ) { // Ignore options prefixed for some other renderer. - return; } - + else + { IECore::msg( IECore::Msg::Warning, "IECoreGL::Renderer::option", boost::format( "Unknown option \"%s\"." ) % name.c_str() ); } + } void output( const IECore::InternedString &name, const Output *output ) override {
increase build number Increases build number instead of a new version number.
{% set name = "geocat-comp" %} -{% set version = "2022.10.1" %} +{% set version = "2022.10.0" %} package: name: {{ name }} @@ -7,7 +7,7 @@ package: build: noarch: python - number: 0 + number: 1 script: {{ PYTHON }} -m pip install --no-deps --ignore-installed -vv . source:
Check and evaluate CURSOR FORWARD command after CSI in ANSI formatted text. Co-Author: Jonathan Slenders
@@ -55,7 +55,11 @@ class ANSI: formatted_text = self._formatted_text while True: + # NOTE: CSI is a special token within a stream of characters that + # introduces an ANSI control sequence used to set the + # style attributes of the following characters. csi = False + c = yield # Everything between \001 and \002 should become a ZeroWidthEscape. @@ -70,6 +74,7 @@ class ANSI: else: escaped_text += c + # Check for CSI if c == "\x1b": # Start of color escape sequence. square_bracket = yield @@ -84,19 +89,37 @@ class ANSI: # Got a CSI sequence. Color codes are following. current = "" params = [] + while True: char = yield + + # Construct number if char.isdigit(): current += char + + # Eval number else: + # Limit and save number value params.append(min(int(current or 0), 9999)) + + # Get delimiter token if present if char == ";": current = "" + + # Check and evaluate color codes elif char == "m": # Set attributes and token. self._select_graphic_rendition(params) style = self._create_style_string() break + + # Check and evaluate cursor forward + elif char == "C": + for i in range(params[0]): + # add <SPACE> using current style + formatted_text.append((style, " ")) + break + else: # Ignore unsupported sequence. break @@ -127,14 +150,16 @@ class ANSI: self._bgcolor = _bg_colors[attr] elif attr == 1: self._bold = True + # elif attr == 2: + # self._faint = True elif attr == 3: self._italic = True elif attr == 4: self._underline = True elif attr == 5: - self._blink = True + self._blink = True # Slow blink elif attr == 6: - self._blink = True # Fast blink. + self._blink = True # Fast blink elif attr == 7: self._reverse = True elif attr == 8: @@ -142,7 +167,7 @@ class ANSI: elif attr == 9: self._strike = True elif attr == 22: - self._bold = False + self._bold = False # Normal intensity elif attr == 23: self._italic = False elif attr == 24: @@ -151,9 +176,12 @@ class ANSI: self._blink = False elif attr == 27: self._reverse = False + elif attr == 28: + self._hidden = False elif attr == 29: self._strike = False elif not attr: + # Reset all style attributes self._color = None self._bgcolor = None self._bold = False
Update institution ITB ITB has switched auth protocol from CAS to SAML. In addition, they requested to test login on their development server with our test server first before going to production. [skip ci]
@@ -327,18 +327,6 @@ def main(env): 'email_domains': [], 'delegation_protocol': 'saml-shib', }, - { - '_id': 'itb', - 'name': 'Institut Teknologi Bandung', - 'description': 'Institut Teknologi Bandung - OSF Repository', - 'banner_name': 'itb-banner.png', - 'logo_name': 'itb-shield.png', - 'login_url': None, - 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://osf.io/goodbye')), - 'domains': [], - 'email_domains': [], - 'delegation_protocol': 'cas-pac4j', - }, { '_id': 'ljaf', 'name': 'Laura and John Arnold Foundation', @@ -1009,11 +997,11 @@ def main(env): 'description': 'Institut Teknologi Bandung - OSF Repository', 'banner_name': 'itb-banner.png', 'logo_name': 'itb-shield.png', - 'login_url': None, + 'login_url': SHIBBOLETH_SP_LOGIN.format(encode_uri_component('https://login-dev3.itb.ac.id/idp/shibboleth')), 'logout_url': SHIBBOLETH_SP_LOGOUT.format(encode_uri_component('https://test.osf.io/goodbye')), 'domains': ['test-osf-itb.cos.io'], 'email_domains': [], - 'delegation_protocol': 'cas-pac4j', + 'delegation_protocol': 'saml-shib', }, { '_id': 'ljaf',
[Docs] Add `typing-extensions` dependency guide Although TVM itself does not, `tvmc` depends on `typing-extensions`, which is not mentioned in the documentation.
@@ -331,6 +331,12 @@ like ``virtualenv``. pip3 install --user numpy decorator attrs + * If you want to use ``tvmc``: the TVM command line driver. + + .. code:: bash + + pip3 install --user typing-extensions + * If you want to use RPC Tracker .. code:: bash
Pass existing node collection from State. Also: remove unused collection argument.
@@ -22,7 +22,6 @@ var ChannelEditRouter = Backbone.Router.extend({ var ChannelManageView = require("edit_channel/new_channel/views"); var channel_manager_view = new ChannelManageView.ChannelListPage ({ el: $("#channel-container"), - collection: this.channelCollection }); }, @@ -61,7 +60,7 @@ var ChannelEditRouter = Backbone.Router.extend({ var QueueView = require("edit_channel/queue/views"); var queue = new QueueView.Queue({ el: $("#queue-area"), - collection: this.nodeCollection, + collection: State.nodeCollection, clipboard_root : State.current_user.get_clipboard(), trash_root : State.current_channel.get_root("trash_tree"), });
skip setting occlusion settings Just use the defaults for now, until we can figure out a good setting (hard)
@@ -778,9 +778,9 @@ engine_no_focus_sleep 50 // Power savings while alt-tabbed out of TF2 //r_ForceWaterLeaf 1 // Optimization to visleafs //r_occlusion 1 // Use CPU to have the GPU skip rendering models/props you cannot see r_fastzreject 0 // Skip outdated render method -r_occludeemaxarea 40 // Skip occlusion of objects that are too large to be skipped -r_occluderminarea 101 // Block all occluders by default -r_occludermincount 6 // Constant count for occluders +//r_occludeemaxarea 40 // Skip occlusion of objects that are too large to be skipped +//r_occluderminarea 101 // Block all occluders by default +//r_occludermincount 6 // Constant count for occluders //mat_tonemapping_occlusion_use_stencil 0 // Force disable stencil buffer for tonemapping r_norefresh 1 // Do not store a useless and unused frame time variable mat_forcehardwaresync 0 // Skip hardware sync which is not proper on queued material system @@ -1375,7 +1375,7 @@ snd_async_spew_blocking 0 // Disable async spew alias sound_low"snd_disable_mixer_duck 1;snd_pitchquality 0;dsp_slow_cpu 1;snd_spatialize_roundrobin 3;dsp_room 0;dsp_facingaway 30;dsp_speaker 0;dsp_water 0;dsp_spatial 0;snd_defer_trace 1;dsp_enhance_stereo 0;snd_surround_speakers 0" alias sound_medium"snd_disable_mixer_duck 0;snd_pitchquality 0;dsp_slow_cpu 1;snd_spatialize_roundrobin 1;dsp_room 0;dsp_facingaway 30;dsp_speaker 50;dsp_water 0;dsp_spatial 40;snd_defer_trace 1;dsp_enhance_stereo 0;snd_surround_speakers 0" -alias sound_high"snd_disable_mixer_duck 0;snd_pitchquality 1;dsp_slow_cpu 0;snd_spatialize_roundrobin 1;dsp_room 0;dsp_facingaway 30;dsp_speaker 50;dsp_water 14;dsp_spatial 40;snd_defer_trace 1;dsp_enhance_stereo 0;snd_surround_speakers 2" +alias sound_high"snd_disable_mixer_duck 0;snd_pitchquality 1;dsp_slow_cpu 0;snd_spatialize_roundrobin 1;dsp_room 0;dsp_facingaway 30;dsp_speaker 50;dsp_water 14;dsp_spatial 40;snd_defer_trace 1;dsp_enhance_stereo 0;snd_surround_speakers 0" alias sound_very_high"snd_disable_mixer_duck 0;snd_pitchquality 1;dsp_slow_cpu 0;snd_spatialize_roundrobin 0;dsp_room 0;dsp_facingaway 30;dsp_speaker 50;dsp_water 14;dsp_spatial 40;snd_defer_trace 0;dsp_enhance_stereo 0;snd_surround_speakers 5" alias sound_ultra"snd_disable_mixer_duck 0;snd_pitchquality 1;dsp_slow_cpu 0;snd_spatialize_roundrobin 0;dsp_room 1;dsp_facingaway 30;dsp_speaker 50;dsp_water 14;dsp_spatial 40;snd_defer_trace 0;dsp_enhance_stereo 1;snd_surround_speakers 5"
Added missing import to standalone.py Now importing ``Mapping`` for standalone parsers
@@ -30,7 +30,7 @@ from types import ModuleType from typing import ( TypeVar, Generic, Type, Tuple, List, Dict, Iterator, Collection, Callable, Optional, FrozenSet, Any, Union, Iterable, IO, TYPE_CHECKING, - Pattern as REPattern, ClassVar, Set, + Pattern as REPattern, ClassVar, Set, Mapping ) ###}
Simplify install-vault.sh a smidge Since all supported versions have published enterprise versions, we no longer need to fall back to the old-style S3 download URLs.
@@ -49,11 +49,6 @@ function install_vault_release() { if [[ "${HVAC_VAULT_LICENSE}" == "enterprise" ]]; then download_url="https://releases.hashicorp.com/vault/${HVAC_VAULT_VERSION}+ent/vault_${HVAC_VAULT_VERSION}+ent_${machine}_amd64.zip" - if ! curl --head "${download_url}" | head -1 | grep '\b200\b'; then - # Vault enterprise binaries earlier than v1.2.3 have different release downlaod URLs, so we - # fallback to this S3 URL in such cases. - download_url="https://s3-us-west-2.amazonaws.com/hc-enterprise-binaries/vault/ent/${HVAC_VAULT_VERSION}/vault-enterprise_${HVAC_VAULT_VERSION}%2Bent_${machine}_amd64.zip" - fi else download_url="https://releases.hashicorp.com/vault/${HVAC_VAULT_VERSION}/vault_${HVAC_VAULT_VERSION}_${machine}_amd64.zip" fi
propagate rename from check_lint SublimeHaskellHsDevChain was renamed ChainRunner. Propagate the rename to fly_check.
@@ -109,11 +109,11 @@ class FlyCheckViewEventListener(sublime_plugin.ViewEventListener): print('fly: executing {0}'.format(check_cmd)) if check_cmd: - CheckLint.SublimeHaskellHsDevChain.reset_chain_flag() + CheckLint.ChainRunner.reset_chain_flag() self.view.run_command(check_cmd, {'fly': True}) if Settings.COMPONENT_DEBUG.fly_mode: print('fly: awaiting command completion') - CheckLint.SublimeHaskellHsDevChain.run_chain_flag().wait() + CheckLint.ChainRunner.run_chain_flag().wait() delta_t = None
fixed _federation bugs for hetero-pearson scenario: same name has different dtype
@@ -157,7 +157,7 @@ class Federation(FederationABC): for i, info in enumerate(channel_infos): obj = self._receive_obj(info, name, tag=_SPLIT_.join([tag, NAME_DTYPE_TAG])) rtn_dtype.append(obj) - LOGGER.debug(f"[rabbitmq.get] _name_dtype_keys: {_name_dtype_keys[i]}, dtype: {obj}") + LOGGER.debug(f"[rabbitmq.get] _name_dtype_keys: {_name_dtype_keys}, dtype: {obj}") for k in _name_dtype_keys: if k not in self._name_dtype_map:
Update Singularity Link Old link is expiring; replace it with the new link it suggests.
@@ -82,7 +82,7 @@ Rules describe how to create **output files** from **input files**. * Input and output files can contain multiple named wildcards. * Rules can either use shell commands, plain Python code or external Python or R scripts to create output files from input files. * Snakemake workflows can be easily executed on **workstations**, **clusters**, **the grid**, and **in the cloud** without modification. The job scheduling can be constrained by arbitrary resources like e.g. available CPU cores, memory or GPUs. -* Snakemake can automatically deploy required software dependencies of a workflow using `Conda <https://conda.io>`_ or `Singularity <http://singularity.lbl.gov/>`_. +* Snakemake can automatically deploy required software dependencies of a workflow using `Conda <https://conda.io>`_ or `Singularity <https://sylabs.io/docs/>`_. * Snakemake can use Amazon S3, Google Storage, Dropbox, FTP, WebDAV, SFTP and iRODS to access input or output files and further access input files via HTTP and HTTPS.
Fix crashing on SIOCGIFADDR While scanning the list of interfaces, some of them can be invalidly configured: no IP address, link down, etc. This patch handles the error and reports the problem with the specific interface.
@@ -18,10 +18,21 @@ def local_ip4_addr_list(): for if_nidx in socket.if_nameindex(): name = if_nidx[1] sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - ip_addr = socket.inet_ntoa(fcntl.ioctl( - sock.fileno(), + try: + ip_of_ni = fcntl.ioctl(sock.fileno(), 0x8915, # SIOCGIFADDR - struct.pack('256s', name[:15].encode("UTF-8")))[20:24]) + struct.pack('256s', name[:15].encode("UTF-8"))) + except OSError as e: + if e.errno == 99: # EADDRNOTAVAIL + print("Warning!", + "Interface: {}".format(name), + "IP address not available for interface.", + sep='\n') + continue + else: + raise e + + ip_addr = socket.inet_ntoa(ip_of_ni[20:24]) nic.add(ip_addr) return nic
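A small aside on the magic number in the patch above: 99 is EADDRNOTAVAIL on Linux, and the errno module exposes it by name, which is both clearer and portable. A minimal sketch of the same check with the ioctl call simulated (the real call is Linux-specific):

import errno
import os

def read_interface_address(simulate_unconfigured=True):
    # Stand-in for fcntl.ioctl(..., SIOCGIFADDR, ...), which raises
    # OSError(EADDRNOTAVAIL) for an interface with no address assigned.
    if simulate_unconfigured:
        raise OSError(errno.EADDRNOTAVAIL, os.strerror(errno.EADDRNOTAVAIL))
    return "192.0.2.1"

try:
    addr = read_interface_address()
except OSError as e:
    if e.errno == errno.EADDRNOTAVAIL:
        print("Warning! IP address not available for interface.")
    else:
        raise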
MAINT: special: Improve comments about Cephes p1evl function. Closes
* coef[0] = C , ..., coef[N] = C . * N 0 * - * The function p1evl() assumes that coef[N] = 1.0 and is - * omitted from the array. Its calling arguments are + * The function p1evl() assumes that c_N = 1.0 so that coefficent + * is omitted from the array. Its calling arguments are * otherwise the same as polevl(). * * * program in microcode or assembly language. * */ - /* * Cephes Math Library Release 2.1: December, 1988 @@ -83,6 +82,15 @@ static inline double polevl(double x, const double coef[], int N) /* p1evl() */ /* N * Evaluate polynomial when coefficient of x is 1.0. + * That is, C_{N} is assumed to be 1, and that coefficient + * is not included in the input array coef. + * coef must have length N and contain the polynomial coefficients + * stored as + * coef[0] = C_{N-1} + * coef[1] = C_{N-2} + * ... + * coef[N-2] = C_1 + * coef[N-1] = C_0 * Otherwise same as polevl. */
awsbsub: fix file upload for absolute path The s3 key name must be the basename and not the absolute path
@@ -214,7 +214,7 @@ def _upload_and_get_command(boto3_factory, args, job_s3_folder, job_name, config # upload input files, if there if args.input_file: for file in args.input_file: - s3_uploader.put_file(file, file) + s3_uploader.put_file(file, os.path.basename(file)) # upload command, if needed if args.command_file or not sys.stdin.isatty() or args.env:
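A one-line illustration of the fix (paths are made up): the S3 key is now the file's basename rather than its absolute path.

import os

print(os.path.basename("/home/user/data/input.txt"))  # 'input.txt'
print(os.path.basename("input.txt"))                  # already relative: 'input.txt'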
Deseasonify: reduce icon shuffle log verbosity It is not necessary to log all icon paths on each shuffle. Creates unnecessary visual clutter in the logfile.
@@ -332,8 +332,8 @@ class BrandingManager(commands.Cog): return False if not self.remaining_icons: + log.info("Reset & shuffle remaining icons") await self._reset_remaining_icons() - log.info(f"Set remaining icons: {await pretty_files(self.remaining_icons)}") next_up, *self.remaining_icons = self.remaining_icons success = await self.bot.set_icon(next_up.download_url)
production: Create stream in an atomic transaction. To avoid the window between stream creation and creation of the Recipient object, we create the stream in an atomic transaction. Fixes
from typing import Collection, List, Optional, Set, Tuple, Union +from django.db import transaction from django.db.models.query import QuerySet from django.utils.timezone import now as timezone_now from django.utils.translation import gettext as _ @@ -102,6 +103,7 @@ def create_stream_if_needed( realm, invite_only, history_public_to_subscribers ) + with transaction.atomic(): (stream, created) = Stream.objects.get_or_create( realm=realm, name__iexact=stream_name, @@ -124,12 +126,6 @@ def create_stream_if_needed( stream.rendered_description = render_stream_description(stream_description) stream.save(update_fields=["recipient", "rendered_description"]) - if stream.is_public(): - send_stream_creation_event(stream, active_non_guest_user_ids(stream.realm_id)) - else: - realm_admin_ids = [user.id for user in stream.realm.get_admin_users_and_bots()] - send_stream_creation_event(stream, realm_admin_ids) - event_time = timezone_now() RealmAuditLog.objects.create( realm=realm, @@ -138,6 +134,13 @@ def create_stream_if_needed( event_type=RealmAuditLog.STREAM_CREATED, event_time=event_time, ) + if created: + if stream.is_public(): + send_stream_creation_event(stream, active_non_guest_user_ids(stream.realm_id)) + else: + realm_admin_ids = [user.id for user in stream.realm.get_admin_users_and_bots()] + send_stream_creation_event(stream, realm_admin_ids) + return stream, created
fix local redis url [nodeploy]
{ "datastore_mode": "local", - "redis_cache_url": "redis://localhost:6739", + "redis_cache_url": "redis://localhost:6379", "tasks_mode": "local", "log_level": "info", "tba_log_level": "debug",
Make DK-DK2->SE exchange available Fixes a misspelling in the parser, causing DK-DK2->SE exchange not to be shown on eMap DK-DK2->SE was written DK-DK2->SE-SE
@@ -123,7 +123,7 @@ def fetch_exchange(zone_key1='DK-DK1', zone_key2='DK-DK2', session=None, 'DK-DK1->NL':'"ExchangeNetherlands"', 'DK-DK1->SE':'"ExchangeSweden"', 'DK-DK1->SE-SE3':'"ExchangeSweden"', - 'DK-DK2->SE-SE':'("ExchangeSweden" - "BornholmSE4")',# Exchange from Bornholm to Sweden is included in "ExchangeSweden" + 'DK-DK2->SE':'("ExchangeSweden" - "BornholmSE4")',# Exchange from Bornholm to Sweden is included in "ExchangeSweden" 'DK-DK2->SE-SE4':'("ExchangeSweden" - "BornholmSE4")' #but Bornholm island is reported separately from DK-DK2 in eMap }
Ensure peer is still running before we call run_task() on them RegularChainBodySyncer._assign_body_download_to_peers and FastChainBodySyncer._assign_receipt_download_to_peers would make async calls after looking up a peer and before calling run_task() on them, causing a LifecycleError sometimes. They now perform the peer lookup immediately before calling run_task(), ensuring the peer is still running when we do so.
@@ -267,13 +267,15 @@ class BaseBodyChainSyncer(Service, PeerSubscriber): Loop indefinitely, assigning idle peers to download any block bodies needed for syncing. """ while self.manager.is_running: - # from all the peers that are not currently downloading block bodies, get the fastest - peer = await self._body_peers.get_fastest() - # get headers for bodies that we need to download, preferring lowest block number batch_id, headers = await self._block_body_tasks.get(MAX_BODIES_FETCH) - # schedule the body download and move on + # from all the peers that are not currently downloading block bodies, get the + # fastest + peer = await self._body_peers.get_fastest() + # NOTE: If there are any async calls between getting the peer above and the + # run_task() below, we need to ensure the peer is still running, otherwise it may have + # stopped and run_task() will raise a LifecycleError. peer.manager.run_task(self._run_body_download_batch, peer, batch_id, headers) async def _block_body_bundle_processing(self, bundles: Tuple[BlockBodyBundle, ...]) -> None: @@ -768,13 +770,14 @@ class FastChainBodySyncer(BaseBodyChainSyncer): Loop indefinitely, assigning idle peers to download receipts needed for syncing. """ while self.manager.is_running: - # from all the peers that are not currently downloading receipts, get the fastest - peer = await self._receipt_peers.get_fastest() - # get headers for receipts that we need to download, preferring lowest block number batch_id, headers = await self._receipt_tasks.get(MAX_RECEIPTS_FETCH) - # schedule the receipt download and move on + # from all the peers that are not currently downloading receipts, get the fastest + peer = await self._receipt_peers.get_fastest() + # NOTE: If there are any async calls between getting the peer above and the + # run_task() below, we need to ensure the peer is still running, otherwise it may have + # stopped and run_task() will raise a LifecycleError. peer.manager.run_task(self._run_receipt_download_batch, peer, batch_id, headers) def _mark_body_download_complete(
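The underlying rule — do the liveness-sensitive lookup immediately before acting on it, with no awaits in between — can be sketched independently of Trinity's peer pool. Everything below is illustrative; Peer.run_task stands in for peer.manager.run_task() and simply refuses to schedule work on a stopped peer, the way run_task() raises LifecycleError:

import asyncio
from dataclasses import dataclass

@dataclass
class Peer:
    name: str
    latency: float
    is_running: bool = True

    def run_task(self, coro):
        if not self.is_running:
            raise RuntimeError(f"{self.name} is no longer running")
        return asyncio.ensure_future(coro)

async def download(peer, batch):
    await asyncio.sleep(0)  # placeholder for the real body/receipt download
    print(f"{peer.name} downloaded {batch}")

async def assign_downloads(queue, peers):
    while True:
        batch = await queue.get()       # may suspend for a long time...
        if batch is None:
            return                      # sentinel: no more work
        # ...so pick the peer only *after* the await; with no further awaits before
        # run_task(), the chosen peer cannot have stopped in the meantime.
        peer = min((p for p in peers if p.is_running), key=lambda p: p.latency)
        peer.run_task(download(peer, batch))

async def main():
    queue = asyncio.Queue()
    for item in ("headers 1-128", "headers 129-256", None):
        queue.put_nowait(item)
    await assign_downloads(queue, [Peer("peer-a", 0.05), Peer("peer-b", 0.20)])
    await asyncio.sleep(0.01)           # give the scheduled downloads time to finish

asyncio.run(main())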
update staging.yaml remove nh/dhis2/te [ci skip]
@@ -52,7 +52,7 @@ branches: - vellum-staging # DO NOT REMOVE this is similar to "autostaging", but for vellum #- fr/case-templates # FR May 15 - rn_only_select_app_type_when_no_advanced # Rohit June 27 - - nh/dhis2/te # Norman Oct 2 + #- nh/dhis2/te # Norman Oct 2 - sr-session-audit # Sravan Oct 15 - jls/kill-exchange # Jenny Nov 13 - bmb/saas-q4-qa # Biyeun Nov 13
cherry-pick: tighten up output If stdout or stderr are empty, don't print empty lines. Also trim any trailing lines so we don't show excess ones. Tested-by: Mike Frysinger
@@ -60,8 +60,10 @@ change id will be added. capture_stderr=True) status = p.Wait() - print(p.stdout, file=sys.stdout) - print(p.stderr, file=sys.stderr) + if p.stdout: + print(p.stdout.strip(), file=sys.stdout) + if p.stderr: + print(p.stderr.strip(), file=sys.stderr) if status == 0: # The cherry-pick was applied correctly. We just need to edit the
Fix issue with bad encoding on Windows With the encoding forced to a Unix variant, Emacs does not correctly handle Windows-style line endings, which prevents the code from being compiled.
@@ -354,7 +354,6 @@ With arg, turn mode on if and only if arg is positive. (provide 'live-py-mode) ;; Local Variables: -;; coding: us-ascii-unix ;; fill-column: 76 ;; indent-tabs-mode: nil ;; End:
Updated the formatting of the bullets We also need to update the screenshot so it matches the new description. Not sure how to do that.
@@ -10,11 +10,15 @@ The Promote tab The Promote tab is where you can configure a page's metadata, to help search engines find and index it. Below is a description of all the default fields under this tab. **For Search Engines** + * **Slug:** The section of the URL that appears after your website's domain e.g. ``http://domain.com/blog/[my-slug]/``. This is automatically generated from the main page title, which is set in the Content tab. Slugs should be entirely lowercase, with words separated by hyphens (-). It is recommended that you don't change a page's slug once a page is published. + * **Title tag:** This is the bold headline that often shows up search engine results. This is one of the most significant elements of how search engines rank the page. The keywords used here should align with the keywords you wish to be found for. If you don't think this field is working, ask your developers to check they have configured the site to output the appropriate tags on the frontend. + * **Meta description:** This is the descriptive text displayed underneath a headline in search engine results. It is designed to explain what this page is about. It has no impact on how search engines rank your content, but it can impact on the likelihood that a user will click your result. Ideally 140 to 155 characters in length. If you don't think this field is working, ask your developers to check they have configured the site to output the appropriate tags on the frontend. **For Site Menus** + * **Show in menus:** Ticking this box will ensure that the page is included in automatically generated menus on your site. Note: A page will only display in menus if all of its parent pages also have *Show in menus* ticked.
atvscript: Add start log entry Add a line that is printed when the script is started to make it easier to separate different runs because of append mode.
@@ -302,6 +302,8 @@ async def appstart(loop): loop.set_exception_handler(_handle_exception) + _LOGGER.debug("Started atvscript") + try: print(args.output(await _handle_command(args, abort_sem, loop)), flush=True) except Exception as ex:
Updated Sanskrit.rst * Updated Sanskrit.rst Added wiki * Update sanskrit.rst
Sanskrit
********

+Sanskrit is the primary liturgical language of Hinduism, a philosophical language of Hinduism, Jainism, Buddhism and Sikhism, and a literary language of ancient and medieval South Asia that also served as a lingua franca. It is a standardised dialect of Old Indo-Aryan, originating as Vedic Sanskrit and tracing its linguistic ancestry back to Proto-Indo-Iranian and Proto-Indo-European. As one of the oldest Indo-European languages for which substantial written documentation exists, Sanskrit holds a prominent position in Indo-European studies. (Source: `Wikipedia <https://en.m.wikipedia.org/wiki/Sanskrit>`_)
+
+
 Corpora
 =======
Remove reference to the mailinglist. I don't look at the mailing list and there's been basically zero activity there for a while. Pointing to it does no favors to us or to those with questions, so I'm guiding people to GH issues instead.
@@ -53,5 +53,5 @@ Tags will show up for you automatically in forms and the admin. For more info check out the `documentation <https://django-taggit.readthedocs.io/>`_. And for questions about usage or -development you can contact the `mailinglist -<https://groups.google.com/group/django-taggit>`_. +development you can create an issue on Github (if your question is about +usage please add the `question` tag).
gaze_estimation_demo: zero-initialize FaceInferenceResults members This works around a static analysis warning.
namespace gaze_estimation { struct FaceInferenceResults { - float faceDetectionConfidence; + float faceDetectionConfidence{}; cv::Rect faceBoundingBox; std::vector<cv::Point2i> faceLandmarks; @@ -20,8 +20,8 @@ struct FaceInferenceResults { cv::Rect rightEyeBoundingBox; cv::Point2f leftEyeMidpoint; cv::Point2f rightEyeMidpoint; - bool leftEyeState; - bool rightEyeState; + bool leftEyeState{}; + bool rightEyeState{}; cv::Point3f gazeVector;
Fixed add_host.sh script location Otherwise it gets downloaded to the /root folder
@@ -487,8 +487,8 @@ oc adm policy add-cluster-role-to-user cluster-admin ${AUSERNAME} # Workaround for BZ1469358 ansible master1 -b -m fetch -a "src=/etc/origin/master/ca.serial.txt dest=/tmp/ca.serial.txt flat=true" ansible masters -b -m copy -a "src=/tmp/ca.serial.txt dest=/etc/origin/master/ca.serial.txt mode=644 owner=root" -curl https://raw.githubusercontent.com/openshift/openshift-ansible-contrib/master/reference-architecture/azure-ansible/add_host.sh -o add_host.sh -s -chmod a+x ./add_host.sh +curl https://raw.githubusercontent.com/openshift/openshift-ansible-contrib/master/reference-architecture/azure-ansible/add_host.sh -o /home/${AUSERNAME}/add_host.sh -s +chmod a+x /home/${AUSERNAME}/add_host.sh cat /home/${AUSERNAME}/openshift-install.out | tr -cd [:print:] | mail -s "${RESOURCEGROUP} Install Complete" ${RHNUSERNAME} || true touch /root/.openshiftcomplete touch /home/${AUSERNAME}/.openshiftcomplete
stream settings: Remove background click handler in "Manage Streams". This click handler reset the stream creation form; it's not clear why that behavior would be useful, or why we'd want anything to happen when clicking in these background areas, so the correct thing to do is just remove the handler. Fixes:
@@ -1127,14 +1127,4 @@ export function initialize() { $(".right").removeClass("show"); $(".subscriptions-header").removeClass("slide-left"); }); - - { - const sel = ".search-container, .streams-list, .subscriptions-header"; - - $("#manage_streams_container").on("click", sel, (e) => { - if ($(e.target).is(sel)) { - stream_edit.open_edit_panel_empty(); - } - }); - } }
Improve the background task documentation This should hopefully indicate that `test_app` context blocks can be used to ensure that background tasks complete within the test.
Background tasks ================ -Some actions can often take a lot of time to complete, which may cause -the client to timeout before receiving a response. Equally some tasks -just don't need to be completed before the response is sent and -instead can be done in the background. Quart provides a way to create -and run a task in the background via the app ``add_background_task`` -method, +If you have a task to perform where the outcome or result isn't +required you can utilise a background task to run it. Background tasks +run concurrently with the route handlers etc, i.e. in the +background. Background tasks are very useful when they contain actions +that take a lot of time to complete, as they allow a response to be +sent to the client whilst the task itself is carried out. Equally some +tasks just don't need to be completed before the response is sent and +instead can be done in the background. + +Background tasks in Quart are created via the ``add_background_task`` +method: .. code-block:: python @@ -20,9 +25,18 @@ method, app.add_background_task(background_task) return 'Success' + @app.before_serving + async def startup(): + app.add_background_task(background_task) + + The background tasks will have access to the app context. The tasks will be awaited during shutdown to ensure they complete before the app -shutdowns. +shuts down. If your task does not complete it will eventually be +cancelled as the app is forceably shut down by the server. + +Synchronous background tasks are supported and will run in a separate +thread. .. warning:: @@ -37,6 +51,21 @@ shutdowns. Testing background tasks ------------------------ +To ensure that background tasks complete in tests utilise the +``test_app`` context manager. This will wait for any background +tasks to complete before allowing the test to continue: + +.. code-block:: python + + async def test_tasks_complete(): + async with app.test_app(): + app.add_background_task(...) + # Background task has completed here + assert task_has_done_something + +Note when testing an app the ``test_client`` usage should be within +the ``test_app`` context block. + The background task coroutine function can be tested by creating an app context and await the function,
DeleteChannels : Default channels plug to "" This matches the behaviour of all our other Delete* nodes. Breaking change : Changed default value for DeleteChannels channels plug
@@ -55,7 +55,7 @@ DeleteChannels::DeleteChannels( const std::string &name ) storeIndexOfNextChild( g_firstPlugIndex ); addChild( new IntPlug( "mode", Plug::In, Delete, Delete, Keep ) ); - addChild( new StringPlug( "channels", Gaffer::Plug::In, "[RGB]" ) ); + addChild( new StringPlug( "channels" ) ); // Direct pass-through for the things we don't ever change. // This not only simplifies our implementation, but it is also
Show newest votes on legislator page The "recent votes" section used default sorting, so it didn't guarantee that it would actually show the legislator's most recent votes.
@@ -144,9 +144,11 @@ def person(request, person_id): .order_by("-created_at", "id")[:SPONSORED_BILLS_TO_SHOW] ) - votes = person.votes.all().select_related("vote_event", "vote_event__bill")[ - :RECENT_VOTES_TO_SHOW - ] + votes = ( + person.votes.all() + .select_related("vote_event", "vote_event__bill") + .order_by("-vote_event__start__date")[:RECENT_VOTES_TO_SHOW] + ) person.vote_events = [] for vote in votes: vote_event = vote.vote_event
Update README.rst README is pointing to a container that hasn't been released yet.
@@ -89,13 +89,13 @@ Use this installation mode if you are contributing to NeMo. Docker containers: ~~~~~~~~~~~~~~~~~~ The easiest way to start training with NeMo is by using `NeMo's container <https://ngc.nvidia.com/catalog/containers/nvidia:nemo>`_. -It has all requirements and NeMo 1.0.0rc1 already installed. +It has all requirements and NeMo 1.0.0b3 already installed. .. code-block:: bash docker run --gpus all -it --rm --shm-size=8g \ -p 8888:8888 -p 6006:6006 --ulimit memlock=-1 --ulimit \ - stack=67108864 --device=/dev/snd nvcr.io/nvidia/nemo:1.0.0rc1 + stack=67108864 --device=/dev/snd nvcr.io/nvidia/nemo:1.0.0b3 If you chose to work with main branch, we recommend using NVIDIA's PyTorch container version 20.11-py3 and then installing from GitHub.
Don't copy color layer unnecessarily If the color layer uses the same UFO layer as the base one, then no need to copy the layer at all. For example: <key>com.github.googlei18n.ufo2ft.colorLayerMapping</key> <array> <array> <string>public.default</string> <integer>0</integer> </array> </array>
@@ -69,6 +69,9 @@ class ExplodeColorLayerGlyphsFilter(BaseFilter): for layerName, colorID in colorLayerMapping: layerGlyphSet = self._getLayer(font, layerName) if glyph.name in layerGlyphSet: + if glyph == layerGlyphSet[glyph.name]: + layerGlyphName = glyph.name + else: layerGlyphName = self._copyGlyph( layerGlyphSet, glyphSet, glyph.name, layerName )
subs: Properly focus on Stream name box while creating a new stream. Fixes
@@ -477,8 +477,8 @@ exports.change_state = (function () { if (hash.arguments.length > 0) { // if in #streams/new form. if (hash.arguments[0] === "new") { - exports.new_stream_clicked(); components.toggle.lookup("stream-filter-toggle").goto("all-streams"); + exports.new_stream_clicked(); } else if (hash.arguments[0] === "all") { components.toggle.lookup("stream-filter-toggle").goto("all-streams"); } else if (hash.arguments[0] === "subscribed") {
Update graphsage.py Store a detached copy of the node embeddings (logits.clone().detach()) as the graph's node features instead of the raw logits tensor.
@@ -141,7 +141,7 @@ class GraphSAGE(GNNBase): else: logits = logits - graph.node_features['node_emb']=logits #put the results into the NLPGraph + graph.node_features['node_emb']=logits.clone().detach() #put the results into the NLPGraph return graph
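A hedged illustration of what clone().detach() changes here, assuming PyTorch is available; the feature dict below is a stand-in for the NLPGraph node features:

import torch

x = torch.randn(3, requires_grad=True)
logits = x * 2                                        # still part of the autograd graph

node_features = {}
node_features["node_emb"] = logits.clone().detach()   # an independent copy outside the graph

print(node_features["node_emb"].requires_grad)   # False: safe to store or mutate later
print(logits.requires_grad)                      # True: the original tensor still backpropagates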
Delete mux and muy Two unnecessary lines computing mux and muy were deleted.
@@ -155,8 +155,6 @@ def _forward(args, index, config, data, variables, output_image=True): if e.repeat_evaluation_type == "last": avg = sum elif e.repeat_evaluation_type == "std": - mux = np.array([s / e.num_evaluations for s in sum_mux]) - muy = np.array([(s / e.num_evaluations)**2 for s in sum]) std_result = [np.nan_to_num(np.sqrt(x / e.num_evaluations - (y / e.num_evaluations)**2)) for x, y in zip(sum_mux, sum)] avg = std_result else:
docs: fix typo in parameter set docs Fix typo identified in Added emphasis on the need to install / reinstall the package
@@ -17,7 +17,7 @@ Adding Parameter Sets ********************* Parameter sets can be added to PyBaMM by creating a python package, and -registering a `entry point`_ to ``pybamm_parameter_sets``. At a minimum, the +registering a `entry point`_ to ``pybamm_parameter_set``. At a minimum, the package (``cell_parameters``) should consist of the following:: cell_parameters @@ -46,11 +46,11 @@ For an example, see the `Marquis2019`_ parameter sets. ... } -Then register ``get_parameter_values`` to ``pybamm_parameter_sets`` in ``pyproject.toml``: +Then register ``get_parameter_values`` to ``pybamm_parameter_set`` in ``pyproject.toml``: .. code-block:: toml - [project.entry-points.pybamm_parameter_sets] + [project.entry-points.pybamm_parameter_set] cell_alpha = "cell_parameters.cell_alpha:get_parameter_values" If you are using ``setup.py`` or ``setup.cfg`` to setup your package, please @@ -58,6 +58,21 @@ see SetupTools' documentation for registering `entry points`_. .. _entry points: https://setuptools.pypa.io/en/latest/userguide/entry_point.html#entry-points-for-plugins +Finally install you package (``python -m pip install .``), to complete the process. +You will need to reinstall your package every time you add a new parameter set. +If you're actively editing the parameter set it may be helpful to install in +editing mode (``python -m pip install -e .``) instead. + +Once successfully registered, your parameter set will appear within the contents +of ``pybamm.parameter_sets``, along with any other bundled or installed +third-party parameter sets. + +.. doctest:: + + >>> import pybamm + >>> list(pybamm.parameter_sets) + ['Ai2020', 'Chen2020', ...] + If you're willing to open-source your parameter set, `let us know`_, and we can add an entry to :ref:`third-party-parameter-sets`. @@ -70,7 +85,7 @@ If you're willing to open-source your parameter set, Third-Party Parameter Sets ************************** -Registered a new parameter set to ``pybamm_parameter_sets``? +Registered a new parameter set to ``pybamm_parameter_set``? `Let us know`_, and we'll update our list. .. _bundled-parameter-sets:
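For readers wondering how the registered entry points are later consumed, a hedged sketch using importlib.metadata (Python 3.10+, or the importlib_metadata backport); the group name follows the docs above, and whether any entries exist depends on what is installed:

from importlib.metadata import entry_points

for ep in entry_points(group="pybamm_parameter_set"):
    get_parameter_values = ep.load()  # resolves e.g. "cell_parameters.cell_alpha:get_parameter_values"
    print(ep.name, "->", get_parameter_values)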
Update running.rst minor typo
@@ -219,7 +219,7 @@ significantly improving rendering performance. In production, you probably want to serve static files from a more optimized static file server like `nginx <http://nginx.net/>`_. You -can configure most any web server to recognize the version tags used +can configure almost any web server to recognize the version tags used by ``static_url()`` and set caching headers accordingly. Here is the relevant portion of the nginx configuration we use at FriendFeed::
Update deprecated-features.rst Fixed grammar
@@ -3,10 +3,10 @@ Deprecation Policy This document outlines the process for announcing deprecated features to the community. The guiding principle is `no surprises <https://docs.mattermost.com/developer/manifesto.html#no-surprises>`_ with guaranteed long-term stability, where admins or users should never run into anything unexpected with Mattermost. -Definition of Deprecated Feature -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ +Definition of a Deprecated Feature +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -A deprecated feature is considered one that breaks backwards compatibility with previous versions. +A deprecated feature is considered to be one that breaks backwards compatibility with previous versions. Examples include:
Remove publish docker from nightly build Summary: Fixes nightly build failure Test Plan: buildkite Reviewers: schrockn
import sys import yaml -from defines import SupportedPython, SupportedPythons -from step_builder import BuildkiteQueue, StepBuilder +from defines import SupportedPython +from step_builder import StepBuilder SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__)) sys.path.append(SCRIPT_PATH) - -def publish_docker_images(): - # e.g. 27, 35, 36, 37 - python_versions = [''.join(py_version.split('.')[:2]) for py_version in SupportedPythons] - - return [ - StepBuilder("docker image %s" % version) - .run( - r"aws s3 cp s3://\${BUILDKITE_SECRETS_BUCKET}/dockerhub-creds /tmp/dockerhub-creds", - "cat /tmp/dockerhub-creds | docker login --username elementldevtools --password-stdin", - "pushd /workdir/.buildkite/images/", - "make VERSION=\"public\" build-public-{version}".format(version=version), - "make VERSION=\"public\" push-public-{version}".format(version=version), - ) - .on_integration_image(SupportedPython.V3_7, ['BUILDKITE_SECRETS_BUCKET']) - .on_queue(BuildkiteQueue.DOCKER) - .with_timeout(30) - .build() - for version in python_versions - ] - - if __name__ == "__main__": - steps = publish_docker_images() + [ + steps = [ StepBuilder('publish nightlies') .on_integration_image( SupportedPython.V3_7, ['SLACK_RELEASE_BOT_TOKEN', 'PYPI_USERNAME', 'PYPI_PASSWORD']
Port 51998 to master modules/postgres.py: replace sort with sorted.
@@ -3234,9 +3234,9 @@ def has_privileges( else: perms = [_PRIVILEGES_MAP[perm] for perm in _perms] if "ALL" in _privs: - retval = perms.sort() == _privileges[name].keys().sort() + retval = sorted(perms) == sorted(_privileges[name]) else: - retval = set(_privs).issubset(set(_privileges[name].keys())) + retval = set(_privs).issubset(set(_privileges[name])) return retval return False
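The bug being fixed is the classic list.sort()-returns-None pitfall; a minimal demonstration:

perms = ["SELECT", "INSERT"]
granted = {"INSERT": None, "SELECT": None}

# list.sort() sorts in place and returns None, so the old comparison never compared
# two sorted lists; worse, on Python 3 dict.keys() has no .sort() method at all.
print(perms.sort())                                      # None

# sorted() returns a new list, and sorted(dict) sorts the dict's keys.
print(sorted(["SELECT", "INSERT"]) == sorted(granted))   # True: a real comparison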
Symbolic solver: introduce a counter for calls to Has_Contradiction This will make each call to Has_Contradiction unique in traces, and will provide a convenient way in debuggers to step in a particular call. TN:
@@ -142,6 +142,11 @@ package body Langkit_Support.Adalog.Symbolic_Solver is -- List of N_Predicates, to be applied at the end of solving. TODO??? we -- could apply this policy for all predicates, which would simplify the -- code a bit. + + Has_Contradiction_Counter : Natural; + -- During the Simplify optimization, number of times + -- ``Has_Contradiction`` was called. Used for logging/debugging + -- purposes. end record; -- Data used when doing a topological sort (used only in -- Solving_Context.Sort_Ctx), when we reach a complete potential solution. @@ -329,6 +334,7 @@ package body Langkit_Support.Adalog.Symbolic_Solver is return Result : constant Sort_Context := new Sort_Context_Type do Result.Using_Atoms := new Atom_Vector_Array' (Vars'Range => Atom_Vectors.Empty_Vector); + Result.Has_Contradiction_Counter := 0; end return; end Create; @@ -1340,8 +1346,13 @@ package body Langkit_Support.Adalog.Symbolic_Solver is Result : Boolean; begin + Sort_Ctx.Has_Contradiction_Counter := + Sort_Ctx.Has_Contradiction_Counter + 1; + if Simplify_Trace.Is_Active then - Simplify_Trace.Increase_Indent ("Looking for a contradiction"); + Simplify_Trace.Increase_Indent + ("Looking for a contradiction (number" + & Sort_Ctx.Has_Contradiction_Counter'Image & ")"); Simplify_Trace.Trace (Image (Atoms)); end if;
Removed a FIXME that is no longer valid We sped the label: locator up considerably so this shouldn't be an issue.
@@ -1473,9 +1473,6 @@ class Salesforce(object): for label, value in list(zip(it, it)): # this uses our custom "label" locator strategy locator = f"label:{label}" - # FIXME: we should probably only wait for the first label; - # after that we can assume the fields have been rendered - # so that we fail quickly if we can't find the element element = self.selenium.get_webelement(locator) self.scroll_element_into_view(locator) handler = get_form_handler(element, locator)
fix(bump): fix bump find_increment error In the previous design, MAJOR could be overwritten by other increment types.
@@ -34,11 +34,11 @@ def find_increment( continue found_keyword = result.group(0) new_increment = increments_map_default[found_keyword] - if new_increment == "MAJOR": - increment = new_increment - break - elif increment == "MINOR" and new_increment == "PATCH": + if increment == "MAJOR": continue + elif increment == "MINOR" and new_increment == "MAJOR": + increment = new_increment + elif increment == "PATCH" or increment is None: increment = new_increment return increment
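A standalone sketch of the corrected precedence rule (MAJOR > MINOR > PATCH, and an already-found MAJOR can never be downgraded). The keyword-to-increment mapping here is simplified compared to commitizen's real regex-based tables:

INCREMENT_MAP = {"BREAKING CHANGE": "MAJOR", "feat": "MINOR", "fix": "PATCH"}

def find_increment(keywords):
    increment = None
    for keyword in keywords:
        new_increment = INCREMENT_MAP.get(keyword)
        if new_increment is None or increment == "MAJOR":
            continue                   # nothing can override an already-found MAJOR
        if increment == "MINOR" and new_increment == "MAJOR":
            increment = new_increment  # only MAJOR may upgrade MINOR
        elif increment == "PATCH" or increment is None:
            increment = new_increment  # PATCH (or nothing yet) can be upgraded by anything
    return increment

assert find_increment(["fix", "BREAKING CHANGE", "feat"]) == "MAJOR"
assert find_increment(["fix", "feat"]) == "MINOR"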
rocketchat: Only set message content if it exists. Not sure where those come from since we discovered this with production data.
@@ -648,7 +648,15 @@ def process_messages( def message_to_dict(message: Dict[str, Any]) -> Dict[str, Any]: rc_sender_id = message["u"]["_id"] sender_id = user_id_mapper.get(rc_sender_id) + if "msg" in message: content = message["msg"] + else: # nocoverage + content = "This message imported from Rocket.Chat had no body in the data export." + logging.info( + "Message %s contains no message content: %s", + message["_id"], + message, + ) if message.get("reactions"): reactions = list_reactions(message["reactions"])
Tests: wait for FTS transfer to actually finish Otherwise, we can poll the status while it is still active, and the test fails
@@ -31,6 +31,8 @@ from rucio.client.rseclient import RSEClient from rucio.client.ruleclient import RuleClient from rucio.common.utils import run_cmd_process +MAX_POLL_WAIT_SECONDS = 60 + @pytest.fixture def did_factory(vo, test_scope): @@ -130,9 +132,14 @@ def test_tpc(containerized_rses, root_account, test_scope, did_factory, rse_clie # Check FTS transfer job assert fts_transfer_id is not None - assert fts_transfer_status in ['SUBMITTED', 'ACTIVE'] + # Wait for the FTS transfer to finish + fts_transfer_status = None + for _ in range(MAX_POLL_WAIT_SECONDS): fts_transfer_status = poll_fts_transfer_status(fts_transfer_id) + if fts_transfer_status not in ['SUBMITTED', 'ACTIVE']: + break + time.sleep(1) assert fts_transfer_status == 'FINISHED' poller.run(once=True, older_than=0)
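The same poll-with-deadline idiom, extracted into a small reusable sketch; the poll callable and state names are placeholders rather than Rucio/FTS specifics:

import time

def wait_for_terminal_state(poll, pending_states=("SUBMITTED", "ACTIVE"),
                            max_tries=60, sleep_seconds=1):
    """Poll until the status leaves the pending states or the deadline expires."""
    status = None
    for _ in range(max_tries):
        status = poll()
        if status not in pending_states:
            break
        time.sleep(sleep_seconds)
    return status

# Fake poller that finishes on the third call:
responses = iter(["SUBMITTED", "ACTIVE", "FINISHED"])
assert wait_for_terminal_state(lambda: next(responses), sleep_seconds=0) == "FINISHED"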