message stringlengths 13 to 484 | diff stringlengths 38 to 4.63k |
---|---|
Update .gitignore
This update to the .gitignore pre-emptively adds a few folders, namely lib/, dist/, and venv/.
lib/ and dist/ are common names of folders used to store build artifacts,
and venv/ is the default environment name for Python virtual environments. | @@ -64,6 +64,7 @@ _pycache_
augur/bin
.pytest_cache
.ipynb_checkpoints
+venv/
# Node #
########
@@ -86,3 +87,6 @@ docs/python/build/doctrees/augurcontext.doctree
# build directories #
#####################
build/
+lib/
+dist/
+
|
Remove unused args
fixes | @@ -2,6 +2,7 @@ commonfields:
id: UnzipFile
version: -1
name: UnzipFile
+releaseNotes: "Remove unused arguments"
script: |-
import zipfile
import os
@@ -97,12 +98,14 @@ tags:
args:
- name: fileName
default: true
+ deprecated: true
- name: password
secret: true
description: optional password which zip file protected by
- name: entryID
description: entry id of the attached zip file in the warroom
- name: lastZipFileInWarroom
+ deprecated: true
outputs:
- contextPath: ExtractedFiles
description: list of file names which extracted from zip
|
[Hexagon] Correct use of wrong cmake variable
The code should be checking DSPRPC_LIB_DIRS instead of REMOTE_DIR. | @@ -49,7 +49,7 @@ if (BUILD_FOR_ANDROID AND USE_HEXAGON_SDK)
get_hexagon_sdk_property("${USE_HEXAGON_SDK}" "${USE_HEXAGON_ARCH}"
DSPRPC_LIB DSPRPC_LIB_DIRS
)
- if(REMOTE_DIR)
+ if(DSPRPC_LIB_DIRS)
link_directories(${DSPRPC_LIB_DIRS})
else()
message(WARNING "Could not locate some Hexagon SDK components")
|
Fix `unit.cloud.clouds.test_ec2` for Windows
Mock instead of creating a tempfile | # Import Python libs
from __future__ import absolute_import
-import os
-import tempfile
# Import Salt Libs
from salt.cloud.clouds import ec2
from salt.exceptions import SaltCloudSystemExit
# Import Salt Testing Libs
-from tests.support.mixins import LoaderModuleMockMixin
from tests.support.unit import TestCase, skipIf
-from tests.support.mock import NO_MOCK, NO_MOCK_REASON
+from tests.support.mock import NO_MOCK, NO_MOCK_REASON, patch, PropertyMock
@skipIf(NO_MOCK, NO_MOCK_REASON)
-class EC2TestCase(TestCase, LoaderModuleMockMixin):
+class EC2TestCase(TestCase):
'''
Unit TestCase for salt.cloud.clouds.ec2 module.
'''
- def setup_loader_modules(self):
- return {ec2: {}}
-
def test__validate_key_path_and_mode(self):
- with tempfile.NamedTemporaryFile() as f:
- key_file = f.name
-
- os.chmod(key_file, 0o644)
- self.assertRaises(SaltCloudSystemExit,
- ec2._validate_key_path_and_mode,
- key_file)
- os.chmod(key_file, 0o600)
- self.assertTrue(ec2._validate_key_path_and_mode(key_file))
- os.chmod(key_file, 0o400)
- self.assertTrue(ec2._validate_key_path_and_mode(key_file))
-
- # tmp file removed
- self.assertRaises(SaltCloudSystemExit,
- ec2._validate_key_path_and_mode,
- key_file)
+
+ # Key file exists
+ with patch('os.path.exists', return_value=True):
+ with patch('os.stat') as patched_stat:
+
+ type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o644)
+ self.assertRaises(
+ SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')
+
+ type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o600)
+ self.assertTrue(ec2._validate_key_path_and_mode('key_file'))
+
+ type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o400)
+ self.assertTrue(ec2._validate_key_path_and_mode('key_file'))
+
+ # Key file does not exist
+ with patch('os.path.exists', return_value=False):
+ self.assertRaises(
+ SaltCloudSystemExit, ec2._validate_key_path_and_mode, 'key_file')
|
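The technique in this patch, attaching a `PropertyMock` to the type of the mock returned by `patch('os.stat')`, is how a read-only attribute such as `st_mode` can be faked without touching the filesystem. A minimal standalone sketch of that idea, using the standard-library `unittest.mock` instead of Salt's test-support wrapper (the file name and mode below are arbitrary):

```python
import os
from unittest.mock import patch, PropertyMock

with patch('os.stat') as patched_stat:
    # Properties must be attached to the mock's type, not the instance,
    # because property descriptors are looked up on the class.
    type(patched_stat.return_value).st_mode = PropertyMock(return_value=0o600)
    assert os.stat('key_file').st_mode == 0o600  # no real file is consulted
```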
Fix libfaiss dependency to not expressly depend on conda-forge
Authors:
- Jordan Jacobelli (https://github.com/Ethyling)
Approvers:
- Ray Douglass (https://github.com/raydouglass)
URL: | @@ -49,7 +49,7 @@ requirements:
- faiss-proc=*=cuda
- gtest=1.10.0
- gmock
- - conda-forge::libfaiss=1.7.0
+ - libfaiss 1.7.0 *_cuda
run:
- libcumlprims {{ minor_version }}
- cudf {{ minor_version }}
@@ -59,7 +59,7 @@ requirements:
- {{ pin_compatible('cudatoolkit', max_pin='x.x') }}
- treelite=2.0.0
- faiss-proc=*=cuda
- - conda-forge::libfaiss=1.7.0
+ - libfaiss 1.7.0 *_cuda
about:
home: http://rapids.ai/
|
Vim plugin: use systemlist (if available)
We need yapf output as a list, so use the systemlist() vim function,
which was added in vim 7.4.248. | @@ -33,11 +33,16 @@ function! yapf#YAPF() range
let l:cmd = 'yapf --lines=' . l:line_ranges
" Call YAPF with the current buffer
- let l:formatted_text = system(l:cmd, join(getline(1, '$'), "\n") . "\n")
+ if exists('*systemlist')
+ let l:formatted_text = systemlist(l:cmd, join(getline(1, '$'), "\n") . "\n")
+ else
+ let l:formatted_text =
+ \ split(system(l:cmd, join(getline(1, '$'), "\n") . "\n"), "\n")
+ endif
" Update the buffer.
execute '1,' . string(line('$')) . 'delete'
- call setline(1, split(l:formatted_text, "\n"))
+ call setline(1, l:formatted_text)
" Reset cursor to first line of the formatted range.
call cursor(a:firstline, 1)
|
Make status reporter create cfyuser group
The status reporter now requires this group, so it has been failing on broker and DB nodes which don't have it. | @@ -50,6 +50,7 @@ cp -R ${RPM_SOURCE_DIR}/packaging/status-reporter/files/* %{buildroot}
%pre
+groupadd -fr cfyuser
getent passwd cfyreporter >/dev/null || useradd -r -d /etc/cloudify -s /sbin/nologin cfyreporter
%files
|
find: Changing first message
Changing the "I am an AI bot" message from the az find command | @@ -19,7 +19,7 @@ from pkg_resources import parse_version
from knack.log import get_logger
logger = get_logger(__name__)
-WAIT_MESSAGE = ['I\'m an AI bot (learn more: aka.ms/aladdinkb); Let me see how I can help you...']
+WAIT_MESSAGE = ['Finding examples...']
EXTENSION_NAME = 'find'
|
Allow the evaluation to write to the input volume
This volume will be destroyed anyway. | @@ -122,7 +122,7 @@ class Evaluator(object):
self._client.containers.run(
image=self._eval_image_sha256,
volumes={
- self._input_volume: {'bind': '/input/', 'mode': 'ro'},
+ self._input_volume: {'bind': '/input/', 'mode': 'rw'},
self._output_volume: {'bind': '/output/', 'mode': 'rw'},
},
**self._run_kwargs,
|
llvm, state: Remove custom output struct type callback
The default works ok, and it will be needed to implement shape casting. | @@ -2149,9 +2149,6 @@ class State_Base(State):
def _get_input_struct_type(self, ctx):
return ctx.get_input_struct_type(self.function)
- def _get_output_struct_type(self, ctx):
- return ctx.get_output_struct_type(self.function)
-
def _get_param_struct_type(self, ctx):
return ctx.get_param_struct_type(self.function)
|
FIX: openstack attach ip timeout
increased the number of attempts and the timeout limit
switched the attempt limit to a time limit | +import datetime
import os
import logging
import socket
@@ -198,24 +199,25 @@ class CephVMNode(object):
logger.info("Destroying volume %s", name)
driver.destroy_volume(volume)
- def attach_floating_ip(self):
+ def attach_floating_ip(self, timeout=120):
driver = self.driver
pool = driver.ex_list_floating_ip_pools()[0]
self.floating_ip = pool.create_floating_ip()
self.ip_address = self.floating_ip.ip_address
count = 0
host = None
+ timeout = datetime.timedelta(seconds=timeout)
+ starttime = datetime.datetime.now()
+ logger.info("Trying gethostbyaddr with {timeout}s timeout".format(timeout=timeout))
while True:
try:
- count += 1
host, _, _ = socket.gethostbyaddr(self.ip_address)
except:
- if count > 3:
- logger.info("Failed to get hostbyaddr in 3 retries")
+ if datetime.datetime.now() - starttime > timeout:
+ logger.info("Failed to get hostbyaddr in {timeout}s".format(timeout=timeout))
raise InvalidHostName("Invalid hostname for " + self.ip_address)
else:
- logger.info("Retrying gethostbyaddr in 10 seconds")
- sleep(10)
+ sleep(1)
if host is not None:
break
self.hostname = host
|
send_datasets command: `send_date` param optional
Allows the command to reproduce the "Send now" button exactly. | @@ -6,15 +6,19 @@ from corehq.motech.dhis2.tasks import send_datasets
class Command(BaseCommand):
- """
- Manually send datasets for a project assuming it was run at a date in the past
- """
+ help = ('Manually send datasets for a domain. Specify --send-date '
+ 'to simulate a date in the past')
def add_arguments(self, parser):
parser.add_argument('domain_name')
- parser.add_argument('send_date', help="YYYY-MM-DD")
+ parser.add_argument('--send_date', help="YYYY-MM-DD")
- def handle(self, domain_name, send_date, **options):
- send_date = datetime.strptime(send_date, '%Y-%m-%d')
+ def handle(self, domain_name, **options):
+ if 'send_date' in options:
+ send_date = datetime.strptime(options['send_date'], '%Y-%m-%d')
+ else:
+ send_date = None
print("Sending dataset")
- send_datasets(domain_name, send_now=True, send_date=send_date)
+ send_datasets.apply(domain_name, kwargs={
+ 'send_now': True, 'send_date': send_date,
+ })
|
ci: fix gha deprecations for promote-ga action
update docker/login-action to v2
remove usage of deprecated ::set-output | @@ -19,7 +19,7 @@ jobs:
with:
fetch-depth: 0
- name: "Docker Login"
- uses: docker/login-action@v1
+ uses: docker/login-action@v2
with:
registry: ${{ (!startsWith(secrets.RELEASE_REGISTRY, 'docker.io/')) && secrets.RELEASE_REGISTRY || null }}
username: ${{ secrets.GH_DOCKER_RELEASE_USERNAME }}
@@ -33,7 +33,7 @@ jobs:
if: always()
- id: check-slack-webhook
name: Assign slack webhook variable
- run: echo '::set-output name=slack_webhook_url::${{secrets.SLACK_WEBHOOK_URL}}'
+ run: echo "slack_webhook_url=${{secrets.SLACK_WEBHOOK_URL}}" >> $GITHUB_OUTPUT
- name: Slack notification
if: steps.check-slack-webhook.outputs.slack_webhook_url && always()
uses: edge/simple-slack-notify@master
@@ -72,7 +72,7 @@ jobs:
make release/ga/create-gh-release
- id: check-slack-webhook
name: Assign slack webhook variable
- run: echo '::set-output name=slack_webhook_url::${{secrets.SLACK_WEBHOOK_URL}}'
+ run: echo "slack_webhook_url=${{secrets.SLACK_WEBHOOK_URL}}" >> $GITHUB_OUTPUT
- name: Slack notification
if: steps.check-slack-webhook.outputs.slack_webhook_url && always()
uses: edge/simple-slack-notify@master
|
Adalog/Logic_Ref: simplify Set_Value code
TN: | @@ -30,22 +30,17 @@ package body Langkit_Support.Adalog.Logic_Ref is
function Set_Value (Self : in out Var; Data : Element_Type) return Boolean
is
- Old : Var := Self;
begin
- Inc_Ref (Old.Value);
-
if Debug.Debug then
Trace ("Setting the value of " & Image (Self) & " to "
& Element_Image (Data));
- Trace ("Old value is " & Element_Image (Old.Value));
+ Trace ("Old value is " & Element_Image (Self.Value));
end if;
Dec_Ref (Self.Value);
Self.Value := Data;
Inc_Ref (Self.Value);
Self.Reset := False;
-
- Dec_Ref (Old.Value);
return True;
end Set_Value;
|
SConstruct : Remove `gaffer` wrapper on Windows
We were already not including the Windows-specific `gaffer.cmd` on Linux; this removes the corresponding `gaffer` launch wrapper on Windows. | @@ -1352,7 +1352,7 @@ libraries = {
},
"scripts" : {
- "additionalFiles" : [ "bin/gaffer", "bin/__gaffer.py" ],
+ "additionalFiles" : [ "bin/__gaffer.py" ],
},
"misc" : {
@@ -1377,8 +1377,7 @@ libraries = {
}
-if env["PLATFORM"] == "win32" :
- libraries["scripts"]["additionalFiles"].append( "bin/gaffer.cmd" )
+libraries["scripts"]["additionalFiles"].append( "bin/gaffer.cmd" if env["PLATFORM"] == "win32" else "bin/gaffer" )
# Add on OpenGL libraries to definitions - these vary from platform to platform
for library in ( "GafferUI", "GafferScene", "GafferSceneUI", "GafferImageUI" ) :
|
Fix issue with encoder padding mask
Summary:
Fix issue with encoder padding mask
Also add lengths as a field in encoder_out of encode_src method
Add a conditional clause in transformer_monotonic_attention.py to handle the case where encoder_padding_mask is None | @@ -152,7 +152,8 @@ class TransformerMonotonicDecoder(TransformerDecoder):
encoder_out = encoder_out_dict["encoder_out"][0]
encoder_padding_mask = (
encoder_out_dict["encoder_padding_mask"][0]
- if len(encoder_out_dict["encoder_padding_mask"]) > 0
+ if encoder_out_dict["encoder_padding_mask"]
+ and len(encoder_out_dict["encoder_padding_mask"]) > 0
else None
)
|
Remove link to /#settings in header.
This removes the link to /#settings in the "Change your settings"
header at the top of the /help/change-your-settings page. | -# Change your [settings](/#settings)
+# Change your settings
1. Click the cog (<i class="icon-vector-cog"></i>) icon in the top right corner.
2. From the dropdown menu, choose the **Settings** option.
|
Dereference and null the temporary "element_utf8" variable.
To avoid a reference (memory) leak | @@ -1757,6 +1757,7 @@ PyWcsprm_sub(
"string values for axis sequence must be one of 'latitude', 'longitude', 'cubeface', 'spectral', 'stokes', or 'celestial'");
goto exit;
}
+ Py_CLEAR(element_utf8);
} else if (PyLong_Check(element)) {
tmp = (Py_ssize_t)PyLong_AsSsize_t(element);
if (tmp == -1 && PyErr_Occurred()) {
|
Prepare 2.7.0rc1
[ci skip-rust]
[ci skip-build-wheels] | # 2.7.x Stable Releases
+## 2.7.0rc1 (Sep 01, 2021)
+
+### Bug fixes
+
+* Error, don't warn, when `--generate-lockfiles-resolve` is set to a disabled tool lockfile (cherrypick of #12738) ([#12741](https://github.com/pantsbuild/pants/pull/12741))
+
+* Add specific and actionable instructions to stale lockfile errors (cherrypick of #12699). ([#12717](https://github.com/pantsbuild/pants/pull/12717))
+
+* Improve descriptions of the process executions for PEX_PATH composition. (cherrypick of #12736) ([#12737](https://github.com/pantsbuild/pants/pull/12737))
+
+* Absolutize all of the execute_pex_args in the venv script. (cherrypick of #12727) ([#12729](https://github.com/pantsbuild/pants/pull/12729))
+
## 2.7.0rc0 (Aug 31, 2021)
### New Features
|
[Cocoa] Make confirm_quit an instance attribute
We might need separate confirm_quit options for each window. This also
gets rid of the global variable. | @@ -48,8 +48,9 @@ class BrowserView:
else:
return False
- def windowShouldClose_(self, notification):
- if not _confirm_quit or self.display_confirmation_dialog():
+ def windowShouldClose_(self, window):
+ i = BrowserView.get_instance('window', window)
+ if not i.confirm_quit or self.display_confirmation_dialog():
return Foundation.YES
else:
return Foundation.NO
@@ -186,7 +187,7 @@ class BrowserView:
return handled
- def __init__(self, uid, title, url, width, height, resizable, fullscreen, min_size, background_color, webview_ready):
+ def __init__(self, uid, title, url, width, height, resizable, fullscreen, min_size, confirm_quit, background_color, webview_ready):
BrowserView.instances.append(self)
self.uid = uid
@@ -195,6 +196,8 @@ class BrowserView:
self._current_url_semaphor = threading.Semaphore(0)
self._js_result_semaphor = threading.Semaphore(0)
self.webview_ready = webview_ready
+ self.confirm_quit = confirm_quit
+
self.is_fullscreen = False
rect = AppKit.NSMakeRect(100.0, 350.0, width, height)
@@ -459,10 +462,7 @@ class BrowserView:
def create_window(title, url, width, height, resizable, fullscreen, min_size,
confirm_quit, background_color, webview_ready):
def create():
- global _confirm_quit
- _confirm_quit = confirm_quit
-
- browser = BrowserView(uid, title, url, width, height, resizable, fullscreen, min_size, background_color, webview_ready)
+ browser = BrowserView(uid, title, url, width, height, resizable, fullscreen, min_size, confirm_quit, background_color, webview_ready)
browser.show()
if not BrowserView.app.isRunning():
|
Added Travis CI info image
(For testing purposes currently referring to develop branch) | +[](https://travis-ci.com/textext/textext)
+
# TexText - A LaTeX/ XeLaTex/ LuaLaTex extension for Inkscape (releases 0.92, 0.91 and 0.48)
TexText is a Python plugin for the vector graphics editor [Inkscape](http://www.inkscape.org/) providing the possibility to add LaTeX generated SVG elements to your drawing.
|
Fix crcPI Space issue / Removed ctypes import and replaced with &s
crc overflowed 16 bits, but Python integers do not overflow, so that broke; added bitwise ANDs to limit the value to uint16
Removed ctypes dependency and replaced with &s | #!/usr/bin/env python3
import logging
-import ctypes
log = logging.getLogger("MPP-Solar")
@@ -140,26 +139,24 @@ def crcPI(data_bytes):
]
for c in data_bytes:
- # todo fix spaces
- if c == " ":
- continue
# log.debug('Encoding %s', c)
# todo fix response for older python
if type(c) == str:
c = ord(c)
- t_da = ctypes.c_uint8(crc >> 8)
- da = t_da.value >> 4
- crc <<= 4
+ da = ((crc >> 8) & 0xFF) >> 4
+ crc = (crc << 4) & 0xFFFF
+
index = da ^ (c >> 4)
crc ^= crc_ta[index]
- t_da = ctypes.c_uint8(crc >> 8)
- da = t_da.value >> 4
- crc <<= 4
+
+ da = ((crc >> 8) & 0xFF) >> 4
+ crc = (crc << 4) & 0xFFFF
+
index = da ^ (c & 0x0F)
crc ^= crc_ta[index]
- crc_low = ctypes.c_uint8(crc).value
- crc_high = ctypes.c_uint8(crc >> 8).value
+ crc_low = crc & 0xFF
+ crc_high = (crc >> 8) & 0xFF
if crc_low == 0x28 or crc_low == 0x0D or crc_low == 0x0A:
crc_low += 1
|
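Because Python integers never overflow, the bitwise ANDs introduced above are what keep the intermediate CRC behaving like a `uint16` and the extracted bytes like `uint8`. A small illustrative check of that masking (the starting value is arbitrary):

```python
crc = 0xFFFF                  # hypothetical 16-bit CRC value
crc = (crc << 4) & 0xFFFF     # without the mask this would grow to 0xFFFF0
assert crc == 0xFFF0

crc_low = crc & 0xFF          # replaces ctypes.c_uint8(crc).value
crc_high = (crc >> 8) & 0xFF  # replaces ctypes.c_uint8(crc >> 8).value
assert (crc_high, crc_low) == (0xFF, 0xF0)
```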
Fix `check_orphans.py` uses unsafe yaml loader
```
paasta-mesos-master::10-81-63-228-uswest2bdevc.dev.yelpcorp.com :
check_orphan_registrations : True
/opt/venvs/paasta-tools/bin/check_orphans.py:39: YAMLLoadWarning:
calling yaml.load() without Loader=... is deprecated, as the default
Loader is unsafe. Please read for full
details. x = yaml.load(f) WARNING:check_orphans:error getting file
from 10.144.177.10 WARNING:ch
``` | @@ -36,7 +36,7 @@ class ExitCode(Enum):
def get_zk_hosts(path: str) -> List[str]:
with open(path) as f:
- x = yaml.load(f)
+ x = yaml.safe_load(f)
return [f"{host}:{port}" for host, port in x]
|
Move test for adding devices to cache of nonexistent pool
This test replaces a removed test which called add-cache
on a nonexistent pool. | Test 'init-cache'.
"""
+# isort: FIRSTPARTY
+from dbus_client_gen import DbusClientUniqueResultError
+
# isort: LOCAL
from stratis_cli import StratisCliErrorCodes
from stratis_cli._errors import StratisCliEngineError, StratisCliPartialChangeError
@@ -88,6 +91,22 @@ class InitCacheFail2TestCase(SimTestCase):
self.check_error(StratisCliPartialChangeError, command_line, _ERROR)
+class InitCacheFail3TestCase(SimTestCase):
+ """
+ Test 'init-cache' for a non-existant pool.
+ """
+
+ _MENU = ["--propagate", "pool", "init-cache"]
+ _POOLNAME = "deadpool"
+
+ def test_init_cache(self):
+ """
+ Intializing the cache must fail since the pool does not exist.
+ """
+ command_line = self._MENU + [self._POOLNAME] + _DEVICE_STRATEGY()
+ self.check_error(DbusClientUniqueResultError, command_line, _ERROR)
+
+
class InitCacheSuccessTestCase(SimTestCase):
"""
Test 'init-cache' once.
|
Fix trpo flaky test
Setting seed=2 makes the flaky test more stable. | @@ -7,6 +7,7 @@ import pytest
import tensorflow as tf
from garage.envs import normalize
+from garage.experiment import deterministic
from garage.experiment import snapshotter
from garage.np.baselines import LinearFeatureBaseline
from garage.tf.algos import TRPO
@@ -108,8 +109,9 @@ class TestTRPO(TfGraphTestCase):
env.close()
- @pytest.mark.flaky
+ @pytest.mark.large
def test_trpo_gru_cartpole(self):
+ deterministic.set_seed(2)
with LocalTFRunner(snapshot_config, sess=self.sess) as runner:
env = TfEnv(normalize(gym.make('CartPole-v1')))
|
Pontoon: Update Gujarati (gu-IN) localization of AMO
Localization authors:
Anvee Malviya | @@ -5,8 +5,8 @@ msgstr ""
"Project-Id-Version: PROJECT 1.0\n"
"Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
"POT-Creation-Date: 2018-08-23 07:48+0000\n"
-"PO-Revision-Date: 2018-08-23 19:46+0000\n"
-"Last-Translator: Hariom Panchal <[email protected]>\n"
+"PO-Revision-Date: 2018-08-07 16:15+0000\n"
+"Last-Translator: Anvee Malviya <[email protected]>\n"
"Language-Team: LANGUAGE <[email protected]>\n"
"Language: gu_IN\n"
"MIME-Version: 1.0\n"
|
Fix for Python 2
This is needed to get pipenv to work on Python 2 hosts (the current version on PyPI is broken). Catches `OSError`, the base class of `FileNotFoundError`, and adds the default argument of `None` to the `utime` call. | @@ -1060,7 +1060,7 @@ def touch_update_stamp():
mkdir_p(PIPENV_CACHE_DIR)
p = os.sep.join((PIPENV_CACHE_DIR, '.pipenv_update_check'))
try:
- os.utime(p)
- except FileNotFoundError:
+ os.utime(p, None)
+ except OSError:
with open(p, 'w') as fh:
fh.write('')
|
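Catching `OSError` works on both interpreters because `FileNotFoundError` only exists on Python 3 (3.3 and later), where it is a subclass of `OSError`; on Python 2 a missing file raises a plain `OSError`. A quick Python 3 check of that relationship:

```python
# FileNotFoundError is a Python 3 subclass of OSError, so `except OSError`
# also covers the missing-file case that Python 3 reports more specifically.
assert issubclass(FileNotFoundError, OSError)
```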
Change Advanced Scala with Cats to Scala with Cats
Underscore.io renamed "Advanced Scala with Cats" to "Scala with Cats" and updated the contents of the book for Cats 1.0.0-RC1. The commit changes the name of the book and its respective link. The old link doesn't work anymore. | @@ -508,13 +508,13 @@ Projects with over 500 stargazers are in bold.
* [Scala Collections Cookbook](http://colobu.com/ScalaCollectionsCookbook/) - Scala collections introduction. written in Chinese.
* [Scala Exercises](http://scala-exercises.47deg.com/) - Brings the popular Scala Koans to the web. Offering hundreds of solvable exercises organized into 42 categories covering the basics of the Scala language.
* [Exercism - Scala Exercises](http://exercism.io/languages/scala/exercises) - Community-driven Scala exercises.
+* [Scala With Cats](https://underscore.io/books/scala-with-cats/) - Learn system architecture and design using the techniques of modern functional programming with [Cats](https://typelevel.org/cats/)
* [Scala in Depth](https://www.manning.com/books/scala-in-depth) - None
* [Scala school](https://twitter.github.io/scala_school/) - Scala school started as a series of lectures at Twitter to prepare experienced engineers to be productive Scala programmers.
* [Essential Scala](https://underscore.io/books/essential-scala/) - None
* [Scalera Blog](http://www.scalera.es) - Blog about Scala language and its environment (howto's, good practices, tips,...). Weekly posts written in both spanish and english
* [The Neophyte's Guide to Scala](http://danielwestheide.com/scala/neophytes.html) - None
* Resources by [Dr. Mark Lewis](http://www.cs.trinity.edu/~mlewis/) >> [Website](http://www.programmingusingscala.net/) | [Youtube Playlists](https://www.youtube.com/user/DrMarkCLewis/playlists)
-* [Advanced Scala With Cats](https://underscore.io/books/advanced-scala/) - Learn system architecture and design using the techniques of modern functional programming with [Cats](https://typelevel.org/cats/)
* [Functional Programming for Mortals](https://leanpub.com/fpmortals/read) - None
* [The Type Astronaut's Guide to Shapeless](https://underscore.io/books/shapeless-guide/) - None
|
More docs for methods in operator.h
Summary: Pull Request resolved: | @@ -671,6 +671,9 @@ class Operator : public OperatorBase {
return OperatorBase::template Input<Tensor>(idx, type);
}
+ /// XOutput is a modernized version of Output which returns a Tensor
+ /// rather than a Tensor* (the raw pointer in the latter case is
+ /// useless, as Tensor is a pointer type.)
Tensor XOutput(int idx, at::IntArrayRef dims, at::TensorOptions options) {
// We'll default device to the device of the current Operator Context
if (options.device_opt() == c10::nullopt) {
@@ -737,10 +740,19 @@ class Operator : public OperatorBase {
return OperatorBase::OutputTensor(idx, dims, options);
}
+ /// Legacy: please consider using the version of Output() which also takes
+ /// dtype and size as arguments.
inline Tensor* Output(int idx, DeviceType type = Context::GetDeviceType()) {
return OperatorBase::template Output<Tensor>(idx, type);
}
+ /// Get the output Tensor of an operator (allocating it if it is not
+ /// already initialized), and copy the contents of src into it.
+ /// You probably don't actually want to use this function (the fact
+ /// that you have a Tensor to copy from is probably a mistake:
+ /// you should have written the output into the output tensor,
+ /// from Output, directly in the first place), but this method
+ /// is situationally useful.
Tensor* OutputTensorCopyFrom(
int idx,
at::TensorOptions options,
|
Reinstate "no cost savings available" warning
This was originally introduced in but the HTML was subsequently
removed (I think during the merging of the new dashboards).
Closes | </div>
</div>
+ <div class="alert alert-warning hidden" id="no-cost-saving-warning">
+ There is currently no cost savings data available for these measures
+ </div>
+
{% verbatim %}
<script id="summary-panel" type="text/x-handlebars-template">
<p>{{ performanceDescription }}</p>
|
Remove the problematic migration entirely
The thumbnail check code is run every time the server is started anyway! | # Generated by Django 2.2.10 on 2020-04-04 12:38
from django.db import migrations
-from django.db.utils import OperationalError, ProgrammingError
-
-from part.models import Part
-from stdimage.utils import render_variations
def create_thumbnails(apps, schema_editor):
"""
Create thumbnails for all existing Part images.
+
+ Note: This functionality is now performed in apps.py,
+ as running the thumbnail script here caused too many database level errors.
+
+ This migration is left here to maintain the database migration history
+
"""
+ pass
- try:
- for part in Part.objects.all():
- # Render thumbnail for each existing Part
- if part.image:
- try:
- part.image.render_variations()
- except FileNotFoundError:
- print("Missing image:", part.image())
- # The image is missing, so clear the field
- part.image = None
- part.save()
-
- except (OperationalError, ProgrammingError):
- # Migrations have not yet been applied - table does not exist
- print("Could not generate Part thumbnails")
class Migration(migrations.Migration):
@@ -35,5 +23,5 @@ class Migration(migrations.Migration):
]
operations = [
- migrations.RunPython(create_thumbnails),
+ migrations.RunPython(create_thumbnails, reverse_code=create_thumbnails),
]
|
Don't include subjects_acceptable in exporting preprint providers
[#OSF-9046] | @@ -19,7 +19,7 @@ from osf.models.preprint_provider import rules_to_subjects
# When preprint_providers exclusively use Subject relations for creation, set this to False
SHOW_TAXONOMIES_IN_PREPRINT_PROVIDER_CREATE = True
-FIELDS_TO_NOT_IMPORT_EXPORT = ['access_token', 'share_source']
+FIELDS_TO_NOT_IMPORT_EXPORT = ['access_token', 'share_source', 'subjects_acceptable']
class PreprintProviderList(PermissionRequiredMixin, ListView):
|
DoubleSided shader error.
PURPOSE
DoubleSided shader error.
EFFECT OF CHANGE
Fixed error RPR_Doublesided. | @@ -1163,9 +1163,9 @@ class RPRShaderNodeDoublesided(RPRShaderNode):
rpr_node = self.create_node(pyrpr.MATERIAL_NODE_TWOSIDED, {})
if shader1:
- rpr_node.set_input(pyrpr.MATERIAL_INPUT_COLOR0, shader1)
+ rpr_node.set_input(pyrpr.MATERIAL_INPUT_FRONTFACE, shader1)
if shader2:
- rpr_node.set_input(pyrpr.MATERIAL_INPUT_COLOR1, shader2)
+ rpr_node.set_input(pyrpr.MATERIAL_INPUT_BACKFACE, shader2)
return rpr_node
|
Improvements to sessions script
remove count because it is very slow; just use the delete return value for logging afterwards
change word in log | @@ -22,24 +22,22 @@ SESSION_AGE_THRESHOLD = 30
def main(dry_run=True):
old_sessions = Session.objects.filter(modified__lt=timezone.now() - datetime.timedelta(days=SESSION_AGE_THRESHOLD))
- initial_count = old_sessions.count()
if dry_run:
- logger.warn('Dry run mode, will delete files and then abort the transaction')
- logger.info('Preparing to Delete {} Sessions older than {} days'.format(initial_count, SESSION_AGE_THRESHOLD))
+ logger.warn('Dry run mode, will delete sessions and then abort the transaction')
+ logger.info('Preparing to delete Session objects older than {} days'.format(SESSION_AGE_THRESHOLD))
with transaction.atomic():
start = time.time()
- old_sessions.delete()
+ sessions_deleted = old_sessions.delete()[1]['osf.Session']
end = time.time()
- logger.info('Deleting {} Session objects took {} seconds'.format(initial_count, end - start))
+ logger.info('Deleting {} Session objects took {} seconds'.format(sessions_deleted, end - start))
if dry_run:
raise Exception('Dry run, aborting the transaction!')
-
@celery_app.task(name='scripts.clear_sessions')
def run_main(dry_run=True):
if not dry_run:
|
Check if CXX compiler supports all the needed functions
* Check if CXX compiler supports all the needed functions
This commit improves the code for PR according to
comments. Instead of checking ubuntu/gcc versions it
checks the support for the needed functions from the C++ compiler
using CHECK_CXX_SOURCE_COMPILES.
Fixes: 5229 | @@ -10,34 +10,6 @@ project(ATen)
cmake_policy(SET CMP0012 NEW)
-# ---[ If running on Ubuntu, check system version and compiler version.
-if(EXISTS "/etc/os-release")
- execute_process(COMMAND
- "sed" "-ne" "s/^ID=\\([a-z]\\+\\)$/\\1/p" "/etc/os-release"
- OUTPUT_VARIABLE OS_RELEASE_ID
- OUTPUT_STRIP_TRAILING_WHITESPACE
- )
- execute_process(COMMAND
- "sed" "-ne" "s/^VERSION_ID=\"\\([0-9\\.]\\+\\)\"$/\\1/p" "/etc/os-release"
- OUTPUT_VARIABLE OS_RELEASE_VERSION_ID
- OUTPUT_STRIP_TRAILING_WHITESPACE
- )
- if(OS_RELEASE_ID STREQUAL "ubuntu")
- if(OS_RELEASE_VERSION_ID VERSION_GREATER "17.04")
- if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
- if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS "6.0.0"
- AND NOT CMAKE_CXX_COMPILER_VERSION VERSION_LESS "5.0.0")
- message(FATAL_ERROR
- "Do not use GCC 5. GCC 5 has a known bug in Ubuntu 17.10"
- " and higher, which won't be fixed. For more information, see: "
- "https://github.com/pytorch/pytorch/issues/5229"
- )
- endif()
- endif()
- endif()
- endif()
-endif()
-
# RPATH stuff
# see https://cmake.org/Wiki/CMake_RPATH_handling
if(APPLE)
@@ -58,6 +30,31 @@ IF(NOT MSVC)
set(CMAKE_C_FLAGS "-fexceptions ${CMAKE_C_FLAGS}")
ENDIF(NOT MSVC)
+INCLUDE(CheckCXXSourceCompiles)
+
+#Check if certain std functions are supported. Sometimes
+#_GLIBCXX_USE_C99 macro is not defined and some functions are missing.
+CHECK_CXX_SOURCE_COMPILES("
+#include <cmath>
+#include <string>
+
+int main() {
+ int a = std::isinf(3.0);
+ int b = std::isnan(0.0);
+ std::string s = std::to_string(1);
+
+ return 0;
+ }" SUPPORT_GLIBCXX_USE_C99)
+
+if(NOT SUPPORT_GLIBCXX_USE_C99)
+ message(FATAL_ERROR
+ "The C++ compiler does not support required functions. "
+ "This is very likely due to a known bug in GCC 5 "
+ "(and maybe other versions) on Ubuntu 17.10 and newer. "
+ "For more information, see: "
+ "https://github.com/pytorch/pytorch/issues/5229"
+ )
+endif()
# Top-level build config
############################################
|
fix lifecycle config rule validation
Fixes | @@ -246,8 +246,15 @@ class Rule(BaseRule):
noncurrent_version_transition=None,
transition=None):
check_status(status)
- if not rule_filter:
- raise ValueError("Rule filter must be provided")
+ if (not abort_incomplete_multipart_upload and not expiration
+ and not noncurrent_version_expiration
+ and not noncurrent_version_transition
+ and not transition):
+ raise ValueError(
+ "at least one of action (AbortIncompleteMultipartUpload, "
+ "Expiration, NoncurrentVersionExpiration, "
+ "NoncurrentVersionTransition or Transition) must be specified "
+ "in a rule")
super().__init__(rule_filter, rule_id)
|
Fix test_early_z_pushed_to_end()
* Fix test_early_z_pushed_to_end()
Make it test what it's intended to test by removing extra optimizations. | @@ -18,21 +18,18 @@ from cirq.google import ExpZGate, ConvertToXmonGates, EjectZ
from cirq.value import Symbol
-def assert_optimizes(before, after):
- pre_optimizations = [
- ConvertToXmonGates(ignore_failures=True)
- ]
- followup_optimizations = [
+def assert_optimizes(before, after,
+ pre_opts=(ConvertToXmonGates(ignore_failures=True),),
+ post_opts=(
ConvertToXmonGates(ignore_failures=True),
- circuits.DropEmptyMoments()
- ]
-
+ circuits.DropEmptyMoments(),
+ )):
opt = EjectZ()
- for pre in pre_optimizations:
+ for pre in pre_opts:
pre.optimize_circuit(before)
opt.optimize_circuit(before)
- for post in followup_optimizations:
+ for post in post_opts:
post.optimize_circuit(before)
post.optimize_circuit(after)
@@ -89,7 +86,9 @@ def test_early_z_pushed_to_end():
circuits.Moment(),
circuits.Moment(),
circuits.Moment([ops.Z(q)**0.5]),
- ]))
+ ]),
+ pre_opts=[ConvertToXmonGates(ignore_failures=True)],
+ post_opts=[ConvertToXmonGates(ignore_failures=True)])
def test_multi_z_merges():
|
Update coin change problem
Optimized the coin change problem solution using dynamic programming. The previous solution used recursion. | -// Recursive C program for
-// coin change problem.
#include <stdio.h>
+#include <string.h>
-// Returns the count of ways we can
-// sum S[0...m-1] coins to get sum n
int count(int S[], int m, int n)
{
- // If n is 0 then there is 1 solution
- // (do not include any coin)
- if (n == 0)
- return 1;
+ // table[i] will be storing the number of solutions for
+ // value i. We need n+1 rows as the table is constructed
+ // in bottom up manner using the base case (n = 0)
+ int table[n + 1];
- // If n is less than 0 then no
- // solution exists
- if (n < 0)
- return 0;
+ // Initialize all table values as 0
+ memset(table, 0, sizeof(table));
- // If there are no coins and n
- // is greater than 0, then no
- // solution exist
- if (m <=0 && n >= 1)
- return 0;
+ // Base case (If given value is 0)
+ table[0] = 1;
- // count is sum of solutions (i)
- // including S[m-1] (ii) excluding S[m-1]
- return count( S, m - 1, n ) + count( S, m, n-S[m-1] );
+ // Pick all coins one by one and update the table[] values
+ // after the index greater than or equal to the value of the
+ // picked coin
+ for (int i = 0; i < m; i++)
+ for (int j = S[i]; j <= n; j++)
+ table[j] += table[j - S[i]];
+
+ return table[n];
}
-// Driver program to test above function
-int main()
+int main(int argc, char const *argv[])
{
- int i, j;
int arr[] = {1,2,3};
+
int m = sizeof(arr)/sizeof(arr[0]);
- printf("%d ", count(arr, m, 4));
- getchar();
+ int n=4;
+ printf("%d \n",count(arr,m,n));
return 0;
}
|
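For reference, the same bottom-up recurrence is easy to sanity-check with a short Python re-implementation (illustrative only, not part of the patch); with coins {1, 2, 3} and a target of 4 it counts the four combinations 1+1+1+1, 1+1+2, 2+2 and 1+3:

```python
def count(coins, n):
    # table[j] = number of ways to form amount j; one way to form 0 (use no coins)
    table = [1] + [0] * n
    for coin in coins:
        for j in range(coin, n + 1):
            table[j] += table[j - coin]
    return table[n]

assert count([1, 2, 3], 4) == 4
```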
[skip-ci][COMMUNITY] New committer Ashutosh Parkhi
[COMMUNITY] New committer Ashutosh Parkhi | @@ -62,6 +62,7 @@ We do encourage everyone to work anything they are interested in.
- [Trevor Morris](https://github.com/trevor-m): @trevor-m - byoc, compiler
- [Leandro Nunes](https://github.com/leandron) (PMC): @leandron - tvmc
- [Lily Orth-Smith](https://github.com/electriclilies): @electriclilies - relay
+- [Ashutosh Parkhi](https://github.com/ashutosh-arm): @ashutosh-arm - cmsis-nn
- [Krzysztof Parzyszek](https://github.com/kparzysz-quic) (PMC): @kparzysz-quic - hexagon, llvm
- [Andrew Reusch](https://github.com/areusch): (PMC) @areusch - runtime, microTVM
- [David Riazati](https://github.com/driazati): @driazati - ci, community
@@ -151,8 +152,8 @@ We do encourage everyone to work anything they are interested in.
- [Lily Orth-Smith](https://github.com/electriclilies): @electriclilies
- [Wei Pan](https://github.com/wpan11nv): @wpan11nv
- [Michalis Papadimitriou](https://github.com/mikepapadim): @mikepapadim
-- [Ashutosh Parkhi](https://github.com/ashutosh-arm): @ashutosh-arm
- [Krzysztof Parzyszek](https://github.com/kparzysz-quic): @kparzysz-quic
+- [Ashutosh Parkhi](https://github.com/ashutosh-arm): @ashutosh-arm
- [Alexander Peskov](https://github.com/apeskov): @apeskov
- [Pariksheet Pinjari](https://github.com/PariksheetPinjari909): @PariksheetPinjari909
- [Josh Pollock](https://github.com/joshpoll): @joshpoll
|
Updated updater.py
Changed None to ``None`` in the docstring | @@ -183,7 +183,7 @@ class Updater:
Returns:
Local file path if the file is an up to date target file.
- None if file is not found or it is not up to date.
+ ``None`` if file is not found or it is not up to date.
"""
if filepath is None:
|
hip minor fix for c10
Summary:
TSIA
Pull Request resolved: | @@ -374,7 +374,7 @@ struct DefaultHIPAllocator final : public at::Allocator {
// lock the mutex
std::lock_guard<std::mutex> lock(HIPContext::mutex());
- if (FLAGS_caffe2_gpu_memory_tracking) {
+ if (c10::FLAGS_caffe2_gpu_memory_tracking) {
auto sz_it = g_size_map.find(ptr);
DCHECK(sz_it != g_size_map.end());
auto aff_it = g_hip_device_affiliation.find(ptr);
@@ -416,7 +416,7 @@ struct DefaultHIPAllocator final : public at::Allocator {
}
case HipMemoryPoolType::THC: {
HIP_ENFORCE(g_thc_allocator->Free(ptr));
- if (FLAGS_caffe2_gpu_memory_tracking) {
+ if (c10::FLAGS_caffe2_gpu_memory_tracking) {
g_hip_device_affiliation.erase(g_hip_device_affiliation.find(ptr));
}
break;
|
Update src/dash-table/dash/DataTable.js
Add XSS vulnerability warning | @@ -487,6 +487,8 @@ export const propTypes = {
]),
/**
* (default: False) If True, html may be used in markdown cells
+ * Be careful enabling html if the content being rendered can come
+ * from an untrusted user, as this may create an XSS vulnerability.
*/
html: PropTypes.bool
}),
|
Add time_to_anomaly method
Also simplifies the `propagate_to_anomaly` method | @@ -1024,6 +1024,27 @@ class Orbit(object):
new_epoch,
)
+ @u.quantity_input(value=u.rad)
+ def time_to_anomaly(self, value):
+ """ Returns time required to be in a specific true anomaly.
+
+ Parameters
+ ----------
+ value : ~astropy.units.Quantity
+
+ Returns
+ -------
+ tof: ~astropy.units.Quantity
+ Time of flight required.
+ """
+
+ # Compute time of flight for correct epoch
+ M = nu_to_M(self.nu, self.ecc)
+ new_M = nu_to_M(value, self.ecc)
+ tof = Angle(new_M - M).wrap_at(360 * u.deg) / self.n
+
+ return tof
+
@u.quantity_input(value=u.rad)
def propagate_to_anomaly(self, value):
"""Propagates an orbit to a specific true anomaly.
@@ -1038,25 +1059,17 @@ class Orbit(object):
Resulting orbit after propagation.
"""
- # TODO: Avoid repeating logic with mean_motion?
- p, ecc, inc, raan, argp, _ = rv2coe(
- self.attractor.k.to(u.km ** 3 / u.s ** 2).value,
- self.r.to(u.km).value,
- self.v.to(u.km / u.s).value,
- )
# Compute time of flight for correct epoch
- M = nu_to_M(self.nu, self.ecc)
- new_M = nu_to_M(value, self.ecc)
- time_of_flight = Angle(new_M - M).wrap_at(360 * u.deg) / self.n
+ time_of_flight = self.time_to_anomaly(value)
return self.from_classical(
self.attractor,
- p / (1.0 - ecc ** 2) * u.km,
- ecc * u.one,
- inc * u.rad,
- raan * u.rad,
- argp * u.rad,
+ self.a,
+ self.ecc,
+ self.inc,
+ self.raan,
+ self.argp,
value,
epoch=self.epoch + time_of_flight,
plane=self.plane,
|
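In the notation of the diff above, the time of flight returned by `time_to_anomaly` is just the mean-anomaly difference, wrapped to one revolution, divided by the mean motion (writing `nu_to_M` as M(nu) and the mean motion as n):

```latex
\Delta t = \frac{\bigl(M(\nu_{\mathrm{target}}) - M(\nu)\bigr) \bmod 2\pi}{n}
```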
Pass schema specified during reflection into generated model meta.
Refs | @@ -586,6 +586,7 @@ class Introspector(object):
class BaseModel(Model):
class Meta:
database = self.metadata.database
+ schema = self.schema
def _create_model(table, models):
for foreign_key in database.foreign_keys[table]:
|
Enable linker preprocessing for armclang.
This should be temporary; for some reason the .sct cpp shebang isn't working for me. Same result in any case. | @@ -23,6 +23,9 @@ class MakefileArmclang(MakefileTool):
def __init__(self, workspace, env_settings):
MakefileTool.__init__(self, workspace, env_settings, logger)
+ # enable preprocessing linker files for GCC ARM
+ self.workspace['preprocess_linker_file'] = True
+ self.workspace['linker_extension'] = '.sct'
@staticmethod
def get_toolnames():
|
Add loading of log configuration
Add loading of config files for the PoET engine. This will allow for
broader configuration of the logging subsystem. | @@ -80,6 +80,13 @@ def main(args=None):
opts = parse_args(args)
try:
+ log_config = get_log_config('poet-engine-log-config.toml')
+ if log_config is None:
+ log_config = get_log_config('poet-engine-log-config.yaml')
+
+ if log_config is not None:
+ log_configuration(log_config=log_config)
+ else:
log_dir = get_log_dir()
log_configuration(
log_dir=log_dir,
|
Update pubsub snippets to accommodate changed semantics.
Re-assign 'policy.viewers'/'policy.editors', rather than mutating them
in place. | @@ -124,9 +124,9 @@ def topic_iam_policy(client, to_delete):
# [START topic_set_iam_policy]
ALL_USERS = policy.all_users()
- policy.viewers.add(ALL_USERS)
+ policy.viewers = [ALL_USERS]
LOGS_GROUP = policy.group('[email protected]')
- policy.editors.add(LOGS_GROUP)
+ policy.editors = [LOGS_GROUP]
new_policy = topic.set_iam_policy(policy) # API request
# [END topic_set_iam_policy]
@@ -395,9 +395,9 @@ def subscription_iam_policy(client, to_delete):
# [START subscription_set_iam_policy]
ALL_USERS = policy.all_users()
- policy.viewers.add(ALL_USERS)
+ policy.viewers = [ALL_USERS]
LOGS_GROUP = policy.group('[email protected]')
- policy.editors.add(LOGS_GROUP)
+ policy.editors = [LOGS_GROUP]
new_policy = subscription.set_iam_policy(policy) # API request
# [END subscription_set_iam_policy]
|
Fix PY3 h5py error when writing list of strings as attr
(TypeError: No conversion path for dtype: dtype('<U2'))
This solution ensures the HDF5 encoding of strings is identical on all versions of Python: variable length | @@ -856,7 +856,9 @@ def save_NXdata(filename, signal, axes,
data_group = entry.create_group(nxdata_name)
data_group.attrs["NX_class"] = "NXdata"
data_group.attrs["signal"] = signal_name
- data_group.attrs["axes"] = axes_names
+ data_group.attrs["axes"] = numpy.array(
+ axes_names,
+ dtype=h5py.special_dtype(vlen=six.text_type)) # variable length UTF-8
if title:
# not in NXdata spec, but implemented by nexpy
data_group["title"] = title
|
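The behaviour fixed above can be reproduced outside the library: handing h5py a plain list of Python 3 `str` lets NumPy choose a fixed-width unicode dtype (`<U...`), which some h5py versions cannot convert, whereas forcing a variable-length string dtype encodes identically everywhere. A minimal sketch under those assumptions (the file and attribute names are made up):

```python
import numpy
import h5py

with h5py.File('example.h5', 'w') as f:
    data_group = f.create_group('data')
    axes_names = ['energy', 'angle']
    # Variable-length UTF-8 strings: the same HDF5 encoding on Python 2 and 3
    data_group.attrs['axes'] = numpy.array(
        axes_names, dtype=h5py.special_dtype(vlen=str))
```

Note: `vlen=str` is the Python 3 spelling; the patch itself uses `six.text_type`, so the same line also works on Python 2.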
Update readme
Addresses Issue | @@ -38,11 +38,10 @@ This open-source manuscript is a gateway for entering the Landlab world:
http://www.earth-surf-dynam.net/5/21/2017/
-After installation, tests can be run with:
+Two main installation options exist for Landlab. Most people will likely want to
+[install the conda package](https://github.com/landlab/landlab/wiki/Installing-Landlab-with-Anaconda). Individuals interested in modifying the Landlab source code should follow the [developer installation instructions](https://github.com/landlab/landlab/wiki/Installing-Landlab-from-source-code-(%22developer-install%22)).
- $ python -c 'import landlab; landlab.test()'
-
-The most current development version is always available from our git
+The most current source code is always available from our git
repository:
http://github.com/landlab/landlab
|
Relay: Support `--loglevel <level>` fully
There's no real logging in Relay yet, but this makes it support
switching level anyway. | Relays sit below an announcer, or another relay, and simply repeat what
they receive over PUB/SUB.
"""
-# Logging has to be configured first before we do anything.
+import argparse
+import gevent
+import hashlib
import logging
+import simplejson
+import time
+import uuid
+import zlib
from threading import Thread
-import time
+import zmq.green as zmq
+
+# Logging has to be configured first before we do anything.
logger = logging.getLogger(__name__)
-import zlib
+logger.setLevel(logging.INFO)
+__logger_channel = logging.StreamHandler()
+__logger_formatter = logging.Formatter(
+ '%(asctime)s - %(levelname)s - %(module)s:%(lineno)d: %(message)s'
+ )
+__logger_formatter.default_time_format = '%Y-%m-%d %H:%M:%S'
+__logger_formatter.default_msec_format = '%s.%03d'
+__logger_channel.setFormatter(__logger_formatter)
+logger.addHandler(__logger_channel)
+logger.info('Made logger')
-import gevent
-import simplejson
-import hashlib
-import uuid
-import zmq.green as zmq
from eddn.conf.Settings import Settings, loadConfig
from gevent import monkey
@@ -25,6 +37,7 @@ monkey.patch_all()
from bottle import Bottle, get, request, response, run
app = Bottle()
+
# This import must be done post-monkey-patching!
from eddn.core.StatsCollector import StatsCollector
statsCollector = StatsCollector()
@@ -37,6 +50,26 @@ if Settings.RELAY_DUPLICATE_MAX_MINUTES:
duplicateMessages.start()
+def parse_cl_args():
+ parser = argparse.ArgumentParser(
+ prog='Gateway',
+ description='EDDN Gateway server',
+ )
+
+ parser.add_argument(
+ '--loglevel',
+ help='Logging level to output at',
+ )
+
+ parser.add_argument(
+ '-c', '--config',
+ metavar='config filename',
+ nargs='?',
+ default=None,
+ )
+
+ return parser.parse_args()
+
@app.route('/stats/', method=['OPTIONS', 'GET'])
def stats():
stats = statsCollector.getSummary()
@@ -171,7 +204,12 @@ class EnableCors(object):
def main():
- loadConfig()
+ cl_args = parse_cl_args()
+ if cl_args.loglevel:
+ logger.setLevel(cl_args.loglevel)
+
+ loadConfig(cl_args)
+
r = Relay()
r.start()
|
Update lbaas-driver-v2 releasenotes
Update lbaas-driver-v2 releasenotes | @@ -8,3 +8,6 @@ features:
fixes:
- Includes the following bug fixes
Bug 1640076 - Using odl lbaas driver_v2 to create listener failed.
+ Bug 1633030 - Using odl lbaas driver_v2 to create loadbalancer failed.
+ Bug 1613583 - Odl lbaas driver_v2 Line 61 url_path error.
+ Bug 1613583 - Using ODL lbaas driver_v2 to create member failed.
\ No newline at end of file
|
custom fields: Add frontend validations in textual custom fields.
Add validations for short and long textual custom fields in
frontend. | <div class="user-name-section custom_user_field">
<label for="{{ field_name }}" class="title">{{ field_name }}</label>
{{#if is_long_text_field}}
- <textarea name="{{ field_name }}" id="{{ field_id }}">{{ field_value }}</textarea>
+ <textarea name="{{ field_name }}" id="{{ field_id }}" maxlength="500">{{ field_value }}</textarea>
{{else}}
- <input type="{{ field_type }}" name="{{ field_name }}" id="{{ field_id }}" value="{{ field_value }}" />
+ <input type="{{ field_type }}" name="{{ field_name }}" id="{{ field_id }}" value="{{ field_value }}" maxlength="50" />
{{/if}}
<div class="field_hint">{{ field_hint }}</div>
</div>
|
Update generic.txt
dedup of ```cobaltstrike``` | @@ -7200,13 +7200,6 @@ regsvr32.kz
webfax.org
yahoo.org.kz
-# Reference: https://twitter.com/SBousseaden/status/1221834746084368385
-# Reference: https://app.any.run/tasks/4a40a89c-bddd-4df8-993e-5732d8a52133/
-# Reference: https://www.virustotal.com/gui/domain/securelogonweb.com/relations
-# Reference: https://www.virustotal.com/gui/file/a8abcfde1a8d2eb3008e346c68ab4486c402e8d4dcd8d17e56787fa1c52e616b/detection
-
-securelogonweb.com
-
# Reference: https://twitter.com/FewAtoms/status/1224372841786855425
http://13.234.231.211
|
Deflake test_client_library_integration
Using ray_start_regular_shared test_tune_library_integration seems to make test_serve_handle flake. Separate use ray_start_regular instead.
No flake:
<img width="610" alt="Screen Shot 2022-07-27 at 1 10 59 PM" src="https://user-images.githubusercontent.com/14043490/181363214-522e9f41-df59-4b84-89b1-d8399b1901c6.png"> | @@ -8,7 +8,7 @@ from ray._private.client_mode_hook import enable_client_mode, client_mode_should
@pytest.mark.skip(reason="KV store is not working properly.")
-def test_rllib_integration(ray_start_regular_shared):
+def test_rllib_integration(ray_start_regular):
with ray_start_client_server():
import ray.rllib.algorithms.dqn as dqn
@@ -34,7 +34,7 @@ def test_rllib_integration(ray_start_regular_shared):
trainer.train()
-def test_rllib_integration_tune(ray_start_regular_shared):
+def test_rllib_integration_tune(ray_start_regular):
with ray_start_client_server():
# Confirming the behavior of this context manager.
# (Client mode hook not yet enabled.)
@@ -49,7 +49,7 @@ def test_rllib_integration_tune(ray_start_regular_shared):
@pytest.mark.asyncio
-async def test_serve_handle(ray_start_regular_shared):
+async def test_serve_handle(ray_start_regular):
with ray_start_client_server() as ray:
from ray import serve
|
Fix debug message
We're fetching the destination tag here. The build tag is something
different. | @@ -212,18 +212,18 @@ def stream_task_output(session, task_id, file_name,
def tag_koji_build(session, build_id, target, poll_interval=5):
- logger.debug('Finding build tag for target %s', target)
+ logger.debug('Finding destination tag for target %s', target)
target_info = session.getBuildTarget(target)
- build_tag = target_info['dest_tag_name']
- logger.info('Tagging build with %s', build_tag)
- task_id = session.tagBuild(build_tag, build_id)
+ dest_tag = target_info['dest_tag_name']
+ logger.info('Tagging build with %s', dest_tag)
+ task_id = session.tagBuild(dest_tag, build_id)
task = TaskWatcher(session, task_id, poll_interval=poll_interval)
task.wait()
if task.failed():
raise RuntimeError('Task %s failed to tag koji build' % task_id)
- return build_tag
+ return dest_tag
def get_koji_task_owner(session, task_id, default=None):
|
Make abc.GuildChannel.overwrites return a dictionary
Fix | @@ -346,16 +346,16 @@ class GuildChannel:
def overwrites(self):
"""Returns all of the channel's overwrites.
- This is returned as a list of two-element tuples containing the target,
- which can be either a :class:`Role` or a :class:`Member` and the overwrite
- as the second element as a :class:`PermissionOverwrite`.
+ This is returned as a dictionary where the key contains the target which
+ can be either a :class:`Role` or a :class:`Member` and the key is the
+ overwrite as a :class:`PermissionOverwrite`.
Returns
--------
- List[Tuple[Union[:class:`Role`, :class:`Member`], :class:`PermissionOverwrite`]]:
+ Mapping[Union[:class:`Role`, :class:`Member`], :class:`PermissionOverwrite`]:
The channel's permission overwrites.
"""
- ret = []
+ ret = {}
for ow in self._overwrites:
allow = Permissions(ow.allow)
deny = Permissions(ow.deny)
@@ -365,8 +365,7 @@ class GuildChannel:
target = self.guild.get_role(ow.id)
elif ow.type == 'member':
target = self.guild.get_member(ow.id)
-
- ret.append((target, overwrite))
+ ret[target] = overwrite
return ret
@property
|
Fix file paths and urls for deployment smoke test
* Fix file paths and urls for deployment smoke test
The documentation for smoke testing a deployed version of kfserving are out of date / incorrect.
* Update DEVELOPER_GUIDE.md
* Update DEVELOPER_GUIDE.md | @@ -208,9 +208,14 @@ make deploy-dev-storageInitializer
- **Note**: These commands also publishes to `KO_DOCKER_REPO` with the image of version 'latest', and change the configmap of your cluster to point to the new built images. It's just for development and testing purpose so you need to do it one by one. In configmap, for predictors it will just keep the one in development, for exlainer and storage initializer will just change the item impacted and set all others images including the `kfserving-controller-manager` and `logger` to be default.
### Smoke test after deployment
+
+Run the following command to smoke test the deployment,
+e.g. for the `v1beta1` version of kfserving:
+
```bash
kubectl apply -f docs/samples/v1beta1/tensorflow/tensorflow.yaml
```
+
You should see model serving deployment running under default or your specified namespace.
```console
@@ -222,7 +227,13 @@ $ kubectl get pods -n default -l serving.kubeflow.org/inferenceservice=flowers-s
NAME READY STATUS RESTARTS AGE
flowers-sample-default-htz8r-deployment-8fd979f9b-w2qbv 3/3 Running 0 10s
```
-NOTE: KFServing scales pods to 0 in the absence of traffic. If you don't see any pods, try sending out a query via curl using instructions in the tensorflow sample: https://github.com/kubeflow/kfserving/tree/master/docs/samples/v1beta1/tensorflow
+
+NOTE: KFServing scales pods to 0 in the absence of traffic when `minReplicas` is set to `0`.
+If you don't see any pods, try sending out a query via curl using instructions in the
+tensorflow sample
+([`v1alpha2` docs](https://github.com/kubeflow/kfserving/blob/master/docs/samples/v1alpha2/tensorflow/README.md),
+[`v1beta1` docs](https://github.com/kubeflow/kfserving/blob/master/docs/samples/v1beta1/tensorflow/README.md)).
+
## Iterating
|
workloads/pcmark: Fix reading results in python3
Ensure that the results file is decoded when using python3. | #
import os
import re
+import sys
import zipfile
from wa import ApkUiautoWorkload
@@ -58,6 +59,8 @@ class PcMark(ApkUiautoWorkload):
def update_output(self, context):
expected_results = len(self.regex_matches)
zf = zipfile.ZipFile(os.path.join(context.output_directory, self.result_file), 'r').read('Result.xml')
+ if sys.version_info[0] == 3:
+ zf = zf.decode(sys.stdout.encoding)
for line in zf.split('\n'):
for regex in self.regex_matches:
match = regex.search(line)
|
Fix remaining pylint errors.
Simplify if expression and fix the remaining unnecessary else statement. | @@ -288,7 +288,7 @@ def callbacks(app): # pylint: disable=redefined-outer-name
return {
'atomScale': atom_radius,
'relativeAtomScale': relative_atom_radius,
- 'bonds': True if len(show_bonds) > 0 else False,
+ 'bonds': bool(len(show_bonds) > 0),
'bondScale': bond_scale,
'ao': ambient_occlusion,
'brightness': brightness,
@@ -342,7 +342,6 @@ def callbacks(app): # pylint: disable=redefined-outer-name
def keep_atom_style(render, current):
if render == 'default':
return None
- else:
return current
|
add wait_blocker split timeout
add close_process method in restart_browser method. | @@ -645,7 +645,7 @@ class WebappInternal(Base):
logger().debug('Reloading user screen')
- self.driver_refresh()
+ self.restart_browser()
if self.config.coverage:
self.driver.get(f"{self.config.url}/?StartProg=CASIGAADV&A={self.config.initial_program}&Env={self.config.environment}")
@@ -653,7 +653,7 @@ class WebappInternal(Base):
if not self.config.skip_environment and not self.config.coverage:
self.program_screen(self.config.initial_program)
- self.wait_element_timeout(term="[name='cGetUser'] > input",
+ self.wait_element_timeout(term="[name='cGetUser']",
scrap_type=enum.ScrapType.CSS_SELECTOR, timeout = self.config.time_out , main_container='body')
@@ -2144,7 +2144,7 @@ class WebappInternal(Base):
logger().debug("Waiting blocker to continue...")
soup = None
result = True
- endtime = time.time() + 300
+ endtime = time.time() + self.config.time_out / 5
while (time.time() < endtime and result):
blocker_container = None
@@ -2166,6 +2166,9 @@ class WebappInternal(Base):
except:
pass
+ logger().debug(f'Blocker status: {blocker}')
+ time.sleep(1)
+
if blocker:
result = True
else:
@@ -2173,10 +2176,6 @@ class WebappInternal(Base):
if time.time() > endtime:
self.check_blocked_container(blocker_container_soup)
- self.log.take_screenshot_log(driver=self.driver, description='wait_blocker', stack_item=self.log.get_testcase_stack())#TODO trecho inserido para analise
- # if self.search_stack("Setup"):
- # self.restart_counter + 1
- # self.log_error('Blocked property timeout')
return result
@@ -9902,5 +9901,18 @@ class WebappInternal(Base):
logger().info("Closing the Browser")
self.driver.close()
+ self.close_process()
logger().info("Starting the Browser")
self.Start()
+
+ def close_process(self):
+ """
+ [Internal]
+ """
+ logger().debug('Closing process')
+ try:
+ os.system("taskkill /f /im firefox.exe")
+ os.system("taskkill /f /im geckodriver.exe")
+ except Exception as e:
+ logger().debug(f'Close process error: {str(e)}')
+
|
retune the picking method in vtkFrameWidgetRepresentation
adds a common pick function
the pick function tries with tolerance=0, then tolerance=0.005
The picking tolerance is not easy to use, and the behavior seems to
have changed between vtk7 and vtk8. This commit adds a workaround that
seems to work ok with both versions. | @@ -208,6 +208,21 @@ DataRep MakeDisk(double radius, double handleRadius, int axis)
return DataRepFromPolyData(Transform(d->GetOutput(), t));
}
+vtkDataSet* PickDataSet(vtkPicker* picker, int x, int y, vtkRenderer* renderer)
+{
+ picker->SetTolerance(0.0);
+ picker->Pick(x, y, 0.0, renderer);
+ if (picker->GetDataSet())
+ {
+ return picker->GetDataSet();
+ }
+
+ picker->SetTolerance(0.005);
+ picker->Pick(x, y, 0.0, renderer);
+ return picker->GetDataSet();
+}
+
+
class vtkFrameWidgetRepresentation::vtkInternal {
public:
vtkInternal()
@@ -232,7 +247,6 @@ public:
void InitPicker()
{
this->AxesPicker = vtkSmartPointer<vtkCellPicker>::New();
- this->AxesPicker->SetTolerance(0.01);
this->AxesPicker->PickFromListOn();
for (size_t i = 0; i < this->Reps.size(); ++i)
@@ -758,8 +772,7 @@ void vtkFrameWidgetRepresentation::HighlightActor(vtkDataSet* dataset)
//----------------------------------------------------------------------------
void vtkFrameWidgetRepresentation::OnMouseHover(double e[2])
{
- this->Internal->AxesPicker->Pick(e[0], e[1], 0.0, this->Renderer);
- vtkDataSet* dataset = this->Internal->AxesPicker->GetDataSet();
+ vtkDataSet* dataset = PickDataSet(this->Internal->AxesPicker, e[0], e[1], this->Renderer);
this->HighlightActor(dataset);
}
@@ -778,8 +791,8 @@ int vtkFrameWidgetRepresentation::ComputeInteractionState(int X, int Y, int vtkN
this->Internal->Transform->GetPosition(this->InteractionStartWorldPoint);
// Check if the axes actor was picked
- this->Internal->AxesPicker->Pick(X,Y,0.0,this->Renderer);
- vtkDataSet* dataset = this->Internal->AxesPicker->GetDataSet();
+ vtkDataSet* dataset = PickDataSet(this->Internal->AxesPicker, X, Y, this->Renderer);
+
if (dataset)
{
|
changed fixture for get_cache_dir() test
this hopefully fixes the build errors on appveyor for windows builds | @@ -114,28 +114,24 @@ def test_validate_project_urls():
@pytest.fixture
-def fixture_env_variable():
+def patch_for_get_cache_dir():
# storing current environmental variables for resetting later
- current_xdg = fv.os.environ.get("XDG_CACHE_HOME", None)
- current_platform = fv.sys.platform
- curernt_os_name = fv.os.name
+ original_xdg = fv.os.environ.get("XDG_CACHE_HOME", None)
+ original_expanduser = fv.os.path.expanduser
# change the environmental variables for the test
fv.os.environ["XDG_CACHE_HOME"] = "/dev/null/nonexistent"
- fv.sys.platform = "linux"
- fv.os.name = "posix"
+ fv.os.path.expanduser = lambda x: "/dev/null/nonexistent"
# return controll to the test function
yield
# reset the previously stored variables
- if current_xdg is not None:
- fv.os.environ["XDG_CACHE_HOME"] = current_xdg
- fv.sys.platform = current_platform
- fv.os.name = curernt_os_name
+ if original_xdg is not None:
+ fv.os.environ["XDG_CACHE_HOME"] = original_xdg
+ fv.os.path.expanduser = original_expanduser
-
-def test_get_cache_with_temporary_directory(fixture_env_variable):
+def test_get_cache_with_temporary_directory(patch_for_get_cache_dir):
# clear the functools.lru_cache, might be prefilled from other tests
fv.get_cache_dir.cache_clear()
|
help-docs: Update Homebrew instructions for the latest release on macOS.
The command `brew cask` is no longer a `brew` command as of Homebrew
version 3.5.2.
Updates the instruction to use `brew <command> --cask` instead.
Fixes: | @@ -14,7 +14,6 @@ look at the newest features, consider the [beta releases](#install-a-beta-releas
{tab|mac}
#### Disk image (recommended)
-<!-- TODO why zip? -->
1. Download [Zulip for macOS](https://zulip.com/apps/mac).
@@ -24,12 +23,12 @@ The app will update automatically to future versions.
#### Homebrew
-1. Run `brew cask install zulip` in Terminal.
+1. Run the command `brew install --cask zulip` from a terminal.
-1. Run Zulip from `Applications`. <!-- TODO fact check -->
+1. Run Zulip from `Applications`.
-The app will update automatically to future versions. `brew upgrade` will
-also work, if you prefer.
+The app will update automatically to future versions. Alternatively, you can
+run the command `brew upgrade zulip` to immediately upgrade.
{tab|windows}
|
Update build.gradle
Forgot to rebase before adding the tasks for building docs without testing. | @@ -401,7 +401,7 @@ task makeHailDocs(type: Exec, dependsOn: ['shadowJar', 'setupTutorial', 'makeFun
environment PYTHONPATH: '' + projectDir + '/python:' + sparkHome + '/python:' + sparkHome + '/python/lib/py4j-' + py4jVersion + '-src.zip'
}
-task makeHailDocsNoTest(type: Exec, dependsOn: ['runPandoc', 'prepareJavascript', 'copyHtml']) {
+task makeHailDocsNoTest(type: Exec, dependsOn: ['shadowJar', 'setupTutorial', 'makeFunctionsRst']) {
workingDir 'python/hail/docs'
commandLine 'make', 'clean', 'html'
environment SPARK_HOME: sparkHome
|
Fix for "part" form fields
Specify "default" rather than overriding "value" | @@ -74,34 +74,34 @@ function partFields(options={}) {
icon: 'fa-boxes',
},
component: {
- value: global_settings.PART_COMPONENT,
+ default: global_settings.PART_COMPONENT,
group: 'attributes',
},
assembly: {
- value: global_settings.PART_ASSEMBLY,
+ default: global_settings.PART_ASSEMBLY,
group: 'attributes',
},
is_template: {
- value: global_settings.PART_TEMPLATE,
+ default: global_settings.PART_TEMPLATE,
group: 'attributes',
},
trackable: {
- value: global_settings.PART_TRACKABLE,
+ default: global_settings.PART_TRACKABLE,
group: 'attributes',
},
purchaseable: {
- value: global_settings.PART_PURCHASEABLE,
+ default: global_settings.PART_PURCHASEABLE,
group: 'attributes',
onEdit: function(value, name, field, options) {
setFormGroupVisibility('supplier', value, options);
}
},
salable: {
- value: global_settings.PART_SALABLE,
+ default: global_settings.PART_SALABLE,
group: 'attributes',
},
virtual: {
- value: global_settings.PART_VIRTUAL,
+ default: global_settings.PART_VIRTUAL,
group: 'attributes',
},
};
|
Fixes README.md: 104: MD046/code-block-style
104: MD046/code-block-style Code block style
[Expected: fenced; Actual: indented] | @@ -101,7 +101,9 @@ cloning the repository directly into your [Sublime Text Packages directory].
You can locate your Sublime Text Packages directory by using the menu item
`Preferences` -> `Browse Packages...`
+```bash
git clone https://github.com/jonlabelle/SublimeJsPrettier.git "JsPrettier"
+```
## Usage
|
Add reference/target to the list of allowed sphinx nodes
In order to be able to add outside links in docstrings
For libadalang#923 | @@ -1370,7 +1370,7 @@ SUPPORTED_TAGS = [
"#text", "comment", "field", "paragraph", "list_item", "literal_block",
"enumerated_list", "field_name", "document", "bullet_list",
"system_message", "problematic", "warning", "field_list",
- "field_name", "field_body", "block_quote"
+ "field_name", "field_body", "block_quote", "reference", "target"
] + SUPPORTED_ADMONITIONS + list(TAGNAMES_WITH_SURROUNDINGS.keys())
SKIP_CHILDREN = ["field_name", "literal_block"]
|
Add tkFileDialog to future.movers.tkinter
Related to issue / Commit | @@ -10,3 +10,9 @@ else:
except ImportError:
raise ImportError('The FileDialog module is missing. Does your Py2 '
'installation include tkinter?')
+
+ try:
+ from tkFileDialog import *
+ except ImportError:
+ raise ImportError('The tkFileDialog module is missing. Does your Py2 '
+ 'installation include tkinter?')
|
fix loading 1dim tensor from 0.3.* to 0dim tensor
Summary:
This PR fixes .
Adds backward-compatible support for loading checkpoints from 0.3.* that contain 1-dim tensors, which are now 0-dim tensors in 0.4+.
Pull Request resolved: | @@ -642,6 +642,10 @@ class Module(object):
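For context, the shape relationship the new branch bridges, shown with plain PyTorch (values are arbitrary):

```python
import torch

# A parameter saved by 0.3.* as a 1-element vector...
saved = torch.tensor([0.5])   # shape: torch.Size([1])

# ...maps onto a scalar (0-dim) parameter in 0.4+.
param = torch.tensor(0.0)     # shape: torch.Size([])

# Indexing the saved value yields a 0-dim tensor of matching shape,
# which is what the added branch produces before the shape check runs.
assert saved[0].shape == param.shape
param.data.copy_(saved[0])
```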
if key in state_dict:
input_param = state_dict[key]
+ # Backward compatibility: loading 1-dim tensor from 0.3.* to version 0.4+
+ if len(param.shape) == 0 and len(input_param.shape) == 1:
+ input_param = input_param[0]
+
if input_param.shape != param.shape:
# local shape should match the one in checkpoint
error_msgs.append('size mismatch for {}: copying a param of {} from checkpoint, '
|
Update 10setupBlitz.sh
Fixed a typo that advertised one more option than is actually available.
"..choose from these 5 options ..." > "..choose from these 4 options.." | @@ -196,7 +196,7 @@ if [ ${mountOK} -eq 1 ]; then
if [ ${network} = "bitcoin" ]; then
echo "Bitcoin Options"
menuitem=$(dialog --clear --beep --backtitle "RaspiBlitz" --title "Getting the Blockchain" \
- --menu "You need a copy of the Bitcoin Blockchain - you have 5 options:" 13 75 5 \
+ --menu "You need a copy of the Bitcoin Blockchain - you have 4 options:" 13 75 5 \
T "TORRENT --> MAINNET + TESTNET thru Torrent (DEFAULT)" \
C "COPY --> BLOCKCHAINDATA from another node with SCP" \
N "CLONE --> BLOCKCHAINDATA from 2nd HDD (extra cable)"\
@@ -206,7 +206,7 @@ if [ ${mountOK} -eq 1 ]; then
elif [ ${network} = "litecoin" ]; then
echo "Litecoin Options"
menuitem=$(dialog --clear --beep --backtitle "RaspiBlitz" --title "Getting the Blockchain" \
- --menu "You need a copy of the Litecoin Blockchain - you have 3 options:" 13 75 4 \
+ --menu "You need a copy of the Litecoin Blockchain - you have 2 options:" 13 75 4 \
T "TORRENT --> MAINNET thru Torrent (DEFAULT)" \
S "SYNC --> MAINNET thru Litecoin Network (FALLBACK+SLOW)" 2>&1 >/dev/tty)
|
mergify: add stable-6.0 backport configuration
This adds the stable-6.0 backport configuration in mergify. | @@ -35,3 +35,10 @@ pull_request_rules:
conditions:
- label=backport-stable-5.0
name: backport stable-5.0
+ - actions:
+ backport:
+ branches:
+ - stable-6.0
+ conditions:
+ - label=backport-stable-6.0
+ name: backport stable-6.0
|
Update lightning_module.rst
`*_epoch_end` methods expect a return value of None.
optimizer.step()
optimizer.zero_grad()
- epoch_out = training_epoch_end(outs)
+ training_epoch_end(outs)
Training with DataParallel
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
Remove redundant code comments
Comments are unified in the README | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
-"""
-This code creates a dataframe of dicom headers based on dicom files in a filepath.
-This code also extracts the images within those dicoms if requested. see section 'print images'
-pip3 install image numpy pandas pydicom pillow pypng
-"""
import numpy as np
import pandas as pd
-import pydicom as dicom #pydicom is most recent form of dicom python interface. see https://pydicom.github.io/
+import pydicom as dicom
import png, os, glob
import PIL as pil
from pprint import pprint
|
Fix whitespace issue
Introduced in | @@ -101,6 +101,9 @@ Bugs fixed
* Intel C compilers could complain about unsupported gcc pragmas.
Patch by Ralf Gommers. (Github issue :issue:`5052`)
+* Includes all bug-fixes and features from the 0.29 maintenance branch
+ up to the :ref:`0.29.33` release.
+
Other changes
-------------
|
Reduce parallelism for e2e tests from 4 to 2
e2e tests have been very flaky in CI; try reducing the number of tests
run in parallel to see if that yields more stable CI builds
- docker login --username "${DOCKER_USERNAME}" --password-stdin <<< "${DOCKER_PASSWORD}"
- ./bin/docker_build
# Run end-to-end/integration tests
- - tox -e integration_test -- -n 4 --use-docker-for-e2e
+ - tox -e integration_test -- -n 2 --use-docker-for-e2e
# Store metadata for promotion jobs
- echo "$SEMAPHORE_JOB_ID" > semaphore_job_id
- echo "$SEMAPHORE_GIT_SHA" > semaphore_git_sha
|
Table mixin for Tenancy columns
A mixin to add the Tenant and Tenant Group columns to a table. | @@ -3,6 +3,7 @@ import django_tables2 as tables
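A minimal usage sketch (the table name and the import path are hypothetical; only django-tables2 itself is assumed):

```python
import django_tables2 as tables

# TenancyColumnsMixin is the class added in the diff below; this import path
# is a placeholder for illustration, not necessarily the project's real one.
from tenancy.tables import TenancyColumnsMixin


class DeviceTable(TenancyColumnsMixin, tables.Table):
    # The mixin contributes the `tenant_group` and `tenant` columns, so each
    # table no longer needs to declare them individually.
    name = tables.Column()
```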
__all__ = (
'TenantColumn',
'TenantGroupColumn',
+ 'TenancyColumnsMixin',
)
@@ -50,3 +51,7 @@ class TenantGroupColumn(tables.TemplateColumn):
def value(self, value):
return str(value) if value else None
+
+class TenancyColumnsMixin(tables.Table):
+ tenant_group = TenantGroupColumn()
+ tenant = TenantColumn()
|
left_sidebar: Add data-placement to settings icon to prevent flickering.
Changing the position of tooltip using data-placement=bottom fixes this flickering artifact.
Fixes: | </ul>
<div id="streams_list" class="zoom-out">
<div id="streams_header" class="zoom-in-hide"><h4 class="sidebar-title" data-toggle="tooltip" title="{{ _('Filter streams') }}">{{ _('STREAMS') }}</h4>
- <i id="streams_inline_cog" class='fa fa-cog' aria-hidden="true" data-toggle="tooltip" title="{{ _('Subscribe, add, or configure streams') }}"></i>
+ <i id="streams_inline_cog" class='fa fa-cog' aria-hidden="true" data-toggle="tooltip" data-placement="bottom" title="{{ _('Subscribe, add, or configure streams') }}"></i>
<i id="streams_filter_icon" class='fa fa-search' aria-hidden="true" data-toggle="tooltip" title="{{ _('Filter streams') }} (q)"></i>
<div class="input-append notdisplayed stream_search_section">
<input class="stream-list-filter" type="text" autocomplete="off" placeholder="{{ _('Search streams') }}" />
|
fix(rez-pip): ensure the pip version check warning is displayed
The pip version check was using a too broad exception catch that would
cause the original raise to be silently skipped even when it was raised
based on a successful version match (<19). Adjust exception handling
to avoid this issue. | @@ -250,6 +250,8 @@ def find_pip(pip_version=None, python_version=None):
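The exception-handling pattern in isolation (a standalone sketch; the error class and version threshold mirror what the diff below shows):

```python
class VersionError(Exception):
    """Raised when the detected tool version is too old."""


def check_pip_version(major):
    try:
        if int(major) < 19:
            raise VersionError("pip >= 19 is required! Please update your pip.")
    except VersionError:
        # Deliberately re-raise: this error comes from a *successful* version
        # check and must reach the user.
        raise
    except Exception:
        # Anything else means version detection itself failed; stay silent and
        # let the tool report its own error later.
        pass
```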
if int(pip_major) < 19:
raise VersionError("pip >= 19 is required! Please update your pip.")
+ except VersionError:
+ raise
except:
# silently skip if pip version detection failed, pip itself will show
# a reasonable error message at the least.
|
packaging: Remove pin for jpeg, numpy
* packaging: Remove pin for jpeg, numpy
These may no longer be necessary due to the default anaconda channel
having the necessary packages now.
* Update packaging/torchvision/meta.yaml | @@ -9,14 +9,13 @@ requirements:
build:
- {{ compiler('c') }} # [win]
- libpng
- - jpeg <=9b
+ - jpeg
# NOTE: The only ffmpeg version that we build is actually 4.2
- ffmpeg >=4.2 # [not win]
host:
- python
- setuptools
- - defaults::numpy >=1.11
{{ environ.get('CONDA_PYTORCH_BUILD_CONSTRAINT') }}
{{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT') }}
{{ environ.get('CONDA_CPUONLY_FEATURE') }}
@@ -25,9 +24,8 @@ requirements:
- python
- libpng
- ffmpeg >=4.2 # [not win]
- - jpeg <=9b
- - pillow >=4.1.1
- - defaults::numpy >=1.11
+ - jpeg
+ - pillow >=5.3.0
{{ environ.get('CONDA_PYTORCH_CONSTRAINT') }}
{{ environ.get('CONDA_CUDATOOLKIT_CONSTRAINT') }}
@@ -53,7 +51,7 @@ test:
- pytest
- scipy
- av >=8.0.1
- - jpeg <=9b
+ - jpeg
- ca-certificates
|
[sync] thread safe check for folder conflicts
see | @@ -48,8 +48,8 @@ from maestral.config import MaestralConfig, MaestralState
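The race-free idea in isolation: attempt the operation and react to conflicts rather than checking metadata first. The client and error classes below are stand-ins for Maestral's own, as shown in the diff:

```python
class FolderConflictError(Exception):
    """Stand-in for maestral.errors.FolderConflictError."""


class FileConflictError(Exception):
    """Stand-in for maestral.errors.FileConflictError."""


def create_remote_folder(client, dbx_path):
    try:
        # Let the server arbitrate instead of checking metadata first,
        # which could race with another thread or client.
        return client.make_dir(dbx_path, autorename=False)
    except FolderConflictError:
        # The folder already exists remotely; nothing to create.
        return None
    except FileConflictError:
        # A *file* occupies the path; keep both by renaming the new folder.
        return client.make_dir(dbx_path, autorename=True)
```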
from maestral.fsevents import Observer
from maestral.constants import (IDLE, SYNCING, PAUSED, STOPPED, DISCONNECTED,
EXCLUDED_FILE_NAMES, MIGNORE_FILE, IS_FS_CASE_SENSITIVE)
-from maestral.errors import (RevFileError, NoDropboxDirError,
- SyncError, PathError, NotFoundError,
+from maestral.errors import (RevFileError, NoDropboxDirError, SyncError, PathError,
+ NotFoundError, FileConflictError, FolderConflictError,
fswatch_to_maestral_error, os_to_maestral_error)
from maestral.utils.content_hasher import DropboxContentHasher
from maestral.utils.notify import MaestralDesktopNotifier, FILECHANGE
@@ -1618,18 +1618,22 @@ class UpDownSync:
dbx_path = self.to_dbx_path(local_path)
- md_old = self.client.get_metadata(dbx_path)
self._wait_for_creation(local_path)
if event.is_directory:
- if isinstance(md_old, FolderMetadata):
+ try:
+ md_new = self.client.make_dir(dbx_path, autorename=False)
+ except FolderConflictError:
+ logger.debug('No conflict for "%s": the folder already exists',
+ event.src_path)
self.set_local_rev(dbx_path, 'folder')
return
- else:
+ except FileConflictError:
md_new = self.client.make_dir(dbx_path, autorename=True)
else:
# check if file already exists with identical content
+ md_old = self.client.get_metadata(dbx_path)
if isinstance(md_old, FileMetadata):
local_hash = get_local_hash(local_path)
if local_hash == md_old.content_hash:
|
helper: Refactor updating message count on muted streams.
This simplifies how updating the message count is handled
for muted vs. non-muted streams.
add_to_counts = True
if msg_type == 'stream':
stream_id = messages[id]['stream_id']
- for stream in streams:
- if stream.stream_id in controller.model.muted_streams:
+ if stream_id in controller.model.muted_streams:
add_to_counts = False # if muted, don't add to eg. all_msg
- break
+ else:
+ for stream in streams:
if stream.stream_id == stream_id:
stream.update_count(stream.count + new_count)
break
|
Throttle scaler even when an exception is raised
The with throttle(seconds): construct doesn't work when an exception
is raised through it. | @@ -835,8 +835,8 @@ class ScalerThread(ExceptionalThread):
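A minimal sketch of why the try block must sit inside the with block (this throttle is a simplified stand-in for Toil's):

```python
import time
from contextlib import contextmanager


@contextmanager
def throttle(min_interval):
    """Simplified stand-in: pads the block to at least `min_interval` seconds,
    but only when the block exits normally."""
    start = time.time()
    yield
    remaining = min_interval - (time.time() - start)
    if remaining > 0:
        time.sleep(remaining)


def scaler_loop(get_jobs, interval=60):
    while True:
        with throttle(interval):
            try:
                get_jobs()
            except Exception as exc:
                # Catch inside the `with`: if the exception escaped, throttle's
                # post-yield sleep would be skipped and the loop would spin hot.
                print(f"scaler iteration failed: {exc}")
```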
def tryRun(self):
while not self.stop:
- try:
with throttle(self.scaler.config.scaleInterval):
+ try:
queuedJobs = self.scaler.leader.getJobs()
queuedJobShapes = [
Shape(wallTime=self.scaler.getAverageRuntime(
|
Fixes TypeError: isinstance in airflow_component_test in Python 3.8.
Changes the mocked target to the class's __init__ instead of functools.partial, to avoid a collision with the internal library.
import datetime
-import functools
import os
from unittest import mock
from airflow import models
+from airflow.operators import python_operator
import tensorflow as tf
from tfx import types
@@ -97,8 +97,8 @@ class AirflowComponentTest(tf.test.TestCase):
self.assertEqual(arg_list[0][1]['pipeline_info'].run_id, 'run_id')
mock_component_launcher.launch.assert_called_once()
- @mock.patch.object(functools, 'partial', wraps=functools.partial)
- def testAirflowComponent(self, mock_functools_partial):
+ @mock.patch.object(python_operator.PythonOperator, '__init__')
+ def testAirflowComponent(self, mock_python_operator_init):
mock_component_launcher_class = mock.Mock()
airflow_component.AirflowComponent(
parent_dag=self._parent_dag,
@@ -110,24 +110,27 @@ class AirflowComponentTest(tf.test.TestCase):
beam_pipeline_args=[],
additional_pipeline_args={},
component_config=None)
- # Airflow complained if we completely mock this function. So we "wraps" the
- # function. `partial` can be called multiple times from other than
- # AirflowComponent. We will check the first call only.
- mock_functools_partial.assert_called()
- args = mock_functools_partial.call_args_list[0][0]
- kwargs = mock_functools_partial.call_args_list[0][1]
- self.assertCountEqual(args,
- (airflow_component._airflow_component_launcher,))
- self.assertTrue(kwargs.pop('driver_args').enable_cache)
+
+ mock_python_operator_init.assert_called_once_with(
+ task_id=self._component.id,
+ provide_context=True,
+ python_callable=mock.ANY,
+ dag=self._parent_dag)
+
+ python_callable = mock_python_operator_init.call_args_list[0][1][
+ 'python_callable']
+ self.assertEqual(python_callable.func,
+ airflow_component._airflow_component_launcher)
+ self.assertTrue(python_callable.keywords.pop('driver_args').enable_cache)
self.assertEqual(
- kwargs, {
+ python_callable.keywords, {
'component': self._component,
'component_launcher_class': mock_component_launcher_class,
'pipeline_info': self._pipeline_info,
'metadata_connection_config': self._metadata_connection_config,
'beam_pipeline_args': [],
'additional_pipeline_args': {},
- 'component_config': None
+ 'component_config': None,
})
|
Update generic.txt
Moving to ```raccoon```: | @@ -8714,11 +8714,6 @@ medicacademic.com/aza/
hallmarkherbals.com
-# Reference: https://www.virustotal.com/gui/domain/analyticsonline.top/relations
-# Reference: https://twitter.com/FaLconIntel/status/1247895934127591426
-
-analyticsonline.top
-
# Reference: https://twitter.com/MBThreatIntel/status/1248412024305897475
# Reference: https://www.virustotal.com/gui/ip-address/198.12.66.107/relations
|
portico: Fix password strength bar reset after form invalidation.
This code prevents the password bar from being incorrectly cleared after
the sign-up form is rendered again when invalid data is submitted
(generally due to forgetting to agree to the ToS).
Fixes | @@ -38,6 +38,12 @@ $(function () {
unhighlight: highlight('success'),
});
+ if (password_field) {
+ // Reset the state of the password strength bar if the page
+ // was just reloaded due to a validation failure on the backend.
+ common.password_quality(password_field.val(), $('#pw_strength .bar'), password_field);
+ }
+
password_field.on('change keyup', function () {
// Update the password strength bar even if we aren't validating
// the field yet.
|
add under_variable_scope() to ease the structuring of models,
consider the combination with SaverRestorer(..., prefix="...") | @@ -77,6 +77,35 @@ def under_name_scope():
return _impl
+def under_variable_scope():
+ """
+ Returns:
+ A decorator which makes the function happen under a variable scope,
+ which is named by the function itself.
+
+ Examples:
+
+ .. code-block:: python
+
+ @under_variable_scope()
+ def mid_level(x):
+ with argscope(Conv2D, kernel_shape=3, nl=BNReLU):
+ x = Conv2D('conv1', x, 512, stride=1)
+ x = Conv2D('conv2', x, 256, stride=1)
+ return x
+
+ """
+
+ def _impl(func):
+ @functools.wraps(func)
+ def wrapper(*args, **kwargs):
+ name = func.__name__
+ with tf.variable_scope(name):
+ return func(*args, **kwargs)
+ return wrapper
+ return _impl
+
+
@graph_memoized
def _get_cached_ns(name):
with tf.name_scope(None):
|
Allow dry-run deployments of installed packages
Makes the -d flag behavior consistent with the main 'deploy' command
(accepts both package names and source directories.)
Note that source directories will not be packaged and installed when
the -d flag is specified. | @@ -904,7 +904,11 @@ def deploy_problems(args, config):
try:
for problem_name in problem_names:
- if args.dry:
+ if isdir(join(get_problem_root(problem_name, absolute=True))):
+ # problem_name is already an installed package
+ deploy_location = join(get_problem_root(problem_name, absolute=True))
+ elif isdir(problem_name) and args.dry:
+ # dry run - avoid installing package
deploy_location = problem_name
elif isdir(problem_name):
# problem_name is a source dir - convert to .deb and install
@@ -923,9 +927,6 @@ def deploy_problems(args, config):
logger.error("An error occurred while installing problem packages.")
raise FatalException
deploy_location = join(get_problem_root(problem_name, absolute=True))
- elif isdir(join(get_problem_root(problem_name, absolute=True))):
- # problem_name is already an installed package
- deploy_location = join(get_problem_root(problem_name, absolute=True))
else:
logger.error("'%s' is neither an installed package, nor a valid problem directory",
problem_name)
|
Core & Internals: Fix main web.py endpoint errors
The /vos endpoint was missing completely.
Fix import typo with SStates from the subscriptions endpoint. | #!/usr/bin/env python
-# Copyright 2012-2020 CERN for the benefit of the ATLAS collaboration.
+# -*- coding: utf-8 -*-
+# Copyright 2020 CERN
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
#
# Authors:
# - Thomas Beermann <[email protected]>, 2020
-#
-# PY3K COMPATIBLE
+# - Benedikt Ziemons <[email protected]>, 2020
from web import application, loadhook
@@ -60,8 +60,9 @@ from rucio.web.rest.rse import (Attributes as RAttributes, Distance as RDistance
Usage as RUsage, UsageHistory as RUsageHistory, Limits as RLimits, RSE
as RRSE, RSEs as RRSEs, QoSPolicy as RQoSPolicy) # NOQA: F401
from rucio.web.rest.scope import Scope as SCScope, ScopeList as SCScopeList # NOQA: F401
-from rucio.web.rest.subscription import SubscriptionId as SSubscriptionId, States as AStates, Rules as SRules, SubscriptionName as SSubscriptionName, Subscription as SSubscription # NOQA: F401
+from rucio.web.rest.subscription import SubscriptionId as SSubscriptionId, States as SStates, Rules as SRules, SubscriptionName as SSubscriptionName, Subscription as SSubscription # NOQA: F401
from rucio.web.rest.temporary_did import BulkDIDS as TBulkDIDS # NOQA: F401
+from rucio.web.rest.vo import VO as VVO, VOs as VVOs, RecoverVO as VRecoverVO # NOQA: F401
URLS = [
@@ -232,6 +233,12 @@ URLS += [
'/subscriptions/', 'SSubscription',
]
+URLS += [
+ '/vos/(.+)/recover', 'VRecoverVO',
+ '/vos/(.+)', 'VVO',
+ '/vos/', 'VVOs'
+]
+
URLS += ['/tmp_dids', 'TBulkDIDS']
APP = application(URLS, globals())
|
Update common/chromium/export_blink_webdisplayitemlist.patch
The Great Blink mv for source files, part 2. | -diff --git a/third_party/WebKit/public/platform/WebDisplayItemList.h b/third_party/WebKit/public/platform/WebDisplayItemList.h
-index 82af75c3727e..548eefc0fae5 100644
---- a/third_party/WebKit/public/platform/WebDisplayItemList.h
-+++ b/third_party/WebKit/public/platform/WebDisplayItemList.h
+diff --git a/third_party/blink/public/platform/web_display_item_list.h b/third_party/blink/public/platform/web_display_item_list.h
+index 31a3f6d8d5f4..0b2a13230fbd 100644
+--- a/third_party/blink/public/platform/web_display_item_list.h
++++ b/third_party/blink/public/platform/web_display_item_list.h
- #define WebDisplayItemList_h
+ #define THIRD_PARTY_BLINK_PUBLIC_PLATFORM_WEB_DISPLAY_ITEM_LIST_H_
- #include "WebBlendMode.h"
-+#include "WebCommon.h"
- #include "WebFloatPoint.h"
- #include "WebFloatRect.h"
- #include "WebRect.h"
+ #include "third_party/blink/public/platform/web_blend_mode.h"
++#include "third_party/blink/public/platform/web_common.h"
+ #include "third_party/blink/public/platform/web_float_point.h"
+ #include "third_party/blink/public/platform/web_float_rect.h"
+ #include "third_party/blink/public/platform/web_rect.h"
@@ -33,7 +34,7 @@ namespace blink {
// 'drawing' items) and operations to be performed when rendering this content
// (stored in 'clip', 'transform', 'filter', etc...). For more details see:
|
Create per-split output dirs in the BaseDriver. This is needed because some Beam
runners will fail if the output directory doesn't exist. | @@ -26,26 +26,35 @@ import tensorflow as tf
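The gist, as a standalone sketch (the artifact URI and split names are placeholders; only tf.io.gfile is assumed):

```python
import os

import tensorflow as tf


def prepare_output_paths(artifact_uri, split_names):
    """Create the artifact directory plus one sub-directory per split."""
    tf.io.gfile.makedirs(artifact_uri)
    for split in split_names:
        # Some Beam runners refuse to write into a directory that does not
        # already exist, hence the eager creation here.
        tf.io.gfile.makedirs(os.path.join(artifact_uri, split))
```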
from tfx import types
from tfx.orchestration import data_types
from tfx.orchestration import metadata
+from tfx.types import artifact_utils
from tfx.types import channel_utils
def _generate_output_uri(base_output_dir: Text, name: Text,
execution_id: int) -> Text:
"""Generate uri for output artifact."""
+ return os.path.join(base_output_dir, name, str(execution_id))
- # Generates output uri based on execution id.
- uri = os.path.join(base_output_dir, name, str(execution_id))
- if tf.io.gfile.exists(uri):
- msg = 'Output artifact uri %s already exists' % uri
+
+def _prepare_output_paths(artifact: types.Artifact):
+ """Create output directories for output artifact."""
+ if tf.io.gfile.exists(artifact.uri):
+ msg = 'Output artifact uri %s already exists' % artifact.uri
absl.logging.error(msg)
raise RuntimeError(msg)
- else:
+
# TODO(zhitaoli): Consider refactoring this out into something
# which can handle permission bits.
- absl.logging.debug('Creating output artifact uri %s as directory', uri)
- tf.io.gfile.makedirs(uri)
-
- return uri
+ absl.logging.debug('Creating output artifact uri %s as directory',
+ artifact.uri)
+ tf.io.gfile.makedirs(artifact.uri)
+ # TODO(b/147242148): Avoid special-casing the "split_names" property.
+ if artifact.type.PROPERTIES and 'split_names' in artifact.type.PROPERTIES:
+ split_names = artifact_utils.decode_split_names(artifact.split_names)
+ for split in split_names:
+ split_dir = os.path.join(artifact.uri, split)
+ absl.logging.debug('Creating output split %s as directory', split_dir)
+ tf.io.gfile.makedirs(split_dir)
class BaseDriver(object):
@@ -178,6 +187,8 @@ class BaseDriver(object):
for name, output_list in result.items():
for artifact in output_list:
artifact.uri = _generate_output_uri(base_output_dir, name, execution_id)
+ _prepare_output_paths(artifact)
+
return result
def _fetch_cached_artifacts(
|
Refactor pre-build check into function
Improves clarity of intent.
"""
return self._candidate_block is not None
+ def _can_build(self):
+ """Returns whether the block publisher is ready to build a block.
+ """
+ return self._chain_head is not None and self._pending_batches
+
def _log_consensus_state(self):
if self._logging_states.consensus_ready:
LOGGER.debug("Consensus is ready to build candidate block.")
@@ -728,9 +733,7 @@ class BlockPublisher(object):
"""
try:
with self._lock:
- if (self._chain_head is not None
- and not self._building()
- and self._pending_batches):
+ if not self._building() and self._can_build():
try:
self.initialize_block(self._chain_head)
except ConsensusNotReady:
|
Fix always include TAG review in intents.
This change makes it so that all intent email templates include the TAG
review field. | @@ -17,10 +17,10 @@ Intent to {{feature.intent_stage}}: {{feature.name}}
Specification: <a href="{{feature.spec_link}}">{{feature.spec_link}}</a>
{% endif %}
{% for link in feature.doc_links %}<a href="{{link}}">{{link}}</a>{% endfor %}
-
+{% endif %}
<label>TAG review</label>
{{feature.tag_review|urlize}}
-{% endif %}
+
<label>Summary</label>
{{feature.summary}}
{% if feature.intent_stage == "Implement" or feature.intent_stage == "Ship" or feature.intent_stage == "Implement and Ship" %}
|
Add __init__ to modules doc
Add a description for the module __init__() function, as suggested
in | @@ -209,6 +209,29 @@ default configuration file for the minion contains the information and format
used to pass data to the modules. :mod:`salt.modules.test`,
:file:`conf/minion`.
+.. _module_init:
+
+``__init__`` Function
+---------------------
+
+If you want your module to have different execution modes based on minion
+configuration, you can use the ``__init__(opts)`` function to perform initial
+module setup. The parameter ``opts`` is the complete minion configuration,
+as also available in the ``__opts__`` dict.
+
+.. code-block:: python
+
+ '''
+ Cheese module initialization example
+ '''
+ def __init__(opts):
+ '''
+ Allow foreign imports if configured to do so
+ '''
+ if opts.get('cheese.allow_foreign', False):
+ _enable_foreign_products()
+
+
Strings and Unicode
===================
|
Fix instantiating an xml.etree.ElementTree.Element
The methods removed by
are abstract in `MutableSequence` and therefore must be specified on `Element`. | # Stubs for xml.etree.ElementTree
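For reference, the runtime behaviour these stub methods describe, using the standard library directly:

```python
from xml.etree.ElementTree import Element

root = Element("root")
child = Element("child")

root.append(child)
assert len(root) == 1        # __len__
assert root[0] is child      # __getitem__
root[0] = Element("other")   # __setitem__
del root[0]                  # __delitem__
```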
-from typing import Any, Callable, Dict, Generator, IO, ItemsView, Iterable, Iterator, KeysView, List, MutableSequence, Optional, Sequence, Text, Tuple, TypeVar, Union
+from typing import Any, Callable, Dict, Generator, IO, ItemsView, Iterable, Iterator, KeysView, List, MutableSequence, Optional, overload, Sequence, Text, Tuple, TypeVar, Union
import io
import sys
@@ -70,6 +70,17 @@ class Element(MutableSequence['Element']):
def remove(self, subelement: 'Element') -> None: ...
def set(self, key: _str_argument_type, value: _str_argument_type) -> None: ...
def __bool__(self) -> bool: ...
+ def __delitem__(self, i: Union[int, slice]) -> None: ...
+ @overload
+ def __getitem__(self, i: int) -> 'Element': ...
+ @overload
+ def __getitem__(self, s: slice) -> Sequence['Element']: ...
+ def __len__(self) -> int: ...
+ @overload
+ def __setitem__(self, i: int, o: 'Element') -> None: ...
+ @overload
+ def __setitem__(self, s: slice, o: Iterable['Element']) -> None: ...
+
def SubElement(parent: Element, tag: _str_argument_type, attrib: Dict[_str_argument_type, _str_argument_type]=..., **extra: _str_argument_type) -> Element: ...
def Comment(text: _str_argument_type=...) -> Element: ...
|
Refreshing numel on a stride update is pointless.
Summary:
Pull Request resolved:
Test Plan: Imported from OSS | @@ -677,7 +677,6 @@ struct C10_API TensorImpl : public c10::intrusive_ptr_target {
virtual void set_stride(int64_t dim, int64_t new_stride) {
TORCH_CHECK(allow_tensor_metadata_change(), "set_stride ", err_msg_tensor_metadata_change_not_allowed);
strides_[dim] = new_stride;
- refresh_numel();
refresh_contiguous();
}
|
AntiSpam: create tasks in a safer manner
Name the tasks and use `scheduling.create_task` to ensure exceptions
are caught. | @@ -18,7 +18,7 @@ from bot.constants import (
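Roughly, the helper pattern looks like this (a generic asyncio sketch, not the bot's actual scheduling module):

```python
import asyncio
import logging

log = logging.getLogger(__name__)


def create_task(coro, *, name=None):
    """Create a named task and make sure its exceptions get logged."""
    task = asyncio.create_task(coro, name=name)
    task.add_done_callback(_log_exception)
    return task


def _log_exception(task):
    if not task.cancelled() and task.exception() is not None:
        log.error("Task %s failed", task.get_name(), exc_info=task.exception())
```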
)
from bot.converters import Duration
from bot.exts.moderation.modlog import ModLog
-from bot.utils import lock
+from bot.utils import lock, scheduling
from bot.utils.messages import format_user, send_attachments
@@ -115,7 +115,7 @@ class AntiSpam(Cog):
self.message_deletion_queue = dict()
- self.bot.loop.create_task(self.alert_on_validation_error())
+ self.bot.loop.create_task(self.alert_on_validation_error(), name="AntiSpam.alert_on_validation_error")
@property
def mod_log(self) -> ModLog:
@@ -192,7 +192,10 @@ class AntiSpam(Cog):
if channel.id not in self.message_deletion_queue:
log.trace(f"Creating queue for channel `{channel.id}`")
self.message_deletion_queue[message.channel.id] = DeletionContext(channel)
- self.bot.loop.create_task(self._process_deletion_context(message.channel.id))
+ scheduling.create_task(
+ self._process_deletion_context(message.channel.id),
+ name=f"AntiSpam._process_deletion_context({message.channel.id})"
+ )
# Add the relevant of this trigger to the Deletion Context
await self.message_deletion_queue[message.channel.id].add(
@@ -202,11 +205,9 @@ class AntiSpam(Cog):
)
for member in members:
-
- # Fire it off as a background task to ensure
- # that the sleep doesn't block further tasks
- self.bot.loop.create_task(
- self.punish(message, member, full_reason)
+ scheduling.create_task(
+ self.punish(message, member, full_reason),
+ name=f"AntiSpam.punish(message={message.id}, member={member.id}, rule={rule_name})"
)
await self.maybe_delete_messages(channel, relevant_messages)
|
llvm/builtins: Flip order of loops in transposed multiplication
Improves memory access pattern and therefore performance. | @@ -93,8 +93,8 @@ def setup_vxm_transposed(ctx):
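Conceptually (a pure-Python sketch, not the LLVM-generated builtin): with the row index in the outer loop, a row-major matrix is read sequentially instead of with a column stride.

```python
def vec_matrix_transposed(v, M, rows, cols):
    """result[j] = sum_i v[i] * M[i][j], with the row index in the outer loop."""
    result = [0.0] * cols
    for i in range(rows):          # outer loop over rows of M
        vi = v[i]
        row = M[i]                 # each row is contiguous in row-major storage
        for j in range(cols):      # inner loop streams through that row
            result[j] += vi * row[j]
    return result
```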
b1.store(ctx.float_ty(0), ptr)
# Multiplication
- with helpers.for_loop_zero_inc(builder, y, "vxm_outer") as (b1, index_i):
- with helpers.for_loop_zero_inc(b1, x, "vxm_inner") as (b2, index_j):
+ with helpers.for_loop_zero_inc(builder, x, "trans_vxm_outer") as (b1, index_j):
+ with helpers.for_loop_zero_inc(b1, y, "trans_vxm_inner") as (b2, index_i):
# Multiplication and accumulation
vector_ptr = builder.gep(v, [index_i])
|
Change summoner example function name, add params
Changed the function name to better reflect its purpose
Moved summoner name to argument, added region
Moved further examples for getting a summoner to a comment which
outlines their purpose | import cassiopeia as cass
from cassiopeia.core import Summoner
-def test_cass():
- name = "Kalturi"
- me = Summoner(name=name)
- print("Name:", me.name)
- print("Id:", me.id)
- print("Account id:", me.account.id)
- print("Level:", me.level)
- print("Revision date:", me.revision_date)
- print("Profile icon id:", me.profile_icon.id)
- print("Profile icon name:", me.profile_icon.name)
- print("Profile icon url:", me.profile_icon.url)
- print("Profile icon image:", me.profile_icon.image)
- name = me.name
- id = me.id
- account_id = me.account.id
- me = cass.get_summoner(name)
- me = cass.get_summoner(name=name)
- me = cass.get_summoner(id=id)
- me = cass.get_summoner(account_id=account_id)
+def print_summoner(name: str, region: str):
+ summoner = Summoner(name=name, region=region)
+ print("Name:", summoner.name)
+ print("ID:", summoner.id)
+ print("Account ID:", summoner.account.id)
+ print("Level:", summoner.level)
+ print("Revision date:", summoner.revision_date)
+ print("Profile icon ID:", summoner.profile_icon.id)
+ print("Profile icon name:", summoner.profile_icon.name)
+ print("Profile icon URL:", summoner.profile_icon.url)
+ print("Profile icon image:", summoner.profile_icon.image)
+
+ # These are equivalent ways of obtaining a Summoner.
+ # Note that the region defaults to NA.
+ # summoner = cass.get_summoner(name)
+ # summoner = cass.get_summoner(name=summoner.name)
+ # summoner = cass.get_summoner(id=summoner.id)
+ # summoner = cass.get_summoner(account_id=summoner.account.id)
if __name__ == "__main__":
- test_cass()
+ print_summoner("Kalturi", "NA")
|
Create Python workunit value using separate function
Move the logic to create a Python dict with the workunit fields on it to
a separate function. This will be helpful for adding asynchronous
Workunit reporting shortly. | @@ -52,6 +52,7 @@ use logging::{Destination, Logger};
use rule_graph::{GraphMaker, RuleGraph};
use std::any::Any;
use std::borrow::Borrow;
+use std::collections::HashSet;
use std::ffi::CStr;
use std::fs::File;
use std::io;
@@ -61,6 +62,7 @@ use std::panic;
use std::path::{Path, PathBuf};
use std::time::Duration;
use tempfile::TempDir;
+use workunit_store::WorkUnit;
#[cfg(test)]
mod tests;
@@ -388,29 +390,10 @@ fn make_core(
)
}
-///
-/// Returns a Handle representing a dictionary where key is metric name string and value is
-/// metric value int.
-///
-#[no_mangle]
-pub extern "C" fn scheduler_metrics(
- scheduler_ptr: *mut Scheduler,
- session_ptr: *mut Session,
-) -> Handle {
- with_scheduler(scheduler_ptr, |scheduler| {
- with_session(session_ptr, |session| {
- let mut values = scheduler
- .metrics(session)
- .into_iter()
- .flat_map(|(metric, value)| vec![externs::store_utf8(metric), externs::store_i64(value)])
- .collect::<Vec<_>>();
- if session.should_record_zipkin_spans() {
- let workunits = session
- .workunit_store()
- .get_workunits()
- .lock()
+fn workunits_to_py_tuple_value(workunits: &HashSet<WorkUnit>) -> Value {
+ let workunit_values = workunits
.iter()
- .map(|workunit| {
+ .map(|workunit: &WorkUnit| {
let mut workunit_zipkin_trace_info = vec![
externs::store_utf8("name"),
externs::store_utf8(&workunit.name),
@@ -432,8 +415,32 @@ pub extern "C" fn scheduler_metrics(
externs::store_dict(&workunit_zipkin_trace_info)
})
.collect::<Vec<_>>();
+
+ externs::store_tuple(&workunit_values)
+}
+
+///
+/// Returns a Handle representing a dictionary where key is metric name string and value is
+/// metric value int.
+///
+#[no_mangle]
+pub extern "C" fn scheduler_metrics(
+ scheduler_ptr: *mut Scheduler,
+ session_ptr: *mut Session,
+) -> Handle {
+ with_scheduler(scheduler_ptr, |scheduler| {
+ with_session(session_ptr, |session| {
+ let mut values = scheduler
+ .metrics(session)
+ .into_iter()
+ .flat_map(|(metric, value)| vec![externs::store_utf8(metric), externs::store_i64(value)])
+ .collect::<Vec<_>>();
+ if session.should_record_zipkin_spans() {
+ let workunits = session.workunit_store().get_workunits();
+
+ let value = workunits_to_py_tuple_value(&workunits.lock());
values.push(externs::store_utf8("engine_workunits"));
- values.push(externs::store_tuple(&workunits));
+ values.push(value);
};
externs::store_dict(&values).into()
})
|
Grab() and Retrieve() Camera Interface
Using grab and retrieve() rather than read(), with additional checks. | @@ -492,7 +492,13 @@ class CameraInterface(wx.Frame, Module):
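For reference, the split grab/retrieve flow in plain OpenCV (a minimal sketch, separate from the application's capture thread):

```python
import cv2

capture = cv2.VideoCapture(0)

# grab() only advances to the next frame and reports success/failure,
# which lets us bail out before paying for the decode.
if not capture.grab():
    raise RuntimeError("camera did not deliver a frame")

# retrieve() decodes the frame grabbed above; check both the flag and the data.
ok, frame = capture.retrieve()
if not ok or frame is None:
    raise RuntimeError("failed to decode the grabbed frame")

capture.release()
```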
if self.device is None:
self.camera_lock.release()
return
- ret, frame = self.capture.read()
+ ret = self.capture.grab()
+ if not ret:
+ wx.CallAfter(self.camera_error_webcam)
+ self.capture = None
+ self.camera_lock.release()
+ return
+ ret, frame = self.capture.retrieve()
if not ret or frame is None:
wx.CallAfter(self.camera_error_webcam)
self.capture = None
|