message: string, lengths 13–484
diff: string, lengths 38–4.63k
Update development_workflow.rst Fix syntax: this link was written in markdown format instead of rst format [ci skip]
@@ -317,7 +317,7 @@ In more detail For more details on running tests, please see :ref:`testing-guidelines`. #. Make sure your code includes appropriate docstrings, in the - [numpydoc format](https://numpydoc.readthedocs.io/en/latest/format.html). + `Numpydoc format`_. If appropriate, as when you are adding a new feature, you should update the appropriate documentation in the ``docs`` directory; a detailed description is in :ref:`documentation-guidelines`. @@ -548,3 +548,4 @@ can delete any backup branches that may have been created:: .. _git book: https://git-scm.com/book/ .. _Astropy issue list: https://github.com/astropy/astropy/issues .. _git choose-your-own-adventure: http://sethrobertson.github.io/GitFixUm/fixup.html +.. _Numpydoc format: https://numpydoc.readthedocs.io/en/latest/format.html
move Pointwise const simpl. to optimized_for_numpy Currently, if all arguments of `Pointwise` are constant, the array is simplified as `Constant(self.eval())`. However, if the arguments are sparse, this simplification loses the sparsity information, which could have been used by dependent arrays. This patch resolves this problem by moving the simplification to `optimized_for_numpy`.
@@ -2129,9 +2129,6 @@ class Pointwise(Array): return cls(*(prependaxes(appendaxes(arg, shape[r:]), shape[:l]) for arg, l, r in zip(args, offsets[:-1], offsets[1:]))) def _simplified(self): - if self.isconstant: - retval = self.eval() - return Constant(retval) if len(self.args) == 1 and isinstance(self.args[0], Transpose): arg, = self.args return Transpose(self.__class__(arg.func), arg.axes) @@ -2139,6 +2136,11 @@ class Pointwise(Array): if len(where) != self.ndim: return align(self.__class__(*uninserted), where, self.shape) + def _optimized_for_numpy(self): + if self.isconstant: + retval = self.eval() + return Constant(retval) + def _derivative(self, var, seen): if self.deriv is None: return super()._derivative(var, seen)
[Bugfix] Provide correct argument to Page.save Page.put uses minorEdit, whereas Page.save uses minor.
@@ -106,7 +106,7 @@ def put_text(page, new, summary, count, asynchronous=False): page.text = new try: page.save(summary=summary, asynchronous=asynchronous, - minorEdit=page.namespace() != 3) + minor=page.namespace() != 3) except pywikibot.EditConflict: pywikibot.output('Edit conflict! skip!') except pywikibot.ServerError:
make sure to update pip first to avoid those Retry errors
@@ -24,6 +24,7 @@ ENV PATH="/node_modules/.bin:$PATH" COPY ./requirements.txt . COPY ./requirements_prod.txt . +RUN pip install -U pip RUN pip install -r requirements.txt RUN pip install -r requirements_prod.txt
cloud-init: shrink the size of userdata Just cleaning things up. When userdata gets too big, it can hit the limits of various cloud providers.
#!/usr/bin/env bash - if [ $0 != "-bash" ] ; then pushd `dirname "$0"` 2>&1 > /dev/null fi @@ -7,9 +6,7 @@ dir=$(pwd) if [ $0 != "-bash" ] ; then popd 2>&1 > /dev/null fi - mkdir -p /var/log/cloudbench - logpath=/var/log/cloudbench/${USER}_openvpn_client.log tap=$1 mtu=$2 @@ -17,22 +14,8 @@ other_mtu=$3 VPNIP=$4 peer=$5 dunno=$6 - echo "client connected $(date) params: $@" >> $logpath - -# This is deliberate: We *must* call redis-cli here when VPN_ONLY = $True -# because is this the only time which the VPN is fully connected. It cannot be -# called earlier. - -# Additionally, we have to check on both the PENDING and Regular object types. The -# reason being here is that PENDING will have been deleted back when the VM's -# post_attach operations have already completed and CloudBench will have already -# populated the 'real' object, in which case we have to update that object as well. (bash -c "sleep 5; redis-cli -h SERVER_BOOTSTRAP -n OSCI_DBID -p OSCI_PORT hset TEST_USER:CLOUD_NAME:VM:PENDING:UUID cloud_init_vpn $VPNIP; exists=\$(redis-cli --raw -h SERVER_BOOTSTRAP -n OSCI_DBID -p OSCI_PORT hexists TEST_USER:CLOUD_NAME:VM:UUID cloud_init_vpn); if [ \$exists == 1 ] ; then redis-cli -h SERVER_BOOTSTRAP -n OSCI_DBID -p OSCI_PORT hset TEST_USER:CLOUD_NAME:VM:UUID cloud_init_vpn $VPNIP; redis-cli -h SERVER_BOOTSTRAP -n OSCI_DBID -p OSCI_PORT hset TEST_USER:CLOUD_NAME:VM:UUID prov_cloud_ip $VPNIP; fi" &) - -# Run cloudbench's cloud-agnostic userdata later. Backwards compatible with VPN_ONLY = False (/tmp/cb_post_boot.sh &) - env | sort >> $logpath - exit 0
[module/cpu] More useful rounding psutil.cpu_percent() only outputs to one decimal place anyway, so the trailing 0 is useless. The prepended 0 is also not important; it will only be non-zero at 100% utilization, so why not let it be 100% then and take up one less column otherwise?
@@ -26,7 +26,7 @@ class Module(bumblebee.engine.Module): cmd="gnome-system-monitor") def utilization(self, widget): - return "{:06.02f}%".format(self._utilization) + return "{:6.01f}%".format(self._utilization) def update(self, widgets): self._utilization = psutil.cpu_percent(percpu=False)
TEST: check initial_nodes from gr1c.load_aut_json In existing function test_load_aut_json, assert contents of initial_nodes attribute from gr1c.load_aut_json. Eventually, more tests should be written to treat various cases of initial conditions. Nonetheless, this assertion is motivated now to enforce changes from PR
@@ -222,6 +222,7 @@ def test_aut_xml2mealy(): def test_load_aut_json(): g = gr1c.load_aut_json(REFERENCE_AUTJSON_smallbool) + assert set(g.initial_nodes) == {'0x1E8F990'} assert g.env_vars == dict(x='boolean'), (g.env_vars) assert g.sys_vars == dict(y='boolean'), (g.sys_vars) # `REFERENCE_AUTJSON_smallbool` defined above
Updated README Updated README with reverse string in Go
@@ -7,6 +7,7 @@ Welcome to Sample Programs in Go! - [Hello World in Go][2] - [Fizz Buzz in Go][3] - [Baklava in Go][7] +- [String Reverse in Go][2] - Solution borrowed from @toturkmen via the [baklava repo][1] ## Fun Facts
SplineWidget : Use additive compositing for drawing splines This means that color splines appear white for greyscale splines, rather than confusingly showing only a blue line.
@@ -206,6 +206,7 @@ class SplineWidget( GafferUI.Widget ) : transform.translate( 0, -self.__splineBound.top() ) painter.setTransform( transform ) + painter.setCompositionMode( QtGui.QPainter.CompositionMode.CompositionMode_Plus ) for s in self.__splinesToDraw : pen = QtGui.QPen( self._qtColor( s.color ) ) pen.setCosmetic( True )
alert can have zero (0) value kwargs.get('value', None) or "n/a" sets value to 'n/a' if the provided value is zero
@@ -51,7 +51,9 @@ class Alert(object): self.status = kwargs.get('status', None) or "unknown" self.service = kwargs.get('service', None) or list() self.group = kwargs.get('group', None) or "Misc" - self.value = kwargs.get('value', None) or "n/a" + self.value = kwargs.get('value', None) + if self.value == None: + self.value = "n/a" self.text = kwargs.get('text', None) or "" self.tags = kwargs.get('tags', None) or list() self.attributes = kwargs.get('attributes', None) or dict()
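A minimal Python sketch (not part of the patch above; the kwargs contents are hypothetical) of the falsy-zero pitfall the message describes:

# `or` treats 0 as falsy, so a legitimate zero measurement is replaced by the default.
kwargs = {'value': 0}

broken = kwargs.get('value', None) or "n/a"   # -> "n/a", the zero is lost

fixed = kwargs.get('value', None)
if fixed is None:                             # substitute only when the value is truly missing
    fixed = "n/a"

assert broken == "n/a" and fixed == 0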
[Dashboard] Fix NPE when there is no GPU on the node There is an NPE bug that causes a browser crash when there is no GPU on the node. We can add a condition to fix it.
@@ -208,7 +208,9 @@ const NodeInfo: React.FC<{}> = () => { // Show GPU features only if there is at least one GPU in cluster. const showGPUs = - nodes.map((n) => n.gpus).filter((gpus) => gpus.length !== 0).length !== 0; + nodes + .map((n) => n.gpus) + .filter((gpus) => gpus !== undefined && gpus.length !== 0).length !== 0; // Don't show disk on Kubernetes. K8s node disk usage should be monitored // elsewhere.
Only extract script tags with children To allow supply system map plots to work by extracting the main script and mounting it to the body instead of dependency scripts.
@@ -155,7 +155,7 @@ const Plot = ({ index, dashIndex, data, style }) => { let script = null; let content = HTMLReactParser(response.data, { replace: function(domNode) { - if (domNode.type === "script") { + if (domNode.type === "script" && domNode.children[0]) { script = domNode.children[0].data; } } @@ -187,10 +187,15 @@ const Plot = ({ index, dashIndex, data, style }) => { return ( <Card title={ + <div> + <span style={{ fontWeight: "bold" }}>{data.title}</span> + {data.scenario && ( <React.Fragment> - <h3 style={{ float: "left" }}>{data.title}</h3> + <span> - </span> <small>{data.scenario}</small> </React.Fragment> + )} + </div> } extra="Test" style={{ ...plotStyle, height: "" }}
Fix "dict_values object does not support indexing" Fixes
@@ -290,7 +290,7 @@ def get_credential(cloud, cred_name=None): elif default_credential is not None and default_credential in cred.keys(): return cred[default_credential] elif len(cred) == 1: - return cred.values()[0] + return list(cred.values())[0] else: return None
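A minimal sketch (illustrative only, with a made-up credential dict) of the Python 3 behaviour the fix works around:

cred = {'default': 'my-credential'}   # hypothetical contents

# cred.values()[0]                    # TypeError: 'dict_values' object is not subscriptable (Python 3)
first = list(cred.values())[0]        # works on both Python 2 and Python 3
assert first == 'my-credential'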
BUG: fixed file_cadance evaluation Fixed file_cadance evaluation, since DateOffset attributes change based on initialization.
@@ -10,7 +10,40 @@ import pandas as pds import pysat -logger = logging.getLogger(__name__) +logger = pysat.logger + + +def is_daily_file_cadance(file_cadance): + """ Evaluate file cadance to see if it is daily or greater than daily + + Parameters + ---------- + file_cadance : dt.timedelta or pds.DateOffset + pysat assumes a daily file cadance, but some instrument data file + contain longer periods of time. This parameter allows the specification + of regular file cadances greater than or equal to a day (e.g., weekly, + monthly, or yearly). (default=dt.timedelta(days=1)) + + Returns + ------- + is_daily : bool + True if the cadance is daily or less, False if the cadance is greater + than daily + + """ + is_daily = True + + if hasattr(file_cadance, 'days'): + if file_cadance.days > 1: + is_daily = False + else: + if not (hasattr(file_cadance, 'microseconds') or + hasattr(file_cadance, 'seconds') or + hasattr(file_cadance, 'minutes') or + hasattr(file_cadance, 'hours')): + is_daily = False + + return is_daily def list_files(tag=None, inst_id=None, data_path=None, format_str=None, @@ -92,7 +125,7 @@ def list_files(tag=None, inst_id=None, data_path=None, format_str=None, # If the data is not daily, pad the series. Both pds.DateOffset and # dt.timedelta contain the 'days' attribute, so evaluate using that - if (not out.empty) and file_cadance.days > 1: + if not out.empty and not is_daily_file_cadance(file_cadance): emonth = out.index[-1] out.loc[out.index[-1] + file_cadance - dt.timedelta(days=1)] = out.iloc[-1]
Swap wget with curl Minor change. The Dockerfile contains only curl, so this swaps wget for curl
@@ -19,6 +19,6 @@ if __name__ == "__main__": print "Downloading Breakthough Listen raw data" for filename in bl_filelist: bname = os.path.basename(filename) - os.system("wget %s; mv %s testdata/" % (filename, bname)) + os.system("curl -O %s; mv %s testdata/" % (filename, bname))
[remote.HTTP][FIX] Provide suitable default for last-modified In cases where the last-modified header cannot be parsed into an RFC2822 date (or when it is absent), parsedate_tz returns None, and mktime_tz raises an exception. This handles both the invalid and missing cases as timestamp=0 (effectively making the remote file ancient)
@@ -130,10 +130,16 @@ class RemoteObject(DomainObject): if self.exists(): with self.httpr(verb="HEAD") as httpr: - file_mtime = self.get_header_item(httpr, "last-modified", default=0) - logger.debug("HTTP mtime: {}".format(file_mtime)) + file_mtime = self.get_header_item(httpr, "last-modified", default=None) + logger.debug("HTTP last-modified: {}".format(file_mtime)) + epochTime = 0 + + if file_mtime is not None: modified_tuple = email.utils.parsedate_tz(file_mtime) + if modified_tuple is None: + logger.debug("HTTP last-modified not in RFC2822 format: `{}`".format(file_mtime)) + else: epochTime = email.utils.mktime_tz(modified_tuple) return epochTime
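A small illustrative sketch, using only the standard library, of why the guard is needed:

import email.utils

good = email.utils.parsedate_tz("Tue, 15 Nov 1994 12:45:26 GMT")
bad = email.utils.parsedate_tz("not a date")      # -> None; mktime_tz(None) would raise

epoch = email.utils.mktime_tz(good) if good is not None else 0
assert bad is None and epoch > 0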
[cleanup] Cleanup sysop option in watchlist.py config.sysopnames is deprecated since Remove undocumented -sysop option rename option variables
@@ -18,7 +18,7 @@ Command line options: """ # # (C) Daniel Herding, 2005 -# (C) Pywikibot team, 2005-2019 +# (C) Pywikibot team, 2005-2020 # # Distributed under the terms of the MIT license. # @@ -27,11 +27,8 @@ from __future__ import absolute_import, division, unicode_literals import os import pywikibot - from pywikibot import config - from pywikibot.data.api import CachedRequest - from scripts.maintenance.cache import CacheEntry @@ -49,13 +46,13 @@ def isWatched(pageName, site=None): return pageName in watchlist -def refresh(site, sysop=False): +def refresh(site): """Fetch the watchlist.""" pywikibot.output('Retrieving watchlist for {0}.'.format(str(site))) - return list(site.watched_pages(sysop=sysop, force=True)) + return list(site.watched_pages(force=True)) -def refresh_all(sysop=False): +def refresh_all(): """Reload watchlists for all wikis where a watchlist is already present.""" cache_path = CachedRequest._get_cache_dir() files = os.listdir(cache_path) @@ -66,22 +63,18 @@ def refresh_all(sysop=False): entry.parse_key() entry._rebuild() if entry.site not in seen and 'watchlistraw' in entry._data: - refresh(entry.site, sysop) + refresh(entry.site) seen.add(entry.site) -def refresh_new(sysop=False): +def refresh_new(): """Load watchlists of all wikis for accounts set in user-config.py.""" pywikibot.output( 'Downloading all watchlists for your accounts in user-config.py') for family in config.usernames: for lang in config.usernames[family]: site = pywikibot.Site(lang, family) - refresh(site, sysop=sysop) - for family in config.sysopnames: - for lang in config.sysopnames[family]: - site = pywikibot.Site(lang, family) - refresh(site, sysop=sysop) + refresh(site) def main(*args): @@ -93,23 +86,20 @@ def main(*args): @param args: command line arguments @type args: str """ - all = False - new = False - sysop = False + opt_all = False + opt_new = False for arg in pywikibot.handle_args(args): if arg in ('-all', '-update'): - all = True + opt_all = True elif arg == '-new': - new = True - elif arg == '-sysop': - sysop = True - if all: - refresh_all(sysop=sysop) - elif new: - refresh_new(sysop=sysop) + opt_new = True + if opt_all: + refresh_all() + elif opt_new: + refresh_new() else: site = pywikibot.Site() - watchlist = refresh(site, sysop=sysop) + watchlist = refresh(site) pywikibot.output('{} pages in the watchlist.'.format(len(watchlist))) for page in watchlist: try:
update kubernetes driver Make the '_prepare_entities' method type uniform (static), removing the method type warning
@@ -61,7 +61,8 @@ class KubernetesDriver(DriverBase): KUBERNETES_DATASOURCE, datasource_action) - def _prepare_entities(self, nodes): + @staticmethod + def _prepare_entities(nodes): entities = [] for item in nodes.items: metadata = item.metadata
ci: add QML Android CI builds for arm64 and arm32 architectures Conservatively, these only build on tagged commits, or on any commit on the ci-qml-beta branch
@@ -171,7 +171,7 @@ task: CIRRUS_DOCKER_CONTEXT: contrib/build-wine task: - name: Android build + name: Android build (Kivy arm64) container: dockerfile: contrib/android/Dockerfile cpu: 2 @@ -181,6 +181,30 @@ task: binaries_artifacts: path: "dist/*" +task: + name: Android build (QML arm64) + container: + dockerfile: contrib/android/Dockerfile + cpu: 8 + memory: 24G + build_script: + - ./contrib/android/make_apk.sh qml arm64-v8a debug + binaries_artifacts: + path: "dist/*" + only_if: $CIRRUS_TAG != '' || $CIRRUS_BRANCH == 'ci-qml-beta' + +task: + name: Android build (QML arm32) + container: + dockerfile: contrib/android/Dockerfile + cpu: 8 + memory: 24G + build_script: + - ./contrib/android/make_apk.sh qml armeabi-v7a debug + binaries_artifacts: + path: "dist/*" + only_if: $CIRRUS_TAG != '' || $CIRRUS_BRANCH == 'ci-qml-beta' + task: name: MacOS build macos_instance:
Update .bash_profile rm obsolete solver variable
@@ -29,13 +29,6 @@ export DYLD_LIBRARY_PATH # for Mac #============================================================================== -#------------------------------------------------------------------------------ -# Choose your solver -#------------------------------------------------------------------------------ -export SOLVER="cbc" -#============================================================================== - - #------------------------------------------------------------------------------ # Uncomment the following if you are using the Xpress solver
Fix part of Make test coverage of core.controllers.collection_view 100% Fix part of Make test coverage of core.controllers.collection_viewer 100%
@@ -90,6 +90,13 @@ class CollectionViewerPermissionsTests(test_utils.GenericTestBase): self.get_html_response( '%s/%s' % (feconf.COLLECTION_URL_PREFIX, self.COLLECTION_ID)) + def test_invalid_collection_error(self): + self.login(self.EDITOR_EMAIL) + self.get_html_response( + '%s/%s' % (feconf.COLLECTION_URL_PREFIX, 'none'), + expected_status_int=404) + self.logout() + class CollectionViewerControllerEndToEndTests(test_utils.GenericTestBase): """Test the collection viewer controller using a sample collection.""" @@ -107,6 +114,11 @@ class CollectionViewerControllerEndToEndTests(test_utils.GenericTestBase): # Login as the user who will play the collection. self.login(self.VIEWER_EMAIL) + # Request invalid collection from data handler. + response_dict = self.get_json( + '%s/1' % feconf.COLLECTION_DATA_URL_PREFIX, + expected_status_int=404) + # Request the collection from the data handler. response_dict = self.get_json( '%s/0' % feconf.COLLECTION_DATA_URL_PREFIX)
Update SNMP exporter to 0.18.0 [FEATURE] Allow lookup chaining in a basic way [BUGFIX] Reduce and fix timeouts for SNMP requests
%define debug_package %{nil} Name: snmp_exporter -Version: 0.17.0 -Release: 2%{?dist} +Version: 0.18.0 +Release: 1%{?dist} Summary: Prometheus SNMP exporter. License: ASL 2.0 URL: https://github.com/prometheus/%{name}
Remove deprecated stream from help Add all stream choices to help for -s Remove newlines that were not rendered
@@ -172,10 +172,12 @@ class CliArgs: type=_stream_type, dest="streams", default=["metaout", "previewout"], - help=("Define which streams to enable \n" - "Format: stream_name or stream_name,max_fps \n" - "Example: -s metaout previewout \n" - "Example: -s metaout previewout,10 depth_sipp,10"))\ + choices=_stream_choices, + help=("Define which streams to enable as a space " + "delimited list (e.g. \"-s metaout " + "previewout\"). Optionally, append the FPS " + "with a comma to each stream name (e.g. \"-s " + "metaout previewout,10\")."))\ .completer=ChoicesCompleter(_stream_choices) parser.add_argument("-v", "--video", default=None, type=str, required=False,
Use sitevar for build season end [clowntown]
@@ -149,7 +149,9 @@ class MainBuildseasonHandler(CacheableHandler): self._cache_expiration = 60 * 5 def _render(self, *args, **kw): - endbuild_datetime_est = datetime.datetime(2018, 2, 20, 23, 59) + endbuild_datetime_est = datetime.datetime.strptime( + self.template_values['build_season_end'], "%Y-%m-%dT%H:%M:%S" + ) if 'build_season_end' in self.template_values else None endbuild_datetime_utc = pytz.utc.localize( endbuild_datetime_est + datetime.timedelta(hours=5)) week_events = EventHelper.getWeekEvents()
Add `ModelFittingError` to `botorch.exceptions.__init__` Summary: Pull Request resolved: Title
@@ -9,6 +9,7 @@ from botorch.exceptions.errors import ( BotorchTensorDimensionError, CandidateGenerationError, InputDataError, + ModelFittingError, UnsupportedError, ) from botorch.exceptions.warnings import ( @@ -33,6 +34,7 @@ __all__ = [ "InputDataError", "BadInitialCandidatesWarning", "CandidateGenerationError", + "ModelFittingError", "OptimizationWarning", "SamplingWarning", "UnsupportedError",
Update mppinverter.py add getSerialNumber
@@ -117,6 +117,19 @@ class mppInverter: inverter += '{}: {}'.format(cmd.name, cmd.description) return inverter + def getSerialNumber(self): + if self._serial_number is None: + response = self.execute("QID").response_dict + if response: + self._serial_number = response["serial_number"][0] + return self._serial_number + + def getAllCommands(self): + """ + Return list of defined commands + """ + return self._commands + def _getCommand(self, cmd): """ Returns the mppcommand object of the supplied cmd string @@ -138,12 +151,6 @@ class mppInverter: return command return None - def getAllCommands(self): - """ - Return list of defined commands - """ - return self._commands - def _doTestCommand(self, command): """ Performs a test command execution
print -> warn Sorry, didn't see at first that print is used to write to README.md. warn should be used instead
@@ -41,7 +41,7 @@ def retrieve_repo(name): try: repo = g.get_repo(name) except Exception: - print(f"Error occured while getting {name} repo") + warn(f"Error occured while getting {name} repo") raise print('.', file=sys.stderr, end='', flush=True) check_freshness(repo)
Fix the decorator that disables `Node` registration Its argument was called `function` while it uses `f` internally instead.
@@ -429,7 +429,7 @@ def registry_changed_to(r): Node.registry = backup -def temporarily_modifies_registry(function): +def temporarily_modifies_registry(f): """ Backup registry before and restore it after execution of `function`. """ def result(*xs, **ks):
Updated styles for .active links. These were displaying Bootstrap's bright blue when active/focus, instead of the darker blue HQ uses to show active.
} .pagination > .active > a { - background-color: @call-to-action-low; - color: #ffffff; + .button-variant(#ffffff; @cc-brand-low; @cc-brand-low); }
Fix an issue of "test_sorter" * If no command-line argument "shuffle" is specified, and it is also not programmatically defined in source code, then the default `test_sorter` of `TestRunner` should be `NoopSorter`, but `ShuffleSorter` is incorrectly used, although this has no side effect on the code logic.
@@ -342,7 +342,7 @@ that match ALL of the given tags. args["test_filter"] = filter_args # Cmdline supports shuffle ordering only for now - if "shuffle" in args: + if args.get("shuffle"): args["test_sorter"] = ordering.ShuffleSorter( seed=args["shuffle_seed"], shuffle_type=args["shuffle"] )
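A minimal sketch (the args dict here is hypothetical) of the difference between the two checks:

args = {"shuffle": None}               # key parsed from the command line, but no shuffle requested

old_check = "shuffle" in args          # True  -> ShuffleSorter is wrongly selected
new_check = bool(args.get("shuffle"))  # False -> the default NoopSorter is kept
assert old_check and not new_check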
Fix enum reference in documentation. google.cloud.bigtable.enums.InstanceType -> google.cloud.bigtable.enums.Instance.Type
@@ -74,10 +74,10 @@ class Instance(object): :param instance_type: (Optional) The type of the instance. Possible values are represented by the following constants: - :data:`google.cloud.bigtable.enums.InstanceType.PRODUCTION`. - :data:`google.cloud.bigtable.enums.InstanceType.DEVELOPMENT`, + :data:`google.cloud.bigtable.enums.Instance.Type.PRODUCTION`. + :data:`google.cloud.bigtable.enums.Instance.Type.DEVELOPMENT`, Defaults to - :data:`google.cloud.bigtable.enums.InstanceType.UNSPECIFIED`. + :data:`google.cloud.bigtable.enums.Instance.Type.UNSPECIFIED`. :type labels: dict :param labels: (Optional) Labels are a flexible and lightweight
SDK - Components - Only yaml component files can be used as source Previously, if the file was a .zip archive, some functions like exception printing would fail as it's not a text file.
@@ -270,5 +270,5 @@ def _create_task_factory_from_component_spec(component_spec:ComponentSpec, compo factory_function_parameters, documentation='\n'.join(func_docstring_lines), func_name=name, - func_filename=component_filename + func_filename=component_filename if (component_filename and (component_filename.endswith('.yaml') or component_filename.endswith('.yml'))) else None, )
Python3: For debug mode, use smarter deep hash for unicode strings. * The old code created cached UTF8 variants that used a lot of memory for all strings created. And that also broke some tests that check object sizes.
@@ -344,14 +344,11 @@ Py_hash_t DEEP_HASH( PyObject *value ) FETCH_ERROR_OCCURRED_UNTRACED( &exception_type, &exception_value, &exception_tb ); -#if PYTHON_VERSION >= 330 - Py_ssize_t size; - char const *s = PyUnicode_AsUTF8AndSize( value, &size ); +#if PYTHON_VERSION >= 300 + char const *s = (char const *)PyUnicode_DATA( value ); + Py_ssize_t size = PyUnicode_GET_LENGTH( value ) * PyUnicode_KIND( value ); - if ( s != NULL ) - { DEEP_HASH_BLOB( &result, s, size ); - } #else PyObject *str = PyUnicode_AsUTF8String( value );
Multipart manual Document the multiple parts functionality in the manual.
@@ -116,6 +116,18 @@ That can be prevented in two ways: #. Precede the field name with a ":" such as ``:note``. This makes KiCost ignore the field because it is in a different namespace. +------------------------ +Multiple Parts +------------------------ + +KiCost allow use of multiple parts with diferent quantities to one designator, an userfull resource to list parts to assembly conectors and so on. To use, the subparts have to be separeted by comma or semicolon and the quantity (optional, default is 1) separeted by ":". + +E.g., em maf# field of KiCad: + + ' 5 : PART1;PART2 , PART3:0.5 , 1/2:PARTE4' + +It is allowed decimal and fractional subquantities. In the spreadsheet the total part is ceiled to the next interger. + ------------------------ Schematic Variants ------------------------
DOC: Example for scipy.sparse.linalg.lgmres Added a simple example for the usage of scipy.sparse.linalg.lgmres (Matrices taken from scipy.linalg.solve)
@@ -96,6 +96,17 @@ def lgmres(A, b, x0=None, tol=1e-5, maxiter=1000, M=None, callback=None, .. [2] A.H. Baker, PhD thesis, University of Colorado (2003). http://amath.colorado.edu/activities/thesis/allisonb/Thesis.ps + Examples + -------- + >>> from scipy.sparse import csc_matrix + >>> from scipy.sparse.linalg import lgmres + >>> A = csc_matrix([[3, 2, 0], [1, -1, 0], [0, 5, 1]], dtype=float) + >>> b = np.array([2, 4, -1], dtype=float) + >>> x, exitCode = lgmres(A, b) + >>> print(exitCode) # 0 indicates successful convergence + 0 + >>> np.allclose(A.dot(x), b) + True """ A,M,x,b,postprocess = make_system(A,M,x0,b)
BindingScope: make docstring typing more specific for bindings TN:
@@ -1260,7 +1260,7 @@ class BindingScope(ResolvedExpression): def __init__(self, expr, bindings, abstract_expr=None): """ :type expr: ResolvedExpression - :type bindings: list[AbstractExpression.Expr] + :type bindings: list[AbstractVariable.Expr] :type abstract_expr: None|AbstractExpression """ super(BindingScope, self).__init__(abstract_expr=abstract_expr)
Not current with the gcr.io/endpoints-release: updated the endpoints-runtime image tag
@@ -44,7 +44,7 @@ spec: spec: containers: - name: esp - image: gcr.io/endpoints-release/endpoints-runtime:1 + image: gcr.io/endpoints-release/endpoints-runtime:1.16.0 args: [ "--http2_port=9000", "--service=SERVICE_NAME",
Add an IndexError handler Add an IndexError handler when trying to get port_offset from node name.
@@ -553,7 +553,10 @@ class DbTransfer(object): else: self.is_relay = False + try: self.port_offset = int(nodeinfo[6].split("#")[1]) + except IndexError: + self.port_offset = 0 logging.debug( "node_info >> group=%d class=%d speedlimit=%f traffic_rate=%f mu_only=%d sort=%d name=%s port_offset=%d",
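An illustrative sketch (the node name is made up) of the case the new handler covers:

name = "node-1"                             # no "#<offset>" suffix in the node name
try:
    port_offset = int(name.split("#")[1])   # split("#") yields a single element, so [1] raises
except IndexError:
    port_offset = 0
assert port_offset == 0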
doc: authentication change username to user. As it seems to be a user object, reading the doc made me think it was only a string (username)
@@ -49,7 +49,7 @@ To access the user, just use ``self.scope["user"]`` in your consumer code:: class ChatConsumer(WebsocketConsumer): def connect(self, event): - self.username = self.scope["user"] + self.user = self.scope["user"] Custom Authentication
Update environment variables in app.json to make the deploy button actually work
"logo": "https://avatars.githubusercontent.com/u/5545431?v=3&s=100", "keywords": ["ci", "python", "django", "salesforce", "github"], "env": { - "SITE_URL": { - "description": "The base url to the site. Typically this should be https://<your_app>.herokuapp.com", - "value": "" - }, - "FROM_EMAIL": { - "description": "The email address outbound mail should be sent from", + "CONNECTED_APP_CALLBACK_URL": { + "description": "The callback url configured for the Connected App. You should be able to get these values from the command `cumulusci2 org connected_app` in your locally configured instance of CumulusCI", "value": "" }, "CONNECTED_APP_CLIENT_ID": { "CONNECTED_APP_CLIENT_SECRET": { "description": "The Client Secret of the Salesforce Connected App. You should be able to get these values from the command `cumulusci2 org connected_app` in your locally configured instance of CumulusCI", "value": "" }, - "CONNECTED_APP_CALLBACK_URL": { - "description": "The callback url configured for the Connected App. You should be able to get these values from the command `cumulusci2 org connected_app` in your locally configured instance of CumulusCI", + "DJANGO_AWS_ACCESS_KEY_ID": { + "description": "The Amazon Web Services ID used for S3 storage of media", + "value": "" + }, + "DJANGO_AWS_SECRET_ACCESS_KEY": { + "description": "The Amazon Web Services secret access key used for S3 storage of media", + "value": "" + }, + "DJANGO_AWS_STORAGE_BUCKET_NAME": { + "description": "The Amazon Web Services S3 storage bucket name the site should use", + "value": "" + }, + "DJANGO_ADMIN_URL": { + "description": "The url path to the admin section of the site", + "value": "admin" + }, + "DJANGO_SECRET_KEY": { + "description": "The url path to the admin section of the site", + "generator": "secret" + }, + "DJANGO_SETTINGS_MODULE": { + "description": "The site settings to use", + "value": "config.settings.production" + }, + "FROM_EMAIL": { + "description": "The email address outbound mail should be sent from", "value": "" }, "GITHUB_USERNAME": { "description": "The url for handling Github webhooks. Ex: https://<app-name>.herokuapp.com/webhook/github", "generator": "secret" }, - "DJANGO_ADMIN_URL": { - "description": "The url path to the admin section of the site", - "value": "admin" - }, - "DJANGO_SECRET_KEY": { - "description": "The url path to the admin section of the site", - "generator": "secret" - }, - "DJANGO_SETTINGS_MODULE": { - "description": "The site settings to use", - "generator": "config.settings.production" - }, - "SFDX_PRIVATE_KEY": { + "SFDX_HUB_KEY": { "description": "The private key for the JWT authentication to the devhub for Salesforce DX. Required to use scratch orgs", "value": "", "required": false "description": "The username for the devhub org for Salesforce DX. Required to use scratch orgs", "value": "", "required": false + }, + "SITE_URL": { + "description": "The base url to the site. Typically this should be https://<your_app>.herokuapp.com", + "value": "" } }, "formation": {
Fix phoboslog module docstring Phoboslog was not built in the API doc, because a linebreak was not escaped correctly.
""" .. module:: phobos.phoboslog :platform: Unix, Windows, Mac - :synopsis: TODO: This module offers a simple way to log messages from phobos and uses blender integrated tools + :synopsis: Offers a simple way to log messages from Phobos and uses Blender integrated tools \ to display them. .. moduleauthor:: Ole Schwiegert, Kai von Szadkowski, Simon Reichel
[L10N] Fallbacks for pt and pt-br wowwiki has sites with pt and pt-br codes. Set their fallbacks to pt-br and pt
@@ -213,6 +213,8 @@ _LANG_TO_GROUP_NAME = defaultdict(str, { 'pms': 'eml', 'pnt': 'grc', 'ps': 'azb', + 'pt': 'pt', + 'pt-br': 'pt', 'qu': 'an', 'rm': 'rm', 'rmy': 'mo', @@ -339,6 +341,7 @@ _GROUP_NAME_TO_FALLBACKS = { 'nso': ['st', 'nso'], 'oc': ['fr', 'ca', 'es'], 'olo': ['fi'], + 'pt': ['pt', 'pt-br'], 'rm': ['de', 'it'], 'roa-rup': ['roa-rup', 'rup', 'ro'], 'rue': ['uk', 'ru'],
Enhance stats output * Don't display the summary if a suite only contains a single benchmark * Don't display benchmark names if all suites contain a single benchmark and all benchmarks have the same name
@@ -217,6 +217,14 @@ class Benchmarks: for filename in filenames: self.load_benchmark_suite(filename) + def has_same_unique_benchmark(self): + "True if all suites have one benchmark with the same name" + if any(len(suite) > 1 for suite in self.suites): + return False + names = self.suites[0].get_benchmark_names() + return all(suite.get_benchmark_names() == names + for suite in self.suites[1:]) + def include_benchmark(self, name): for suite in self.suites: try: @@ -411,7 +419,7 @@ def display_benchmarks(args, show_metadata=False, hist=False, stats=False, if use_title: show_filename = (data.get_nsuite() > 1) - show_name = show_filename or (len(data.suites[0]) > 1) + show_name = not data.has_same_unique_benchmark() if not show_filename and stats: show_filename = (len(data) > 1) @@ -446,7 +454,7 @@ def display_benchmarks(args, show_metadata=False, hist=False, stats=False, suite = item.suite format_title(item.filename, 1, lines=lines) - if stats: + if stats and len(suite) > 1: empty_line(lines) duration = suite.get_total_duration()
Add regression test Closes sympy/sympy#10445
@@ -36,6 +36,9 @@ def test_improper_integral(): assert integrate(log(x), (x, 0, 1)) == -1 assert integrate(x**(-2), (x, 1, oo)) == 1 + # issue sympy/sympy#10445: + assert integrate(1/(1 + exp(x)), (x, 0, oo)) == log(2) + def test_constructor(): # this is shared by Sum, so testing Integral's constructor
[DPER3][Shape inference] Update Shape Information in dper3 backend Summary: Pull Request resolved:
@@ -152,6 +152,9 @@ def SetPreLoadBlobs(meta_net_def, pre_load_blobs): for blob in pre_load_blobs: meta_net_def.preLoadBlobs.append(blob) +def SetTensorBoundShapes(meta_net_def, tensor_bound_shapes): + meta_net_def.tensorBoundShapes.CopyFrom(tensor_bound_shapes) + def GetArgumentByName(net_def, arg_name): for arg in net_def.arg: if arg.name == arg_name:
Using items() instead of six.iteritems() We'd better not use six.iteritems(); see the following doc
@@ -16,7 +16,6 @@ import os import re import pep8 -import six def flake8ext(f): @@ -61,7 +60,7 @@ def _regex_for_level(level, hint): log_translation_hint = re.compile( '|'.join('(?:%s)' % _regex_for_level(level, hint) - for level, hint in six.iteritems(_all_log_levels))) + for level, hint in _all_log_levels.items())) log_warn = re.compile( r"(.)*LOG\.(warn)\(\s*('|\"|_)")
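A minimal sketch (dict contents are hypothetical stand-ins for _all_log_levels) showing that plain items() already gives a lazy view on Python 3:

levels = {'error': 'hint_e', 'info': 'hint_i'}   # made-up contents

pairs = '|'.join('%s:%s' % (level, hint) for level, hint in levels.items())
# identical iteration behaviour to six.iteritems(levels), without the six dependency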
added 'exclude_cluster_group'; should be a list containing 'noise', 'mua', 'good', 'unsorted' (case insensitive). Check for valid 'exclude_cluster_group' + message on what went wrong. Check if the detected cluster group is part of the default names. Removed cluster_group_int. Removed 'cluster_group_int' unit property. Renamed 'cluster_group_name' property to 'quality'.
@@ -59,7 +59,7 @@ class KlustaSortingExtractor(SortingExtractor): default_cluster_groups = {0: 'Noise', 1: 'MUA', 2: 'Good', 3: 'Unsorted'} - def __init__(self, file_or_folder_path): + def __init__(self, file_or_folder_path, exclude_cluster_groups=None): assert HAVE_KLSX, "To use the KlustaSortingExtractor install h5py: \n\n pip install h5py\n\n" SortingExtractor.__init__(self) kwik_file_or_folder = Path(file_or_folder_path) @@ -89,11 +89,18 @@ class KlustaSortingExtractor(SortingExtractor): self._unit_ids = [] unique_units = [] klusta_units = [] - cluster_groups_int = [] cluster_groups_name = [] groups = [] unit = 0 + cs_to_exclude = [] + valid_group_names = [i[1].lower() for i in self.default_cluster_groups.items()] + if exclude_cluster_groups is not None: + assert isinstance(exclude_cluster_groups, list), 'exclude_cluster_groups should be a list' + for ec in exclude_cluster_groups: + assert ec in valid_group_names, f'select exclude names out of: {valid_group_names}' + cs_to_exclude.append(ec.lower()) + for channel_group in kf_reader.get('/channel_groups'): chan_cluster_id_arr = kf_reader.get(f'/channel_groups/{channel_group}/spikes/clusters/main')[()] chan_cluster_times_arr = kf_reader.get(f'/channel_groups/{channel_group}/spikes/time_samples')[()] @@ -106,13 +113,19 @@ class KlustaSortingExtractor(SortingExtractor): st = chan_cluster_times_arr[cluster_frame_idx] assert st.shape[0] > 0, 'no spikes in cluster' # this shouldnt happen cluster_group = kf_reader.get(f'/channel_groups/{channel_group}/clusters/main/{cluster_id}').attrs['cluster_group'] + + assert cluster_group in self.default_cluster_groups.keys(), f'cluster_group not in "default_dict: {cluster_group}' + cluster_group_name = self.default_cluster_groups[cluster_group] + + if cluster_group_name.lower() in cs_to_exclude: + continue + self._spiketrains.append(st) klusta_units.append(int(cluster_id)) unique_units.append(unit) unit += 1 groups.append(int(channel_group)) - cluster_groups_int.append(cluster_group) - cluster_groups_name.append(self.default_cluster_groups[cluster_group]) + cluster_groups_name.append(cluster_group_name) if len(np.unique(klusta_units)) == len(np.unique(unique_units)): self._unit_ids = klusta_units @@ -121,8 +134,7 @@ class KlustaSortingExtractor(SortingExtractor): self._unit_ids = unique_units for i, u in enumerate(self._unit_ids): self.set_unit_property(u, 'group', groups[i]) - self.set_unit_property(u, 'cluster_group', cluster_groups_int[i]) - self.set_unit_property(u, 'cluster_group_name', cluster_groups_name[i]) + self.set_unit_property(u, 'quality', cluster_groups_name[i].lower()) self._kwargs = {'file_or_folder_path': str(Path(file_or_folder_path).absolute())}
Oops, fix javah doc to suggest -h, not -d [ci skip] Listed the wrong option in the workaround suggestion.
@@ -104,7 +104,7 @@ belonging to an older release if there are multiple Java versions on the system, which will lead to incorrect results. To use with a newer Java, override the default values of &cv-link-JAVAH; (to contain the path to the <command>javac</command>) -and &cv-link-JAVAHFLAGS; (to contain at least a <option>-d</option> +and &cv-link-JAVAHFLAGS; (to contain at least a <option>-h</option> flag) and note that generating headers with <command>javac</command> requires supplying source <filename>.java</filename> files only,
Fix polyval overloads * add polyval overloads * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see
@@ -2012,14 +2012,19 @@ def test_where_attrs() -> None: ), ], ) -def test_polyval(use_dask, x: T_Xarray, coeffs: T_Xarray, expected) -> None: +def test_polyval( + use_dask: bool, + x: xr.DataArray | xr.Dataset, + coeffs: xr.DataArray | xr.Dataset, + expected: xr.DataArray | xr.Dataset, +) -> None: if use_dask: if not has_dask: pytest.skip("requires dask") coeffs = coeffs.chunk({"degree": 2}) x = x.chunk({"x": 2}) with raise_if_dask_computes(): - actual = xr.polyval(coord=x, coeffs=coeffs) + actual = xr.polyval(coord=x, coeffs=coeffs) # type: ignore xr.testing.assert_allclose(actual, expected)
Fix Rotek.RTBS profile HG-- branch : feature/macdb
@@ -185,7 +185,7 @@ class Script(BaseScript): iface = { "type": "physical", "name": "%s.%s" % (ifname, ri[1]["ssid"]), - "admin_status": a_status, + "admin_status": a_stat, "oper_status": o_status, "mac": mac, "snmp_ifindex": match.group("ifindex"), @@ -194,9 +194,8 @@ class Script(BaseScript): "subinterfaces": [{ "name": "%s.%s" % (ifname, ri[1]["ssid"]), "enabled_afi": ["BRIDGE"], - "admin_status": a_status, + "admin_status": a_stat, "oper_status": o_status, - "mtu": mtu, "mac": mac, "snmp_ifindex": match.group("ifindex"), "untagged_vlan": int(ri[1]["vlan"]),
utils/log: log_error marks Exception as logged. Mark the Exception as logged inside log_error, to prevent it from being logged repeatedly if re-raised.
@@ -165,6 +165,8 @@ def log_error(e, logger, critical=False): log_func(tb) log_func('{}({})'.format(e.__class__.__name__, e)) + e.logged = True + class ErrorSignalHandler(logging.Handler): """
suspicious self import potential trouble maker in the future
import copy import scipy -import tequila as tq -from tequila import braket +from tequila import braket, QTensor, simulate from tequila.hamiltonian.qubit_hamiltonian import QubitHamiltonian @@ -25,8 +24,8 @@ def krylov_method(krylov_circs:list, H:QubitHamiltonian, variables:dict=None, as """ n_krylov_states = len(krylov_circs) - HM = tq.QTensor(shape=[n_krylov_states,n_krylov_states]) - SM = tq.QTensor(shape=[n_krylov_states,n_krylov_states]) + HM = QTensor(shape=[n_krylov_states,n_krylov_states]) + SM = QTensor(shape=[n_krylov_states,n_krylov_states]) if variables is not None: krylov_circs_x = [U.map_variables(variables) for U in krylov_circs] @@ -46,8 +45,8 @@ def krylov_method(krylov_circs:list, H:QubitHamiltonian, variables:dict=None, as SM[i,j] = s_real + 1j*s_im SM[j,i] = s_real - 1j*s_im - h = tq.simulate(HM, *args, **kwargs) - s = tq.simulate(SM, *args, **kwargs) + h = simulate(HM, *args, **kwargs) + s = simulate(SM, *args, **kwargs) v,vv = scipy.linalg.eigh(h,s)
Update tests.py update the tests so that the bad_mask feature of pseudocolor is covered by tests Note, the test for the feature "mask_bad" for function pcv.visualize.pseudocolor is inside the test for pcv.threshold.mask_bad.
@@ -5954,6 +5954,9 @@ def test_plantcv_threshold_mask_bad_native(): l22 = len(np.unique(mask22)) assert ((np.shape(mask22) == sz) and (l22 == 2)) + # test on pseudocolor + pcv.visualize.pseudocolor(bad_img, bad_mask=mask22, bad_color="red") + def test_plantcv_threshold_mask_bad_native_bad_input(): # Test cache directory
fix bug in buildtest buildspec find where names of tests were not picked up properly
@@ -81,7 +81,7 @@ def func_buildspec_find(args): invalid_buildspecs[buildspec] = err continue - recipe = parse.recipe + recipe = parse.recipe["buildspecs"] cache[buildspec] = {} cache[buildspec]["sections"] = []
fix: Whitelist schema.org attributes so that they can be used in HTML Editor in web pages
@@ -163,7 +163,8 @@ acceptable_attributes = [ 'width', 'wrap', 'xml:lang', 'data-row', 'data-list', 'data-language', 'data-value', 'role', 'frameborder', 'allowfullscreen', 'spellcheck', 'data-mode', 'data-gramm', 'data-placeholder', 'data-comment', - 'data-id', 'data-denotation-char' + 'data-id', 'data-denotation-char', 'itemprop', 'itemscope', + 'itemtype', 'itemid', 'itemref', 'datetime' ] mathml_attributes = [
Trying a fix to anaconda logins on nightlies Summary: Pull Request resolved:
@@ -742,7 +742,7 @@ binary_linux_test_and_upload: &binary_linux_test_and_upload set +x echo 'If there is no more output then logging into Anaconda failed' timeout 30 \ - anaconda login \ + yes | anaconda login \ --username '"$CONDA_USERNAME"' \ --password '"$CONDA_PASSWORD"' \ >/dev/null 2>&1 @@ -910,7 +910,7 @@ binary_mac_upload: &binary_mac_upload set +x echo 'If there is no more output then logging into Anaconda failed' /usr/local/bin/gtimeout 30 \ - anaconda login \ + yes | anaconda login \ --username '"$CONDA_USERNAME"' \ --password '"$CONDA_PASSWORD"' \ >/dev/null 2>&1
Fix new steer saturated warning with joystick mode Fix steer sat warning with joystick mode
@@ -602,14 +602,14 @@ class Controls: lac_log.saturated = abs(actuators.steer) >= 0.9 # Send a "steering required alert" if saturation count has reached the limit - if lac_log.active and not CS.steeringPressed and self.CP.lateralTuning.which() == 'torque': + if lac_log.active and not CS.steeringPressed and self.CP.lateralTuning.which() == 'torque' and not self.joystick_mode: undershooting = abs(lac_log.desiredLateralAccel) / abs(1e-3 + lac_log.actualLateralAccel) > 1.2 turning = abs(lac_log.desiredLateralAccel) > 1.0 good_speed = CS.vEgo > 5 max_torque = abs(self.last_actuators.steer) > 0.99 if undershooting and turning and good_speed and max_torque: self.events.add(EventName.steerSaturated) - elif lac_log.active and lac_log.saturated and not CS.steeringPressed: + elif lac_log.active and not CS.steeringPressed and lac_log.saturated: dpath_points = lat_plan.dPathPoints if len(dpath_points): # Check if we deviated from the path
Update 008-blink_world_context.patch Replaced usage of PassRefPtr with RefPtr in Source/core
@@ -8,7 +8,7 @@ index a93834fbe86e..937b7f335693 100644 +v8::Local<v8::Context> WebLocalFrameImpl::WorldScriptContext( + v8::Isolate* isolate, int world_id) const { -+ PassRefPtr<DOMWrapperWorld> world = DOMWrapperWorld::EnsureIsolatedWorld( ++ RefPtr<DOMWrapperWorld> world = DOMWrapperWorld::EnsureIsolatedWorld( + isolate, world_id); + return ToScriptState(GetFrame(), *world)->GetContext(); +}
DOC: update OptimizeResult notes * DOC: update OptimizeResult notes Address the problem of `Notes` being placed before `Attributes` Edit the spelling in the first sentence.
@@ -111,10 +111,10 @@ class OptimizeResult(dict): Notes ----- - There may be additional attributes not listed above depending of the - specific solver. Since this class is essentially a subclass of dict - with attribute accessors, one can see which attributes are available - using the `keys()` method. + `OptimizeResult` may have additional attributes not listed here depending + on the specific solver being used. Since this class is essentially a + subclass of dict with attribute accessors, one can see which + attributes are available using the `OptimizeResult.keys` method. """ def __getattr__(self, name):
Fix bug Update to latest LTS node.js Also update babel to latest version.
"description": "Mozilla in-place localization tool.", "private": true, "dependencies": { - "babel": "^5.4.3", + "babel": "6.23.0", "yuglify": "0.1.4" }, "engines": { - "node": "0.12.2" + "node": "6.9.5" }, "repository": { "type": "git",
Update data URL CIFAR website is now using https. Since urllib does not follow the http to https redirection and thus never retrieves any data, the data URL needs an update.
@@ -78,7 +78,7 @@ INITIAL_LEARNING_RATE = 0.1 # Initial learning rate. # names of the summaries when visualizing a model. TOWER_NAME = 'tower' -DATA_URL = 'http://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz' +DATA_URL = 'https://www.cs.toronto.edu/~kriz/cifar-10-binary.tar.gz' def _activation_summary(x):
Cast.construct: fix rtype type name TN: minor
@@ -110,7 +110,7 @@ class Cast(AbstractExpression): Construct a resolved expression that is the result of casting a AST node. - :rtype: CastExpr + :rtype: Cast.Expr """ expr = construct( self.expr,
fontconfig/2.13.93: remove dependency on libgettext * [package] fontconfig/2.13.93: make libgettext optional Add with_nls option, default maintains status quo, makes libgettext optional on macos. * Disable nls unconditionally Remove dependency on libgettext on macos. The feature is only lightly used and apparently there is only one language in translations.
@@ -40,8 +40,6 @@ class FontconfigConan(ConanFile): self.requires("expat/2.2.10") if self.settings.os == "Linux": self.requires("libuuid/1.0.3") - elif self.settings.os == "Macos": - self.requires("libgettext/0.20.1") def build_requirements(self): self.build_requires("gperf/3.1") @@ -58,7 +56,9 @@ class FontconfigConan(ConanFile): if not self._autotools: args = ["--enable-static=%s" % ("no" if self.options.shared else "yes"), "--enable-shared=%s" % ("yes" if self.options.shared else "no"), - "--disable-docs"] + "--disable-docs", + "--disable-nls", + ] args.append("--sysconfdir=%s" % tools.unix_path(os.path.join(self.package_folder, "bin", "etc"))) args.append("--datadir=%s" % tools.unix_path(os.path.join(self.package_folder, "bin", "share"))) args.append("--datarootdir=%s" % tools.unix_path(os.path.join(self.package_folder, "bin", "share")))
fixed forwardConjugate The index of refraction of the Space matrix is now the back index of the current object. Since we "add" the Space after the current matrix (self), its index of refraction should match the back index of the current matrix.
@@ -1180,7 +1180,7 @@ class Matrix(object): conjugateMatrix = None # Unable to compute with inf else: distance = -self.B / self.D - conjugateMatrix = Space(d=distance) * self + conjugateMatrix = Space(d=distance, n=self.backIndex) * self return (distance, conjugateMatrix)
del_surrogate for HTML inline URLs Closes
@@ -86,7 +86,7 @@ class HTMLToTelegramParser(HTMLParser): EntityType = MessageEntityUrl else: EntityType = MessageEntityTextUrl - args['url'] = url + args['url'] = _del_surrogate(url) url = None self._open_tags_meta.popleft() self._open_tags_meta.appendleft(url)
ENH: do not dereference NULL pointer The pointer 'current' could be NULL in case the above 'while (current != NULL) {..}' loop finishes without break.
@@ -5178,7 +5178,7 @@ PyUFunc_RegisterLoopForDescr(PyUFuncObject *ufunc, } current = current->next; } - if (cmp == 0 && current->arg_dtypes == NULL) { + if (cmp == 0 && current != NULL && current->arg_dtypes == NULL) { current->arg_dtypes = PyArray_malloc(ufunc->nargs * sizeof(PyArray_Descr*)); if (arg_dtypes != NULL) {
feat: add ajax functionality to logout This PR is necessary for single page applications to handle redirects on logout.
@@ -765,15 +765,18 @@ class LogoutView(TemplateResponseMixin, View): if app_settings.LOGOUT_ON_GET: return self.post(*args, **kwargs) if not self.request.user.is_authenticated: - return redirect(self.get_redirect_url()) + response = redirect(self.get_redirect_url()) + return _ajax_response(self.request, response) ctx = self.get_context_data() - return self.render_to_response(ctx) + response = self.render_to_response(ctx) + return _ajax_response(self.request, response) def post(self, *args, **kwargs): url = self.get_redirect_url() if self.request.user.is_authenticated: self.logout() - return redirect(url) + response = redirect(url) + return _ajax_response(self.request, response) def logout(self): adapter = get_adapter(self.request)
[docs] preserve iframe_figure directory during build * [docs] preserve iframe_figure directory during build Currently none of the figures are copied with the notebook into the docs. * remove old target
@@ -47,16 +47,11 @@ help: clean: rm -rf $(BUILDDIR) -.PHONY: tutorials-tar -tutorials-tar: $(BUILDDIR)/html/tutorials.tar.gz - -$(BUILDDIR)/html/tutorials.tar.gz: $(wildcard tutorials/*.ipynb) - mkdir -p $(BUILDDIR)/html - tar cvf $(BUILDDIR)/html/tutorials.tar.gz $^ - .PHONY: html -html: tutorials-tar +html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html + rsync -avz tutorials/iframe_figures/ $(BUILDDIR)/html/tutorials/iframe_figures/ # https://github.com/plotly/plotly.py/blob/master/packages/python/plotly/plotly/io/_base_renderers.py#L522-L529 + tar cvf $(BUILDDIR)/html/tutorials.tar.gz $(BUILDDIR)/html/tutorials @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
Fixes resolving of tag parameter for repo.tag I accessed private variables instead of adding getters, because other parts of the code do the same and I didn't know if there was a reason for it. E.g.: remote.py line 409: (...) RemoteReference._common_path_default (...)
@@ -402,7 +402,18 @@ class Repo(object): def tag(self, path: PathLike) -> TagReference: """:return: TagReference Object, reference pointing to a Commit or Tag :param path: path to the tag reference, i.e. 0.1.5 or tags/0.1.5 """ - return TagReference(self, path) + full_path = self._to_full_tag_path(path) + return TagReference(self, full_path) + + @staticmethod + def _to_full_tag_path(path: PathLike): + if path.startswith(TagReference._common_path_default + '/'): + return path + if path.startswith(TagReference._common_default + '/'): + return Reference._common_path_default + '/' + path + else: + return TagReference._common_path_default + '/' + path + def create_head(self, path: PathLike, commit: str = 'HEAD', force: bool = False, logmsg: Optional[str] = None
kboot: Provide MAC addresses in device tree Read the MAC addresses from the ADT and store them in "local-mac-address" properties on the relevant nodes in the FDT. This relies on appropriate aliases in the provided FDT, which matches how U-Boot provides MAC addresses on embedded targets.
@@ -244,6 +244,41 @@ static int dt_set_cpus(void) return 0; } +static const char *aliases[] = { + "bluetooth0", + "ethernet0", + "wifi0", +}; + +static int dt_set_mac_addresses(void) +{ + int anode = adt_path_offset(adt, "/chosen"); + + if (anode < 0) + bail("ADT: /chosen not found\n"); + + for (size_t i = 0; i < sizeof(aliases) / sizeof(*aliases); i++) { + char propname[32]; + sprintf(propname, "mac-address-%s", aliases[i]); + + uint8_t addr[6]; + if (ADT_GETPROP_ARRAY(adt, anode, propname, addr) < 0) + continue; + + const char *path = fdt_get_alias(dt, aliases[i]); + if (path == NULL) + continue; + + int node = fdt_path_offset(dt, path); + if (node < 0) + continue; + + fdt_setprop(dt, node, "local-mac-address", addr, sizeof(addr)); + } + + return 0; +} + void kboot_set_initrd(void *start, size_t size) { initrd_start = start; @@ -292,6 +327,8 @@ int kboot_prepare_dt(void *fdt) return -1; if (dt_set_cpus()) return -1; + if (dt_set_mac_addresses()) + return -1; if (fdt_pack(dt)) bail("FDT: fdt_pack() failed\n");
Fix a broken link in the installation doc Fix a broken link for 'Vitrage-dashboard manual installation' in the vitrage installation doc. Closes-Bug:
@@ -21,7 +21,7 @@ Manual Installation of Vitrage (not using Devstack) * `Vitrage manual installation <https://github.com/openstack/vitrage/blob/master/doc/source/vitrage-manual-installation.rst>`_ -* `Vitrage-dashboard manual installation <https://github.com/openstack/vitrage-dashboard/blob/master/doc/source/vitrage-dashboard-manual-installation.rst>`_ +* `Vitrage-dashboard manual installation <https://github.com/openstack/vitrage-dashboard/blob/master/doc/source/installation.rst>`_ External Monitor Installation
Fix minor typo in modeladmin docs add_to_setings_menu -> add_to_settings_menu
@@ -66,7 +66,7 @@ greater than that if you wish to keep the explorer menu item at the top. **Expected value**: ``True`` or ``False`` If you'd like the menu item for your model to appear in Wagtail's 'Settings' -sub-menu instead of at the top level, add ``add_to_setings_menu = True`` to +sub-menu instead of at the top level, add ``add_to_settings_menu = True`` to your ``ModelAdmin`` class. This will only work for indivdual ``ModelAdmin`` classes registered with their
Remove redundant _ensure_default_security_group _ensure_default_security_group() is done in _ensure_default_security_group_handler [1] on each port BEFORE_CREATE event. No need to ensure once again right after sending this event. [1] TrivialFix
@@ -1395,9 +1395,6 @@ class Ml2Plugin(db_base_plugin_v2.NeutronDbPluginV2, registry.notify(resources.PORT, events.BEFORE_CREATE, self, context=context, port=attrs) - # NOTE(kevinbenton): triggered outside of transaction since it - # emits 'AFTER' events if it creates. - self._ensure_default_security_group(context, attrs['tenant_id']) def _create_port_db(self, context, port): attrs = port[port_def.RESOURCE_NAME]
Exercise uncaught OSError bugs on Windows. This exercises uncaught OSError issues when non-Windows safe characters are passed to Pad.get and Database.iter_items.
@@ -315,3 +315,18 @@ def test_offset_without_limit_query(pad): x = projects.children.offset(1).order_by("_slug").first() assert x["name"] == "Coffee" + + +def test_Pad_get_invalid_path(pad): + # On windows '<' and/or '>' are invalid in filenames. These were + # causing an OSError(errno=EINVAL) exception in Database.load_raw_data + # that was not being caught. This test exercises that. + assert pad.get("/<foo>") is None + + +def test_Database_iter_items_invalid_path(env): + # Check that there is no problem with uncaught + # OSError(errno=EINVAL) when a path contains non-filename-safe + # characters in Database.iter_items. + db = Database(env) + assert len(list(db.iter_items("/<foo>"))) == 0
search_icon: Fix contrast color of icon. Fixes
@@ -1641,10 +1641,12 @@ div.focused_table { } .search_icon { - color: hsl(0, 0%, 80%); + /* These rules match the .dropdown-toggle CSS for the gear menu. */ + color: inherit; + opacity: 0.5; text-decoration: none; &:hover { - color: hsl(0, 0%, 0%); + opacity: 1; } }
Failed to load /var/cache/salt/minion/roots/mtime_map when it contains invalid data. encoding was forced by but decoding exceptions are not handled. This will be fixed by this PR. Fixes:
@@ -172,7 +172,7 @@ def update(): mtime_map_path, line, ) - except OSError: + except (OSError, UnicodeDecodeError): pass # compare the maps, set changed to the return value @@ -266,6 +266,7 @@ def file_hash(load, fnd): except ( os.error, OSError, + UnicodeDecodeError, ): # Can't use Python select() because we need Windows support log.debug("Fileserver encountered lock when reading cache file. Retrying.") # Delete the file since its incomplete (either corrupted or incomplete)
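A small illustrative sketch of why decoding errors need the same treatment as OSError when reading the cache:

data = b"\xff\xfe not valid utf-8"   # hypothetical cache contents
try:
    data.decode("utf-8")
except UnicodeDecodeError:
    pass   # treat the cache entry as unreadable, just like an OSError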
master: add a comment to explain what's going on, and fix log msg. Closes
@@ -109,9 +109,13 @@ def create_child(*args): os.execvp(args[0], args) childfp.close() + # Decouple the socket from the lifetime of the Python socket object. + fd = os.dup(parentfp.fileno()) + parentfp.close() + LOG.debug('create_child() child %d fd %d, parent %d, cmd: %s', - pid, parentfp.fileno(), os.getpid(), Argv(args)) - return pid, os.dup(parentfp.fileno()) + pid, fd, os.getpid(), Argv(args)) + return pid, fd def flags(names):
Re-order nav items by geographic size As pointed out by this makes more sense.
@@ -86,8 +86,8 @@ h.end=i=function(){s.className=s.className.replace(RegExp(' ?'+y),'')}; <div id="navbar" class="collapse navbar-collapse"> <ul class="nav navbar-nav {% block active_class %}{% endblock %}"> <li class="active_analyse"><a href="{% url 'analyse' %}">Analyse</a></li> - <li class="active_ccg"><a href="{% url 'all_ccgs' %}">CCG dashboards</a></li> <li class="active_practice"><a href="{% url 'all_practices' %}">Practice dashboards</a></li> + <li class="active_ccg"><a href="{% url 'all_ccgs' %}">CCG dashboards</a></li> <li class="active_all_england"><a href="{% url 'all_england' %}">All England dashboard</a></li> <li class="dropdown active_trends"> <a href="#" class="dropdown-toggle" data-toggle="dropdown" role="button" aria-haspopup="true" aria-expanded="false">Trends <span class="caret"></span></a>
Update path to logo in README. Earlier, the logo wasn't rendering on the PyPI page.
-<p align="center"><img src="website/static/img/Hydra-Readme-logo2.svg" alt="logo" width="70%" /></p> +<p align="center"><img src="https://raw.githubusercontent.com/facebookresearch/hydra/master/website/static/img/Hydra-Readme-logo2.svg" alt="logo" width="70%" /></p> <p align="center"> <a href="https://pypi.org/project/hydra-core/">
[varLib] Variate GDEF LigCarets Untested.
@@ -784,8 +784,17 @@ class InstancerMerger(AligningMerger): self.location = location self.scalars = model.getScalars(location) [email protected](ot.CaretValue) +def merge(merger, self, lst): + assert self.Format == 1 + Coords = [a.Coordinate for a in lst] + model = merger.model + scalars = merger.scalars + self.Coordinate = otRound(model.interpolateFromMastersAndScalars(Coords, scalars)) + @InstancerMerger.merger(ot.Anchor) def merge(merger, self, lst): + assert self.Format == 1 XCoords = [a.XCoordinate for a in lst] YCoords = [a.YCoordinate for a in lst] model = merger.model @@ -833,6 +842,26 @@ class MutatorMerger(AligningMerger): self.instancer = VarStoreInstancer(store, font['fvar'].axes, location) [email protected](ot.CaretValue) +def merge(merger, self, lst): + + # Hack till we become selfless. + self.__dict__ = lst[0].__dict__.copy() + + if self.Format != 3: + return + + instancer = merger.instancer + dev = self.DeviceTable + del self.DeviceTable + if dev: + assert dev.DeltaFormat == 0x8000 + varidx = (dev.StartSize << 16) + dev.EndSize + delta = otRound(instancer[varidx]) + self.Coordinate += delta + + self.Format = 1 + @MutatorMerger.merger(ot.Anchor) def merge(merger, self, lst): @@ -927,6 +956,14 @@ def buildVarDevTable(store_builder, master_values): base, varIdx = store_builder.storeMasters(master_values) return base, builder.buildVarDevTable(varIdx) [email protected](ot.CaretValue) +def merge(merger, self, lst): + assert self.Format == 1 + self.Coordinate, DeviceTable = buildVarDevTable(merger.store_builder, [a.Coordinate for a in lst]) + if DeviceTable: + self.Format = 3 + self.DeviceTable = DeviceTable + @VariationMerger.merger(ot.Anchor) def merge(merger, self, lst): assert self.Format == 1
Workaround for T111513. It is wasteful to interrupt the bot just because of an unsupported interwiki link. Here, we only test whether it *is* an interwiki link, and then skip it regardless of its syntax. This error only confirms that.
@@ -450,7 +450,12 @@ class CosmeticChangesToolkit(object): trailingChars = match.group('linktrail') newline = match.group('newline') - if not self.site.isInterwikiLink(titleWithSection): + try: + is_interwiki = self.site.isInterwikiLink(titleWithSection) + except ValueError: # T111513 + is_interwiki = True + + if not is_interwiki: # The link looks like this: # [[page_title|link_text]]trailing_chars # We only work on namespace 0 because pipes and linktrails work
HTCondorBatchSystem(): try iterating ClassAds with next(ads) before ads.next(). As of at least htcondor 8.8.1, only next(ads) works.
@@ -161,6 +161,9 @@ class HTCondorBatchSystem(AbstractGridEngineBatchSystem): # Make sure a ClassAd was returned try: + try: + ad = next(ads) + except TypeError: ad = ads.next() except StopIteration: logger.error( @@ -169,6 +172,9 @@ class HTCondorBatchSystem(AbstractGridEngineBatchSystem): # Make sure only one ClassAd was returned try: + try: + next(ads) + except TypeError: ads.next() except StopIteration: pass
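The same fallback could also be factored into one helper instead of nesting try/except twice; a sketch (the helper name is made up, not part of the patch):

def _next_classad(ads):
    """Advance a ClassAd iterator, preferring builtin next() (works on
    htcondor >= 8.8.1) and falling back to the legacy .next() method on
    older bindings. StopIteration propagates to the caller either way."""
    try:
        return next(ads)
    except TypeError:
        # Older htcondor query results expose .next() but not __next__().
        return ads.next()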
[tests/brightness] Create open call, if it does not exist Hopefully, this fixes the failing Travis build.
@@ -54,7 +54,7 @@ class TestBrightnessModule(unittest.TestCase): self.assertEquals(self.module.brightness(self.anyWidget), "020%") self.assertEquals(len(self.module.brightness(self.anyWidget)), len("100%")) - @mock.patch('bumblebee.modules.brightness.open') + @mock.patch('bumblebee.modules.brightness.open', create=True) def test_error(self,mock_open): mock_open.side_effect = FileNotFoundError self.module.update_all()
Update wuzhicms-sqli.yaml fix bug
@@ -26,7 +26,7 @@ requests: matchers: - type: word words: - - '{{md5({{num}})}}' + - 'c8c605999f3d8352d7bb792cf3fdb25' part: body - type: status
Adds _times and auxinfo members to Circuit objects for future expansion. Namely to work with future support for time-dependent models and for custom user data to be associated with a circuit (like the comment member of a DataSet).
@@ -220,6 +220,8 @@ class Circuit(object): raise ValueError("line labels must contain at least %s" % str(explicit_lbls)) self._str = stringrep #can be None (lazy generation) + self._times = None # for FUTURE expansion + self.auxinfo = {} # for FUTURE expansion / user metadata @property
Improves documentation for interpolate_to_grid to specify that linear distance or degrees can be the units of the input coordinates (and, consequently, of the resolution of the grid). Fixes
@@ -264,9 +264,9 @@ def interpolate_to_grid(x, y, z, interp_type='linear', hres=50000, Parameters ---------- x: array_like - x coordinate + x coordinate, can have units of linear distance or degrees y: array_like - y coordinate + y coordinate, can have units of linear distance or degrees z: array_like observation value interp_type: str @@ -314,6 +314,9 @@ def interpolate_to_grid(x, y, z, interp_type='linear', hres=50000, This function acts as a wrapper for `interpolate_points` to allow it to generate a regular grid. + This function interpolates points to a Cartesian plane, even if lat/lon coordinates + are provided. + See Also -------- interpolate_to_points
Update .travis.yml Fixed typo.
@@ -13,7 +13,7 @@ before_install: - pip install --upgrade pip - pip install poetry - pip install pre-commit - - poetry run python scipts/download_misc_dependencies.py + - poetry run python scripts/download_misc_dependencies.py install: - poetry install -v
LandBOSSE second integration Set the hub height to 80 m
@@ -220,6 +220,8 @@ def Init_LandBasedAssembly(prob, blade, Nsection_Tow, Analysis_Level=0, fst_vt={ # Set the machine rating prob['machine_rating'] = 1.5e6 + prob['hub_height'] = 80 + # >>>>>>>>>>>>>>> LandBOSSE inputs <<<<<<<<<<<<< # Leave all LandBOSSE inputs at their default for now.
Allow modification of max retries in OVSNeutronAgent Use class variable instead of module constant to allow modification of max_device_retries in classes inheriting from OVSNeutronAgent. Closes-Bug:
@@ -144,6 +144,7 @@ class OVSNeutronAgent(l2population_rpc.L2populationRpcCallBackTunnelMixin, # 1.5 Added binding_activate and binding_deactivate # 1.7 Add support for smartnic ports target = oslo_messaging.Target(version='1.7') + max_device_retries = constants.MAX_DEVICE_RETRIES def __init__(self, bridge_classes, ext_manager, conf=None): '''Constructor. @@ -2440,12 +2441,11 @@ class OVSNeutronAgent(l2population_rpc.L2populationRpcCallBackTunnelMixin, if sync: LOG.info("Agent out of sync with plugin!") consecutive_resyncs = consecutive_resyncs + 1 - if (consecutive_resyncs >= - constants.MAX_DEVICE_RETRIES): + if consecutive_resyncs >= self.max_device_retries: LOG.warning( "Clearing cache of registered ports," " retries to resync were > %s", - constants.MAX_DEVICE_RETRIES) + self.max_device_retries) ports.clear() ancillary_ports.clear() consecutive_resyncs = 0 @@ -2521,12 +2521,12 @@ class OVSNeutronAgent(l2population_rpc.L2populationRpcCallBackTunnelMixin, devices_not_to_retry = set() for dev in devices_set: retries = failed_devices_retries_map.get(dev, 0) - if retries >= constants.MAX_DEVICE_RETRIES: + if retries >= self.max_device_retries: devices_not_to_retry.add(dev) LOG.warning( "Device %(dev)s failed for %(times)s times and won't " "be retried anymore", { - 'dev': dev, 'times': constants.MAX_DEVICE_RETRIES}) + 'dev': dev, 'times': self.max_device_retries}) else: new_failed_devices_retries_map[dev] = retries + 1 return devices_not_to_retry
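The practical payoff of the class attribute is that a derived agent can tune the limit without monkey-patching the constants module; a hypothetical subclass (assuming OVSNeutronAgent is imported from the OVS agent module):

class PatientOVSNeutronAgent(OVSNeutronAgent):
    # Hypothetical vendor subclass: overriding the class attribute is enough,
    # since the base class now reads self.max_device_retries everywhere.
    max_device_retries = 10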
Changed supported FS7 version to 7.1.0. There was a typo in the supported version number; changed from 7.0.1 to 7.1.0.
generic: binaries: urls: - "7.0.1": https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.1.0/freesurfer-linux-centos6_x86_64-7.1.0.tar.gz + "7.1.0": https://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/7.1.0/freesurfer-linux-centos6_x86_64-7.1.0.tar.gz # 7.0.0 not included because it was recalled several days after release due to a bug. Replaced by 7.1.0. # From FreeSurfer team: we recommend that people NOT use 7.0.0 and use 7.1.0 instead. "6.0.1": ftp://surfer.nmr.mgh.harvard.edu/pub/dist/freesurfer/6.0.1/freesurfer-Linux-centos6_x86_64-stable-pub-v6.0.1.tar.gz
Fixed cleanup of 'terminado' and 'tornado'. After creating the snap, these are directories containing files, so you cannot delete them via 'rm'. 'snapcraft clean nikola -s pull;snapcraft' produced an error.
@@ -52,6 +52,6 @@ parts: - ipykernel>=4.0.0 build-packages: [libjpeg-dev] install: | - find $SNAPCRAFT_PART_INSTALL -name '*.a' -exec rm {} \; - find $SNAPCRAFT_PART_INSTALL -name 'terminado' -type d -exec rm {} \; - find $SNAPCRAFT_PART_INSTALL -name 'tornado' -type d -exec rm {} \; + find $SNAPCRAFT_PART_INSTALL -name '*.a'|xargs -r rm + find $SNAPCRAFT_PART_INSTALL -name 'terminado' -type d|xargs -r rm -rf + find $SNAPCRAFT_PART_INSTALL -name 'tornado' -type d|xargs -r rm -rf
Generalize intrusive_ptr comment Summary: Pull Request resolved:
// weakcount == number of weak references to the object, // plus one more if refcount > 0 // -// - StorageImpl stays live as long as there are any strong +// - the underlying object stays live as long as there are any strong // or weak pointers to it (weakcount > 0, since strong // references count as a +1 to weakcount) // -// - finalizers are called and data_ptr is deallocated when refcount == 0 +// - underlying_object::release_resources() is called when refcount == 0 +// +// - the underlying object is destructed when weakcount == 0 (which implies +// refcount == 0) // // - Once refcount == 0, it can never again be > 0 (the transition // from > 0 to == 0 is monotonic) // -// - When you access StorageImpl via a weak pointer, you must -// atomically increment the use count, if it is greater than 0. -// If it is not, you must report that the storage is dead. -// struct THFinalizer { virtual void operator()() = 0;
Docstring for helper function and some cleanup per PEP8. Also, convert to 2 space indent to match TF Agents' style.
@@ -539,7 +539,6 @@ def get_outer_array_shape(nested_array, spec): return first_array.shape[:num_outer_dims] - def where(condition, true_outputs, false_outputs): """Generalization of tf.where where, returning a nest constructed though application of tf.where to the input nests. @@ -560,10 +559,18 @@ def where(condition, true_outputs, false_outputs): Interleaved output from `true_outputs` and `false_outputs` based on `condition`. """ + def _where_with_matching_ranks(t, f): + """Applies tf.where using a condition tensor whose rank may be less than + that of the true/false output tensors by padding the condition tensor's + shape with extra dimensions of size 1 to adhere to tf.where's parameter + requirements.""" rank_difference = tf.rank(t) - tf.rank(condition) - condition_shape = tf.concat([tf.shape(condition), tf.ones(rank_difference, dtype=tf.int32)], axis=0) + condition_shape = tf.concat([tf.shape(condition), + tf.ones(rank_difference, dtype=tf.int32)], + axis=0) reshaped_condition = tf.reshape(condition, condition_shape) return tf.where(reshaped_condition, t, f) - return tf.nest.map_structure(_where_with_matching_ranks, true_outputs, false_outputs) + return tf.nest.map_structure(_where_with_matching_ranks, true_outputs, + false_outputs)
predictionQueries.py updated. Reduced steps_per_epoch (previously equal to the number of images for that set).
@@ -691,6 +691,7 @@ class client: # google chrome logger("Generating datasets for classes...") input_shape = (224, 224, 3) + #Assuming Downloaded Images in current Directory data_path=os.getcwd() for a_class in argv: generate_data(a_class) @@ -712,22 +713,23 @@ class client: loss="binary_crossentropy", metrics=['accuracy']) - train_datagen = ImageDataGenerator(shear_range = 0.2, + train_data = ImageDataGenerator(shear_range = 0.2, zoom_range = 0.2, horizontal_flip = True) - X_train = train_datagen.flow_from_directory(data_path+'/training_set', + X_train = train_data.flow_from_directory(data_path+'/training_set', target_size = (224, 224), batch_size = 32, class_mode = 'binary') - X_test = ImageDataGenerator.flow_from_directory(data_path+'/test_set', + test_data=ImageDataGenerator() + X_test = test_data.flow_from_directory(data_path+'/test_set', target_size = (224, 224), batch_size = 32, class_mode = 'binary') #Fitting/Training the model history=model.fit_generator(generator=X_train, - steps_per_epoch=X_train.n, + steps_per_epoch=X_train.n//X_train.batch_size, validation_data=X_test, - validation_steps=X_test.n, + validation_steps=X_test.n//X_test.batch_size, epochs=10 ) # storing values the model dictionary @@ -747,6 +749,7 @@ class client: # google chrome logger("Generating datasets for classes...") input_shape = (224, 224, 3) + #Assuming Downloaded Images in current Directory data_path=os.getcwd() # creates the appropriate dataset @@ -769,22 +772,23 @@ class client: optimizer='adam', loss='categorical_crossentropy', metrics=['accuracy']) - train_datagen = ImageDataGenerator(shear_range = 0.2, + train_data = ImageDataGenerator(shear_range = 0.2, zoom_range = 0.2, horizontal_flip = True) - X_train = train_datagen.flow_from_directory(data_path+'/training_set', + X_train = train_data.flow_from_directory(data_path+'/training_set', target_size = (224, 224), batch_size = 32, class_mode = 'categorical') - X_test = ImageDataGenerator.flow_from_directory(data_path+'/test_set', + test_data=ImageDataGenerator() + X_test = test_data.flow_from_directory(data_path+'/test_set', target_size = (224, 224), batch_size = 32, class_mode = 'categorical') #Fitting/Training the model history=model.fit_generator(generator=X_train, - steps_per_epoch=X_train.n, + steps_per_epoch=X_train.n//X_train.batch_size, validation_data=X_test, - validation_steps=X_test.n, + validation_steps=X_test.n//X_test.batch_size, epochs=10 ) logger("Finishing task and storing information in model...")
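For context on why the division matters (hypothetical numbers): fit_generator treats steps_per_epoch as the number of batches drawn per epoch, so it should be the sample count divided by the batch size rather than the raw sample count.

n_train_images = 3200                            # hypothetical dataset size
batch_size = 32
steps_per_epoch = n_train_images // batch_size   # 100 batches = one pass over the data
# Passing n_train_images (3200) directly would make each "epoch" pull 3200
# batches from the generator, i.e. roughly 32 passes over the data per epoch.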
python-news escape fixes * Fix escapes in python-news posts No longer escapes markdown inside of codeblocks or pre-escaped markdown. * Add escaping to title of py-news posts * Fix typo
@@ -23,6 +23,14 @@ THREAD_URL = "https://mail.python.org/archives/list/{list}@python.org/thread/{id AVATAR_URL = "https://www.python.org/static/opengraph-icon-200x200.png" +# By first matching everything within a codeblock, +# when matching markdown it won't be within a codeblock +MARKDOWN_REGEX = re.compile( + r"(?P<codeblock>`.*?`)" # matches everything within a codeblock + r"|(?P<markdown>(?<!\\)[_|])", # matches unescaped `_` and `|` + re.DOTALL # required to support multi-line codeblocks +) + log = logging.getLogger(__name__) @@ -76,8 +84,11 @@ class PythonNews(Cog): @staticmethod def escape_markdown(content: str) -> str: - """Escape the markdown underlines and spoilers.""" - return re.sub(r"[_|]", lambda match: "\\" + match[0], content) + """Escape the markdown underlines and spoilers that aren't in codeblocks.""" + return MARKDOWN_REGEX.sub( + lambda match: match.group("codeblock") or "\\" + match.group("markdown"), + content + ) async def post_pep_news(self) -> None: """Fetch new PEPs and when they don't have announcement in #python-news, create it.""" @@ -109,7 +120,7 @@ class PythonNews(Cog): # Build an embed and send a webhook embed = discord.Embed( - title=new["title"], + title=self.escape_markdown(new["title"]), description=self.escape_markdown(new["summary"]), timestamp=new_datetime, url=new["link"], @@ -129,7 +140,7 @@ class PythonNews(Cog): self.bot.stats.incr("python_news.posted.pep") if msg.channel.is_news(): - log.trace("Publishing PEP annnouncement because it was in a news channel") + log.trace("Publishing PEP announcement because it was in a news channel") await msg.publish() # Apply new sent news to DB to avoid duplicate sending @@ -179,7 +190,7 @@ class PythonNews(Cog): # Build an embed and send a message to the webhook embed = discord.Embed( - title=thread_information["subject"], + title=self.escape_markdown(thread_information["subject"]), description=content[:1000] + f"... [continue reading]({link})" if len(content) > 1000 else content, timestamp=new_date, url=link,
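A self-contained repro of the intended behaviour, mirroring the patched regex (the sample string is made up):

import re

MARKDOWN_REGEX = re.compile(
    r"(?P<codeblock>`.*?`)"           # anything inside backtick blocks is matched first
    r"|(?P<markdown>(?<!\\)[_|])",    # otherwise, unescaped '_' and '|'
    re.DOTALL,
)

def escape_markdown(content: str) -> str:
    # Keep codeblock matches verbatim; backslash-escape bare '_' and '|'.
    return MARKDOWN_REGEX.sub(
        lambda m: m.group("codeblock") or "\\" + m.group("markdown"),
        content,
    )

print(escape_markdown("use `snake_case` for _new_ names"))
# -> use `snake_case` for \_new\_ names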
Generic API: switch the Any_Argument_Index type to a Natural subtype This makes this (sub)type consistent with the index type of Value_Ref_Array, and will make it easier to match arrays of values to member arguments in user code. TN:
@@ -419,7 +419,7 @@ package Langkit_Support.Generic_API.Introspection is function Last_Struct_Member (Id : Language_Id) return Struct_Member_Index; -- Return the last struct member index that is valid for the given language - type Any_Argument_Index is new Natural; + subtype Any_Argument_Index is Natural; subtype Argument_Index is Any_Argument_Index range 1 .. Any_Argument_Index'Last; -- Index of a property argument
MAINT: stats: Rewrite Wilson confidence interval expressions. Rewrite the expressions for the confidence interval to more closely match the formulas given in Newcombe (1998).
@@ -168,32 +168,32 @@ def _binom_wilson_conf_int(k, n, confidence_level, alternative, correction): else: z = ndtri(confidence_level) - t = 1 + z**2/n - r = (p + z**2/(2*n)) / t - + # For reference, the formulas implemented here are from + # Newcombe (1998) (ref. [3] in the proportion_ci docstring). + denom = 2*(n + z**2) + center = (2*n*p + z**2)/denom + q = 1 - p if correction: if alternative == 'less' or k == 0: lo = 0.0 else: - dlo = ((z * sqrt(z**2 - 1/n + 4*n*p*(1 - p) + (4*p - 2)) + 1) / - (2*n*t)) - lo = r - dlo + dlo = (1 + z*sqrt(z**2 - 2 - 1/n + 4*p*(n*q + 1))) / denom + lo = center - dlo if alternative == 'greater' or k == n: hi = 1.0 else: - dhi = ((z * sqrt(z**2 - 1/n + 4*n*p*(1 - p) - (4*p - 2)) + 1) / - (2*n*t)) - hi = r + dhi + dhi = (1 + z*sqrt(z**2 + 2 - 1/n + 4*p*(n*q - 1))) / denom + hi = center + dhi else: - d = z/t * sqrt(p*(1-p)/n + (z/(2*n))**2) + delta = z/denom * sqrt(4*n*p*q + z**2) if alternative == 'less' or k == 0: lo = 0.0 else: - lo = r - d + lo = center - delta if alternative == 'greater' or k == n: hi = 1.0 else: - hi = r + d + hi = center + delta return lo, hi
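Restated as formulas, the uncorrected branch of the rewritten code computes the Wilson score interval as given in Newcombe (1998); this is a restatement of the code above, not an independent derivation:

\hat{p} = \frac{k}{n}, \qquad \hat{q} = 1 - \hat{p}, \qquad
\mathrm{center} = \frac{2 n \hat{p} + z^2}{2\,(n + z^2)}, \qquad
\delta = \frac{z \sqrt{4 n \hat{p} \hat{q} + z^2}}{2\,(n + z^2)},

\mathrm{lo} = \mathrm{center} - \delta, \qquad \mathrm{hi} = \mathrm{center} + \delta,

where z is the normal quantile for the requested confidence level.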
Update README.md Documented what script node is used for in this demo
@@ -6,6 +6,7 @@ This example demonstrates how to run 2 stage inference on DepthAI using Gen2 Pip First, a face is detected on the image and then the cropped face frame is sent to age gender recognition network, which produces the estimated results +**This demo uses script node** to decode the face detection NN (1st stage NN) results. Script then crops out faces from the original high-res frame (based on face detections) and sends them to the age/gender recognition NN (2nd stage NN). Results of the second stage NN are then sent to the host. ## Demo
check_queued_build now returns a string describing its decision and actions
@@ -56,6 +56,7 @@ def check_queued_build(build_id): res_run = run_build.delay(build.id) build.task_id_run = res_run.id build.save() + return "Org is a scratch org, running build concurrently as task {}".format(res_run.id) else: # For persistent orgs, use the cache to lock the org @@ -67,12 +68,14 @@ def check_queued_build(build_id): res_run = run_build.delay(build.id, lock_id) build.task_id_run = res_run.id build.save() + return "Got a lock on the org, running as task {}".format(res_run.id) else: # Failed to get lock, queue next check time.sleep(1) res_check = check_queued_build.delay(build.id) build.task_id_check = res_check.id build.save() + return "Failed to get lock on org. {} has the org locked. Queueing next check.".format(cache.get(lock_id)) @django_rq.job('short') def set_github_status(build_id):
Add specification version to TIMESTAMP_SCHEMA. It was excluded from the Timestamp schema definition in error; this bug was discovered in the process of making metadata writing use the Timestamp schema strictly. Metadata previously written included the specification version, but the schema check did not include it.
@@ -345,6 +345,7 @@ SNAPSHOT_SCHEMA = SCHEMA.Object( TIMESTAMP_SCHEMA = SCHEMA.Object( object_name = 'TIMESTAMP_SCHEMA', _type = SCHEMA.String('timestamp'), + spec_version = SPECIFICATION_VERSION_SCHEMA, version = securesystemslib.formats.METADATAVERSION_SCHEMA, expires = securesystemslib.formats.ISO8601_DATETIME_SCHEMA, meta = securesystemslib.formats.FILEDICT_SCHEMA)