message
stringlengths
13
484
diff
stringlengths
38
4.63k
Onefile: Remove experimental status in help output * Also added option to disable --standalone if given as a project option.
@@ -79,15 +79,23 @@ want to use "--python-flag=no_site" to avoid the "site.py" module, which can sav a lot of code dependencies. Defaults to off.""", ) +parser.add_option( + "--no-standalone", + action="store_false", + dest="is_standalone", + default=False, + help=SUPPRESS_HELP, +) + + parser.add_option( "--onefile", action="store_true", dest="is_onefile", default=False, help="""\ -In case of standalone mode, enable single file mode. This means not a folder, -but a compressed executable is created and used. Experimental at this time, -and not supported on all OSes. Defaults to off.""", +On top of standalone mode, enable onefile mode. This means not a folder, +but a compressed executable is created and used. Defaults to off.""", ) parser.add_option(
docs: installation: Talk about testing Windows and MacOS Related:
@@ -8,7 +8,9 @@ is another good way to install it. You could also use the docker container. **Windows and MacOS are not officially supported yet**. Support varies by which plugins you install. We do not currently have a list of what is supported and -what is not supported on those OSs. +what is not supported on those OSs. Most things should work. However, until we +are testing for everything we won't declare them to be officially supported. +Please create issues for any problems you encounter. First make sure you have the latest versions of ``pip``, ``setuptools``, and ``wheel``. Some ML libraries require them to be up-to-date.
disposition fix for copy codecs as well also removed disposition from attachmentcopycodec
@@ -522,7 +522,14 @@ class AudioCopyCodec(BaseCodec): lang = 'und' optlist.extend(['-metadata:s:a:' + stream, "language=" + lang]) if 'disposition' in safe: - optlist.extend(['-disposition:a:' + stream, str(safe['disposition'])]) + dispo = str(safe['disposition']) + if '+default' not in dispo: + dispo = dispo + '-default' + if '+forced' not in dispo: + dispo = dispo + '-forced' + optlist.extend(['-disposition:a:' + stream, dispo]) + else: + optlist.extend(['-disposition:a:' + stream, '-default-forced']) return optlist @@ -597,6 +604,15 @@ class SubtitleCopyCodec(BaseCodec): else: lang = 'und' optlist.extend(['-metadata:s:s:' + stream, "language=" + lang]) + if 'disposition' in safe: + dispo = str(safe['disposition']) + if '+default' not in dispo: + dispo = dispo + '-default' + if '+forced' not in dispo: + dispo = dispo + '-forced' + optlist.extend(['-disposition:s:' + stream, dispo]) + else: + optlist.extend(['-disposition:s:' + stream, '-default-forced']) return optlist @@ -606,18 +622,13 @@ class AttachmentCopyCodec(BaseCodec): """ codec_name = 'copy' encoder_options = {'map': int, - 'source': str, - 'disposition': str} + 'source': str} optlist = [] def parse_options(self, opt, stream=0): safe = self.safe_options(opt) - if 'disposition' in safe: - if len(safe['disposition'].strip()) < 1: - del safe['disposition'] - stream = str(stream) optlist = [] optlist.extend(['-c:t:' + stream, 'copy'])
test_osds: remove scenario leftover Since there's only only scenario available we don't need lvm_scenario and no_lvm_scenario. Also add missing assert for ceph-volume tests.
@@ -26,7 +26,6 @@ class TestOSDs(object): for osd in setup["osds"]: assert host.service("ceph-osd@%s" % osd).is_running - @pytest.mark.no_lvm_scenario def test_osd_services_are_enabled(self, node, host, setup): # TODO: figure out way to paramaterize node['osds'] for this test for osd in setup["osds"]: @@ -42,13 +41,13 @@ class TestOSDs(object): ) assert host.mount_point(osd_path).exists - @pytest.mark.lvm_scenario + @pytest.mark.no_docker def test_ceph_volume_is_installed(self, node, host): - host.exists('ceph-volume') + assert host.exists('ceph-volume') - @pytest.mark.lvm_scenario + @pytest.mark.no_docker def test_ceph_volume_systemd_is_installed(self, node, host): - host.exists('ceph-volume-systemd') + assert host.exists('ceph-volume-systemd') def _get_osd_id_from_host(self, node, osd_tree): children = []
Remove unused util functions The various refactorings left them jobless.
@@ -36,18 +36,6 @@ CONTINUOUS_KINDS = 'ifuc' SIZE_FACTOR = np.sqrt(np.pi) -def pop(dataframe, key, default): - """ - Pop element *key* from dataframe and return it. Return default - if it *key* not in dataframe - """ - try: - value = dataframe.pop(key) - except KeyError: - value = default - return value - - def is_scalar_or_string(val): """ Return whether the given object is a scalar or string like. @@ -64,32 +52,6 @@ def is_string(obj): return False -def is_sequence_of_strings(obj): - """ - Returns true if *obj* is iterable and contains strings - """ - # Note: cbook.is_sequence_of_strings has a bug because - # a numpy array of strings is recognized as being - # string_like and therefore not a sequence of strings - if not cbook.iterable(obj): - return False - if cbook.is_string_like(obj) and not isinstance(obj, np.ndarray): - return False - for o in obj: - if not cbook.is_string_like(o): - return False - return True - - -def is_sequence_of_booleans(obj): - """ - Return True if *obj* is array-like and contains boolean values - """ - if not cbook.iterable(obj): - return False - return all(isinstance(o, bool) for o in obj) - - def make_iterable(val): """ Return [*val*] if *val* is not iterable
New entry in Columbus, Ohio Woman holding up a sign is shot with rubber bullets
@@ -70,6 +70,7 @@ Three reporters repeatedly tell police that they are members of the press and sh ### Police pepper spray African-American photographer | May 31st. + Photographer being sprayed while seemingly calmly standing 10 feet away from the police line. **Links** @@ -77,3 +78,11 @@ Photographer being sprayed while seemingly calmly standing 10 feet away from the * https://i.redd.it/4ix8f3j6dy151.jpg * https://old.reddit.com/r/Columbus/comments/gtk192/photographer_being_pepper_sprayed_by_police/ * https://www.instagram.com/p/CA03DsTByLn/ + +### Woman holding a sign is shot with rubber bullets | May 31st + +A woman standing alone holding a sign is shot with rubber bullets. She falls to the ground and is carried away by othe protestors. + +**Links** + +* https://mobile.twitter.com/chadloder/status/1267113315613806592
Fix Error String Issue Summary: The string formatter causes issues for certain types of errors that appear, this is not the root cause fix since that will involve looking at what the actual error is but a step before that, but this should fix the error.
@@ -280,7 +280,7 @@ class CommandHandler(Counters, FacebookBase, FcrIface): retry_count += 1 except Exception as e: raise ttypes.SessionException( - message="bulk_run_remote failed: %r" % (e) + message=f"bulk_run_remote failed: {e}" ) from e # Split the request into chunks and run them on remote hosts
Fix to allow S20 Update() to work. S20 stretched from the end of 2019 into beginning of 2020. Apparently the MAST CBV file structure, which include a subdirectory by year, puts things into the year the sector started.
@@ -132,7 +132,7 @@ class Update(object): def get_cbvs(self): if self.sector <= 6: year = 2018 - elif self.sector <= 19: + elif self.sector <= 20: year = 2019 else: year = 2020
Update README.MD [formerly b6e5752778ae21bf04458a14704489673a17dab9] [formerly b5be6ceab56a82afbf9bd14881f797218acf20ca] [formerly dc40a46e8ec01114e363c8727db1c5c5161365ef]
<p align="center"> <img src="https://img.shields.io/badge/License-MIT-yellow.svg" alt="Ciphey"> <img src="https://github.com/brandonskerritt/Ciphey/workflows/Python%20application/badge.svg?branch=master" alt="Ciphey"> + <img alt="PyPI - Downloads" src="https://img.shields.io/pypi/dm/ciphey"> </p> # What is this?
Typo: let -> let's Small typo
@@ -134,7 +134,7 @@ class ReplayMemory(object): ###################################################################### -# Now, let's define our model. But first, let quickly recap what a DQN is. +# Now, let's define our model. But first, let's quickly recap what a DQN is. # # DQN algorithm # -------------
Live e2e long toggling Live toggling
@@ -48,10 +48,11 @@ def limit_accel_in_turns(v_ego, angle_steers, a_target, CP): class LongitudinalPlanner: def __init__(self, CP, init_v=0.0, init_a=0.0): self.CP = CP - params = Params() - # TODO read param in the loop for live toggling - mode = 'blended' if params.get_bool('EndToEndLong') else 'acc' - self.mpc = LongitudinalMpc(mode=mode) + self.params = Params() + self.param_read_counter = 0 + + self.mpc = LongitudinalMpc() + self.read_param() self.fcw = False @@ -64,6 +65,9 @@ class LongitudinalPlanner: self.j_desired_trajectory = np.zeros(CONTROL_N) self.solverExecutionTime = 0.0 + def read_param(self): + self.mpc.mode = 'blended' if self.params.get_bool('EndToEndLong') else 'acc' + def parse_model(self, model_msg): if (len(model_msg.position.x) == 33 and len(model_msg.velocity.x) == 33 and @@ -83,8 +87,11 @@ class LongitudinalPlanner: return x, v, a, j def update(self, sm): - v_ego = sm['carState'].vEgo + if self.param_read_counter % 50 == 0: + self.read_param() + self.param_read_counter += 1 + v_ego = sm['carState'].vEgo v_cruise_kph = sm['controlsState'].vCruise v_cruise_kph = min(v_cruise_kph, V_CRUISE_MAX) v_cruise = v_cruise_kph * CV.KPH_TO_MS
lnchannel: rm "is_closing" method - has confusing semantics (and there is intentional behaviour changes here, due to erroneous use of "is_closing")
@@ -196,9 +196,6 @@ class AbstractChannel(Logger, ABC): def is_open(self): return self.get_state() == ChannelState.OPEN - def is_closing(self): - return ChannelState.SHUTDOWN <= self.get_state() <= ChannelState.FORCE_CLOSING - def is_closed(self): # the closing txid has been saved return self.get_state() >= ChannelState.CLOSING @@ -785,7 +782,7 @@ class Channel(AbstractChannel): def can_send_ctx_updates(self) -> bool: """Whether we can send update_fee, update_*_htlc changes to the remote.""" - if not (self.is_open() or self.is_closing()): + if self.get_state() not in (ChannelState.OPEN, ChannelState.SHUTDOWN): return False if self.peer_state != PeerState.GOOD: return False @@ -794,7 +791,7 @@ class Channel(AbstractChannel): return True def can_send_update_add_htlc(self) -> bool: - return self.can_send_ctx_updates() and not self.is_closing() + return self.can_send_ctx_updates() and self.is_open() def is_frozen_for_sending(self) -> bool: if self.lnworker and self.lnworker.channel_db is None and not self.lnworker.is_trampoline_peer(self.node_id):
pcie: Add t6000 support This one seems to need one extra magic poke
@@ -152,6 +152,10 @@ int pcie_init(void) printf("pcie: Error applying %s for %s\n", "apcie-axi2af-tunables", path); return -1; } + + /* ??? */ + write32(rc_base + 0x4, 0); + if (tunables_apply_local(path, "apcie-common-tunables", 1)) { printf("pcie: Error applying %s for %s\n", "apcie-common-tunables", path); return -1;
Workaround for bug Pandas bug Recreate groupby object before apply because other functions affected its internals.
@@ -114,6 +114,8 @@ def test_mixed_dtypes_groupby(as_index): # TODO Add more apply functions apply_functions = [lambda df: df.sum(), min] + # Workaround for Pandas bug #34656. Recreate groupby object for Pandas + pandas_groupby = pandas_df.groupby(by=by[-1], as_index=as_index) for func in apply_functions: eval_apply(modin_groupby, pandas_groupby, func)
Adding ``/FCLEAN`` implementation Adding fclean implementation
@@ -383,6 +383,33 @@ class Files: command = f"/COPY,{fname1},{ext1},,{fname2},{ext2},,{distkey}" return self.run(command, **kwargs) + def fclean(self, **kwargs): + """Deletes all local files in all processors in a distributed parallel processing run. + + APDL Command: /FCLEAN + + Deletes all local files (``.rst``, ``.esav``, ``.emat``, ``.mode``, ``.mlv``, + ``.seld``, ``.dsub``, ``.ist``, ``.full``, ``.rdsp``, ``.rfrq``, ``.rnnn``, + ``.resf``, ``.stat``, ``.modesym``, ``.osave``, ``.erot``, ``.log``) + in all processors in a distributed parallel processing run. + + .. warning:: Because ``/FCLEAN`` deletes all local files, it should only be issued if you are sure that + none of those files are needed in downstream analyses. Deleting files that are necessary for + the next substep, load step, or analysis will prevent continuation of the run. + + Notes + ----- + + Issue ``/FCLEAN`` to delete all local files having the current Jobname (``/FILNAME``) and save + disk space in a distributed parallel processing run. Like other file deletion commands, deletion happens + immediately upon issuing this command. Different than other file deletion commands, it enables the + convenience of deleting all ``Jobname.*`` local files without having to issue separate commands specifying + each file type + + This command is valid only at the Begin Level. + """ + return self.run("/FCLEAN", **kwargs) + def fcomp(self, ident="", level="", **kwargs): """Specifies file compression level.
Minor docstring tweak for Sphinx doc generation Sphinx interprets the string "_p" as a reference. ``_p`` does not have this problem. TN:
@@ -132,8 +132,8 @@ base_langkit_docs = { Exception that is raised when an error occurs while evaluating any ${'function' if lang == 'ada' else 'AST node method'} whose name starts with - "${'P_' if lang == 'ada' else 'p_'}". This is the only exceptions that - such functions can raise. + ``${'P_' if lang == 'ada' else 'p_'}``. This is the only exceptions + that such functions can raise. """, #
SAMPLE_info supports only 2 columns Fixed sleuth to ignore any further columns in sample_info but <sample> <condition>
@@ -19,7 +19,7 @@ t2g_file = args[[5]] setwd(outdir) getwd() -sample_info = read.table(sample_info_file, header=T) +sample_info = read.table(sample_info_file, header=T)[,1:2] colnames(sample_info) = c("sample", "condition") print(sample_info) sample_info$sample
Add typing to data_structures/queue/queue_on_pseudo_stack.py * Add typing hacktoberfest * [pre-commit.ci] auto fixes from pre-commit.com hooks for more information, see
"""Queue represented by a pseudo stack (represented by a list with pop and append)""" +from typing import Any class Queue: @@ -14,7 +15,7 @@ class Queue: @param item item to enqueue""" - def put(self, item): + def put(self, item: Any) -> None: self.stack.append(item) self.length = self.length + 1 @@ -23,7 +24,7 @@ class Queue: @return dequeued item that was dequeued""" - def get(self): + def get(self) -> Any: self.rotate(1) dequeued = self.stack[self.length - 1] self.stack = self.stack[:-1] @@ -35,7 +36,7 @@ class Queue: @param rotation number of times to rotate queue""" - def rotate(self, rotation): + def rotate(self, rotation: int) -> None: for i in range(rotation): temp = self.stack[0] self.stack = self.stack[1:] @@ -45,7 +46,7 @@ class Queue: """Reports item at the front of self @return item at front of self.stack""" - def front(self): + def front(self) -> Any: front = self.get() self.put(front) self.rotate(self.length - 1) @@ -53,5 +54,5 @@ class Queue: """Returns the length of this.stack""" - def size(self): + def size(self) -> int: return self.length
Pin futures to 3.1.1 To avoid 3.1.2 errors where it doesn't run when you use python 3.
@@ -11,6 +11,7 @@ tqdm==4.19.6 # progress bars requests==2.18.4 cherrypy==13.0.1 # Temporarily pinning this until CherryPy stops depending on namespaced package, see #2971 # pyup: <13.1.0 iceqube==0.0.4 +futures==3.1.1 porter2stemmer==1.0 unicodecsv==0.14.1 metafone==0.5
Update mediaprocessor.py remove codec based sorting
@@ -645,7 +645,7 @@ class MediaProcessor: # Sort incoming streams so that things like first language preferences respect these options audio_streams = info.audio try: - self.sortStreams(audio_streams, awl, self.settings.acodec) + self.sortStreams(audio_streams, awl) except: self.log.exception("Error sorting source audio streams [sort-streams].")
Update connector-ldap.yml include some more comments for all_users_filter
@@ -52,7 +52,10 @@ search_page_size: 200 require_tls_cert: False # (optional) all_users_filter (default value given below) -# all_users_filter specifies the query used to find all users in the directory. +# In order to obtain a more fine-tunned list of LDAP users, adapt the filter to your needs. +# Only resulted users from this filter predicate are ever considered by the User Sync tool; +# that is, all LDAP queries issued by User Sync include this predicate to filter out LDAP resources +# that are not subject of synchronization. # The default value specified here is appropriate for Active Directory, which has a # special field that is used to enable and disable users. The value for OpenLDAP # directories might be much simpler: "(&(objectClass=person)(objectClass=top))"
Update install instructions to make compatible with zsh Add quotation mark for StoneSoup pip installation This makes the command compatible with zsh.
@@ -60,7 +60,7 @@ following: git clone "https://github.com/dstl/Stone-Soup.git" cd Stone-Soup - python -m pip install -e .[dev] + python -m pip install -e ".[dev]" Please also see our :ref:`contributing:Contributing` page.
Accommodated unit of time. Changed _timeout_watch_time and _timeout_disable_time definitions to divide by 1000 do accommodate ms input instead of s input.
@@ -48,9 +48,9 @@ class AutofireCoil(SystemWideDevice): # pulse is handled via rule but add a handler so that we take notice anyway self.config['switch'].add_handler(self._hit) if self.config['enable_timeouts']: - self._timeout_watch_time = self.config['timeout_watch_time'] + self._timeout_watch_time = self.config['timeout_watch_time']/1000 self._timeout_max_hits = self.config['timeout_max_hits'] - self._timeout_disable_time = self.config['timeout_disable_time'] + self._timeout_disable_time = self.config['timeout_disable_time']/1000 self._timeout_hits = [] from time import time as _time from threading import Timer as _Timer
Update potential_vorticity_baroclinic() docstring Fixes
@@ -653,9 +653,6 @@ def potential_vorticity_baroclinic(potential_temperature, pressure, u, v, dx, dy the size of `u` along the applicable axis. lats : (M, N) ndarray latitudes of the wind data in radians or with appropriate unit information attached - axis : int, optional - The axis corresponding to the vertical dimension in the potential temperature - and pressure arrays, defaults to 0, the first dimension. Returns -------
settings_users: Remove unnecessary sort. The populate_users function doesn't need to sort the list of active and non-active users, because the list_render is called specifying to sort users by their full_name. Author: Clara Moraes Dantas
@@ -137,11 +137,9 @@ function failed_listing_users() { function populate_users() { const active_user_ids = people.get_active_human_ids(); let active_users = active_user_ids.map(user_id => people.get_by_user_id(user_id)); - active_users = _.sortBy(active_users, 'full_name'); const deactivated_user_ids = people.get_non_active_human_ids(); let deactivated_users = deactivated_user_ids.map(user_id => people.get_by_user_id(user_id)); - deactivated_users = _.sortBy(deactivated_users, 'full_name'); if (active_user_ids.length === 0 && deactivated_user_ids.length === 0) { failed_listing_users();
Don't reference long in types.pyi (It's a type alias for int anyway, and it will cause a problem in the initial import cycle once is merged.)
@@ -13,7 +13,7 @@ TypeType = type ObjectType = object IntType = int -LongType = long +LongType = int # Really long, but can't reference that due to a mypy import cycle FloatType = float BooleanType = bool ComplexType = complex
[docs] - [definitions] Update Repository page for Definitions This PR updates the **Repository** concept page for the new `Definitions` world. [Preview here](https://dagster-git-erin-repository-concept-definitions-elementl.vercel.app/concepts/repositories-workspaces/repositories). **Note**: This page will be removed from the sidenav in
--- -title: Repositories | Dagster +title: Repositories | Dagster Docs description: A repository is a collection of jobs, schedules, and sensor definitions that the Dagster CLI, Dagit and the Dagster Daemon can target to load them. --- # Repositories -A repository is a collection of software-defined assets, jobs, schedules, and sensors. Repositories are loaded as a unit by the Dagster CLI, Dagit and the Dagster Daemon. +<Note> + In 1.1.6, we introduced <PyObject object="Definitions" />, which replaces + repositories. While repositories will continue to work, we recommend migrating + to <code>Definitions</code>. Refer to the{" "} + <a href="/concepts/code-locations">Code locations documentation</a> for more + info. +</Note> -## Relevant APIs +A repository is a collection of software-defined assets, jobs, schedules, and sensors. Repositories are loaded as a unit by the Dagster CLI, Dagit and the dagster-daemon. -| Name | Description | -| ------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | -| <PyObject object="repository" decorator /> | The decorator used to define repositories. The decorator returns a <PyObject object="RepositoryDefinition" /> | -| <PyObject object="RepositoryDefinition" /> | Base class for repositories. You almost never want to use initialize this class directly. Instead, you should use the <PyObject object="repository" decorator /> which returns a <PyObject object="RepositoryDefinition" /> | - -## Overview - -A repository is a convenient way to organize your job and other definitions. Each repository: +A convenient way to organize your job and other definitions, each repository: - Includes various definitions: [Software-defined assets](/concepts/assets/software-defined-assets), [Jobs](/concepts/ops-jobs-graphs/jobs), [Schedules](/concepts/partitions-schedules-sensors/schedules), and [Sensors](/concepts/partitions-schedules-sensors/sensors). - Is loaded in a different process than Dagster system processes like Dagit. Any communication between the Dagster system and repository code occurs over an RPC mechanism, ensuring that problems in repository code can't affect Dagster or other repositories. - Can be loaded in its own Python environment, so you can manage your dependencies (or even your own Python versions) separately. -You can set up multiple repositories and load them all at once by creating a `workspace.yaml` file. This can be useful for grouping jobs and other artifacts by team for organizational purposes. See [Workspace](/concepts/repositories-workspaces/workspaces) to learn more about setting up multiple repositories. +You can set up multiple repositories and load them all at once by creating a `workspace.yaml` file. This can be useful for grouping jobs and other artifacts by team for organizational purposes. Refer to the [Workspace documentation](/concepts/repositories-workspaces/workspaces) to learn more about setting up multiple repositories. + +--- + +## Relevant APIs + +| Name | Description | +| ------------------------------------------ | ---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- | +| <PyObject object="repository" decorator /> | The decorator used to define repositories. The decorator returns a <PyObject object="RepositoryDefinition" /> | +| <PyObject object="RepositoryDefinition" /> | Base class for repositories. You almost never want to use initialize this class directly. Instead, you should use the <PyObject object="repository" decorator /> which returns a <PyObject object="RepositoryDefinition" /> | --- -## Defining a Repository +## Defining a repository Repositories are typically declared using the <PyObject object="repository" decorator /> decorator. For example: @@ -93,7 +101,9 @@ def my_repository(): The repository specifies a list of items, each of which can be a <PyObject object="AssetsDefinition"/>, <PyObject object="JobDefinition"/>, <PyObject module="dagster" object="ScheduleDefinition" />, or <PyObject module="dagster" object="SensorDefinition" />. If you include a schedule or sensor, the job it targets will be automatically also included on the repository. -## Using a Repository +--- + +## Using a repository If you save the code above as `repo.py`, you can then run the Dagster command line tools on it. Try running:
Remove failing unit tests Testing `information` cog seems redutant as it is not too important part of the bot.
@@ -97,79 +97,6 @@ class InformationCogTests(unittest.TestCase): self.assertEqual(admin_embed.title, "Admins info") self.assertEqual(admin_embed.colour, discord.Colour.red()) - @unittest.mock.patch('bot.exts.info.information.time_since') - def test_server_info_command(self, time_since_patch): - time_since_patch.return_value = '2 days ago' - - self.ctx.guild = helpers.MockGuild( - features=('lemons', 'apples'), - region="The Moon", - roles=[self.moderator_role], - channels=[ - discord.TextChannel( - state={}, - guild=self.ctx.guild, - data={'id': 42, 'name': 'lemons-offering', 'position': 22, 'type': 'text'} - ), - discord.CategoryChannel( - state={}, - guild=self.ctx.guild, - data={'id': 5125, 'name': 'the-lemon-collection', 'position': 22, 'type': 'category'} - ), - discord.VoiceChannel( - state={}, - guild=self.ctx.guild, - data={'id': 15290, 'name': 'listen-to-lemon', 'position': 22, 'type': 'voice'} - ) - ], - members=[ - *(helpers.MockMember(status=discord.Status.online) for _ in range(2)), - *(helpers.MockMember(status=discord.Status.idle) for _ in range(1)), - *(helpers.MockMember(status=discord.Status.dnd) for _ in range(4)), - *(helpers.MockMember(status=discord.Status.offline) for _ in range(3)), - ], - member_count=1_234, - icon_url='a-lemon.jpg', - ) - - coroutine = self.cog.server_info.callback(self.cog, self.ctx) - self.assertIsNone(asyncio.run(coroutine)) - - time_since_patch.assert_called_once_with(self.ctx.guild.created_at, precision='days') - _, kwargs = self.ctx.send.call_args - embed = kwargs.pop('embed') - self.assertEqual(embed.colour, discord.Colour.blurple()) - self.assertEqual( - embed.description, - textwrap.dedent( - f""" - **Server information** - Created: {time_since_patch.return_value} - Voice region: {self.ctx.guild.region} - Features: {', '.join(self.ctx.guild.features)} - - **Channel counts** - Category channels: 1 - Text channels: 1 - Voice channels: 1 - Staff channels: 0 - - **Member counts** - Members: {self.ctx.guild.member_count:,} - Staff members: 0 - Roles: {len(self.ctx.guild.roles)} - - **Member statuses** - {constants.Emojis.status_online} 2 - {constants.Emojis.status_idle} 1 - {constants.Emojis.status_dnd} 4 - {constants.Emojis.status_offline} 3 - """ - ) - ) - self.assertEqual(embed.thumbnail.url, 'a-lemon.jpg') - - class UserInfractionHelperMethodTests(unittest.TestCase): """Tests for the helper methods of the `!user` command.""" @@ -465,11 +392,6 @@ class UserEmbedTests(unittest.TestCase): embed.fields[1].value ) - self.assertEqual( - "basic infractions info", - embed.fields[3].value - ) - @unittest.mock.patch( f"{COG_PATH}.basic_user_infraction_counts", new=unittest.mock.AsyncMock(return_value=("Infractions", "basic infractions"))
Use dst dir for temp file Summary: Fixes Pull Request resolved:
@@ -378,7 +378,12 @@ def _download_url_to_file(url, dst, hash_prefix, progress): if content_length is not None and len(content_length) > 0: file_size = int(content_length[0]) - f = tempfile.NamedTemporaryFile(delete=False) + # We deliberately save it in a temp file and move it after + # download is complete. This prevents a local working checkpoint + # being overriden by a broken download. + dst_dir = os.path.dirname(dst) + f = tempfile.NamedTemporaryFile(delete=False, dir=dst_dir) + try: if hash_prefix is not None: sha256 = hashlib.sha256()
qt PayToEdit: rm redundant code This is already handled by `self.textChanged.connect(self.check_text)` in __init__.
@@ -257,13 +257,6 @@ class PayToEdit(CompletionTextEdit, ScanQRTextEdit, Logger): self.setMaximumHeight(h) self.verticalScrollBar().hide() - def qr_input(self, *, callback=None): - def _on_qr_success(data): - if data.lower().startswith(BITCOIN_BIP21_URI_SCHEME + ':'): - self.win.pay_to_URI(data) - # TODO: update fee - super(PayToEdit, self).qr_input(callback=_on_qr_success) - def resolve(self): self.is_alias = False if self.hasFocus():
Add currently undocumented premium_tier field. Needs more testing to find out what it is.
@@ -132,7 +132,7 @@ class Guild(Hashable): 'owner_id', 'mfa_level', 'emojis', 'features', 'verification_level', 'explicit_content_filter', 'splash', '_voice_states', '_system_channel_id', 'default_notifications', - 'description', 'max_presences', 'max_members') + 'description', 'max_presences', 'max_members', 'premium_tier') def __init__(self, *, data, state): self._channels = {} @@ -237,6 +237,7 @@ class Guild(Hashable): self.description = guild.get('description') self.max_presences = guild.get('max_presences') self.max_members = guild.get('max_members') + self.premium_tier = guild.get('premium_tier') for mdata in guild.get('members', []): member = Member(data=mdata, guild=self, state=state)
Show all elements in the namespace view Except for top level Property and InstanceSpecification.
@@ -31,26 +31,6 @@ if TYPE_CHECKING: from gaphor.core.modeling import ElementFactory from gaphor.core.eventmanager import EventManager -# The following items will be shown in the treeview, although they -# are UML.Namespace elements. -_default_filter_list = ( - UML.Class, - UML.Interface, - UML.Package, - UML.Component, - UML.Device, - UML.Node, - UML.Artifact, - UML.Interaction, - UML.UseCase, - UML.Actor, - Diagram, - UML.Profile, - UML.Stereotype, - UML.Property, - UML.Operation, -) - log = logging.getLogger(__name__) @@ -259,7 +239,6 @@ class Namespace(UIComponent): self.element_factory = element_factory self._namespace: Optional[NamespaceView] = None self.model = Gtk.TreeStore.new([object]) - self.toplevel_types = _default_filter_list def open(self): em = self.event_manager @@ -406,9 +385,10 @@ class Namespace(UIComponent): return None def _visible(self, element): - # Special case: Non-navigable properties - return type(element) in self.toplevel_types and not ( - isinstance(element, UML.Property) and element.namespace is None + """ Special case: Non-navigable properties. """ + return isinstance(element, UML.NamedElement) and not ( + isinstance(element, (UML.Property, UML.InstanceSpecification)) + and element.namespace is None ) def _add(self, element, iter=None): @@ -431,12 +411,11 @@ class Namespace(UIComponent): self.model.clear() toplevel = self.element_factory.select( - lambda e: isinstance(e, UML.NamedElement) - and type(e) in self.toplevel_types - and not e.namespace + lambda e: isinstance(e, UML.NamedElement) and not e.namespace ) for element in toplevel: + if self._visible(element): self._add(element) # Expand all root elements: @@ -458,7 +437,7 @@ class Namespace(UIComponent): @event_handler(ElementDeleted) def _on_element_delete(self, event: ElementDeleted): element = event.element - if type(element) in self.toplevel_types: + if isinstance(element, UML.NamedElement): iter = self.iter_for_element(element) # iter should be here, unless we try to delete an element who's # parent element is already deleted, so let's be lenient.
add task level fine-grainedness also add docs to indicate that users might want to consider using SimCSE's dropin replacement instead of the original SentEval harness when considering STS tasks
+"""Run any MEAD embeddings within the SentEval framework + +The SentEval framework (https://github.com/facebookresearch/SentEval) facilitates +testing the quality of sentence embeddings. To prepare your data, you can clone +that repo and do a `pip install -e .`. This program allows you to control which +sets of tasks are run. By default, it will run all STS and classification probing +examples. + +Please note that its common when evaluating against STS to use a different approach +from the one in SentEval (following SentenceBERT). A drop-in replacement is available +from the SimCSE repository (https://github.com/princeton-nlp/SimCSE). + +To use their modified benchmark, clone that repo instead +and within their SentEval directory, do a `pip install -e .`, and pay attention instead +to the ALL Spearman metrics: + +""" import argparse import baseline import sys @@ -18,6 +35,7 @@ SUBWORD_EXTRA = 30 def main(): parser = argparse.ArgumentParser(description='Run senteval harness') parser.add_argument('--nctx', default=512, type=int) + parser.add_argument('--tasks', nargs="+", default=['sts', 'class', 'probe']) parser.add_argument('--batchsz', default=20, type=int) parser.add_argument('--pool', help='Should a reduction be applied on the embeddings? Only use if your embeddings arent already pooled', type=str) parser.add_argument('--vec_id', help='Reference to a specific embedding type') @@ -105,12 +123,17 @@ def main(): return encoding se = senteval.engine.SE(params_senteval, batcher, prepare) - transfer_tasks = ['STS12', 'STS13', 'STS14', 'STS15', 'STS16', - 'MR', 'CR', 'MPQA', 'SUBJ', 'SST2', 'SST5', 'TREC', 'MRPC', - 'SICKEntailment', 'SICKRelatedness', 'STSBenchmark', - 'Length', 'WordContent', 'Depth', 'TopConstituents', + transfer_tasks = [] + if 'sts' in args.tasks: + transfer_tasks += ['STS12', 'STS13', 'STS14', 'STS15', 'STS16', 'SICKRelatedness', 'STSBenchmark'] + if 'class' in args.tasks: + transfer_tasks += ['MR', 'CR', 'MPQA', 'SUBJ', 'SST2', 'SST5', 'TREC', 'MRPC', + 'SICKEntailment'] + if 'probe' in args.tasks: + transfer_tasks += ['Length', 'WordContent', 'Depth', 'TopConstituents', 'BigramShift', 'Tense', 'SubjNumber', 'ObjNumber', 'OddManOut', 'CoordinationInversion'] + results = se.eval(transfer_tasks) print(results)
fix(graphene): use_https was clobbered by precomputed parent class Resolves
@@ -96,6 +96,7 @@ class GrapheneMetadata(PrecomputedMetadata): self.auth_header = { "Authorization": "Bearer %s" % token } + kwargs['use_https'] = bool(use_https) super(GrapheneMetadata, self).__init__(cloudpath, *args, **kwargs) version = self.server_path.version
resolved_ips could be None Only run len() when we know that resolved_ips is a list.
@@ -914,7 +914,7 @@ def _wait_for_ip(vm_ref, max_wait): resolved_ips = salt.utils.network.host_to_ips(vm_name) log.debug("Timeout waiting for VMware tools. The name {0} resolved " "to {1}".format(vm_name, str(resolved_ips))) - if len(resolved_ips) > 0: + if isinstance(resolved_ips, list) and len(resolved_ips): return resolved_ips[0] else: return False
TitanCNA: allow running with genomes with "chr" prefixes This will ensure that the right option is set with a compatible genome. Currently the list is hardcoded as I think it is not possible to detect those.
@@ -102,6 +102,8 @@ def _run_titancna(cn_file, het_file, ploidy, num_clusters, work_dir, data): with utils.chdir(tmp_dir): cmd = ("{export_cmd} && titanCNA.R --id {sample} --hetFile {het_file} --cnFile {cn_file} " "--numClusters {num_clusters} --ploidy {ploidy} --numCores {cores} --outDir {tmp_dir}") + if data["genome_build"] in ("hg19", "hg38"): + cmd += " --genomeStyle UCSC" do.run(cmd.format(**locals()), "TitanCNA CNV detection: ploidy %s, cluster %s" % (ploidy, num_clusters)) for fname in glob.glob(os.path.join(tmp_dir, cluster_dir + "*")): shutil.move(fname, ploidy_dir)
[Datasets] Remove the non-useful comment in `map_batches()` This PR is a quick fix to remove the non-useful comment introduced in probably during debugging. Replace the comment with a meaningful one.
@@ -614,7 +614,7 @@ class Dataset(Generic[T]): zero_copy_batch=zero_copy_batch, ) - # breakpoint() + # TODO(chengsu): pass function name to MapBatches logical operator. if hasattr(fn, "__self__") and isinstance( fn.__self__, ray.data.preprocessor.Preprocessor ):
Add an "error" token to the testsuite reference lexer TN:
@@ -6,9 +6,10 @@ from langkit.lexer import ( class Token(LexerToken): + Def = WithText() + Error = WithText() Example = WithText() Null = WithText() - Def = WithText() Comma = WithText() Dot = WithText() @@ -28,9 +29,10 @@ foo_lexer.add_rules( (Pattern(r'[ \n\r\t]+'), Ignore()), (Eof(), Token.Termination), + (Literal('def'), Token.Def), + (Literal('error'), Token.Error), (Literal('example'), Token.Example), (Literal('null'), Token.Null), - (Literal('def'), Token.Def), (Literal(','), Token.Comma), (Literal('.'), Token.Dot),
Report datadog user stats more frequently and decouple from calculated properties
@@ -74,8 +74,6 @@ def _update_calculated_properties(): get_domains_to_update_es_filter() ).fields(["name", "_id"]).run().hits - all_stats = all_domain_stats() - datadog_report_user_stats(commcare_users_by_domain=all_stats['commcare_users']) for r in results: dom = r["name"] domain_obj = Domain.get_by_name(dom) @@ -95,6 +93,12 @@ def _update_calculated_properties(): notify_exception(None, message='Domain {} failed on stats calculations with {}'.format(dom, e)) +@periodic_task(run_every=timedelta(hours=6), queue='background_queue') +def run_datadog_user_stats(): + all_stats = all_domain_stats() + datadog_report_user_stats(commcare_users_by_domain=all_stats['commcare_users']) + + def datadog_report_user_stats(commcare_users_by_domain): commcare_users_by_domain = summarize_user_counts(commcare_users_by_domain, n=50) for domain, user_count in commcare_users_by_domain.items():
fix(futures_pig): fix rename futures_pig_info and futures_pig_rank interface fix rename futures_pig_info and futures_pig_rank interface
@@ -1689,8 +1689,8 @@ if __name__ == "__main__": print(stock_hk_spot_em_df) stock_zh_a_hist_df = stock_zh_a_hist( - symbol="301183", - period="weekly", + symbol="430090", + period="daily", start_date="20220516", end_date="20220722", adjust="hfq",
Fix VOD issues with longer keyframe intervals * Fix VOD issues with longer keyframe intervals * Move probe function to util Update comment * Use recording duration for keyFrameDurations * Remove unused early return * Avoid clipping first clip
@@ -849,16 +849,13 @@ def vod_ts(camera_name, start_ts, end_ts): for recording in recordings: clip = {"type": "source", "path": recording.path} duration = int(recording.duration * 1000) - # Determine if offset is needed for first clip - if recording.start_time < start_ts: - offset = int((start_ts - recording.start_time) * 1000) - clip["clipFrom"] = offset - duration -= offset + # Determine if we need to end the last clip early if recording.end_time > end_ts: duration -= int((recording.end_time - end_ts) * 1000) if duration > 0: + clip["keyFrameDurations"] = [duration] clips.append(clip) durations.append(duration) else:
Update Dockerfile.xpress.pyjul reorder commands to run from most general to most specific
@@ -3,6 +3,8 @@ FROM nlaws/pyjul ENV APP_ENV=local ENV SRC_DIR=/opt/reopt/reo/src +# Add remote debugging capability +RUN apt-get update && apt-get install telnet # Install Xpress solver ENV XPRESSDIR=/opt/xpressmp @@ -35,8 +37,5 @@ COPY . /opt/reopt WORKDIR /opt/reopt EXPOSE 8000 RUN ["pip", "install", "-r", "requirements.txt"] +RUN pip install pdbpp # for remote debugging ENTRYPOINT ["/bin/bash", "-c"] - -# Add remote debugging capability -RUN apt-get update && apt-get install telnet -RUN pip install pdbpp
fix Exchange name change Exchange to Microsoft Exchange
"creation_date": "2019-06-03", "data_metadata": { "data_source": [ - "Exchange", + "Microsoft Exchange", "SMTP", "Cuckoo", "Splunk", "DeepSight" ], "providing_technologies": [ - "Exchange", + "Microsoft Exchange", "SMTP", "Cuckoo", "Splunk",
Update bulkresize.py file Add rename_img and output_path_concat functions This additions will seperate the login of renaming or the path concatination of the resized images from the resizing logic Now bulk_resizer function has only one functionality which is the resizing of the images
@@ -34,6 +34,16 @@ def get_extension(path): else: return False +def rename_img(path, number): + output_path = path + '/' + str(number) + '.jpg' + return output_path + +def output_path_concat(path, im_path): + output_path = path + '/' + \ + os.path.splitext(os.path.basename(im_path))[0] + '.jpg' + + return output_path + def bulk_resizer(input_path, output_path, desired_size=32, color=[0, 0, 0], rename=True): filepath = list_contents(input_path) @@ -57,11 +67,9 @@ def bulk_resizer(input_path, output_path, desired_size=32, new_im = cv2.copyMakeBorder(im, top, bottom, left, right, cv2.BORDER_CONSTANT, value=color) if rename: - output_path1 = output_path + "/" + str(img_no) + ".jpg" - img_no += 1 + output_path1 = rename_img(output_path, filepath.index(im_pth)) else: - output_path1 = output_path + "/" + \ - os.path.splitext(os.path.basename(im_pth))[0] + ".jpg" + output_path1 = output_path_concat(output_path, im_pth) cv2.imwrite(output_path1, new_im)
checkout: add --jobs support Use multiprocessing to run in parallel. When operating on multiple projects, this can speed things up. Across 1000 repos, it goes from ~9sec to ~5sec with the default -j8. Tested-by: Mike Frysinger
# See the License for the specific language governing permissions and # limitations under the License. +import functools +import multiprocessing import sys -from command import Command + +from command import Command, DEFAULT_LOCAL_JOBS, WORKER_BATCH_SIZE from progress import Progress @@ -31,27 +34,41 @@ The command is equivalent to: repo forall [<project>...] -c git checkout <branchname> """ + PARALLEL_JOBS = DEFAULT_LOCAL_JOBS def ValidateOptions(self, opt, args): if not args: self.Usage() + def _ExecuteOne(self, nb, project): + """Checkout one project.""" + return (project.CheckoutBranch(nb), project) + def Execute(self, opt, args): nb = args[0] err = [] success = [] all_projects = self.GetProjects(args[1:]) - pm = Progress('Checkout %s' % nb, len(all_projects)) - for project in all_projects: - pm.update() - - status = project.CheckoutBranch(nb) + def _ProcessResults(results): + for status, project in results: if status is not None: if status: success.append(project) else: err.append(project) + pm.update() + + pm = Progress('Checkout %s' % nb, len(all_projects)) + # NB: Multiprocessing is heavy, so don't spin it up for one job. + if len(all_projects) == 1 or opt.jobs == 1: + _ProcessResults(self._ExecuteOne(nb, x) for x in all_projects) + else: + with multiprocessing.Pool(opt.jobs) as pool: + results = pool.imap_unordered( + functools.partial(self._ExecuteOne, nb), all_projects, + chunksize=WORKER_BATCH_SIZE) + _ProcessResults(results) pm.end() if err:
expressen: fixed an issue we cant find the id fixes:
@@ -5,7 +5,7 @@ import re import json from svtplay_dl.service import Service -from svtplay_dl.log import log +from svtplay_dl.error import ServiceError from svtplay_dl.fetcher.hls import hlsparse from svtplay_dl.utils.text import decode_html_entities @@ -16,26 +16,14 @@ class Expressen(Service): def get(self): data = self.get_urldata() - match = re.search('="(https://www.expressen.se/tvspelare[^"]+)"', data) + match = re.search('data-article-data="([^"]+)"', data) if not match: - log.error("Can't find video id") + yield ServiceError("Cant find video file info") return - url = decode_html_entities(match.group(1)) - data = self.http.request("get", url) + data = decode_html_entities(match.group(1)) + janson = json.loads(data) + self.config.set("live", janson["isLive"]) - match = re.search("window.Player.settings = ({.*});", data.text) - if not match: - log.error("Can't find json info.") - - dataj = json.loads(match.group(1)) - if "streams" in dataj: - if "iPad" in dataj["streams"]: - streams = hlsparse(self.config, self.http.request("get", dataj["streams"]["iPad"]), - dataj["streams"]["iPad"], output=self.output) - for n in list(streams.keys()): - yield streams[n] - if "hashHls" in dataj["streams"]: - streams = hlsparse(self.config, self.http.request("get", dataj["streams"]["hashHls"]), - dataj["streams"]["hashHls"], output=self.output) + streams = hlsparse(self.config, self.http.request("get", janson["stream"]), janson["stream"], output=self.output) for n in list(streams.keys()): yield streams[n]
DOC: removed reference to old template Removed the API reference to the old netCDF pandas instrument template.
@@ -23,21 +23,12 @@ General :members: -Instrument Templates --------------------- - -General Instrument -^^^^^^^^^^^^^^^^^^ +Instrument Template +------------------- .. automodule:: pysat.instruments.templates.template_instrument :members: __doc__, init, default, load, list_files, list_remote_files, download, clean -netCDF Pandas -^^^^^^^^^^^^^ - -.. automodule:: pysat.instruments.templates.netcdf_pandas - :members: __doc__, init, load, list_files, download - Constellation -------------
fix: update docs link checker since docs are moved to wiki
@@ -24,6 +24,8 @@ def docs_link_exists(body): parts = parsed_url.path.split('/') if len(parts) == 5 and parts[1] == "frappe" and parts[2] in docs_repos: return True + if parsed_url.netloc in ["docs.erpnext.com", "frappeframework.com"]: + return True if __name__ == "__main__":
kivy: (fix) clicking "max" to send would raise for empty wallet fix
@@ -1110,7 +1110,8 @@ class ElectrumWindow(App, Logger): def cb(amount): if amount == '!': screen.is_max = True - screen.amount = self.get_max_amount() + ' ' + self.base_unit + max_amt = self.get_max_amount() + screen.amount = (max_amt + ' ' + self.base_unit) if max_amt else '' else: screen.amount = amount screen.is_max = False
Add automatic sdist/wheel deployment Reasons to use a distinct build deployment stage instead of `deploy` key: more customizable if using more than one job, waits until all jobs have succeeded
@@ -24,3 +24,14 @@ script: - twine check dist/* - flake8 - nosetests + +jobs: + include: + - stage: deploy + name: Upload release to PyPI + if: tag is present + script: twine upload dist/* + env: + - TWINE_USERNAME: coldfix-deploy + # TWINE_PASSWORD + - secure: "d8WYCQ56Se9Y9Z+GIwfLnMRgzfqiPm73XL8Cv3QBAeK/iyN8tsfoVknh5jqMv8ENwKVjuJ4jdCcNH4woW9aJQScPP7ZrOFBGGM+qOSZkCZZ4yOBtdU/oH0pb/eo5CmM/HeoszoZXzAjx3NDNFyQL9otIZNW2VnfBsfjTRgzMmW0="
add fetchItems to library use totalSize so we dont do any more http requests then needed.
@@ -360,6 +360,33 @@ class LibrarySection(PlexObject): # Private attrs as we dont want a reload. self._total_size = None + def fetchItems(self, ekey, cls=None, **kwargs): + """ Load the specified key to find and build all items with the specified tag + and attrs. See :func:`~plexapi.base.PlexObject.fetchItem` for more details + on how this is used. + + Use container_start and container_size for pagination. + """ + url_kw = {} + for key, value in dict(kwargs).items(): + if key == "container_start": + url_kw["X-Plex-Container-Start"] = kwargs.pop(key) + if key == "container_size": + url_kw["X-Plex-Container-Size"] = kwargs.pop(key) + + if ekey is None: + raise BadRequest('ekey was not provided') + data = self._server.query(ekey, params=url_kw) + + self._total_size = int(data.attrib.get("totalSize")) + items = self.findItems(data, cls, ekey, **kwargs) + + librarySectionID = data.attrib.get('librarySectionID') + if librarySectionID: + for item in items: + item.librarySectionID = librarySectionID + return items + @property def totalSize(self): if self._total_size is None: @@ -549,7 +576,7 @@ class LibrarySection(PlexObject): args['sort'] = self._cleanSearchSort(sort) if libtype is not None: args['type'] = utils.searchType(libtype) - # iterate over the results + results = [] subresults = [] args['X-Plex-Container-Start'] = 0 @@ -557,11 +584,13 @@ class LibrarySection(PlexObject): while True: key = '/library/sections/%s/all%s' % (self.key, utils.joinArgs(args)) subresults = self.fetchItems(key) - if not len(subresults): - break - else: results.extend(subresults) + # this si not set as condition in the while as + # this require a additional http request. + if self.totalSize <= len(results): + break + args['X-Plex-Container-Start'] += args['X-Plex-Container-Size'] return results[:maxresults]
erepo: replace copy_tree from disutils with copytree of shutil Fixes:
@@ -144,7 +144,7 @@ def _cached_clone(url, rev, for_write=False): revision checked out. If for_write is set prevents reusing this dir via cache. """ - from distutils.dir_util import copy_tree + from shutil import copytree # even if we have already cloned this repo, we may need to # fetch/fast-forward to get specified rev @@ -156,7 +156,7 @@ def _cached_clone(url, rev, for_write=False): # Copy to a new dir to keep the clone clean repo_path = tempfile.mkdtemp("dvc-erepo") logger.debug("erepo: making a copy of %s clone", url) - copy_tree(clone_path, repo_path) + copytree(clone_path, repo_path) # Check out the specified revision if for_write:
modals: Make settings page selectors more specific. The `#settings_page .right.show` selector was breaking the Emoji style inputs in Display settings on mobile responsive view. Fixes
@@ -989,7 +989,7 @@ form#add_new_subscription { #subscription_overlay .left, #subscription_overlay .right, #settings_page .left, - #settings_page .right { + #settings_page .content-wrapper.right { position: absolute; display: block; margin: 0; @@ -1008,7 +1008,7 @@ form#add_new_subscription { } #subscription_overlay .right, - #settings_page .right { + #settings_page .content-wrapper.right { position: absolute; left: 101%; @@ -1022,7 +1022,7 @@ form#add_new_subscription { } #subscription_overlay .right.show, - #settings_page .right.show { + #settings_page .content-wrapper.right.show { left: 0%; }
extra-filerefs include files even if no refs in states to apply Fixes
@@ -135,9 +135,9 @@ def lowstate_file_refs(chunks, extras=''): elif state.startswith('__'): continue crefs.extend(salt_refs(chunk[state])) - if crefs: if saltenv not in refs: refs[saltenv] = [] + if crefs: refs[saltenv].append(crefs) if extras: extra_refs = extras.split(',')
revert change to get_closed_and_deleted_ids case lite view doesn't include deleted cases
@@ -10,7 +10,7 @@ from casexml.apps.case.dbaccessors import ( get_related_indices, ) from casexml.apps.case.models import CommCareCase -from casexml.apps.case.util import get_case_xform_ids +from casexml.apps.case.util import get_case_xform_ids, iter_cases from casexml.apps.stock.models import StockTransaction from corehq.apps.commtrack.models import StockState from corehq.apps.hqcase.dbaccessors import ( @@ -20,9 +20,8 @@ from corehq.apps.hqcase.dbaccessors import ( get_case_ids_in_domain_by_owner, get_cases_in_domain_by_external_id, get_deleted_case_ids_by_owner, - get_all_case_owner_ids, iter_lite_cases_json) + get_all_case_owner_ids) from corehq.apps.hqcase.utils import get_case_by_domain_hq_user_id -from corehq.blobs.mixin import BlobMixin from corehq.dbaccessors.couchapps.cases_by_server_date.by_owner_server_modified_on import \ get_case_ids_modified_with_owner_since from corehq.dbaccessors.couchapps.cases_by_server_date.by_server_modified_on import \ @@ -141,16 +140,9 @@ class CaseAccessorCouch(AbstractCaseAccessor): WARNING this is inefficient (better version in SQL). """ - from dimagi.utils.couch.undo import DELETED_SUFFIX - - def _is_deleted(case_doc): - return case_doc['doc_type'].endswith(DELETED_SUFFIX) - - return [ - (case['_id'], case['closed'], _is_deleted(case)) - for case in iter_lite_cases_json(case_ids) - if case['domain'] == domain and (case['closed'] or _is_deleted(case)) - ] + return [(case.case_id, case.closed, case.is_deleted) + for case in iter_cases(case_ids) + if case.domain == domain and (case.closed or case.is_deleted)] @staticmethod def get_modified_case_ids(accessor, case_ids, sync_log):
Lexical envs: minor refactoring in Shed_Rebindings TN:
@@ -875,13 +875,6 @@ package body Langkit_Support.Lexical_Env is (From_Env : Lexical_Env; Rebindings : Env_Rebindings) return Env_Rebindings is - function Get_First_Rebindable_Env (L : Lexical_Env) return Lexical_Env - is - (if L = null - or else (L.Node /= No_Element and then Is_Rebindable (L.Node)) - then L - else Get_First_Rebindable_Env (Get_Env (L.Parent))); - First_Rebindable_Parent : Lexical_Env; Current_Last_Binding : Natural; begin @@ -892,16 +885,25 @@ package body Langkit_Support.Lexical_Env is Current_Last_Binding := Rebindings.Size; - -- Try to find a rebindable node in the parent chain - First_Rebindable_Parent := Get_First_Rebindable_Env (From_Env); + -- Look for the first environment in From_Env's parent chain whose Node + -- is rebindable. Use null if there is no such env. + First_Rebindable_Parent := From_Env; + while + First_Rebindable_Parent /= null + and then (First_Rebindable_Parent.Node = No_Element + or else not Is_Rebindable (First_Rebindable_Parent.Node)) + loop + First_Rebindable_Parent := Get_Env (First_Rebindable_Parent.Parent); + end loop; -- If there is no rebindable parent anywhere, it means we cannot have - -- rebindings. In that case, shed them all, e.g. return null rebindings. + -- rebindings. In that case, shed them all, i.e. return null rebindings. if First_Rebindable_Parent = null then return null; - else - -- If we find a rebindable parent, then we will shed every rebindings - -- between the top of the rebinding stack, and the corresponding + end if; + + -- If we fond a rebindable parent, then we will shed all rebindings + -- between the top of the rebinding stack and the corresponding -- rebinding. while Current_Last_Binding >= 1 @@ -912,12 +914,10 @@ package body Langkit_Support.Lexical_Env is Current_Last_Binding := Current_Last_Binding - 1; end loop; - if Current_Last_Binding /= 0 then - return Create (Rebindings.Bindings (1 .. Current_Last_Binding)); - else - return null; - end if; - end if; + return + (if Current_Last_Binding /= 0 + then Create (Rebindings.Bindings (1 .. Current_Last_Binding)) + else null); end Shed_Rebindings; ---------------------
List.get_type: use resolve_type on the list_cls TN:
@@ -941,7 +941,7 @@ class List(Parser): def get_type(self): with self.diagnostic_context(): if self.list_cls: - ret = self.list_cls + ret = resolve_type(self.list_cls) check_source_language( ret.is_list_type, 'Invalid list type for List parser: {}. '
Update whoisvalentine.py to address requested changes A few things have been changed to address the changes listed under PR Changed the color tag of the embed from both commands from discord.Color.dark_magenta() to bots.constants.Colours.pink Renamed valentine_facts to valentine_fact These changes are needed to fit the projects standards.
@@ -6,6 +6,8 @@ from random import choice import discord from discord.ext import commands +from bot.constants import Colours + log = logging.getLogger(__name__) with open(Path("bot", "resources", "valentines", "valentine_facts.json"), "r") as file: @@ -24,7 +26,7 @@ class ValentineFacts: embed = discord.Embed( title="Who is Saint Valentine?", description=FACTS['whois'], - color=discord.Color.dark_magenta() + color=Colours.pink ) embed.set_thumbnail( url='https://upload.wikimedia.org/wikipedia/commons/thumb/f/f1/Saint_Valentine_-_' @@ -34,14 +36,14 @@ class ValentineFacts: await ctx.channel.send(embed=embed) @commands.command() - async def valentine_facts(self, ctx): + async def valentine_fact(self, ctx): """ Shows a random fact about Valentine's Day. """ embed = discord.Embed( title=choice(FACTS['titles']), description=choice(FACTS['text']), - color=discord.Color.dark_magenta() + color=Colours.pink ) await ctx.channel.send(embed=embed)
Documentation: replacing unhelpful link The link that was previously used here does not contain information about supported framework version of PyTorch. Adding a link to the DLC images, which contains all supported PT version.
@@ -75,7 +75,7 @@ class PyTorch(Framework): framework_version (str): PyTorch version you want to use for executing your model training code. Defaults to ``None``. Required unless ``image_uri`` is provided. List of supported versions: - https://github.com/aws/sagemaker-python-sdk#pytorch-sagemaker-estimators. + https://github.com/aws/deep-learning-containers/blob/master/available_images.md. py_version (str): Python version you want to use for executing your model training code. One of 'py2' or 'py3'. Defaults to ``None``. Required unless ``image_uri`` is provided.
{CI} Update build.sh Support `.sql`
@@ -142,6 +142,7 @@ cat >>$testsrc_dir/setup.py <<EOL '*.md', '*.pem', '*.pfx', + '*.sql', '*.txt', '*.txt', '*.xml', @@ -157,6 +158,7 @@ cat >>$testsrc_dir/setup.py <<EOL '**/*.md', '**/*.pem', '**/*.pfx', + '**/*.sql', '**/*.txt', '**/*.txt', '**/*.xml',
Fix bug in result of compile if AST flag is set. If a class with __slots__ inherits a class without __slots__, any attribute can be set to its instances.
@@ -365,10 +365,7 @@ function compile() { root.parent_block = $B.builtins_scope $B.parser.dispatch_tokens(root, $.source) if($.flags == $B.PyCF_ONLY_AST){ - var ast = root.ast(), - klass = ast.constructor.$name - $B.create_python_ast_classes() - return $B.python_ast_classes[klass].$factory(ast) + return root.ast() } return $ } @@ -2450,6 +2447,19 @@ $B.$setattr = function(obj, attr, value){ var special_attrs = ["__module__"] if(klass && klass.__slots__ && special_attrs.indexOf(attr) == -1 && ! _setattr){ + var _slots = true + for(var kl of klass.__mro__){ + if(kl === _b_.object || kl === _b_.type){ + break + } + if(! kl.__slots__){ + // If class inherits from a class without __slots__, allow + // setattr for any attribute + _slots = false + break + } + } + if(_slots){ function mangled_slots(klass){ if(klass.__slots__){ if(Array.isArray(klass.__slots__)){ @@ -2482,6 +2492,7 @@ $B.$setattr = function(obj, attr, value){ throw $B.attr_error(attr, klass) } } + } if($test){console.log("attr", attr, "use _setattr", _setattr)} if(!_setattr){ if(obj.__dict__ === undefined){
call mem::forget on variable passed from Rust to Python Prior to this commit there was a Clippy lint error about calling mem::forget on a Copy trait, which doesn't have a destructor.
@@ -304,7 +304,7 @@ pub unsafe extern "C" fn block_publisher_summarize_block( *result = consensus.as_ptr(); *result_len = consensus.as_slice().len(); - mem::forget(result); + mem::forget(consensus); ErrorCode::Success }
Complete the docstrings for forward passes in the model base class modified: src/poem/models/base.py
@@ -126,8 +126,16 @@ class BaseModule(nn.Module): self.to(self.device) torch.cuda.empty_cache() - # Predicting scores calls the owa forward function, as this def predict_scores(self, triples): + """ + Calculate the scores for triples. + This method takes subject, relation and object of each triple and calculates the corresponding score. + + :param triples: torch.tensor, shape: (number of triples, 3) + The indices of (subject, relation, object) triples. + :return: numpy.ndarray, shape: (number of triples, 1) + The score for each triple. + """ scores = self.forward_owa(triples) return scores.detach().cpu().numpy() @@ -167,9 +175,12 @@ class BaseModule(nn.Module): ) -> torch.tensor: """ Forward pass for training with the OWA. + This method takes subject, relation and object of each triple and calculates the corresponding score. - :param batch: torch.tensor, shape: TODO: ??? - :return: torch.tensor, shape: TODO: ??? + :param batch: torch.tensor, shape: (batch_size, 3) + The indices of (subject, relation, object) triples. + :return: torch.tensor, shape: (batch_size, 1) + The score for each triple. """ raise NotImplementedError() @@ -180,7 +191,8 @@ class BaseModule(nn.Module): batch: torch.tensor, ) -> torch.tensor: """ - Forward pass for training and evaluation with the CWA. + Forward pass using right side (object) prediction for training with the CWA. + This method calculates the score for all possible objects for each (subject, relation) pair. :param batch: torch.tensor, shape: (batch_size, 2) The indices of (subject, relation) pairs. @@ -195,7 +207,8 @@ class BaseModule(nn.Module): batch: torch.tensor, ) -> torch.tensor: """ - Forward pass for evaluation with the CWA. + Forward pass using left side (subject) prediction for training with the CWA. + This method calculates the score for all possible subjects for each (relation, object) pair. :param batch: torch.tensor, shape: (batch_size, 2) The indices of (relation, object) pairs. @@ -204,7 +217,6 @@ class BaseModule(nn.Module): """ raise NotImplementedError() - # FIXME this isn't used anywhere def get_grad_params(self) -> Iterable[nn.Parameter]: """Get the parameters that require gradients.""" return get_params_requiring_grad(self)
Update HPE Aruba documentation for 5400R This documentation-only update adds a missing command for the HPE Aruba 5400R, which enables V3-only mode. This is necessary for a few OpenFlow commands related to pipeline configuration.
@@ -12,7 +12,7 @@ These switches include: - `3810 <http://www.arubanetworks.com/products/networking/switches/3810-series/>`_ - `2930F <http://www.arubanetworks.com/products/networking/switches/2930f-series/>`_ -The FAUCET pipeline is only supported from ``16.03`` release of the firmware onwards. +The FAUCET pipeline is only supported from ``16.03`` release of the firmware onwards. HPE Aruba recommends use of the latest available firmware, which can be downloaded from `HPE Support <https://www.hpe.com/networking/support>`_. For any queries, please post your question on HPE's `SDN forum <https://community.hpe.com/t5/SDN-Discussions/bd-p/sdn-discussions>`_. @@ -37,6 +37,17 @@ System & Network Requirements Switch ^^^^^^ +**Chassis configuration** + +Skip this step if you have a fixed configuration system (2930 or 3810). On a chassis system with insertable cards (5400R) new cards are configured to work in a backwards-compatible way (with reduced functionality) unless older cards are disabled in the chassis. To disable older (V2) cards and enable all functionality necessary to operate FAUCET, put the chassis into a mode where only V3 cards are allowed. + +* *Chassis system (5400R)* + +.. code-block:: none + + // Disable backwards compatibility, enable full Openflow flexibility + switch (config)# no allow-v2-modules + **VLAN/PORT configuration** To ensure any port/vlan configuration specified in the *faucet.yaml* file works, one needs to pre-configure all ``vlans`` on the switch. Every dataplane port on the switch is made a tagged member of every vlan. This permits FAUCET to perform flow matching and packet-out on any port/vlan combination. The control-plane port (either OOBM or a front-panel port) is kept separate, so that FAUCET does not attempt to modify the control-plane port state.
Fix Distributive link in README Distributive link originally pointed to consul.io, instead point to the Github repository for Distributive.
@@ -3,8 +3,8 @@ Distributive .. versionadded:: 1.1 -`Distributive <https://www.consul.io/>`_ is used in Mantl to run detailed, -granular health checks for various services. +`Distributive <https://github.com/CiscoCloud/distributive>`_ is used in Mantl to +run detailed, granular health checks for various services. This role is run several times as a dependency for other roles.
Deseasonify: remove `Evergreen` season The `SeasonBase` now serves as the fallback, off-season season for when no other season is available.
-from bot.seasons import SeasonBase - - -class Evergreen(SeasonBase): - """Evergreen Seasonal event attributes.""" - - bot_icon = "/logos/logo_seasonal/evergreen/logo_evergreen.png" - icon = ( - "/logos/logo_animated/heartbeat/heartbeat_512.gif", - "/logos/logo_animated/spinner/spinner_512.gif", - "/logos/logo_animated/tongues/tongues_512.gif", - "/logos/logo_animated/winky/winky_512.gif", - "/logos/logo_animated/jumper/jumper_512.gif", - "/logos/logo_animated/apple/apple_512.gif", - "/logos/logo_animated/blinky/blinky_512.gif", - "/logos/logo_animated/runner/runner_512.gif", - )
updated news.rst updated News.rst with information about fastqc reports
@@ -7,6 +7,8 @@ snakePipes 1.2.1 * Fixed a typo in ``createIndices``. * Implemented complex experimental design in RNAseq (differential gene expression), ChIP/ATACseq (differential binding). * Fixed an issue with ggplot2 and log transformation in RNAseq report Rmd. + * fastqc folder is created and its content will be added to multiqc only if fastqc flag is called. + * fastqc-trimmed folder is created and its content will be added to multiqc only if both fastqc and trim flags are called. snakePipes 1.2.0 ----------------
fix epacems_year_and_state handling "all" input Also use etl_params dict keys epacems_year/state instead of just year/state
@@ -4,6 +4,7 @@ from pathlib import Path import dask.dataframe as dd import pytest +from pudl.etl import _validate_params_epacems from pudl.output.epacems import epacems @@ -14,7 +15,8 @@ def epacems_year_and_state(etl_params): epacems = [item for item in etl_params['datapkg_bundle_settings'] [0]['datasets'] if 'epacems' in item.keys()] epacems = epacems[0]['epacems'] - return {'years': epacems['epacems_years'], 'states': epacems['epacems_states']} + params = _validate_params_epacems(epacems) + return params @pytest.fixture(scope='session') @@ -30,10 +32,10 @@ def epacems_parquet_path( def test_epacems_subset(epacems_year_and_state, epacems_parquet_path): """Minimal integration test of epacems(). Check if it returns a DataFrame.""" path = epacems_parquet_path - years = epacems_year_and_state['years'] + years = epacems_year_and_state['epacems_years'] # Use only Idaho if multiple states are given - states = epacems_year_and_state['states'] if len( - epacems_year_and_state['states']) == 1 else ['ID'] + states = epacems_year_and_state['epacems_states'] if len( + epacems_year_and_state['epacems_states']) == 1 else ['ID'] actual = epacems(columns=["gross_load_mw"], epacems_path=path, years=years, @@ -45,8 +47,8 @@ def test_epacems_subset(epacems_year_and_state, epacems_parquet_path): def test_epacems_subset_input_validation(epacems_year_and_state, epacems_parquet_path): """Check if invalid inputs raise exceptions.""" path = epacems_parquet_path - valid_year = epacems_year_and_state['years'][-1] - valid_state = epacems_year_and_state['states'][-1] + valid_year = epacems_year_and_state['epacems_years'][-1] + valid_state = epacems_year_and_state['epacems_states'][-1] valid_column = "gross_load_mw" invalid_state = 'confederacy'
Remove google safebrowsing flags Global Wayback policy is to archive everything, so its best to avoid disabling these flags.
@@ -163,9 +163,6 @@ class Chrome: '--disable-first-run-ui', '--no-first-run', '--homepage=about:blank', '--disable-direct-npapi-requests', '--disable-web-security', '--disable-notifications', - '--disable-client-side-phishing-detection', - '--safebrowsing-disable-auto-update', - '--safebrowsing-disable-download-protection', '--disable-extensions', '--disable-save-password-bubble'] if self.ignore_cert_errors: chrome_args.append('--ignore-certificate-errors')
Update README.md Correct Markdown syntax of the link.
@@ -7,7 +7,7 @@ pywebview is a lightweight cross-platform wrapper around a webview component tha pywebview is lightweight and has no dependencies on an external GUI framework. It uses native GUI for creating a web component window: WinForms on Windows, Cocoa on Mac OSX and Qt4/5 or GTK3 on Linux. If you choose to freeze your application, it does not bundle a heavy GUI toolkit with it keeping the executable size small. Compatible with both Python 2 and 3. While Android is not supported, you can use the same codebase with solutions like [Python for Android](https://github.com/kivy/python-for-android) for creating an APK. -Licensed under the BSD license. Maintained by [Roman Sirokov](https://github.com/r0x0r/) and [Shiva Prasad] (https://github.com/shivaprsdv) +Licensed under the BSD license. Maintained by [Roman Sirokov](https://github.com/r0x0r/) and [Shiva Prasad](https://github.com/shivaprsdv). # Gallery
Add cls command Note that this relies on Console Window object storing the panel in a `control` attribute (rather than `panel`) and I will standardise this in the next commit.
@@ -7,6 +7,10 @@ from ..mwindow import MWindow _ = wx.GetTranslation +RICHTEXT_TRANSLATE = { + "[Red]": "", +} + def register_panel_console(window, context): panel = ConsolePanel(window, wx.ID_ANY, context=context) @@ -27,6 +31,22 @@ def register_panel_console(window, context): window.on_pane_add(pane) context.register("pane/console", pane) + @context.console_command( + "cls", + help=_("Clear console screen"), + ) + def clear_console(channel, _, *args, **kwargs): + panels = [ + context.opened[x] + for x in ("window/Console", "window/Terminal") + if x in context.opened + ] + + panels.append( + context.registered["pane/console"] + ) + for panel in panels: + panel.control.clear() class ConsolePanel(wx.Panel): def __init__(self, *args, context=None, **kwargs): @@ -41,7 +61,7 @@ class ConsolePanel(wx.Panel): style=wx.richtext.RE_MULTILINE | wx.richtext.RE_READONLY ) - self.text_mainBeginSuppressUndo() + self.text_main.BeginSuppressUndo() font = wx.Font( 10, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL, ) @@ -90,6 +110,9 @@ class ConsolePanel(wx.Panel): def finalize(self, *args): self.context.channel("console").unwatch(self.update_text) + def clear(self): + self.text_main.Clear() + def update_text(self, text): if not wx.IsMainThread(): wx.CallAfter(self.update_text_gui, str(text) + "\n") @@ -155,7 +178,7 @@ class ConsolePanel(wx.Panel): class Console(MWindow): def __init__(self, *args, **kwds): super().__init__(581, 410, *args, **kwds) - self.panel = ConsolePanel(self, wx.ID_ANY, context=self.context) + self.control = ConsolePanel(self, wx.ID_ANY, context=self.context) _icon = wx.NullIcon _icon.CopyFromBitmap(icons8_console_50.GetBitmap()) self.SetIcon(_icon) @@ -163,7 +186,7 @@ class Console(MWindow): self.Layout() def window_open(self): - self.panel.initialize() + self.control.initialize() def window_close(self): - self.panel.finalize() + self.control.finalize()
Skip failing test Summary: Pull Request resolved:
@@ -3219,7 +3219,7 @@ def foo(x): self.checkScript(annotate_none, ()) self.checkScript(annotate_none_no_optional, ()) - @unittest.skipIf(PY2, "Python 3 required") + @unittest.skipIf(True, "Python 3 required") def test_type_annotate_py3(self): code = dedent(""" import torch
luhn: update tests to v1.3.0 Closes
@@ -5,7 +5,7 @@ import unittest from luhn import Luhn -# Tests adapted from `problem-specifications//canonical-data.json` @ v1.2.0 +# Tests adapted from `problem-specifications//canonical-data.json` @ v1.3.0 class LuhnTest(unittest.TestCase): def test_single_digit_strings_can_not_be_valid(self): @@ -29,6 +29,9 @@ class LuhnTest(unittest.TestCase): def test_invalid_credit_card(self): self.assertIs(Luhn("8273 1232 7352 0569").is_valid(), False) + def test_valid_number_with_an_even_number_of_digits(self): + self.assertIs(Luhn("095 245 88").is_valid(), True) + def test_valid_strings_with_a_non_digit_included_become_invalid(self): self.assertIs(Luhn("055a 444 285").is_valid(), False)
ENH: convert_tinydb_to_sqlite uses the original log date [CHANGED] derives from first line of the logfile if possible, otherwise leaves as conversion date
from __future__ import annotations +import contextlib import inspect import json import re @@ -589,22 +590,22 @@ def convert_directory_datastore( def convert_tinydb_to_sqlite(source: Path, dest: Optional[Path] = None) -> DataStoreABC: - try: + from datetime import datetime from fnmatch import translate + from .data_store import load_record_from_json + from .io_new import write_db + from .sqlite_data_store import _LOG_TABLE, DataStoreSqlite + + try: from tinydb import Query, TinyDB from tinydb.middlewares import CachingMiddleware from tinydb.storages import JSONStorage - - from cogent3.app.data_store import load_record_from_json - from cogent3.app.sqlite_data_store import DataStoreSqlite except ImportError as e: raise ImportError( "You need to install tinydb to be able to migrate to new datastore." ) from e - from .io_new import write_db - source = Path(source) storage = CachingMiddleware(JSONStorage) tinydb = TinyDB(str(source), storage=storage) @@ -630,7 +631,19 @@ def convert_tinydb_to_sqlite(source: Path, dest: Optional[Path] = None) -> DataS writer = write_db(data_store=dstore) for id, data, is_completed in data_list: if id.endswith(".log"): - writer.data_store.write_log(unique_id=id, data=data) + cmnd = f"UPDATE {_LOG_TABLE} SET data =?, log_name =?" + values = (data, id) + with contextlib.suppress(ValueError): + date = datetime.strptime( + data.split("\t", maxsplit=1)[0], "%Y-%m-%d %H:%M:%S" + ) + cmnd = f"{cmnd}, date=?" + values += (date,) + + cmnd = f"{cmnd} WHERE log_id=?" + values += (dstore._log_id,) + + dstore.db.execute(cmnd, values) else: writer.main(data, identifier=id)
update rebuildstaging to tell you how to deploy rather than doing it for you, since deploys are now done from commcarehq-ansible repo
@@ -67,7 +67,8 @@ fi if [[ $deploy = 'y' && $no_push != 'y' ]] then - rebuildstaging $args && git checkout autostaging && git submodule update --init --recursive && fab staging awesome_deploy:confirm=no + rebuildstaging $args && \ + echo "rebuildstaging will no longer deploy for you. From commcarehq-ansible, run `fab staging deploy`" else rebuildstaging $args fi
Update to use latest stable Ocean dwave-tabu -> 0.2.x dwave-neal -> 0.5.x dwave-hybrid -> 0.3.x dwave-networkx -> 0.8.x
@@ -29,12 +29,12 @@ else: install_requires = [ - 'dwave-networkx>=0.7.0,<0.8.0', + 'dwave-networkx>=0.8.0,<0.9.0', 'dwave-system>=0.7.0,<0.8.0', 'dwave-qbsolv>=0.2.7,<0.3.0', - 'dwave-hybrid>=0.2.0,<0.3.0', - 'dwave-neal>=0.4.0,<0.5.0', - 'dwave-tabu>=0.1.3,<0.2.0', + 'dwave-hybrid>=0.3.0,<0.4.0', + 'dwave-neal>=0.5.0,<0.6.0', + 'dwave-tabu>=0.2.0,<0.3.0', 'dimod>=0.8.0,<0.9.0', 'numpy<1.16.0', # only while we support py34 'pyqubo>=0.3.0',
Update response.py Providing more information in case of AudienceRestrictions conditions not satisfied
@@ -212,10 +212,8 @@ def for_me(conditions, myself): if audience.text.strip() == myself: return True else: - # print("Not for me: %s != %s" % (audience.text.strip(), - # myself)) - pass - + logger.debug("AudienceRestriction - One condition not satisfied: %s != %s" % (audience.text.strip(), myself)) + logger.debug("AudienceRestrictions not satisfied!") return False @@ -613,7 +611,7 @@ class AuthnResponse(StatusResponse): if not for_me(conditions, self.entity_id): if not lax: - raise Exception("Not for me!!!") + raise Exception("AudienceRestrictions conditions not satisfied! (Local entity_id=%s)" % self.entity_id) if conditions.condition: # extra conditions for cond in conditions.condition:
nix: add defaultText to services.lnbits.package Without this, evaluating the module doesn't provide a default value visible on search.nixos.org
let defaultUser = "lnbits"; cfg = config.services.lnbits; - inherit (lib) mkOption mkIf types optionalAttrs; + inherit (lib) mkOption mkIf types optionalAttrs literalExpression; in { @@ -25,6 +25,7 @@ in }; package = mkOption { type = types.package; + defaultText = literalExpression "pkgs.lnbits"; default = pkgs.lnbits; description = '' The lnbits package to use.
Update netwire.txt Adding Aliases field
# Copyright (c) 2014-2019 Maltrail developers (https://github.com/stamparm/maltrail/) # See the file 'LICENSE' for copying permission +# Aliases: netwiredrc, netwire + # Reference: https://www.sophos.com/en-us/threat-center/threat-analyses/viruses-and-spyware/Troj~NetWire-EK/detailed-analysis.aspx mommyreal.ddns.net
D( G(z).detach() ), added D means for monitoring and use os.makedirs py2.7 compatible
@@ -35,7 +35,10 @@ parser.add_argument('--outf', default='.', help='folder to output images and mod opt = parser.parse_args() print(opt) -os.makedirs(opt.outf, exist_ok=True) +try: + os.makedirs(opt.outf) +except OSError: + pass opt.manualSeed = random.randint(1, 10000) # fix seed print("Random Seed: ", opt.manualSeed) random.seed(opt.manualSeed) @@ -205,16 +208,17 @@ for epoch in range(opt.niter): output = netD(input) errD_real = criterion(output, label) errD_real.backward() + D_x = output.data.mean() # train with fake noise.data.resize_(batch_size, nz, 1, 1) noise.data.normal_(0, 1) - fake = netG(noise) - input.data.copy_(fake.data) + fake = netG(noise).detach() label.data.fill_(fake_label) - output = netD(input) + output = netD(fake) errD_fake = criterion(output, label) errD_fake.backward() + D_G_z1 = output.data.mean() errD = errD_real + errD_fake optimizerD.step() @@ -228,11 +232,12 @@ for epoch in range(opt.niter): output = netD(fake) errG = criterion(output, label) errG.backward() + D_G_z2 = output.data.mean() optimizerG.step() - print('[%d/%d][%d/%d] Loss_D: %f Loss_G: %f' + print('[%d/%d][%d/%d] Loss_D: %.4f Loss_G: %.4f D(x): %.4f D(G(z)): %.4f / %.4f' % (epoch, opt.niter, i, len(dataloader), - errD.data[0], errG.data[0])) + errD.data[0], errG.data[0], D_x, D_G_z1, D_G_z2)) if i % 100 == 0: vutils.save_image(real_cpu, '%s/real_samples.png' % opt.outf)
removing iOS and Android configuration These settings are now better managed natively by CMake
@@ -62,35 +62,12 @@ class CppRestSDKConan(ConanFile): if self._cmake: return self._cmake - if self.settings.os == "iOS": - with open('toolchain.cmake', 'w') as toolchain_cmake: - if self.settings.arch == "armv8": - arch = "arm64" - sdk = "iphoneos" - elif self.settings.arch == "x86_64": - arch = "x86_64" - sdk = "iphonesimulator" - sysroot = tools.XCRun(self.settings).sdk_path - toolchain_cmake.write('set(CMAKE_C_COMPILER /usr/bin/clang CACHE STRING "" FORCE)\n') - toolchain_cmake.write('set(CMAKE_CXX_COMPILER /usr/bin/clang++ CACHE STRING "" FORCE)\n') - toolchain_cmake.write('set(CMAKE_C_COMPILER_WORKS YES)\n') - toolchain_cmake.write('set(CMAKE_CXX_COMPILER_WORKS YES)\n') - toolchain_cmake.write('set(CMAKE_XCODE_EFFECTIVE_PLATFORMS "-%s" CACHE STRING "" FORCE)\n' % sdk) - toolchain_cmake.write('set(CMAKE_OSX_ARCHITECTURES "%s" CACHE STRING "" FORCE)\n' % arch) - toolchain_cmake.write('set(CMAKE_OSX_SYSROOT "%s" CACHE STRING "" FORCE)\n' % sysroot) - os.environ['CONAN_CMAKE_TOOLCHAIN_FILE'] = os.path.join(os.getcwd(), 'toolchain.cmake') - self._cmake = CMake(self, set_cmake_flags=True) self._cmake.definitions["BUILD_TESTS"] = False self._cmake.definitions["BUILD_SAMPLES"] = False self._cmake.definitions["WERROR"] = False self._cmake.definitions["CPPREST_EXCLUDE_WEBSOCKETS"] = not self.options.websockets self._cmake.definitions["CPPREST_EXCLUDE_COMPRESSION"] = not self.options.compression - if self.settings.os == "iOS": - self._cmake.definitions["IOS"] = True - elif self.settings.os == "Android": - self._cmake.definitions["ANDROID"] = True - self._cmake.definitions["CONAN_LIBCXX"] = '' self._cmake.configure(build_folder=self._build_subfolder) return self._cmake @@ -122,15 +99,6 @@ endfunction() if self.settings.compiler == 'clang' and str(self.settings.compiler.libcxx) in ['libstdc++', 'libstdc++11']: tools.replace_in_file(os.path.join(self._source_subfolder, 'Release', 'CMakeLists.txt'), 'libc++', 'libstdc++') - if self.settings.os == 'Android': - tools.replace_in_file(os.path.join(self._source_subfolder, 'Release', 'src', 'pch', 'stdafx.h'), - '#include "boost/config/stdlib/libstdcpp3.hpp"', - '//#include "boost/config/stdlib/libstdcpp3.hpp"') - # https://github.com/Microsoft/cpprestsdk/issues/372#issuecomment-386798723 - tools.replace_in_file(os.path.join(self._source_subfolder, 'Release', 'src', 'http', 'client', - 'http_client_asio.cpp'), - 'm_timer.expires_from_now(m_duration)', - 'm_timer.expires_from_now(std::chrono::microseconds(m_duration.count()))') def build(self): self._patch()
Add a note to uninstall pip packages on unstack Adding a note in quickstart guide to uninstall pip packages before restacking the environment. Closes Bug:
@@ -52,6 +52,16 @@ Run devstack:: $ cd /opt/stack/devstack $ ./stack.sh +.. note:: + + If the developer have a previous devstack environment and they want to re-stack + the environment, they need to uninstall the pip packages before restacking:: + + $ ./unstack.sh + $ ./clean.sh + $ pip freeze | grep -v '^\-e' | xargs sudo pip uninstall -y + $ ./stack.sh + Prepare your session to be able to use the various openstack clients including nova, neutron, and glance. Create a new shell, and source the devstack openrc script::
Install torchvision before all tests, tickles Summary: Pull Request resolved:
@@ -159,19 +159,20 @@ test_custom_script_ops() { } if [ -z "${JOB_BASE_NAME}" ] || [[ "${JOB_BASE_NAME}" == *-test ]]; then + test_torchvision test_python_nn test_python_all_except_nn test_aten - test_torchvision test_libtorch test_custom_script_ops else if [[ "${JOB_BASE_NAME}" == *-test1 ]]; then + test_torchvision test_python_nn elif [[ "${JOB_BASE_NAME}" == *-test2 ]]; then + test_torchvision test_python_all_except_nn test_aten - test_torchvision test_libtorch test_custom_script_ops fi
[images] only build images on deploy or dev * [images] only build images on deploy or dev The wheel container prevents this image from ever being cached. * Update build.yaml
@@ -1576,6 +1576,8 @@ steps: inputs: - from: /wheel-container.tar to: /wheel-container.tar + scopes: + - dev - kind: runImage name: test_hail_public_image image: @@ -1587,6 +1589,8 @@ steps: gsutil --version dependsOn: - hail_public_image + scopes: + - dev - kind: buildImage name: genetics_public_image dockerFile: docker/genetics/Dockerfile @@ -1594,6 +1598,8 @@ steps: publishAs: genetics-public dependsOn: - hail_public_image + scopes: + - dev - kind: runImage name: test_genetics_public_image image: @@ -1605,3 +1611,5 @@ steps: plink2 --version dependsOn: - genetics_public_image + scopes: + - dev
Update CentOS quickstart doc This line looked out of place seeing as I'm deploying Queens. I checked with Major Hayden who verified that adding the extra repo is no longer required.
@@ -61,7 +61,6 @@ system packages are upgraded and then reboot into the new kernel: ## CentOS # yum upgrade - # yum install https://rdoproject.org/repos/openstack-pike/rdo-release-pike.rpm # yum install git # reboot
DOC: update np.around docstring with note about floating-point error Fixes [ci-skip]
@@ -3125,10 +3125,35 @@ def around(a, decimals=0, out=None): ----- For values exactly halfway between rounded decimal values, NumPy rounds to the nearest even value. Thus 1.5 and 2.5 round to 2.0, - -0.5 and 0.5 round to 0.0, etc. Results may also be surprising due - to the inexact representation of decimal fractions in the IEEE - floating point standard [1]_ and errors introduced when scaling - by powers of ten. + -0.5 and 0.5 round to 0.0, etc. + + ``np.around`` uses a fast but sometimes inexact algorithm to round + floating-point datatypes. For positive `decimals` it is equivalent to + ``np.true_divide(np.rint(a * 10**decimals), 10**decimals)``, which is + inexact for large floating-point values or large values of `decimals` due + the inexact representation of decimal fractions in the IEEE floating point + standard [1]_ and errors introduced when scaling by powers of ten. For + instance, note the extra "1" in the following: + + >>> np.round(56294995342131.5, 3) + 56294995342131.51 + + If your goal is to print such values with a fixed number of decimals, it is + preferable to use numpy's float printing routines to limit the number of + printed decimals: + + >>> np.format_float_positional(56294995342131.5, precision=3) + '56294995342131.5' + + The float printing routines use an accurate but much more computationally + demanding algorithm to compute the number of digits after the decimal + point. + + Alternatively, Python's builtin `round` function uses a more accurate + but slower algorithm for 64-bit floating point values: + + >>> round(56294995342131.5, 3) + 56294995342131.5 References ----------
Ignore kill event in interchange command server The interchange is shut down by process termination, so this code path is not needed.
@@ -264,7 +264,7 @@ class Interchange(object): hub_channel.send_pyobj((MessageType.NODE_INFO, d)) @wrap_with_logs(target="interchange") - def _command_server(self, kill_event): + def _command_server(self): """ Command server to run async command to the interchange """ logger.debug("Command Server Starting") @@ -274,7 +274,7 @@ class Interchange(object): reply: Any # the type of reply depends on the command_req received (aka this needs dependent types...) - while not kill_event.is_set(): + while True: try: command_req = self.command_channel.recv_pyobj() logger.debug("Received command request: {}".format(command_req)) @@ -348,7 +348,6 @@ class Interchange(object): self._task_puller_thread.start() self._command_thread = threading.Thread(target=self._command_server, - args=(self._kill_event,), name="Interchange-Command") self._command_thread.start()
fix test_create_newdb for Python <3.8 The Path.unlink() method was introduced in 3.8 - using it broke the remaining tests in older Python because the test database was not created.
@@ -85,7 +85,9 @@ class TestCreatedbSubcommand(unittest.TestCase): def test_create_newdb(self): """Test creation of new empty pyani database.""" # Remove existing dbpath first - self.dbpath.unlink(missing_ok=True) + # The missing_ok argument does not come in until Python 3.8 + if self.dbpath.exists(): + self.dbpath.unlink() # Create new database subcommands.subcmd_createdb(self.argsdict["create_newdb"])
Another dce fix Summary: Pull Request resolved: Another place where onnx export is running dead code elimination after making the jit graph invalid. Fixing it.
@@ -351,7 +351,7 @@ def _model_to_graph(model, args, verbose=False, training=False, if do_constant_folding and _export_onnx_opset_version == 9: params_dict = torch._C._jit_pass_onnx_constant_fold(graph, params_dict) - torch._C._jit_pass_dce(graph) + torch._C._jit_pass_dce_allow_deleting_nodes_with_side_effects(graph) if verbose: print(graph)
Update rtd.yml Re-start from dev lock
@@ -7,28 +7,26 @@ dependencies: - xarray=0.16.1 - aiohttp=3.6.2 - dask=2.30.0 - - scipy=1.1.0 - - scikit-learn=0.21 -# - distributed=2.30.0 -# - matplotlib=3.3.2 + - distributed=2.30.0 + - matplotlib=3.3.2 + - zarr=2.4.0 + - scikit-learn=0.23.2 - ipython=7.18.1 - - conda-forge::netcdf4=1.5.4 -# - seaborn=0.11.0 - - conda-forge::erddapy=0.7.2 + - netcdf4=1.5.4 + - seaborn=0.11.0 + - erddapy=0.7.2 - fsspec=0.8.3 - - conda-forge::gsw=3.4.0 -# - bottleneck=1.3.2 + - gsw=3.4.0 + - bottleneck=1.3.2 - cftime=1.2.1 - - conda-forge::cfgrib=0.9.8.4 + - cfgrib=0.9.8.4 - pip=20.2.3 - tqdm=4.50.2 - ipykernel=5.3.4 -# - cartopy=0.18.0 -# - ipywidgets=7.5.1 -# - sphinx=3.2.1 # Automatically added by RTD -# - sphinx_rtd_theme=0.4.3 # Automatically added by RTD - - conda-forge::sphinx-autosummary-accessors=0.1.2 - - conda-forge::nbsphinx=0.7.1 + - cartopy=0.18.0 + - ipywidgets=7.5.1 + - sphinx-autosummary-accessors=0.1.2 + - nbsphinx=0.7.1 - numpydoc=1.1.0 - pip: - sphinx_issues==1.2.0 \ No newline at end of file
modified examples/cpp/README.md modified exaamples/cpp/README.md
A walkthrough example of developing algorithm in Python and running it in C++ on MNIST handwritten digit classification talk. +### [mnist_training](mnist_training) + +A walkthrough example of developing algorithm in C++ training with an nnp file of an initialized model on MNIST handwritten digit classification. + ### [cpp_graph](cpp_graph) A demonstration of graph construction using C++ low-level API. (Not well documented so far.)
Update test_preprocess.py We now have 4 specials, value needs to be updated.
@@ -57,8 +57,8 @@ class TestData(unittest.TestCase): self.assertEqual(Counter({'c': 6, 'b': 4, 'a': 2, 'e': 2, 'f': 1}), merged.freqs) - # 3 specicials + 2 words (since we pass 2 to merge_vocabs) - self.assertEqual(5, len(merged.itos)) + # 4 specicials + 2 words (since we pass 2 to merge_vocabs) + self.assertEqual(6, len(merged.itos)) self.assertTrue('b' in merged.itos)
Note that sigma_m is mosaicity Simplest possible fix that closes
@@ -568,7 +568,7 @@ def __init__( self._sigma_b = beam_divergence.sigma() - logger.info("Calculating E.S.D Reflecting Range.") + logger.info("Calculating E.S.D Reflecting Range (mosaicity).") reflecting_range = ComputeEsdReflectingRange( crystal, beam,
MAINT: Relax asserts to match relaxed reducelike resolution behaviour This closes which was due to the assert not being noticed triggered (not sure why) during initial CI run. The behaviour is relaxed, so the assert must also be relaxed.
@@ -3032,8 +3032,12 @@ PyUFunc_Accumulate(PyUFuncObject *ufunc, PyArrayObject *arr, PyArrayObject *out, return NULL; } - /* The below code assumes that all descriptors are identical: */ - assert(descrs[0] == descrs[1] && descrs[0] == descrs[2]); + /* + * The below code assumes that all descriptors are interchangeable, we + * allow them to not be strictly identical (but they typically should be) + */ + assert(PyArray_EquivTypes(descrs[0], descrs[1]) + && PyArray_EquivTypes(descrs[0], descrs[2])); if (PyDataType_REFCHK(descrs[2]) && descrs[2]->type_num != NPY_OBJECT) { /* This can be removed, but the initial element copy needs fixing */ @@ -3445,8 +3449,12 @@ PyUFunc_Reduceat(PyUFuncObject *ufunc, PyArrayObject *arr, PyArrayObject *ind, return NULL; } - /* The below code assumes that all descriptors are identical: */ - assert(descrs[0] == descrs[1] && descrs[0] == descrs[2]); + /* + * The below code assumes that all descriptors are interchangeable, we + * allow them to not be strictly identical (but they typically should be) + */ + assert(PyArray_EquivTypes(descrs[0], descrs[1]) + && PyArray_EquivTypes(descrs[0], descrs[2])); if (PyDataType_REFCHK(descrs[2]) && descrs[2]->type_num != NPY_OBJECT) { /* This can be removed, but the initial element copy needs fixing */
Remove code block from within link block The c.g.c. generator doesn't look like it parses this correctly. Instead of trying to post-process this or modify the generator, I think we should take the easy way out and simply remove the code block.
@@ -66,7 +66,7 @@ _DETAILED_HELP_TEXT = (""" <https://cloud.google.com/storage/docs/object-versioning>`_ - `Guide for using Object Versioning <https://cloud.google.com/storage/docs/using-object-versioning>`_ - - The `reference page for the ``gsutil versioning`` command + - The `reference page for the gsutil versioning command <https://cloud.google.com/storage/docs/gsutil/commands/versioning>`_ - `Overview of generation numbers and preconditions <https://cloud.google.com/storage/docs/generations-preconditions>`_
Pass rank parameter to BotorchModel Summary: Add rank parameter as optional argument for BoTorchModel __init__ function and cross_validate function in botorch.py and also in _get_model in botorch_defaults.py
@@ -170,6 +170,7 @@ class FixedNoiseGP(BatchedMultiOutputGPyTorchModel, ExactGP): train_Yvar: Tensor, covar_module: Optional[Module] = None, outcome_transform: Optional[OutcomeTransform] = None, + **kwargs: Any, ) -> None: r"""A single-task exact GP model using fixed noise levels.
Fix numerical typo in the examples Changes `heads=128` to `heads=12` in the example which is more realistic.
@@ -29,7 +29,7 @@ class MultiHeadAttention(tf.keras.layers.Layer): between them: ```python - mha = MultiHeadAttention(head_size=128, num_heads=128) + mha = MultiHeadAttention(head_size=128, num_heads=12) query = tf.random.uniform((32, 20, 200)) # (batch_size, query_elements, query_depth) key = tf.random.uniform((32, 15, 300)) # (batch_size, key_elements, key_depth) @@ -41,7 +41,7 @@ class MultiHeadAttention(tf.keras.layers.Layer): If `value` is not given then internally `value = key` will be used: ```python - mha = MultiHeadAttention(head_size=128, num_heads=128) + mha = MultiHeadAttention(head_size=128, num_heads=12) query = tf.random.uniform((32, 20, 200)) # (batch_size, query_elements, query_depth) key = tf.random.uniform((32, 15, 300)) # (batch_size, key_elements, key_depth)
chore(buildpacks): update heroku-buildpack-php to v117 See
@@ -36,7 +36,7 @@ download_buildpack https://github.com/heroku/heroku-buildpack-gradle.git download_buildpack https://github.com/heroku/heroku-buildpack-grails.git v20 download_buildpack https://github.com/heroku/heroku-buildpack-play.git v26 download_buildpack https://github.com/heroku/heroku-buildpack-python.git v97 -download_buildpack https://github.com/heroku/heroku-buildpack-php.git v109 +download_buildpack https://github.com/heroku/heroku-buildpack-php.git v117 download_buildpack https://github.com/heroku/heroku-buildpack-clojure.git v75 download_buildpack https://github.com/heroku/heroku-buildpack-scala.git v72 download_buildpack https://github.com/heroku/heroku-buildpack-go.git v46
Update views.py Fixing typo in 'version'
@@ -42,7 +42,7 @@ class InstalledPluginsAPIView(APIView): 'author': plugin_app_config.author, 'author_email': plugin_app_config.author_email, 'description': plugin_app_config.description, - 'verison': plugin_app_config.version + 'version': plugin_app_config.version } def get(self, request, format=None):
Elantra 2021: add missing FW add FW for 82e9cdd3f43bf83e|2021-05-15--02-42-51 (test route)
@@ -1232,6 +1232,7 @@ FW_VERSIONS = { b'\xf1\x87\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf1\x00CN7 MDPS C 1.00 1.06 \x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 4CNDC106', b'\xf1\x8756310/AA070\xf1\x00CN7 MDPS C 1.00 1.06 56310/AA070 4CNDC106', b'\xf1\x8756310AA050\x00\xf1\x00CN7 MDPS C 1.00 1.06 56310AA050\x00 4CNDC106', + b'\xf1\x8756310AA050\x00\xf1\x00CN7 MDPS C 1.00 1.06 56310AA050\x00 4CNDC106\xf1\xa01.06', ], (Ecu.fwdCamera, 0x7c4, None): [ b'\xf1\x00CN7 MFC AT USA LHD 1.00 1.00 99210-AB000 200819', @@ -1244,6 +1245,7 @@ FW_VERSIONS = { b'\xf1\x8758910-AA800\xf1\x00CN ESC \t 104 \x08\x03 58910-AA800', b'\xf1\x8758910-AB800\xf1\x00CN ESC \t 101 \x10\x03 58910-AB800', b'\xf1\x8758910-AA800\xf1\x00CN ESC \t 105 \x10\x03 58910-AA800', + b'\xf1\x8758910-AB800\xf1\x00CN ESC \t 101 \x10\x03 58910-AB800\xf1\xa01.01', ], (Ecu.transmission, 0x7e1, None): [ b'\xf1\x00HT6WA280BLHT6VA640A1CCN0N20NS5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',